lang
stringclasses 2
values | license
stringclasses 13
values | stderr
stringlengths 0
343
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 6
87.7k
| new_contents
stringlengths 0
6.23M
| new_file
stringlengths 3
311
| old_contents
stringlengths 0
6.23M
| message
stringlengths 6
9.1k
| old_file
stringlengths 3
311
| subject
stringlengths 0
4k
| git_diff
stringlengths 0
6.31M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | mit | f8a2ad482f05b58bc7cd56bbf14b3360ff182e64 | 0 | navalev/azure-sdk-for-java,selvasingh/azure-sdk-for-java,navalev/azure-sdk-for-java,Azure/azure-sdk-for-java,selvasingh/azure-sdk-for-java,Azure/azure-sdk-for-java,navalev/azure-sdk-for-java,navalev/azure-sdk-for-java,selvasingh/azure-sdk-for-java,navalev/azure-sdk-for-java,Azure/azure-sdk-for-java,Azure/azure-sdk-for-java,Azure/azure-sdk-for-java | /**
*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*
*/
package com.microsoft.rest;
import com.microsoft.rest.retry.RetryHandler;
import com.microsoft.rest.serializer.JacksonMapperAdapter;
import java.net.CookieManager;
import java.net.CookiePolicy;
import okhttp3.JavaNetCookieJar;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
/**
* ServiceClient is the abstraction for accessing REST operations and their payload data types.
*/
public abstract class ServiceClient {
/** The HTTP client. */
private OkHttpClient httpClient;
/** The Retrofit instance. */
private Retrofit retrofit;
/** The adapter to a Jackson {@link com.fasterxml.jackson.databind.ObjectMapper}. */
private JacksonMapperAdapter mapperAdapter;
/**
* Initializes a new instance of the ServiceClient class.
*
* @param baseUrl the service endpoint
*/
protected ServiceClient(String baseUrl) {
this(baseUrl, new OkHttpClient.Builder(), new Retrofit.Builder());
}
/**
* Initializes a new instance of the ServiceClient class.
*
* @param baseUrl the service base uri
* @param clientBuilder the http client builder
* @param restBuilder the retrofit rest client builder
*/
protected ServiceClient(String baseUrl, OkHttpClient.Builder clientBuilder, Retrofit.Builder restBuilder) {
if (clientBuilder == null) {
throw new IllegalArgumentException("clientBuilder == null");
}
if (restBuilder == null) {
throw new IllegalArgumentException("restBuilder == null");
}
this.mapperAdapter = new JacksonMapperAdapter();
CookieManager cookieManager = new CookieManager();
cookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ALL);
this.httpClient = clientBuilder
.cookieJar(new JavaNetCookieJar(cookieManager))
.addInterceptor(new UserAgentInterceptor())
.addInterceptor(new BaseUrlHandler())
.addInterceptor(new CustomHeadersInterceptor())
.addInterceptor(new RetryHandler())
.build();
this.retrofit = restBuilder
.baseUrl(baseUrl)
.client(httpClient)
.addConverterFactory(mapperAdapter.getConverterFactory())
.build();
}
/**
* @return the Retrofit instance.
*/
public Retrofit retrofit() {
return this.retrofit;
}
/**
* @return the HTTP client.
*/
public OkHttpClient httpClient() {
return this.httpClient;
}
/**
* @return the adapter to a Jackson {@link com.fasterxml.jackson.databind.ObjectMapper}.
*/
public JacksonMapperAdapter mapperAdapter() {
return this.mapperAdapter;
}
}
| client-runtime/src/main/java/com/microsoft/rest/ServiceClient.java | /**
*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*
*/
package com.microsoft.rest;
import com.microsoft.rest.retry.RetryHandler;
import com.microsoft.rest.serializer.JacksonMapperAdapter;
import java.net.CookieManager;
import java.net.CookiePolicy;
import okhttp3.JavaNetCookieJar;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
/**
* ServiceClient is the abstraction for accessing REST operations and their payload data types.
*/
public abstract class ServiceClient {
/** The HTTP client. */
private OkHttpClient httpClient;
/** The Retrofit instance. */
private Retrofit retrofit;
/** The adapter to a Jackson {@link com.fasterxml.jackson.databind.ObjectMapper}. */
private JacksonMapperAdapter mapperAdapter;
/**
* Initializes a new instance of the ServiceClient class.
*
* @param baseUrl the service endpoint
*/
protected ServiceClient(String baseUrl) {
this(baseUrl, new OkHttpClient.Builder(), new Retrofit.Builder());
}
/**
* Initializes a new instance of the ServiceClient class.
*
*/
protected ServiceClient(String baseUrl, OkHttpClient.Builder clientBuilder, Retrofit.Builder restBuilder) {
if (clientBuilder == null) {
throw new IllegalArgumentException("clientBuilder == null");
}
if (restBuilder == null) {
throw new IllegalArgumentException("restBuilder == null");
}
this.mapperAdapter = new JacksonMapperAdapter();
CookieManager cookieManager = new CookieManager();
cookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ALL);
this.httpClient = clientBuilder
.cookieJar(new JavaNetCookieJar(cookieManager))
.addInterceptor(new UserAgentInterceptor())
.addInterceptor(new BaseUrlHandler())
.addInterceptor(new CustomHeadersInterceptor())
.addInterceptor(new RetryHandler())
.build();
this.retrofit = restBuilder
.baseUrl(baseUrl)
.client(httpClient)
.addConverterFactory(mapperAdapter.getConverterFactory())
.build();
}
/**
* @return the Retrofit instance.
*/
public Retrofit retrofit() {
return this.retrofit;
}
/**
* @return the HTTP client.
*/
public OkHttpClient httpClient() {
return this.httpClient;
}
/**
* @return the adapter to a Jackson {@link com.fasterxml.jackson.databind.ObjectMapper}.
*/
public JacksonMapperAdapter mapperAdapter() {
return this.mapperAdapter;
}
}
| Adding javadoc gen as a part of build since javadoc generation catches some of the checkstyle and reference issue
| client-runtime/src/main/java/com/microsoft/rest/ServiceClient.java | Adding javadoc gen as a part of build since javadoc generation catches some of the checkstyle and reference issue | <ide><path>lient-runtime/src/main/java/com/microsoft/rest/ServiceClient.java
<ide> /**
<ide> * Initializes a new instance of the ServiceClient class.
<ide> *
<add> * @param baseUrl the service base uri
<add> * @param clientBuilder the http client builder
<add> * @param restBuilder the retrofit rest client builder
<ide> */
<ide> protected ServiceClient(String baseUrl, OkHttpClient.Builder clientBuilder, Retrofit.Builder restBuilder) {
<ide> if (clientBuilder == null) { |
|
Java | apache-2.0 | 2b78a3bf4d6d7cfa12a84de9ea9d0495eccf9a6f | 0 | thomasdarimont/keycloak,mhajas/keycloak,reneploetz/keycloak,hmlnarik/keycloak,gregjones60/keycloak,thomasdarimont/keycloak,didiez/keycloak,vmuzikar/keycloak,chameleon82/keycloak,lennartj/keycloak,iperdomo/keycloak,brat000012001/keycloak,ssilvert/keycloak,mhajas/keycloak,pfiled/keycloak,vmuzikar/keycloak,AOEpeople/keycloak,gregjones60/keycloak,j-bore/keycloak,brat000012001/keycloak,grange74/keycloak,cmoulliard/keycloak,girirajsharma/keycloak,dbarentine/keycloak,mposolda/keycloak,srose/keycloak,mposolda/keycloak,pfiled/keycloak,dbarentine/keycloak,lennartj/keycloak,amalalex/keycloak,pedroigor/keycloak,jpkrohling/keycloak,pedroigor/keycloak,cfsnyder/keycloak,reneploetz/keycloak,lennartj/keycloak,wildfly-security-incubator/keycloak,ppolavar/keycloak,dylanplecki/keycloak,amalalex/keycloak,abstractj/keycloak,girirajsharma/keycloak,stianst/keycloak,arivanajoki/keycloak,iperdomo/keycloak,ahus1/keycloak,AOEpeople/keycloak,wildfly-security-incubator/keycloak,brat000012001/keycloak,hmlnarik/keycloak,srose/keycloak,pedroigor/keycloak,gregjones60/keycloak,srose/keycloak,cmoulliard/keycloak,ssilvert/keycloak,ahus1/keycloak,darranl/keycloak,dylanplecki/keycloak,ppolavar/keycloak,hmlnarik/keycloak,VihreatDeGrona/keycloak,almighty/keycloak,cfsnyder/keycloak,anaerobic/keycloak,keycloak/keycloak,ssilvert/keycloak,agolPL/keycloak,arivanajoki/keycloak,srose/keycloak,grange74/keycloak,manuel-palacio/keycloak,abstractj/keycloak,raehalme/keycloak,jpkrohling/keycloak,pedroigor/keycloak,arivanajoki/keycloak,matzew/keycloak,thomasdarimont/keycloak,reneploetz/keycloak,agolPL/keycloak,ppolavar/keycloak,raehalme/keycloak,raehalme/keycloak,agolPL/keycloak,stianst/keycloak,chameleon82/keycloak,eugene-chow/keycloak,stianst/keycloak,anaerobic/keycloak,cmoulliard/keycloak,manuel-palacio/keycloak,darranl/keycloak,grange74/keycloak,jpkrohling/keycloak,pedroigor/keycloak,thomasdarimont/keycloak,darranl/keycloak,raehalme/keycloak,anaerobi
c/keycloak,ewjmulder/keycloak,srose/keycloak,pfiled/keycloak,hmlnarik/keycloak,mbaluch/keycloak,WebJustDevelopment/keycloak,keycloak/keycloak,WebJustDevelopment/keycloak,ewjmulder/keycloak,ahus1/keycloak,abstractj/keycloak,almighty/keycloak,VihreatDeGrona/keycloak,vmuzikar/keycloak,arivanajoki/keycloak,wildfly-security-incubator/keycloak,ssilvert/keycloak,keycloak/keycloak,eugene-chow/keycloak,vmuzikar/keycloak,WebJustDevelopment/keycloak,eugene-chow/keycloak,vmuzikar/keycloak,AOEpeople/keycloak,amalalex/keycloak,gregjones60/keycloak,thomasdarimont/keycloak,dbarentine/keycloak,WebJustDevelopment/keycloak,mposolda/keycloak,almighty/keycloak,dylanplecki/keycloak,jean-merelis/keycloak,almighty/keycloak,grange74/keycloak,ahus1/keycloak,AOEpeople/keycloak,manuel-palacio/keycloak,mhajas/keycloak,mposolda/keycloak,didiez/keycloak,jean-merelis/keycloak,matzew/keycloak,raehalme/keycloak,ewjmulder/keycloak,reneploetz/keycloak,lennartj/keycloak,brat000012001/keycloak,jpkrohling/keycloak,abstractj/keycloak,cfsnyder/keycloak,ssilvert/keycloak,j-bore/keycloak,abstractj/keycloak,girirajsharma/keycloak,mposolda/keycloak,lkubik/keycloak,pfiled/keycloak,j-bore/keycloak,lkubik/keycloak,thomasdarimont/keycloak,lkubik/keycloak,mbaluch/keycloak,mposolda/keycloak,anaerobic/keycloak,jean-merelis/keycloak,didiez/keycloak,amalalex/keycloak,jpkrohling/keycloak,stianst/keycloak,dbarentine/keycloak,lkubik/keycloak,ppolavar/keycloak,cmoulliard/keycloak,mbaluch/keycloak,matzew/keycloak,iperdomo/keycloak,girirajsharma/keycloak,dylanplecki/keycloak,wildfly-security-incubator/keycloak,brat000012001/keycloak,mbaluch/keycloak,didiez/keycloak,VihreatDeGrona/keycloak,darranl/keycloak,matzew/keycloak,manuel-palacio/keycloak,ahus1/keycloak,mhajas/keycloak,reneploetz/keycloak,keycloak/keycloak,stianst/keycloak,jean-merelis/keycloak,pedroigor/keycloak,ewjmulder/keycloak,mhajas/keycloak,vmuzikar/keycloak,VihreatDeGrona/keycloak,chameleon82/keycloak,keycloak/keycloak,raehalme/keycloak,hmlnarik/keycloak,cfsnyd
er/keycloak,agolPL/keycloak,hmlnarik/keycloak,eugene-chow/keycloak,ahus1/keycloak,chameleon82/keycloak,j-bore/keycloak,iperdomo/keycloak | /*
* JBoss, Home of Professional Open Source.
* Copyright 2012, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.keycloak.testsuite.oauth;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.keycloak.OAuth2Constants;
import org.keycloak.enums.SslRequired;
import org.keycloak.events.Details;
import org.keycloak.events.Errors;
import org.keycloak.events.Event;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.protocol.oidc.OIDCLoginProtocolService;
import org.keycloak.representations.AccessToken;
import org.keycloak.representations.RefreshToken;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.testsuite.AssertEvents;
import org.keycloak.testsuite.OAuthClient;
import org.keycloak.testsuite.OAuthClient.AccessTokenResponse;
import org.keycloak.testsuite.pages.LoginPage;
import org.keycloak.testsuite.rule.KeycloakRule;
import org.keycloak.testsuite.rule.WebResource;
import org.keycloak.testsuite.rule.WebRule;
import org.keycloak.util.BasicAuthHelper;
import org.keycloak.util.Time;
import org.openqa.selenium.WebDriver;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import java.net.URI;
import java.security.PrivateKey;
import java.security.PublicKey;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
* @author <a href="mailto:[email protected]">Stian Thorgersen</a>
*/
public class RefreshTokenTest {
@ClassRule
public static KeycloakRule keycloakRule = new KeycloakRule();
@Rule
public WebRule webRule = new WebRule(this);
@WebResource
protected WebDriver driver;
@WebResource
protected OAuthClient oauth;
@WebResource
protected LoginPage loginPage;
@Rule
public AssertEvents events = new AssertEvents(keycloakRule);
/**
* KEYCLOAK-547
*
* @throws Exception
*/
@Test
public void nullRefreshToken() throws Exception {
Client client = ClientBuilder.newClient();
UriBuilder builder = UriBuilder.fromUri(org.keycloak.testsuite.Constants.AUTH_SERVER_ROOT);
URI uri = OIDCLoginProtocolService.refreshUrl(builder).build("test");
WebTarget target = client.target(uri);
org.keycloak.representations.AccessTokenResponse tokenResponse = null;
{
String header = BasicAuthHelper.createHeader("test-app", "password");
Form form = new Form();
Response response = target.request()
.header(HttpHeaders.AUTHORIZATION, header)
.post(Entity.form(form));
Assert.assertEquals(400, response.getStatus());
response.close();
}
events.clear();
}
@Test
public void refreshTokenRequest() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String codeId = loginEvent.getDetails().get(Details.CODE_ID);
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
AccessToken token = oauth.verifyToken(tokenResponse.getAccessToken());
String refreshTokenString = tokenResponse.getRefreshToken();
RefreshToken refreshToken = oauth.verifyRefreshToken(refreshTokenString);
Event tokenEvent = events.expectCodeToToken(codeId, sessionId).assertEvent();
Assert.assertNotNull(refreshTokenString);
Assert.assertEquals("bearer", tokenResponse.getTokenType());
Assert.assertThat(token.getExpiration() - Time.currentTime(), allOf(greaterThanOrEqualTo(200), lessThanOrEqualTo(350)));
int actual = refreshToken.getExpiration() - Time.currentTime();
Assert.assertThat(actual, allOf(greaterThanOrEqualTo(1799), lessThanOrEqualTo(1800)));
Assert.assertEquals(sessionId, refreshToken.getSessionState());
Thread.sleep(2000);
AccessTokenResponse response = oauth.doRefreshTokenRequest(refreshTokenString, "password");
AccessToken refreshedToken = oauth.verifyToken(response.getAccessToken());
RefreshToken refreshedRefreshToken = oauth.verifyRefreshToken(response.getRefreshToken());
Assert.assertEquals(200, response.getStatusCode());
Assert.assertEquals(sessionId, refreshedToken.getSessionState());
Assert.assertEquals(sessionId, refreshedRefreshToken.getSessionState());
Assert.assertThat(response.getExpiresIn(), allOf(greaterThanOrEqualTo(250), lessThanOrEqualTo(300)));
Assert.assertThat(refreshedToken.getExpiration() - Time.currentTime(), allOf(greaterThanOrEqualTo(250), lessThanOrEqualTo(300)));
Assert.assertThat(refreshedToken.getExpiration() - token.getExpiration(), allOf(greaterThanOrEqualTo(1), lessThanOrEqualTo(3)));
Assert.assertThat(refreshedRefreshToken.getExpiration() - refreshToken.getExpiration(), allOf(greaterThanOrEqualTo(1), lessThanOrEqualTo(3)));
Assert.assertNotEquals(token.getId(), refreshedToken.getId());
Assert.assertNotEquals(refreshToken.getId(), refreshedRefreshToken.getId());
Assert.assertEquals("bearer", response.getTokenType());
Assert.assertEquals(keycloakRule.getUser("test", "test-user@localhost").getId(), refreshedToken.getSubject());
Assert.assertNotEquals("test-user@localhost", refreshedToken.getSubject());
Assert.assertEquals(1, refreshedToken.getRealmAccess().getRoles().size());
Assert.assertTrue(refreshedToken.getRealmAccess().isUserInRole("user"));
Assert.assertEquals(1, refreshedToken.getResourceAccess(oauth.getClientId()).getRoles().size());
Assert.assertTrue(refreshedToken.getResourceAccess(oauth.getClientId()).isUserInRole("customer-user"));
Event refreshEvent = events.expectRefresh(tokenEvent.getDetails().get(Details.REFRESH_TOKEN_ID), sessionId).assertEvent();
Assert.assertNotEquals(tokenEvent.getDetails().get(Details.TOKEN_ID), refreshEvent.getDetails().get(Details.TOKEN_ID));
Assert.assertNotEquals(tokenEvent.getDetails().get(Details.REFRESH_TOKEN_ID), refreshEvent.getDetails().get(Details.UPDATED_REFRESH_TOKEN_ID));
}
PrivateKey privateKey;
PublicKey publicKey;
@Test
public void refreshTokenRealmKeysChanged() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String codeId = loginEvent.getDetails().get(Details.CODE_ID);
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
AccessTokenResponse response = oauth.doAccessTokenRequest(code, "password");
String refreshTokenString = response.getRefreshToken();
RefreshToken refreshToken = oauth.verifyRefreshToken(refreshTokenString);
events.expectCodeToToken(codeId, sessionId).assertEvent();
try {
keycloakRule.configure(new KeycloakRule.KeycloakSetup() {
@Override
public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
privateKey = appRealm.getPrivateKey();
publicKey = appRealm.getPublicKey();
KeycloakModelUtils.generateRealmKeys(appRealm);
}
});
response = oauth.doRefreshTokenRequest(refreshTokenString, "password");
assertEquals(400, response.getStatusCode());
assertEquals("invalid_grant", response.getError());
events.expectRefresh(refreshToken.getId(), sessionId).user((String) null).session((String) null).clearDetails().error(Errors.INVALID_TOKEN).assertEvent();
} finally {
keycloakRule.configure(new KeycloakRule.KeycloakSetup() {
@Override
public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
appRealm.setPrivateKey(privateKey);
appRealm.setPublicKey(publicKey);
}
});
}
}
@Test
public void refreshTokenUserSessionExpired() {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
OAuthClient.AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
events.poll();
String refreshId = oauth.verifyRefreshToken(tokenResponse.getRefreshToken()).getId();
keycloakRule.removeUserSession(sessionId);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
assertEquals(400, tokenResponse.getStatusCode());
assertNull(tokenResponse.getAccessToken());
assertNull(tokenResponse.getRefreshToken());
events.expectRefresh(refreshId, sessionId).error(Errors.INVALID_TOKEN);
events.clear();
}
@Test
public void testUserSessionRefreshAndIdle() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
OAuthClient.AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
events.poll();
String refreshId = oauth.verifyRefreshToken(tokenResponse.getRefreshToken()).getId();
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
UserSessionModel userSession = session.sessions().getUserSession(realm, sessionId);
int last = userSession.getLastSessionRefresh();
session.getTransaction().commit();
session.close();
Thread.sleep(2000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
AccessToken refreshedToken = oauth.verifyToken(tokenResponse.getAccessToken());
RefreshToken refreshedRefreshToken = oauth.verifyRefreshToken(tokenResponse.getRefreshToken());
Assert.assertEquals(200, tokenResponse.getStatusCode());
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
userSession = session.sessions().getUserSession(realm, sessionId);
int next = userSession.getLastSessionRefresh();
session.getTransaction().commit();
session.close();
Assert.assertNotEquals(last, next);
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
int lastAccessTokenLifespan = realm.getAccessTokenLifespan();
realm.setAccessTokenLifespan(100000);
session.getTransaction().commit();
session.close();
Thread.sleep(2000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
userSession = session.sessions().getUserSession(realm, sessionId);
next = userSession.getLastSessionRefresh();
session.getTransaction().commit();
session.close();
// lastSEssionRefresh should be updated because access code lifespan is higher than sso idle timeout
Assert.assertThat(next, allOf(greaterThan(last), lessThan(last + 50)));
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
int originalIdle = realm.getSsoSessionIdleTimeout();
realm.setSsoSessionIdleTimeout(1);
session.getTransaction().commit();
session.close();
events.clear();
Thread.sleep(2000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
// test idle timeout
assertEquals(400, tokenResponse.getStatusCode());
assertNull(tokenResponse.getAccessToken());
assertNull(tokenResponse.getRefreshToken());
events.expectRefresh(refreshId, sessionId).error(Errors.INVALID_TOKEN);
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
realm.setSsoSessionIdleTimeout(originalIdle);
realm.setAccessTokenLifespan(lastAccessTokenLifespan);
session.getTransaction().commit();
session.close();
events.clear();
}
@Test
public void refreshTokenUserSessionMaxLifespan() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
OAuthClient.AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
events.poll();
String refreshId = oauth.verifyRefreshToken(tokenResponse.getRefreshToken()).getId();
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
int maxLifespan = realm.getSsoSessionMaxLifespan();
realm.setSsoSessionMaxLifespan(1);
session.getTransaction().commit();
session.close();
Thread.sleep(1000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
assertEquals(400, tokenResponse.getStatusCode());
assertNull(tokenResponse.getAccessToken());
assertNull(tokenResponse.getRefreshToken());
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
realm.setSsoSessionMaxLifespan(maxLifespan);
session.getTransaction().commit();
session.close();
events.expectRefresh(refreshId, sessionId).error(Errors.INVALID_TOKEN);
events.clear();
}
@Test
public void testCheckSsl() throws Exception {
Client client = ClientBuilder.newClient();
UriBuilder builder = UriBuilder.fromUri(org.keycloak.testsuite.Constants.AUTH_SERVER_ROOT);
URI grantUri = OIDCLoginProtocolService.grantAccessTokenUrl(builder).build("test");
WebTarget grantTarget = client.target(grantUri);
builder = UriBuilder.fromUri(org.keycloak.testsuite.Constants.AUTH_SERVER_ROOT);
URI uri = OIDCLoginProtocolService.refreshUrl(builder).build("test");
WebTarget refreshTarget = client.target(uri);
String refreshToken = null;
{
Response response = executeGrantAccessTokenRequest(grantTarget);
Assert.assertEquals(200, response.getStatus());
org.keycloak.representations.AccessTokenResponse tokenResponse = response.readEntity(org.keycloak.representations.AccessTokenResponse.class);
refreshToken = tokenResponse.getRefreshToken();
response.close();
}
{
Response response = executeRefreshToken(refreshTarget, refreshToken);
Assert.assertEquals(200, response.getStatus());
org.keycloak.representations.AccessTokenResponse tokenResponse = response.readEntity(org.keycloak.representations.AccessTokenResponse.class);
refreshToken = tokenResponse.getRefreshToken();
response.close();
}
{ // test checkSsl
{
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
realm.setSslRequired(SslRequired.ALL);
session.getTransaction().commit();
session.close();
}
Response response = executeRefreshToken(refreshTarget, refreshToken);
Assert.assertEquals(403, response.getStatus());
response.close();
{
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
realm.setSslRequired(SslRequired.EXTERNAL);
session.getTransaction().commit();
session.close();
}
}
{
Response response = executeRefreshToken(refreshTarget, refreshToken);
Assert.assertEquals(200, response.getStatus());
org.keycloak.representations.AccessTokenResponse tokenResponse = response.readEntity(org.keycloak.representations.AccessTokenResponse.class);
refreshToken = tokenResponse.getRefreshToken();
response.close();
}
client.close();
events.clear();
}
protected Response executeRefreshToken(WebTarget refreshTarget, String refreshToken) {
String header = BasicAuthHelper.createHeader("test-app", "password");
Form form = new Form();
form.param("refresh_token", refreshToken);
return refreshTarget.request()
.header(HttpHeaders.AUTHORIZATION, header)
.post(Entity.form(form));
}
protected Response executeGrantAccessTokenRequest(WebTarget grantTarget) {
String header = BasicAuthHelper.createHeader("test-app", "password");
Form form = new Form();
form.param("username", "test-user@localhost")
.param("password", "password");
return grantTarget.request()
.header(HttpHeaders.AUTHORIZATION, header)
.post(Entity.form(form));
}
}
| testsuite/integration/src/test/java/org/keycloak/testsuite/oauth/RefreshTokenTest.java | /*
* JBoss, Home of Professional Open Source.
* Copyright 2012, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.keycloak.testsuite.oauth;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.keycloak.OAuth2Constants;
import org.keycloak.enums.SslRequired;
import org.keycloak.events.Details;
import org.keycloak.events.Errors;
import org.keycloak.events.Event;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.protocol.oidc.OIDCLoginProtocolService;
import org.keycloak.representations.AccessToken;
import org.keycloak.representations.RefreshToken;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.testsuite.AssertEvents;
import org.keycloak.testsuite.OAuthClient;
import org.keycloak.testsuite.OAuthClient.AccessTokenResponse;
import org.keycloak.testsuite.pages.LoginPage;
import org.keycloak.testsuite.rule.KeycloakRule;
import org.keycloak.testsuite.rule.WebResource;
import org.keycloak.testsuite.rule.WebRule;
import org.keycloak.util.BasicAuthHelper;
import org.keycloak.util.Time;
import org.openqa.selenium.WebDriver;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import java.net.URI;
import java.security.PrivateKey;
import java.security.PublicKey;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
* @author <a href="mailto:[email protected]">Stian Thorgersen</a>
*/
public class RefreshTokenTest {
@ClassRule
public static KeycloakRule keycloakRule = new KeycloakRule();
@Rule
public WebRule webRule = new WebRule(this);
@WebResource
protected WebDriver driver;
@WebResource
protected OAuthClient oauth;
@WebResource
protected LoginPage loginPage;
@Rule
public AssertEvents events = new AssertEvents(keycloakRule);
/**
* KEYCLOAK-547
*
* @throws Exception
*/
@Test
public void nullRefreshToken() throws Exception {
Client client = ClientBuilder.newClient();
UriBuilder builder = UriBuilder.fromUri(org.keycloak.testsuite.Constants.AUTH_SERVER_ROOT);
URI uri = OIDCLoginProtocolService.refreshUrl(builder).build("test");
WebTarget target = client.target(uri);
org.keycloak.representations.AccessTokenResponse tokenResponse = null;
{
String header = BasicAuthHelper.createHeader("test-app", "password");
Form form = new Form();
Response response = target.request()
.header(HttpHeaders.AUTHORIZATION, header)
.post(Entity.form(form));
Assert.assertEquals(400, response.getStatus());
response.close();
}
events.clear();
}
@Test
public void refreshTokenRequest() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String codeId = loginEvent.getDetails().get(Details.CODE_ID);
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
AccessToken token = oauth.verifyToken(tokenResponse.getAccessToken());
String refreshTokenString = tokenResponse.getRefreshToken();
RefreshToken refreshToken = oauth.verifyRefreshToken(refreshTokenString);
Event tokenEvent = events.expectCodeToToken(codeId, sessionId).assertEvent();
Assert.assertNotNull(refreshTokenString);
Assert.assertEquals("bearer", tokenResponse.getTokenType());
Assert.assertThat(token.getExpiration() - Time.currentTime(), allOf(greaterThanOrEqualTo(250), lessThanOrEqualTo(300)));
int actual = refreshToken.getExpiration() - Time.currentTime();
Assert.assertThat(actual, allOf(greaterThanOrEqualTo(1799), lessThanOrEqualTo(1800)));
Assert.assertEquals(sessionId, refreshToken.getSessionState());
Thread.sleep(2000);
AccessTokenResponse response = oauth.doRefreshTokenRequest(refreshTokenString, "password");
AccessToken refreshedToken = oauth.verifyToken(response.getAccessToken());
RefreshToken refreshedRefreshToken = oauth.verifyRefreshToken(response.getRefreshToken());
Assert.assertEquals(200, response.getStatusCode());
Assert.assertEquals(sessionId, refreshedToken.getSessionState());
Assert.assertEquals(sessionId, refreshedRefreshToken.getSessionState());
Assert.assertThat(response.getExpiresIn(), allOf(greaterThanOrEqualTo(250), lessThanOrEqualTo(300)));
Assert.assertThat(refreshedToken.getExpiration() - Time.currentTime(), allOf(greaterThanOrEqualTo(250), lessThanOrEqualTo(300)));
Assert.assertThat(refreshedToken.getExpiration() - token.getExpiration(), allOf(greaterThanOrEqualTo(1), lessThanOrEqualTo(3)));
Assert.assertThat(refreshedRefreshToken.getExpiration() - refreshToken.getExpiration(), allOf(greaterThanOrEqualTo(1), lessThanOrEqualTo(3)));
Assert.assertNotEquals(token.getId(), refreshedToken.getId());
Assert.assertNotEquals(refreshToken.getId(), refreshedRefreshToken.getId());
Assert.assertEquals("bearer", response.getTokenType());
Assert.assertEquals(keycloakRule.getUser("test", "test-user@localhost").getId(), refreshedToken.getSubject());
Assert.assertNotEquals("test-user@localhost", refreshedToken.getSubject());
Assert.assertEquals(1, refreshedToken.getRealmAccess().getRoles().size());
Assert.assertTrue(refreshedToken.getRealmAccess().isUserInRole("user"));
Assert.assertEquals(1, refreshedToken.getResourceAccess(oauth.getClientId()).getRoles().size());
Assert.assertTrue(refreshedToken.getResourceAccess(oauth.getClientId()).isUserInRole("customer-user"));
Event refreshEvent = events.expectRefresh(tokenEvent.getDetails().get(Details.REFRESH_TOKEN_ID), sessionId).assertEvent();
Assert.assertNotEquals(tokenEvent.getDetails().get(Details.TOKEN_ID), refreshEvent.getDetails().get(Details.TOKEN_ID));
Assert.assertNotEquals(tokenEvent.getDetails().get(Details.REFRESH_TOKEN_ID), refreshEvent.getDetails().get(Details.UPDATED_REFRESH_TOKEN_ID));
}
PrivateKey privateKey;
PublicKey publicKey;
@Test
public void refreshTokenRealmKeysChanged() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String codeId = loginEvent.getDetails().get(Details.CODE_ID);
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
AccessTokenResponse response = oauth.doAccessTokenRequest(code, "password");
String refreshTokenString = response.getRefreshToken();
RefreshToken refreshToken = oauth.verifyRefreshToken(refreshTokenString);
events.expectCodeToToken(codeId, sessionId).assertEvent();
try {
keycloakRule.configure(new KeycloakRule.KeycloakSetup() {
@Override
public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
privateKey = appRealm.getPrivateKey();
publicKey = appRealm.getPublicKey();
KeycloakModelUtils.generateRealmKeys(appRealm);
}
});
response = oauth.doRefreshTokenRequest(refreshTokenString, "password");
assertEquals(400, response.getStatusCode());
assertEquals("invalid_grant", response.getError());
events.expectRefresh(refreshToken.getId(), sessionId).user((String) null).session((String) null).clearDetails().error(Errors.INVALID_TOKEN).assertEvent();
} finally {
keycloakRule.configure(new KeycloakRule.KeycloakSetup() {
@Override
public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
appRealm.setPrivateKey(privateKey);
appRealm.setPublicKey(publicKey);
}
});
}
}
@Test
public void refreshTokenUserSessionExpired() {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
OAuthClient.AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
events.poll();
String refreshId = oauth.verifyRefreshToken(tokenResponse.getRefreshToken()).getId();
keycloakRule.removeUserSession(sessionId);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
assertEquals(400, tokenResponse.getStatusCode());
assertNull(tokenResponse.getAccessToken());
assertNull(tokenResponse.getRefreshToken());
events.expectRefresh(refreshId, sessionId).error(Errors.INVALID_TOKEN);
events.clear();
}
@Test
public void testUserSessionRefreshAndIdle() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
OAuthClient.AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
events.poll();
String refreshId = oauth.verifyRefreshToken(tokenResponse.getRefreshToken()).getId();
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
UserSessionModel userSession = session.sessions().getUserSession(realm, sessionId);
int last = userSession.getLastSessionRefresh();
session.getTransaction().commit();
session.close();
Thread.sleep(2000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
AccessToken refreshedToken = oauth.verifyToken(tokenResponse.getAccessToken());
RefreshToken refreshedRefreshToken = oauth.verifyRefreshToken(tokenResponse.getRefreshToken());
Assert.assertEquals(200, tokenResponse.getStatusCode());
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
userSession = session.sessions().getUserSession(realm, sessionId);
int next = userSession.getLastSessionRefresh();
session.getTransaction().commit();
session.close();
Assert.assertNotEquals(last, next);
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
int lastAccessTokenLifespan = realm.getAccessTokenLifespan();
realm.setAccessTokenLifespan(100000);
session.getTransaction().commit();
session.close();
Thread.sleep(2000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
userSession = session.sessions().getUserSession(realm, sessionId);
next = userSession.getLastSessionRefresh();
session.getTransaction().commit();
session.close();
// lastSEssionRefresh should be updated because access code lifespan is higher than sso idle timeout
Assert.assertThat(next, allOf(greaterThan(last), lessThan(last + 6)));
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
int originalIdle = realm.getSsoSessionIdleTimeout();
realm.setSsoSessionIdleTimeout(1);
session.getTransaction().commit();
session.close();
events.clear();
Thread.sleep(2000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
// test idle timeout
assertEquals(400, tokenResponse.getStatusCode());
assertNull(tokenResponse.getAccessToken());
assertNull(tokenResponse.getRefreshToken());
events.expectRefresh(refreshId, sessionId).error(Errors.INVALID_TOKEN);
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
realm.setSsoSessionIdleTimeout(originalIdle);
realm.setAccessTokenLifespan(lastAccessTokenLifespan);
session.getTransaction().commit();
session.close();
events.clear();
}
@Test
public void refreshTokenUserSessionMaxLifespan() throws Exception {
oauth.doLogin("test-user@localhost", "password");
Event loginEvent = events.expectLogin().assertEvent();
String sessionId = loginEvent.getSessionId();
String code = oauth.getCurrentQuery().get(OAuth2Constants.CODE);
OAuthClient.AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password");
events.poll();
String refreshId = oauth.verifyRefreshToken(tokenResponse.getRefreshToken()).getId();
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
int maxLifespan = realm.getSsoSessionMaxLifespan();
realm.setSsoSessionMaxLifespan(1);
session.getTransaction().commit();
session.close();
Thread.sleep(1000);
tokenResponse = oauth.doRefreshTokenRequest(tokenResponse.getRefreshToken(), "password");
assertEquals(400, tokenResponse.getStatusCode());
assertNull(tokenResponse.getAccessToken());
assertNull(tokenResponse.getRefreshToken());
session = keycloakRule.startSession();
realm = session.realms().getRealmByName("test");
realm.setSsoSessionMaxLifespan(maxLifespan);
session.getTransaction().commit();
session.close();
events.expectRefresh(refreshId, sessionId).error(Errors.INVALID_TOKEN);
events.clear();
}
@Test
public void testCheckSsl() throws Exception {
Client client = ClientBuilder.newClient();
UriBuilder builder = UriBuilder.fromUri(org.keycloak.testsuite.Constants.AUTH_SERVER_ROOT);
URI grantUri = OIDCLoginProtocolService.grantAccessTokenUrl(builder).build("test");
WebTarget grantTarget = client.target(grantUri);
builder = UriBuilder.fromUri(org.keycloak.testsuite.Constants.AUTH_SERVER_ROOT);
URI uri = OIDCLoginProtocolService.refreshUrl(builder).build("test");
WebTarget refreshTarget = client.target(uri);
String refreshToken = null;
{
Response response = executeGrantAccessTokenRequest(grantTarget);
Assert.assertEquals(200, response.getStatus());
org.keycloak.representations.AccessTokenResponse tokenResponse = response.readEntity(org.keycloak.representations.AccessTokenResponse.class);
refreshToken = tokenResponse.getRefreshToken();
response.close();
}
{
Response response = executeRefreshToken(refreshTarget, refreshToken);
Assert.assertEquals(200, response.getStatus());
org.keycloak.representations.AccessTokenResponse tokenResponse = response.readEntity(org.keycloak.representations.AccessTokenResponse.class);
refreshToken = tokenResponse.getRefreshToken();
response.close();
}
{ // test checkSsl
{
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
realm.setSslRequired(SslRequired.ALL);
session.getTransaction().commit();
session.close();
}
Response response = executeRefreshToken(refreshTarget, refreshToken);
Assert.assertEquals(403, response.getStatus());
response.close();
{
KeycloakSession session = keycloakRule.startSession();
RealmModel realm = session.realms().getRealmByName("test");
realm.setSslRequired(SslRequired.EXTERNAL);
session.getTransaction().commit();
session.close();
}
}
{
Response response = executeRefreshToken(refreshTarget, refreshToken);
Assert.assertEquals(200, response.getStatus());
org.keycloak.representations.AccessTokenResponse tokenResponse = response.readEntity(org.keycloak.representations.AccessTokenResponse.class);
refreshToken = tokenResponse.getRefreshToken();
response.close();
}
client.close();
events.clear();
}
protected Response executeRefreshToken(WebTarget refreshTarget, String refreshToken) {
String header = BasicAuthHelper.createHeader("test-app", "password");
Form form = new Form();
form.param("refresh_token", refreshToken);
return refreshTarget.request()
.header(HttpHeaders.AUTHORIZATION, header)
.post(Entity.form(form));
}
protected Response executeGrantAccessTokenRequest(WebTarget grantTarget) {
String header = BasicAuthHelper.createHeader("test-app", "password");
Form form = new Form();
form.param("username", "test-user@localhost")
.param("password", "password");
return grantTarget.request()
.header(HttpHeaders.AUTHORIZATION, header)
.post(Entity.form(form));
}
}
| Tweaks to tests that fail on travis
| testsuite/integration/src/test/java/org/keycloak/testsuite/oauth/RefreshTokenTest.java | Tweaks to tests that fail on travis | <ide><path>estsuite/integration/src/test/java/org/keycloak/testsuite/oauth/RefreshTokenTest.java
<ide>
<ide> Assert.assertEquals("bearer", tokenResponse.getTokenType());
<ide>
<del> Assert.assertThat(token.getExpiration() - Time.currentTime(), allOf(greaterThanOrEqualTo(250), lessThanOrEqualTo(300)));
<add> Assert.assertThat(token.getExpiration() - Time.currentTime(), allOf(greaterThanOrEqualTo(200), lessThanOrEqualTo(350)));
<ide> int actual = refreshToken.getExpiration() - Time.currentTime();
<ide> Assert.assertThat(actual, allOf(greaterThanOrEqualTo(1799), lessThanOrEqualTo(1800)));
<ide>
<ide> session.close();
<ide>
<ide> // lastSEssionRefresh should be updated because access code lifespan is higher than sso idle timeout
<del> Assert.assertThat(next, allOf(greaterThan(last), lessThan(last + 6)));
<add> Assert.assertThat(next, allOf(greaterThan(last), lessThan(last + 50)));
<ide>
<ide> session = keycloakRule.startSession();
<ide> realm = session.realms().getRealmByName("test"); |
|
JavaScript | apache-2.0 | 91ae0b0a83565e66058afb76f331c702582c9ff6 | 0 | netfishx/new_react_rockstar | ccc4a4b6-2f8c-11e5-bfe4-34363bc765d8 | hello.js | ccbe666e-2f8c-11e5-8d25-34363bc765d8 | ccc4a4b6-2f8c-11e5-bfe4-34363bc765d8 | hello.js | ccc4a4b6-2f8c-11e5-bfe4-34363bc765d8 | <ide><path>ello.js
<del>ccbe666e-2f8c-11e5-8d25-34363bc765d8
<add>ccc4a4b6-2f8c-11e5-bfe4-34363bc765d8 |
|
Java | apache-2.0 | 4a6366321b6988810c6a4f0e41441210f5d7367e | 0 | yangfuhai/jboot,yangfuhai/jboot | /**
* Copyright (c) 2015-2019, Michael Yang 杨福海 ([email protected]).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.jboot.core.spi;
import io.jboot.utils.AnnotationUtil;
import io.jboot.utils.ClassScanner;
import io.jboot.utils.StrUtil;
import java.util.Iterator;
import java.util.List;
import java.util.ServiceLoader;
/**
* SPI 扩展加载器
* <p>
* 使用方法:
* <p>
* 第一步:编写支持扩展点的类,例如MyJbootRpc extends Jbootrpc。
* 第二步:给该类添加上注解 JbootSpi, 例如 @JbootSpi("myrpc") MyJbootRpc extends Jbootrpc ...
* 第三步:给jboot.properties配置上类型,jboot.rpc.type = myrpc
* <p>
* 通过这三步,就可以扩展自己的Jbootrpc实现
*/
public class JbootSpiLoader {
/**
* 通过 SPI 去加载相应的扩展子类
*
* @param clazz
* @param spiName
* @param <T>
* @return
*/
public static <T> T load(Class<T> clazz, String spiName) {
T returnObject = loadByServiceLoader(clazz, spiName);
if (returnObject != null) return returnObject;
if (StrUtil.isBlank(spiName)) return null;
List<Class<T>> classes = ClassScanner.scanSubClass(clazz);
if (classes == null || classes.isEmpty()) return null;
for (Class<T> c : classes) {
JbootSpi spiConfig = c.getAnnotation(JbootSpi.class);
if (spiConfig == null) {
continue;
}
if (spiName.equals(AnnotationUtil.get(spiConfig.value()))) {
return returnObject;
}
}
return null;
}
/**
* 通过 ServiceLoader 加载
*
* @param clazz
* @param spiName
* @param <T>
* @return
*/
public static <T> T loadByServiceLoader(Class<T> clazz, String spiName) {
ServiceLoader<T> serviceLoader = ServiceLoader.load(clazz);
Iterator<T> iterator = serviceLoader.iterator();
while (iterator.hasNext()) {
T returnObject = iterator.next();
if (spiName == null) {
return returnObject;
}
JbootSpi spiConfig = returnObject.getClass().getAnnotation(JbootSpi.class);
if (spiConfig == null) {
continue;
}
if (spiName.equals(AnnotationUtil.get(spiConfig.value()))) {
return returnObject;
}
}
return null;
}
}
| src/main/java/io/jboot/core/spi/JbootSpiLoader.java | /**
* Copyright (c) 2015-2019, Michael Yang 杨福海 ([email protected]).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.jboot.core.spi;
import io.jboot.utils.AnnotationUtil;
import java.util.Iterator;
import java.util.ServiceLoader;
/**
* SPI 扩展加载器
* <p>
* 使用方法:
* <p>
* 第一步:编写支持扩展点的类,例如MyJbootRpc extends Jbootrpc。
* 第二步:给该类添加上注解 JbootSpi, 例如 @JbootSpi("myrpc") MyJbootRpc extends Jbootrpc ...
* 第三步:给jboot.properties配置上类型,jboot.rpc.type = myrpc
* <p>
* 通过这三步,就可以扩展自己的Jbootrpc实现
*/
public class JbootSpiLoader {
/**
* 通过 SPI 去加载相应的扩展子类
*
* @param clazz
* @param spiName
* @param <T>
* @return
*/
public static <T> T load(Class<T> clazz, String spiName) {
ServiceLoader<T> serviceLoader = ServiceLoader.load(clazz);
Iterator<T> iterator = serviceLoader.iterator();
while (iterator.hasNext()) {
T returnObject = iterator.next();
if (spiName == null) {
return returnObject;
}
JbootSpi spiConfig = returnObject.getClass().getAnnotation(JbootSpi.class);
if (spiConfig == null) {
continue;
}
if (spiName.equals(AnnotationUtil.get(spiConfig.value()))) {
return returnObject;
}
}
return null;
}
}
| v2.0-rc.4
| src/main/java/io/jboot/core/spi/JbootSpiLoader.java | v2.0-rc.4 | <ide><path>rc/main/java/io/jboot/core/spi/JbootSpiLoader.java
<ide> package io.jboot.core.spi;
<ide>
<ide> import io.jboot.utils.AnnotationUtil;
<add>import io.jboot.utils.ClassScanner;
<add>import io.jboot.utils.StrUtil;
<ide>
<ide> import java.util.Iterator;
<add>import java.util.List;
<ide> import java.util.ServiceLoader;
<ide>
<ide> /**
<ide> * @return
<ide> */
<ide> public static <T> T load(Class<T> clazz, String spiName) {
<add> T returnObject = loadByServiceLoader(clazz, spiName);
<add> if (returnObject != null) return returnObject;
<add>
<add> if (StrUtil.isBlank(spiName)) return null;
<add>
<add> List<Class<T>> classes = ClassScanner.scanSubClass(clazz);
<add> if (classes == null || classes.isEmpty()) return null;
<add>
<add> for (Class<T> c : classes) {
<add> JbootSpi spiConfig = c.getAnnotation(JbootSpi.class);
<add> if (spiConfig == null) {
<add> continue;
<add> }
<add>
<add> if (spiName.equals(AnnotationUtil.get(spiConfig.value()))) {
<add> return returnObject;
<add> }
<add> }
<add> return null;
<add> }
<add>
<add> /**
<add> * 通过 ServiceLoader 加载
<add> *
<add> * @param clazz
<add> * @param spiName
<add> * @param <T>
<add> * @return
<add> */
<add> public static <T> T loadByServiceLoader(Class<T> clazz, String spiName) {
<ide> ServiceLoader<T> serviceLoader = ServiceLoader.load(clazz);
<ide> Iterator<T> iterator = serviceLoader.iterator();
<ide> |
|
Java | bsd-3-clause | 73c638710c4bd5b1fbe86ad8b455d6e80a6c6b52 | 0 | Peragore/Team3373_2013 | /*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.DriverStationLCD.Line;
//import edu.wpi.first.wpilibj.RobotDrive;
//import edu.wpi.first.wpilibj.SimpleRobot;
//import edu.wpi.first.wpilibj.templates.Shooter;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the SimpleRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Team3373 extends SimpleRobot{
/**
* This function is called once each time the robot enters autonomous mode.
*/
int StageOneMotorPWM = 1; //Declares channel of StageOne PWM
int StageTwoMotorPWM = 2; //Declares channel of StageTwo PWM
Talon StageOneTalon = new Talon(1, 1); //Creates instance of StageOne PWM
Talon StageTwoTalon = new Talon(1, 2); //Creates instance of StageTwo PWM
DriverStationLCD LCD = DriverStationLCD.getInstance();
//SmartDashboard smartDashboard;
Joystick shootStick = new Joystick(2);
Shooter objShooter = new Shooter(this);
//Deadband objDeadband = new Deadband();
Timer robotTimer = new Timer();
/************************
* XBOX Shooter Buttons *
* *********************/
boolean shootA;
boolean shootB;
boolean shootX;
boolean shootY;
boolean shootRB;
boolean shootLB;
boolean shootBack;
boolean shootStart;
boolean test;
/************************
* XBOX Shooter Axes *
* *********************/
double shootLX = shootStick.getRawAxis(1);
double shootLY = shootStick.getRawAxis(2);
double shootTriggers = shootStick.getRawAxis(3);
double shootRX = shootStick.getRawAxis(4);
double shootRY = shootStick.getRawAxis(5);
double shootDP = shootStick.getRawAxis(6);
/*********************************
* Math/Shooter Action Variables *
*********************************/
double ShooterSpeedStage2 = 0;//was StageTwoTalon.get()
double percentageScaler = 0.75;
double ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;//was StageOneTalon.get()
double ShooterSpeedMax = 5300.0;
double ShooterSpeedAccel = 250;
double stageOneScaler = .5; //What stage one is multiplied by in order to make it a pecentage of stage 2
double PWMMax = 1; //maximum voltage sent to motor
double MaxScaler = PWMMax/5300;
double ShooterSpeedScale = MaxScaler * ShooterSpeedMax; //Scaler for voltage to RPM. Highly experimental!!
double currentRPMT2 = StageTwoTalon.get()*ShooterSpeedScale;
double currentRPMT1 = currentRPMT2*stageOneScaler;
double target;
double RPMModifier = 250;
double idle = 1 * ShooterSpeedScale;
double off = 0;
double Scaler = 5936;
double change;
double startTime = 9000000;
double backTime = 90000000;
double aTime = 900000000;
double bTime = 900000000;
boolean flagA;
boolean flagB;
boolean flagX;
boolean flagY;
boolean flagStart;
boolean flagBack;
boolean flagBack2;
public Team3373(){
}
public void autonomous() {
for (int i = 0; i < 4; i++) {
}
}
/**
* This function is called once each time the robot enters operator control.
*/
public void operatorControl() {
robotTimer.start();
while (isOperatorControl() & isDisabled()){
}
flagA = true;
flagB = true;
flagX = true;
flagY = true;
flagStart = true;
flagBack = true;
flagBack2 = false;
while (isOperatorControl() & isEnabled()){
/************************
* XBOX Shooter Buttons *
* *********************/
shootA = shootStick.getRawButton(1);
shootB = shootStick.getRawButton(2);
shootX = shootStick.getRawButton(3);
shootY = shootStick.getRawButton(4);
shootRB = shootStick.getRawButton(5);
shootLB = shootStick.getRawButton(6);
shootBack = shootStick.getRawButton(7);
shootStart = shootStick.getRawButton(8);
/************************
* XBOX Shooter Axes *
* *********************/
shootLX = shootStick.getRawAxis(1);
shootLY = shootStick.getRawAxis(2);
shootTriggers = shootStick.getRawAxis(3);
shootRX = shootStick.getRawAxis(4);
shootRY = shootStick.getRawAxis(5);
shootDP = shootStick.getRawAxis(6);
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
StageOneTalon.set(ShooterSpeedStage1);
StageTwoTalon.set(ShooterSpeedStage2);
if (shootStart && flagStart) {
ShooterSpeedStage2 = objShooter.start();
flagStart = false;
} else if (shootA && flagA){//increases stage 2
ShooterSpeedStage2 = objShooter.increaseSpeed(ShooterSpeedStage2);
if (ShooterSpeedStage2 >= 1) {
ShooterSpeedStage2 = 1;
}
flagA = false;
} else if (shootB && flagB){//decrease stage 2
ShooterSpeedStage2 = objShooter.decreaseSpeed(ShooterSpeedStage2);
if (ShooterSpeedStage2 <= 0) {
ShooterSpeedStage2 = 0;
}
flagB = false;
} else if (shootX && flagX){//increases percentage between Stage1 and Stage2
percentageScaler = objShooter.decreasePercentage(percentageScaler);
if (percentageScaler >= 1) {
percentageScaler = 1;
}
flagX = false;
} else if (shootY && flagY){//decreases percentage between Stage1 and Stage2
percentageScaler = objShooter.decreasePercentage(percentageScaler);
if (percentageScaler <= 0 ) {
percentageScaler = 0;
}
flagY = false;
} else if (shootBack && flagBack){//turns off
ShooterSpeedStage2 = objShooter.stop();
if (ShooterSpeedStage2 == 0){
flagBack = false;
}
}
//if (shootBack && flagBack){
// flagBack2 = true;
if (!shootA && !flagA) { //toggles
flagA = true;
} else if (!shootB && !flagB){
flagB = true;
}else if (!shootX && !flagX){
flagX = true;
}else if (!shootY && !flagY){
flagY = true;
} else if (!shootStart && !flagStart){
flagStart = true;
}else if (!shootBack && !flagBack){
flagBack = true;
//flagBack2 = false;
}
//try {Thread.sleep(1000);} catch(Exception e){}
//String percentage = Double.toString();
double speedOne = StageOneTalon.get();
String speed1 = Double.toString(speedOne);
double speedTwo = StageTwoTalon.get();
String speed2 = Double.toString(speedTwo);
LCD.println(Line.kUser3, 1, ((StageOneTalon.get()/StageTwoTalon.get()) *100) + " %");
LCD.println(Line.kUser4, 1,"S1:" + speed1);
LCD.println(Line.kUser5, 1,"S2:" + speed2);
LCD.println(Line.kUser1, 1, "RPM1: " + (speedOne * Scaler));
LCD.println(Line.kUser2, 1, "RPM2: " + (speedTwo * Scaler));
LCD.updateLCD();
/*if (shootA & !flagA) { //increases speed
objShooter.speedChange();
LCD.println(Line.kUser2, 1, "Pressing A");
LCD.updateLCD();
flagA = true;
}
if (!shootA & flagA) { //if a is not pressed and it has been pressed set it to false
flagA = false;
}
if (shootB & !flagB) { //decreases speed
objShooter.speedChange();
LCD.println(Line.kUser2, 1, "Pressing B");
LCD.updateLCD();
flagB = true;
}
if (!shootB & flagB) { //if b is not pressed and it has been pressed set it to false
flagB = false;
}
if (shootX & stageOneScaler <= 100 & !flagX){
stageOneScaler += 0.05;
//changes stage1 percentage of stage2 adds 5%
LCD.println(Line.kUser6, 1, "Adding 5% to Stage One Percentile");
LCD.updateLCD();
flagX = true;
}
if (!shootX & flagX) { //if x is not pressed and it has been pressed set it to false
flagX = false;
}
if (shootY & !flagY){
objShooter.percentageSubtract();
LCD.println(Line.kUser2, 1, "Pressing Y");
LCD.updateLCD();
}*/
String currentTime = Double.toString(robotTimer.get());
LCD.println(Line.kUser6, 1, currentTime);
}
}
}
| src/edu/wpi/first/wpilibj/templates/Team3373.java | /*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.DriverStationLCD.Line;
//import edu.wpi.first.wpilibj.RobotDrive;
//import edu.wpi.first.wpilibj.SimpleRobot;
//import edu.wpi.first.wpilibj.templates.Shooter;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the SimpleRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Team3373 extends SimpleRobot{
/**
* This function is called once each time the robot enters autonomous mode.
*/
int StageOneMotorPWM = 1; //Declares channel of StageOne PWM
int StageTwoMotorPWM = 2; //Declares channel of StageTwo PWM
Talon StageOneTalon = new Talon(1, 1); //Creates instance of StageOne PWM
Talon StageTwoTalon = new Talon(1, 2); //Creates instance of StageTwo PWM
DriverStationLCD LCD = DriverStationLCD.getInstance();
//SmartDashboard smartDashboard;
Joystick shootStick = new Joystick(2);
Shooter objShooter = new Shooter(this);
//Deadband objDeadband = new Deadband();
Timer robotTimer = new Timer();
/************************
* XBOX Shooter Buttons *
* *********************/
boolean shootA;
boolean shootB;
boolean shootX;
boolean shootY;
boolean shootRB;
boolean shootLB;
boolean shootBack;
boolean shootStart;
boolean test;
/************************
* XBOX Shooter Axes *
* *********************/
double shootLX = shootStick.getRawAxis(1);
double shootLY = shootStick.getRawAxis(2);
double shootTriggers = shootStick.getRawAxis(3);
double shootRX = shootStick.getRawAxis(4);
double shootRY = shootStick.getRawAxis(5);
double shootDP = shootStick.getRawAxis(6);
/*********************************
* Math/Shooter Action Variables *
*********************************/
double ShooterSpeedStage2 = 0.1;//was StageTwoTalon.get()
double percentageScaler = 0.5;
double ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;//was StageOneTalon.get()
double ShooterSpeedMax = 5300.0;
double ShooterSpeedAccel = 250;
double stageOneScaler = .5; //What stage one is multiplied by in order to make it a pecentage of stage 2
double PWMMax = 1; //maximum voltage sent to motor
double MaxScaler = PWMMax/5300;
double ShooterSpeedScale = MaxScaler * ShooterSpeedMax; //Scaler for voltage to RPM. Highly experimental!!
double currentRPMT2 = StageTwoTalon.get()*ShooterSpeedScale;
double currentRPMT1 = currentRPMT2*stageOneScaler;
double target;
double RPMModifier = 250;
double idle = 1 * ShooterSpeedScale;
double off = 0;
double Scaler = 5936;
double change;
double startTime = 9000000;
double backTime = 90000000;
double aTime = 900000000;
double bTime = 900000000;
boolean flagA;
boolean flagB;
boolean flagX;
boolean flagY;
boolean flagStart;
boolean flagBack;
boolean flagBack2;
public Team3373(){
}
public void autonomous() {
for (int i = 0; i < 4; i++) {
}
}
/**
* This function is called once each time the robot enters operator control.
*/
public void operatorControl() {
robotTimer.start();
while (isOperatorControl() & isDisabled()){
objShooter.shootInit();
}
flagA = true;
flagB = true;
flagX = true;
flagY = true;
flagStart = true;
flagBack = true;
flagBack2 = false;
while (isOperatorControl() & isEnabled()){
/************************
* XBOX Shooter Buttons *
* *********************/
shootA = shootStick.getRawButton(1);
shootB = shootStick.getRawButton(2);
shootX = shootStick.getRawButton(3);
shootY = shootStick.getRawButton(4);
shootRB = shootStick.getRawButton(5);
shootLB = shootStick.getRawButton(6);
shootBack = shootStick.getRawButton(7);
shootStart = shootStick.getRawButton(8);
/************************
* XBOX Shooter Axes *
* *********************/
shootLX = shootStick.getRawAxis(1);
shootLY = shootStick.getRawAxis(2);
shootTriggers = shootStick.getRawAxis(3);
shootRX = shootStick.getRawAxis(4);
shootRY = shootStick.getRawAxis(5);
shootDP = shootStick.getRawAxis(6);
StageOneTalon.set(ShooterSpeedStage1);
StageTwoTalon.set(ShooterSpeedStage2);
//Shooter objShooter = new Shooter();
//objShooter.shooterPrint();
//objShooter.Start();
if (shootStart && flagStart) {
startTime = robotTimer.get();
flagStart = false;
} else if (shootA && flagA){//increases stage 2
ShooterSpeedStage2 += 0.1;
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
StageOneTalon.set(ShooterSpeedStage1);
StageTwoTalon.set(ShooterSpeedStage2);
if (ShooterSpeedStage2 >= 1) {
ShooterSpeedStage2 = 1;
}
flagA = false;
} else if (shootB && flagB){//decrease stage 2
ShooterSpeedStage2 -= 0.1;
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
StageOneTalon.set(ShooterSpeedStage1);
StageTwoTalon.set(ShooterSpeedStage2);
if (ShooterSpeedStage2 <= 0) {
ShooterSpeedStage2 = 0;
}
flagB = false;
} else if (shootX && flagX){//increases percentage between Stage1 and Stage2
percentageScaler += 0.05;
if (percentageScaler >= 1) {
percentageScaler = 1;
}
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
StageOneTalon.set(ShooterSpeedStage1);
StageTwoTalon.set(ShooterSpeedStage2);
flagX = false;
} else if (shootY && flagY){//decreases percentage between Stage1 and Stage2
percentageScaler -= 0.05;
if (percentageScaler <= 0 ) {
percentageScaler = 0;
}
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
StageOneTalon.set(ShooterSpeedStage1);
StageTwoTalon.set(ShooterSpeedStage2);
flagY = false;
} else if (shootBack && flagBack){//turns off
ShooterSpeedStage2 -= 0.1;
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
if (ShooterSpeedStage2 == 0){
flagBack = false;
}
ShooterSpeedStage2 = .1;
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
}
//if (shootBack && flagBack){
// flagBack2 = true;
if (!shootA && !flagA) { //toggles
flagA = true;
} else if (!shootB && !flagB){
flagB = true;
}else if (!shootX && !flagX){
flagX = true;
}else if (!shootY && !flagY){
flagY = true;
} else if (!shootStart && !flagStart){
flagStart = true;
}else if (!shootBack && !flagBack){
flagBack = true;
//flagBack2 = false;
}
//try {Thread.sleep(1000);} catch(Exception e){}
//String percentage = Double.toString();
double speedOne = StageOneTalon.get();
String speed1 = Double.toString(speedOne);
double speedTwo = StageTwoTalon.get();
String speed2 = Double.toString(speedTwo);
LCD.println(Line.kUser3, 1, ((StageOneTalon.get()/StageTwoTalon.get()) *100) + " %");
LCD.println(Line.kUser4, 1,"S1:" + speed1);
LCD.println(Line.kUser5, 1,"S2:" + speed2);
LCD.println(Line.kUser1, 1, "RPM1: " + (speedOne * Scaler));
LCD.println(Line.kUser2, 1, "RPM2: " + (speedTwo * Scaler));
LCD.updateLCD();
/*if (shootA & !flagA) { //increases speed
objShooter.speedChange();
LCD.println(Line.kUser2, 1, "Pressing A");
LCD.updateLCD();
flagA = true;
}
if (!shootA & flagA) { //if a is not pressed and it has been pressed set it to false
flagA = false;
}
if (shootB & !flagB) { //decreases speed
objShooter.speedChange();
LCD.println(Line.kUser2, 1, "Pressing B");
LCD.updateLCD();
flagB = true;
}
if (!shootB & flagB) { //if b is not pressed and it has been pressed set it to false
flagB = false;
}
if (shootX & stageOneScaler <= 100 & !flagX){
stageOneScaler += 0.05;
//changes stage1 percentage of stage2 adds 5%
LCD.println(Line.kUser6, 1, "Adding 5% to Stage One Percentile");
LCD.updateLCD();
flagX = true;
}
if (!shootX & flagX) { //if x is not pressed and it has been pressed set it to false
flagX = false;
}
if (shootY & !flagY){
objShooter.percentageSubtract();
LCD.println(Line.kUser2, 1, "Pressing Y");
LCD.updateLCD();
}*/
String currentTime = Double.toString(robotTimer.get());
LCD.println(Line.kUser6, 1, currentTime);
}
}
}
| added methods for shooter testing | src/edu/wpi/first/wpilibj/templates/Team3373.java | added methods for shooter testing | <ide><path>rc/edu/wpi/first/wpilibj/templates/Team3373.java
<ide> *********************************/
<ide>
<ide>
<del> double ShooterSpeedStage2 = 0.1;//was StageTwoTalon.get()
<del> double percentageScaler = 0.5;
<add> double ShooterSpeedStage2 = 0;//was StageTwoTalon.get()
<add> double percentageScaler = 0.75;
<ide> double ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;//was StageOneTalon.get()
<ide>
<ide> double ShooterSpeedMax = 5300.0;
<ide> */
<ide> public void operatorControl() {
<ide> robotTimer.start();
<del> while (isOperatorControl() & isDisabled()){
<del> objShooter.shootInit();
<add> while (isOperatorControl() & isDisabled()){
<ide> }
<ide>
<ide> flagA = true;
<ide> shootRY = shootStick.getRawAxis(5);
<ide> shootDP = shootStick.getRawAxis(6);
<ide>
<del>
<del> StageOneTalon.set(ShooterSpeedStage1);
<del> StageTwoTalon.set(ShooterSpeedStage2);
<add>
<add>
<add>
<add> ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
<add> StageOneTalon.set(ShooterSpeedStage1);
<add> StageTwoTalon.set(ShooterSpeedStage2);
<ide>
<del>
<del> //Shooter objShooter = new Shooter();
<del>
<del> //objShooter.shooterPrint();
<del> //objShooter.Start();
<ide> if (shootStart && flagStart) {
<del> startTime = robotTimer.get();
<del>
<add> ShooterSpeedStage2 = objShooter.start();
<ide> flagStart = false;
<ide> } else if (shootA && flagA){//increases stage 2
<del>
<del> ShooterSpeedStage2 += 0.1;
<del> ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
<del> StageOneTalon.set(ShooterSpeedStage1);
<del> StageTwoTalon.set(ShooterSpeedStage2);
<del>
<del>
<add> ShooterSpeedStage2 = objShooter.increaseSpeed(ShooterSpeedStage2);
<ide> if (ShooterSpeedStage2 >= 1) {
<ide> ShooterSpeedStage2 = 1;
<ide> }
<del>
<ide> flagA = false;
<ide>
<ide> } else if (shootB && flagB){//decrease stage 2
<del>
<del> ShooterSpeedStage2 -= 0.1;
<del> ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
<del> StageOneTalon.set(ShooterSpeedStage1);
<del> StageTwoTalon.set(ShooterSpeedStage2);
<del>
<add> ShooterSpeedStage2 = objShooter.decreaseSpeed(ShooterSpeedStage2);
<ide> if (ShooterSpeedStage2 <= 0) {
<ide> ShooterSpeedStage2 = 0;
<ide> }
<ide> flagB = false;
<ide> } else if (shootX && flagX){//increases percentage between Stage1 and Stage2
<del> percentageScaler += 0.05;
<add> percentageScaler = objShooter.decreasePercentage(percentageScaler);
<ide> if (percentageScaler >= 1) {
<ide> percentageScaler = 1;
<ide> }
<del> ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
<del> StageOneTalon.set(ShooterSpeedStage1);
<del> StageTwoTalon.set(ShooterSpeedStage2);
<ide> flagX = false;
<ide> } else if (shootY && flagY){//decreases percentage between Stage1 and Stage2
<del> percentageScaler -= 0.05;
<add> percentageScaler = objShooter.decreasePercentage(percentageScaler);
<ide> if (percentageScaler <= 0 ) {
<ide> percentageScaler = 0;
<del> }
<del> ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
<del> StageOneTalon.set(ShooterSpeedStage1);
<del> StageTwoTalon.set(ShooterSpeedStage2);
<add> }
<ide> flagY = false;
<ide> } else if (shootBack && flagBack){//turns off
<del>
<del> ShooterSpeedStage2 -= 0.1;
<del> ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
<add> ShooterSpeedStage2 = objShooter.stop();
<ide> if (ShooterSpeedStage2 == 0){
<ide> flagBack = false;
<del> }
<del> ShooterSpeedStage2 = .1;
<del> ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
<del>
<add> }
<ide> }
<ide>
<ide> //if (shootBack && flagBack){ |
|
JavaScript | mit | 2cc89968242f3c571e207ecb5720b738776bb5f8 | 0 | olistik/poi-filters,olistik/poi-filters | // marker icons: https://www.mapbox.com/maki-icons
(function() {
var filters = null;
var checkboxes = [];
var map = null;
var geojsonData = {};
var featureLayer = null;
var mapboxSettings = {
accessToken: "pk.eyJ1Ijoib2xpc3RpayIsImEiOiJjaW03a2lvd2MwMDBsdzhtNTZzeG9pYzFsIn0.EnuMVTEKyFfJN6XZhtLmIA",
tileLayerId: "olistik.00d43ij6" // or "mapbox.streets"
};
var viewSettings = {
// center of Desio
latitude: 45.61820986655421,
longitude: 9.207572937011719,
zoomLevel: 15
};
function capitalize(string) {
if (string.length > 0) {
return string.charAt(0).toUpperCase() + string.slice(1);
} else {
return "";
}
}
function isPresent(string) {
return string !== undefined && string !== null && string.length > 0;
}
function fetchDataset(datasetUrl, callback) {
fetch(datasetUrl).then(function(response) {
return response.json();
}).then(function(json) {
callback(json);
});
}
function createCustomTooltips() {
var layers = featureLayer.getLayers();
for (var i = 0; i < layers.length; ++i) {
var layer = layers[i];
var marker = layer;
var feature = marker.feature;
var content = "";
content += "<strong>" + feature.properties.title + "</strong><br />";
if (isPresent(feature.properties.address)) {
content += "Indirizzo: " + feature.properties.address + "<br />";
}
if (isPresent(feature.properties.url)) {
content += "URL: " + "<a href=\"" + feature.properties.url + "\">" + feature.properties.url + "</a><br />";
}
if (isPresent(feature.properties["e-mail"])) {
content += "e-mail: " + "<a href=\"mailto:" + feature.properties["e-mail"] + "\">" + feature.properties["e-mail"] + "</a><br />";
}
if (isPresent(feature.properties.telephone)) {
content += "Tel. " + "<a href=\"call:" + feature.properties.telephone + "\">" + feature.properties.telephone + "</a><br />";
}
marker.bindPopup(content, {
closeButton: false,
minWidth: 320
});
}
}
function update() {
var enabled = {};
for (var i = 0; i < checkboxes.length; i++) {
if (checkboxes[i].checked) {
enabled[checkboxes[i].id] = true;
}
}
featureLayer.setFilter(function(feature) {
return (feature.properties.category in enabled);
});
createCustomTooltips();
if (Object.keys(enabled).length > 0) {
map.fitBounds(featureLayer.getBounds());
} else {
map.panTo({
lat: viewSettings.latitude,
lon: viewSettings.longitude
});
map.setZoom(viewSettings.zoomLevel);
}
}
function createFilters() {
var typesObj = {}, types = [];
var features = featureLayer.getGeoJSON().features;
for (var i = 0; i < features.length; i++) {
var feature = features[i];
typesObj[feature.properties.category] = true;
}
for (var k in typesObj) {
types.push(k);
}
// Create a filter interface.
for (var i = 0; i < types.length; i++) {
// Create an an input checkbox and label inside.
var item = filters.appendChild(document.createElement("div"));
var checkbox = item.appendChild(document.createElement("input"));
var label = item.appendChild(document.createElement("label"));
checkbox.type = "checkbox";
checkbox.id = types[i];
checkbox.checked = true;
// create a label to the right of the checkbox with explanatory text
label.innerHTML = capitalize(types[i]);
label.setAttribute("for", types[i]);
// Whenever a person clicks on this checkbox, call the update().
checkbox.addEventListener("change", update);
checkboxes.push(checkbox);
}
}
function initMap() {
L.mapbox.accessToken = mapboxSettings.accessToken;
var mapElement = document.querySelector("[data-map]");
map = L.mapbox.map(mapElement);
var tileLayer = L.mapbox.tileLayer(mapboxSettings.tileLayerId);
tileLayer.addTo(map);
map.setView([
viewSettings.latitude,
viewSettings.longitude
], viewSettings.zoomLevel);
featureLayer = L.mapbox.featureLayer();
featureLayer.addTo(map);
var datasetUrl = "https://raw.githubusercontent.com/olistik/poi-filters/master/dataset.geojson";
fetchDataset(datasetUrl, function(json) {
geojsonData = json;
featureLayer.setGeoJSON(geojsonData);
map.fitBounds(featureLayer.getBounds());
createCustomTooltips();
createFilters();
});
// callback fired when a marker gets clicked
featureLayer.on("click", function(e) {
// centers on marker
map.panTo(e.layer.getLatLng());
});
}
document.addEventListener("DOMContentLoaded", function(event) {
filters = document.querySelector(".categories-picker");
initMap();
});
})();
| application.js | // marker icons: https://www.mapbox.com/maki-icons
(function() {
var filters = null;
var checkboxes = [];
var map = null;
var geojsonData = {};
var featureLayer = null;
var mapboxSettings = {
accessToken: "pk.eyJ1Ijoib2xpc3RpayIsImEiOiJjaW03a2lvd2MwMDBsdzhtNTZzeG9pYzFsIn0.EnuMVTEKyFfJN6XZhtLmIA",
tileLayerId: "olistik.00d43ij6" // or "mapbox.streets"
};
var viewSettings = {
// center of Desio
latitude: 45.61820986655421,
longitude: 9.207572937011719,
zoomLevel: 15
};
function capitalize(string) {
if (string.length > 0) {
return string.charAt(0).toUpperCase() + string.slice(1);
} else {
return "";
}
}
}
function fetchDataset(datasetUrl, callback) {
fetch(datasetUrl).then(function(response) {
return response.json();
}).then(function(json) {
callback(json);
});
}
function createCustomTooltips() {
var layers = featureLayer.getLayers();
for (var i = 0; i < layers.length; ++i) {
var layer = layers[i];
var marker = layer;
var feature = marker.feature;
var content = "";
content += feature.properties.title + "<br />";
content += "Indirizzo: " + feature.properties.address + "<br />";
content += "URL: " + "<a href=\"" + feature.properties.url + "\">" + feature.properties.url + "</a><br />";
content += "e-mail: " + "<a href=\"mailto:" + feature.properties["e-mail"] + "\">" + feature.properties["e-mail"] + "</a><br />";
content += "Tel. " + "<a href=\"call:" + feature.properties.telephone + "\">" + feature.properties.telephone + "</a><br />";
marker.bindPopup(content, {
closeButton: false,
minWidth: 320
});
}
}
function update() {
var enabled = {};
for (var i = 0; i < checkboxes.length; i++) {
if (checkboxes[i].checked) {
enabled[checkboxes[i].id] = true;
}
}
featureLayer.setFilter(function(feature) {
return (feature.properties.category in enabled);
});
createCustomTooltips();
if (Object.keys(enabled).length > 0) {
map.fitBounds(featureLayer.getBounds());
} else {
map.panTo({
lat: viewSettings.latitude,
lon: viewSettings.longitude
});
map.setZoom(viewSettings.zoomLevel);
}
}
function createFilters() {
var typesObj = {}, types = [];
var features = featureLayer.getGeoJSON().features;
for (var i = 0; i < features.length; i++) {
var feature = features[i];
typesObj[feature.properties.category] = true;
}
for (var k in typesObj) {
types.push(k);
}
// Create a filter interface.
for (var i = 0; i < types.length; i++) {
// Create an an input checkbox and label inside.
var item = filters.appendChild(document.createElement("div"));
var checkbox = item.appendChild(document.createElement("input"));
var label = item.appendChild(document.createElement("label"));
checkbox.type = "checkbox";
checkbox.id = types[i];
checkbox.checked = true;
// create a label to the right of the checkbox with explanatory text
label.innerHTML = capitalize(types[i]);
label.setAttribute("for", types[i]);
// Whenever a person clicks on this checkbox, call the update().
checkbox.addEventListener("change", update);
checkboxes.push(checkbox);
}
}
function initMap() {
L.mapbox.accessToken = mapboxSettings.accessToken;
var mapElement = document.querySelector("[data-map]");
map = L.mapbox.map(mapElement);
var tileLayer = L.mapbox.tileLayer(mapboxSettings.tileLayerId);
tileLayer.addTo(map);
map.setView([
viewSettings.latitude,
viewSettings.longitude
], viewSettings.zoomLevel);
featureLayer = L.mapbox.featureLayer();
featureLayer.addTo(map);
var datasetUrl = "https://raw.githubusercontent.com/olistik/poi-filters/master/dataset.geojson";
fetchDataset(datasetUrl, function(json) {
geojsonData = json;
featureLayer.setGeoJSON(geojsonData);
map.fitBounds(featureLayer.getBounds());
createCustomTooltips();
createFilters();
});
// callback fired when a marker gets clicked
featureLayer.on("click", function(e) {
// centers on marker
map.panTo(e.layer.getLatLng());
});
}
document.addEventListener("DOMContentLoaded", function(event) {
filters = document.querySelector(".categories-picker");
initMap();
});
})();
| shows fields only if present
| application.js | shows fields only if present | <ide><path>pplication.js
<ide> }
<ide> }
<ide>
<add> function isPresent(string) {
<add> return string !== undefined && string !== null && string.length > 0;
<ide> }
<ide>
<ide> function fetchDataset(datasetUrl, callback) {
<ide> var feature = marker.feature;
<ide>
<ide> var content = "";
<del> content += feature.properties.title + "<br />";
<del> content += "Indirizzo: " + feature.properties.address + "<br />";
<del> content += "URL: " + "<a href=\"" + feature.properties.url + "\">" + feature.properties.url + "</a><br />";
<del> content += "e-mail: " + "<a href=\"mailto:" + feature.properties["e-mail"] + "\">" + feature.properties["e-mail"] + "</a><br />";
<del> content += "Tel. " + "<a href=\"call:" + feature.properties.telephone + "\">" + feature.properties.telephone + "</a><br />";
<add> content += "<strong>" + feature.properties.title + "</strong><br />";
<add>
<add> if (isPresent(feature.properties.address)) {
<add> content += "Indirizzo: " + feature.properties.address + "<br />";
<add> }
<add> if (isPresent(feature.properties.url)) {
<add> content += "URL: " + "<a href=\"" + feature.properties.url + "\">" + feature.properties.url + "</a><br />";
<add> }
<add> if (isPresent(feature.properties["e-mail"])) {
<add> content += "e-mail: " + "<a href=\"mailto:" + feature.properties["e-mail"] + "\">" + feature.properties["e-mail"] + "</a><br />";
<add> }
<add> if (isPresent(feature.properties.telephone)) {
<add> content += "Tel. " + "<a href=\"call:" + feature.properties.telephone + "\">" + feature.properties.telephone + "</a><br />";
<add> }
<ide>
<ide> marker.bindPopup(content, {
<ide> closeButton: false, |
|
Java | bsd-2-clause | 03eecbb60e66da534a8105b8ba8728cf19dc12eb | 0 | oci-pronghorn/PronghornCompression | package com.ociweb.pronghorn.components.decompression;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;
import java.util.concurrent.TimeUnit;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ociweb.pronghorn.components.compression.KanziCompressionComponent.KanziCompressionStage;
import com.ociweb.pronghorn.components.decompression.KanziDecompressionComponent.KanziDecompressionStage;
import com.ociweb.pronghorn.components.utilities.TestingComponent.Dumper;
import com.ociweb.pronghorn.components.utilities.TestingComponent.Generator;
import com.ociweb.pronghorn.ring.FieldReferenceOffsetManager;
import com.ociweb.pronghorn.ring.RingBuffer;
import com.ociweb.pronghorn.ring.RingBufferConfig;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import com.ociweb.pronghorn.stage.scheduling.ThreadPerStageScheduler;
public class KanziDecompressionStageTest {
private final String codecs[] = new String[] {"PAQ", "FPAQ", "ANS", "HUFFMAN", "RANGE", "NONE"};
private final String transforms[] = new String[] {"BWT", "BWTS", "SNAPPY", "LZ4", "RLT", "BWT+MTF", "BWT+TIMESTAMP", "NONE"};
private final Logger logger = LoggerFactory.getLogger(KanziDecompressionStageTest.class);
private final RingBufferConfig config = new RingBufferConfig(FieldReferenceOffsetManager.RAW_BYTES, 20, 4096);
@Test
public void verifyInstantiation() {
GraphManager manager = new GraphManager();
RingBuffer input = new RingBuffer(config);
RingBuffer output = new RingBuffer(config);
KanziDecompressionStage stage = new KanziDecompressionStage(manager, input, output);
}
@Ignore //TODO: Must be fixed before anyone should assume Kanzi can be used.
public void verify() {
for(String codec : codecs) {
for(String transform : transforms) {
// NONE+SNAPPY doesn't decode correctly...
if(codec == "NONE" && transform == "SNAPPY") continue;
verify(codec, transform);
}
}
}
private void verify(String codec, String transform) {
GraphManager manager = new GraphManager();
RingBuffer[] rings = new RingBuffer[] {
new RingBuffer(config) // input to compression stage
, new RingBuffer(config) // output for compression stage, input for decompression stage
, new RingBuffer(config) // output for decompression stage, input for dumper.
};
Generator generator = new Generator(manager, rings[0], 100);
KanziCompressionStage compressor = new KanziCompressionStage(manager, rings[0], rings[1], codec, transform);
KanziDecompressionStage decompressor = new KanziDecompressionStage(manager, rings[1], rings[2]);
Dumper dumper = new Dumper(manager, rings[2]);
ThreadPerStageScheduler service = new ThreadPerStageScheduler(manager);
service.startup();
boolean completed = service.awaitTermination(1, TimeUnit.SECONDS);
// make sure data traversing RingBuffers didn't mangle anything.
assertArrayEquals(generator.data(), dumper.data());
if (!completed) {
logger.warn("Did not shut down cleanly, should investigate");
}
}
}
| src/test/java/com/ociweb/pronghorn/components/decompression/KanziDecompressionStageTest.java | package com.ociweb.pronghorn.components.decompression;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ociweb.pronghorn.components.compression.KanziCompressionComponent.KanziCompressionStage;
import com.ociweb.pronghorn.components.decompression.KanziDecompressionComponent.KanziDecompressionStage;
import com.ociweb.pronghorn.components.utilities.TestingComponent.Dumper;
import com.ociweb.pronghorn.components.utilities.TestingComponent.Generator;
import com.ociweb.pronghorn.ring.FieldReferenceOffsetManager;
import com.ociweb.pronghorn.ring.RingBuffer;
import com.ociweb.pronghorn.ring.RingBufferConfig;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import com.ociweb.pronghorn.stage.scheduling.ThreadPerStageScheduler;
public class KanziDecompressionStageTest {
private final String codecs[] = new String[] {"PAQ", "FPAQ", "ANS", "HUFFMAN", "RANGE", "NONE"};
private final String transforms[] = new String[] {"BWT", "BWTS", "SNAPPY", "LZ4", "RLT", "BWT+MTF", "BWT+TIMESTAMP", "NONE"};
private final Logger logger = LoggerFactory.getLogger(KanziDecompressionStageTest.class);
private final RingBufferConfig config = new RingBufferConfig(FieldReferenceOffsetManager.RAW_BYTES, 20, 4096);
@Test
public void verifyInstantiation() {
GraphManager manager = new GraphManager();
RingBuffer input = new RingBuffer(config);
RingBuffer output = new RingBuffer(config);
KanziDecompressionStage stage = new KanziDecompressionStage(manager, input, output);
}
@Test
public void verify() {
for(String codec : codecs) {
for(String transform : transforms) {
// NONE+SNAPPY doesn't decode correctly...
if(codec == "NONE" && transform == "SNAPPY") continue;
verify(codec, transform);
}
}
}
private void verify(String codec, String transform) {
GraphManager manager = new GraphManager();
RingBuffer[] rings = new RingBuffer[] {
new RingBuffer(config) // input to compression stage
, new RingBuffer(config) // output for compression stage, input for decompression stage
, new RingBuffer(config) // output for decompression stage, input for dumper.
};
Generator generator = new Generator(manager, rings[0], 100);
KanziCompressionStage compressor = new KanziCompressionStage(manager, rings[0], rings[1], codec, transform);
KanziDecompressionStage decompressor = new KanziDecompressionStage(manager, rings[1], rings[2]);
Dumper dumper = new Dumper(manager, rings[2]);
ThreadPerStageScheduler service = new ThreadPerStageScheduler(manager);
service.startup();
boolean completed = service.awaitTermination(1, TimeUnit.SECONDS);
// make sure data traversing RingBuffers didn't mangle anything.
assertArrayEquals(generator.data(), dumper.data());
if (!completed) {
logger.warn("Did not shut down cleanly, should investigate");
}
}
}
| Disable unused Kansi feature in test. | src/test/java/com/ociweb/pronghorn/components/decompression/KanziDecompressionStageTest.java | Disable unused Kansi feature in test. | <ide><path>rc/test/java/com/ociweb/pronghorn/components/decompression/KanziDecompressionStageTest.java
<ide>
<ide> import java.util.concurrent.TimeUnit;
<ide>
<add>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide> KanziDecompressionStage stage = new KanziDecompressionStage(manager, input, output);
<ide> }
<ide>
<del> @Test
<add> @Ignore //TODO: Must be fixed before anyone should assume Kanzi can be used.
<ide> public void verify() {
<ide>
<ide> for(String codec : codecs) { |
|
JavaScript | apache-2.0 | 36389a08a7fb3a5853422a25fa06e58aa4e94374 | 0 | caneruguz/ember-preprints,baylee-d/ember-preprints,laurenrevere/ember-preprints,laurenrevere/ember-preprints,CenterForOpenScience/ember-preprints,CenterForOpenScience/ember-preprints,baylee-d/ember-preprints,caneruguz/ember-preprints | import Ember from 'ember';
import Analytics from '../mixins/analytics';
/**
* @module ember-preprints
* @submodule components
*/
/**
* Displays active preprint providers in a horizontal carousel with five providers per slide. Does not auto-advance.
* Handles display on two pages: index (lightLogo=true) and discover (lightLogo=false). If using elsewhere, need to add more customization
* around how provider logos and links are built.
*
* Sample usage:
* ```handlebars
* {{provider-carousel
* providers=providers
}}
* ```
* @class provider-carousel
*/
export default Ember.Component.extend(Analytics, {
_resizeListener: null,
providers: Ember.A(), // Pass in preprint providers
itemsPerSlide: 5, // Default
lightLogo: true, // Light logos by default, for Index page.
numProviders: Ember.computed('providers', function() {
return this.get('providers').length;
}),
numSlides: Ember.computed('numProviders', 'itemsPerSlide', function() {
return Math.ceil(this.get('numProviders') / this.get('itemsPerSlide'));
}),
slides: Ember.computed('numSlides', 'providers', 'itemsPerSlide', function() {
const numSlides = this.get('numSlides');
const itemsPerSlide = this.get('itemsPerSlide');
return new Array(numSlides).fill().map((_, i) => this.get('providers').slice(i * itemsPerSlide, i * itemsPerSlide + itemsPerSlide));
}),
setSlideItems: function() {
// On xs screens, show one provider per slide. Otherwise, five.
if (window.innerWidth < 768) {
this.set('itemsPerSlide', 1);
} else {
this.set('itemsPerSlide', 5);
}
},
didInsertElement: function () {
// On xs screen, display one provider per slide
Ember.$('.carousel').carousel();
},
init: function() {
// Set resize listener so number of providers per slide can be changed
this._super(...arguments);
this.setSlideItems();
this._resizeListener = Ember.run.bind(this, this.setSlideItems);
Ember.$(window).on('resize', this._resizeListener);
},
willDestroy: function() {
// Unbinds _resizeListener
if (this._resizeListener) {
Ember.$(window).off('resize', this._resizeListener);
}
}
});
| app/components/provider-carousel.js | import Ember from 'ember';
import Analytics from '../mixins/analytics';
/**
* @module ember-preprints
* @submodule components
*/
/**
* Displays active preprint providers in a horizontal carousel with five providers per slide. Does not auto-advance.
* Handles display on two pages: index (lightLogo=true) and discover (lightLogo=false). If using elsewhere, need to add more customization
* around how provider logos and links are built.
*
* Sample usage:
* ```handlebars
* {{provider-carousel
* providers=providers
}}
* ```
* @class provider-carousel
*/
export default Ember.Component.extend(Analytics, {
_resizeListener: null,
providers: Ember.A(), // Pass in preprint providers
itemsPerSlide: 5, // Default
lightLogo: true, // Light logos by default, for Index page.
numProviders: Ember.computed('providers', function() {
return this.get('providers').length;
}),
numSlides: Ember.computed('numProviders', 'itemsPerSlide', function() {
return Math.ceil(this.get('numProviders')/this.get('itemsPerSlide'));
}),
slides: Ember.computed('numSlides', 'providers', 'itemsPerSlide', function() {
const numSlides = this.get('numSlides');
const itemsPerSlide = this.get('itemsPerSlide');
return new Array(numSlides).fill().map((_, i) => {
return this.get('providers').slice(i * itemsPerSlide, i * itemsPerSlide + itemsPerSlide);
});
}),
setSlideItems: function() {
// On xs screens, show one provider per slide. Otherwise, five.
if (window.innerWidth < 768) {
this.set('itemsPerSlide', 1);
} else {
this.set('itemsPerSlide', 5);
}
},
didInsertElement: function () {
// On xs screen, display one provider per slide
Ember.$('.carousel').carousel();
},
init: function() {
// Set resize listener so number of providers per slide can be changed
this._super(...arguments);
this.setSlideItems();
this._resizeListener = Ember.run.bind(this, this.setSlideItems);
Ember.$(window).on('resize', this._resizeListener);
},
willDestroy: function() {
// Unbinds _resizeListener
if (this._resizeListener) {
Ember.$(window).off('resize', this._resizeListener);
}
}
});
| Styling.
| app/components/provider-carousel.js | Styling. | <ide><path>pp/components/provider-carousel.js
<ide> return this.get('providers').length;
<ide> }),
<ide> numSlides: Ember.computed('numProviders', 'itemsPerSlide', function() {
<del> return Math.ceil(this.get('numProviders')/this.get('itemsPerSlide'));
<add> return Math.ceil(this.get('numProviders') / this.get('itemsPerSlide'));
<ide> }),
<ide> slides: Ember.computed('numSlides', 'providers', 'itemsPerSlide', function() {
<ide> const numSlides = this.get('numSlides');
<ide> const itemsPerSlide = this.get('itemsPerSlide');
<del> return new Array(numSlides).fill().map((_, i) => {
<del> return this.get('providers').slice(i * itemsPerSlide, i * itemsPerSlide + itemsPerSlide);
<del> });
<add> return new Array(numSlides).fill().map((_, i) => this.get('providers').slice(i * itemsPerSlide, i * itemsPerSlide + itemsPerSlide));
<ide> }),
<ide> setSlideItems: function() {
<ide> // On xs screens, show one provider per slide. Otherwise, five. |
|
Java | apache-2.0 | 8360c86bc5d35c300f2b7b14dc15d3a24b71627a | 0 | GoogleCloudPlatform/appengine-plugins-core,GoogleCloudPlatform/appengine-plugins-core | /**
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.tools.app.impl.cloudsdk;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import com.google.cloud.tools.app.api.AppEngineException;
import com.google.cloud.tools.app.api.deploy.AppEngineFlexibleStaging;
import com.google.cloud.tools.app.api.deploy.StageFlexibleConfiguration;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Set;
/**
* Cloud SDK based implementation of {@link AppEngineFlexibleStaging}.
*/
public class CloudSdkAppEngineFlexibleStaging implements AppEngineFlexibleStaging {
/**
* Stages a Java JAR/WAR Managed VMs application to be deployed.
*
* <p></p>Copies app.yaml, Dockerfile and the application artifact to the staging area.
*
* <p>If app.yaml or Dockerfile do not exist, gcloud cloud will create them.
*/
@Override
public void stageFlexible(StageFlexibleConfiguration config) throws AppEngineException {
Preconditions.checkNotNull(config);
Preconditions.checkNotNull(config.getStagingDirectory());
Preconditions.checkNotNull(config.getArtifact());
if (!config.getStagingDirectory().exists()) {
throw new AppEngineException("Staging directory does not exist. Location: "
+ config.getStagingDirectory().toPath().toString());
}
if (!config.getStagingDirectory().isDirectory()) {
throw new AppEngineException("Staging location is not a directory. Location: "
+ config.getStagingDirectory().toPath().toString());
}
try {
// Copy app.yaml to staging.
if (config.getAppYaml() != null && config.getAppYaml().exists()) {
Files.copy(config.getAppYaml().toPath(),
config.getStagingDirectory().toPath()
.resolve(config.getAppYaml().toPath().getFileName()),
REPLACE_EXISTING);
}
// Copy Dockerfile to staging.
if (config.getDockerfile() != null && config.getDockerfile().exists()) {
Files.copy(config.getDockerfile().toPath(),
config.getStagingDirectory().toPath()
.resolve(config.getDockerfile().toPath().getFileName()),
REPLACE_EXISTING);
}
// TODO : looks like this section should error on no artifacts found? and maybe the
// TODO : earlier ones should warn?
// Copy the JAR/WAR file to staging.
if (config.getArtifact() != null && config.getArtifact().exists()) {
Path destination = config.getStagingDirectory().toPath()
.resolve(config.getArtifact().toPath().getFileName());
Files.copy(config.getArtifact().toPath(), destination, REPLACE_EXISTING);
// Update artifact permissions so docker can read it when deployed
if (!System.getProperty("os.name").contains("Windows")) {
Set<PosixFilePermission> permissions = Sets.newHashSet();
permissions.add(PosixFilePermission.OWNER_READ);
permissions.add(PosixFilePermission.OWNER_WRITE);
permissions.add(PosixFilePermission.GROUP_READ);
permissions.add(PosixFilePermission.OTHERS_READ);
Files.setPosixFilePermissions(destination, permissions);
}
}
} catch (IOException e) {
throw new AppEngineException(e);
}
}
}
| src/main/java/com/google/cloud/tools/app/impl/cloudsdk/CloudSdkAppEngineFlexibleStaging.java | /**
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.tools.app.impl.cloudsdk;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import com.google.cloud.tools.app.api.AppEngineException;
import com.google.cloud.tools.app.api.deploy.AppEngineFlexibleStaging;
import com.google.cloud.tools.app.api.deploy.StageFlexibleConfiguration;
import com.google.common.base.Preconditions;
import java.io.IOException;
import java.nio.file.Files;
/**
* Cloud SDK based implementation of {@link AppEngineFlexibleStaging}.
*/
public class CloudSdkAppEngineFlexibleStaging implements AppEngineFlexibleStaging {
/**
* Stages a Java JAR/WAR Managed VMs application to be deployed.
*
* <p></p>Copies app.yaml, Dockerfile and the application artifact to the staging area.
*
* <p>If app.yaml or Dockerfile do not exist, gcloud cloud will create them.
*/
@Override
public void stageFlexible(StageFlexibleConfiguration config) throws AppEngineException {
Preconditions.checkNotNull(config);
Preconditions.checkNotNull(config.getStagingDirectory());
Preconditions.checkNotNull(config.getArtifact());
if (!config.getStagingDirectory().exists()) {
throw new AppEngineException("Staging directory does not exist. Location: "
+ config.getStagingDirectory().toPath().toString());
}
if (!config.getStagingDirectory().isDirectory()) {
throw new AppEngineException("Staging location is not a directory. Location: "
+ config.getStagingDirectory().toPath().toString());
}
try {
// Copy app.yaml to staging.
if (config.getAppYaml() != null && config.getAppYaml().exists()) {
Files.copy(config.getAppYaml().toPath(),
config.getStagingDirectory().toPath()
.resolve(config.getAppYaml().toPath().getFileName()),
REPLACE_EXISTING);
}
// Copy Dockerfile to staging.
if (config.getDockerfile() != null && config.getDockerfile().exists()) {
Files.copy(config.getDockerfile().toPath(),
config.getStagingDirectory().toPath()
.resolve(config.getDockerfile().toPath().getFileName()),
REPLACE_EXISTING);
}
// TODO : looks like this section should error on no artifacts found? and maybe the
// TODO : earlier ones should warn?
// Copy the JAR/WAR file to staging.
if (config.getArtifact() != null && config.getArtifact().exists()) {
Files.copy(config.getArtifact().toPath(),
config.getStagingDirectory().toPath()
.resolve(config.getArtifact().toPath().getFileName()),
REPLACE_EXISTING);
}
} catch (IOException e) {
throw new AppEngineException(e);
}
}
}
| Ensure docker can read artifacts in flexible deploys (#89)
This only affect posix systems. | src/main/java/com/google/cloud/tools/app/impl/cloudsdk/CloudSdkAppEngineFlexibleStaging.java | Ensure docker can read artifacts in flexible deploys (#89) | <ide><path>rc/main/java/com/google/cloud/tools/app/impl/cloudsdk/CloudSdkAppEngineFlexibleStaging.java
<ide> import com.google.cloud.tools.app.api.deploy.AppEngineFlexibleStaging;
<ide> import com.google.cloud.tools.app.api.deploy.StageFlexibleConfiguration;
<ide> import com.google.common.base.Preconditions;
<add>import com.google.common.collect.Sets;
<ide>
<ide> import java.io.IOException;
<ide> import java.nio.file.Files;
<add>import java.nio.file.Path;
<add>import java.nio.file.attribute.PosixFilePermission;
<add>import java.util.Set;
<ide>
<ide> /**
<ide> * Cloud SDK based implementation of {@link AppEngineFlexibleStaging}.
<ide> // TODO : earlier ones should warn?
<ide> // Copy the JAR/WAR file to staging.
<ide> if (config.getArtifact() != null && config.getArtifact().exists()) {
<del> Files.copy(config.getArtifact().toPath(),
<del> config.getStagingDirectory().toPath()
<del> .resolve(config.getArtifact().toPath().getFileName()),
<del> REPLACE_EXISTING);
<add> Path destination = config.getStagingDirectory().toPath()
<add> .resolve(config.getArtifact().toPath().getFileName());
<add> Files.copy(config.getArtifact().toPath(), destination, REPLACE_EXISTING);
<add>
<add> // Update artifact permissions so docker can read it when deployed
<add> if (!System.getProperty("os.name").contains("Windows")) {
<add> Set<PosixFilePermission> permissions = Sets.newHashSet();
<add> permissions.add(PosixFilePermission.OWNER_READ);
<add> permissions.add(PosixFilePermission.OWNER_WRITE);
<add> permissions.add(PosixFilePermission.GROUP_READ);
<add> permissions.add(PosixFilePermission.OTHERS_READ);
<add>
<add> Files.setPosixFilePermissions(destination, permissions);
<add> }
<ide> }
<ide> } catch (IOException e) {
<ide> throw new AppEngineException(e); |
|
JavaScript | mit | d1908cf1e2d48f214914135f36758ef3d2d2532f | 0 | greeny/green.dj | /**
* green.js - a plug.dj plugin
* @licence MIT
* @author greeny
*/
(function(){
if(typeof API === 'undefined') {
alert('You are not at plug.dj. Please use this bookmark at plug.dj.\n\nYou can find more info at greeny.github.io/green.dj');
}
if(typeof window.greenDj === 'undefined') {
function GreenDjObject() {
this.API = API;
this.version = '1.0.0';
this.settings = {};
var that = this;
/* EVENTS */
this.API.on(API.USER_JOIN, function(user) {
that.onUserJoin(user);
});
this.API.on(API.USER_LEFT, function(user) {
that.onUserLeave(user);
});
this.API.on(API.VOTE_UPDATE, function(data) {
that.onUserVote(data.user, data.vote);
});
this.API.on(API.GRAB_UPDATE, function(user) {
that.onGrab(user.user);
});
this.API.on(API.ADVANCE, function(data) {
that.onAdvance('advance', data);
});
this.API.on(API.USER_SKIP, function(data) {
that.onAdvance('skip', data);
});
this.API.on(API.MOD_SKIP, function(data) {
that.onAdvance('forceSkip', data);
});
/* FUNCTIONALITY */
this.onUserJoin = function(user) {
this.info(user.username + ' has joined the room.', 'userJoin');
};
this.onUserLeave = function(user) {
this.info(user.username + ' has left the room.', 'userLeave');
};
this.onUserVote = function(user, vote) {
console.log(vote);
this.info('<i class="icon icon-woot" style="width: 15px;background-position: ' + (vote === 1 ? '-217px' : '-181px') + ' -287px;left: 4px;"></i>' +
user.username + ' had ' + (vote === 1 ? '<span style="color: #90ad2f">Woot!</span>ed' : '<span style="color: #c42e3b;">Meh!</span>ed') + ' this song!', 'userVote');
};
this.onGrab = function(user) {
console.log(user);
this.info('<i class="icon icon-grab" style="width: 17px;background-position: -146px -287px;left: 4px;"></i>' +
user.username + ' has <span style="color: #aa74ff">added</span> this song to his playlist.', 'userGrab');
};
this.onAdvance = function(reason, data) {
if(this.featureEnabled('autoWoot')) {
/*this.woot();*/
}
};
this.woot = function() {
setTimeout(function() {
$('#woot').click();
}, 2000);
};
this.grab = function() {
setTimeout(function() {
$('#grab').click();
}, 2000);
};
this.meh = function() {
setTimeout(function() {
$('#meh').click();
}, 2000);
};
this.info = function(message, required) {
/*if(message) {
if(!(required && this.messageEnabled(required))) {*/
var $el = $('<div class="message deletable" style="padding-left: 25px;border-left: green 3px solid;" onmouseover="$(this).find(\'.delete-button\').show();" onmouseout="$(this).find(\'.delete-button\').hide();">' +
'<div class="delete-button" style="display: none;" onclick="$(this).parent().remove()">Delete</div>' +
'[<span style="color: green">green.dj</span>] ' + message + '' +
'</div>');
var $chat = $("#chat-messages");
$chat.append($el);
$chat.scrollTop($chat.scrollTop() + $el.outerHeight(true));
/*}
}*/
};
/* SETTINGS */
this.init = function() {
$('#chat-header').find('.divider').after('<div class="chat-header-button" style="margin-left: 13px; margin-right: 0;" onclick="greenDj.onSettingsClick()">' +
'<i class="icon icon-settings-grey" onmouseover="$(this).toggleClass(\'icon-settings-grey icon-settings-white\')" onmouseout="$(this).toggleClass(\'icon-settings-grey icon-settings-white\')"></i>' +
'</div>');
$('body [data-greendj-settings]').on('change', function(e) {
e.preventDefault();
that.onSettingsChange($(this));
});
if(typeof Storage === 'undefined') {
this.info('green.dj cannot be loaded, because your browser does not support localStorage.');
return;
}
var data = localStorage.getItem('green.dj');
if(data) {
this.settings = JSON.parse(data);
} else {
this.settings = {};
}
this.info('green.dj version ' + this.version + ' loaded. Enjoy!');
};
this.onSettingsClick = function() {
$('#dialog-container').html('<div class="dialog" id="green_dj_settings_dialog" style="height: 500px;">' +
'<div class="dialog-frame"><span class="title">green.dj settings</span><i class="icon icon-dialog-close" onclick="greenDj.closeSettingsDialog()"></i></div>' +
'<div class="dialog-body" style="height: 390px;text-align: left;">' + this.getSettingsHtml() + '</div>' +
'<div class="dialog-frame"><div class="button cancel"><span>Export</span></div><div class="button submit" onclick="greenDj.closeSettingsDialog()"><span>OK</span></div></div>' +
'</div>').show();
};
this.onSettingsChange = function(el) {
var s = this.getSettings(), key = el.data('greendj-settings'), val = el.attr('name'), checked = el.val();
if(!s[key]) {
s[key] = {};
}
if(!s[key][val]) {
s[key][val] = false;
}
this.settings[key][val] = !this.settings[key][val];
el.val(this.settings[key][val]);
localStorage.setItem('green.dj', JSON.stringify(this.settings));
};
this.messageEnabled = function(key) {
var s = this.getSettings();
if(!s.messages) {
s.messages = {};
return false;
}
if(!s.messages[key]) {
s.messages[key] = true;
}
return s.messages[key];
};
this.featureEnabled = function(key) {
var s = this.getSettings();
if(!s.features) {
s.features = {};
return false;
}
if(!s.features[key]) {
s.features[key] = true;
}
return s.features[key];
};
this.getSettings = function() {
return this.settings;
};
this.closeSettingsDialog = function() {
$('#dialog-container').html('').hide();
};
this.getSettingsHtml = function() {
return '<ul style="left: 20px;position: absolute;top: 15px;margin: 0;padding: 0;width: 50%;list-style-type: none;font-size: 16px;">' +
'<li style="font-weight: bold;">Features:</li>' +
'<li><label><input type="checkbox" name="autoWoot" data-greendj-settings="features"> AutoWoot</label></li>' +
'</ul>' +
'<ul style="right: -5px;position: absolute;top: 15px;margin: 0;padding: 0;width: 50%;list-style-type: none;font-size: 16px;">' +
'<li style="font-weight: bold;">Messages:</li>' +
'<li><label><input type="checkbox" name="userVote" data-greendj-settings="messages"> User vote (woot / meh)</label></li>' +
'<li><label><input type="checkbox" name="userJoin" data-greendj-settings="messages"> User join</label></li>' +
'<li><label><input type="checkbox" name="userLeave" data-greendj-settings="messages"> User leave</label></li>' +
'</ul>';
};
this.init();
}
window.greenDj = new GreenDjObject();
} else {
greenDj.info('Already loaded!');
}
})();
| src/green.dj.js | /**
* green.js - a plug.dj plugin
* @licence MIT
* @author greeny
*/
(function(){
if(typeof API === 'undefined') {
alert('You are not at plug.dj. Please use this bookmark at plug.dj.\n\nYou can find more info at greeny.github.io/green.dj');
}
if(typeof window.greenDj === 'undefined') {
function GreenDjObject() {
this.API = API;
this.version = '1.0.0';
this.settings = {};
var that = this;
/* EVENTS */
this.API.on(API.USER_JOIN, function(user) {
that.onUserJoin(user);
});
this.API.on(API.USER_LEFT, function(user) {
that.onUserLeave(user);
});
this.API.on(API.VOTE_UPDATE, function(data) {
that.onUserVote(data.user, data.vote);
});
this.API.on(API.GRAB_UPDATE, function(user) {
that.onGrab(user.user);
});
this.API.on(API.ADVANCE, function(data) {
that.onAdvance('advance', data);
});
this.API.on(API.USER_SKIP, function(data) {
that.onAdvance('skip', data);
});
this.API.on(API.MOD_SKIP, function(data) {
that.onAdvance('forceSkip', data);
});
/* FUNCTIONALITY */
this.onUserJoin = function(user) {
this.info(user.username + ' has joined the room.', 'userJoin');
};
this.onUserLeave = function(user) {
this.info(user.username + ' has left the room.', 'userLeave');
};
this.onUserVote = function(user, vote) {
console.log(vote);
this.info(user.username + ' had ' + (vote === 1 ? '<span style="color: #90ad2f">Woot!</span>ed' : '<span style="color: #c42e3b;">Meh!</span>ed') + ' this song!', 'userVote');
};
this.onGrab = function(user) {
console.log(user);
this.info(user.username + ' has <span style="color: #aa74ff">added</span> this song to his playlist.', 'userGrab');
};
this.onAdvance = function(reason, data) {
if(this.featureEnabled('autoWoot')) {
/*this.woot();*/
}
};
this.woot = function() {
setTimeout(function() {
$('#woot').click();
}, 2000);
};
this.grab = function() {
setTimeout(function() {
$('#grab').click();
}, 2000);
};
this.meh = function() {
setTimeout(function() {
$('#meh').click();
}, 2000);
};
this.info = function(message, required) {
/*if(message) {
if(!(required && this.messageEnabled(required))) {*/
var $el = $('<div class="message deletable" style="padding-left: 25px;border-left: green 3px solid;" onmouseover="$(this).find(\'.delete-button\').show();" onmouseout="$(this).find(\'.delete-button\').hide();">' +
'<div class="delete-button" style="display: none;" onclick="$(this).parent().remove()">Delete</div>' +
'[<span style="color: green">green.dj</span>] ' + message + '' +
'</div>');
var $chat = $("#chat-messages");
$chat.append($el);
$chat.scrollTop($chat.scrollTop() + $el.outerHeight(true));
/*}
}*/
};
/* SETTINGS */
this.init = function() {
$('#chat-header').find('.divider').after('<div class="chat-header-button" style="margin-left: 13px; margin-right: 0;" onclick="greenDj.onSettingsClick()">' +
'<i class="icon icon-settings-grey" onmouseover="$(this).toggleClass(\'icon-settings-grey icon-settings-white\')" onmouseout="$(this).toggleClass(\'icon-settings-grey icon-settings-white\')"></i>' +
'</div>');
$('body [data-greendj-settings]').on('change', function(e) {
e.preventDefault();
that.onSettingsChange($(this));
});
if(typeof Storage === 'undefined') {
this.info('green.dj cannot be loaded, because your browser does not support localStorage.');
return;
}
var data = localStorage.getItem('green.dj');
if(data) {
this.settings = JSON.parse(data);
} else {
this.settings = {};
}
this.info('green.dj version ' + this.version + ' loaded. Enjoy!');
};
this.onSettingsClick = function() {
$('#dialog-container').html('<div class="dialog" id="green_dj_settings_dialog" style="height: 500px;">' +
'<div class="dialog-frame"><span class="title">green.dj settings</span><i class="icon icon-dialog-close" onclick="greenDj.closeSettingsDialog()"></i></div>' +
'<div class="dialog-body" style="height: 390px;text-align: left;">' + this.getSettingsHtml() + '</div>' +
'<div class="dialog-frame"><div class="button cancel"><span>Export</span></div><div class="button submit" onclick="greenDj.closeSettingsDialog()"><span>OK</span></div></div>' +
'</div>').show();
};
this.onSettingsChange = function(el) {
var s = this.getSettings(), key = el.data('greendj-settings'), val = el.attr('name'), checked = el.val();
if(!s[key]) {
s[key] = {};
}
if(!s[key][val]) {
s[key][val] = false;
}
this.settings[key][val] = !this.settings[key][val];
el.val(this.settings[key][val]);
localStorage.setItem('green.dj', JSON.stringify(this.settings));
};
this.messageEnabled = function(key) {
var s = this.getSettings();
if(!s.messages) {
s.messages = {};
return false;
}
if(!s.messages[key]) {
s.messages[key] = true;
}
return s.messages[key];
};
this.featureEnabled = function(key) {
var s = this.getSettings();
if(!s.features) {
s.features = {};
return false;
}
if(!s.features[key]) {
s.features[key] = true;
}
return s.features[key];
};
this.getSettings = function() {
return this.settings;
};
this.closeSettingsDialog = function() {
$('#dialog-container').html('').hide();
};
this.getSettingsHtml = function() {
return '<ul style="left: 20px;position: absolute;top: 15px;margin: 0;padding: 0;width: 50%;list-style-type: none;font-size: 16px;">' +
'<li style="font-weight: bold;">Features:</li>' +
'<li><label><input type="checkbox" name="autoWoot" data-greendj-settings="features"> AutoWoot</label></li>' +
'</ul>' +
'<ul style="right: -5px;position: absolute;top: 15px;margin: 0;padding: 0;width: 50%;list-style-type: none;font-size: 16px;">' +
'<li style="font-weight: bold;">Messages:</li>' +
'<li><label><input type="checkbox" name="userVote" data-greendj-settings="messages"> User vote (woot / meh)</label></li>' +
'<li><label><input type="checkbox" name="userJoin" data-greendj-settings="messages"> User join</label></li>' +
'<li><label><input type="checkbox" name="userLeave" data-greendj-settings="messages"> User leave</label></li>' +
'</ul>';
};
this.init();
}
window.greenDj = new GreenDjObject();
} else {
greenDj.info('Already loaded!');
}
})();
| Added icons
| src/green.dj.js | Added icons | <ide><path>rc/green.dj.js
<ide>
<ide> this.onUserVote = function(user, vote) {
<ide> console.log(vote);
<del> this.info(user.username + ' had ' + (vote === 1 ? '<span style="color: #90ad2f">Woot!</span>ed' : '<span style="color: #c42e3b;">Meh!</span>ed') + ' this song!', 'userVote');
<add> this.info('<i class="icon icon-woot" style="width: 15px;background-position: ' + (vote === 1 ? '-217px' : '-181px') + ' -287px;left: 4px;"></i>' +
<add> user.username + ' had ' + (vote === 1 ? '<span style="color: #90ad2f">Woot!</span>ed' : '<span style="color: #c42e3b;">Meh!</span>ed') + ' this song!', 'userVote');
<ide> };
<ide>
<ide> this.onGrab = function(user) {
<ide> console.log(user);
<del> this.info(user.username + ' has <span style="color: #aa74ff">added</span> this song to his playlist.', 'userGrab');
<add> this.info('<i class="icon icon-grab" style="width: 17px;background-position: -146px -287px;left: 4px;"></i>' +
<add> user.username + ' has <span style="color: #aa74ff">added</span> this song to his playlist.', 'userGrab');
<ide> };
<ide>
<ide> this.onAdvance = function(reason, data) { |
|
Java | apache-2.0 | 49e995172a6be0459f7cd320c8fa9d2dc656c673 | 0 | frasese/sakai,joserabal/sakai,zqian/sakai,willkara/sakai,frasese/sakai,frasese/sakai,ouit0408/sakai,duke-compsci290-spring2016/sakai,duke-compsci290-spring2016/sakai,willkara/sakai,joserabal/sakai,frasese/sakai,OpenCollabZA/sakai,ouit0408/sakai,duke-compsci290-spring2016/sakai,rodriguezdevera/sakai,duke-compsci290-spring2016/sakai,zqian/sakai,Fudan-University/sakai,zqian/sakai,Fudan-University/sakai,OpenCollabZA/sakai,rodriguezdevera/sakai,Fudan-University/sakai,duke-compsci290-spring2016/sakai,duke-compsci290-spring2016/sakai,conder/sakai,ouit0408/sakai,ouit0408/sakai,OpenCollabZA/sakai,willkara/sakai,joserabal/sakai,OpenCollabZA/sakai,zqian/sakai,rodriguezdevera/sakai,joserabal/sakai,ouit0408/sakai,conder/sakai,Fudan-University/sakai,willkara/sakai,zqian/sakai,rodriguezdevera/sakai,zqian/sakai,joserabal/sakai,frasese/sakai,frasese/sakai,rodriguezdevera/sakai,willkara/sakai,OpenCollabZA/sakai,joserabal/sakai,conder/sakai,Fudan-University/sakai,duke-compsci290-spring2016/sakai,rodriguezdevera/sakai,conder/sakai,OpenCollabZA/sakai,Fudan-University/sakai,conder/sakai,zqian/sakai,Fudan-University/sakai,willkara/sakai,duke-compsci290-spring2016/sakai,frasese/sakai,conder/sakai,Fudan-University/sakai,rodriguezdevera/sakai,OpenCollabZA/sakai,joserabal/sakai,willkara/sakai,willkara/sakai,OpenCollabZA/sakai,ouit0408/sakai,frasese/sakai,ouit0408/sakai,zqian/sakai,ouit0408/sakai,conder/sakai,rodriguezdevera/sakai,joserabal/sakai,conder/sakai | /**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.data.dao.grading;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
/**
* Represents a students response on a particular quiz
*
*
*/
public class AssessmentGradingData implements java.io.Serializable
// need to implement org.osid.assessment.ItemTaken in the future
// - daisyf 10/11/04
{
private static final long serialVersionUID = 7526471155622776147L;
private Long assessmentGradingId;
private String agentId;
// private PublishedAssessmentIfc publishedAssessment;
private Date submittedDate;
private Boolean isLate;
private Boolean forGrade;
private Double totalAutoScore;
private Double totalOverrideScore;
private Double finalScore; // final total score
private String comments;
private Integer status;
private String gradedBy;
private Date gradedDate;
private Set<ItemGradingData> itemGradingSet = new HashSet<ItemGradingData>();
private Date attemptDate;
private Integer timeElapsed;
private Boolean submitFromTimeoutPopup;
private int totalSubmitted;
private Long publishedAssessmentId;
private String publishedAssessmentTitle;
private Boolean isAutoSubmitted;
private Integer lastVisitedPart = 0;
private Integer lastVisitedQuestion = 0;
private Boolean hasAutoSubmissionRun = false;
private Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>();
// Because of SAK-16456, we no longer need to show the auto/human graded
// status per submission, I don't think we
// need to distinguish status 2 and 3 anymore. But I just leave them here...
/**
* status = 1: submit but not grade yet
*/
public static final Integer SUBMITTED = Integer.valueOf(1);
/**
* status = 2: grader has went to total score page and graded + AUTO_GRADED
*/
public static final Integer AUTO_GRADED = Integer.valueOf(2);
/**
* status = 3: grader has went to total score page and graded + at least one
* question NEED_HUMAN_ATTENTION
*/
public static final Integer NEED_HUMAN_ATTENTION = Integer.valueOf(3);
/**
* status = 4: the assessment has be republished. This assessment has been
* submitted. Therefore, this it needs to be resubmit
*/
public static final Integer ASSESSMENT_UPDATED_NEED_RESUBMIT = Integer
.valueOf(4);
/**
* status = 5: there is no submission but grader update something in the
* score page
*/
public static final Integer NO_SUBMISSION = Integer.valueOf(5);
/**
* status = 6: the assessment has be republished. This assessment has begun
* but not yet been submitted (saved/in progress). Therefore, just warn the
* student about the update ("resubmit" is not applicable here).
*/
public static final Integer ASSESSMENT_UPDATED = Integer.valueOf(6);
/**
* status = 7: the student has other(s) submission(s) and autosubmit detects
* this submission will keep in progress forever
*/
public static final Integer AUTOSUBMIT_UPDATED = Integer.valueOf(7);
public AssessmentGradingData() {
}
// this constructor do not contains Set of ItemGradingData
public AssessmentGradingData(Long assessmentGradingId,
Long publishedAssessmentId, String publishedAssessmentTitle,
String agentId, Date submittedDate, Boolean isLate,
Boolean forGrade, Double totalAutoScore, Double totalOverrideScore,
Double finalScore, String comments, Integer status, String gradedBy,
Date gradedDate, Date attemptDate, Integer timeElapsed,
Boolean isAutoSubmitted) {
this.assessmentGradingId = assessmentGradingId;
this.publishedAssessmentId = publishedAssessmentId;
this.publishedAssessmentTitle = publishedAssessmentTitle;
this.agentId = agentId;
this.submittedDate = submittedDate;
this.isLate = isLate;
this.forGrade = forGrade;
this.totalAutoScore = totalAutoScore;
this.totalOverrideScore = totalOverrideScore;
this.finalScore = finalScore;
this.comments = comments;
this.status = status;
this.gradedBy = gradedBy;
this.gradedDate = gradedDate;
this.attemptDate = attemptDate;
this.timeElapsed = timeElapsed;
this.isAutoSubmitted = isAutoSubmitted;
}
public AssessmentGradingData(Long assessmentGradingId,
Long publishedAssessmentId, String publishedAssessmentTitle,
String agentId, Date submittedDate, Boolean isLate,
Boolean forGrade, Double totalAutoScore, Double totalOverrideScore,
Double finalScore, String comments, Integer status, String gradedBy,
Date gradedDate, Date attemptDate, Integer timeElapsed) {
this(assessmentGradingId, publishedAssessmentId,
publishedAssessmentTitle, agentId, submittedDate, isLate,
forGrade, totalAutoScore, totalOverrideScore, finalScore,
comments, status, gradedBy, gradedDate, attemptDate,
timeElapsed, Boolean.valueOf(false));
}
public AssessmentGradingData(Long assessmentGradingId,
Long publishedAssessmentId, String agentId, Date submittedDate,
Boolean isLate, Boolean forGrade, Double totalAutoScore,
Double totalOverrideScore, Double finalScore, String comments,
Integer status, String gradedBy, Date gradedDate, Date attemptDate,
Integer timeElapsed) {
this.assessmentGradingId = assessmentGradingId;
this.publishedAssessmentId = publishedAssessmentId;
this.agentId = agentId;
this.submittedDate = submittedDate;
this.isLate = isLate;
this.forGrade = forGrade;
this.totalAutoScore = totalAutoScore;
this.totalOverrideScore = totalOverrideScore;
this.finalScore = finalScore;
this.comments = comments;
this.status = status;
this.gradedBy = gradedBy;
this.gradedDate = gradedDate;
this.attemptDate = attemptDate;
this.timeElapsed = timeElapsed;
}
public AssessmentGradingData(Long publishedAssessmentId, int totalSubmitted) {
this.publishedAssessmentId = publishedAssessmentId;
this.totalSubmitted = totalSubmitted;
}
public Long getAssessmentGradingId() {
return assessmentGradingId;
}
public void setAssessmentGradingId(Long assessmentGradingId) {
this.assessmentGradingId = assessmentGradingId;
}
public Long getPublishedAssessmentId() {
return publishedAssessmentId;
}
public void setPublishedAssessmentId(Long publishedAssessmentId) {
this.publishedAssessmentId = publishedAssessmentId;
}
public String getAgentId() {
return agentId;
}
public void setAgentId(String agentId) {
this.agentId = agentId;
}
public Date getSubmittedDate() {
return submittedDate;
}
public void setSubmittedDate(Date submittedDate) {
this.submittedDate = submittedDate;
}
public Boolean getIsLate() {
return isLate;
}
public void setIsLate(Boolean isLate) {
this.isLate = isLate;
}
public Boolean getForGrade() {
return forGrade;
}
public void setForGrade(Boolean forGrade) {
this.forGrade = forGrade;
}
public Double getTotalAutoScore() {
return this.totalAutoScore;
}
public void setTotalAutoScore(Double totalAutoScore) {
if (totalAutoScore != null) {
if (totalAutoScore.doubleValue() < 0) {
this.totalAutoScore = new Double("0");
} else {
this.totalAutoScore = totalAutoScore;
}
} else {
this.totalAutoScore = null;
}
}
public Double getTotalOverrideScore() {
return this.totalOverrideScore;
}
public void setTotalOverrideScore(Double totalOverrideScore) {
this.totalOverrideScore = totalOverrideScore;
}
public Double getFinalScore() {
/*
* if (this.totalAutoScore != null && this.totalOverrideScore != null ){
* double total = 0; if (this.totalAutoScore != null) total +=
* this.totalAutoScore.doubleValue(); if (this.totalOverrideScore !=
* null) total += this.totalOverrideScore.doubleValue(); this.finalScore
* = new Double(total); }
*
* // remove rounding , SAK-2848 // Round to the nearest 1/10th. if
* (this.finalScore !=null ){ double alignment =
* this.finalScore.doubleValue(); int tmp = Math.round(alignment *
* 10.0d); alignment = (double)tmp / 10.0d; this.finalScore = new
* Double(alignment); }
*/
return this.finalScore;
}
public void setFinalScore(Double finalScore) {
this.finalScore = finalScore;
}
public String getComments() {
return comments;
}
public void setComments(String comments) {
this.comments = comments;
}
public String getGradedBy() {
return gradedBy;
}
public void setGradedBy(String gradedBy) {
this.gradedBy = gradedBy;
}
public Date getGradedDate() {
return gradedDate;
}
public void setGradedDate(Date gradedDate) {
this.gradedDate = gradedDate;
}
/**
* In some cases, students are allowed to submit multiple assessment for
* grading. However, the grader has the choice to select one to represent
* how well the student does overall. status = 1 means this submitted
* assessment is selected.
*/
// daisy's comment: I am not sure Integer(1) is being used at all. 11/18/05
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Set<ItemGradingData> getItemGradingSet() {
return itemGradingSet;
}
public void setItemGradingSet(Set<ItemGradingData> itemGradingSet) {
this.itemGradingSet = itemGradingSet;
}
public Date getAttemptDate() {
return attemptDate;
}
public void setAttemptDate(Date attemptDate) {
this.attemptDate = attemptDate;
}
public Integer getTimeElapsed() {
return timeElapsed;
}
public void setTimeElapsed(Integer timeElapsed) {
this.timeElapsed = timeElapsed;
}
public Boolean getSubmitFromTimeoutPopup() {
return submitFromTimeoutPopup;
}
public void setSubmitFromTimeoutPopup(Boolean submitFromTimeoutPopup) {
this.submitFromTimeoutPopup = submitFromTimeoutPopup;
}
public int getTotalSubmitted() {
return totalSubmitted;
}
public void setTotalSubmitted(int totalSubmitted) {
this.totalSubmitted = totalSubmitted;
}
public String getPublishedAssessmentTitle() {
return publishedAssessmentTitle;
}
public void setPublishedAssessmentTitle(String publishedAssessmentTitle) {
this.publishedAssessmentTitle = publishedAssessmentTitle;
}
// daisy added this for mining partial assessmentGradingData object
// 11/17/05
private Long publishedItemId;
private boolean isRecorded;
public Long getPublishedItemId() {
return publishedItemId;
}
public void setItemGradingId(Long publishedItemId) {
this.publishedItemId = publishedItemId;
}
public AssessmentGradingData(Long assessmentGradingId,
Long publishedItemId, String agentId, Double finalScore,
Date submittedDate) {
this.assessmentGradingId = assessmentGradingId;
this.publishedItemId = publishedItemId;
this.agentId = agentId;
this.finalScore = finalScore;
this.submittedDate = submittedDate;
}
public Boolean getIsAutoSubmitted() {
return isAutoSubmitted;
}
public void setIsAutoSubmitted(Boolean isAutoSubmitted) {
this.isAutoSubmitted = isAutoSubmitted;
}
public Integer getLastVisitedPart() {
return lastVisitedPart;
}
public void setLastVisitedPart(Integer lastVisitedPart) {
this.lastVisitedPart = lastVisitedPart;
}
public Integer getLastVisitedQuestion() {
return lastVisitedQuestion;
}
public void setLastVisitedQuestion(Integer lastVisitedQuestion) {
this.lastVisitedQuestion = lastVisitedQuestion;
}
public boolean getIsRecorded() {
return isRecorded;
}
public void setIsRecorded(boolean isRecorded) {
this.isRecorded = isRecorded;
}
public Boolean getHasAutoSubmissionRun() {
return hasAutoSubmissionRun;
}
public void setHasAutoSubmissionRun(Boolean hasAutoSubmissionRun) {
this.hasAutoSubmissionRun = hasAutoSubmissionRun;
}
public Set<AssessmentGradingAttachment> getAssessmentGradingAttachmentSet() {
return assessmentGradingAttachmentSet;
}
public void setAssessmentGradingAttachmentSet(
Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet) {
this.assessmentGradingAttachmentSet = assessmentGradingAttachmentSet;
}
public List<AssessmentGradingAttachment> getAssessmentGradingAttachmentList() {
List<AssessmentGradingAttachment> list = new ArrayList<AssessmentGradingAttachment>();
if (assessmentGradingAttachmentSet != null) {
Iterator<AssessmentGradingAttachment> iter = assessmentGradingAttachmentSet
.iterator();
while (iter.hasNext()) {
AssessmentGradingAttachment a = (AssessmentGradingAttachment) iter.next();
list.add(a);
}
}
return list;
}
public void setAssessmentGradingAttachmentList(
List<AssessmentGradingAttachment> assessmentGradingAttachmentList) {
Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet = null;
if (assessmentGradingAttachmentList != null) {
assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>(assessmentGradingAttachmentList);
} else {
assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>();
}
this.assessmentGradingAttachmentSet = assessmentGradingAttachmentSet;
}
@Override
public int hashCode() {
HashCodeBuilder builder = new HashCodeBuilder(1,31);
builder.append(agentId);
builder.append(assessmentGradingId);
builder.append(attemptDate);
builder.append(comments);
builder.append(finalScore);
builder.append(forGrade);
builder.append(gradedBy);
builder.append(gradedDate);
builder.append(gradedDate);
builder.append(isAutoSubmitted);
builder.append(isLate);
builder.append(isRecorded);
builder.append(itemGradingSet);
builder.append(lastVisitedPart);
builder.append(lastVisitedQuestion);
builder.append(publishedAssessmentId);
builder.append(publishedAssessmentTitle);
builder.append(publishedItemId);
builder.append(submittedDate);
builder.append(timeElapsed);
builder.append(totalAutoScore);
builder.append(totalOverrideScore);
builder.append(totalSubmitted);
return builder.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
AssessmentGradingData other = (AssessmentGradingData) obj;
EqualsBuilder builder = new EqualsBuilder();
builder.appendSuper(super.equals(obj));
builder.append(agentId,other.agentId);
builder.append(assessmentGradingId,other.assessmentGradingId);
builder.append(attemptDate,other.attemptDate);
builder.append(comments,other.comments);
builder.append(finalScore,other.finalScore);
builder.append(forGrade,other.forGrade);
builder.append(gradedBy,other.gradedBy);
builder.append(gradedDate,other.gradedDate);
builder.append(isAutoSubmitted,other.isAutoSubmitted);
builder.append(isLate,other.isLate);
builder.append(isRecorded,other.isRecorded);
builder.append(itemGradingSet,other.itemGradingSet);
builder.append(lastVisitedPart,other.lastVisitedPart);
builder.append(lastVisitedQuestion,other.lastVisitedQuestion);
builder.append(publishedAssessmentId,other.publishedAssessmentId);
builder.append(publishedAssessmentTitle,other.publishedAssessmentTitle);
builder.append(publishedItemId,other.publishedItemId);
builder.append(status,other.status);
builder.append(submittedDate,other.submittedDate);
builder.append(timeElapsed,other.timeElapsed);
builder.append(totalAutoScore,other.totalAutoScore);
builder.append(totalOverrideScore,other.totalOverrideScore);
builder.append(totalSubmitted,other.totalSubmitted);
return builder.isEquals();
}
}
| samigo/samigo-api/src/java/org/sakaiproject/tool/assessment/data/dao/grading/AssessmentGradingData.java | /**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.data.dao.grading;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
/**
* Represents a students response on a particular quiz
*
*
*/
public class AssessmentGradingData implements java.io.Serializable
// need to implement org.osid.assessment.ItemTaken in the future
// - daisyf 10/11/04
{
private static final long serialVersionUID = 7526471155622776147L;
private Long assessmentGradingId;
private String agentId;
// private PublishedAssessmentIfc publishedAssessment;
private Date submittedDate;
private Boolean isLate;
private Boolean forGrade;
private Double totalAutoScore;
private Double totalOverrideScore;
private Double finalScore; // final total score
private String comments;
private Integer status;
private String gradedBy;
private Date gradedDate;
private Set<ItemGradingData> itemGradingSet = new HashSet<ItemGradingData>();
private Date attemptDate;
private Integer timeElapsed;
private Boolean submitFromTimeoutPopup;
private int totalSubmitted;
private Long publishedAssessmentId;
private String publishedAssessmentTitle;
private Boolean isAutoSubmitted;
private Integer lastVisitedPart = 0;
private Integer lastVisitedQuestion = 0;
private Boolean hasAutoSubmissionRun = false;
private Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>();
// Because of SAK-16456, we no longer need to show the auto/human graded
// status per submission, I don't think we
// need to distinguish status 2 and 3 anymore. But I just leave them here...
/**
* status = 1: submit but not grade yet
*/
public static final Integer SUBMITTED = Integer.valueOf(1);
/**
* status = 2: grader has went to total score page and graded + AUTO_GRADED
*/
public static final Integer AUTO_GRADED = Integer.valueOf(2);
/**
* status = 3: grader has went to total score page and graded + at least one
* question NEED_HUMAN_ATTENTION
*/
public static final Integer NEED_HUMAN_ATTENTION = Integer.valueOf(3);
/**
* status = 4: the assessment has be republished. This assessment has been
* submitted. Therefore, this it needs to be resubmit
*/
public static final Integer ASSESSMENT_UPDATED_NEED_RESUBMIT = Integer
.valueOf(4);
/**
* status = 5: there is no submission but grader update something in the
* score page
*/
public static final Integer NO_SUBMISSION = Integer.valueOf(5);
/**
* status = 6: the assessment has be republished. This assessment has begun
* but not yet been submitted (saved/in progress). Therefore, just warn the
* student about the update ("resubmit" is not applicable here).
*/
public static final Integer ASSESSMENT_UPDATED = Integer.valueOf(6);
/**
* status = 7: the student has other(s) submission(s) and autosubmit detects
* this submission will keep in progress forever
*/
public static final Integer AUTOSUBMIT_UPDATED = Integer.valueOf(7);
public AssessmentGradingData() {
}
// this constructor do not contains Set of ItemGradingData
public AssessmentGradingData(Long assessmentGradingId,
Long publishedAssessmentId, String publishedAssessmentTitle,
String agentId, Date submittedDate, Boolean isLate,
Boolean forGrade, Double totalAutoScore, Double totalOverrideScore,
Double finalScore, String comments, Integer status, String gradedBy,
Date gradedDate, Date attemptDate, Integer timeElapsed,
Boolean isAutoSubmitted) {
this.assessmentGradingId = assessmentGradingId;
this.publishedAssessmentId = publishedAssessmentId;
this.publishedAssessmentTitle = publishedAssessmentTitle;
this.agentId = agentId;
this.submittedDate = submittedDate;
this.isLate = isLate;
this.forGrade = forGrade;
this.totalAutoScore = totalAutoScore;
this.totalOverrideScore = totalOverrideScore;
this.finalScore = finalScore;
this.comments = comments;
this.status = status;
this.gradedBy = gradedBy;
this.gradedDate = gradedDate;
this.attemptDate = attemptDate;
this.timeElapsed = timeElapsed;
this.isAutoSubmitted = isAutoSubmitted;
}
public AssessmentGradingData(Long assessmentGradingId,
Long publishedAssessmentId, String publishedAssessmentTitle,
String agentId, Date submittedDate, Boolean isLate,
Boolean forGrade, Double totalAutoScore, Double totalOverrideScore,
Double finalScore, String comments, Integer status, String gradedBy,
Date gradedDate, Date attemptDate, Integer timeElapsed) {
this(assessmentGradingId, publishedAssessmentId,
publishedAssessmentTitle, agentId, submittedDate, isLate,
forGrade, totalAutoScore, totalOverrideScore, finalScore,
comments, status, gradedBy, gradedDate, attemptDate,
timeElapsed, Boolean.valueOf(false));
}
public AssessmentGradingData(Long assessmentGradingId,
Long publishedAssessmentId, String agentId, Date submittedDate,
Boolean isLate, Boolean forGrade, Double totalAutoScore,
Double totalOverrideScore, Double finalScore, String comments,
Integer status, String gradedBy, Date gradedDate, Date attemptDate,
Integer timeElapsed) {
this.assessmentGradingId = assessmentGradingId;
this.publishedAssessmentId = publishedAssessmentId;
this.agentId = agentId;
this.submittedDate = submittedDate;
this.isLate = isLate;
this.forGrade = forGrade;
this.totalAutoScore = totalAutoScore;
this.totalOverrideScore = totalOverrideScore;
this.finalScore = finalScore;
this.comments = comments;
this.status = status;
this.gradedBy = gradedBy;
this.gradedDate = gradedDate;
this.attemptDate = attemptDate;
this.timeElapsed = timeElapsed;
}
public AssessmentGradingData(Long publishedAssessmentId, int totalSubmitted) {
this.publishedAssessmentId = publishedAssessmentId;
this.totalSubmitted = totalSubmitted;
}
public Long getAssessmentGradingId() {
return assessmentGradingId;
}
public void setAssessmentGradingId(Long assessmentGradingId) {
this.assessmentGradingId = assessmentGradingId;
}
public Long getPublishedAssessmentId() {
return publishedAssessmentId;
}
public void setPublishedAssessmentId(Long publishedAssessmentId) {
this.publishedAssessmentId = publishedAssessmentId;
}
public String getAgentId() {
return agentId;
}
public void setAgentId(String agentId) {
this.agentId = agentId;
}
public Date getSubmittedDate() {
return submittedDate;
}
public void setSubmittedDate(Date submittedDate) {
this.submittedDate = submittedDate;
}
public Boolean getIsLate() {
return isLate;
}
public void setIsLate(Boolean isLate) {
this.isLate = isLate;
}
public Boolean getForGrade() {
return forGrade;
}
public void setForGrade(Boolean forGrade) {
this.forGrade = forGrade;
}
public Double getTotalAutoScore() {
return this.totalAutoScore;
}
public void setTotalAutoScore(Double totalAutoScore) {
if (totalAutoScore != null) {
if (totalAutoScore.doubleValue() < 0) {
this.totalAutoScore = new Double("0");
} else {
this.totalAutoScore = totalAutoScore;
}
} else {
this.totalAutoScore = null;
}
}
public Double getTotalOverrideScore() {
return this.totalOverrideScore;
}
public void setTotalOverrideScore(Double totalOverrideScore) {
this.totalOverrideScore = totalOverrideScore;
}
public Double getFinalScore() {
/*
* if (this.totalAutoScore != null && this.totalOverrideScore != null ){
* double total = 0; if (this.totalAutoScore != null) total +=
* this.totalAutoScore.doubleValue(); if (this.totalOverrideScore !=
* null) total += this.totalOverrideScore.doubleValue(); this.finalScore
* = new Double(total); }
*
* // remove rounding , SAK-2848 // Round to the nearest 1/10th. if
* (this.finalScore !=null ){ double alignment =
* this.finalScore.doubleValue(); int tmp = Math.round(alignment *
* 10.0d); alignment = (double)tmp / 10.0d; this.finalScore = new
* Double(alignment); }
*/
return this.finalScore;
}
public void setFinalScore(Double finalScore) {
this.finalScore = finalScore;
}
public String getComments() {
return comments;
}
public void setComments(String comments) {
this.comments = comments;
}
public String getGradedBy() {
return gradedBy;
}
public void setGradedBy(String gradedBy) {
this.gradedBy = gradedBy;
}
public Date getGradedDate() {
return gradedDate;
}
public void setGradedDate(Date gradedDate) {
this.gradedDate = gradedDate;
}
/**
* In some cases, students are allowed to submit multiple assessment for
* grading. However, the grader has the choice to select one to represent
* how well the student does overall. status = 1 means this submitted
* assessment is selected.
*/
// daisy's comment: I am not sure Integer(1) is being used at all. 11/18/05
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Set<ItemGradingData> getItemGradingSet() {
return itemGradingSet;
}
public void setItemGradingSet(Set<ItemGradingData> itemGradingSet) {
this.itemGradingSet = itemGradingSet;
}
public Date getAttemptDate() {
return attemptDate;
}
public void setAttemptDate(Date attemptDate) {
this.attemptDate = attemptDate;
}
public Integer getTimeElapsed() {
return timeElapsed;
}
public void setTimeElapsed(Integer timeElapsed) {
this.timeElapsed = timeElapsed;
}
public Boolean getSubmitFromTimeoutPopup() {
return submitFromTimeoutPopup;
}
public void setSubmitFromTimeoutPopup(Boolean submitFromTimeoutPopup) {
this.submitFromTimeoutPopup = submitFromTimeoutPopup;
}
public int getTotalSubmitted() {
return totalSubmitted;
}
public void setTotalSubmitted(int totalSubmitted) {
this.totalSubmitted = totalSubmitted;
}
public String getPublishedAssessmentTitle() {
return publishedAssessmentTitle;
}
public void setPublishedAssessmentTitle(String publishedAssessmentTitle) {
this.publishedAssessmentTitle = publishedAssessmentTitle;
}
// daisy added this for mining partial assessmentGradingData object
// 11/17/05
private Long publishedItemId;
private boolean isRecorded;
public Long getPublishedItemId() {
return publishedItemId;
}
public void setItemGradingId(Long publishedItemId) {
this.publishedItemId = publishedItemId;
}
public AssessmentGradingData(Long assessmentGradingId,
Long publishedItemId, String agentId, Double finalScore,
Date submittedDate) {
this.assessmentGradingId = assessmentGradingId;
this.publishedItemId = publishedItemId;
this.agentId = agentId;
this.finalScore = finalScore;
this.submittedDate = submittedDate;
}
public Boolean getIsAutoSubmitted() {
return isAutoSubmitted;
}
public void setIsAutoSubmitted(Boolean isAutoSubmitted) {
this.isAutoSubmitted = isAutoSubmitted;
}
public Integer getLastVisitedPart() {
return lastVisitedPart;
}
public void setLastVisitedPart(Integer lastVisitedPart) {
this.lastVisitedPart = lastVisitedPart;
}
public Integer getLastVisitedQuestion() {
return lastVisitedQuestion;
}
public void setLastVisitedQuestion(Integer lastVisitedQuestion) {
this.lastVisitedQuestion = lastVisitedQuestion;
}
public boolean getIsRecorded() {
return isRecorded;
}
public void setIsRecorded(boolean isRecorded) {
this.isRecorded = isRecorded;
}
public Boolean getHasAutoSubmissionRun() {
return hasAutoSubmissionRun;
}
public void setHasAutoSubmissionRun(Boolean hasAutoSubmissionRun) {
this.hasAutoSubmissionRun = hasAutoSubmissionRun;
}
public Set<AssessmentGradingAttachment> getAssessmentGradingAttachmentSet() {
return assessmentGradingAttachmentSet;
}
public void setAssessmentGradingAttachmentSet(
Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet) {
this.assessmentGradingAttachmentSet = assessmentGradingAttachmentSet;
}
public List<AssessmentGradingAttachment> getAssessmentGradingAttachmentList() {
List<AssessmentGradingAttachment> list = new ArrayList<AssessmentGradingAttachment>();
if (assessmentGradingAttachmentSet != null) {
Iterator<AssessmentGradingAttachment> iter = assessmentGradingAttachmentSet
.iterator();
while (iter.hasNext()) {
AssessmentGradingAttachment a = (AssessmentGradingAttachment) iter.next();
list.add(a);
}
}
return list;
}
public void setAssessmentGradingAttachmentList(
List<AssessmentGradingAttachment> assessmentGradingAttachmentList) {
Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>(assessmentGradingAttachmentList);
this.assessmentGradingAttachmentSet = assessmentGradingAttachmentSet;
}
@Override
public int hashCode() {
HashCodeBuilder builder = new HashCodeBuilder(1,31);
builder.append(agentId);
builder.append(assessmentGradingId);
builder.append(attemptDate);
builder.append(comments);
builder.append(finalScore);
builder.append(forGrade);
builder.append(gradedBy);
builder.append(gradedDate);
builder.append(gradedDate);
builder.append(isAutoSubmitted);
builder.append(isLate);
builder.append(isRecorded);
builder.append(itemGradingSet);
builder.append(lastVisitedPart);
builder.append(lastVisitedQuestion);
builder.append(publishedAssessmentId);
builder.append(publishedAssessmentTitle);
builder.append(publishedItemId);
builder.append(submittedDate);
builder.append(timeElapsed);
builder.append(totalAutoScore);
builder.append(totalOverrideScore);
builder.append(totalSubmitted);
return builder.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
AssessmentGradingData other = (AssessmentGradingData) obj;
EqualsBuilder builder = new EqualsBuilder();
builder.appendSuper(super.equals(obj));
builder.append(agentId,other.agentId);
builder.append(assessmentGradingId,other.assessmentGradingId);
builder.append(attemptDate,other.attemptDate);
builder.append(comments,other.comments);
builder.append(finalScore,other.finalScore);
builder.append(forGrade,other.forGrade);
builder.append(gradedBy,other.gradedBy);
builder.append(gradedDate,other.gradedDate);
builder.append(isAutoSubmitted,other.isAutoSubmitted);
builder.append(isLate,other.isLate);
builder.append(isRecorded,other.isRecorded);
builder.append(itemGradingSet,other.itemGradingSet);
builder.append(lastVisitedPart,other.lastVisitedPart);
builder.append(lastVisitedQuestion,other.lastVisitedQuestion);
builder.append(publishedAssessmentId,other.publishedAssessmentId);
builder.append(publishedAssessmentTitle,other.publishedAssessmentTitle);
builder.append(publishedItemId,other.publishedItemId);
builder.append(status,other.status);
builder.append(submittedDate,other.submittedDate);
builder.append(timeElapsed,other.timeElapsed);
builder.append(totalAutoScore,other.totalAutoScore);
builder.append(totalOverrideScore,other.totalOverrideScore);
builder.append(totalSubmitted,other.totalSubmitted);
return builder.isEquals();
}
}
| SAM-2717 - Handle copy of null grading attachments
There is a setter for the grading attachment list on
AssessmentGradingData, which wraps an underlying HashSet. When copying,
for example, from AgentResults, where there are no attachments, the
HashSet constructor on the null value throws an
InvocationTargetException, which obscured the problem. This change
creates a new, empty set if null is passed to this setter.
| samigo/samigo-api/src/java/org/sakaiproject/tool/assessment/data/dao/grading/AssessmentGradingData.java | SAM-2717 - Handle copy of null grading attachments | <ide><path>amigo/samigo-api/src/java/org/sakaiproject/tool/assessment/data/dao/grading/AssessmentGradingData.java
<ide>
<ide> public void setAssessmentGradingAttachmentList(
<ide> List<AssessmentGradingAttachment> assessmentGradingAttachmentList) {
<del> Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>(assessmentGradingAttachmentList);
<add> Set<AssessmentGradingAttachment> assessmentGradingAttachmentSet = null;
<add>
<add> if (assessmentGradingAttachmentList != null) {
<add> assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>(assessmentGradingAttachmentList);
<add> } else {
<add> assessmentGradingAttachmentSet = new HashSet<AssessmentGradingAttachment>();
<add> }
<add>
<ide> this.assessmentGradingAttachmentSet = assessmentGradingAttachmentSet;
<ide> }
<ide> |
|
Java | agpl-3.0 | 07ea1e5adf300573b22ca87e1f18013b3d52c846 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 03a7964e-2e60-11e5-9284-b827eb9e62be | hello.java | 03a22d44-2e60-11e5-9284-b827eb9e62be | 03a7964e-2e60-11e5-9284-b827eb9e62be | hello.java | 03a7964e-2e60-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>03a22d44-2e60-11e5-9284-b827eb9e62be
<add>03a7964e-2e60-11e5-9284-b827eb9e62be |
|
Java | mit | error: pathspec 'src/test/java/nl/github/martijn9612/fishy/position/DrawPositionTest.java' did not match any file(s) known to git
| 0ec4416a58d1dcf57dced62b31041e0fbe2615ce | 1 | martijn9612/fishy | package nl.github.martijn9612.fishy.position;
import junit.framework.TestCase;
import static org.mockito.Mockito.mock;
import org.junit.Test;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.Image;
public class DrawPositionTest extends TestCase {
private int posX;
private int posY;
private DrawPosition test;
private Graphics g = mock(Graphics.class);
private Image im = mock(Image.class);
@Override
protected void setUp() {
posX = 1;
posY = 1;
test = new DrawPosition(posX, posY);
}
@Test
public void testDrawImage() {
test.drawImage(g, im);
}
@Test
public void testGetMousePosition() {
MousePosition mousetest = new MousePosition(posX, posY);
mousetest.flipYAxis();
assertEquals(test.getMousePosition().toString(), mousetest.toString());
}
@Test
public void testToString() {
String retstring = "Draw position X: 1, Y: 1";
assertEquals(retstring, test.toString());
}
}
| src/test/java/nl/github/martijn9612/fishy/position/DrawPositionTest.java | add DrawPositionTest, 100% coverage <testing
| src/test/java/nl/github/martijn9612/fishy/position/DrawPositionTest.java | add DrawPositionTest, 100% coverage <testing | <ide><path>rc/test/java/nl/github/martijn9612/fishy/position/DrawPositionTest.java
<add>package nl.github.martijn9612.fishy.position;
<add>
<add>import junit.framework.TestCase;
<add>import static org.mockito.Mockito.mock;
<add>
<add>import org.junit.Test;
<add>import org.newdawn.slick.Graphics;
<add>import org.newdawn.slick.Image;
<add>
<add>public class DrawPositionTest extends TestCase {
<add>
<add> private int posX;
<add> private int posY;
<add> private DrawPosition test;
<add> private Graphics g = mock(Graphics.class);
<add> private Image im = mock(Image.class);
<add>
<add> @Override
<add> protected void setUp() {
<add> posX = 1;
<add> posY = 1;
<add> test = new DrawPosition(posX, posY);
<add> }
<add>
<add> @Test
<add> public void testDrawImage() {
<add> test.drawImage(g, im);
<add> }
<add>
<add> @Test
<add> public void testGetMousePosition() {
<add> MousePosition mousetest = new MousePosition(posX, posY);
<add> mousetest.flipYAxis();
<add> assertEquals(test.getMousePosition().toString(), mousetest.toString());
<add> }
<add>
<add> @Test
<add> public void testToString() {
<add> String retstring = "Draw position X: 1, Y: 1";
<add> assertEquals(retstring, test.toString());
<add> }
<add>} |
|
Java | apache-2.0 | b6b65e389bedae8a2a0a316bc7f3e1da2aef98a8 | 0 | aertoria/opennlp,jsubercaze/opennlp-tools-steroids,jsubercaze/opennlp-tools-steroids,aertoria/opennlp,jsubercaze/opennlp-tools-steroids,Eagles2F/opennlp,Eagles2F/opennlp,aertoria/opennlp,Eagles2F/opennlp | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package opennlp.maxent;
import java.io.IOException;
import java.io.Reader;
import opennlp.model.Event;
import opennlp.model.EventStream;
import opennlp.model.MaxentModel;
/**
* Trains or evaluates maxent components which have implemented the Evalable
* interface.
*/
public class TrainEval {
public static void eval(MaxentModel model, Reader r, Evalable e) {
eval(model, r, e, false);
}
public static void eval(MaxentModel model, Reader r,
Evalable e, boolean verbose) {
float totPos=0, truePos=0, falsePos=0;
Event[] events = (e.getEventCollector(r)).getEvents(true);
//MaxentModel model = e.getModel(dir, name);
String negOutcome = e.getNegativeOutcome();
for (Event event : events) {
String guess = model.getBestOutcome(model.eval(event.getContext()));
String ans = event.getOutcome();
if (verbose)
System.out.println(ans + " " + guess);
if (!ans.equals(negOutcome))
totPos++;
if (!guess.equals(negOutcome) && !guess.equals(ans))
falsePos++;
else if (ans.equals(guess))
truePos++;
}
System.out.println("Precision: " + truePos/(truePos+falsePos));
System.out.println("Recall: " + truePos/totPos);
}
public static MaxentModel train(EventStream events, int cutoff) throws IOException {
return GIS.trainModel(events, 100, cutoff);
}
public static void run(String[] args, Evalable e) throws IOException {
// TOM: Was commented out to remove dependency on gnu getopt.
// String dir = "./";
// String stem = "maxent";
// int cutoff = 0; // default to no cutoff
// boolean train = false;
// boolean verbose = false;
// boolean local = false;
// gnu.getopt.Getopt g =
// new gnu.getopt.Getopt("maxent", args, "d:s:c:tvl");
// int c;
// while ((c = g.getopt()) != -1) {
// switch(c) {
// case 'd':
// dir = g.getOptarg()+"/";
// break;
// case 's':
// stem = g.getOptarg();
// break;
// case 'c':
// cutoff = Integer.parseInt(g.getOptarg());
// break;
// case 't':
// train = true;
// break;
// case 'l':
// local = true;
// break;
// case 'v':
// verbose = true;
// break;
// }
// }
//
// int lastIndex = g.getOptind();
// if (lastIndex >= args.length) {
// System.out.println("This is a usage message from opennlp.maxent.TrainEval. You have called the training procedure for a maxent application with the incorrect arguments. These are the options:");
//
// System.out.println("\nOptions for defining the model location and name:");
// System.out.println(" -d <directoryName>");
// System.out.println("\tThe directory in which to store the model.");
// System.out.println(" -s <modelName>");
// System.out.println("\tThe name of the model, e.g. EnglishPOS.bin.gz or NameFinder.txt.");
//
// System.out.println("\nOptions for training:");
// System.out.println(" -c <cutoff>");
// System.out.println("\tAn integer cutoff level to reduce infrequent contextual predicates.");
// System.out.println(" -t\tTrain a model. If absent, the given model will be loaded and evaluated.");
// System.out.println("\nOptions for evaluation:");
// System.out.println(" -l\t the evaluation method of class that uses the model. If absent, TrainEval's eval method is used.");
// System.out.println(" -v\t verbose.");
// System.out.println("\nThe final argument is the data file to be loaded and used for either training or evaluation.");
// System.out.println("\nAs an example for training:\n java opennlp.grok.preprocess.postag.POSTaggerME -t -d ./ -s EnglishPOS.bin.gz -c 7 postag.data");
// System.exit(0);
// }
//
// FileReader datafr = new FileReader(args[lastIndex]);
//
// if (train) {
// MaxentModel m =
// train(new EventCollectorAsStream(e.getEventCollector(datafr)),
// cutoff);
// new SuffixSensitiveGISModelWriter((AbstractModel)m,
// new File(dir+stem)).persist();
// }
// else {
// MaxentModel model =
// new SuffixSensitiveGISModelReader(new File(dir+stem)).getModel();
// if (local) {
// e.localEval(model, datafr, e, verbose);
// } else {
// eval(model, datafr, e, verbose);
// }
// }
}
} | opennlp-maxent/src/main/java/opennlp/maxent/TrainEval.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package opennlp.maxent;
import java.io.IOException;
import java.io.Reader;
import opennlp.model.Event;
import opennlp.model.EventStream;
import opennlp.model.MaxentModel;
/**
* Trains or evaluates maxent components which have implemented the Evalable
* interface.
*/
public class TrainEval {
public static void eval(MaxentModel model, Reader r, Evalable e) {
eval(model, r, e, false);
}
public static void eval(MaxentModel model, Reader r,
Evalable e, boolean verbose) {
float totPos=0, truePos=0, falsePos=0;
Event[] events = (e.getEventCollector(r)).getEvents(true);
//MaxentModel model = e.getModel(dir, name);
String negOutcome = e.getNegativeOutcome();
for(int i=0; i<events.length; i++) {
String guess =
model.getBestOutcome(model.eval(events[i].getContext()));
String ans = events[i].getOutcome();
if(verbose)
System.out.println(ans + " " + guess);
if(!ans.equals(negOutcome)) totPos++;
if(!guess.equals(negOutcome) && !guess.equals(ans))
falsePos++;
else if(ans.equals(guess))
truePos++;
}
System.out.println("Precision: " + truePos/(truePos+falsePos));
System.out.println("Recall: " + truePos/totPos);
}
public static MaxentModel train(EventStream events, int cutoff) throws IOException {
return GIS.trainModel(events, 100, cutoff);
}
public static void run(String[] args, Evalable e) throws IOException {
// TOM: Was commented out to remove dependency on gnu getopt.
// String dir = "./";
// String stem = "maxent";
// int cutoff = 0; // default to no cutoff
// boolean train = false;
// boolean verbose = false;
// boolean local = false;
// gnu.getopt.Getopt g =
// new gnu.getopt.Getopt("maxent", args, "d:s:c:tvl");
// int c;
// while ((c = g.getopt()) != -1) {
// switch(c) {
// case 'd':
// dir = g.getOptarg()+"/";
// break;
// case 's':
// stem = g.getOptarg();
// break;
// case 'c':
// cutoff = Integer.parseInt(g.getOptarg());
// break;
// case 't':
// train = true;
// break;
// case 'l':
// local = true;
// break;
// case 'v':
// verbose = true;
// break;
// }
// }
//
// int lastIndex = g.getOptind();
// if (lastIndex >= args.length) {
// System.out.println("This is a usage message from opennlp.maxent.TrainEval. You have called the training procedure for a maxent application with the incorrect arguments. These are the options:");
//
// System.out.println("\nOptions for defining the model location and name:");
// System.out.println(" -d <directoryName>");
// System.out.println("\tThe directory in which to store the model.");
// System.out.println(" -s <modelName>");
// System.out.println("\tThe name of the model, e.g. EnglishPOS.bin.gz or NameFinder.txt.");
//
// System.out.println("\nOptions for training:");
// System.out.println(" -c <cutoff>");
// System.out.println("\tAn integer cutoff level to reduce infrequent contextual predicates.");
// System.out.println(" -t\tTrain a model. If absent, the given model will be loaded and evaluated.");
// System.out.println("\nOptions for evaluation:");
// System.out.println(" -l\t the evaluation method of class that uses the model. If absent, TrainEval's eval method is used.");
// System.out.println(" -v\t verbose.");
// System.out.println("\nThe final argument is the data file to be loaded and used for either training or evaluation.");
// System.out.println("\nAs an example for training:\n java opennlp.grok.preprocess.postag.POSTaggerME -t -d ./ -s EnglishPOS.bin.gz -c 7 postag.data");
// System.exit(0);
// }
//
// FileReader datafr = new FileReader(args[lastIndex]);
//
// if (train) {
// MaxentModel m =
// train(new EventCollectorAsStream(e.getEventCollector(datafr)),
// cutoff);
// new SuffixSensitiveGISModelWriter((AbstractModel)m,
// new File(dir+stem)).persist();
// }
// else {
// MaxentModel model =
// new SuffixSensitiveGISModelReader(new File(dir+stem)).getModel();
// if (local) {
// e.localEval(model, datafr, e, verbose);
// } else {
// eval(model, datafr, e, verbose);
// }
// }
}
} | OPENNLP-369 Now using for each loop. Thanks to Aliaksandr Autayeu for providing patch.
git-svn-id: 924c1ce098d5c0cf43d98e06e1f2b659f3b417ce@1200826 13f79535-47bb-0310-9956-ffa450edef68
| opennlp-maxent/src/main/java/opennlp/maxent/TrainEval.java | OPENNLP-369 Now using for each loop. Thanks to Aliaksandr Autayeu for providing patch. | <ide><path>pennlp-maxent/src/main/java/opennlp/maxent/TrainEval.java
<ide> Event[] events = (e.getEventCollector(r)).getEvents(true);
<ide> //MaxentModel model = e.getModel(dir, name);
<ide> String negOutcome = e.getNegativeOutcome();
<del> for(int i=0; i<events.length; i++) {
<del> String guess =
<del> model.getBestOutcome(model.eval(events[i].getContext()));
<del> String ans = events[i].getOutcome();
<del> if(verbose)
<del> System.out.println(ans + " " + guess);
<del> if(!ans.equals(negOutcome)) totPos++;
<del> if(!guess.equals(negOutcome) && !guess.equals(ans))
<del> falsePos++;
<del> else if(ans.equals(guess))
<del> truePos++;
<del> }
<add> for (Event event : events) {
<add> String guess = model.getBestOutcome(model.eval(event.getContext()));
<add> String ans = event.getOutcome();
<add> if (verbose)
<add> System.out.println(ans + " " + guess);
<add>
<add> if (!ans.equals(negOutcome))
<add> totPos++;
<add>
<add> if (!guess.equals(negOutcome) && !guess.equals(ans))
<add> falsePos++;
<add> else if (ans.equals(guess))
<add> truePos++;
<add> }
<ide>
<ide> System.out.println("Precision: " + truePos/(truePos+falsePos));
<ide> System.out.println("Recall: " + truePos/totPos); |
|
Java | mit | 66cf0b65962f40ca0e053b9a0a603eaf156c8567 | 0 | DTL-FAIRData/FAIRifier,DTL-FAIRData/FAIRifier,DTL-FAIRData/FAIRifier,DTL-FAIRData/FAIRifier,DTL-FAIRData/FAIRifier,DTL-FAIRData/FAIRifier | package com.google.refine.importers;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.nio.CharBuffer;
import com.google.refine.importing.FormatGuesser;
public class TextFormatGuesser implements FormatGuesser {
@Override
public String guess(File file, String encoding, String seedFormat) {
try {
InputStream is = new FileInputStream(file);
try {
Reader reader = encoding != null ? new InputStreamReader(is, encoding) : new InputStreamReader(is);
int totalBytes = 0;
int bytes;
int openBraces = 0;
int closeBraces = 0;
int openAngleBrackets = 0;
int closeAngleBrackets = 0;
CharBuffer charBuffer = CharBuffer.allocate(4096);
while (totalBytes < 64 * 1024 && (bytes = reader.read(charBuffer)) > 0) {
String chunk = charBuffer.toString();
openBraces += countSubstrings(chunk, "{");
closeBraces += countSubstrings(chunk, "}");
openAngleBrackets += countSubstrings(chunk, "<");
closeAngleBrackets += countSubstrings(chunk, ">");
charBuffer.clear();
totalBytes += bytes;
}
if (openBraces >= 5 && closeBraces >= 5) {
return "text/json";
} else if (openAngleBrackets >= 5 && closeAngleBrackets >= 5) {
return "text/xml";
} else {
return "text/line-based";
}
} finally {
is.close();
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
static public int countSubstrings(String s, String sub) {
int count = 0;
int from = 0;
while (from < s.length()) {
int i = s.indexOf(sub, from);
if (i < 0) {
break;
} else {
from = i + sub.length();
count++;
}
}
return count;
}
}
| main/src/com/google/refine/importers/TextFormatGuesser.java | package com.google.refine.importers;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.nio.CharBuffer;
import com.google.refine.importing.FormatGuesser;
public class TextFormatGuesser implements FormatGuesser {
@Override
public String guess(File file, String encoding, String seedFormat) {
try {
InputStream is = new FileInputStream(file);
try {
Reader reader = encoding != null ? new InputStreamReader(is, encoding) : new InputStreamReader(is);
int totalBytes = 0;
int bytes;
int lineBreaks = 0;
CharBuffer charBuffer = CharBuffer.allocate(4096);
while (totalBytes < 64 * 1024 && (bytes = reader.read(charBuffer)) > 0) {
lineBreaks += countSubstrings(charBuffer.toString(), "\n");
charBuffer.clear();
totalBytes += bytes;
}
if (lineBreaks > 3) {
return "text/line-based";
}
} finally {
is.close();
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
static public int countSubstrings(String s, String sub) {
int count = 0;
int from = 0;
while (from < s.length()) {
int i = s.indexOf(sub, from);
if (i < 0) {
break;
} else {
from = i + sub.length();
count++;
}
}
return count;
}
}
| Fixed Issue 449: Uncaught exception from Excel importer.
git-svn-id: 434d687192588585fc4b74a81d202f670dfb77fb@2245 7d457c2a-affb-35e4-300a-418c747d4874
| main/src/com/google/refine/importers/TextFormatGuesser.java | Fixed Issue 449: Uncaught exception from Excel importer. | <ide><path>ain/src/com/google/refine/importers/TextFormatGuesser.java
<ide>
<ide> int totalBytes = 0;
<ide> int bytes;
<del> int lineBreaks = 0;
<add> int openBraces = 0;
<add> int closeBraces = 0;
<add> int openAngleBrackets = 0;
<add> int closeAngleBrackets = 0;
<ide>
<ide> CharBuffer charBuffer = CharBuffer.allocate(4096);
<ide> while (totalBytes < 64 * 1024 && (bytes = reader.read(charBuffer)) > 0) {
<del> lineBreaks += countSubstrings(charBuffer.toString(), "\n");
<add> String chunk = charBuffer.toString();
<add> openBraces += countSubstrings(chunk, "{");
<add> closeBraces += countSubstrings(chunk, "}");
<add> openAngleBrackets += countSubstrings(chunk, "<");
<add> closeAngleBrackets += countSubstrings(chunk, ">");
<ide>
<ide> charBuffer.clear();
<ide> totalBytes += bytes;
<ide> }
<ide>
<del> if (lineBreaks > 3) {
<add> if (openBraces >= 5 && closeBraces >= 5) {
<add> return "text/json";
<add> } else if (openAngleBrackets >= 5 && closeAngleBrackets >= 5) {
<add> return "text/xml";
<add> } else {
<ide> return "text/line-based";
<ide> }
<ide> } finally { |
|
JavaScript | apache-2.0 | 371c0e6aea893c7dbf91b0c75cbecb2ba87b5465 | 0 | bisone/saiku,bisone/saiku,bisone/saiku,bisone/saiku,bisone/saiku,bisone/saiku | /*
* Copyright 2012 OSBI Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Class which handles table rendering of resultsets
*/
var Table = Backbone.View.extend({
className: 'table_wrapper',
events: {
'click th.row' : 'clicked_cell',
'click th.col' : 'clicked_cell'
},
initialize: function(args) {
this.workspace = args.workspace;
this.renderer = new SaikuTableRenderer();
// Bind table rendering to query result event
_.bindAll(this, "render", "process_data");
this.workspace.bind('query:result', this.render);
this.id = _.uniqueId("table_");
$(this.el).attr('id', this.id);
},
clicked_cell: function(event) {
var self = this;
if (/*this.workspace.query.get('type') != 'QM' || */Settings.MODE == "table") {
return false;
}
if ($(this.workspace.el).find( ".workspace_results.ui-selectable" ).length > 0) {
$(this.workspace.el).find( ".workspace_results" ).selectable( "destroy" );
}
var $target = ($(event.target).hasClass('row') || $(event.target).hasClass('col') ) ?
$(event.target).find('div') : $(event.target);
var $body = $(document);
$.contextMenu('destroy', '.row, .col');
$.contextMenu({
appendTo: $target,
selector: '.row, .col',
ignoreRightClick: true,
build: function($trigger, e) {
var $target = $(e.currentTarget).find('div');
var axis = $(e.currentTarget).hasClass('rows') ? "ROWS" : "COLUMNS";
var pos = $target.attr('rel').split(':');
var row = parseInt(pos[0]);
var col = parseInt(pos[1]);
var cell = self.workspace.query.result.lastresult().cellset[row][col];
var query = self.workspace.query;
var schema = query.get('schema');
var cube = query.get('connection') + "/" +
query.get('catalog') + "/" +
((schema === "" || schema === null) ? "null" : schema) +
"/" + query.get('cube');
var d = cell.properties.dimension;
var h = cell.properties.hierarchy;
var l = cell.properties.level;
var l_caption = "";
var keep_payload = JSON.stringify(
{
"hierarchy" : h,
"uniquename" : l,
"type" : "level",
"action" : "delete"
}) +
"," + JSON.stringify(
{
"hierarchy" : h,
"uniquename" : cell.properties.uniquename,
"type" : "member",
"action" : "add"
}
);
var children_payload = cell.properties.uniquename;
var levels = [];
var items = {};
var key = self.workspace.selected_cube;
var cubeModel = Saiku.session.sessionworkspace.cube[key];
var dimensions;
if (!cubeModel || !dimensions || !measures) {
if (typeof localStorage !== "undefined" && localStorage && localStorage.getItem("cube." + key) !== null) {
Saiku.session.sessionworkspace.cube[key] = new Cube(JSON.parse(localStorage.getItem("cube." + key)));
} else {
Saiku.session.sessionworkspace.cube[key] = new Cube({ key: key });
Saiku.session.sessionworkspace.cube[key].fetch({ async : false });
}
dimensions = Saiku.session.sessionworkspace.cube[key].get('data').dimensions;
}
var dimsel = {};
var used_levels = [];
//TODO GET USED LEVELS
/*
self.workspace.query.action.gett("/axis/" + axis + "/dimension/" + encodeURIComponent(d), {
success: function(response, model) {
dimsel = model;
},
async: false
});
_.each(dimsel.selections, function(selection) {
if(_.indexOf(used_levels, selection.levelUniqueName) == -1)
used_levels.push(selection.levelUniqueName);
});
*/
_.each(dimensions, function(dimension) {
if (dimension.name == d) {
_.each(dimension.hierarchies, function(hierarchy) {
if (hierarchy.uniqueName == h) {
_.each(hierarchy.levels, function(level) {
items[level.name] = {
name: level.caption,
payload: JSON.stringify({
"hierarchy" : h,
uniquename : level.uniqueName,
type : "level",
action : "add"
})
};
if(_.indexOf(used_levels, level.uniqueName) > -1) {
items[level.name].disabled = true;
items["remove-" + level.name] = {
name: level.caption,
payload: JSON.stringify({
"hierarchy" : h,
uniquename : level.uniqueName,
type : "level",
action : "delete"
})
};
}
if (level.uniqueName == l) {
l_caption = level.caption;
l_name = level.name;
}
items["keep-" + level.name] = items[level.name];
items["include-" + level.name] = JSON.parse(JSON.stringify(items[level.name]));
items["keep-" + level.name].payload = keep_payload + "," + items[level.name].payload;
});
}
});
}
});
items.keeponly = { payload: keep_payload };
items.getchildren = { payload: children_payload };
if (items.hasOwnProperty("remove-" + l_name) && items.hasOwnProperty("include-" + l_name)) {
items.showall = { payload: items["remove-" + l_name].payload + ", " + items["include-" + l_name].payload};
}
var lvlitems = function(prefix) {
var ritems = {};
for (var key in items) {
if (prefix !== null && prefix.length < key.length && key.substr(0, prefix.length) == prefix) {
ritems[key] = items[key];
}
}
return ritems;
};
var member = $target.html();
var citems = {
"name" : {name: "<b>" + member + "</b>", disabled: true },
"sep1": "---------",
"keeponly": {name: "Keep Only", i18n: true, payload: keep_payload }
};
if (d != "Measures") {
citems.getchildren = {name: "Show Children", i18n: true, payload: children_payload };
citems.fold1key = {
name: "Include Level", i18n: true,
items: lvlitems("include-")
};
citems.fold2key = {
name: "Keep and Include Level", i18n: true,
items: lvlitems("keep-")
};
citems.fold3key = {
name: "Remove Level", i18n: true,
items: lvlitems("remove-")
};
citems.filterlevel = {
name: "Filter Level", i18n: true
};
if (items.showall) {
citems.showall = { name: "Remove Filters", i18n: true };
}
}
$.each(citems, function(key, item){
recursive_menu_translate(item, Saiku.i18n.po_file);
});
return {
callback: function(key, options) {
var url = '/axis/' + axis + '/dimension/' + encodeURIComponent(d);
var children = false;
if (key.indexOf("filterlevel") >= 0) {
key = encodeURIComponent(d) + "/hierarchy/" + encodeURIComponent(h) + "/" + encodeURIComponent(l);
(new SelectionsModal({
target: null,
axis: axis,
name: l_caption,
key: key,
workspace: self.workspace
})).open();
return;
}
if (key.indexOf("children") >= 0) {
url = '/axis/' + axis + '/dimension/' + encodeURIComponent(d) + "/children";
children = true;
}
if (children) {
self.workspace.query.set({ 'formatter' : 'flat' });
}
self.workspace.query.action.put(url, { success: self.workspace.sync_query,
dataType: "text",
data: children ?
{
member: items[key].payload
}
:
{
selections: "[" + items[key].payload + "]"
}
});
},
items: citems
};
}
});
$target.contextMenu();
},
render: function(args, block) {
if (typeof args == "undefined" || typeof args.data == "undefined" ||
($(this.workspace.el).is(':visible') && !$(this.el).is(':visible'))) {
return;
}
if (args.data !== null && args.data.error !== null) {
return;
}
// Check to see if there is data
if (args.data === null || (args.data.height && args.data.height === 0)) {
return;
}
this.clearOut();
$(this.el).html('Rendering ' + args.data.width + ' columns and ' + args.data.height + ' rows...');
// Render the table without blocking the UI thread
_.delay(this.process_data, 2, args.data);
},
clearOut: function() {
// Do some clearing in the renderer
this.renderer.clear();
$(this.workspace.el).find( ".workspace_results" ).unbind('scroll');
var element = document.getElementById(this.id);
var table = element.firstChild;
if (table) {
element.removeChild(table);
}
},
process_data: function(data) {
this.workspace.processing.hide();
this.workspace.adjust();
// Append the table
this.clearOut();
$(this.el).html('<table></table>');
var contents = this.renderer.render(data, {
htmlObject: $(this.el).find('table'),
batch: Settings.TABLE_LAZY,
batchSize: Settings.TABLE_LAZY_SIZE,
batchIntervalSize: Settings.TABLE_LAZY_LOAD_ITEMS,
batchIntervalTime: Settings.TABLE_LAZY_LOAD_TIME
});
this.post_process();
},
post_process: function() {
if (this.workspace.query.get('type') == 'QM' && Settings.MODE != "view") {
$(this.el).addClass('headerhighlight');
} else {
$(this.el).removeClass('headerhighlight');
}
/*
var tipOptions = {
delayIn: 200,
delayOut:80,
offset: 2,
html: true,
gravity: "nw",
fade: false,
followMouse: true,
corners: true,
arrow: false,
opacity: 1
};
$(this.el).find('th.row, th.col').tipsy(tipOptions);
*/
$(this.el).find(".i18n").i18n(Saiku.i18n.po_file);
this.workspace.trigger('table:rendered', this);
}
});
| js/saiku/views/Table.js | /*
* Copyright 2012 OSBI Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Class which handles table rendering of resultsets
*/
var Table = Backbone.View.extend({
className: 'table_wrapper',
events: {
'click th.row' : 'clicked_cell',
'click th.col' : 'clicked_cell'
},
initialize: function(args) {
this.workspace = args.workspace;
this.renderer = new SaikuTableRenderer();
// Bind table rendering to query result event
_.bindAll(this, "render", "process_data");
this.workspace.bind('query:result', this.render);
this.id = _.uniqueId("table_");
$(this.el).attr('id', this.id);
},
clicked_cell: function(event) {
var self = this;
if (this.workspace.query.get('type') != 'QM' || Settings.MODE == "table") {
return false;
}
if ($(this.workspace.el).find( ".workspace_results.ui-selectable" ).length > 0) {
$(this.workspace.el).find( ".workspace_results" ).selectable( "destroy" );
}
var $target = ($(event.target).hasClass('row') || $(event.target).hasClass('col') ) ?
$(event.target).find('div') : $(event.target);
var $body = $(document);
$.contextMenu('destroy', '.row, .col');
$.contextMenu({
appendTo: $target,
selector: '.row, .col',
ignoreRightClick: true,
build: function($trigger, e) {
var $target = $(e.currentTarget).find('div');
var axis = $(e.currentTarget).hasClass('rows') ? "ROWS" : "COLUMNS";
var pos = $target.attr('rel').split(':');
var row = parseInt(pos[0]);
var col = parseInt(pos[1]);
var cell = self.workspace.query.result.lastresult().cellset[row][col];
var query = self.workspace.query;
var schema = query.get('schema');
var cube = query.get('connection') + "/" +
query.get('catalog') + "/" +
((schema === "" || schema === null) ? "null" : schema) +
"/" + query.get('cube');
var d = cell.properties.dimension;
var h = cell.properties.hierarchy;
var l = cell.properties.level;
var l_caption = "";
var keep_payload = JSON.stringify(
{
"hierarchy" : h,
"uniquename" : l,
"type" : "level",
"action" : "delete"
}) +
"," + JSON.stringify(
{
"hierarchy" : h,
"uniquename" : cell.properties.uniquename,
"type" : "member",
"action" : "add"
}
);
var children_payload = cell.properties.uniquename;
var levels = [];
var items = {};
var cubeModel = Saiku.session.sessionworkspace.cube[cube];
var dimensions = (cubeModel && cubeModel.has('data')) ? cubeModel.get('data').dimensions : null;
if (!dimensions) {
Saiku.session.sessionworkspace.cube[cube].fetch({async : false});
dimensions = Saiku.session.sessionworkspace.cube[cube].get('data').dimensions;
}
var dimsel = {};
var used_levels = [];
self.workspace.query.action.gett("/axis/" + axis + "/dimension/" + encodeURIComponent(d), {
success: function(response, model) {
dimsel = model;
},
async: false
});
_.each(dimsel.selections, function(selection) {
if(_.indexOf(used_levels, selection.levelUniqueName) == -1)
used_levels.push(selection.levelUniqueName);
});
_.each(dimensions, function(dimension) {
if (dimension.name == d) {
_.each(dimension.hierarchies, function(hierarchy) {
if (hierarchy.uniqueName == h) {
_.each(hierarchy.levels, function(level) {
items[level.name] = {
name: level.caption,
payload: JSON.stringify({
"hierarchy" : h,
uniquename : level.uniqueName,
type : "level",
action : "add"
})
};
if(_.indexOf(used_levels, level.uniqueName) > -1) {
items[level.name].disabled = true;
items["remove-" + level.name] = {
name: level.caption,
payload: JSON.stringify({
"hierarchy" : h,
uniquename : level.uniqueName,
type : "level",
action : "delete"
})
};
}
if (level.uniqueName == l) {
l_caption = level.caption;
l_name = level.name;
}
items["keep-" + level.name] = items[level.name];
items["include-" + level.name] = JSON.parse(JSON.stringify(items[level.name]));
items["keep-" + level.name].payload = keep_payload + "," + items[level.name].payload;
});
}
});
}
});
items.keeponly = { payload: keep_payload };
items.getchildren = { payload: children_payload };
if (items.hasOwnProperty("remove-" + l_name) && items.hasOwnProperty("include-" + l_name)) {
items.showall = { payload: items["remove-" + l_name].payload + ", " + items["include-" + l_name].payload};
}
var lvlitems = function(prefix) {
var ritems = {};
for (var key in items) {
if (prefix !== null && prefix.length < key.length && key.substr(0, prefix.length) == prefix) {
ritems[key] = items[key];
}
}
return ritems;
};
var member = $target.html();
var citems = {
"name" : {name: "<b>" + member + "</b>", disabled: true },
"sep1": "---------",
"keeponly": {name: "Keep Only", i18n: true, payload: keep_payload }
};
if (d != "Measures") {
citems.getchildren = {name: "Show Children", i18n: true, payload: children_payload };
citems.fold1key = {
name: "Include Level", i18n: true,
items: lvlitems("include-")
};
citems.fold2key = {
name: "Keep and Include Level", i18n: true,
items: lvlitems("keep-")
};
citems.fold3key = {
name: "Remove Level", i18n: true,
items: lvlitems("remove-")
};
citems.filterlevel = {
name: "Filter Level", i18n: true
};
if (items.showall) {
citems.showall = { name: "Remove Filters", i18n: true };
}
}
$.each(citems, function(key, item){
recursive_menu_translate(item, Saiku.i18n.po_file);
});
return {
callback: function(key, options) {
var url = '/axis/' + axis + '/dimension/' + encodeURIComponent(d);
var children = false;
if (key.indexOf("filterlevel") >= 0) {
key = encodeURIComponent(d) + "/hierarchy/" + encodeURIComponent(h) + "/" + encodeURIComponent(l);
(new SelectionsModal({
target: null,
axis: axis,
name: l_caption,
key: key,
workspace: self.workspace
})).open();
return;
}
if (key.indexOf("children") >= 0) {
url = '/axis/' + axis + '/dimension/' + encodeURIComponent(d) + "/children";
children = true;
}
if (children) {
self.workspace.query.set({ 'formatter' : 'flat' });
}
self.workspace.query.action.put(url, { success: self.workspace.sync_query,
dataType: "text",
data: children ?
{
member: items[key].payload
}
:
{
selections: "[" + items[key].payload + "]"
}
});
},
items: citems
};
}
});
$target.contextMenu();
},
render: function(args, block) {
if (typeof args == "undefined" || typeof args.data == "undefined" ||
($(this.workspace.el).is(':visible') && !$(this.el).is(':visible'))) {
return;
}
if (args.data !== null && args.data.error !== null) {
return;
}
// Check to see if there is data
if (args.data === null || (args.data.height && args.data.height === 0)) {
return;
}
this.clearOut();
$(this.el).html('Rendering ' + args.data.width + ' columns and ' + args.data.height + ' rows...');
// Render the table without blocking the UI thread
_.delay(this.process_data, 2, args.data);
},
clearOut: function() {
// Do some clearing in the renderer
this.renderer.clear();
$(this.workspace.el).find( ".workspace_results" ).unbind('scroll');
var element = document.getElementById(this.id);
var table = element.firstChild;
if (table) {
element.removeChild(table);
}
},
process_data: function(data) {
this.workspace.processing.hide();
this.workspace.adjust();
// Append the table
this.clearOut();
$(this.el).html('<table></table>');
var contents = this.renderer.render(data, {
htmlObject: $(this.el).find('table'),
batch: Settings.TABLE_LAZY,
batchSize: Settings.TABLE_LAZY_SIZE,
batchIntervalSize: Settings.TABLE_LAZY_LOAD_ITEMS,
batchIntervalTime: Settings.TABLE_LAZY_LOAD_TIME
});
this.post_process();
},
post_process: function() {
if (this.workspace.query.get('type') == 'QM' && Settings.MODE != "view") {
$(this.el).addClass('headerhighlight');
} else {
$(this.el).removeClass('headerhighlight');
}
/*
var tipOptions = {
delayIn: 200,
delayOut:80,
offset: 2,
html: true,
gravity: "nw",
fade: false,
followMouse: true,
corners: true,
arrow: false,
opacity: 1
};
$(this.el).find('th.row, th.col').tipsy(tipOptions);
*/
$(this.el).find(".i18n").i18n(Saiku.i18n.po_file);
this.workspace.trigger('table:rendered', this);
}
});
| fixes
| js/saiku/views/Table.js | fixes | <ide><path>s/saiku/views/Table.js
<ide> clicked_cell: function(event) {
<ide> var self = this;
<ide>
<del> if (this.workspace.query.get('type') != 'QM' || Settings.MODE == "table") {
<add> if (/*this.workspace.query.get('type') != 'QM' || */Settings.MODE == "table") {
<ide> return false;
<ide> }
<ide> if ($(this.workspace.el).find( ".workspace_results.ui-selectable" ).length > 0) {
<ide>
<ide> var levels = [];
<ide> var items = {};
<del> var cubeModel = Saiku.session.sessionworkspace.cube[cube];
<del> var dimensions = (cubeModel && cubeModel.has('data')) ? cubeModel.get('data').dimensions : null;
<del> if (!dimensions) {
<del> Saiku.session.sessionworkspace.cube[cube].fetch({async : false});
<del> dimensions = Saiku.session.sessionworkspace.cube[cube].get('data').dimensions;
<del> }
<add> var key = self.workspace.selected_cube;
<add> var cubeModel = Saiku.session.sessionworkspace.cube[key];
<add>
<add> var dimensions;
<add> if (!cubeModel || !dimensions || !measures) {
<add> if (typeof localStorage !== "undefined" && localStorage && localStorage.getItem("cube." + key) !== null) {
<add> Saiku.session.sessionworkspace.cube[key] = new Cube(JSON.parse(localStorage.getItem("cube." + key)));
<add> } else {
<add> Saiku.session.sessionworkspace.cube[key] = new Cube({ key: key });
<add> Saiku.session.sessionworkspace.cube[key].fetch({ async : false });
<add> }
<add> dimensions = Saiku.session.sessionworkspace.cube[key].get('data').dimensions;
<add> }
<ide> var dimsel = {};
<ide> var used_levels = [];
<del>
<add> //TODO GET USED LEVELS
<add>/*
<ide> self.workspace.query.action.gett("/axis/" + axis + "/dimension/" + encodeURIComponent(d), {
<ide> success: function(response, model) {
<ide> dimsel = model;
<ide> used_levels.push(selection.levelUniqueName);
<ide>
<ide> });
<del>
<add>*/
<ide> _.each(dimensions, function(dimension) {
<ide> if (dimension.name == d) {
<ide> _.each(dimension.hierarchies, function(hierarchy) { |
|
Java | epl-1.0 | 39a97f53eabe47c765456b22f5d12069fda3862e | 0 | rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,Charling-Huang/birt,rrimmana/birt-1,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1,Charling-Huang/birt,Charling-Huang/birt,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,sguan-actuate/birt | /***********************************************************************
* Copyright (c) 2004 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
***********************************************************************/
package org.eclipse.birt.chart.ui.swt.composites;
import java.util.LinkedHashMap;
import org.eclipse.birt.chart.model.attribute.AttributeFactory;
import org.eclipse.birt.chart.model.attribute.ExtendedProperty;
import org.eclipse.birt.chart.model.impl.ChartModelHelper;
import org.eclipse.birt.chart.ui.extension.i18n.Messages;
import org.eclipse.birt.chart.ui.swt.wizard.ChartWizardContext;
import org.eclipse.emf.common.util.EList;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.TableEditor;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
public class ExtendedPropertyEditorComposite extends Composite implements
SelectionListener
{
private LinkedHashMap<String, ExtendedProperty> propMap = null;
private LinkedHashMap<String, Boolean> propDisabledMap = null;
private Table table = null;
private TableEditor editorValue = null;
private Text txtNewKey = null;
private Button btnAdd = null;
private Button btnRemove = null;
private EList<ExtendedProperty> extendedProperties;
private ChartWizardContext context;
public ExtendedPropertyEditorComposite( Composite parent, int style,
EList<ExtendedProperty> extendedProperties,
ChartWizardContext context )
{
super( parent, style );
this.extendedProperties = extendedProperties;
this.context = context;
init( );
placeComponents( );
}
private void init( )
{
propMap = new LinkedHashMap<String, ExtendedProperty>( 6 );
for ( ExtendedProperty property : extendedProperties )
{
propMap.put( property.getName( ), property );
}
propDisabledMap = new LinkedHashMap<String, Boolean>( 2 );
for ( String disabledName : ChartModelHelper.instance( )
.getBuiltInExtendedProperties( ) )
{
propDisabledMap.put( disabledName, Boolean.TRUE );
}
}
private void placeComponents( )
{
GridLayout glContent = new GridLayout( );
glContent.horizontalSpacing = 5;
glContent.verticalSpacing = 5;
glContent.marginHeight = 7;
glContent.marginWidth = 7;
this.setLayout( glContent );
table = new Table( this, SWT.SINGLE | SWT.FULL_SELECTION | SWT.BORDER );
GridData gdTable = new GridData( GridData.FILL_BOTH );
table.setLayoutData( gdTable );
table.setHeaderVisible( true );
table.setLinesVisible( true );
TableColumn tcKey = new TableColumn( table, SWT.CENTER );
tcKey.setWidth( 186 );
tcKey.setText( Messages.getString( "PropertyEditorDialog.Lbl.Key" ) ); //$NON-NLS-1$
TableColumn tcValue = new TableColumn( table, SWT.LEFT );
tcValue.setWidth( 186 );
tcValue.setText( Messages.getString( "PropertyEditorDialog.Lbl.Value" ) ); //$NON-NLS-1$
editorValue = new TableEditor( table );
editorValue.setColumn( 1 );
editorValue.grabHorizontal = true;
editorValue.minimumWidth = 30;
table.addSelectionListener( this );
// Layout for buttons panel
GridLayout glButtons = new GridLayout( );
glButtons.numColumns = 3;
glButtons.horizontalSpacing = 5;
glButtons.verticalSpacing = 5;
glButtons.marginWidth = 0;
glButtons.marginHeight = 0;
Composite cmpButtons = new Composite( this, SWT.NONE );
GridData gdCMPButtons = new GridData( GridData.FILL_HORIZONTAL );
cmpButtons.setLayoutData( gdCMPButtons );
cmpButtons.setLayout( glButtons );
txtNewKey = new Text( cmpButtons, SWT.SINGLE | SWT.BORDER );
GridData gdTXTNewKey = new GridData( GridData.FILL_HORIZONTAL );
gdTXTNewKey.grabExcessHorizontalSpace = true;
txtNewKey.setLayoutData( gdTXTNewKey );
btnAdd = new Button( cmpButtons, SWT.PUSH );
GridData gdBTNAdd = new GridData( GridData.HORIZONTAL_ALIGN_END );
gdBTNAdd.grabExcessHorizontalSpace = false;
btnAdd.setLayoutData( gdBTNAdd );
btnAdd.setText( Messages.getString( "PropertyEditorDialog.Lbl.Add" ) ); //$NON-NLS-1$
btnAdd.addSelectionListener( this );
btnRemove = new Button( cmpButtons, SWT.PUSH );
GridData gdBTNRemove = new GridData( GridData.HORIZONTAL_ALIGN_END );
gdBTNRemove.grabExcessHorizontalSpace = false;
btnRemove.setLayoutData( gdBTNRemove );
btnRemove.setText( Messages.getString( "PropertyEditorDialog.Lbl.Remove" ) ); //$NON-NLS-1$
btnRemove.addSelectionListener( this );
populateTable( );
}
private void populateTable( )
{
for ( String propName : propMap.keySet( ) )
{
ExtendedProperty property = propMap.get( propName );
String[] sProperty = new String[2];
sProperty[0] = property.getName( );
sProperty[1] = property.getValue( );
TableItem tiProp = new TableItem( table, SWT.CHECK );
tiProp.setText( sProperty );
}
if ( table.getItemCount( ) > 0 )
{
table.select( 0 );
btnRemove.setEnabled( !propDisabledMap.containsKey( table.getItem( 0 )
.getText( ) ) );
}
else
{
txtNewKey.forceFocus( );
btnRemove.setEnabled( false );
}
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.swt.events.SelectionListener#widgetDefaultSelected(org.eclipse
* .swt.events.SelectionEvent)
*/
public void widgetDefaultSelected( SelectionEvent e )
{
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.swt.events.SelectionListener#widgetSelected(org.eclipse.swt
* .events.SelectionEvent)
*/
public void widgetSelected( SelectionEvent e )
{
if ( e.getSource( ).equals( btnAdd ) )
{
String sKey = txtNewKey.getText( );
if ( sKey.length( ) > 0 && !propMap.containsKey( sKey ) )
{
String[] sProperty = new String[2];
sProperty[0] = sKey;
sProperty[1] = ""; //$NON-NLS-1$
TableItem tiProp = new TableItem( table, SWT.NONE );
tiProp.setText( sProperty );
table.select( table.getItemCount( ) - 1 );
updateModel( sProperty[0], sProperty[1] );
txtNewKey.setText( "" ); //$NON-NLS-1$
}
}
else if ( e.getSource( ).equals( btnRemove ) )
{
if ( table.getSelection( ).length != 0 )
{
int index = table.getSelectionIndex( );
String key = table.getSelection( )[0].getText( 0 );
ExtendedProperty property = propMap.get( key );
if ( property != null )
{
extendedProperties.remove( property );
propMap.remove( key );
table.remove( table.getSelectionIndex( ) );
table.select( index<table.getItemCount( ) ?index:table.getItemCount( )- 1 );
}
Control editor = editorValue.getEditor( );
if ( editor != null )
{
editor.dispose( );
}
}
}
else if ( e.getSource( ).equals( table ) )
{
Control oldEditor = editorValue.getEditor( );
if ( oldEditor != null )
oldEditor.dispose( );
// Identify the selected row
final TableItem item = (TableItem) e.item;
if ( item == null )
{
return;
}
// The control that will be the editor must be a child of the Table
Text newEditor = new Text( table, SWT.NONE );
newEditor.setText( item.getText( 1 ) );
newEditor.addListener( SWT.FocusOut, new Listener( ) {
public void handleEvent( Event event )
{
Text text = (Text) event.widget;
editorValue.getItem( ).setText( 1, text.getText( ) );
updateModel( item.getText( 0 ), text.getText( ) );
}
} );
newEditor.selectAll( );
newEditor.setFocus( );
editorValue.setEditor( newEditor, item, 1 );
}
btnRemove.setEnabled( !propDisabledMap.containsKey( table.getSelection( )[0].getText( 0 ) ) );
}
private void updateModel( String key, String value )
{
ExtendedProperty property = propMap.get( key );
if ( property == null )
{
property = AttributeFactory.eINSTANCE.createExtendedProperty( );
property.setName( key );
property.setValue( value );
property.eAdapters( ).addAll( context.getModel( ).eAdapters( ) );
extendedProperties.add( property );
propMap.put( key, property );
}
else
{
property.setValue( value );
}
}
} | chart/org.eclipse.birt.chart.ui.extension/src/org/eclipse/birt/chart/ui/swt/composites/ExtendedPropertyEditorComposite.java | /***********************************************************************
* Copyright (c) 2004 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
***********************************************************************/
package org.eclipse.birt.chart.ui.swt.composites;
import java.util.LinkedHashMap;
import org.eclipse.birt.chart.model.attribute.AttributeFactory;
import org.eclipse.birt.chart.model.attribute.ExtendedProperty;
import org.eclipse.birt.chart.model.impl.ChartModelHelper;
import org.eclipse.birt.chart.ui.extension.i18n.Messages;
import org.eclipse.birt.chart.ui.swt.wizard.ChartWizardContext;
import org.eclipse.emf.common.util.EList;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.TableEditor;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
public class ExtendedPropertyEditorComposite extends Composite implements
SelectionListener
{
private LinkedHashMap<String, ExtendedProperty> propMap = null;
private LinkedHashMap<String, Boolean> propDisabledMap = null;
private Table table = null;
private TableEditor editorValue = null;
private Text txtNewKey = null;
private Button btnAdd = null;
private Button btnRemove = null;
private EList<ExtendedProperty> extendedProperties;
private ChartWizardContext context;
public ExtendedPropertyEditorComposite( Composite parent, int style,
EList<ExtendedProperty> extendedProperties,
ChartWizardContext context )
{
super( parent, style );
this.extendedProperties = extendedProperties;
this.context = context;
init( );
placeComponents( );
}
private void init( )
{
propMap = new LinkedHashMap<String, ExtendedProperty>( 6 );
for ( ExtendedProperty property : extendedProperties )
{
propMap.put( property.getName( ), property );
}
propDisabledMap = new LinkedHashMap<String, Boolean>( 2 );
for ( String disabledName : ChartModelHelper.instance( )
.getBuiltInExtendedProperties( ) )
{
propDisabledMap.put( disabledName, Boolean.TRUE );
}
}
private void placeComponents( )
{
GridLayout glContent = new GridLayout( );
glContent.horizontalSpacing = 5;
glContent.verticalSpacing = 5;
glContent.marginHeight = 7;
glContent.marginWidth = 7;
this.setLayout( glContent );
table = new Table( this, SWT.SINGLE | SWT.FULL_SELECTION | SWT.BORDER );
GridData gdTable = new GridData( GridData.FILL_BOTH );
table.setLayoutData( gdTable );
table.setHeaderVisible( true );
table.setLinesVisible( true );
TableColumn tcKey = new TableColumn( table, SWT.CENTER );
tcKey.setWidth( 186 );
tcKey.setText( Messages.getString( "PropertyEditorDialog.Lbl.Key" ) ); //$NON-NLS-1$
TableColumn tcValue = new TableColumn( table, SWT.LEFT );
tcValue.setWidth( 186 );
tcValue.setText( Messages.getString( "PropertyEditorDialog.Lbl.Value" ) ); //$NON-NLS-1$
editorValue = new TableEditor( table );
editorValue.setColumn( 1 );
editorValue.grabHorizontal = true;
editorValue.minimumWidth = 30;
table.addSelectionListener( this );
// Layout for buttons panel
GridLayout glButtons = new GridLayout( );
glButtons.numColumns = 3;
glButtons.horizontalSpacing = 5;
glButtons.verticalSpacing = 5;
glButtons.marginWidth = 0;
glButtons.marginHeight = 0;
Composite cmpButtons = new Composite( this, SWT.NONE );
GridData gdCMPButtons = new GridData( GridData.FILL_HORIZONTAL );
cmpButtons.setLayoutData( gdCMPButtons );
cmpButtons.setLayout( glButtons );
txtNewKey = new Text( cmpButtons, SWT.SINGLE | SWT.BORDER );
GridData gdTXTNewKey = new GridData( GridData.FILL_HORIZONTAL );
gdTXTNewKey.grabExcessHorizontalSpace = true;
txtNewKey.setLayoutData( gdTXTNewKey );
btnAdd = new Button( cmpButtons, SWT.PUSH );
GridData gdBTNAdd = new GridData( GridData.HORIZONTAL_ALIGN_END );
gdBTNAdd.grabExcessHorizontalSpace = false;
btnAdd.setLayoutData( gdBTNAdd );
btnAdd.setText( Messages.getString( "PropertyEditorDialog.Lbl.Add" ) ); //$NON-NLS-1$
btnAdd.addSelectionListener( this );
btnRemove = new Button( cmpButtons, SWT.PUSH );
GridData gdBTNRemove = new GridData( GridData.HORIZONTAL_ALIGN_END );
gdBTNRemove.grabExcessHorizontalSpace = false;
btnRemove.setLayoutData( gdBTNRemove );
btnRemove.setText( Messages.getString( "PropertyEditorDialog.Lbl.Remove" ) ); //$NON-NLS-1$
btnRemove.addSelectionListener( this );
populateTable( );
}
private void populateTable( )
{
for ( String propName : propMap.keySet( ) )
{
ExtendedProperty property = propMap.get( propName );
String[] sProperty = new String[2];
sProperty[0] = property.getName( );
sProperty[1] = property.getValue( );
TableItem tiProp = new TableItem( table, SWT.CHECK );
// tiProp.setBackground( getSelectionColor( ) );
tiProp.setText( sProperty );
}
if ( table.getItemCount( ) > 0 )
{
table.select( 0 );
btnRemove.setEnabled( !propDisabledMap.containsKey( table.getItem( 0 )
.getText( ) ) );
}
else
{
txtNewKey.forceFocus( );
btnRemove.setEnabled( false );
}
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.swt.events.SelectionListener#widgetDefaultSelected(org.eclipse
* .swt.events.SelectionEvent)
*/
public void widgetDefaultSelected( SelectionEvent e )
{
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.swt.events.SelectionListener#widgetSelected(org.eclipse.swt
* .events.SelectionEvent)
*/
public void widgetSelected( SelectionEvent e )
{
if ( e.getSource( ).equals( btnAdd ) )
{
String sKey = txtNewKey.getText( );
if ( sKey.length( ) > 0 && !propMap.containsKey( sKey ) )
{
String[] sProperty = new String[2];
sProperty[0] = sKey;
sProperty[1] = ""; //$NON-NLS-1$
TableItem tiProp = new TableItem( table, SWT.NONE );
tiProp.setBackground( getSelectionColor( ) );
tiProp.setText( sProperty );
table.select( table.getItemCount( ) - 1 );
updateModel( sProperty[0], sProperty[1] );
txtNewKey.setText( "" ); //$NON-NLS-1$
}
}
else if ( e.getSource( ).equals( btnRemove ) )
{
if ( table.getSelection( ).length != 0 )
{
int index = table.getSelectionIndex( );
String key = table.getSelection( )[0].getText( 0 );
ExtendedProperty property = propMap.get( key );
if ( property != null )
{
extendedProperties.remove( property );
propMap.remove( key );
table.remove( table.getSelectionIndex( ) );
table.select( index<table.getItemCount( ) ?index:table.getItemCount( )- 1 );
}
Control editor = editorValue.getEditor( );
if ( editor != null )
{
editor.dispose( );
}
}
}
else if ( e.getSource( ).equals( table ) )
{
Control oldEditor = editorValue.getEditor( );
if ( oldEditor != null )
oldEditor.dispose( );
// Identify the selected row
final TableItem item = (TableItem) e.item;
if ( item == null )
{
return;
}
// The control that will be the editor must be a child of the Table
Text newEditor = new Text( table, SWT.NONE );
newEditor.setText( item.getText( 1 ) );
newEditor.addListener( SWT.FocusOut, new Listener( ) {
public void handleEvent( Event event )
{
Text text = (Text) event.widget;
editorValue.getItem( ).setText( 1, text.getText( ) );
updateModel( item.getText( 0 ), text.getText( ) );
}
} );
newEditor.selectAll( );
newEditor.setFocus( );
editorValue.setEditor( newEditor, item, 1 );
}
btnRemove.setEnabled( !propDisabledMap.containsKey( table.getSelection( )[0].getText( 0 ) ) );
}
private void updateModel( String key, String value )
{
ExtendedProperty property = propMap.get( key );
if ( property == null )
{
property = AttributeFactory.eINSTANCE.createExtendedProperty( );
property.setName( key );
property.setValue( value );
property.eAdapters( ).addAll( context.getModel( ).eAdapters( ) );
extendedProperties.add( property );
propMap.put( key, property );
}
else
{
property.setValue( value );
}
}
private Color getSelectionColor( )
{
return getDisplay( ).getSystemColor( SWT.COLOR_LIST_SELECTION_TEXT );
}
} | Fix the bug: The background of the contents which are not selected are black in Custom Properties(26572).
| chart/org.eclipse.birt.chart.ui.extension/src/org/eclipse/birt/chart/ui/swt/composites/ExtendedPropertyEditorComposite.java | Fix the bug: The background of the contents which are not selected are black in Custom Properties(26572). | <ide><path>hart/org.eclipse.birt.chart.ui.extension/src/org/eclipse/birt/chart/ui/swt/composites/ExtendedPropertyEditorComposite.java
<ide> import org.eclipse.swt.custom.TableEditor;
<ide> import org.eclipse.swt.events.SelectionEvent;
<ide> import org.eclipse.swt.events.SelectionListener;
<del>import org.eclipse.swt.graphics.Color;
<ide> import org.eclipse.swt.layout.GridData;
<ide> import org.eclipse.swt.layout.GridLayout;
<ide> import org.eclipse.swt.widgets.Button;
<ide> sProperty[1] = property.getValue( );
<ide>
<ide> TableItem tiProp = new TableItem( table, SWT.CHECK );
<del> // tiProp.setBackground( getSelectionColor( ) );
<ide> tiProp.setText( sProperty );
<ide> }
<ide> if ( table.getItemCount( ) > 0 )
<ide> sProperty[1] = ""; //$NON-NLS-1$
<ide>
<ide> TableItem tiProp = new TableItem( table, SWT.NONE );
<del> tiProp.setBackground( getSelectionColor( ) );
<ide> tiProp.setText( sProperty );
<ide> table.select( table.getItemCount( ) - 1 );
<ide>
<ide> property.setValue( value );
<ide> }
<ide> }
<del>
<del> private Color getSelectionColor( )
<del> {
<del> return getDisplay( ).getSystemColor( SWT.COLOR_LIST_SELECTION_TEXT );
<del> }
<ide> } |
|
Java | lgpl-2.1 | dc4512e8393186523969f15f6a2ef51db72d2792 | 0 | brltty/brltty,brltty/brltty,brltty/brltty,brltty/brltty,brltty/brltty,brltty/brltty | /*
* BRLTTY - A background process providing access to the console screen (when in
* text mode) for a blind person using a refreshable braille display.
*
* Copyright (C) 1995-2013 by The BRLTTY Developers.
*
* BRLTTY comes with ABSOLUTELY NO WARRANTY.
*
* This is free software, placed under the terms of the
* GNU General Public License, as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any
* later version. Please see the file LICENSE-GPL for details.
*
* Web Page: http://mielke.cc/brltty/
*
* This software is maintained by Dave Mielke <[email protected]>.
*/
package org.a11y.brltty.android;
import java.util.List;
import android.graphics.Rect;
public class SimpleBrailleRenderer extends BrailleRenderer {
@Override
public void renderScreenElements (List<CharSequence> rows, ScreenElementList elements) {
elements.sortByVisualLocation();
elements.groupByContainer();
addVirtualElements(elements);
int left = 0;
int top = 0;
int right = 0;
int bottom = 0;
boolean wasVirtual = false;
for (ScreenElement element : elements) {
String text = element.getBrailleText();
boolean isVirtual = element.getVisualLocation() == null;
boolean append = wasVirtual && isVirtual;
wasVirtual = isVirtual;
if (text.length() > 0) {
List<CharSequence> lines = makeTextLines(text);
int width = getTextWidth(lines);
if (append) {
left = right + 3;
int row = top;
for (CharSequence line : lines) {
StringBuilder sb = new StringBuilder();
while (row >= rows.size()) rows.add("");
sb.append(rows.get(row));
while (sb.length() < left) sb.append(' ');
sb.append(line);
rows.set(row++, sb.toString());
}
} else {
left = 0;
top = rows.size();
rows.addAll(lines);
}
right = left + width - 1;
bottom = top + lines.size() - 1;
element.setBrailleLocation(new Rect(left, top, right, bottom));
}
}
}
public SimpleBrailleRenderer () {
super();
}
}
| Android/Application/src/org/a11y/brltty/android/SimpleBrailleRenderer.java | /*
* BRLTTY - A background process providing access to the console screen (when in
* text mode) for a blind person using a refreshable braille display.
*
* Copyright (C) 1995-2013 by The BRLTTY Developers.
*
* BRLTTY comes with ABSOLUTELY NO WARRANTY.
*
* This is free software, placed under the terms of the
* GNU General Public License, as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any
* later version. Please see the file LICENSE-GPL for details.
*
* Web Page: http://mielke.cc/brltty/
*
* This software is maintained by Dave Mielke <[email protected]>.
*/
package org.a11y.brltty.android;
import java.util.List;
import android.graphics.Rect;
public class SimpleBrailleRenderer extends BrailleRenderer {
@Override
public void renderScreenElements (List<CharSequence> rows, ScreenElementList elements) {
elements.sortByVisualLocation();
elements.groupByContainer();
addVirtualElements(elements);
int left = 0;
int top = 0;
int right = 0;
int bottom = 0;
boolean wasVirtual = false;
for (ScreenElement element : elements) {
String text = element.getBrailleText();
boolean isVirtual = element.getVisualLocation() == null;
boolean append = wasVirtual && isVirtual;
wasVirtual = isVirtual;
if (text.length() > 0) {
List<CharSequence> lines = makeTextLines(text);
int width = getTextWidth(lines);
if (append) {
left = right + 3;
int row = top;
for (CharSequence line : lines) {
StringBuilder sb = new StringBuilder();
if (row < rows.size()) sb.append(rows.get(row));
while (sb.length() < left) sb.append(' ');
sb.append(line);
rows.set(row++, sb.toString());
}
} else {
left = 0;
top = rows.size();
rows.addAll(lines);
}
right = left + width - 1;
bottom = top + lines.size() - 1;
element.setBrailleLocation(new Rect(left, top, right, bottom));
}
}
}
public SimpleBrailleRenderer () {
super();
}
}
| Rendering of multi-line virtual screen elements could fail. (dm)
git-svn-id: 30a5f035a20f1bc647618dbad7eea2a951b61b7c@6927 91a5dbb7-01b9-0310-9b5f-b28072856b6e
| Android/Application/src/org/a11y/brltty/android/SimpleBrailleRenderer.java | Rendering of multi-line virtual screen elements could fail. (dm) | <ide><path>ndroid/Application/src/org/a11y/brltty/android/SimpleBrailleRenderer.java
<ide>
<ide> for (CharSequence line : lines) {
<ide> StringBuilder sb = new StringBuilder();
<del> if (row < rows.size()) sb.append(rows.get(row));
<add> while (row >= rows.size()) rows.add("");
<add> sb.append(rows.get(row));
<ide> while (sb.length() < left) sb.append(' ');
<ide> sb.append(line);
<ide> rows.set(row++, sb.toString()); |
|
Java | apache-2.0 | ab1b48cc7d91500fdd62e225200285b8e8c2055f | 0 | IBM-MIL/CafeJava | /*
* Licensed Materials - Property of IBM
* © Copyright IBM Corporation 2015. All Rights Reserved.
*/
package com.ibm.mil.cafejava;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
import com.worklight.wlclient.api.WLClient;
import com.worklight.wlclient.api.WLFailResponse;
import com.worklight.wlclient.api.WLProcedureInvocationData;
import com.worklight.wlclient.api.WLRequestOptions;
import com.worklight.wlclient.api.WLResponse;
import com.worklight.wlclient.api.WLResponseListener;
import rx.Observable;
import rx.Subscriber;
import rx.functions.Func1;
import static rx.Observable.Transformer;
/**
* Configurable MFP client for establishing connections and invoking procedures in a reactive
* manner. For a detailed guide on using this class, visit
* <a href="https://github.com/t-preiss/CafeJava" target="_blank">the project's GitHub page</a> and
* view the README.
*
* @author John Petitto (github @jpetitto)
* @author Tanner Preiss (github @t-preiss)
*/
public final class CafeJava {
private int timeout = 30_000;
private Object invocationContext;
/**
* The timeout that will be used for any MFP call invoked from this instance of {@code
* CafeJava}.
*
* @param timeout Number of millis to wait for an MFP call to respond.
* @return The current instance of {@code CafeJava} to allow for easy call chaining.
*/
public CafeJava setTimeout(int timeout) {
if (timeout >= 0) {
this.timeout = timeout;
}
return this;
}
/**
* @return Number of millis to wait for an MFP call to respond.
*/
public int getTimeout() {
return timeout;
}
/**
* An {@code invocationContext} serves as a mechanism for tagging a {@code WLResponse}. This
* can be useful when the source of a {@code WLResponse} is unknown. This context will be used
* for any MFP call invoked from this instance of {@code CafeJava}.
*
* @param invocationContext Returned as part of any {@code WLResponse} that was originally
* invoked from this instance of {@code CafeJava}.
* @return The current instance of {@code CafeJava} to allow for easy call chaining.
*/
@NonNull
public CafeJava setInvocationContext(@Nullable Object invocationContext) {
this.invocationContext = invocationContext;
return this;
}
/**
* @return The context (object) for any {@code WLResponse} that was originally invoked from
* this instance of {@code CafeJava}.
*/
@Nullable
public Object getInvocationContext() {
return invocationContext;
}
/**
* Creates an {@code Observable} that emits a {@code WLResponse} after attempting connection
* to the MFP server instance defined in the {@code wlclient.properties} file. This
* connection is only performed when there is a new {@code Subscriber} to the {@code
* Observable}. The {@code Observable} will automatically perform its work on a dedicated
* background thread, so there is usually no need to use the {@code subscribeOn} method of
* RxJava.
*
* @param context
* @return {@code Observable} that emits a {@code WLResponse} for an MFP connection.
*/
@NonNull
public Observable<WLResponse> connect(@NonNull final Context context) {
return Observable.create(new Observable.OnSubscribe<WLResponse>() {
@Override
public void call(Subscriber<? super WLResponse> subscriber) {
WLClient client = WLClient.createInstance(context);
client.connect(new RxResponseListener(subscriber), getRequestOptions());
}
});
}
/**
* Creates an {@code Observable} that emits a {@code WLResponse} after attempting invocation
* of the specified procedure for the given adapter. This invocation is only performed when
* there is a new {@code Subscriber} to the {@code Observable}. The {@code Observable} will
* automatically perform its work on a dedicated background thread, so there is usually no
* need to use the {@code subscribeOn} method of RxJava.
*
* @param adapterName Name of the targeted adapter.
* @param procedureName Name of the targeted procedure for the specified adapter.
* @param parameters Variable number of parameters that the specified procedure is
* expecting. The types of each parameter need to match the type that
* the procedure is expecting on the server.
* @return {@code Observable} that emits a {@code WLResponse} for an MFP procedure invocation.
*/
@NonNull
public Observable<WLResponse> invokeProcedure(@NonNull final String adapterName,
@NonNull final String procedureName,
@Nullable final Object... parameters) {
return Observable.create(new Observable.OnSubscribe<WLResponse>() {
@Override
public void call(Subscriber<? super WLResponse> subscriber) {
WLClient client = WLClient.getInstance();
if (client == null) {
subscriber.onError(new Throwable("WLClient instance does not exist"));
return;
}
WLProcedureInvocationData invocationData =
new WLProcedureInvocationData(adapterName, procedureName, false);
invocationData.setParameters(parameters);
client.invokeProcedure(invocationData, new RxResponseListener(subscriber),
getRequestOptions());
}
});
}
/**
* Transforms an {@code Observable} that emits a {@code WLResponse} with a valid JSON payload
* into a new {@code Observable} with the targeted {@code Class} type. This can be done by
* passing the result of this method to the {@code compose} operator of RxJava. A variable
* number of member names can be provided for accessing JSON data that is nested arbitrarily
* deep inside the response payload.
*
* @param clazz Targeted {@code Class} type for the JSON payload to be serialized into.
* @param memberNames Variable number of member names for accessing JSON data that is nested
* arbitrarily deep inside the response payload.
* @return {@code Transformer} that can be supplied to the {@code compose} operator of RxJava
* . The input {@code Observable} must emit a {@code WLResponse} with a valid JSON payload.
*/
@NonNull
public static <T> Transformer<WLResponse, T> serializeTo(@NonNull final Class<T> clazz,
@NonNull final String... memberNames) {
return transformJson(new Func1<WLResponse, T>() {
@Override
public T call(WLResponse wlResponse) {
JsonElement element = parseNestedJson(wlResponse, memberNames);
return new Gson().fromJson(element, clazz);
}
});
}
/**
* Transforms an {@code Observable} that emits a {@code WLResponse} with a valid JSON payload
* into a new Observable for the targeted {@code TypeToken}. This can be done by passing the
* result of this method to the {@code compose} operator of RxJava. A {@code TypeToken} is
* necessary when the targeted type is a parameterized type, such as {@code List}. A variable
* number of member names can be provided for accessing JSON data that is nested arbitrarily
* deep inside the response payload.
*
* @param typeToken Captures the necessary type information for the targeted parameterized
* type, such as {@code List}.
* @param memberNames Variable number of member names for accessing JSON data that is nested
* arbitrarily deep inside the response payload.
* @return {@code Transformer} that can be supplied to the {@code compose} operator of RxJava
* . The input {@code Observable} must emit a {@code WLResponse} with a valid JSON payload.
*/
@NonNull
public static <T> Transformer<WLResponse, T> serializeTo(@NonNull final TypeToken<T> typeToken,
@NonNull final String... memberNames) {
return transformJson(new Func1<WLResponse, T>() {
@Override
public T call(WLResponse wlResponse) {
JsonElement element = parseNestedJson(wlResponse, memberNames);
return new Gson().fromJson(element, typeToken.getType());
}
});
}
private static <T> Transformer<WLResponse, T> transformJson(final Func1<WLResponse, T> func) {
return new Transformer<WLResponse, T>() {
@Override
public Observable<T> call(Observable<WLResponse> wlResponseObservable) {
return wlResponseObservable.map(func);
}
};
}
private static JsonElement parseNestedJson(WLResponse wlResponse, String... memberNames) {
String json = wlResponse.getResponseJSON().toString();
JsonObject jsonObject = new JsonParser().parse(json).getAsJsonObject();
// For each member name, fetch the object it maps to until you reach the final member name.
// Once the final member name is reached, return its corresponding value.
for (int i = 0, size = memberNames.length; i < size; i++) {
String member = memberNames[i];
if (i == size - 1) {
// last member name reached; return its value
return jsonObject.get(member);
} else {
// more member names remain, therefore current member must map to an object
jsonObject = jsonObject.getAsJsonObject(member);
}
}
// no nesting required; return top-level object
return jsonObject;
}
private WLRequestOptions getRequestOptions() {
WLRequestOptions requestOptions = new WLRequestOptions();
requestOptions.setTimeout(timeout);
requestOptions.setInvocationContext(invocationContext);
return requestOptions;
}
private static class RxResponseListener implements WLResponseListener {
private Subscriber<? super WLResponse> subscriber;
RxResponseListener(Subscriber<? super WLResponse> subscriber) {
this.subscriber = subscriber;
}
@Override
public void onSuccess(WLResponse wlResponse) {
subscriber.onNext(wlResponse);
subscriber.onCompleted();
}
@Override
public void onFailure(WLFailResponse wlFailResponse) {
subscriber.onError(new Throwable(wlFailResponse.getErrorMsg()));
}
}
}
| cafejava/src/main/java/com/ibm/mil/cafejava/CafeJava.java | /*
* Licensed Materials - Property of IBM
* © Copyright IBM Corporation 2015. All Rights Reserved.
*/
package com.ibm.mil.cafejava;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
import com.worklight.wlclient.api.WLClient;
import com.worklight.wlclient.api.WLFailResponse;
import com.worklight.wlclient.api.WLProcedureInvocationData;
import com.worklight.wlclient.api.WLRequestOptions;
import com.worklight.wlclient.api.WLResponse;
import com.worklight.wlclient.api.WLResponseListener;
import rx.Observable;
import rx.Subscriber;
import rx.functions.Func1;
import static rx.Observable.Transformer;
/**
* Configurable MFP client for establishing connections and invoking procedures in a reactive
* manner. For a detailed guide on using this class, visit
* <a href="https://github.com/t-preiss/CafeJava" target="_blank">the project's GitHub page</a> and
* view the README.
*
* @author John Petitto (github @jpetitto)
* @author Tanner Preiss (github @t-preiss)
*/
public final class CafeJava {
private int timeout = 30_000;
private Object invocationContext;
/**
* The timeout that will be used for any MFP call invoked from this instance of {@code
* CafeJava}.
*
* @param timeout Number of millis to wait for an MFP call to respond.
* @return The current instance of {@code CafeJava} to allow for easy call chaining.
*/
public CafeJava setTimeout(int timeout) {
if (timeout >= 0) {
this.timeout = timeout;
}
return this;
}
/**
* @return Number of millis to wait for an MFP call to respond.
*/
public int getTimeout() {
return timeout;
}
/**
* An {@code invocationContext} serves as a mechanism for tagging a {@code WLResponse}. This
* can be useful when the source of a {@code WLResponse} is unknown. This context will be used
* for any MFP call invoked from this instance of {@code CafeJava}.
*
* @param invocationContext Returned as part of any {@code WLResponse} that was originally
* invoked from this instance of {@code CafeJava}.
* @return The current instance of {@code CafeJava} to allow for easy call chaining.
*/
@NonNull
public CafeJava setInvocationContext(@Nullable Object invocationContext) {
this.invocationContext = invocationContext;
return this;
}
/**
* @return The context (object) for any {@code WLResponse} that was originally invoked from
* this instance of {@code CafeJava}.
*/
@Nullable
public Object getInvocationContext() {
return invocationContext;
}
/**
 * Creates an {@code Observable} that emits a {@code WLResponse} after attempting connection
 * to the MFP server instance defined in the {@code wlclient.properties} file. This
 * connection is only performed when there is a new {@code Subscriber} to the {@code
 * Observable}. The {@code Observable} will automatically perform its work on a dedicated
 * background thread, so there is usually no need to use the {@code subscribeOn} method of
 * RxJava.
 *
 * @param context Context used to create the {@code WLClient} instance — presumably an
 *                Android application or activity context; TODO confirm.
 * @return {@code Observable} that emits a {@code WLResponse} for an MFP connection.
 */
@NonNull
public Observable<WLResponse> connect(@NonNull final Context context) {
    return Observable.create(new Observable.OnSubscribe<WLResponse>() {
        @Override
        public void call(Subscriber<? super WLResponse> subscriber) {
            // Work happens lazily, once per subscription.
            WLClient client = WLClient.createInstance(context);
            // RxResponseListener bridges the WL callback onto the subscriber.
            client.connect(new RxResponseListener(subscriber), getRequestOptions());
        }
    });
}
/**
 * Creates an {@code Observable} that emits a {@code WLResponse} after attempting invocation
 * of the specified procedure for the given adapter. This invocation is only performed when
 * there is a new {@code Subscriber} to the {@code Observable}. The {@code Observable} will
 * automatically perform its work on a dedicated background thread, so there is usually no
 * need to use the {@code subscribeOn} method of RxJava.
 *
 * @param adapterName   Name of the targeted adapter.
 * @param procedureName Name of the targeted procedure for the specified adapter.
 * @param parameters    Variable number of parameters that the specified procedure is
 *                      expecting. The types of each parameter need to match the type that
 *                      the procedure is expecting on the server.
 * @return {@code Observable} that emits a {@code WLResponse} for an MFP procedure invocation.
 */
@NonNull
public Observable<WLResponse> invokeProcedure(@NonNull final String adapterName,
                                              @NonNull final String procedureName,
                                              @Nullable final Object... parameters) {
    return Observable.create(new Observable.OnSubscribe<WLResponse>() {
        @Override
        public void call(Subscriber<? super WLResponse> subscriber) {
            WLClient client = WLClient.getInstance();
            if (client == null) {
                // Emit a concrete, idiomatic exception type instead of a bare
                // Throwable so subscribers can reason about the failure.
                subscriber.onError(
                        new IllegalStateException("WLClient instance does not exist"));
                return;
            }

            // NOTE(review): the meaning of the third constructor argument (false) is
            // not visible here — confirm against WLProcedureInvocationData docs.
            WLProcedureInvocationData invocationData =
                    new WLProcedureInvocationData(adapterName, procedureName, false);
            invocationData.setParameters(parameters);

            client.invokeProcedure(invocationData, new RxResponseListener(subscriber),
                    getRequestOptions());
        }
    });
}
/**
 * Transforms an {@code Observable} that emits a {@code WLResponse} with a valid JSON payload
 * into a new {@code Observable} with the targeted {@code Class} type. This can be done by
 * passing the result of this method to the {@code compose} operator of RxJava. A variable
 * number of member names can be provided for accessing JSON data that is nested arbitrarily
 * deep inside the response payload.
 *
 * @param clazz Targeted {@code Class} type for the JSON payload to be serialized into.
 * @param memberNames Variable number of member names for accessing JSON data that is nested
 *                    arbitrarily deep inside the response payload.
 * @return {@code Transformer} that can be supplied to the {@code compose} operator of RxJava
 * . The input {@code Observable} must emit a {@code WLResponse} with a valid JSON payload.
 */
@NonNull
public static <T> Transformer<WLResponse, T> serializeTo(@NonNull final Class<T> clazz,
                                                         @NonNull final String... memberNames) {
    return transformJson(new Func1<WLResponse, T>() {
        @Override
        public T call(WLResponse wlResponse) {
            // Drill down to the nested element before deserializing.
            JsonElement element = parseNestedJson(wlResponse, memberNames);
            // NOTE(review): a new Gson is created per response; consider a shared
            // static instance if this path becomes hot.
            return new Gson().fromJson(element, clazz);
        }
    });
}
/**
 * Transforms an {@code Observable} that emits a {@code WLResponse} with a valid JSON payload
 * into a new Observable for the targeted {@code TypeToken}. This can be done by passing the
 * result of this method to the {@code compose} operator of RxJava. A {@code TypeToken} is
 * necessary when the targeted type is a parameterized type, such as {@code List}. A variable
 * number of member names can be provided for accessing JSON data that is nested arbitrarily
 * deep inside the response payload.
 *
 * @param typeToken Captures the necessary type information for the targeted parameterized
 *                  type, such as {@code List}.
 * @param memberNames Variable number of member names for accessing JSON data that is nested
 *                    arbitrarily deep inside the response payload.
 * @return {@code Transformer} that can be supplied to the {@code compose} operator of RxJava
 * . The input {@code Observable} must emit a {@code WLResponse} with a valid JSON payload.
 */
@NonNull
public static <T> Transformer<WLResponse, T> serializeTo(@NonNull final TypeToken<T> typeToken,
                                                         @NonNull final String... memberNames) {
    return transformJson(new Func1<WLResponse, T>() {
        @Override
        public T call(WLResponse wlResponse) {
            // Drill down to the nested element before deserializing.
            JsonElement element = parseNestedJson(wlResponse, memberNames);
            // TypeToken preserves generic type info erased at runtime.
            return new Gson().fromJson(element, typeToken.getType());
        }
    });
}
/**
 * Builds a {@code Transformer} that maps each emitted {@code WLResponse} through the
 * supplied conversion function.
 */
private static <T> Transformer<WLResponse, T> transformJson(final Func1<WLResponse, T> mapper) {
    return new Transformer<WLResponse, T>() {
        @Override
        public Observable<T> call(Observable<WLResponse> source) {
            return source.map(mapper);
        }
    };
}
/**
 * Walks the response JSON along the given member names and returns the innermost element.
 */
private static JsonElement parseNestedJson(WLResponse wlResponse, String... memberNames) {
    String json = wlResponse.getResponseJSON().toString();
    JsonObject jsonObject = new JsonParser().parse(json).getAsJsonObject();

    // For each member name, fetch the object it maps to until you reach the final
    // member name. Once the final member name is reached, return its value.
    for (int i = 0, size = memberNames.length; i < size; i++) {
        String member = memberNames[i];

        if (i == size - 1) {
            // last member name reached; return its value
            return jsonObject.get(member);
        } else {
            // more member names remain, so the current member must map to an object
            jsonObject = jsonObject.getAsJsonObject(member);
        }
    }

    // no member names supplied; return the top-level object
    return jsonObject;
}
/** Builds request options carrying this instance's timeout and invocation context. */
private WLRequestOptions getRequestOptions() {
    final WLRequestOptions options = new WLRequestOptions();
    options.setTimeout(timeout);
    options.setInvocationContext(invocationContext);
    return options;
}
/**
 * Bridges the WL callback API onto an RxJava {@code Subscriber}: success emits the
 * response and completes the stream; failure surfaces the error message via
 * {@code onError}.
 */
private static class RxResponseListener implements WLResponseListener {
    // final: the listener is bound to exactly one subscriber for its lifetime.
    private final Subscriber<? super WLResponse> subscriber;

    RxResponseListener(Subscriber<? super WLResponse> subscriber) {
        this.subscriber = subscriber;
    }

    @Override
    public void onSuccess(WLResponse wlResponse) {
        subscriber.onNext(wlResponse);
        subscriber.onCompleted();
    }

    @Override
    public void onFailure(WLFailResponse wlFailResponse) {
        // Only the error message is propagated; WLFailResponse itself is not a Throwable.
        subscriber.onError(new Throwable(wlFailResponse.getErrorMsg()));
    }
}
}
| Clarify intent of parseNestedJson logic with comments
| cafejava/src/main/java/com/ibm/mil/cafejava/CafeJava.java | Clarify intent of parseNestedJson logic with comments | <ide><path>afejava/src/main/java/com/ibm/mil/cafejava/CafeJava.java
<ide> String json = wlResponse.getResponseJSON().toString();
<ide> JsonObject jsonObject = new JsonParser().parse(json).getAsJsonObject();
<ide>
<add> // For each member name, fetch the object it maps to until you reach the final member name.
<add> // Once the final member name is reached, return its corresponding value.
<ide> for (int i = 0, size = memberNames.length; i < size; i++) {
<ide> String member = memberNames[i];
<ide>
<ide> if (i == size - 1) {
<add> // last member name reached; return its value
<ide> return jsonObject.get(member);
<ide> } else {
<add> // more member names remain, therefore current member must map to an object
<ide> jsonObject = jsonObject.getAsJsonObject(member);
<ide> }
<ide> }
<ide>
<add> // no nesting required; return top-level object
<ide> return jsonObject;
<ide> }
<ide> |
|
JavaScript | mit | 468ab59420ea7989cc473c193a96e0702953d3a6 | 0 | SerkanSipahi/app-decorators,SerkanSipahi/app-decorators |
// internal libs
import { Router } from 'src/apps/router';
import { XRegExp } from 'src/libs/dependencies';
import { Event } from 'test/mocks/event';
import { Location } from 'test/mocks/location';
describe('Class Router', () => {
// A {{placeholder}} inside the url marks it as dynamic.
describe('_isDynamicURL method', () => {
    it('should return true if has variable in url otherwise false', () => {

        // setup
        let instance = Router.create();

        // a placeholder like {{a}} makes the url dynamic; plain text does not
        instance._isDynamicURL('{{a}}').should.be.true();
        instance._isDynamicURL('a').should.be.false();

        // cleanup
        instance.destroy();

    });
});
it('should convert passed url to regex', () => {
// setup
let router = Router.create();
// test
router._convertRouteToXRegexExp('{{year}}').should.be.equal('(?<year>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('{{hour}}:{{min}}').should.be.equal('(?<hour>[\\d\\w?()|{}_.,-]+):(?<min>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('{{a}}/{{b}}/{{c}}').should.be.equal('(?<a>[\\d\\w?()|{}_.,-]+)\\/(?<b>[\\d\\w?()|{}_.,-]+)\\/(?<c>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('?id={{id}}&name={{name}}').should.be.equal('\\?id=(?<id>[\\d\\w?()|{}_.,-]+)&name=(?<name>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/details?page={{page}}').should.be.equal('\\/details\\?page=(?<page>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/details?page={{a}}|{{b}}').should.be.equal('\\/details\\?page=(?<a>[\\d\\w?()|{}_.,-]+)\\|(?<b>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/calc?add={{a}}+{{b}}').should.be.equal('\\/calc\\?add=(?<a>[\\d\\w?()|{}_.,-]+)\\+(?<b>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/calc?multi={{a}}*{{b}}').should.be.equal('\\/calc\\?multi=(?<a>[\\d\\w?()|{}_.,-]+)\\*(?<b>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/group?that=({{group}})').should.be.equal('\\/group\\?that=\\((?<group>[\\d\\w?()|{}_.,-]+)\\)');
//cleanup
router.destroy();
});
});
// Route registration: static and dynamic routes are kept in separate tables, and
// duplicate registration of either kind must throw.
describe('_addRoute method', () => {

    // setup
    let router = null;
    beforeEach(() => {

        // setup
        router = Router.create();

        // add static routes
        router._addRoute('/this/is/a/route/1', 'name1');
        router._addRoute('/this/is/a/route/2', 'name2');
        router._addRoute('/this/is/{{a}}/route/4', 'name4');
        router._addRoute('/this/is/{{b}}/{{c}}/route/5', 'name5');
        router._addRoute('/page?id={{id}}&name={{name}}', 'name6');

    });
    afterEach(() => router.destroy() );

    it('should return registered static routes', () => {

        // static routes carry no regex/params until matched
        router._getRoutes('static').should.containEql({
            '/this/is/a/route/1': {
                name: 'name1',
                type: 'static',
                route: '/this/is/a/route/1',
                regex: null,
                params: null,
                fragment: null,
                cache: false,
            },
            '/this/is/a/route/2': {
                name: 'name2',
                type: 'static',
                route: '/this/is/a/route/2',
                regex: null,
                params: null,
                fragment: null,
                cache: false,
            },
        });

    });

    it('should return registered dynamic routes', () => {

        // dynamic routes get their XRegExp pre-computed at registration time
        router._getRoutes('dynamic').should.containEql({
            '/this/is/{{a}}/route/4': {
                name: 'name4',
                type: 'dynamic',
                route: '/this/is/{{a}}/route/4',
                regex: '\\/this\\/is\\/(?<a>[\\d\\w?()|{}_.,-]+)\\/route\\/4',
                params: null,
                fragment: null,
                cache: false,
            },
            '/this/is/{{b}}/{{c}}/route/5': {
                name: 'name5',
                type: 'dynamic',
                route: '/this/is/{{b}}/{{c}}/route/5',
                regex: '\\/this\\/is\\/(?<b>[\\d\\w?()|{}_.,-]+)\\/(?<c>[\\d\\w?()|{}_.,-]+)\\/route\\/5',
                params: null,
                fragment: null,
                cache: false,
            },
            '/page?id={{id}}&name={{name}}': {
                name: 'name6',
                type: 'dynamic',
                route: '/page?id={{id}}&name={{name}}',
                regex: '\\/page\\?id=(?<id>[\\d\\w?()|{}_.,-]+)&name=(?<name>[\\d\\w?()|{}_.,-]+)',
                params: null,
                fragment: null,
                cache: false,
            }
        });

    });

    it('should throw error if duplicate route added', () => {

        (() => { router._addRoute('/this/is/a/route/2', 'name2'); }).should.throw();
        (() => { router._addRoute('/this/is/{{a}}/route/4', 'name4'); }).should.throw();

    });

});
// A static fragment resolves to its full route record (with fragment filled in);
// unknown fragments resolve to null.
describe('_matchStaticURL method', () => {
    it('should return matchedObject by passed fragment', () => {

        // setup
        let router = Router.create();
        router._addRoute('/this/is/a/route/1', 'route1');

        // test: positiv
        router._matchStaticURL('/this/is/a/route/1').should.containEql({
            name: 'route1',
            type: 'static',
            route: '/this/is/a/route/1',
            params: null,
            regex: null,
            fragment: '/this/is/a/route/1',
            cache: false,
        });

        // test: negativ
        should(router._matchStaticURL('/not/added/route')).be.exactly(null);

        // cleanup
        router.destroy();

    });
});
// Dynamic fragments resolve through the generated XRegExp; named groups land on
// `params` (numeric strings are coerced to numbers, e.g. id: 26 below).
describe('_matchDynamicURL method', () => {
    it('should return matchedObject by passed fragment', () => {

        // setup
        let router = Router.create();
        router._addRoute('/{{a}}/b/{{c}}/d', 'route1');
        router._addRoute('?id={{id}}&name={{name}}', 'route2');

        // test 1: positiv
        router._matchDynamicURL('/foo/b/bar/d').should.containEql({
            name: 'route1',
            type: 'dynamic',
            route: '/{{a}}/b/{{c}}/d',
            regex: '\\/(?<a>[\\d\\w?()|{}_.,-]+)\\/b\\/(?<c>[\\d\\w?()|{}_.,-]+)\\/d',
            params: {
                a: 'foo',
                c: 'bar',
            },
            fragment: '/foo/b/bar/d',
            cache: false,
        });

        // test 2: positiv (with queryString); extra query params are tolerated
        router._matchDynamicURL('/a/b/c/page?id=26&name=mars&update=true').should.containEql({
            name: 'route2',
            type: 'dynamic',
            route: '?id={{id}}&name={{name}}',
            regex: '\\?id=(?<id>[\\d\\w?()|{}_.,-]+)&name=(?<name>[\\d\\w?()|{}_.,-]+)',
            params: {
                id: 26,
                name: 'mars',
            },
            fragment: '/a/b/c/page?id=26&name=mars&update=true',
            cache: false,
        });

        // test: negativ
        should(router._matchDynamicURL('/not/added/route')).be.exactly(null);

        // cleanup
        router.destroy();

    });
});
// _matchURL delegates to _matchStaticURL / _matchDynamicURL depending on the
// fragment, and serves repeated dynamic lookups from its route cache. Prototype
// spies instrument all instances created inside the tests.
describe('_matchURL method', () => {

    let router = null;
    it('should return only valid value from _matchStaticURL by passed static path', () => {

        // setup
        router = Router.create();
        router._addRoute('/some/static/path', 'route1');

        // test: it should call _matchStaticURL
        router._matchURL('/some/static/path');
        router._matchStaticURL.returnValues[0].name.should.be.equal('route1');
        router._matchDynamicURL.returnValues.should.be.length(0);
        should(router._getRouteCache.returnValues[0]).be.null();

    });
    it('should return only valid value from _matchDynamicURL by passed dynamic path', () => {

        // setup
        router = Router.create();
        router._addRoute('/{{dynamic}}/b/{{path}}/d', 'route2');

        // test: it should call _matchDynamicURL
        router._matchURL('/hey/b/there/d');
        should(router._matchStaticURL.returnValues[0]).be.null();
        router._matchDynamicURL.returnValues[0].name.should.be.equal('route2');
        router._matchDynamicURL.returnValues[0].cache.should.be.false();
        should(router._getRouteCache.returnValues[0]).be.null();

    });
    it('should return on second call from cache by passed dynamic path', () => {

        // setup
        router = Router.create();
        router._addRoute('/{{dynamic}}/b/{{path}}/d', 'route2');

        // test: returned not from cache
        router._matchURL('/hey/b/there/d');
        router._matchDynamicURL.returnValues[0].cache.should.be.false();

        // test: returned from cache
        router._matchURL('/hey/b/there/d');
        router._getRouteCache.returnValues[1].cache.should.be.true();

        // cleanup
        // NOTE(review): afterEach also calls router.destroy(); double destroy appears
        // harmless here — confirm destroy() is idempotent.
        router.destroy();

    });

    // setup
    beforeEach(() => {
        sinon.spy(Router.prototype, '_matchStaticURL');
        sinon.spy(Router.prototype, '_matchDynamicURL');
        sinon.spy(Router.prototype, '_getRouteCache');
    });

    afterEach(() => {
        router._matchStaticURL.restore();
        router._matchDynamicURL.restore();
        router._getRouteCache.restore();
        router.destroy();
    });
});
// Only events of the configured action type ('click') may lead to pushState;
// synthetic 'pushState' events must not recurse into pushState.
describe('_applyActionEvent (integration test) method', () => {

    /**
     * We check only pushstate inside of _applyActionEvent,
     * all other methods are tested.
     */

    it('should call pushstate if click event passed', () => {

        // setup
        let router = Router.create({ event_action: 'click a' });
        let spy_pushState = sinon.spy(router, "pushState");

        // mock click event
        let event = new Event('click', {
            target: {
                href: 'http://www.domain.com/some/path.html',
            }
        });

        // test
        router._applyActionEvent(event);
        spy_pushState.callCount.should.equal(1);

        // cleanup
        router.pushState.restore();
        router.destroy();

    });

    it('should not call pushstate if pushstate event passed', () => {

        // setup
        let router = Router.create({ event_action: 'click a' });
        let spy_pushState = sinon.spy(router, "pushState");

        // mock pushState event
        let event = new Event('pushState', {
            target: {
                href: 'http://www.domain.com/some/path.html',
            }
        });

        // test
        router._applyActionEvent(event);
        spy_pushState.callCount.should.equal(0);

        // cleanup
        router.pushState.restore();
        router.destroy();

    });

});
// The configured event_action string has the form '<event> <pattern>'; these two
// describes cover extracting the event part and comparing it to a candidate type.
describe('_getDefinedEventAction method', () => {
    it('should get defined event action', () => {

        //setup
        let router = Router.create({
            event_action: 'my-action pattern'
        });

        // test: the first token of event_action is the event name
        router._getDefinedEventAction().should.be.equal('my-action');

        // cleanup
        router.destroy();

    });
});

describe('_isDefinedEventAction method', () => {
    it('should check if passed event_type is euqal to our defined event type', () => {

        // setup
        let router = Router.create({
            event_action: 'my-action pattern'
        });

        // test
        router._isDefinedEventAction('my-action').should.be.true();
        router._isDefinedEventAction('other-action').should.be.false();

        // cleanup
        router.destroy();

    });
});
// href resolution differs by event kind: action events read target.href, while
// pushstate events fall back to the injected location object.
describe('_getCurrentHref method', () => {
    it('should get current href by passed event', () => {

        // mock location ( see above import)
        let locationMock = new Location({
            href : 'http://mockdomain.com/event/pushState/href.html'
        });

        // setup
        let router = Router.create({
            event_action: 'myEvent a',
            location: locationMock,
        });

        //*** Test click event ***
        // mock click event ( see above import)
        let eventClickMock = new Event('myEvent', {
            target: {
                href: 'http://mockdomain.com/event/click/href.html'
            }
        });
        router._getCurrentHref(eventClickMock).should.be.equal('http://mockdomain.com/event/click/href.html');

        //*** Test pushstate event ***
        // mock pushstate event ( see above import)
        let eventPushstateMock = new Event('pushstate');
        router._getCurrentHref(eventPushstateMock).should.be.equal('http://mockdomain.com/event/pushState/href.html');

        // cleanup
        router.destroy();

    });
});
// _addPromise must both return a promise and stash its resolve function on
// router.promise[<name>] so trigger() can later resolve it.
describe('_addPromise method', () => {
    it('should add and create promise collection on prototype.promise', () => {

        // setup
        let router = Router.create();
        let promise1 = router._addPromise('name1');
        let promise2 = router._addPromise('name2');

        // test 1
        promise1.should.be.Promise();
        promise2.should.be.Promise();

        // test 2
        router.promise.should.have.propertyByPath('name1', 0).and.is.Function(); // resolve function
        router.promise.should.have.propertyByPath('name2', 0).and.is.Function(); // resolve function

        // cleanup: resolve promises
        router.promise.name1[0]();
        router.promise.name2[0]();

        // cleanup: destroy router
        router.destroy();

    });
});
// Change detection compares the candidate fragment against the stored one.
describe('_urlFragmentChanged method', () => {
    it('should check if fragment is changed in combination with _setURLFragment', () => {

        // setup
        let routerInstance = Router.create();

        // changed: stored fragment differs from the candidate
        routerInstance._setURLFragment('/');
        routerInstance._urlFragmentChanged('/some/url/fragment').should.be.true();

        // unchanged: stored fragment equals the candidate
        routerInstance._setURLFragment('/some/url/fragment');
        routerInstance._urlFragmentChanged('/some/url/fragment').should.be.false();

        // cleanup
        routerInstance.destroy();

    });
});
// NOTE(review): describe was titled '_isFloat' but the method under test is
// '_isNumeric'; the title now names the tested method.
describe('_isNumeric method', () => {
    it('should check if passed value is number or not', () => {

        // setup
        let router = Router.create();

        // test positiv: integer and float, both as number and as string
        router._isNumeric(123).should.be.true();
        router._isNumeric('123').should.be.true();
        router._isNumeric(123.45).should.be.true();
        router._isNumeric('123.45').should.be.true();

        // test negativ: strings with trailing non-numeric characters
        router._isNumeric('123.45a').should.be.false();
        router._isNumeric('123b').should.be.false();

        // cleanup
        router.destroy();

    });
});
// createURL augments the URL object with a 'fragment' property (path + query).
describe('createURL method', () => {
    it('should return url object with additional "fragment" property', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // test
        let url = router.createURL('http://www.mydomain.com/path/to/somewhere.html?a=1&b=2');
        url.fragment.should.equal('/path/to/somewhere.html?a=1&b=2');

        // cleanup
        router.destroy();

    });
});

// Router.Promise is a thin factory returning a native-style promise.
describe('Promise method', () => {
    it('should return promise object', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // test
        router.Promise(function(){}).should.be.Promise();

        // cleanup
        router.destroy();

    });
});
// Intentionally untested: addRouteListener shares its implementation with
// prototype.on and differs only in its argument signature.
describe('addRouteListener method', () => {
    it('should do the same like prototype.on', () => {
        // It´s not necessary to test it, it´s almost the same
        // like prototype.on. The only difference is
        // the argument signature
    });
});
// After stop(), triggered actions must not reach _applyActionEvent; start()
// re-enables processing. '::router._onAction' is the ES function-bind operator.
describe('stop method', () => {
    it('it should stop if router running', () => {

        // setup
        sinon.spy(Router.prototype, '_applyActionEvent');
        let router = Router.create();
        router.on('action', ::router._onAction);

        // test
        router.stop();
        router.trigger('action');
        router._applyActionEvent.callCount.should.be.equal(0);

        //cleanup
        router._applyActionEvent.restore();
        router.destroy();

    });
});

describe('start method', () => {
    it('it should start if router stoped', () => {

        // setup
        sinon.spy(Router.prototype, '_applyActionEvent');
        let router = Router.create();
        router.on('action', ::router._onAction);

        // test 1: while stopped, actions are not processed
        router.stop();
        router.trigger('action');
        router._applyActionEvent.callCount.should.be.equal(0);

        // test 2: after start, processing resumes
        router.start();
        router.trigger('action');
        router._applyActionEvent.callCount.should.be.equal(1);

        // cleanup
        router._applyActionEvent.restore();
        router.destroy();

    });
});
// whoami: reverse-lookup of a route record by URL fragment (static + dynamic).
describe('whoami method', () => {
    it('it should return route information by passed fragment', () => {

        // setup
        let router = Router.create();
        router.on('myRoute1 /some/path.html', () => {});
        router.on('myRoute2 /some/{{integer}}/{{float}}/path.html', () => {});

        // test 1 - positiv
        router.whoami('/some/path.html').should.be.containEql({
            name: 'myRoute1',
            type: 'static',
            regex: null,
            cache: false,
        });

        // test 2 - positiv; matched params are numerically coerced
        router.whoami('/some/123/4.34/path.html').should.be.containEql({
            name: 'myRoute2',
            type: 'dynamic',
            fragment: '/some/123/4.34/path.html',
            route: '/some/{{integer}}/{{float}}/path.html',
            regex: '\\/some\\/(?<integer>[\\d\\w?()|{}_.,-]+)\\/(?<float>[\\d\\w?()|{}_.,-]+)\\/path\\.html',
            params: {
                integer: 123,
                float: 4.34,
            },
            cache: false,
        });

        // test 3 - negativ
        should(router.whoami('/unknown/path.html')).be.exactly(null);

        // cleanup
        router.destroy();

    });
});

// which: lookup of a route record by its registered name; params stay null
// because no fragment has been matched yet.
describe('which method', () => {
    it('it should return route information by passed name', () => {

        // setup
        let router = Router.create();
        router.on('myRoute1 /some/{{integer}}/{{float}}/path.html', () => {});

        // test 1 - positiv
        router.which('myRoute1').should.be.containEql({
            name: 'myRoute1',
            type: 'dynamic',
            fragment: null,
            route: '/some/{{integer}}/{{float}}/path.html',
            regex: '\\/some\\/(?<integer>[\\d\\w?()|{}_.,-]+)\\/(?<float>[\\d\\w?()|{}_.,-]+)\\/path\\.html',
            params: null,
            cache: false,
        });

        // test 2 - negativ
        should(router.which('unknownRoute')).be.exactly(null);

        // cleanup
        router.destroy();

    });
});
// Builds a concrete URL from a dynamic route template plus a params object;
// missing params must throw.
describe('_constructDynamicURL', () => {
    it('should construct url by passt dynamic routename', () => {

        // setup
        let routeObject = null;
        let router = Router.create();
        router._addRoute('/person/{{name}}/{{surename}}/id/{{id}}', 'myRoute1');
        router._addRoute('?page={{page}}&id={{id}}', 'myRoute2');

        // test 1: path-style template
        routeObject = router.which('myRoute1');
        router._constructDynamicURL(routeObject.route,
            { name: 'serkan', surename: 'sipahi', id: 333 }
        ).should.equal('/person/serkan/sipahi/id/333');

        // test 2: query-string-style template
        routeObject = router.which('myRoute2');
        router._constructDynamicURL(routeObject.route,
            { page: 'details', id: 999 }
        ).should.equal('?page=details&id=999');

        // cleanup
        router.destroy();

    });
    it('should throw error if something gone wrong', () => {

        // setup
        let router = Router.create();
        router._addRoute('/person/{{name}}/{{surename}}/id/{{id}}', 'myRoute1');
        let routeObject = router.which('myRoute1');

        // should throw if not params passed
        (() => { router._constructDynamicURL(routeObject.route) }).should.throw();

        // should throw if param is missing
        (() => { router._constructDynamicURL(routeObject.route, { id: 1 }) }).should.throw();

        // cleanup
        router.destroy();

    });
});
// constructURL resolves a registered route name to its URL; static routes ignore
// any passed params, unknown names throw.
describe('constructURL method', () => {
    it('should construct url by passed static routename', () => {

        // setup
        let router = Router.create();
        router.on('myRoute1 /some/static/path.html', () => {});

        // test 1
        router.constructURL('myRoute1').should.be.equal('/some/static/path.html');

        // test 2 - ignores params
        router.constructURL('myRoute1', { a:1, b:2 }).should.be.equal('/some/static/path.html');

        // test 3 - throw
        (() => { router.constructURL() }).should.throw();

        // test 4 - throw
        (() => { router.constructURL('myRoute99') }).should.throw();
        (() => { router.constructURL('myRoute99', {c:3, d:4}) }).should.throw();

        // cleanup
        router.destroy();

    });
    // NOTE(review): fixed typo in the pending test title ('dynmic' -> 'dynamic').
    it('should construct url by passed dynamic routename and params', () => {

    });
});
// go() navigates (pushState) by route name; unknown names throw.
describe('go method', () => {
    it('should go by passed static routename', () => {

        let router = Router.create();
        router.on('myRoute1 /some/static/path.html', () => {});
        sinon.spy(router, "pushState");

        // test 1
        router.go('myRoute1');
        router.pushState.callCount.should.be.equal(1);

        // test 2 - ignores params for static routes
        router.go('myRoute1', { a:1, b:2 });
        router.pushState.callCount.should.be.equal(2);

        // test 3 - throw
        (() => { router.go() }).should.throw();

        // test 4 - throw
        (() => { router.go('myRoute99') }).should.throw();
        (() => { router.go('myRoute99', {c:3, d:4}) }).should.throw();

        router.pushState.restore();
        router.destroy();

    });
    // NOTE(review): fixed typo in the pending test title ('dynmic' -> 'dynamic').
    it('should go by passed dynamic routename and params', () => {

    });
});
// Argument-validation matrix for on(): no args throws; eventType alone returns a
// promise; eventType + handler registers a callback and returns null.
describe('on method, no arguments passed', () => {
    it('should throw an error', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // test
        (() => { router.on() }).should.throw();

        // cleanup
        router.destroy();

    });
});

describe('on method, no handler passed', () => {
    it('should not throw an error', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // test
        (() => { router.on('urlchange') }).should.not.throw();

        // cleanup
        router.destroy();

    });
});

describe('on method, eventType and handler passed', () => {
    it('should not throw an error', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // test
        (() => { router.on('someevent', () => {}) }).should.not.throw();

        // cleanup
        router.destroy();

    });
});

describe('on method, passed only eventType', () => {
    it('should return promise', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // test: promise form also supports chaining with .then
        router.on('urlchange').should.be.Promise();
        router.on('urlchange').then(() => {}).should.be.Promise();

        // cleanup
        router.destroy();

    });
});

describe('on method, passed eventType and handler', () => {
    it('should return null', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // test
        should(router.on('my route', (event) => {})).be.null();

        // cleanup
        router.destroy();

    });
});
// Mixed subscription: trigger() must call the handler, resolve all pending
// promises exactly once (draining the queue), and pass trigger args through.
describe('on method', () => {
    it('should handle promise or handler correctly', (done) => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });
        let spy_handler = sinon.spy(() => {});
        router.on('urlchange', spy_handler);

        // test if promise resolved
        router.on('urlchange').should.be.finally.propertyByPath('thats').eql('nice');
        router.on('urlchange').should.be.finally.propertyByPath('thats').eql('nice');
        router.on('urlchange').then(({thats}) => {
            return `#${thats}#`;
        }).should.be.finally.eql('#nice#');

        // test promise count
        router.promise.urlchange.should.be.length(3);

        // test triggering of urlchange; resolved promises are removed from the queue
        router.trigger('urlchange', { thats: 'nice' });
        spy_handler.callCount.should.equal(1);
        router.promise.urlchange.should.be.length(0);

        // test with trigger args
        router.trigger('urlchange', { hello: 'world' });
        spy_handler.callCount.should.equal(2);
        spy_handler.args[1][0].should.propertyByPath('hello').eql('world');

        // need this timeout for promise tests
        setTimeout(() => {
            // cleanup
            router.destroy();
            done();
        }, 10);

    });
});
// Route bookkeeping and explicit triggering: duplicate registration throws, and
// trigger(<name>[, params]) dispatches to the matching handler.
describe('on method, registered many events', () => {
    it('should assign routes correctly', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });
        router.on('Startpage /index.html');
        router.on('Resultpage /results.html');
        router.on('Detailpage /details.html');
        router.on('Configurator /configurator.html');

        // test routes count
        Object.keys(router._getRoutes('static')).should.be.length(4);

        // should throw error because route is already exists
        (() => { router.on('Detailpage /details.html'); }).should.throw();

        // routes count must be the same
        Object.keys(router._getRoutes('static')).should.be.length(4);

        // cleanup
        router.destroy();

    });
});

describe('on method, if explicitly triggering without params', () => {
    it('should trigger correct handler', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // spyies
        let spy_startpage_handler = sinon.spy(() => {});
        let spy_resultpage_handler = sinon.spy(() => {});

        router.on('Startpage /index.html', spy_startpage_handler);
        router.on('Resultpage /results.html', spy_resultpage_handler);

        router.trigger('Startpage');
        router.trigger('Resultpage');
        router.trigger('Resultpage');

        spy_startpage_handler.callCount.should.equal(1);
        spy_resultpage_handler.callCount.should.equal(2);

        // cleanup
        router.destroy();

    });
});

describe('on method, explicitly triggering with params', () => {
    it('should trigger correct handler', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });

        // spyies
        let spy_configurator_handler = sinon.spy(() => {});
        router.on('Product /product/{{id}}-{{name}}.html', spy_configurator_handler);

        // test: positiv
        router.trigger('Product', { id: 123, name: 'foo' });
        router.trigger('Product', { id: 456, name: 'bar' });
        spy_configurator_handler.callCount.should.equal(2);

        // cleanup
        router.destroy();

    });
});
// Pending (it.skip): handler-as-href redirection is not implemented/verified yet.
describe('on method, passed handler as href', () => {
    it.skip('should route to href', () => {

        // setup
        let router = Router.create({
            scope: document.createElement('div'),
        });
        router.on('Google /index.html', 'https://www.google.de');
        router.on('AbsoluteURL /absolute.html', '/absolute-internal-url.html');
        router.on('RelativeURL /relative.html', 'relative-internal-url.html');

        router.trigger('Google');
        router.trigger('AbsoluteURL');
        router.trigger('RelativeURL');

    });
});
// End-to-end urlchange flow in a real DOM: anchor clicks and router.back()
// must emit urlchange with the expected fragments; identical fragments dedupe.
describe('_onUrlchange method', () => {

    it('should call registered callback on element.click()', () => {

        element.querySelector('.foo').click();
        element.querySelector('.bar').click();
        element.querySelector('.baz').click();
        // clicking the same href twice must not fire a second urlchange
        element.querySelector('.baz').click();

        spy_urlchange.callCount.should.equal(3);
        spy_urlchange.args[0][0].fragment.should.equal('/index');
        spy_urlchange.args[1][0].fragment.should.equal('/index/details');
        spy_urlchange.args[2][0].fragment.should.equal('/index/details?a=1&b=2');

    });

    it('should call registered callback on router.back()', (done) => {

        element.querySelector('.foo').click();
        element.querySelector('.bar').click();
        element.querySelector('.baz').click();
        router.back();
        router.back();
        router.back();

        // history navigation is async; allow popstate events to flush first
        setTimeout(() => {
            spy_urlchange.args[0][0].fragment.should.equal('/index');
            spy_urlchange.args[1][0].fragment.should.equal('/index/details');
            spy_urlchange.args[2][0].fragment.should.equal('/index/details?a=1&b=2');
            spy_urlchange.args[3][0].fragment.should.equal('/index/details');
            spy_urlchange.args[4][0].fragment.should.equal('/index');
            done();
        }, 100);

    });

    // Setup
    let router, spy_onUrlchange, spy_urlchange, element;
    beforeEach(() => {

        spy_onUrlchange = sinon.spy(Router.prototype, "_onUrlchange");
        spy_urlchange = sinon.spy(() => {});
        element = document.createElement("div");
        router = Router.create({
            scope: element,
        });
        router.on('urlchange', spy_urlchange);

        element.classList.add('anchor-container');
        element.innerHTML = `
            <a class="foo" href="/index"> Index </a>
            <a class="bar" href="/index/details"> Details </a>
            <a class="baz" href="/index/details?a=1&b=2"> Params </a>
        `;
        document.body.appendChild(element);

    });
    afterEach(() => {

        // cleanup check
        router.destroy();
        document.body.removeChild(element);
        router._onUrlchange.restore();

    });

});
// Click-driven route matching for static and dynamic registrations, plus
// promise-style route events resolved via trigger().
describe('on method', () => {

    it('should match registered static and dynamic url route', (done) => {

        /**
         * Static Test
         */

        // register
        router.on('Index /index.html', spy_index_handler);
        router.on('SomePathURL /some/path/url.html', spy_somePathURL_handler);

        // trigger events by clicking
        element.querySelector('.index').click();
        element.querySelector('.some-path-url').click();
        element.querySelector('.not-registered-url').click();
        element.querySelector('.some-path-url').click();

        // test: the unregistered url must not invoke any handler
        spy_index_handler.callCount.should.equal(1);
        spy_somePathURL_handler.callCount.should.equal(2);

        /**
         * Dynamic Test
         */

        // register
        router.on('SomeDynamicURL /{{name}}/{{surname}}/123', ({ params, fragment }) => {
            params.name.should.be.equal('serkan');
            params.surname.should.be.equal('sipahi');
            fragment.should.be.equal('/serkan/sipahi/123');
            done();
        });

        // trigger events by clicking
        element.querySelector('.some-dynamic-path').click();

    });

    it('should trigger registered promise event', (done) => {

        // test 1
        router.on('SomePathURL /some/path/url.html').then(({fragment, id}) => {
            return `${fragment}?id=${id}`;
        }).should.be.finally.eql('/some/path/url.html?id=123');
        router.trigger('SomePathURL', { fragment: '/some/path/url.html', id: 123 });

        setTimeout(done, 200);

    });

    // Setup
    let element, spy_index_handler, spy_somePathURL_handler, spy_dynamicPath_handler, router;
    beforeEach(() => {

        // NOTE(review): router is created before `element` is (re)assigned below, so
        // `scope` is undefined on the first run — confirm Router falls back to a
        // default scope, or move element creation above Router.create.
        router = Router.create({
            scope: element,
        });
        spy_index_handler = sinon.spy(() => {});
        spy_somePathURL_handler = sinon.spy(() => {});
        spy_dynamicPath_handler = sinon.spy(() => {});

        element = document.createElement("div");
        element.classList.add('anchor-container');
        element.innerHTML = `
            <a class="index" href="/index.html"> Index </a>
            <a class="some-path-url" href="/some/path/url.html"> Some path URL </a>
            <a class="some-dynamic-path" href="/serkan/sipahi/123"> Some Dynamic URL </a>
            <a class="not-registered-url" href="/not/registered/url.html"> Not registered URL </a>
        `;
        document.body.appendChild(element);

    });
    afterEach(() => {

        // cleanup
        document.body.removeChild(element);
        router.destroy();

    });

});
describe('off method', () => {
it('should remove event by passed eventType', () => {
});
it('should throw error if not eventType passed', () => {
});
});
describe('destroy method', () => {
it('should remove all registered events', () => {
});
});
describe('create() function', () => {
it('should register many routes at once', () => {
});
it('should bind scope to handlers', () => {
});
});
});
| test/libs/router.spec.js |
// internal libs
import { Router } from 'src/apps/router';
import { XRegExp } from 'src/libs/dependencies';
import { Event } from 'test/mocks/event';
import { Location } from 'test/mocks/location';
describe('Class Router', () => {
describe('_isDynamicURL method', () => {
it('should return true if has variable in url otherwise false', () => {
// setup
let router = Router.create();
// test
router._isDynamicURL('{{a}}').should.be.true();
router._isDynamicURL('a').should.be.false();
// cleanup
router.destroy();
});
});
describe('_convertURLToRegex method', () => {
it('should convert passed url to regex', () => {
// setup
let router = Router.create();
// test
router._convertRouteToXRegexExp('{{year}}').should.be.equal('(?<year>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('{{hour}}:{{min}}').should.be.equal('(?<hour>[\\d\\w?()|{}_.,-]+):(?<min>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('{{a}}/{{b}}/{{c}}').should.be.equal('(?<a>[\\d\\w?()|{}_.,-]+)\\/(?<b>[\\d\\w?()|{}_.,-]+)\\/(?<c>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('?id={{id}}&name={{name}}').should.be.equal('\\?id=(?<id>[\\d\\w?()|{}_.,-]+)&name=(?<name>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/details?page={{page}}').should.be.equal('\\/details\\?page=(?<page>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/details?page={{a}}|{{b}}').should.be.equal('\\/details\\?page=(?<a>[\\d\\w?()|{}_.,-]+)\\|(?<b>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/calc?add={{a}}+{{b}}').should.be.equal('\\/calc\\?add=(?<a>[\\d\\w?()|{}_.,-]+)\\+(?<b>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/calc?multi={{a}}*{{b}}').should.be.equal('\\/calc\\?multi=(?<a>[\\d\\w?()|{}_.,-]+)\\*(?<b>[\\d\\w?()|{}_.,-]+)');
router._convertRouteToXRegexExp('/group?that=({{group}})').should.be.equal('\\/group\\?that=\\((?<group>[\\d\\w?()|{}_.,-]+)\\)');
//cleanup
router.destroy();
});
});
describe('_addRoute method', () => {
// setup
let router = null;
beforeEach(() => {
// setup
router = Router.create();
// add static routes
router._addRoute('/this/is/a/route/1', 'name1');
router._addRoute('/this/is/a/route/2', 'name2');
router._addRoute('/this/is/{{a}}/route/4', 'name4');
router._addRoute('/this/is/{{b}}/{{c}}/route/5', 'name5');
router._addRoute('/page?id={{id}}&name={{name}}', 'name6');
});
afterEach(() => router.destroy() );
it('should return registered static routes', () => {
router._getRoutes('static').should.containEql({
'/this/is/a/route/1': {
name: 'name1',
type: 'static',
route: '/this/is/a/route/1',
regex: null,
params: null,
fragment: null,
cache: false,
},
'/this/is/a/route/2': {
name: 'name2',
type: 'static',
route: '/this/is/a/route/2',
regex: null,
params: null,
fragment: null,
cache: false,
},
});
});
it('should return registered dynamic routes', () => {
router._getRoutes('dynamic').should.containEql({
'/this/is/{{a}}/route/4': {
name: 'name4',
type: 'dynamic',
route: '/this/is/{{a}}/route/4',
regex: '\\/this\\/is\\/(?<a>[\\d\\w?()|{}_.,-]+)\\/route\\/4',
params: null,
fragment: null,
cache: false,
},
'/this/is/{{b}}/{{c}}/route/5': {
name: 'name5',
type: 'dynamic',
route: '/this/is/{{b}}/{{c}}/route/5',
regex: '\\/this\\/is\\/(?<b>[\\d\\w?()|{}_.,-]+)\\/(?<c>[\\d\\w?()|{}_.,-]+)\\/route\\/5',
params: null,
fragment: null,
cache: false,
},
'/page?id={{id}}&name={{name}}': {
name: 'name6',
type: 'dynamic',
route: '/page?id={{id}}&name={{name}}',
regex: '\\/page\\?id=(?<id>[\\d\\w?()|{}_.,-]+)&name=(?<name>[\\d\\w?()|{}_.,-]+)',
params: null,
fragment: null,
cache: false,
}
});
});
it('should throw error if duplicate route added', () => {
(() => { router._addRoute('/this/is/a/route/2', 'name2'); }).should.throw();
(() => { router._addRoute('/this/is/{{a}}/route/4', 'name4'); }).should.throw();
});
});
describe('_matchStaticURL method', () => {
it('should return matchedObject by passed fragment', () => {
// setup
let router = Router.create();
router._addRoute('/this/is/a/route/1', 'route1');
// test: positiv
router._matchStaticURL('/this/is/a/route/1').should.containEql({
name: 'route1',
type: 'static',
route: '/this/is/a/route/1',
params: null,
regex: null,
fragment: '/this/is/a/route/1',
cache: false,
});
// test: negativ
should(router._matchStaticURL('/not/added/route')).be.exactly(null);
// cleanup
router.destroy();
});
});
describe('_matchDynamicURL method', () => {
it('should return matchedObject by passed fragment', () => {
// setup
let router = Router.create();
router._addRoute('/{{a}}/b/{{c}}/d', 'route1');
router._addRoute('?id={{id}}&name={{name}}', 'route2');
// test 1: positiv
router._matchDynamicURL('/foo/b/bar/d').should.containEql({
name: 'route1',
type: 'dynamic',
route: '/{{a}}/b/{{c}}/d',
regex: '\\/(?<a>[\\d\\w?()|{}_.,-]+)\\/b\\/(?<c>[\\d\\w?()|{}_.,-]+)\\/d',
params: {
a: 'foo',
c: 'bar',
},
fragment: '/foo/b/bar/d',
cache: false,
});
// test 2: positiv (with queryString)
router._matchDynamicURL('/a/b/c/page?id=26&name=mars&update=true').should.containEql({
name: 'route2',
type: 'dynamic',
route: '?id={{id}}&name={{name}}',
regex: '\\?id=(?<id>[\\d\\w?()|{}_.,-]+)&name=(?<name>[\\d\\w?()|{}_.,-]+)',
params: {
id: 26,
name: 'mars',
},
fragment: '/a/b/c/page?id=26&name=mars&update=true',
cache: false,
});
// test: negativ
should(router._matchDynamicURL('/not/added/route')).be.exactly(null);
// cleanup
router.destroy();
});
});
describe('_matchURL method', () => {
let router = null;
it('should return only valid value from _matchStaticURL by passed static path', () => {
// setup
router = Router.create();
router._addRoute('/some/static/path', 'route1');
// test: it should call _matchStaticURL
router._matchURL('/some/static/path');
router._matchStaticURL.returnValues[0].name.should.be.equal('route1');
router._matchDynamicURL.returnValues.should.be.length(0);
should(router._getRouteCache.returnValues[0]).be.null();
});
it('should return only valid value from _matchDynamicURL by passed dynamic path', () => {
// setup
router = Router.create();
router._addRoute('/{{dynamic}}/b/{{path}}/d', 'route2');
// test: it should call _matchDynamicURL
router._matchURL('/hey/b/there/d');
should(router._matchStaticURL.returnValues[0]).be.null();
router._matchDynamicURL.returnValues[0].name.should.be.equal('route2');
router._matchDynamicURL.returnValues[0].cache.should.be.false();
should(router._getRouteCache.returnValues[0]).be.null();
});
it('should return on second call from cache by passed dynamic path', () => {
// setup
router = Router.create();
router._addRoute('/{{dynamic}}/b/{{path}}/d', 'route2');
// test: returned not from cache
router._matchURL('/hey/b/there/d');
router._matchDynamicURL.returnValues[0].cache.should.be.false();
// test: returned from cache
router._matchURL('/hey/b/there/d');
router._getRouteCache.returnValues[1].cache.should.be.true();
// cleanup
router.destroy();
});
// setup
beforeEach(() => {
sinon.spy(Router.prototype, '_matchStaticURL');
sinon.spy(Router.prototype, '_matchDynamicURL');
sinon.spy(Router.prototype, '_getRouteCache');
});
afterEach(() => {
router._matchStaticURL.restore();
router._matchDynamicURL.restore();
router._getRouteCache.restore();
router.destroy();
});
});
describe('_applyActionEvent (integration test) method', () => {
/**
* We check only pushstate inside of _applyActionEvent,
* all other methods are tested.
*/
it('should call pushstate if click event passed', () => {
// setup
let router = Router.create({ event_action: 'click a' });
let spy_pushState = sinon.spy(router, "pushState");
// mock click event
let event = new Event('click', {
target: {
href: 'http://www.domain.com/some/path.html',
}
});
// test
router._applyActionEvent(event);
spy_pushState.callCount.should.equal(1);
// cleanup
router.pushState.restore();
router.destroy();
});
it('should not call pushstate if pushstate event passed', () => {
// setup
let router = Router.create({ event_action: 'click a' });
let spy_pushState = sinon.spy(router, "pushState");
// mock pushState vent
let event = new Event('pushState', {
target: {
href: 'http://www.domain.com/some/path.html',
}
});
// test
router._applyActionEvent(event);
spy_pushState.callCount.should.equal(0);
// cleanup
router.pushState.restore();
router.destroy();
});
});
describe('_getDefinedEventAction method', () => {
it('should get defined event action', () => {
//setup
let router = Router.create({
event_action: 'my-action pattern'
});
// test
router._getDefinedEventAction().should.be.equal('my-action');
// cleanup
router.destroy();
});
});
describe('_isDefinedEventAction method', () => {
it('should check if passed event_type is euqal to our defined event type', () => {
// setup
let router = Router.create({
event_action: 'my-action pattern'
});
// test
router._isDefinedEventAction('my-action').should.be.true();
router._isDefinedEventAction('other-action').should.be.false();
// cleanup
router.destroy();
});
});
describe('_getCurrentHref method', () => {
it('should get current href by passed event', () => {
// mock location ( see above import)
let locationMock = new Location({
href : 'http://mockdomain.com/event/pushState/href.html'
});
// setup
let router = Router.create({
event_action: 'myEvent a',
location: locationMock,
});
//*** Test click event ***
// mock click event ( see above import)
let eventClickMock = new Event('myEvent', {
target: {
href: 'http://mockdomain.com/event/click/href.html'
}
});
router._getCurrentHref(eventClickMock).should.be.equal('http://mockdomain.com/event/click/href.html');
//*** Test pushstate event ***
// mock pushstate event ( see above import)
let eventPushstateMock = new Event('pushstate');
router._getCurrentHref(eventPushstateMock).should.be.equal('http://mockdomain.com/event/pushState/href.html');
// cleanup
router.destroy();
});
});
describe('_addPromise method', () => {
it('should add and create promise collection on prototype.promise', () => {
// setup
let router = Router.create();
let promise1 = router._addPromise('name1');
let promise2 = router._addPromise('name2');
// test 1
promise1.should.be.Promise();
promise2.should.be.Promise();
// test 2
router.promise.should.have.propertyByPath('name1', 0).and.is.Function(); // resolve function
router.promise.should.have.propertyByPath('name2', 0).and.is.Function(); // resolve function
// cleanup: resolve promises
router.promise.name1[0]();
router.promise.name2[0]();
// cleanup: destroy router
router.destroy();
});
});
describe('_urlFragmentChanged method', () => {
it('should check if fragment is changed in combination with _setURLFragment', () => {
// setup
let router = Router.create();
// test 1
router._setURLFragment('/');
router._urlFragmentChanged('/some/url/fragment').should.be.true();
// test 2
router._setURLFragment('/some/url/fragment');
router._urlFragmentChanged('/some/url/fragment').should.be.false();
// cleanup
router.destroy();
});
});
describe('_isFloat', () => {
it('should check if passed value is number or not', () => {
// setup
let router = Router.create();
// test positiv: integer
router._isNumeric(123).should.be.true();
router._isNumeric('123').should.be.true();
router._isNumeric(123.45).should.be.true();
router._isNumeric('123.45').should.be.true();
// test negativ: integer
router._isNumeric('123.45a').should.be.false();
router._isNumeric('123b').should.be.false();
// cleanup
router.destroy();
});
});
describe('createURL method', () => {
it('should return url object with additional "fragment" property', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// test
let url = router.createURL('http://www.mydomain.com/path/to/somewhere.html?a=1&b=2');
url.fragment.should.equal('/path/to/somewhere.html?a=1&b=2');
// cleanup
router.destroy();
});
});
describe('Promise method', () => {
it('should return promise object', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// test
router.Promise(function(){}).should.be.Promise();
// cleanup
router.destroy();
});
});
describe('addRouteListener method', () => {
it('should do the same like prototype.on', () => {
// It´s not necessary to test it, it´s almost the same
// like prototype.on. The only difference is
// the argument signature
});
});
describe('stop method', () => {
it('it should stop if router running', () => {
// setup
sinon.spy(Router.prototype, '_applyActionEvent');
let router = Router.create();
router.on('action', ::router._onAction);
// test
router.stop();
router.trigger('action');
router._applyActionEvent.callCount.should.be.equal(0);
//cleanup
router._applyActionEvent.restore();
router.destroy();
});
});
describe('start method', () => {
it('it should start if router stoped', () => {
// setup
sinon.spy(Router.prototype, '_applyActionEvent');
let router = Router.create();
router.on('action', ::router._onAction);
// test 1
router.stop();
router.trigger('action');
router._applyActionEvent.callCount.should.be.equal(0);
// test 2
router.start();
router.trigger('action');
router._applyActionEvent.callCount.should.be.equal(1);
// cleanup
router._applyActionEvent.restore();
router.destroy();
});
});
describe('whoami method', () => {
it('it should return route information by passed fragment', () => {
// setup
let router = Router.create();
router.on('myRoute1 /some/path.html', () => {});
router.on('myRoute2 /some/{{integer}}/{{float}}/path.html', () => {});
// test 1 - positiv
router.whoami('/some/path.html').should.be.containEql({
name: 'myRoute1',
type: 'static',
regex: null,
cache: false,
});
// test 2 - positiv
router.whoami('/some/123/4.34/path.html').should.be.containEql({
name: 'myRoute2',
type: 'dynamic',
fragment: '/some/123/4.34/path.html',
route: '/some/{{integer}}/{{float}}/path.html',
regex: '\\/some\\/(?<integer>[\\d\\w?()|{}_.,-]+)\\/(?<float>[\\d\\w?()|{}_.,-]+)\\/path\\.html',
params: {
integer: 123,
float: 4.34,
},
cache: false,
});
// test 3 - negativ
should(router.whoami('/unknown/path.html')).be.exactly(null);
// cleanup
router.destroy();
});
});
describe('which method', () => {
it('it should return route information by passed name', () => {
// setup
let router = Router.create();
router.on('myRoute1 /some/{{integer}}/{{float}}/path.html', () => {});
// test 1 - positiv
router.which('myRoute1').should.be.containEql({
name: 'myRoute1',
type: 'dynamic',
fragment: null,
route: '/some/{{integer}}/{{float}}/path.html',
regex: '\\/some\\/(?<integer>[\\d\\w?()|{}_.,-]+)\\/(?<float>[\\d\\w?()|{}_.,-]+)\\/path\\.html',
params: null,
cache: false,
});
// test 2 - negativ
should(router.which('unknownRoute')).be.exactly(null);
// cleanup
router.destroy();
});
});
describe('constructURL method', () => {
it('should construct url by passed static routename', () => {
// setup
let router = Router.create();
router.on('myRoute1 /some/static/path.html', () => {});
// test 1
router.constructURL('myRoute1').should.be.equal('/some/static/path.html');
// test 2 - ingores params
router.constructURL('myRoute1', { a:1, b:2 }).should.be.equal('/some/static/path.html');
// test 3 - throw
(() => { router.constructURL() }).should.throw();
// test 4 - throw
(() => { router.constructURL('myRoute99') }).should.throw();
(() => { router.constructURL('myRoute99', {c:3, d:4}) }).should.throw();
// cleanup
router.destroy();
});
it('should construct url by passed dynmic routename and params ', () => {
});
});
describe('go method', () => {
it('should go by passed static routename', () => {
let router = Router.create();
router.on('myRoute1 /some/static/path.html', () => {});
sinon.spy(router, "pushState");
// test 1
router.go('myRoute1');
router.pushState.callCount.should.be.equal(1);
// test 2 - ingores params
router.go('myRoute1', { a:1, b:2 });
router.pushState.callCount.should.be.equal(2);
// test 3 - throw
(() => { router.go() }).should.throw();
// test 4 - throw
(() => { router.go('myRoute99') }).should.throw();
(() => { router.go('myRoute99', {c:3, d:4}) }).should.throw();
router.pushState.restore();
router.destroy();
});
it('should go by passed dynmic routename and params ', () => {
});
});
describe('on method, no arguments passed', () => {
it('should throw an error', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// test
(() => { router.on() }).should.throw();
// cleanup
router.destroy();
});
});
describe('on method, no handler passed', () => {
it('should not throw an error', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// test
(() => { router.on('urlchange') }).should.not.throw();
// cleanup
router.destroy();
});
});
describe('on method, eventType and handler passed', () => {
it('should not throw an error', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// test
(() => { router.on('someevent', () => {}) }).should.not.throw();
// cleanup
router.destroy();
});
});
describe('on method, passed only eventType', () => {
it('should return promise', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// test
router.on('urlchange').should.be.Promise();
router.on('urlchange').then(() => {}).should.be.Promise();
// cleanup
router.destroy();
});
});
describe('on method, passed eventType and handler', () => {
it('should return null', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// test
should(router.on('my route', (event) => {})).be.null();
// cleanup
router.destroy();
});
});
describe('on method', () => {
it('should handle promise or handler correctly', (done) => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
let spy_handler = sinon.spy(() => {});
router.on('urlchange', spy_handler);
// test if promise resolved
router.on('urlchange').should.be.finally.propertyByPath('thats').eql('nice');
router.on('urlchange').should.be.finally.propertyByPath('thats').eql('nice');
router.on('urlchange').then(({thats}) => {
return `#${thats}#`;
}).should.be.finally.eql('#nice#');
// test promise count
router.promise.urlchange.should.be.length(3);
// test triggering of urlchange
router.trigger('urlchange', { thats: 'nice' });
spy_handler.callCount.should.equal(1);
router.promise.urlchange.should.be.length(0);
// test with trigger args
router.trigger('urlchange', { hello: 'world' });
spy_handler.callCount.should.equal(2);
spy_handler.args[1][0].should.propertyByPath('hello').eql('world');
// need this timeout for promise tests
setTimeout(() => {
// cleanup
router.destroy();
done();
}, 10);
});
});
describe('on method, registered many events', () => {
it('should assign routes correctly', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
router.on('Startpage /index.html');
router.on('Resultpage /results.html');
router.on('Detailpage /details.html');
router.on('Configurator /configurator.html');
// test routes count
Object.keys(router._getRoutes('static')).should.be.length(4);
// should throw error because route is already exists
(() => { router.on('Detailpage /details.html'); }).should.throw();
// routes count must be the same
Object.keys(router._getRoutes('static')).should.be.length(4);
// cleanup
router.destroy();
});
});
describe('on method, if explicitly triggering without params', () => {
it('should trigger correct handler', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// spyies
let spy_startpage_handler = sinon.spy(() => {});
let spy_resultpage_handler = sinon.spy(() => {});
router.on('Startpage /index.html', spy_startpage_handler);
router.on('Resultpage /results.html', spy_resultpage_handler);
router.trigger('Startpage');
router.trigger('Resultpage');
router.trigger('Resultpage');
spy_startpage_handler.callCount.should.equal(1);
spy_resultpage_handler.callCount.should.equal(2);
// cleanup
router.destroy();
});
});
describe('on method, explicitly triggering with params', () => {
it('should trigger correct handler', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
// spyies
let spy_configurator_handler = sinon.spy(() => {});
router.on('Product /product/{{id}}-{{name}}.html', spy_configurator_handler);
// test: positiv
router.trigger('Product', { id: 123, name: 'foo' });
router.trigger('Product', { id: 456, name: 'bar' });
spy_configurator_handler.callCount.should.equal(2);
// cleanup
router.destroy();
});
});
describe('on method, passed handler as href', () => {
it.skip('should route to href', () => {
// setup
let router = Router.create({
scope: document.createElement('div'),
});
router.on('Google /index.html', 'https://www.google.de');
router.on('AbsoluteURL /absolute.html', '/absolute-internal-url.html');
router.on('RelativeURL /relative.html', 'relative-internal-url.html');
router.trigger('Google');
router.trigger('AbsoluteURL');
router.trigger('RelativeURL');
});
});
describe('_onUrlchange method', () => {
it('should call registered callback on element.click()', () => {
element.querySelector('.foo').click();
element.querySelector('.bar').click();
element.querySelector('.baz').click();
element.querySelector('.baz').click();
spy_urlchange.callCount.should.equal(3);
spy_urlchange.args[0][0].fragment.should.equal('/index');
spy_urlchange.args[1][0].fragment.should.equal('/index/details');
spy_urlchange.args[2][0].fragment.should.equal('/index/details?a=1&b=2');
});
it('should call registered callback on router.back()', (done) => {
element.querySelector('.foo').click();
element.querySelector('.bar').click();
element.querySelector('.baz').click();
router.back();
router.back();
router.back();
setTimeout(() => {
spy_urlchange.args[0][0].fragment.should.equal('/index');
spy_urlchange.args[1][0].fragment.should.equal('/index/details');
spy_urlchange.args[2][0].fragment.should.equal('/index/details?a=1&b=2');
spy_urlchange.args[3][0].fragment.should.equal('/index/details');
spy_urlchange.args[4][0].fragment.should.equal('/index');
done();
}, 100);
});
// Setup
let router, spy_onUrlchange, spy_urlchange, element;
beforeEach(() => {
spy_onUrlchange = sinon.spy(Router.prototype, "_onUrlchange");
spy_urlchange = sinon.spy(() => {});
element = document.createElement("div");
router = Router.create({
scope: element,
});
router.on('urlchange', spy_urlchange);
element.classList.add('anchor-container');
element.innerHTML = `
<a class="foo" href="/index"> Index </a>
<a class="bar" href="/index/details"> Details </a>
<a class="baz" href="/index/details?a=1&b=2"> Params </a>
`;
document.body.appendChild(element);
});
afterEach(() => {
// cleanup check
router.destroy();
document.body.removeChild(element);
router._onUrlchange.restore();
});
});
describe('on method', () => {
it('should match registered static and dynamic url route', (done) => {
/**
* Static Test
*/
// register
router.on('Index /index.html', spy_index_handler);
router.on('SomePathURL /some/path/url.html', spy_somePathURL_handler);
// trigger events by clicking
element.querySelector('.index').click();
element.querySelector('.some-path-url').click();
element.querySelector('.not-registered-url').click();
element.querySelector('.some-path-url').click();
// test
spy_index_handler.callCount.should.equal(1);
spy_somePathURL_handler.callCount.should.equal(2);
/**
* Dynamic Test
*/
// register
router.on('SomeDynamicURL /{{name}}/{{surname}}/123', ({ params, fragment }) => {
params.name.should.be.equal('serkan');
params.surname.should.be.equal('sipahi');
fragment.should.be.equal('/serkan/sipahi/123');
done();
});
// trigger events by clicking
element.querySelector('.some-dynamic-path').click();
});
it('should trigger registered promise event', (done) => {
// test 1
router.on('SomePathURL /some/path/url.html').then(({fragment, id}) => {
return `${fragment}?id=${id}`;
}).should.be.finally.eql('/some/path/url.html?id=123');
router.trigger('SomePathURL', { fragment: '/some/path/url.html', id: 123 });
setTimeout(done, 200);
});
// Setup
let element, spy_index_handler, spy_somePathURL_handler, spy_dynamicPath_handler, router;
beforeEach(() => {
router = Router.create({
scope: element,
});
spy_index_handler = sinon.spy(() => {});
spy_somePathURL_handler = sinon.spy(() => {});
spy_dynamicPath_handler = sinon.spy(() => {});
element = document.createElement("div");
element.classList.add('anchor-container');
element.innerHTML = `
<a class="index" href="/index.html"> Index </a>
<a class="some-path-url" href="/some/path/url.html"> Some path URL </a>
<a class="some-dynamic-path" href="/serkan/sipahi/123"> Some Dynamic URL </a>
<a class="not-registered-url" href="/not/registered/url.html"> Not registered URL </a>
`;
document.body.appendChild(element);
});
afterEach(() => {
// cleanup
document.body.removeChild(element);
router.destroy();
});
});
describe('off method', () => {
it('should remove event by passed eventType', () => {
});
it('should throw error if not eventType passed', () => {
});
});
describe('destroy method', () => {
it('should remove all registered events', () => {
});
});
describe('create() function', () => {
it('should register many routes at once', () => {
});
it('should bind scope to handlers', () => {
});
});
});
| test/libs/router.spec.js: implement test for Router.prototype._constructDynamicURL
| test/libs/router.spec.js | test/libs/router.spec.js: implement test for Router.prototype._constructDynamicURL | <ide><path>est/libs/router.spec.js
<ide>
<ide> });
<ide>
<add> describe('_constructDynamicURL', () => {
<add>
<add> it('should construct url by passt dynamic routename', () => {
<add>
<add> // setup
<add> let routeObject = null;
<add> let router = Router.create();
<add> router._addRoute('/person/{{name}}/{{surename}}/id/{{id}}', 'myRoute1');
<add> router._addRoute('?page={{page}}&id={{id}}', 'myRoute2');
<add>
<add> // test 1
<add> routeObject = router.which('myRoute1');
<add> router._constructDynamicURL(routeObject.route,
<add> { name: 'serkan', surename: 'sipahi', id: 333 }
<add> ).should.equal('/person/serkan/sipahi/id/333');
<add>
<add> // test 2
<add> routeObject = router.which('myRoute2');
<add> router._constructDynamicURL(routeObject.route,
<add> { page: 'details', id: 999 }
<add> ).should.equal('?page=details&id=999');
<add>
<add> // cleanup
<add> router.destroy();
<add>
<add> });
<add>
<add> it('should throw error if something gone wrong', () => {
<add>
<add> // setup
<add> let router = Router.create();
<add> router._addRoute('/person/{{name}}/{{surename}}/id/{{id}}', 'myRoute1');
<add> let routeObject = router.which('myRoute1');
<add>
<add> // should throw if not params passed
<add> (() => { router._constructDynamicURL(routeObject.route) }).should.throw();
<add>
<add> // should throw if param is missing
<add> (() => { router._constructDynamicURL(routeObject.route, { id: 1 }) }).should.throw();
<add>
<add> // cleanup
<add> router.destroy();
<add>
<add> });
<add>
<add> });
<add>
<ide> describe('constructURL method', () => {
<ide>
<ide> it('should construct url by passed static routename', () => { |
|
Java | apache-2.0 | 92bcbe52c1e8b9e920b94bfb157c1f420e0a4c0a | 0 | googlegsa/documentum | // Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.adaptor.documentum;
import static com.google.enterprise.adaptor.documentum.JdbcFixture.dropAllObjects;
import static com.google.enterprise.adaptor.documentum.JdbcFixture.executeUpdate;
import static com.google.enterprise.adaptor.documentum.JdbcFixture.getConnection;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;
import com.google.common.collect.UnmodifiableIterator;
import com.google.enterprise.adaptor.Acl;
import com.google.enterprise.adaptor.Acl.InheritanceType;
import com.google.enterprise.adaptor.AdaptorContext;
import com.google.enterprise.adaptor.Config;
import com.google.enterprise.adaptor.DocId;
import com.google.enterprise.adaptor.DocIdEncoder;
import com.google.enterprise.adaptor.DocIdPusher;
import com.google.enterprise.adaptor.DocIdPusher.Record;
import com.google.enterprise.adaptor.GroupPrincipal;
import com.google.enterprise.adaptor.InvalidConfigurationException;
import com.google.enterprise.adaptor.Principal;
import com.google.enterprise.adaptor.Request;
import com.google.enterprise.adaptor.UserPrincipal;
import com.google.enterprise.adaptor.documentum.DocumentumAdaptor.CaseSensitivityType;
import com.google.enterprise.adaptor.documentum.DocumentumAdaptor.Checkpoint;
import com.documentum.com.IDfClientX;
import com.documentum.fc.client.DfPermit;
import com.documentum.fc.client.IDfACL;
import com.documentum.fc.client.IDfClient;
import com.documentum.fc.client.IDfCollection;
import com.documentum.fc.client.IDfEnumeration;
import com.documentum.fc.client.IDfFolder;
import com.documentum.fc.client.IDfGroup;
import com.documentum.fc.client.IDfObjectPath;
import com.documentum.fc.client.IDfPermit;
import com.documentum.fc.client.IDfPermitType;
import com.documentum.fc.client.IDfQuery;
import com.documentum.fc.client.IDfSession;
import com.documentum.fc.client.IDfSessionManager;
import com.documentum.fc.client.IDfSysObject;
import com.documentum.fc.client.IDfType;
import com.documentum.fc.client.IDfUser;
import com.documentum.fc.client.IDfVirtualDocument;
import com.documentum.fc.client.IDfVirtualDocumentNode;
import com.documentum.fc.common.DfException;
import com.documentum.fc.common.IDfAttr;
import com.documentum.fc.common.IDfId;
import com.documentum.fc.common.IDfLoginInfo;
import com.documentum.fc.common.IDfTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
// TODO(bmj): Add tests to test the exception handling.
// TODO(bmj): Add tests that call getDocIds and getModifiedDocIds with
// expected returns for all three items: documents, groups, and ACLs.
/** Unit tests for DocumentAdaptor class. */
public class DocumentumAdaptorTest {
  // Parameterizes group-feed tests: push only local groups vs. all groups.
  private static enum LocalGroupsOnly { TRUE, FALSE };
  // Formats timestamps for rows inserted into the mock tables.
  // NOTE(review): SimpleDateFormat is not thread-safe; acceptable here since
  // JUnit runs these tests single-threaded.
  private static final SimpleDateFormat dateFormat =
      new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
  // Canned timestamps used when inserting mock repository rows.
  private static final String EPOCH_1970 = "1970-01-01 00:00:00";
  private static final String JAN_1970 = "1970-01-01 02:03:04";
  private static final String FEB_1970 = "1970-02-01 02:03:04";
  private static final String MAR_1970 = "1970-03-01 02:03:04";
  // Default start path; setUp() guarantees this folder exists so init() passes.
  private static final String START_PATH = "/Folder1/path1";
  // Object id of the default ACL used by tests.
  private static final String DEFAULT_ACL = "45DefaultACL";
  // DDL for the H2 tables that emulate the Documentum repository schema.
  // Columns prefixed with "mock_" are test artifacts, not real DCTM columns.
  private static final String CREATE_TABLE_ACL = "create table dm_acl "
      + "(r_object_id varchar, r_accessor_name varchar, "
      + "r_accessor_permit int, r_permit_type int, r_is_group boolean)";
  private static final String CREATE_TABLE_AUDITTRAIL =
      "create table dm_audittrail "
      + "(r_object_id varchar, audited_obj_id varchar, chronicle_id varchar, "
      + "event_name varchar, time_stamp_utc timestamp, attribute_list varchar)";
  private static final String CREATE_TABLE_AUDITTRAIL_ACL =
      "create table dm_audittrail_acl "
      + "(r_object_id varchar, chronicle_id varchar, audited_obj_id varchar, "
      + "event_name varchar, time_stamp_utc timestamp)";
  private static final String CREATE_TABLE_CABINET = "create table dm_cabinet "
      + "(r_object_id varchar, r_folder_path varchar, object_name varchar)";
  private static final String CREATE_TABLE_FOLDER = "create table dm_folder "
      // Note: mock_acl_id is ACL id for the folder, and is used to
      // create AclMock.
      + "(r_object_id varchar, r_folder_path varchar, mock_acl_id varchar)";
  private static final String CREATE_TABLE_GROUP = "create table dm_group "
      + "(r_object_id varchar, group_name varchar, group_source varchar, "
      + "groups_names varchar, users_names varchar, r_modify_date timestamp)";
  private static final String CREATE_TABLE_USER = "create table dm_user "
      + "(user_name varchar primary key, user_login_name varchar, "
      + "user_source varchar, user_ldap_dn varchar, r_is_group boolean, "
      + "user_state int DEFAULT 0)";
  private static final String CREATE_TABLE_SYSOBJECT =
      "create table dm_sysobject "
      + "(r_object_id varchar, r_modify_date timestamp, r_object_type varchar, "
      + "object_name varchar, a_content_type varchar, i_folder_id varchar, "
      + "r_is_virtual_doc boolean, "
      // Note: mock_content ia an artifact that stores the content as a string,
      // and mock_object_path is an artifact used to emulate FOLDER predicate,
      // and to assist getObjectByPath.
      + "mock_content varchar, mock_object_path varchar, "
      // Note: mock_acl_id is ACL id for the document, and is used to
      // create AclMock in SysObjectMock.
      + "mock_acl_id varchar )";
  /** Creates the mock repository tables and the default start-path folder. */
  @Before
  public void setUp() throws Exception {
    Principals.clearCache();
    executeUpdate(CREATE_TABLE_ACL, CREATE_TABLE_AUDITTRAIL,
        CREATE_TABLE_AUDITTRAIL_ACL, CREATE_TABLE_CABINET, CREATE_TABLE_FOLDER,
        CREATE_TABLE_GROUP, CREATE_TABLE_SYSOBJECT, CREATE_TABLE_USER);
    // Force the default test start path to exist, so we pass init().
    insertFolder(EPOCH_1970, "0bStartPath", START_PATH);
  }
  /** Drops all H2 objects so each test starts from an empty repository. */
  @After
  public void tearDown() throws Exception {
    dropAllObjects();
  }
  /** Returns a fully-populated test config bound to a fresh proxy context. */
  private Config getTestAdaptorConfig() {
    return initTestAdaptorConfig(ProxyAdaptorContext.getInstance());
  }
  /**
   * Populates {@code context}'s config with the default test values and
   * returns it. Individual tests override keys as needed.
   */
  private Config initTestAdaptorConfig(AdaptorContext context) {
    Config config = context.getConfig();
    config.addKey("documentum.username", "testuser");
    config.addKey("documentum.password", "testpwd");
    config.addKey("documentum.docbaseName", "testdocbase");
    config.addKey("documentum.displayUrlPattern", "http://webtop/drl/{0}");
    config.addKey("documentum.src", START_PATH);
    config.addKey("documentum.src.separator", ",");
    config.addKey("documentum.documentTypes", "dm_document");
    config.addKey("documentum.excludedAttributes", "");
    config.addKey("adaptor.namespace", "globalNS");
    config.addKey("documentum.windowsDomain", "");
    config.addKey("documentum.pushLocalGroupsOnly", "false");
    config.addKey("documentum.maxHtmlSize", "1000");
    config.addKey("documentum.cabinetWhereCondition", "");
    config.addKey("adaptor.caseSensitivityType", "");
    return config;
  }
  /**
   * Initialize adaptor using proxy clientX and proxy AdaptorContext.
   * Verifies that the proper user is set.
   * @throws DfException if DFC initialization can't establish connection
   * to Documentum repository.
   */
  @Test
  public void testDfcConnection() throws DfException {
    InitTestProxies proxyCls = new InitTestProxies();
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(proxyCls.getProxyClientX());
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    adaptor.init(context);
    assertEquals("testuser", proxyCls.username);
    // MockSensitiveValueDecoder just uppercases the input.
    assertEquals("TESTPWD", proxyCls.password);
    assertEquals("testdocbase", proxyCls.docbaseName);
    assertEquals(1, proxyCls.docbaseLoginInfoMap.size());
    assertEquals(1, proxyCls.docbaseSessionMap.size());
    // init() acquires and releases a session twice: once to validate the
    // connection, once to validate the start paths.
    List<String> expectedMethodCallSequence = Arrays.asList(
        "getLocalClient", "newSessionManager",
        "getLoginInfo", "setIdentity",
        "getSession", "release",
        "getSession", "release"
    );
    assertEquals(expectedMethodCallSequence, proxyCls.methodCallSequence);
    Set<String> expectedMethodCallSet =
        ImmutableSet.of("setUser", "setPassword", "getDFCVersion",
            "getServerVersion", "getObjectByPath");
    assertEquals(expectedMethodCallSet, proxyCls.methodCalls);
  }
  /** Verifies that a comma-separated documentum.src is split into paths. */
  @Test
  public void testInitStartPaths() throws DfException {
    InitTestProxies proxyCls = new InitTestProxies();
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(proxyCls.getProxyClientX());
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    config.overrideKey("documentum.src", "/Folder1/path1, /Folder2/path2,"
        + "/Folder3/path3");
    adaptor.init(context);
    assertEquals(Arrays.asList("/Folder1/path1", "/Folder2/path2",
        "/Folder3/path3"), adaptor.getStartPaths());
  }
private class InitTestProxies {
List <String> methodCallSequence = new ArrayList<String>();
Set <String> methodCalls = new HashSet<String>();
String serverVersion = "1.0.0.000 (Mock CS)";
IDfClient client = getProxyClient();
IDfLoginInfo loginInfo = getProxyLoginInfo();
IDfSessionManager sessionManager = getProxySessionManager();
Map<String, IDfLoginInfo> docbaseLoginInfoMap =
new HashMap<String, IDfLoginInfo>();
Map<String, IDfSession> docbaseSessionMap =
new HashMap<String, IDfSession>();
Map<String, String> folderPathIdsMap = new HashMap<String, String>() {
{
put("/Folder1/path1", "0b01081f80078d2a");
put("/Folder2/path2", "0b01081f80078d29");
put("/Folder3/path3", "0b01081f80078d28");
put("/Folder1/path1,/Folder2/path2,/Folder3/path3,/Folder4/path4",
"0b01081f80078d2b");
}
};
String username;
String password;
String docbaseName;
public void setServerVersion(String serverVersion) {
this.serverVersion = serverVersion;
}
public IDfClientX getProxyClientX() {
return Proxies.newProxyInstance(IDfClientX.class, new ClientXMock());
}
private class ClientXMock {
public String getDFCVersion() {
methodCalls.add(Proxies.getMethodName());
return "1.0.0.000 (Mock DFC)";
}
public IDfClient getLocalClient() {
methodCallSequence.add(Proxies.getMethodName());
return client;
}
public IDfLoginInfo getLoginInfo() {
methodCallSequence.add(Proxies.getMethodName());
return loginInfo;
}
}
public IDfClient getProxyClient() {
return Proxies.newProxyInstance(IDfClient.class, new ClientMock());
}
private class ClientMock {
public IDfSessionManager newSessionManager() {
methodCallSequence.add(Proxies.getMethodName());
return sessionManager;
}
}
public IDfLoginInfo getProxyLoginInfo() {
return Proxies.newProxyInstance(IDfLoginInfo.class, new LoginInfoMock());
}
private class LoginInfoMock {
public void setPassword(String password) {
methodCalls.add(Proxies.getMethodName());
InitTestProxies.this.password = password;
}
public void setUser(String username) {
methodCalls.add(Proxies.getMethodName());
InitTestProxies.this.username = username;
}
}
public IDfSessionManager getProxySessionManager() {
return Proxies.newProxyInstance(IDfSessionManager.class,
new SessionManagerMock());
}
private class SessionManagerMock {
public IDfSession getSession(String docbaseName) {
methodCallSequence.add(Proxies.getMethodName());
IDfSession session = docbaseSessionMap.get(docbaseName);
if (session == null) {
session =
Proxies.newProxyInstance(IDfSession.class, new SessionMock());
docbaseSessionMap.put(docbaseName, session);
}
return session;
}
public void release(IDfSession session) {
methodCallSequence.add(Proxies.getMethodName());
// TODO(sveldurthi): remove from the map to release the session
}
public void setIdentity(String docbaseName, IDfLoginInfo loginInfo) {
methodCallSequence.add(Proxies.getMethodName());
InitTestProxies.this.docbaseName = docbaseName;
docbaseLoginInfoMap.put(docbaseName, loginInfo);
}
}
private class SessionMock {
public String getServerVersion() {
methodCalls.add(Proxies.getMethodName());
return serverVersion;
}
public IDfSysObject getObjectByPath(String path) {
methodCalls.add(Proxies.getMethodName());
if (folderPathIdsMap.containsKey(path)) {
return Proxies.newProxyInstance(IDfSysObject.class,
new SysObjectMock(path));
} else {
return null;
}
}
public IDfType getType(String type) {
return Proxies.newProxyInstance(IDfType.class, new TypeMock(type));
}
}
private class SysObjectMock {
private String objectPath;
public SysObjectMock(String objectPath) {
this.objectPath = objectPath;
}
public IDfId getObjectId() {
String id = folderPathIdsMap.get(objectPath);
return Proxies.newProxyInstance(IDfId.class, new IdMock(id));
}
}
private class IdMock {
private String objectId;
public IdMock(String objectId) {
this.objectId = objectId;
}
public String toString() {
return objectId;
}
}
private class TypeMock {
private final String type;
private final ImmutableMap<String, String> superTypes =
ImmutableMap.of("dm_document", "dm_sysobject");
public TypeMock(String type) {
this.type = type;
}
public boolean isTypeOf(String otherType) {
if (type.startsWith(otherType)) {
return true;
}
String parent = superTypes.get(type);
while (parent != null) {
if (superTypes.get(type).startsWith(otherType)) {
return true;
}
parent = superTypes.get(parent);
}
return false;
}
}
}
  /** Verifies splitting documentum.src on the configured separator. */
  @Test
  public void testParseStartPaths() {
    String path1 = "Folder1/path1";
    String path2 = "Folder2/path2";
    String path3 = "Folder3/path3";
    String startPaths = path1 + "," + path2 + "," + path3;
    List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
    assertEquals(ImmutableList.of(path1, path2, path3), paths);
  }
  /** Verifies that a non-default separator (":") is honored. */
  @Test
  public void testParseStartPathsSeperator() {
    String path1 = "Folder1/path1";
    String path2 = "Folder2/path2";
    String path3 = "Folder3/path3";
    String separator = ":";
    String startPaths = path1 + separator + path2 + separator + path3;
    List<String> paths =
        DocumentumAdaptor.parseStartPaths(startPaths, separator);
    assertEquals(ImmutableList.of(path1, path2, path3), paths);
  }
  /**
   * Verifies the separator is treated as a literal string, not a regular
   * expression: "[:;,]" only splits on the exact five-character sequence.
   */
  @Test
  public void testParseStartPathsNotUsingRegExSeparator() {
    String path1 = "Folder1/path1";
    String path2 = "Folder2/path2";
    String path3 = "Folder3/path3";
    String startPaths = path1 + ";" + path2 + ":" + path3 + ",";
    List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, "[:;,]");
    assertEquals(ImmutableList.of(startPaths), paths);
    startPaths = path1 + "[:;,]" + path2 + "[:;,]" + path3 + "[:;,]";
    paths = DocumentumAdaptor.parseStartPaths(startPaths, "[:;,]");
    assertEquals(ImmutableList.of(path1, path2, path3), paths);
  }
  /** Verifies that an empty separator leaves the value unsplit. */
  @Test
  public void testParseStartPathsBlankSeparator() {
    String path1 = "Folder1/path1";
    String path2 = "Folder2/path2";
    String path3 = "Folder3/path3";
    String startPaths = path1 + "," + path2 + "," + path3;
    List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, "");
    assertEquals(ImmutableList.of(startPaths), paths);
  }
  /** Verifies a single path with no separator parses as one entry. */
  @Test
  public void testParseStartPathsSinglePath() {
    String path1 = "Folder1/path1";
    String startPaths = path1;
    List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
    assertEquals(ImmutableList.of(path1), paths);
  }
  /** Verifies that empty path segments are dropped. */
  @Test
  public void testParseStartPathsEmptyPath() {
    String path1 = "Folder1/path1";
    String path2 = "Folder2/path2";
    String path3 = "";
    String startPaths = path1 + "," + path2 + "," + path3;
    List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
    assertEquals(ImmutableList.of(path1, path2), paths);
  }
  /** Verifies paths are trimmed but interior whitespace is preserved. */
  @Test
  public void testParseStartPathsWhiteSpacePath() {
    String path1 = "Folder 1/path 1";
    String path2 = " Folder 2/path 2 ";
    String path3 = "Folder 3/ path 3 ";
    String startPaths = path1 + "," + path2 + "," + path3;
    List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
    assertEquals(ImmutableList.of(path1.trim(), path2.trim(), path3.trim()),
        paths);
  }
  /** Verifies "/" round-trips through DocId conversion and path joining. */
  @Test
  public void testSlashAsStartPath() throws Exception {
    String root = "/";
    DocId docid = DocumentumAdaptor.docIdFromPath(root);
    assertEquals(root, DocumentumAdaptor.docIdToPath(docid));
    assertEquals("/foo", DocumentumAdaptor.docIdToPath(
        DocumentumAdaptor.docIdFromPath(root, "foo")));
  }
  /**
   * Initializes {@code adaptor} with the given source paths and separator.
   * A null {@code separator} leaves documentum.src.separator at its default.
   */
  private void initializeAdaptor(DocumentumAdaptor adaptor, String src,
      String separator) throws DfException {
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = context.getConfig();
    adaptor.initConfig(config);
    config.overrideKey("documentum.username", "testuser");
    config.overrideKey("documentum.password", "testpwd");
    config.overrideKey("documentum.docbaseName", "testdocbase");
    config.overrideKey("documentum.displayUrlPattern",
        "http://webtopurl/drl/{0}");
    config.overrideKey("documentum.src", src);
    if (separator != null) {
      config.overrideKey("documentum.src.separator", separator);
    }
    config.overrideKey("documentum.documentTypes", "dm_document");
    adaptor.init(context);
  }
  /** Verifies a configured ";" separator splits the start paths. */
  @Test
  public void testConfigSeparator() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder1/path1";
    String path2 = "/Folder2/path2";
    String path3 = "/Folder3/path3";
    String path4 = "/Folder4/path4";
    String startPaths = path1 + ";" + path2 + ";" + path3 + ";" + path4;
    initializeAdaptor(adaptor, startPaths, ";");
    assertEquals(ImmutableList.of(path1, path2, path3, path4),
        adaptor.getStartPaths());
  }
  /** Verifies a blank separator value disables splitting entirely. */
  @Test
  public void testConfigBlankSeparatorValue() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder1/path1";
    String path2 = "/Folder2/path2";
    String path3 = "/Folder3/path3";
    String path4 = "/Folder4/path4";
    String startPaths = path1 + "," + path2 + "," + path3 + "," + path4;
    initializeAdaptor(adaptor, startPaths, "");
    assertEquals(ImmutableList.of(startPaths), adaptor.getStartPaths());
  }
  /** Verifies the separator defaults to "," when not configured. */
  @Test
  public void testConfigNoSeparatorEntry() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder1/path1";
    String path2 = "/Folder2/path2";
    String path3 = "/Folder3/path3";
    String path4 = "/Folder4/path4";
    String startPaths = path1 + "," + path2 + "," + path3 + "," + path4;
    initializeAdaptor(adaptor, startPaths, null);
    assertEquals(ImmutableList.of(path1, path2, path3, path4),
        adaptor.getStartPaths());
  }
private void initValidStartPaths(DocumentumAdaptor adaptor,
String... paths) throws DfException {
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.src", Joiner.on(",").join(paths));
adaptor.init(context);
}
  /** Verifies the repository root "/" is accepted as a start path. */
  @Test
  public void testValidateStartPathsRootPath() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/";
    initValidStartPaths(adaptor, path1);
    assertEquals(ImmutableList.of(path1), adaptor.getValidatedStartPaths());
  }
  /** Verifies all paths survive validation when all exist in the docbase. */
  @Test
  public void testValidateStartPathsAllValid() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder1/path1";
    String path2 = "/Folder2/path2";
    String path3 = "/Folder3/path3";
    initValidStartPaths(adaptor, path1, path2, path3);
    assertEquals(ImmutableList.of(path1, path2, path3),
        adaptor.getValidatedStartPaths());
  }
  // Paths under /Folder4 and /Folder5 are not in InitTestProxies'
  // folderPathIdsMap, so validation drops them.
  /** Verifies nonexistent trailing paths are filtered out. */
  @Test
  public void testValidateStartPathsSomeValid() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder1/path1";
    String path2 = "/Folder2/path2";
    String path3 = "/Folder4/path3";
    String path4 = "/Folder4/path4";
    initValidStartPaths(adaptor, path1, path2, path3, path4);
    assertEquals(ImmutableList.of(path1, path2),
        adaptor.getValidatedStartPaths());
  }
  /** Verifies nonexistent leading paths are filtered out. */
  @Test
  public void testValidateStartPathsSomeInvalid() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder4/path4";
    String path2 = "/Folder5/path5";
    String path3 = "/Folder1/path1";
    String path4 = "/Folder2/path2";
    initValidStartPaths(adaptor, path1, path2, path3, path4);
    assertEquals(ImmutableList.of(path3, path4),
        adaptor.getValidatedStartPaths());
  }
  /** Verifies leading slashes are added and trailing slashes stripped. */
  @Test
  public void testValidateStartPathsNormalizePaths() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder1/path1/";
    String path2 = "Folder2/path2";
    String path3 = "Folder3/path3/";
    String path4 = "Folder5/path5";
    initValidStartPaths(adaptor, path1, path2, path3, path4);
    assertEquals(ImmutableList.of("/Folder1/path1", "/Folder2/path2",
        "/Folder3/path3"), adaptor.getValidatedStartPaths());
  }
  /** Verifies init() fails fast when no start path is valid. */
  @Test(expected = IllegalStateException.class)
  public void testValidateStartPathsNoneValid() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    String path1 = "/Folder1/path4";
    String path2 = "/Folder2/path5";
    String path3 = "/Folder3/path6";
    initValidStartPaths(adaptor, path1, path2, path3);
  }
  /**
   * Initializes an adaptor with the given display URL pattern; validation
   * failures surface as exceptions from init().
   */
  private void testValidateDisplayUrlPattern(String pattern)
      throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    config.overrideKey("documentum.displayUrlPattern", pattern);
    adaptor.init(context);
  }
  /** {0} (object id) substitution is valid. */
  @Test
  public void testValidateDisplayUrlPatternObjectId() throws DfException {
    testValidateDisplayUrlPattern("http://webtopurl/drl/{0}");
  }
  /** {1} (object path) substitution is valid. */
  @Test
  public void testValidateDisplayUrlPatternPath() throws DfException {
    testValidateDisplayUrlPattern("http://webtopurl/drl{1}");
  }
  /** An empty pattern is rejected. */
  @Test(expected = InvalidConfigurationException.class)
  public void testValidateDisplayUrlPatternEmptyPattern() throws DfException {
    testValidateDisplayUrlPattern("");
  }
  /** A pattern that does not form a valid URL is rejected. */
  @Test(expected = InvalidConfigurationException.class)
  public void testValidateDisplayUrlPatternBadPattern() throws DfException {
    testValidateDisplayUrlPattern("{0}tp://webtop/");
  }
  /** A pattern without any substitution placeholder is rejected. */
  @Test(expected = InvalidConfigurationException.class)
  public void testValidateDisplayUrlPatternNoSubstitutions()
      throws DfException {
    testValidateDisplayUrlPattern("http://webtop/");
  }
  /**
   * Asserts the DQL date-to-string function chosen for the given Content
   * Server version string: DATETOSTRING for pre-7.x servers,
   * DATETOSTRING_LOCAL for 7.x and later.
   */
  private void testDateToString(String version, String expected)
      throws DfException {
    InitTestProxies initProxies = new InitTestProxies();
    initProxies.setServerVersion(version);
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(initProxies.getProxyClientX());
    initializeAdaptor(adaptor, "/Folder1/path1", ";");
    assertEquals(expected, adaptor.dateToStringFunction);
  }
  @Test
  public void testDateToString_version6() throws DfException {
    testDateToString("6.5.0.033 Win32.SQLServer", "DATETOSTRING");
  }
  @Test
  public void testDateToString_version7() throws DfException {
    testDateToString("7.2.0000.0155 Win64.SQLServer", "DATETOSTRING_LOCAL");
  }
  @Test
  public void testDateToString_version75() throws DfException {
    testDateToString("7.5.0000.0100 Win32.SQLServer", "DATETOSTRING_LOCAL");
  }
  @Test
  public void testDateToString_version8() throws DfException {
    testDateToString("8.0.0000.0000 Win64.SQLServer", "DATETOSTRING_LOCAL");
  }
  @Test
  public void testDateToString_version10() throws DfException {
    testDateToString("10.0.0000.0010 Win64.SQLServer", "DATETOSTRING_LOCAL");
  }
/* Mock proxy classes backed by the H2 database tables. */
private class H2BackedTestProxies {
    /** Returns a proxy IDfClientX whose queries run against H2. */
    public IDfClientX getProxyClientX() {
      return Proxies.newProxyInstance(IDfClientX.class, new ClientXMock());
    }
    /** Entry point mock: hands out client, login info, and query proxies. */
    private class ClientXMock {
      public String getDFCVersion() {
        return "1.0.0.000 (Mock DFC)";
      }
      public IDfClient getLocalClient() {
        return Proxies.newProxyInstance(IDfClient.class, new ClientMock());
      }
      public IDfLoginInfo getLoginInfo() {
        return Proxies.newProxyInstance(IDfLoginInfo.class,
            new LoginInfoMock());
      }
      public IDfQuery getQuery() {
        return Proxies.newProxyInstance(IDfQuery.class, new QueryMock());
      }
    }
    private class ClientMock {
      public IDfSessionManager newSessionManager() {
        return Proxies.newProxyInstance(IDfSessionManager.class,
            new SessionManagerMock());
      }
    }
    /** Credentials are ignored; the H2 backend needs none. */
    private class LoginInfoMock {
      public void setPassword(String password) {
      }
      public void setUser(String username) {
      }
    }
    /** Captures the DQL and executes it (translated) against H2. */
    private class QueryMock {
      private String query;
      public void setDQL(String query) {
        this.query = query;
      }
      public IDfCollection execute(IDfSession session, int arg1)
          throws DfException {
        return Proxies.newProxyInstance(IDfCollection.class,
            new CollectionMock(query));
      }
    }
    /**
     * Wraps a JDBC ResultSet as an IDfCollection. The constructor rewrites
     * Documentum DQL constructs into H2-compatible SQL before executing.
     */
    private class CollectionMock {
      final Statement stmt;
      final ResultSet rs;
      // NOTE(review): the Statement's Connection is never closed here;
      // presumably getConnection() returns a shared in-memory H2 connection
      // torn down by dropAllObjects() — confirm.
      public CollectionMock(String query) throws DfException {
        try {
          stmt = getConnection().createStatement();
          // Translate DQL-isms into H2 SQL:
          //  - DATETOSTRING[_LOCAL] -> FORMATDATETIME, DATE() -> PARSEDATETIME
          //  - DQL date pattern -> java.text date pattern
          //  - TYPE(t) predicates -> r_object_type LIKE 't%'
          //  - FOLDER('path',descend) -> mock_object_path LIKE 'path%'
          //  - ENABLE(ROW_BASED) hint has no H2 equivalent; dropped.
          query = query.replaceAll("DATETOSTRING(_LOCAL)?", "FORMATDATETIME")
              .replace("DATE(", "PARSEDATETIME(")
              .replace("yyyy-mm-dd hh:mi:ss", "yyyy-MM-dd HH:mm:ss")
              .replaceAll("TYPE\\((dm_document_subtype|dm_sysobject_subtype|"
                  + "dm_document|dm_folder)\\)", "r_object_type LIKE '$1%'")
              .replace("FOLDER(", "(mock_object_path LIKE ")
              .replace("',descend", "%'")
              .replace("ENABLE(ROW_BASED)", "");
          rs = stmt.executeQuery(query);
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      // Repeating attributes are stored as comma-separated strings in H2.
      private String[] getRepeatingValue(String colName) throws DfException {
        String value = getString(colName);
        if (Strings.isNullOrEmpty(value)) {
          return new String[0];
        }
        return value.split(",");
      }
      public int getValueCount(String colName) throws DfException {
        return getRepeatingValue(colName).length;
      }
      public String getRepeatingString(String colName, int index)
          throws DfException {
        return getRepeatingValue(colName)[index];
      }
      public String getString(String colName) throws DfException {
        try {
          return rs.getString(colName);
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      public boolean next() throws DfException {
        try {
          return rs.next();
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      public void close() throws DfException {
        try {
          rs.close();
          stmt.close();
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
    }
    /** Stateless session manager; every session is backed by H2. */
    private class SessionManagerMock {
      public IDfSession newSession(String docbaseName) {
        return Proxies.newProxyInstance(IDfSession.class, new SessionMock());
      }
      public IDfSession getSession(String docbaseName) {
        return newSession(docbaseName);
      }
      public void release(IDfSession session) {
      }
      public void setIdentity(String docbaseName, IDfLoginInfo loginInfo) {
      }
    }
    /** Implements the IDfSession lookups used by the adaptor against H2. */
    private class SessionMock {
      public String getServerVersion() {
        return "1.0.0.000 (Mock CS)";
      }
      /** Looks up an ACL by object id; fails the test if it doesn't exist. */
      public IDfACL getObject(IDfId id) throws DfException {
        String query = String.format(
            "SELECT r_object_id FROM dm_acl WHERE r_object_id = '%s'",
            id.toString());
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.first()) {
            return Proxies.newProxyInstance(IDfACL.class,
                new AclMock(id.toString()));
          } else {
            throw new AssertionError("Object ID " + id + " doesn't exist.");
          }
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      /**
       * Resolves a path via the mock_object_path artifact column; returns a
       * FolderMock for dm_folder types, a SysObjectMock otherwise, or null
       * when the path doesn't exist.
       */
      public IDfSysObject getObjectByPath(String path) throws DfException {
        String query = String.format(
            "SELECT *, mock_object_path AS r_folder_path "
            + "FROM dm_sysobject WHERE mock_object_path = '%s'", path);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.first()) {
            if (rs.getString("r_object_type").startsWith("dm_folder")) {
              return Proxies.newProxyInstance(IDfFolder.class,
                  new FolderMock(rs));
            } else {
              return Proxies.newProxyInstance(IDfSysObject.class,
                  new SysObjectMock(rs));
            }
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      /**
       * Emulates qualification lookup; only "dm_user ..." and "dm_group ..."
       * qualifications are supported, and the qualification is spliced
       * directly after FROM.
       */
      public Object getObjectByQualification(String query) throws DfException {
        if (Strings.isNullOrEmpty(query)) {
          return null;
        }
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery("SELECT * FROM " + query)) {
          if (rs.first()) {
            if (query.toLowerCase().startsWith("dm_user ")) {
              return Proxies.newProxyInstance(IDfUser.class, new UserMock(rs));
            } else if (query.toLowerCase().startsWith("dm_group ")) {
              return
                  Proxies.newProxyInstance(IDfGroup.class, new GroupMock(rs));
            }
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      /** Returns an enumeration over the object's parent folder ids. */
      public IDfEnumeration getObjectPaths(IDfId id) throws DfException {
        String query = String.format("SELECT i_folder_id FROM dm_sysobject "
            + "WHERE r_object_id = '%s'", id.toString());
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.next()) {
            return Proxies.newProxyInstance(IDfEnumeration.class,
                new EnumerationMock(rs.getString("i_folder_id")));
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      /** Looks up a folder by object id (the "specification"). */
      public IDfFolder getFolderBySpecification(String spec)
          throws DfException {
        if (Strings.isNullOrEmpty(spec)) {
          return null;
        }
        String query = String.format(
            "SELECT s.*, f.r_folder_path FROM dm_sysobject s "
            + "JOIN dm_folder f ON s.r_object_id = f.r_object_id "
            + "WHERE s.r_object_id = '%s'", spec);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.first()) {
            return
                Proxies.newProxyInstance(IDfFolder.class, new FolderMock(rs));
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      public IDfType getType(String type) {
        return Proxies.newProxyInstance(IDfType.class, new TypeMock(type));
      }
    }
    /**
     * IDfSysObject backed by one dm_sysobject row; snapshots all columns in
     * the constructor so the ResultSet need not stay open.
     */
    private class SysObjectMock {
      private final String id;
      private final String name;
      private final String type;
      private final String contentType;
      private final String content;
      private final String aclId;
      private final Date lastModified;
      private final boolean isVirtualDocument;
      private final Multimap<String, String> attributes;
      public SysObjectMock(ResultSet rs) throws SQLException {
        id = rs.getString("r_object_id");
        name = rs.getString("object_name");
        type = rs.getString("r_object_type");
        contentType = rs.getString("a_content_type");
        content = rs.getString("mock_content");
        aclId = rs.getString("mock_acl_id");
        lastModified = new Date(rs.getTimestamp("r_modify_date").getTime());
        isVirtualDocument = rs.getBoolean("r_is_virtual_doc");
        attributes = readAttributes(id);
      }
      public IDfId getObjectId() {
        return Proxies.newProxyInstance(IDfId.class, new IdMock(id));
      }
      public String getObjectName() {
        return name;
      }
      // Only the two attributes the adaptor reads are supported.
      public String getString(String attrName) {
        switch (attrName) {
          case "object_name": return name;
          case "r_object_id": return id;
          default: return null;
        }
      }
      public InputStream getContent() {
        if (content == null) {
          return null;
        }
        return new ByteArrayInputStream(content.getBytes(UTF_8));
      }
      public IDfType getType() {
        return Proxies.newProxyInstance(IDfType.class, new TypeMock(type));
      }
      public String getContentType() {
        return contentType;
      }
      // Only r_modify_date is a date attribute in the mock schema.
      public IDfTime getTime(String attr) {
        if (attr.equals("r_modify_date")) {
          return Proxies.newProxyInstance(IDfTime.class,
              new TimeMock(lastModified));
        } else {
          return null;
        }
      }
      public boolean isVirtualDocument() {
        return isVirtualDocument;
      }
      public IDfVirtualDocument asVirtualDocument(String lateBinding,
          boolean followRootAssembly) {
        return Proxies.newProxyInstance(IDfVirtualDocument.class,
            new VirtualDocumentMock(id));
      }
      public Enumeration<IDfAttr> enumAttrs() throws DfException {
        Vector<IDfAttr> v = new Vector<IDfAttr>();
        for (String name : attributes.keySet()) {
          v.add(Proxies.newProxyInstance(IDfAttr.class, new AttrMock(name)));
        }
        return v.elements();
      }
      public int getValueCount(String name) {
        return attributes.get(name).size();
      }
      public String getRepeatingString(String name, int index) {
        return new ArrayList<String>(attributes.get(name)).get(index);
      }
      public IDfACL getACL() {
        return Proxies.newProxyInstance(IDfACL.class,
            new AclMock(aclId.toString()));
      }
    }
    /** Virtual document whose children are rows with i_folder_id == vdocId. */
    private class VirtualDocumentMock {
      private final String vdocId;
      public VirtualDocumentMock(String vdocId) {
        this.vdocId = vdocId;
      }
      public IDfVirtualDocumentNode getRootNode() throws DfException {
        return Proxies.newProxyInstance(IDfVirtualDocumentNode.class,
            new VdocRootNodeMock(vdocId));
      }
    }
    /** Root node; loads child paths eagerly from H2. */
    private class VdocRootNodeMock {
      private final ArrayList<String> vdocChildren = new ArrayList<>();
      public VdocRootNodeMock(String vdocId) throws DfException {
        // Children of a vdoc reuse i_folder_id to point at the parent vdoc.
        String query = String.format("SELECT mock_object_path "
            + "FROM dm_sysobject WHERE i_folder_id = '%s'", vdocId);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          while (rs.next()) {
            vdocChildren.add(rs.getString("mock_object_path"));
          }
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      public int getChildCount() {
        return vdocChildren.size();
      }
      public IDfVirtualDocumentNode getChild(int index) {
        return Proxies.newProxyInstance(IDfVirtualDocumentNode.class,
            new VdocChildNodeMock(vdocChildren.get(index)));
      }
    }
    /** Child node; resolves its object lazily via a throwaway session. */
    private class VdocChildNodeMock {
      private final String childPath;
      public VdocChildNodeMock(String childPath) {
        this.childPath = childPath;
      }
      public IDfSysObject getSelectedObject() throws DfException {
        IDfSessionManager sessionManager =
            getProxyClientX().getLocalClient().newSessionManager();
        IDfSession session = sessionManager.getSession("foo");
        try {
          return (IDfSysObject) session.getObjectByPath(childPath);
        } finally {
          sessionManager.release(session);
        }
      }
    }
    /** Folder variant of SysObjectMock; adds folder paths and contents. */
    private class FolderMock extends SysObjectMock {
      private String[] folderPaths;
      public FolderMock(ResultSet rs) throws SQLException {
        super(rs);
        // r_folder_path holds a comma-separated list of paths.
        this.folderPaths = rs.getString("r_folder_path").split(",");
      }
      public int getFolderPathCount() {
        return folderPaths.length;
      }
      public String getFolderPath(int index) {
        return folderPaths[index];
      }
      public IDfCollection getContents(String colNames) throws DfException {
        String query = String.format(
            "SELECT %s FROM dm_sysobject WHERE i_folder_id = '%s'",
            colNames, getObjectId());
        return Proxies.newProxyInstance(IDfCollection.class,
            new CollectionMock(query));
      }
    }
    /**
     * IDfType with a fixed super-type table. isTypeOf uses startsWith as a
     * mock shortcut so "dm_document_subtype" matches "dm_document".
     */
    private class TypeMock {
      private final String type;
      private final ImmutableMap<String, String> superTypes =
          ImmutableMap.of("dm_document_subtype", "dm_document",
              "dm_document", "dm_sysobject",
              "dm_sysobject_subtype", "dm_sysobject",
              "dm_folder_subtype", "dm_folder",
              "dm_folder", "dm_sysobject");
      public TypeMock(String type) {
        this.type = type;
      }
      // Walks the super-type chain until a match or the chain ends.
      public boolean isTypeOf(String otherType) {
        if (type.startsWith(otherType)) {
          return true;
        }
        String parent = superTypes.get(type);
        while (!Strings.isNullOrEmpty(parent)) {
          if (parent.startsWith(otherType)) {
            return true;
          }
          parent = superTypes.get(parent);
        }
        return false;
      }
      public String getName() {
        return type;
      }
      public IDfType getSuperType() {
        if (superTypes.containsKey(type)) {
          return Proxies.newProxyInstance(IDfType.class,
              new TypeMock(superTypes.get(type)));
        } else {
          return null;
        }
      }
    }
    /** IDfTime wrapper around a fixed Date. */
    private class TimeMock {
      private final Date date;
      public TimeMock(Date date) {
        this.date = date;
      }
      public Date getDate() {
        return date;
      }
    }
    /** IDfId whose only behavior is toString() returning the raw id. */
    private class IdMock {
      private final String objectId;
      public IdMock(String objectId) {
        this.objectId = objectId;
      }
      public String toString() {
        return objectId;
      }
    }
    /** IDfAttr exposing only the attribute name. */
    private class AttrMock {
      private final String name;
      public AttrMock(String name) {
        this.name = name;
      }
      public String getName() {
        return name;
      }
    }
private class UserMock {
private String loginName;
private String source;
private String ldapDn;
private boolean isGroup;
public UserMock(ResultSet rs) throws SQLException {
loginName = rs.getString("user_login_name");
source = rs.getString("user_source");
ldapDn = rs.getString("user_ldap_dn");
isGroup = rs.getBoolean("r_is_group");
}
public String getUserLoginName() {
return loginName;
}
public String getUserSourceAsString() {
return source;
}
public String getUserDistinguishedLDAPName() {
return ldapDn;
}
public boolean isGroup() {
return isGroup;
}
}
private class GroupMock {
private String source;
public GroupMock(ResultSet rs) throws SQLException {
source = rs.getString("group_source");
}
public String getGroupSource() {
return source;
}
}
private class AccessorInfo {
String name;
int permitType;
int permit;
boolean isGroup;
AccessorInfo(String name, int permitType, int permit, boolean isGroup) {
this.name = name;
this.permitType = permitType;
this.permit = permit;
this.isGroup = isGroup;
}
String getName() {
return name;
}
int getPermitType() {
return permitType;
}
int getPermit() {
return permit;
}
boolean isGroup() {
return isGroup;
}
}
public class AclMock {
private String id;
List<AccessorInfo> accessorList = new ArrayList<AccessorInfo>();
public AclMock(String id) {
this.id = id;
try {
getAccessorInfo();
} catch (SQLException e) {
e.printStackTrace();
}
}
private void getAccessorInfo() throws SQLException {
try (Connection connection = getConnection();
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(
"select r_accessor_name, r_accessor_permit, "
+ "r_permit_type, r_is_group from dm_acl "
+ "where r_object_id = '" + id + "'")) {
while (rs.next()) {
String accessorName = rs.getString("r_accessor_name");
int accessorPermit = rs.getInt("r_accessor_permit");
int accessorPermitType = rs.getInt("r_permit_type");
boolean isGroup = rs.getBoolean("r_is_group");
if (!Strings.isNullOrEmpty(accessorName)) {
accessorList.add(new AccessorInfo(accessorName,
accessorPermitType, accessorPermit, isGroup));
}
}
}
}
public int getAccessorCount() {
return accessorList.size();
}
public String getAccessorName(int n) {
return accessorList.get(n).getName();
}
public int getAccessorPermitType(int n) {
return accessorList.get(n).getPermitType();
}
public int getAccessorPermit(int n) {
return accessorList.get(n).getPermit();
}
public boolean isGroup(int n) {
return accessorList.get(n).isGroup();
}
public IDfId getObjectId() {
return Proxies.newProxyInstance(IDfId.class, new IdMock(id));
}
}
public class EnumerationMock {
private final UnmodifiableIterator<String> iter;
public EnumerationMock(String folderIds) {
iter = Iterators.forArray(folderIds.split("\\s*,\\s*"));
}
public boolean hasMoreElements() throws DfException {
return iter.hasNext();
}
public IDfObjectPath nextElement() throws DfException {
return Proxies.newProxyInstance(IDfObjectPath.class,
new ObjectPathMock(iter.next()));
}
}
    /** Mock of IDfObjectPath that resolves a folder id to its path via H2. */
    public class ObjectPathMock {
      private final String id;
      public ObjectPathMock(String id) throws DfException {
        this.id = id;
      }
      // Returns the first r_folder_path for this id, or null if the id
      // is unknown.
      public String getFullPath() throws DfException {
        //TODO(sveldurthi): Add test for multiple r_folder_path values.
        String query =
            String.format("SELECT r_folder_path "
                + "FROM dm_folder WHERE r_object_id = '%s'", id);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.next()) {
            return rs.getString("r_folder_path");
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
    }
}
  // Creates an initialized adaptor with the default test configuration.
  private DocumentumAdaptor getObjectUnderTest() throws DfException {
    return getObjectUnderTest(ImmutableMap.<String, String>of());
  }
private DocumentumAdaptor getObjectUnderTest(Map<String, String> configMap)
throws DfException {
H2BackedTestProxies proxyCls = new H2BackedTestProxies();
IDfClientX dmClientX = proxyCls.getProxyClientX();
DocumentumAdaptor adaptor = new DocumentumAdaptor(dmClientX);
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
for (Map.Entry<String, String> entry : configMap.entrySet()) {
config.overrideKey(entry.getKey(), entry.getValue());
}
adaptor.init(context);
return adaptor;
}
private void insertCabinets(String... cabinets) throws SQLException {
for (String cabinet : cabinets) {
executeUpdate(String.format("INSERT INTO dm_cabinet "
+ "(r_object_id, r_folder_path, object_name) VALUES('%s','%s','%s')",
"0c" + cabinet, "/" + cabinet, cabinet));
}
}
private void checkGetRootContent(String whereClause, int maxHtmlLinks,
String... expectedCabinets) throws Exception {
List<String> queries = new ArrayList<>();
Logging.captureLogMessages(DocumentumAdaptor.class,
"Get All Cabinets Query", queries);
String startPath = "/";
AdaptorContext context = ProxyAdaptorContext.getInstance();
DocIdEncoder docidEncoder = context.getDocIdEncoder();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.src", startPath);
config.overrideKey("documentum.maxHtmlSize", "" + maxHtmlLinks);
config.overrideKey("documentum.cabinetWhereCondition", whereClause);
Request request =
new MockRequest(DocumentumAdaptor.docIdFromPath(startPath));
MockResponse response = getDocContent(context, request);
assertEquals(queries.toString(), 1, queries.size());
String query = queries.get(0);
if (whereClause.isEmpty()) {
assertFalse(query, query.contains(" WHERE "));
} else {
assertTrue(query, query.contains(" WHERE " + whereClause));
}
assertEquals("text/html; charset=UTF-8", response.contentType);
String content = response.content.toString(UTF_8.name());
assertEquals(content, maxHtmlLinks == 0 || expectedCabinets.length == 0,
content.indexOf("href") < 0);
assertEquals(response.anchors.toString(),
maxHtmlLinks >= expectedCabinets.length,
response.anchors.isEmpty());
for (String cabinet : expectedCabinets) {
// First look in the HTML links for the cabinet. If not there,
// look in the external anchors.
String link = "<a href=\"" + cabinet + "\">" + cabinet + "</a>";
if (content.indexOf(link) < 0) {
URI uri = docidEncoder.encodeDocId(new DocId(cabinet));
URI anchor = response.anchors.get(cabinet);
assertNotNull("Cabinet " + cabinet + " with URI " + uri + " is missing"
+ " from response:/n" + content + "/n" + response.anchors, anchor);
assertEquals(uri, anchor);
}
}
}
  // No cabinets at all: no links of either kind are produced.
  @Test
  public void testGetRootContentNoCabinets() throws Exception {
    checkGetRootContent("1=1", 100);
  }
  // Empty where-clause: all cabinets, including System, are listed.
  @Test
  public void testGetRootContentEmptyWhereClause() throws Exception {
    insertCabinets("System", "Cabinet1", "Cabinet2");
    checkGetRootContent("", 100, "System", "Cabinet1", "Cabinet2");
  }
  // All cabinets fit under maxHtmlSize: HTML links only.
  @Test
  public void testGetRootContentHtmlResponseOnly() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2", "Cabinet3");
    checkGetRootContent("", 100, "Cabinet1", "Cabinet2", "Cabinet3");
  }
  // maxHtmlSize of 0: all cabinets must be external anchors.
  @Test
  public void testGetRootContentAnchorResponseOnly() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2", "Cabinet3");
    checkGetRootContent("", 0, "Cabinet1", "Cabinet2", "Cabinet3");
  }
  // maxHtmlSize smaller than the cabinet count: mix of HTML links and anchors.
  @Test
  public void testGetRootContentHtmlAndAnchorResponse() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2", "Cabinet3", "Cabinet4");
    checkGetRootContent("", 2, "Cabinet1", "Cabinet2", "Cabinet3",
        "Cabinet4");
  }
  // Explicit where-clause filters out the named cabinets.
  @Test
  public void testGetRootContentAddedWhereClause() throws Exception {
    insertCabinets("System", "Temp", "Cabinet1", "Cabinet2");
    checkGetRootContent("object_name NOT IN ('System', 'Temp')",
        100, "Cabinet1", "Cabinet2");
  }
  // The shipped default where-clause excludes the well-known system cabinets
  // plus the docbase owner's and install owner's cabinets.
  @Test
  public void testGetRootContentDefaultWhereClause() throws Exception {
    executeUpdate(
        "CREATE TABLE dm_docbase_config (owner_name varchar)",
        "INSERT INTO dm_docbase_config (owner_name) VALUES('Owner')",
        "CREATE TABLE dm_server_config (r_install_owner varchar)",
        "INSERT INTO dm_server_config (r_install_owner) VALUES('Installer')");
    insertCabinets("Integration", "Resources", "System");
    insertCabinets("Temp", "Templates", "Owner", "Installer");
    insertCabinets("Cabinet1", "Cabinet2");
    Config config = ProxyAdaptorContext.getInstance().getConfig();
    new DocumentumAdaptor(null).initConfig(config);
    checkGetRootContent(config.getValue("documentum.cabinetWhereCondition"),
        100, "Cabinet1", "Cabinet2");
  }
  // A malformed where-clause surfaces as an IOException whose cause is the
  // underlying DfException.
  @Test
  public void testGetRootContentInvalidWhereClause() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2");
    try {
      checkGetRootContent("( xyzzy", 100);
      fail("Expected exception not thrown.");
    } catch (IOException expected) {
      assertTrue(expected.getCause() instanceof DfException);
    }
  }
  // Inserts a plain-text document with default content and a current timestamp.
  private void insertDocument(String path) throws SQLException {
    insertDocument(new Date(), path, "text/plain", "Hello World");
  }
  // Inserts a dm_document row; by convention the object id is "09" + name,
  // where name is the last path segment.
  private void insertDocument(Date lastModified, String path,
      String contentType, String content) throws SQLException {
    String name = path.substring(path.lastIndexOf("/") + 1);
    executeUpdate(String.format(
        "insert into dm_sysobject(r_object_id, object_name, mock_object_path, "
        + "r_object_type, a_content_type, mock_content, r_modify_date, "
        + "mock_acl_id) "
        + "values('%s', '%s', '%s', '%s', '%s', '%s', {ts '%s'}, '%s')",
        "09" + name, name, path, "dm_document", contentType, content,
        dateFormat.format(lastModified), DEFAULT_ACL));
  }
  // Inserts a dm_document row with an explicit id and parent folder ids.
  private void insertDocument(String lastModified, String id, String path,
      String... folderIds) throws SQLException {
    String name = path.substring(path.lastIndexOf("/") + 1);
    insertSysObject(lastModified, id, name, path, "dm_document", folderIds);
  }
  // Inserts a dm_folder row (all paths comma-joined in r_folder_path) plus
  // one dm_sysobject row per path.
  private void insertFolder(String lastModified, String id, String... paths)
      throws SQLException {
    executeUpdate(String.format(
        "insert into dm_folder(r_object_id, r_folder_path) values('%s', '%s')",
        id, Joiner.on(",").join(paths)));
    for (String path : paths) {
      String name = path.substring(path.lastIndexOf("/") + 1);
      insertSysObject(lastModified, id, name, path, "dm_folder");
    }
  }
  // Repoints an existing sysobject at a new parent folder.
  private void setParentFolderId(String id, String parentId)
      throws SQLException {
    executeUpdate(String.format(
        "UPDATE dm_sysobject SET i_folder_id = '%s' WHERE r_object_id = "
        + "'%s'", parentId, id));
  }
  // Inserts a dm_sysobject row of the given type with the default ACL.
  private void insertSysObject(String lastModified, String id, String name,
      String path, String type, String... folderIds) throws SQLException {
    executeUpdate(String.format(
        "insert into dm_sysobject(r_object_id, object_name, mock_object_path, "
        + "r_object_type, i_folder_id, r_modify_date, mock_acl_id) "
        + "values('%s', '%s', '%s', '%s', '%s', {ts '%s'}, '%s')",
        id, name, path, type, Joiner.on(",").join(folderIds), lastModified,
        DEFAULT_ACL));
  }
  // Assigns a specific ACL to the sysobject at the given mock path.
  private void setSysObjectACL(String path, String aclId)
      throws SQLException {
    executeUpdate(String.format(
        "UPDATE dm_sysobject SET mock_acl_id = '%s' WHERE mock_object_path = "
        + "'%s'", aclId, path));
  }
  // Crawls a single document with the given crawl/modify timestamps and
  // verifies whether the adaptor reports it as not-modified or returns
  // full content.
  private void testDocContent(Date lastCrawled, Date lastModified,
      boolean expectNotModified) throws DfException, IOException, SQLException {
    String path = START_PATH + "/object1";
    String contentType = "crtext/html";
    String content = "<html><body>Hello</body></html>";
    insertDocument(lastModified, path, contentType, content);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path),
        lastCrawled);
    MockResponse response = getDocContent(context, request);
    if (expectNotModified) {
      // Not-modified responses must carry no content or content type.
      assertTrue(response.notModified);
      assertNull(response.contentType);
      assertNull(response.content);
    } else {
      assertFalse(response.notModified);
      assertEquals(contentType, response.contentType);
      assertEquals(content, response.content.toString(UTF_8.name()));
    }
  }
  // Fetches content for a path using a default context and request.
  private MockResponse getDocContent(String path)
      throws DfException, IOException {
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    return getDocContent(context, request);
  }
  // Creates a fresh adaptor over the H2-backed proxies and serves the request.
  private MockResponse getDocContent(AdaptorContext context, Request request)
      throws DfException, IOException {
    H2BackedTestProxies proxyCls = new H2BackedTestProxies();
    IDfClientX dmClientX = proxyCls.getProxyClientX();
    DocumentumAdaptor adaptor = new DocumentumAdaptor(dmClientX);
    adaptor.init(context);
    MockResponse response = new MockResponse();
    adaptor.getDocContent(request, response);
    return response;
  }
  // No previous crawl date: content is always returned.
  @Test
  public void testDocContentInitialCrawl() throws Exception {
    Date lastModified = new Date();
    testDocContent(null, lastModified, false);
  }
  // Modified after the last crawl: content is returned.
  @Test
  public void testDocContentModifiedSinceLastCrawl() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified = new Date(lastCrawled.getTime() + (120 * 1000L));
    testDocContent(lastCrawled, lastModified, false);
  }
  // Modified just inside the one-day grace window: still returned.
  @Test
  public void testDocContentOneDayBeforeWindowJustShort() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified = new Date( // Two seconds short of a full day.
        lastCrawled.getTime() - (24 * 60 * 60 * 1000L - 2000L));
    testDocContent(lastCrawled, lastModified, false);
  }
  // Modified just outside the one-day grace window: not-modified.
  @Test
  public void testDocContentOneDayBeforeWindowJustOver() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified = new Date( // Two seconds more than a full day.
        lastCrawled.getTime() - (24 * 60 * 60 * 1000L + 2000L));
    testDocContent(lastCrawled, lastModified, true);
  }
  @Test
  public void testDocContentRecentlyModified() throws Exception {
    // Even though content was crawled after it was recently
    // modified, don't trust Documentum dates to be UTC, so
    // content should be returned anyway.
    Date lastCrawled = new Date();
    Date lastModified = new Date(lastCrawled.getTime() - (8 * 60 * 60 * 1000L));
    testDocContent(lastModified, lastCrawled, false);
  }
  // Modified well before the last crawl: not-modified.
  @Test
  public void testDocContentNotRecentlyModified() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified =
        new Date(lastCrawled.getTime() - (72 * 60 * 60 * 1000L));
    testDocContent(lastCrawled, lastModified, true);
  }
  // Crawls a freshly inserted document with the given display URL pattern
  // and returns the display URL the adaptor produced.
  private String getDisplayUrl(String displayUrlPattern, String path)
      throws Exception {
    assertTrue(path, path.startsWith(START_PATH));
    insertDocument(path);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    config.overrideKey("documentum.displayUrlPattern", displayUrlPattern);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    MockResponse response = getDocContent(context, request);
    assertNotNull(response.toString(), response.displayUrl);
    return response.displayUrl.toString();
  }
  // {0} in the pattern substitutes the object id.
  @Test
  public void testDisplayUrlWithId() throws Exception {
    String path = "/Folder1/path1/object1";
    assertEquals("http://webtopurl/drl/09object1",
        getDisplayUrl("http://webtopurl/drl/{0}", path));
  }
  // {1} in the pattern substitutes the object path.
  @Test
  public void testDisplayUrlWithPath() throws Exception {
    String path = "/Folder1/path1/object1";
    assertEquals("http://webtopurl/drl//Folder1/path1/object1",
        getDisplayUrl("http://webtopurl/drl/{1}", path));
  }
  // Both placeholders may appear, in any order.
  @Test
  public void testDisplayUrlWithIdAndPath() throws Exception {
    String path = "/Folder1/path1/object1";
    assertEquals("/Folder1/path1/object1-http://webtopurl/09object1/drl/",
        getDisplayUrl("{1}-http://webtopurl/{0}/drl/", path));
  }
  // Crawls the object at path and returns the ACL attached to the response.
  private Acl getACL(String path) throws Exception {
    assertTrue(path, path.startsWith(START_PATH));
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    MockResponse response = getDocContent(context, request);
    assertNotNull(response.toString(), response.acl);
    return response.acl;
  }
  // A document inherits from the ACL assigned to it in dm_sysobject.
  @Test
  public void testDocumentACL() throws Exception {
    String path = "/Folder1/path1/object1";
    String documentACL = "45DocumentACL";
    insertDocument(path);
    setSysObjectACL(path, documentACL);
    Acl acl = getACL(path);
    assertEquals(new DocId(documentACL), acl.getInheritFrom());
  }
  // A folder likewise inherits from its assigned ACL.
  @Test
  public void testFolderACL() throws Exception {
    String now = getNowPlusMinutes(0);
    String folderId = "0b01081f80078d29";
    String folder = START_PATH + "/path2";
    String folderACL = "45FolderAcl";
    insertFolder(now, folderId, folder);
    setSysObjectACL(folder, folderACL);
    Acl acl = getACL(folder);
    assertEquals(new DocId(folderACL), acl.getInheritFrom());
  }
/*
* Note that the metadata structure stored in these tests is slightly
   * different than Documentum stores them:
*
* DCTM stores the data as:
*
* attr1 attr2 attr3
* ----- ----- -----
* valu1 valuA valuI
* valu2 valuII
* valu3
*
* whereas this table is:
*
* attr1 attr2 attr3
* ----- ----- -----
* valu1
* valu2
* valu3
* valuA
* valuI
* valuII
*
* The difference is insignificant for these tests.
*/
  // Creates an "attributes" table with one varchar column per attribute name
  // and inserts one row per (attribute, value) pair for the given object id.
  private void writeAttributes(String objectId, Multimap<String, String> attrs)
      throws SQLException {
    StringBuilder ddl = new StringBuilder();
    ddl.append("CREATE TABLE attributes (r_object_id varchar");
    for (String attr : attrs.keySet()) {
      ddl.append(", ").append(attr).append(" varchar");
    }
    ddl.append(")");
    executeUpdate(ddl.toString());
    for (String attr : attrs.keySet()) {
      for (String value : attrs.get(attr)) {
        executeUpdate(String.format(
            "INSERT INTO attributes (r_object_id, %s) VALUES ('%s', '%s')",
            attr, objectId, value));
      }
    }
  }
  // Reads back all attribute values for the given object id from the
  // "attributes" table, or an empty multimap if the table was never created.
  private Multimap<String, String> readAttributes(String objectId)
      throws SQLException {
    Multimap<String, String> attributes = TreeMultimap.create();
    try (Connection connection = getConnection()) {
      DatabaseMetaData dbm = connection.getMetaData();
      try (ResultSet tables = dbm.getTables(null, null, "ATTRIBUTES", null)) {
        if (!tables.next()) {
          // Attributes table does not exist if there are
          // no attributes in the test.
          return attributes;
        }
      }
      // Read all the attributes for our objectId.
      String query = String.format("SELECT * FROM attributes "
          + "WHERE r_object_id = '%s'", objectId);
      try (Statement stmt = connection.createStatement();
          ResultSet rs = stmt.executeQuery(query)) {
        ResultSetMetaData rsm = rs.getMetaData();
        while (rs.next()) {
          for (int i = 1; i <= rsm.getColumnCount(); i++) {
            // H2 uppercases the column names.
            String attr = rsm.getColumnName(i).toLowerCase();
            if (!attr.equals("r_object_id")) {
              String value = rs.getString(attr);
              if (value != null) {
                attributes.put(attr, value);
              }
            }
          }
        }
      }
    }
    return attributes;
  }
  // Crawls a document that carries the given attributes, optionally with an
  // excluded-attributes config override, and checks the response metadata.
  private void testExcludeMetadata(TreeMultimap<String, String> attrs,
      String excludedAttrs, TreeMultimap<String, String> expected)
      throws Exception {
    String path = START_PATH + "/object1";
    String objectId = "09object1";
    insertDocument(path);
    writeAttributes(objectId, attrs);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    if (excludedAttrs != null) {
      config.overrideKey("documentum.excludedAttributes", excludedAttrs);
    }
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    MockResponse response = getDocContent(context, request);
    assertEquals(expected, response.metadata);
  }
  // Convenience wrapper: no excluded attributes.
  private void testMetadata(TreeMultimap<String, String> attrs,
      TreeMultimap<String, String> expected) throws Exception {
    testExcludeMetadata(attrs, null, expected);
  }
  // Single-valued attributes come through unchanged; r_object_id is added.
  @Test
  public void testSingleValueMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr2", "value2");
    attributes.put("attr3", "value3");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    testMetadata(attributes, expected);
  }
  // Repeating attributes keep all of their values.
  @Test
  public void testMultiValueMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr1", "value2");
    attributes.put("attr1", "value3");
    assertEquals(1, attributes.keySet().size());
    assertEquals(3, attributes.get("attr1").size());
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    testMetadata(attributes, expected);
  }
  // Empty string values are preserved in the metadata.
  @Test
  public void testEmptyValueMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr2", "value2");
    attributes.put("attr2", "");
    attributes.put("attr3", "");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    testMetadata(attributes, expected);
  }
  // Excluded attributes are dropped; note r_object_id is excluded here too,
  // so it is not added back to the expected metadata.
  @Test
  public void testExcludeAttrMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr2", "value2");
    attributes.put("attr3", "value3");
    attributes.put("foo", "foo1");
    attributes.put("bar", "bar1");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    String excluded = "foo, bar, r_object_id";
    expected.removeAll("foo");
    expected.removeAll("bar");
    testExcludeMetadata(attributes, excluded, expected);
  }
  // r_object_type is expanded to include the supertype chain.
  @Test
  public void testObjectTypeMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("r_object_type", "dm_document");
    attributes.put("attr2", "value2");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    expected.removeAll("r_object_type");
    expected.put("r_object_type", "dm_document");
    expected.put("r_object_type", "dm_sysobject");
    testMetadata(attributes, expected);
  }
  // Inserts a virtual document (r_is_virtual_doc = TRUE) plus one child
  // document per name, each parented to the vdoc's id.
  private void insertVirtualDocument(String vdocPath, String contentType,
      String content, String... children) throws SQLException {
    String name = vdocPath.substring(vdocPath.lastIndexOf("/") + 1);
    String vdocId = "09" + name;
    String now = getNowPlusMinutes(0);
    executeUpdate(String.format(
        "INSERT INTO dm_sysobject(r_object_id, object_name, mock_object_path, "
        + "r_object_type, r_is_virtual_doc, a_content_type, mock_content, "
        + "r_modify_date, mock_acl_id) "
        + "VALUES('%s', '%s', '%s', '%s', TRUE, '%s', '%s', {ts '%s'}, '%s')",
        vdocId, name, vdocPath, "dm_document_virtual", contentType, content,
        now, DEFAULT_ACL));
    for (String child : children) {
      insertDocument(now, "09" + child, vdocPath + "/" + child, vdocId);
    }
  }
  // A childless vdoc serves its own content with no anchors.
  @Test
  public void testVirtualDocContentNoChildren() throws Exception {
    String path = START_PATH + "/vdoc";
    String objectContentType = "crtext/html";
    String objectContent = "<html><body>Hello</body></html>";
    insertVirtualDocument(path, objectContentType, objectContent);
    MockResponse response = getDocContent(path);
    assertEquals(objectContentType, response.contentType);
    assertEquals(objectContent, response.content.toString(UTF_8.name()));
    assertTrue(response.anchors.isEmpty());
  }
  // A vdoc with children serves its content plus one anchor per child,
  // each ending with "path/name:09name".
  @Test
  public void testVirtualDocContentWithChildren() throws Exception {
    String path = START_PATH + "/vdoc";
    String objectContentType = "crtext/html";
    String objectContent = "<html><body>Hello</body></html>";
    insertVirtualDocument(path, objectContentType, objectContent,
        "object1", "object2", "object3");
    MockResponse response = getDocContent(path);
    assertEquals(objectContentType, response.contentType);
    assertEquals(objectContent, response.content.toString(UTF_8.name()));
    // Verify child links.
    assertEquals(3, response.anchors.size());
    for (String name : ImmutableList.of("object1", "object2", "object3")) {
      URI uri = response.anchors.get(name);
      assertNotNull(uri);
      assertTrue(uri.toString().endsWith(path + "/" + name + ":09" + name));
    }
  }
  // A folder is rendered as an HTML listing of its children, with names
  // percent-encoded in hrefs but shown raw as link text.
  @Test
  public void testFolderDocContent() throws Exception {
    String now = getNowPlusMinutes(0);
    String folderId = "0b01081f80078d29";
    String folder = START_PATH + "/path2";
    insertFolder(now, folderId, folder);
    insertDocument(now, "0901081f80079263", folder + "/file1", folderId);
    insertDocument(now, "0901081f8007926d", folder + "/file2 evil<chars?",
        folderId);
    insertDocument(now, "0901081f80079278", folder + "/file3", folderId);
    StringBuilder expected = new StringBuilder();
    expected.append("<!DOCTYPE html>\n<html><head><title>");
    expected.append("Folder path2");
    expected.append("</title></head><body><h1>");
    expected.append("Folder path2");
    expected.append("</h1>");
    expected.append("<li><a href=\"path2/file1\">file1</a></li>");
    expected.append("<li><a href=\"path2/file2%20evil%3Cchars%3F\">"
        + "file2 evil<chars?</a></li>");
    expected.append("<li><a href=\"path2/file3\">file3</a></li>");
    expected.append("</body></html>");
    MockResponse response = getDocContent(folder);
    assertFalse(response.notFound);
    assertEquals("text/html; charset=UTF-8", response.contentType);
    assertEquals(expected.toString(), response.content.toString(UTF_8.name()));
  }
  // A nonexistent path yields a not-found response.
  @Test
  public void testGetDocContentNotFound() throws Exception {
    String path = START_PATH + "/doesNotExist";
    assertTrue(getDocContent(path).notFound);
  }
  // An existing object outside the configured start path is also not-found.
  @Test
  public void testGetDocContentNotUnderStartPath() throws Exception {
    String now = getNowPlusMinutes(0);
    String path = "/Folder2/path2";
    insertFolder(now, "0b01081f80078d30", path);
    assertFalse(path.startsWith(START_PATH));
    assertTrue(getDocContent(path).notFound);
  }
  /**
   * Builds a list of expected DocId Records that the Pusher should receive.
   *
   * @param paths object paths, one Record per path
   */
  private List<Record> expectedRecordsFor(String... paths) {
    ImmutableList.Builder<Record> builder = ImmutableList.builder();
    for (String path : paths) {
      DocId docid = DocumentumAdaptor.docIdFromPath(path);
      builder.add(new Record.Builder(docid).build());
    }
    return builder.build();
  }
  // Runs getDocIds with the given start paths and verifies the exact
  // Records pushed.
  private void testGetDocIds(List<String> startPaths,
      List<Record> expectedRecords)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.of("documentum.src", Joiner.on(",").join(startPaths)));
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.getDocIds(pusher);
    assertEquals(expectedRecords, pusher.getRecords());
  }
  // The root start path is pushed as-is.
  @Test
  public void testGetDocIdsRootStartPath() throws Exception {
    testGetDocIds(startPaths("/"), expectedRecordsFor("/"));
  }
  // A single start path is pushed as-is.
  @Test
  public void testGetDocIdsSingleStartPath() throws Exception {
    testGetDocIds(startPaths(START_PATH), expectedRecordsFor(START_PATH));
  }
  // All existing start paths are pushed.
  @Test
  public void testGetDocIdsMultipleStartPaths() throws Exception {
    String now = getNowPlusMinutes(0);
    String path2 = "/Folder2";
    String path3 = "/Folder3";
    insertFolder(now, "0bFolder2", path2);
    insertFolder(now, "0bFolder3", path3);
    testGetDocIds(startPaths(START_PATH, path2, path3),
        expectedRecordsFor(START_PATH, path2, path3));
  }
  // Start paths that do not exist (path2 here) are silently skipped.
  @Test
  public void testGetDocIdsMultipleStartPathsSomeOffline() throws Exception {
    String now = getNowPlusMinutes(0);
    String path2 = "/Folder2";
    String path3 = "/Folder3";
    insertFolder(now, "0bFolder3", path3);
    testGetDocIds(startPaths(START_PATH, path2, path3),
        expectedRecordsFor(START_PATH, path3));
  }
  /**
   * A traversal action includes an expected input checkpoint, an
   * exception to throw, and a final checkpoint to return. All fields
   * are optional and may be null.
   */
  private static class Action {
    // Checkpoint the traverser is expected to be called with.
    public final String input;
    // Exception fillCollection should throw, or null for success.
    public final DfException error;
    // Checkpoint to return; null signals a completed traversal.
    public final String output;
    public Action(String input, DfException error, String output) {
      this.input = input;
      this.error = error;
      this.output = output;
    }
  }
  /**
   * Tests the traversers by replaying a sequence of actions. An
   * assertion will fail if the traverser loops more or fewer times
   * than the given number of actions, or if the checkpoints or thrown
   * exceptions do not match.
   */
  private void testTraverserTemplate(Action... actionArray) throws Exception {
    // The actions are removed from the deque as they are performed.
    final ArrayDeque<Action> actions =
        new ArrayDeque<>(Arrays.asList(actionArray));
    DocumentumAdaptor adaptor = getObjectUnderTest();
    // Anonymous template that scripts each loop iteration from the next
    // Action: verify the checkpoint, optionally throw, then report
    // completion iff the action's output checkpoint is null.
    DocumentumAdaptor.TraverserTemplate template =
        adaptor.new TraverserTemplate(Checkpoint.full()) {
          @Override protected void createCollection() {}
          @Override
          protected boolean fillCollection(IDfSession dmSession,
              Principals principals, Checkpoint checkpoint)
              throws DfException {
            assertEquals(actions.getFirst().input, checkpoint.getObjectId());
            if (actions.getFirst().error != null) {
              throw actions.getFirst().error;
            }
            return actions.getFirst().output == null;
          }
          @Override
          protected Checkpoint pushCollection(DocIdPusher pusher) {
            return new Checkpoint(actions.removeFirst().output);
          }
          @Override protected void sleep() {}
        };
    // We only expect an exception if the last loop iteration throws.
    ArrayList<DfException> expectedExceptions = new ArrayList<>();
    if (actions.getLast().error != null) {
      expectedExceptions.add(actions.getLast().error);
    }
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    ArrayList<DfException> savedExceptions = new ArrayList<>();
    template.run(pusher, savedExceptions);
    assertTrue(actions.toString(), actions.isEmpty());
    assertEquals(expectedExceptions, savedExceptions);
  }
  // Shorthand checkpoints and exceptions for the traverser-template tests.
  private static final String C = "non-null checkpoint";
  private static final String D = "another checkpoint";
  private static final DfException E = new DfException("first error");
  private static final DfException F = new DfException("second error");
  // A throw with no checkpoint progress ends the traversal with that error.
  @Test
  public void testTraverserTemplate_noProgress() throws Exception {
    testTraverserTemplate(
        new Action(null, E, null));
  }
  // A clean run that reports completion loops exactly once.
  @Test
  public void testTraverserTemplate_completeTraversal() throws Exception {
    testTraverserTemplate(
        new Action(null, null, null));
  }
  @Test
  public void testTraverserTemplate_impossible() throws Exception {
    // If no exception is thrown, the checkpoint should be null.
    // But if it happens, we expect a second call.
    testTraverserTemplate(
        new Action(null, null, C),
        new Action(C, null, null));
  }
  // A second throw without checkpoint progress stops the retries.
  @Test
  public void testTraverserTemplate_throwThenNoProgress() throws Exception {
    testTraverserTemplate(
        new Action(null, E, C),
        new Action(C, F, C));
  }
  // Throws that still advance the checkpoint keep the traversal going.
  @Test
  public void testTraverserTemplate_throwThenProgress() throws Exception {
    testTraverserTemplate(
        new Action(null, E, C),
        new Action(C, F, D),
        new Action(D, null, null));
  }
  // A throw followed by a clean completion saves no exception.
  @Test
  public void testTraverserTemplate_throwThenComplete() throws Exception {
    testTraverserTemplate(
        new Action(null, E, C),
        new Action(C, null, null));
  }
  // Inserts a dm_user row per name; login name equals the user name.
  private void insertUsers(String... names) throws SQLException {
    for (String name : names) {
      executeUpdate(String.format(
          "insert into dm_user(user_name, user_login_name) values('%s', '%s')",
          name, name));
    }
  }
  // Marks the named users inactive (user_state = 1).
  private void disableUsers(String... names) throws SQLException {
    // TODO(sveldurthi): modify query to use where user_name in ('u1', 'u2')
    for (String name : names) {
      executeUpdate(String.format(
          "UPDATE dm_user SET user_state = 1 WHERE user_name = '%s'", name));
    }
  }
  // Inserts a locally-sourced group with the given members.
  private void insertGroup(String groupName, String... members)
      throws SQLException {
    insertGroupEx(getNowPlusMinutes(0), "", groupName, members);
  }
  // Inserts an LDAP-sourced group with the given members.
  private void insertLdapGroup(String groupName, String... members)
      throws SQLException {
    insertGroupEx(getNowPlusMinutes(0), "LDAP", groupName, members);
  }
  // Inserts a group as both a dm_user row (flagged r_is_group) and a set
  // of dm_group rows. Member names starting with "group" are treated as
  // nested groups; all others as users.
  private void insertGroupEx(String lastModified, String source,
      String groupName, String... members) throws SQLException {
    executeUpdate(String.format("INSERT INTO dm_user"
        + "(user_name, user_login_name, user_source, user_ldap_dn, r_is_group) "
        + "VALUES('%s', '%s', '%s', '%s', TRUE)", groupName, groupName,
        source, "LDAP".equals(source) ? ("CN=" + groupName) : ""));
    List<String> users = new ArrayList<String>();
    List<String> groups = new ArrayList<String>();
    for (String member : members) {
      if (member.toLowerCase().startsWith("group")) {
        groups.add(member);
      } else {
        users.add(member);
      }
    }
    // Emulate ROW_BASED retrieval by storing the values that way.
    int numRows = Math.max(1, Math.max(users.size(), groups.size()));
    for (int i = 0; i < numRows; i++) {
      executeUpdate(String.format("INSERT INTO dm_group"
          + "(r_object_id, group_name, group_source, r_modify_date, "
          + "users_names, groups_names) VALUES('%s', '%s', '%s', {ts '%s'}, "
          + "%s, %s)",
          "12" + groupName, groupName, source, lastModified,
          (i < users.size()) ? "'" + users.get(i) + "'" : "NULL",
          (i < groups.size()) ? "'" + groups.get(i) + "'" : "NULL"));
    }
  }
  /** Inserts an empty ACL row with the given object id. */
  private void createAcl(String id) throws SQLException {
    executeUpdate(String.format(
        "insert into dm_acl(r_object_id) values('%s')", id));
  }
private boolean isAccessorGroup(String accessorName) throws SQLException {
try (Connection connection = getConnection();
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("select r_is_group from dm_user"
+ " where user_name = '" + accessorName + "'")) {
if (rs.next()) {
return rs.getBoolean(1);
}
}
return false;
}
  /**
   * Adds a permit row to the ACL with the given id, marking the row as a
   * group entry when the accessor is a group.
   */
  // NOTE(review): values are spliced into the SQL with String.format, so
  // accessor names containing quotes would break this statement.
  private void grantPermit(String id, IDfPermit permit) throws SQLException {
    executeUpdate(String.format(
        "insert into dm_acl(r_object_id, r_accessor_name, "
        + "r_accessor_permit, r_permit_type, r_is_group) values("
        + "'%s', '%s', '%s', '%s', '%s')",
        id, permit.getAccessorName(), permit.getPermitValueInt(),
        permit.getPermitType(), isAccessorGroup(permit.getAccessorName())));
  }
private void addAllowPermitToAcl(String id, String accessorName, int permit)
throws SQLException {
IDfPermit permitobj = new DfPermit();
permitobj.setAccessorName(accessorName);
permitobj.setPermitType(IDfPermitType.ACCESS_PERMIT);
permitobj.setPermitValue(Integer.toString(permit));
grantPermit(id, permitobj);
}
private void addDenyPermitToAcl(String id, String accessorName, int permit)
throws SQLException {
IDfPermit permitobj = new DfPermit();
permitobj.setAccessorName(accessorName);
permitobj.setPermitType(IDfPermitType.ACCESS_RESTRICTION);
permitobj.setPermitValue(Integer.toString(permit));
grantPermit(id, permitobj);
}
  /** Pushes all doc ids with no Windows domain and returns the ACL map. */
  private Map<DocId, Acl> getAllAcls() throws Exception {
    return getAllAcls("");
  }
  /**
   * Runs a full getDocIds pass and returns the pushed named resources
   * (the ACLs).
   *
   * @param windowsDomain domain prepended to user names; "" for none
   */
  private Map<DocId, Acl> getAllAcls(String windowsDomain)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.<String, String>builder()
            .put("documentum.windowsDomain", windowsDomain)
            .put("adaptor.namespace", "NS")
            .put("documentum.docbaseName", "Local") // Local Namespace
            .build());
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.getDocIds(pusher);
    return pusher.getNamedResources();
  }
// tests for ACLs
// TODO: (Srinivas) - Add a unit test and perform manual test of
// user and group names with quotes in them.
  // Each created ACL is pushed as its own named resource.
  @Test
  public void testAcls() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    createAcl("4501081f80000102");
    Map<DocId, Acl> namedResources = getAllAcls();
    assertEquals(3, namedResources.size());
  }
  // Allowed users at READ or above become permit users; of the denied
  // users, only the BROWSE-level deny (User2) lands in the deny set.
  @Test
  public void testAllowAcls() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    String id = "4501081f80000100";
    createAcl(id);
    addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
    addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
    addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
    Map<DocId, Acl> namedResources = getAllAcls();
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new UserPrincipal("User4", "NS"),
        new UserPrincipal("User5", "NS")),
        acl.getPermitUsers());
    assertEquals(ImmutableSet.of(new UserPrincipal("User2", "NS")),
        acl.getDenyUsers());
    assertEquals(ImmutableSet.of(), acl.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl.getDenyGroups());
  }
  // NOTE(review): this body is byte-identical to testAllowAcls; judging by
  // the name it was presumably meant to vary the BROWSE permit handling —
  // confirm the intended difference and adjust the fixture.
  @Test
  public void testBrowseAcls() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    String id = "4501081f80000100";
    createAcl(id);
    addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
    addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
    addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
    Map<DocId, Acl> namedResources = getAllAcls();
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new UserPrincipal("User4", "NS"),
        new UserPrincipal("User5", "NS")),
        acl.getPermitUsers());
    assertEquals(ImmutableSet.of(new UserPrincipal("User2", "NS")),
        acl.getDenyUsers());
    assertEquals(ImmutableSet.of(), acl.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl.getDenyGroups());
  }
  // Group principals are placed in the local namespace ("NS_Local"),
  // while user principals use the global namespace ("NS").
  @Test
  public void testGroupAcls() throws Exception {
    insertUsers("User1", "User2");
    insertGroup("Group1", "User2", "User3");
    insertGroup("Group2", "User4", "User5");
    insertGroup("Group3", "User6", "User7");
    String id = "4501081f80000101";
    createAcl(id);
    addAllowPermitToAcl(id, "User1", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(id, "User2", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
    addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
    Map<DocId, Acl> namedResources = getAllAcls();
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
        new GroupPrincipal("Group2", "NS_Local")),
        acl.getPermitGroups());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
        acl.getDenyGroups());
    assertEquals(ImmutableSet.of(new UserPrincipal("User1", "NS"),
        new UserPrincipal("User2", "NS")),
        acl.getPermitUsers());
    assertEquals(ImmutableSet.of(), acl.getDenyUsers());
  }
  // Disabled users (User2, User4) are dropped from both permit and deny
  // sets; only active users remain.
  @Test
  public void testDisabledUserAcls() throws Exception {
    insertUsers("User2", "User3", "User4", "User5");
    disableUsers("User2", "User4");
    String id = "4501081f80000100";
    createAcl(id);
    addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_READ);
    Map<DocId, Acl> namedResources = getAllAcls();
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new UserPrincipal("User5", "NS")),
        acl.getPermitUsers());
    assertEquals(ImmutableSet.of(new UserPrincipal("User3", "NS")),
        acl.getDenyUsers());
    assertEquals(ImmutableSet.of(), acl.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl.getDenyGroups());
  }
  // Disabling the dm_user rows backing Group2/Group3 removes those groups
  // from the permit and deny sets.
  @Test
  public void testDisabledGroupAcls() throws Exception {
    insertGroup("Group1", "User2", "User3");
    insertGroup("Group2", "User4", "User5");
    insertGroup("Group3", "User6", "User7");
    insertGroup("Group4", "User8", "User9");
    disableUsers("Group2", "Group3");
    String id = "4501081f80000101";
    createAcl(id);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
    addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "Group4", IDfACL.DF_PERMIT_READ);
    Map<DocId, Acl> namedResources = getAllAcls();
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local")),
        acl.getPermitGroups());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group4", "NS_Local")),
        acl.getDenyGroups());
    assertEquals(ImmutableSet.of(), acl.getPermitUsers());
    assertEquals(ImmutableSet.of(), acl.getDenyUsers());
  }
  // dm_world at READ becomes the sole permit group (the BROWSE-level
  // Group1 allow is dropped); of the denies, only User1's READ-level
  // deny survives.
  @Test
  public void testGroupDmWorldAcl() throws Exception {
    insertUsers("User1", "User3");
    insertGroup("Group1", "User2", "User3");
    String id = "4501081f80000102";
    createAcl(id);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_BROWSE);
    addAllowPermitToAcl(id, "dm_world", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
    Map<DocId, Acl> namedResources = getAllAcls();
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new GroupPrincipal("dm_world", "NS_Local")),
        acl.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl.getDenyGroups());
    assertEquals(ImmutableSet.of(), acl.getPermitUsers());
    assertEquals(ImmutableSet.of(new UserPrincipal("User1", "NS")),
        acl.getDenyUsers());
  }
  // With a NetBIOS-style Windows domain configured, user principals are
  // prefixed "domain\\name".
  @Test
  public void testDomainForAclUser() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    String id = "4501081f80000100";
    createAcl(id);
    addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
    addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
    addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
    Map<DocId, Acl> namedResources = getAllAcls("ajax");
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new UserPrincipal("ajax\\User4", "NS"),
        new UserPrincipal("ajax\\User5", "NS")),
        acl.getPermitUsers());
    assertEquals(ImmutableSet.of(new UserPrincipal("ajax\\User2", "NS")),
        acl.getDenyUsers());
  }
  // A DNS-style domain is used verbatim as the principal's domain prefix.
  @Test
  public void testDnsDomainForAclUser() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    String id = "4501081f80000100";
    createAcl(id);
    addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
    addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
    addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
    Map<DocId, Acl> namedResources = getAllAcls("ajax.example.com");
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(
        new UserPrincipal("ajax.example.com\\User4", "NS"),
        new UserPrincipal("ajax.example.com\\User5", "NS")),
        acl.getPermitUsers());
    assertEquals(ImmutableSet.of(
        new UserPrincipal("ajax.example.com\\User2", "NS")),
        acl.getDenyUsers());
  }
  // The Windows domain applies only to users: group principals remain
  // undecorated in the local namespace.
  @Test
  public void testDomainForAclGroup() throws Exception {
    insertUsers("User1", "User2");
    insertGroup("Group1", "User2", "User3");
    insertGroup("Group2", "User4", "User5");
    insertGroup("Group3", "User6", "User7");
    String id = "4501081f80000101";
    createAcl(id);
    addAllowPermitToAcl(id, "User1", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(id, "User2", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
    addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
    Map<DocId, Acl> namedResources = getAllAcls("ajax");
    Acl acl = namedResources.get(new DocId(id));
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
        new GroupPrincipal("Group2", "NS_Local")),
        acl.getPermitGroups());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
        acl.getDenyGroups());
  }
// Tests for required groups and required group sets.
private void addRequiredGroupSetToAcl(String id, String accessorName)
throws SQLException {
IDfPermit permitobj = new DfPermit();
permitobj.setAccessorName(accessorName);
permitobj.setPermitType(IDfPermitType.REQUIRED_GROUP_SET);
grantPermit(id, permitobj);
}
private void addRequiredGroupToAcl(String id, String accessorName)
throws SQLException {
IDfPermit permitobj = new DfPermit();
permitobj.setAccessorName(accessorName);
permitobj.setPermitType(IDfPermitType.REQUIRED_GROUP);
grantPermit(id, permitobj);
}
  // Required group sets collapse into a single synthetic "_reqGroupSet"
  // ACL with AND_BOTH_PERMIT inheritance; the base ACL inherits from it
  // with PARENT_OVERRIDES.
  @Test
  public void testRequiredGroupSetAcl() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
    insertGroup("Group1", "User2", "User3");
    insertGroup("Group2", "User4", "User5");
    insertGroup("Group3", "User6", "User7");
    insertGroup("GroupSet1", "Group1", "Group2");
    insertGroup("GroupSet2", "Group2", "Group3");
    String id = "45Acl0";
    createAcl(id);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
    addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
    addRequiredGroupSetToAcl(id, "GroupSet1");
    addRequiredGroupSetToAcl(id, "GroupSet2");
    Map<DocId, Acl> namedResources = getAllAcls();
    assertEquals(2, namedResources.size());
    Acl acl1 = namedResources.get(new DocId("45Acl0_reqGroupSet"));
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("GroupSet1", "NS_Local"),
        new GroupPrincipal("GroupSet2", "NS_Local")),
        acl1.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl1.getDenyGroups());
    Acl acl2 = namedResources.get(new DocId(id));
    assertEquals(new DocId("45Acl0_reqGroupSet"),
        acl2.getInheritFrom());
    assertEquals(InheritanceType.PARENT_OVERRIDES, acl2.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
        new GroupPrincipal("Group2", "NS_Local")),
        acl2.getPermitGroups());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
        acl2.getDenyGroups());
  }
  // Each required group becomes its own synthetic "_GroupN" ACL, chained
  // via inheritFrom (Group6 -> Group5 -> Group4) with AND_BOTH_PERMIT;
  // the base ACL inherits from the end of the chain.
  @Test
  public void testRequiredGroupsAcl() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
    insertGroup("Group1", "User2", "User3");
    insertGroup("Group2", "User4", "User5");
    insertGroup("Group3", "User6", "User7");
    insertGroup("Group4", "User2", "User3");
    insertGroup("Group5", "User4", "User5");
    insertGroup("Group6", "User6", "User7");
    String id = "45Acl0";
    createAcl(id);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
    addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
    addRequiredGroupToAcl(id, "Group4");
    addRequiredGroupToAcl(id, "Group5");
    addRequiredGroupToAcl(id, "Group6");
    Map<DocId, Acl> namedResources = getAllAcls();
    assertEquals(4, namedResources.size());
    Acl acl1 = namedResources.get(new DocId("45Acl0_Group6"));
    assertEquals(new DocId("45Acl0_Group5"), acl1.getInheritFrom());
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group6", "NS_Local")),
        acl1.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl1.getDenyGroups());
    Acl acl2 = namedResources.get(new DocId("45Acl0_Group5"));
    assertEquals(new DocId("45Acl0_Group4"), acl2.getInheritFrom());
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl2.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group5", "NS_Local")),
        acl2.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl2.getDenyGroups());
    Acl acl3 = namedResources.get(new DocId("45Acl0_Group4"));
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl3.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group4", "NS_Local")),
        acl3.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl3.getDenyGroups());
    Acl acl4 = namedResources.get(new DocId(id));
    assertEquals(new DocId("45Acl0_Group6"), acl4.getInheritFrom());
    assertEquals(InheritanceType.PARENT_OVERRIDES, acl4.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
        new GroupPrincipal("Group2", "NS_Local")),
        acl4.getPermitGroups());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
        acl4.getDenyGroups());
  }
  // Combines both mechanisms: the required-group chain (Group4 -> Group5
  // -> Group6) is followed by the "_reqGroupSet" ACL, which the base ACL
  // inherits from.
  @Test
  public void testRequiredGroupsAndSetsAcl() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
    insertGroup("Group1", "User2", "User3");
    insertGroup("Group2", "User4", "User5");
    insertGroup("Group3", "User6", "User7");
    insertGroup("Group4", "User2", "User3");
    insertGroup("Group5", "User4", "User5");
    insertGroup("Group6", "User6", "User7");
    insertGroup("GroupSet1", "Group1", "Group2");
    insertGroup("GroupSet2", "Group5", "Group6");
    String id = "45Acl0";
    createAcl(id);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
    addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
    addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
    addRequiredGroupToAcl(id, "Group4");
    addRequiredGroupToAcl(id, "Group5");
    addRequiredGroupToAcl(id, "Group6");
    addRequiredGroupSetToAcl(id, "GroupSet1");
    addRequiredGroupSetToAcl(id, "GroupSet2");
    Map<DocId, Acl> namedResources = getAllAcls();
    assertEquals(5, namedResources.size());
    Acl acl1 = namedResources.get(new DocId("45Acl0_Group6"));
    assertEquals(new DocId("45Acl0_Group5"), acl1.getInheritFrom());
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group6", "NS_Local")),
        acl1.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl1.getDenyGroups());
    Acl acl2 = namedResources.get(new DocId("45Acl0_Group5"));
    assertEquals(new DocId("45Acl0_Group4"), acl2.getInheritFrom());
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl2.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group5", "NS_Local")),
        acl2.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl2.getDenyGroups());
    Acl acl3 = namedResources.get(new DocId("45Acl0_Group4"));
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl3.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group4", "NS_Local")),
        acl3.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl3.getDenyGroups());
    Acl acl4 = namedResources.get(new DocId("45Acl0_reqGroupSet"));
    assertEquals(new DocId("45Acl0_Group6"), acl4.getInheritFrom());
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl4.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("GroupSet1", "NS_Local"),
        new GroupPrincipal("GroupSet2", "NS_Local")),
        acl4.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl4.getDenyGroups());
    Acl acl5 = namedResources.get(new DocId(id));
    assertEquals(new DocId("45Acl0_reqGroupSet"),
        acl5.getInheritFrom());
    assertEquals(InheritanceType.PARENT_OVERRIDES, acl5.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
        new GroupPrincipal("Group2", "NS_Local")),
        acl5.getPermitGroups());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
        acl5.getDenyGroups());
  }
// TODO(srinivas): we should check whether we have a test of non-existent
// users and groups in permits and denies.
  // A required group that does not exist still produces its synthetic
  // ACL (empty), so the chain is preserved but grants nothing.
  @Test
  public void testMissingRequiredGroup() throws Exception {
    insertUsers("User1", "User2", "User3");
    insertGroup("Group1", "User2", "User3");
    String id = "45Acl0";
    createAcl(id);
    addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
    addRequiredGroupToAcl(id, "GroupNotExists");
    Map<DocId, Acl> namedResources = getAllAcls();
    assertEquals(2, namedResources.size());
    Acl acl1 = namedResources.get(new DocId("45Acl0_GroupNotExists"));
    assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
    assertEquals(ImmutableSet.of(), acl1.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl1.getDenyGroups());
    // Verify GroupNotExists group is not in permit or deny groups.
    Acl acl2 = namedResources.get(new DocId(id));
    assertEquals(new DocId("45Acl0_GroupNotExists"),
        acl2.getInheritFrom());
    assertEquals(InheritanceType.PARENT_OVERRIDES, acl2.getInheritanceType());
    assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local")),
        acl2.getPermitGroups());
    assertEquals(ImmutableSet.of(), acl2.getDenyGroups());
  }
  /**
   * Initializes an adaptor (optionally overriding the case-sensitivity
   * config key), runs getDocIds, and returns the pushed ACL map.
   *
   * @param caseSensitivityType override value, or null to use the default
   */
  private Map<DocId, Acl> getAclMap(CaseSensitivityType caseSensitivityType)
      throws Exception {
    IDfClientX dmClientX = new H2BackedTestProxies().getProxyClientX();
    DocumentumAdaptor adaptor = new DocumentumAdaptor(dmClientX);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    if (caseSensitivityType != null) {
      context.getConfig().overrideKey("adaptor.caseSensitivityType",
          caseSensitivityType.toString());
    }
    adaptor.init(context);
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.getDocIds(pusher);
    return pusher.getNamedResources();
  }
  /** Returns the ACL map using the default case-sensitivity setting. */
  private Map<DocId, Acl> getAclMap() throws Exception {
    return getAclMap(null);
  }
  // Default configuration yields everything-case-sensitive ACLs.
  @Test
  public void testAclCaseSensitivity_basic() throws Exception {
    createAcl("4501081f80000100");
    Map<DocId, Acl> aclMap = getAclMap();
    Acl acl = aclMap.get(new DocId("4501081f80000100"));
    assertTrue("Expected everything-case-sensitive",
        acl.isEverythingCaseSensitive());
  }
  // The synthetic required-group ACL inherits the default (sensitive)
  // case-sensitivity setting.
  @Test
  public void testAclCaseSensitivity_required() throws Exception {
    insertUsers("User1", "User2", "User3");
    insertGroup("Group1", "User2", "User3");
    createAcl("4501081f80000100");
    addRequiredGroupToAcl("4501081f80000100", "Group1");
    Map<DocId, Acl> aclMap = getAclMap();
    Acl acl = aclMap.get(new DocId("4501081f80000100_Group1"));
    assertTrue("Expected everything-case-sensitive",
        acl.isEverythingCaseSensitive());
  }
  // Explicit EVERYTHING_CASE_SENSITIVE override is honored.
  @Test
  public void testAclCaseSensitivity_sensitive() throws Exception {
    createAcl("4501081f80000100");
    Map<DocId, Acl> aclMap =
        getAclMap(CaseSensitivityType.EVERYTHING_CASE_SENSITIVE);
    Acl acl = aclMap.get(new DocId("4501081f80000100"));
    assertTrue("Expected everything-case-sensitive",
        acl.isEverythingCaseSensitive());
  }
  // Explicit EVERYTHING_CASE_INSENSITIVE override is honored.
  @Test
  public void testAclCaseSensitivity_insensitive() throws Exception {
    createAcl("4501081f80000100");
    Map<DocId, Acl> aclMap =
        getAclMap(CaseSensitivityType.EVERYTHING_CASE_INSENSITIVE);
    Acl acl = aclMap.get(new DocId("4501081f80000100"));
    assertTrue("Expected everything-case-insensitive",
        acl.isEverythingCaseInsensitive());
  }
  // The case-insensitive override also reaches synthetic required-group
  // ACLs.
  @Test
  public void testAclCaseSensitivity_required_insensitive() throws Exception {
    insertUsers("User1", "User2", "User3");
    insertGroup("Group1", "User2", "User3");
    createAcl("4501081f80000100");
    addRequiredGroupToAcl("4501081f80000100", "Group1");
    Map<DocId, Acl> aclMap =
        getAclMap(CaseSensitivityType.EVERYTHING_CASE_INSENSITIVE);
    Acl acl = aclMap.get(new DocId("4501081f80000100_Group1"));
    assertTrue("Expected everything-case-insensitive",
        acl.isEverythingCaseInsensitive());
  }
  /**
   * Inserts a dm_audittrail_acl row.
   *
   * @param id r_object_id of the audit event
   * @param auditObjId the audited ACL's object id
   * @param eventName e.g. dm_save, dm_saveasnew, dm_destroy
   * @param date value for time_stamp_utc
   */
  private void insertAclAudit(String id, String auditObjId,
      String eventName, String date) throws SQLException {
    executeUpdate(String.format(
        "insert into dm_audittrail_acl(r_object_id, audited_obj_id, "
            + "event_name, time_stamp_utc) "
            + "values('%s', '%s', '%s', {ts '%s'})",
        id, auditObjId, eventName, date));
  }
/**
* Returns date string for the given number of minutes into the future
* or past.
*
* @param minutes minutes to add.
* @return date in string format.
*/
private String getNowPlusMinutes(int minutes) {
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.MINUTE, minutes);
return dateFormat.format(calendar.getTime());
}
  /**
   * Runs an incremental ACL traversal starting from the given checkpoint,
   * asserts the traverser ends at the expected checkpoint, and returns the
   * pushed ACL map.
   */
  private Map<DocId, Acl> testUpdateAcls(Checkpoint checkpoint,
      Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest();
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.modifiedAclTraverser.setCheckpoint(checkpoint);
    adaptor.getModifiedDocIds(pusher);
    assertEquals(expectedCheckpoint,
        adaptor.modifiedAclTraverser.getCheckpoint());
    return pusher.getNamedResources();
  }
  /**
   * Like {@code testUpdateAcls(Checkpoint, Checkpoint)}, but additionally
   * asserts the exact set of ACL DocIds that were pushed.
   */
  private void testUpdateAcls(Checkpoint checkpoint, Set<DocId> expectedAclIds,
      Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    Map<DocId, Acl> aclMap = testUpdateAcls(checkpoint, expectedCheckpoint);
    assertEquals(expectedAclIds, aclMap.keySet());
  }
private void assertUsers(Set<UserPrincipal> actual, String... expected) {
ImmutableSet.Builder<UserPrincipal> builder = ImmutableSet.builder();
for (String user : expected) {
builder.add(new UserPrincipal(user, "globalNS"));
}
assertEquals(builder.build(), actual);
}
  // Save, save-as-new, and destroy events all surface their ACL ids; the
  // checkpoint advances to the last audit row ("125").
  @Test
  public void testUpdateAcls() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    String dateStr = getNowPlusMinutes(5);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
    insertAclAudit("125", "4501081f80000102", "dm_destroy", dateStr);
    testUpdateAcls(Checkpoint.incremental(),
        ImmutableSet.of(
            new DocId("4501081f80000100"),
            new DocId("4501081f80000101"),
            new DocId("4501081f80000102")),
        new Checkpoint(dateStr, "125"));
  }
  // Updated ACLs carry their principals; a destroyed ACL
  // ("4501081f80000102") is pushed with completely empty principal sets.
  @Test
  public void testUpdateAclsPrincipals() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5", "User6");
    String aclId1 = "4501081f80000100";
    String aclId2 = "4501081f80000101";
    createAcl(aclId1);
    addAllowPermitToAcl(aclId1, "User1", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(aclId1, "User2", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(aclId1, "User3", IDfACL.DF_PERMIT_READ);
    createAcl(aclId2);
    addAllowPermitToAcl(aclId2, "User4", IDfACL.DF_PERMIT_WRITE);
    addAllowPermitToAcl(aclId2, "User5", IDfACL.DF_PERMIT_READ);
    addDenyPermitToAcl(aclId2, "User6", IDfACL.DF_PERMIT_READ);
    String dateStr = getNowPlusMinutes(5);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
    insertAclAudit("125", "4501081f80000102", "dm_destroy", dateStr);
    Map<DocId, Acl> aclMap = testUpdateAcls(Checkpoint.incremental(),
        new Checkpoint(dateStr, "125"));
    Acl acl1 = aclMap.get(new DocId(aclId1));
    assertUsers(acl1.getPermitUsers(), "User1", "User2");
    assertUsers(acl1.getDenyUsers(), "User3");
    Acl acl2 = aclMap.get(new DocId(aclId2));
    assertUsers(acl2.getPermitUsers(), "User4", "User5");
    assertUsers(acl2.getDenyUsers(), "User6");
    Acl acl3 = aclMap.get(new DocId("4501081f80000102"));
    assertTrue(acl3.getPermitUsers().toString(),
        acl3.getPermitUsers().isEmpty());
    assertTrue(acl3.getDenyUsers().toString(), acl3.getDenyUsers().isEmpty());
    assertTrue(acl3.getPermitGroups().toString(),
        acl3.getPermitGroups().isEmpty());
    assertTrue(acl3.getDenyGroups().toString(), acl3.getDenyGroups().isEmpty());
  }
  // Multiple audit events against the same ACL id collapse to a single
  // pushed DocId, while the checkpoint still advances to the last event.
  @Test
  public void testUpdateAclsWithSameObjectId() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    String dateStr = getNowPlusMinutes(6);
    insertAclAudit("123", "4501081f80000100", "dm_saveasnew", dateStr);
    insertAclAudit("124", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("125", "4501081f80000100", "dm_destroy", dateStr);
    testUpdateAcls(Checkpoint.incremental(),
        ImmutableSet.of(new DocId("4501081f80000100")),
        new Checkpoint(dateStr, "125"));
  }
  // Audit events older than the checkpoint are skipped: nothing is pushed
  // and the checkpoint is unchanged.
  @Test
  public void testPreviouslyUpdatedAcls() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    String dateStr = getNowPlusMinutes(-10);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
    insertAclAudit("125", "4501081f80000102", "dm_destroy", dateStr);
    Checkpoint checkpoint = new Checkpoint(getNowPlusMinutes(0), "0");
    testUpdateAcls(checkpoint, ImmutableSet.<DocId>of(), checkpoint);
  }
  // A second traversal resuming from the first run's checkpoint picks up
  // only the newer audit events.
  @Test
  public void testMultiUpdateAcls() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    createAcl("4501081f80000102");
    createAcl("4501081f80000103");
    String dateStr = getNowPlusMinutes(10);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
    insertAclAudit("125", "4501081f80000102", "dm_saveasnew", dateStr);
    Checkpoint firstCheckpoint = new Checkpoint(dateStr, "125");
    testUpdateAcls(Checkpoint.incremental(),
        ImmutableSet.of(
            new DocId("4501081f80000100"),
            new DocId("4501081f80000101"),
            new DocId("4501081f80000102")),
        firstCheckpoint);
    dateStr = getNowPlusMinutes(15);
    insertAclAudit("126", "4501081f80000103", "dm_saveasnew", dateStr);
    insertAclAudit("127", "4501081f80000104", "dm_destroy", dateStr);
    testUpdateAcls(firstCheckpoint,
        ImmutableSet.of(
            new DocId("4501081f80000103"),
            new DocId("4501081f80000104")),
        new Checkpoint(dateStr, "127"));
  }
  // A traversal resumed at the up-to-date checkpoint pushes nothing and
  // leaves the checkpoint in place.
  @Test
  public void testMultiUpdateAclsWithNoResults() throws Exception {
    createAcl("4501081f80000106");
    createAcl("4501081f80000107");
    String dateStr = getNowPlusMinutes(20);
    insertAclAudit("128", "4501081f80000106", "dm_saveasnew", dateStr);
    insertAclAudit("129", "4501081f80000107", "dm_saveasnew", dateStr);
    Checkpoint expectedCheckpoint = new Checkpoint(dateStr, "129");
    testUpdateAcls(Checkpoint.incremental(),
        ImmutableSet.of(
            new DocId("4501081f80000106"),
            new DocId("4501081f80000107")),
        expectedCheckpoint);
    testUpdateAcls(expectedCheckpoint, ImmutableSet.<DocId>of(),
        expectedCheckpoint);
  }
/*
* TODO(jlacey): A hack of sizeable proportions. To mimic an
* exception thrown from the loop in getUpdateAcls, we create an
* non-destroy audit event with no corresponding ACL. The mock
* getObject throws an AssertionError.
*/
  /**
   * Runs getUpdateAcls expecting it to fail mid-traversal (see the hack
   * note above this method), and returns the DocumentumAcls so callers
   * can inspect the checkpoint state after the failure.
   */
  private DocumentumAcls getUpdateAclsAndFail() throws Exception {
    H2BackedTestProxies proxyCls = new H2BackedTestProxies();
    IDfClientX dmClientX = proxyCls.getProxyClientX();
    IDfSessionManager dmSessionManager =
        dmClientX.getLocalClient().newSessionManager();
    IDfSession dmSession = dmSessionManager.getSession("testdocbase");
    DocumentumAcls dctmAcls = new DocumentumAcls(dmClientX, dmSession,
        new Principals(dmSession, "localNS", "globalNS", null),
        CaseSensitivityType.EVERYTHING_CASE_INSENSITIVE);
    Map<DocId, Acl> aclMap = new HashMap<>();
    try {
      dctmAcls.getUpdateAcls(Checkpoint.incremental(), aclMap);
      fail("Expected an AssertionError");
    } catch (AssertionError expected) {
    }
    return dctmAcls;
  }
  // If the very first row fails, the checkpoint must not advance.
  @Test
  public void testUpdateAclsFirstRowFailure() throws Exception {
    String dateStr = getNowPlusMinutes(5);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    DocumentumAcls dctmAcls = getUpdateAclsAndFail();
    assertEquals(Checkpoint.incremental(), dctmAcls.getCheckpoint());
  }
  // If the second row fails, the checkpoint reflects the first
  // (successfully processed) row.
  @Test
  public void testUpdateAclsSecondRowFailure() throws Exception {
    createAcl("4501081f80000100");
    String dateStr = getNowPlusMinutes(3);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "4501081f80000101", "dm_saveasnew",
        getNowPlusMinutes(5));
    DocumentumAcls dctmAcls = getUpdateAclsAndFail();
    assertEquals(new Checkpoint(dateStr, "123"), dctmAcls.getCheckpoint());
  }
  /**
   * Inserts a dm_audittrail row.
   *
   * @param date value for time_stamp_utc
   * @param id r_object_id of the audit event
   * @param eventName e.g. dm_save
   * @param attributeList audited attribute list, e.g. "acl_name="
   * @param auditObjId the audited object's id
   * @param chronicleId the audited object's chronicle id
   */
  private void insertAuditTrailEvent(String date, String id, String eventName,
      String attributeList, String auditObjId, String chronicleId)
      throws SQLException {
    executeUpdate(String.format(
        "insert into dm_audittrail(time_stamp_utc, r_object_id, event_name, "
            + "attribute_list, audited_obj_id, chronicle_id) "
            + "values({ts '%s'},'%s', '%s', '%s', '%s', '%s')", date, id,
        eventName, attributeList, auditObjId, chronicleId));
  }
  /**
   * Inserts a dm_save audit event recording an ACL change on the object;
   * the chronicle id defaults to the object id itself.
   */
  private void insertAuditTrailAclEvent(String date, String id,
      String auditObjId) throws SQLException {
    insertAuditTrailEvent(date, id, "dm_save", "acl_name=", auditObjId,
        auditObjId);
  }
  /**
   * Inserts a dm_save audit event recording an ACL change on the object,
   * with an explicit chronicle id (for version-chain scenarios).
   */
  private void insertAuditTrailAclEvent(String date, String id,
      String auditObjId, String chronicleId) throws SQLException {
    insertAuditTrailEvent(date, id, "dm_save", "acl_name=", auditObjId,
        chronicleId);
  }
  /**
   * Seeds the adaptor's document and permissions checkpoints, runs an
   * incremental traversal, and asserts both the pushed records and the
   * resulting permissions checkpoint.
   */
  private void testUpdatedPermissions(Checkpoint docCheckpoint,
      Checkpoint permissionsCheckpoint, List<Record> expectedDocIdlist,
      Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest();
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.modifiedDocumentsCheckpoint = docCheckpoint;
    adaptor.modifiedPermissionsCheckpoint = permissionsCheckpoint;
    adaptor.getModifiedDocIds(pusher);
    assertEquals(expectedDocIdlist, pusher.getRecords());
    assertEquals(expectedCheckpoint, adaptor.modifiedPermissionsCheckpoint);
  }
  /**
   * Inserts a folder under START_PATH with three files (ids 09514-09516),
   * timestamped 5 minutes in the past so the document traverser skips
   * them, and returns a checkpoint at that time/folder.
   */
  private Checkpoint insertTestDocuments() throws SQLException {
    String folderId = "0bd29";
    String folder = START_PATH;
    // To skip doc updates, set time for document creation 5 min earlier.
    String dateStr = getNowPlusMinutes(-5);
    insertFolder(dateStr, folderId, folder);
    insertDocument(dateStr, "09514", folder + "/file1", folderId);
    insertDocument(dateStr, "09515", folder + "/file2", folderId);
    insertDocument(dateStr, "09516", folder + "/file3", folderId);
    return new Checkpoint(dateStr, folderId);
  }
  // ACL-change audit events cause the affected documents to be re-pushed;
  // the permissions checkpoint advances to the last audit row.
  @Test
  public void testUpdatedPermissions() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516");
    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1", "file2", "file3"),
        new Checkpoint(dateStr, "5f125"));
  }
  // Starting from a checkpoint at the first audit event, only the later
  // events' documents (file2, file3) are pushed.
  @Test
  public void testUpdatedPermissions_ModifiedCheckpoint() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516");
    testUpdatedPermissions(docCheckpoint, new Checkpoint(dateStr, "5f123"),
        makeExpectedDocIds(START_PATH, "file2", "file3"),
        new Checkpoint(dateStr, "5f125"));
  }
  // Repeated ACL events for the same document (09514) are deduplicated to
  // a single push of file1.
  @Test
  public void testUpdatedPermissions_MultipleUpdates() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(getNowPlusMinutes(3), "5f123", "09514");
    insertAuditTrailAclEvent(getNowPlusMinutes(4), "5f124", "09514");
    insertAuditTrailAclEvent(getNowPlusMinutes(5), "5f125", "09514");
    insertAuditTrailAclEvent(getNowPlusMinutes(5), "5f126", "09515");
    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1", "file2"),
        new Checkpoint(dateStr, "5f126"));
  }
  // Verifies that events sharing a chronicle id ("09234") are collapsed to a
  // single pushed document, while the checkpoint still advances to the last
  // event id.
  @Test
  public void testUpdatedPermissions_SameChronicleId() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();

    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514", "09234");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515", "09234");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516", "09234");

    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1"),
        new Checkpoint(dateStr, "5f125"));
  }
  // Verifies that a checkpoint already at the newest event ("5f125") yields no
  // pushed documents and leaves the checkpoint unchanged.
  @Test
  public void testUpdatedPermissions_EmptyResults() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();

    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516");

    testUpdatedPermissions(docCheckpoint, new Checkpoint(dateStr, "5f125"),
        makeExpectedDocIds(START_PATH),
        new Checkpoint(dateStr, "5f125"));
  }
  // Verifies that one ACL event on a document linked into three folders pushes
  // one DocId per folder path.
  @Test
  public void testUpdatedPermissions_MultiplePaths() throws Exception {
    // To skip doc updates, set time for document creation 5 min earlier.
    String min5back = getNowPlusMinutes(-5);
    insertFolder(min5back, "0bd30", START_PATH + "/folder1");
    insertFolder(min5back, "0bd31", START_PATH + "/folder2");
    insertFolder(min5back, "0bd32", START_PATH + "/folder/folder3");
    insertSysObject(min5back, "09514", "file1", START_PATH + "/folder1/file1,"
        + START_PATH + "/folder2/file1," + START_PATH + "/folder/folder3/file1",
        "dm_document", "0bd30", "0bd31", "0bd32");

    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");

    testUpdatedPermissions(new Checkpoint(min5back, "0bd32"),
        Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "folder1/file1", "folder2/file1",
            "folder/folder3/file1"), new Checkpoint(dateStr, "5f123"));
  }
  // Verifies that paths outside the configured start path ("/Folder2/...")
  // are excluded from the pushed DocIds.
  @Test
  public void testUpdatedPermissions_InvalidStartPath() throws Exception {
    // To skip doc updates, set time for document creation 5 min earlier.
    String min5back = getNowPlusMinutes(-5);
    insertFolder(min5back, "0bd30", START_PATH + "/folder1");
    insertFolder(min5back, "0bd31", START_PATH + "/folder2");
    insertFolder(min5back, "0bd32", "/Folder2/folder3");
    insertSysObject(min5back, "09514", "file1", START_PATH + "/folder1/file1,"
        + START_PATH + "/folder2/file1," + "/Folder2/folder3/file1",
        "dm_document", "0bd30", "0bd31", "0bd32");

    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");

    testUpdatedPermissions(new Checkpoint(min5back, "0bd32"),
        Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "folder1/file1", "folder2/file1"),
        new Checkpoint(dateStr, "5f123"));
  }
  // Verifies that a document both modified and with a new ACL event is pushed
  // by both traversals (hence "file1" appears twice in the expected list).
  @Test
  public void testUpdatedPermissions_DocAndPermissions() throws Exception {
    Checkpoint docCheckpoint = new Checkpoint(getNowPlusMinutes(-5), "5f125");
    String dateStr = getNowPlusMinutes(5);
    String folderId = "0bd29";
    String folder = START_PATH;
    insertFolder(getNowPlusMinutes(-5), folderId, folder);
    insertSysObject(dateStr, "09514", "file1", START_PATH + "/file1",
        "dm_document", "0bd29");
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");

    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1", "file1"),
        new Checkpoint(dateStr, "5f123"));
  }
  // Verifies that only dm_save events with an acl_name attribute count as
  // permission changes; dm_link and non-ACL dm_save events are ignored.
  @Test
  public void testUpdatedPermissions_AclNonAclEvents() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();

    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailEvent(dateStr, "5f123", "dm_save", "acl_name=",
        "09514", "09514");
    insertAuditTrailEvent(dateStr, "5f124", "dm_link", "acl_name=",
        "09515", "09515");
    insertAuditTrailEvent(dateStr, "5f125", "dm_save", "object_name=",
        "09516", "09516");
    insertAuditTrailEvent(dateStr, "5f126", "dm_link", "object_name=",
        "09517", "09517");

    Checkpoint checkPoint = Checkpoint.incremental();

    testUpdatedPermissions(docCheckpoint, checkPoint,
        makeExpectedDocIds(START_PATH, "file1"),
        new Checkpoint(dateStr, "5f123"));
  }
  // Exercises Checkpoint construction, accessors, and the equals contract
  // (reflexive, value-based, null-safe, unequal on differing fields).
  @Test
  public void testCheckpoint() throws Exception {
    Checkpoint checkpoint = Checkpoint.incremental();
    assertEquals("0", checkpoint.getObjectId());
    assertNotNull(checkpoint.getLastModified());
    assertTrue(checkpoint.equals(checkpoint));

    checkpoint = new Checkpoint("foo", "bar");
    assertEquals("foo", checkpoint.getLastModified());
    assertEquals("bar", checkpoint.getObjectId());
    assertTrue(checkpoint.equals(checkpoint));
    assertTrue(checkpoint.equals(new Checkpoint("foo", "bar")));
    assertFalse(checkpoint.equals(null));
    assertFalse(checkpoint.equals(Checkpoint.incremental()));
    assertFalse(checkpoint.equals(new Checkpoint("foo", "xyzzy")));
  }
  /** Fetches all groups with default options: global groups, no domain. */
  private Map<GroupPrincipal, ? extends Collection<Principal>> getGroups()
      throws Exception {
    return getGroups(LocalGroupsOnly.FALSE, "");
  }
  /**
   * Runs a full getDocIds traversal with the given group options and returns
   * the groups the adaptor pushed. Uses "NS" as the adaptor namespace and
   * "Local" as the docbase, so local groups land in namespace "NS_Local".
   */
  private Map<GroupPrincipal, ? extends Collection<Principal>> getGroups(
      LocalGroupsOnly localGroupsOnly, String windowsDomain)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.<String, String>builder()
        .put("documentum.pushLocalGroupsOnly", localGroupsOnly.toString())
        .put("documentum.windowsDomain", windowsDomain)
        .put("adaptor.namespace", "NS")
        .put("documentum.docbaseName", "Local") // Local Namespace
        .build());
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.getDocIds(pusher);
    return pusher.getGroups();
  }
  /* Filters the 'dm_world' group out of the map of groups. */
  private <T> Map<GroupPrincipal, T> filterDmWorld(Map<GroupPrincipal, T> map) {
    // Returns a live filtered view (Guava Maps.filterKeys), not a copy.
    return Maps.filterKeys(map, new Predicate<GroupPrincipal>() {
        public boolean apply(GroupPrincipal principal) {
          return !"dm_world".equals(principal.getName());
        }
      });
  }
  // With no groups defined, only the virtual dm_world group (all users) is
  // pushed, in the local namespace.
  @Test
  public void testGetGroupsDmWorldOnly() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");

    // The only group should be the virtual group, dm_world, which consists
    // of all users.
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("dm_world", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));

    assertEquals(expected, getGroups());
  }
  // Two groups with only user members are pushed with those users.
  @Test
  public void testGetGroupsUserMembersOnly() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "User3", "User4", "User5");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User3", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // Members that do not exist as dm_user rows (User2, User4) are dropped from
  // the pushed group memberships.
  @Test
  public void testGetGroupsInvalidMembers() throws Exception {
    insertUsers("User1", "User3", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "User3", "User4", "User5");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User3", "NS"),
                new UserPrincipal("User5", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // Disabled users are dropped from memberships, and a disabled group
  // (Group2) is not pushed at all.
  @Test
  public void testGetGroupsDisabledMembers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "User3", "User4", "User5");
    insertGroup("Group3", "User5", "User6", "User7");
    disableUsers("User2", "User4", "User6", "Group2");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group3", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User5", "NS"),
                new UserPrincipal("User7", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // A group with no members is still pushed, with an empty member set.
  @Test
  public void testGetGroupsEmptyGroup() throws Exception {
    insertUsers("User1", "User3", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.<Principal>of());

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // A group member of another group appears as a GroupPrincipal member,
  // not expanded into its users.
  @Test
  public void testGetGroupsUserAndGroupMembers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
           ImmutableSet.of(new UserPrincipal("User1", "NS"),
               new UserPrincipal("User2", "NS"),
               new UserPrincipal("User3", "NS")),
           new GroupPrincipal("Group2", "NS_Local"),
           ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
               new UserPrincipal("User4", "NS"),
               new UserPrincipal("User5", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // Member principals use user_login_name ("UserTres"), not user_name
  // ("User3"), when the two differ.
  @Test
  public void testGetGroupsDifferentMemberLoginName() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate("insert into dm_user(user_name, user_login_name) "
        + "values('User3', 'UserTres')");
    insertGroup("Group1", "User1", "User2", "User3");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("UserTres", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // The pushed group principal uses the group's user_login_name ("GroupUno"),
  // not its group_name ("Group1"), when the two differ.
  @Test
  public void testGetGroupsDifferentGroupLoginName() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate(
        "insert into dm_user(user_name, user_login_name, r_is_group) "
        + "values('Group1', 'GroupUno', TRUE)");
    executeUpdate("insert into dm_group(group_name, users_names) "
        + "values ('Group1', 'User1'), ('Group1', 'User2')");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("GroupUno", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // An LDAP member's DN ("cn=User3,dc=test,dc=com") is mapped to the
  // domain-qualified name "test\User3".
  @Test
  public void testGetGroupsMemberLdapDn() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate("insert into dm_user(user_name, user_login_name, "
        + "user_source, user_ldap_dn, r_is_group) values('User3', 'User3', "
        + "'LDAP', 'cn=User3,dc=test,dc=com', TRUE)");
    insertGroup("Group1", "User1", "User2", "User3");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("test\\User3", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // An LDAP-sourced group is pushed as domain-qualified "test\Group1" in the
  // global namespace "NS" rather than the local "NS_Local".
  @Test
  public void testGetGroupsGroupLdapDn() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate("insert into dm_user(user_name, user_login_name, "
        + "user_source, user_ldap_dn) values('Group1', 'Group1', 'LDAP', "
        + "'cn=Group1,dc=test,dc=com')");
    executeUpdate("insert into dm_group(group_name, group_source, users_names) "
        + "values ('Group1', 'LDAP', 'User1'), ('Group1', 'LDAP', 'User2')");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected =
            ImmutableMap.of(new GroupPrincipal("test\\Group1", "NS"),
               ImmutableSet.of(new UserPrincipal("User1", "NS"),
                   new UserPrincipal("User2", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // With a windowsDomain configured, user principals are prefixed with the
  // domain ("TEST\") but group principals are not.
  @Test
  public void testGetGroupsWindowsDomainUsers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("TEST\\User1", "NS"),
                new UserPrincipal("TEST\\User2", "NS"),
                new UserPrincipal("TEST\\User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
                new UserPrincipal("TEST\\User4", "NS"),
                new UserPrincipal("TEST\\User5", "NS")));

    Map<GroupPrincipal, ? extends Collection<Principal>> groups =
        getGroups(LocalGroupsOnly.FALSE, "TEST");

    assertEquals(expected, filterDmWorld(groups));
  }
  // Local groups use the local namespace ("NS_Local"); LDAP groups use the
  // global namespace ("NS").
  @Test
  public void testGetGroupsLocalAndGlobalGroups() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertLdapGroup("Group2", "User3", "User4", "User5");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS"),
            ImmutableSet.of(new UserPrincipal("User3", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // An LDAP group nested in a local group is referenced with its global
  // namespace ("NS") inside the local group's member set.
  @Test
  public void testGetGroupsGlobalGroupMembers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertLdapGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new GroupPrincipal("Group1", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));

    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // With pushLocalGroupsOnly, the LDAP group itself is not pushed, but it may
  // still appear as a member of a pushed local group.
  @Test
  public void testGetGroupsLocalGroupsOnly() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertLdapGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new GroupPrincipal("Group1", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));

    Map<GroupPrincipal, ? extends Collection<Principal>> groups =
        getGroups(LocalGroupsOnly.TRUE, "");

    assertEquals(expected, filterDmWorld(groups));
  }
  /** Inserts a local (empty group_source) group with the given modify time. */
  private void insertModifiedGroup(String lastModified, String groupName,
      String... members) throws SQLException {
    insertGroupEx(lastModified, "", groupName, members);
  }
  /**
   * Runs an incremental (getModifiedDocIds) traversal from the given group
   * checkpoint and asserts both the groups pushed and the traverser's final
   * checkpoint.
   */
  private void checkModifiedGroupsPushed(LocalGroupsOnly localGroupsOnly,
      Checkpoint checkpoint,
      Map<GroupPrincipal, ? extends Collection<? extends Principal>>
      expectedGroups, Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.<String, String>builder()
        .put("documentum.pushLocalGroupsOnly", localGroupsOnly.toString())
        .put("adaptor.namespace", "NS")
        .put("documentum.docbaseName", "Local") // Local Namespace
        .build());
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();

    adaptor.modifiedGroupTraverser.setCheckpoint(checkpoint);
    adaptor.getModifiedDocIds(pusher);

    assertEquals(expectedGroups, pusher.getGroups());
    assertEquals(expectedCheckpoint,
        adaptor.modifiedGroupTraverser.getCheckpoint());
  }
  // The virtual dm_world group is never pushed during incremental updates,
  // and the checkpoint is unchanged when nothing is pushed.
  @Test
  public void testGetGroupUpdatesNoDmWorld() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");

    // The virtual group, dm_world, should not be pushed for updates.
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.<GroupPrincipal, Collection<Principal>>of();
    Checkpoint checkpoint = Checkpoint.incremental();

    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE, checkpoint, expected,
        checkpoint);
  }
  // Groups modified after the checkpoint date are all pushed; the final
  // checkpoint points at the most recently modified group ("12Group1").
  @Test
  public void testGetGroupUpdatesAllNew() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertModifiedGroup(MAR_1970, "Group1", "User1");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));

    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE,
        new Checkpoint(JAN_1970, "0"), expected,
        new Checkpoint(MAR_1970, "12Group1"));
  }
  // A group at the checkpoint ("12Group0" / JAN_1970) is skipped; only the
  // two groups modified later are pushed.
  @Test
  public void testGetGroupUpdatesSomeNew() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(JAN_1970, "Group0", "User2");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertModifiedGroup(MAR_1970, "Group1", "User1");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));

    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE,
        new Checkpoint(JAN_1970, "12Group0"), expected,
        new Checkpoint(MAR_1970, "12Group1"));
  }
  // A checkpoint already at the newest group yields no pushed groups and an
  // unchanged checkpoint.
  @Test
  public void testGetGroupUpdatesNoneNew() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertModifiedGroup(MAR_1970, "Group1", "User1");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.<GroupPrincipal, Collection<Principal>>of();
    Checkpoint checkpoint = new Checkpoint(MAR_1970, "12Group1");

    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE, checkpoint, expected,
        checkpoint);
  }
  // Incremental updates include LDAP groups (pushed in the global "NS"
  // namespace) when pushLocalGroupsOnly is FALSE.
  @Test
  public void testGetGroupUpdatesSomeLdapGroups() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(JAN_1970, "Group1", "User1");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertGroupEx(MAR_1970, "LDAP", "GroupLDAP", "User2");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")),
            new GroupPrincipal("GroupLDAP", "NS"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));

    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE,
        new Checkpoint(JAN_1970, "12Group1"), expected,
        new Checkpoint(MAR_1970, "12GroupLDAP"));
  }
  // With pushLocalGroupsOnly, the LDAP group is skipped and the checkpoint
  // stops at the last local group ("12Group2" / FEB_1970).
  @Test
  public void testGetGroupUpdatesLocalGroupsOnly() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(JAN_1970, "Group1", "User1");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertGroupEx(MAR_1970, "LDAP", "GroupLDAP", "User2");

    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));

    checkModifiedGroupsPushed(LocalGroupsOnly.TRUE,
        new Checkpoint(JAN_1970, "12Group1"), expected,
        new Checkpoint(FEB_1970, "12Group2"));
  }
  /**
   * Builds a list of expected DocId Records that the Pusher should receive.
   *
   * @param folderPath the full path to a folder.
   * @param objectNames objects within that folder that should be added to the
   *    expected list. If the folderPath itself is included among the object
   *    names, the folder itself is included in the expected results.
   */
  private List<Record> makeExpectedDocIds(String folderPath,
      String... objectNames) {
    ImmutableList.Builder<Record> builder = ImmutableList.builder();
    for (String name : objectNames) {
      // A name equal to the folder path stands for the folder itself.
      if (name.equals(folderPath)) {
        name = null;
      }
      DocId docid = DocumentumAdaptor.docIdFromPath(folderPath, name);
      builder.add(
          new Record.Builder(docid).setCrawlImmediately(true).build());
    }
    return builder.build();
  }
/** Convenience method to assemble a list of start paths for readability. */
private List<String> startPaths(String... paths) {
return ImmutableList.copyOf(paths);
}
  /**
   * Runs an incremental (getModifiedDocIds) traversal over the given start
   * paths from the given document checkpoint, and asserts both the pushed
   * records and the adaptor's final document checkpoint.
   */
  private void checkModifiedDocIdsPushed(List<String> startPaths,
      Checkpoint checkpoint, List<Record> expectedRecords,
      Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.of("documentum.src", Joiner.on(",").join(startPaths)));

    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.modifiedDocumentsCheckpoint = checkpoint;
    adaptor.getModifiedDocIds(pusher);

    assertEquals(expectedRecords, pusher.getRecords());
    assertEquals(expectedCheckpoint, adaptor.modifiedDocumentsCheckpoint);
  }
  // An empty folder yields no records and an unchanged checkpoint.
  @Test
  public void testNoDocuments() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);

    Checkpoint startCheckpoint = Checkpoint.incremental();
    checkModifiedDocIdsPushed(startPaths(folder), startCheckpoint,
        ImmutableList.<Record>of(), startCheckpoint);
  }
  // Documents older than "now" (JAN_1970) are not treated as modified.
  @Test
  public void testNoModifiedDocuments() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(JAN_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(JAN_1970, "0b01081f80001002", folder + "/bar", folderId);

    Checkpoint startCheckpoint = Checkpoint.incremental();
    checkModifiedDocIdsPushed(startPaths(folder), startCheckpoint,
        ImmutableList.<Record>of(), startCheckpoint);
  }
  // With a checkpoint object id of "0", the folder modified at the checkpoint
  // time is still pushed along with its newer children.
  @Test
  public void testModifiedDocumentsNoCheckpointObjId() throws Exception {
    String parentId = "0b001";
    String parentFolder = "/Folder1";
    insertFolder(EPOCH_1970, parentId, parentFolder);
    String folderId = "0b002";
    String folder = "/Folder1/Folder2";
    insertFolder(JAN_1970, folderId, folder);
    setParentFolderId(folderId, parentId);
    insertDocument(FEB_1970, "09001", folder + "/foo", folderId);
    insertDocument(FEB_1970, "09002", folder + "/bar", folderId);

    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0"),
        makeExpectedDocIds(folder, folder, "foo", "bar"),
        new Checkpoint(FEB_1970, "09002"));
  }
  // Of two documents sharing the checkpoint's timestamp, the one matching the
  // checkpoint object id is skipped and the other is pushed.
  @Test
  public void testModifiedDocumentsSameCheckpointTime() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(JAN_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(JAN_1970, "0b01081f80001002", folder + "/bar", folderId);
    insertDocument(FEB_1970, "0b01081f80001003", folder + "/baz", folderId);

    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0b01081f80001001"),
        makeExpectedDocIds(folder, "bar", "baz"),
        new Checkpoint(FEB_1970, "0b01081f80001003"));
  }
  // Documents with modify dates strictly after the checkpoint are pushed in
  // date order; the checkpoint advances to the newest.
  @Test
  public void testModifiedDocumentsNewerModifyDate() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(JAN_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(FEB_1970, "0b01081f80001002", folder + "/bar", folderId);
    insertDocument(MAR_1970, "0b01081f80001003", folder + "/baz", folderId);

    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0b01081f80001001"),
        makeExpectedDocIds(folder, "bar", "baz"),
        new Checkpoint(MAR_1970, "0b01081f80001003"));
  }
  // A modified folder with no modified children is pushed as itself.
  @Test
  public void testModifiedFolder() throws Exception {
    String parentId = "0b001";
    String parentFolder = "/Folder1";
    insertFolder(EPOCH_1970, parentId, parentFolder);
    String folderId = "0b002";
    String folder = "/Folder1/Folder2";
    insertFolder(FEB_1970, folderId, folder);
    setParentFolderId(folderId, parentId);

    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0b003"),
        makeExpectedDocIds(folder, folder),
        new Checkpoint(FEB_1970, folderId));
  }
  // When the folder's modify date is as new as its newest child, the folder
  // is pushed after the children and the checkpoint ends at the folder id.
  @Test
  public void testModifiedFolderNewerThanChildren() throws Exception {
    String parentId = "0b001";
    String parentFolder = "/Folder1";
    insertFolder(EPOCH_1970, parentId, parentFolder);
    String folderId = "0b002";
    String folder = "/Folder1/Folder2";
    insertFolder(MAR_1970, folderId, folder);
    setParentFolderId(folderId, parentId);
    insertDocument(JAN_1970, "09001", folder + "/foo", folderId);
    insertDocument(FEB_1970, "09002", folder + "/bar", folderId);
    insertDocument(MAR_1970, "09003", folder + "/baz", folderId);

    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "09001"),
        makeExpectedDocIds(folder, "bar", "baz", folder),
        new Checkpoint(MAR_1970, "0b002"));
  }
  // A modified document under a folder outside the start paths is not pushed.
  @Test
  public void testModifiedDocumentsOutsideStartPath() throws Exception {
    String folder1Id = "0b01081f80001000";
    String folder1 = "/Folder1";
    insertFolder(JAN_1970, folder1Id, folder1);
    insertDocument(FEB_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
    insertDocument(FEB_1970, "0b01081f80001002", folder1 + "/bar", folder1Id);
    String folder2Id = "0b01081f80002000";
    String folder2 = "/Folder2";
    insertFolder(JAN_1970, folder2Id, folder2);
    insertDocument(FEB_1970, "0b01081f80002001", folder2 + "/baz", folder2Id);

    checkModifiedDocIdsPushed(startPaths(folder1),
        new Checkpoint(JAN_1970, folder1Id),
        makeExpectedDocIds(folder1, "foo", "bar"),
        new Checkpoint(FEB_1970, "0b01081f80001002"));
  }
  // A document linked under two folders, only one inside the start path, is
  // pushed once, under the in-path folder.
  @Test
  public void testModifiedDocumentsOneParentOutsideStartPath()
      throws Exception {
    String folder1Id = "0b01081f80001000";
    String folder1 = "/Folder1";
    insertFolder(JAN_1970, folder1Id, folder1);
    String folder2Id = "0b01081f80002000";
    String folder2 = "/Folder2";
    insertFolder(JAN_1970, folder2Id, folder2);
    insertDocument(FEB_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
    insertDocument(FEB_1970, "0b01081f80001002", folder1 + "/bar", folder1Id,
        folder2Id);

    checkModifiedDocIdsPushed(startPaths(folder1),
        new Checkpoint(JAN_1970, folder1Id),
        makeExpectedDocIds(folder1, "foo", "bar"),
        new Checkpoint(FEB_1970, "0b01081f80001002"));
  }
  // A document linked under two in-path folders is pushed once per parent
  // folder path.
  @Test
  public void testModifiedDocumentsMultipleParentsInStartPaths()
      throws Exception {
    String folder1Id = "0b01081f80001000";
    String folder1 = "/Folder1";
    insertFolder(JAN_1970, folder1Id, folder1);
    String folder2Id = "0b01081f80002000";
    String folder2 = "/Folder2";
    insertFolder(JAN_1970, folder2Id, folder2);
    insertDocument(FEB_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
    insertDocument(FEB_1970, "0b01081f80001002", folder1 + "/bar", folder1Id,
        folder2Id);

    checkModifiedDocIdsPushed(startPaths(folder1, folder2),
        new Checkpoint(FEB_1970, folder1Id),
        new ImmutableList.Builder<Record>()
           .addAll(makeExpectedDocIds(folder1, "foo", "bar"))
           .addAll(makeExpectedDocIds(folder2, "bar"))
           .build(),
        new Checkpoint(FEB_1970, "0b01081f80001002"));
  }
  // Modified documents are collected across multiple configured start paths.
  @Test
  public void testModifiedDocumentsMultipleStartPaths() throws Exception {
    String folder1Id = "0b01081f80001000";
    String folder1 = "/Folder1";
    insertFolder(JAN_1970, folder1Id, folder1);
    insertDocument(MAR_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
    insertDocument(MAR_1970, "0b01081f80001002", folder1 + "/bar", folder1Id);
    String folder2Id = "0b01081f80002000";
    String folder2 = "/Folder2";
    insertFolder(JAN_1970, folder2Id, folder2);
    insertDocument(MAR_1970, "0b01081f80002001", folder2 + "/baz", folder2Id);

    checkModifiedDocIdsPushed(startPaths(folder1, folder2),
        new Checkpoint(FEB_1970, folder1Id),
        new ImmutableList.Builder<Record>()
            .addAll(makeExpectedDocIds(folder1, "foo", "bar"))
            .addAll(makeExpectedDocIds(folder2, "baz"))
            .build(),
        new Checkpoint(MAR_1970, "0b01081f80002001"));
  }
  // Modified documents in a subfolder of the start path are also pushed.
  @Test
  public void testModifiedDocumentsInSubfolder() throws Exception {
    String folder1Id = "0b01081f80001000";
    String folder1 = "/Folder1";
    insertFolder(JAN_1970, folder1Id, folder1);
    insertDocument(MAR_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
    insertDocument(MAR_1970, "0b01081f80001002", folder1 + "/bar", folder1Id);
    String folder2Id = "0b01081f80002000";
    String folder2 = "/Folder1/Folder2";
    insertFolder(JAN_1970, folder2Id, folder2);
    insertDocument(MAR_1970, "0b01081f80002001", folder2 + "/baz", folder2Id);

    checkModifiedDocIdsPushed(startPaths(folder1),
        new Checkpoint(FEB_1970, folder1Id),
        new ImmutableList.Builder<Record>()
            .addAll(makeExpectedDocIds(folder1, "foo", "bar"))
            .addAll(makeExpectedDocIds(folder2, "baz"))
            .build(),
        new Checkpoint(MAR_1970, "0b01081f80002001"));
  }
  // Objects of a type other than dm_document/dm_folder ("dm_other") are
  // excluded from the modified-documents traversal.
  @Test
  public void testModifiedDocumentsNotDocumentOrFolder() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(FEB_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(MAR_1970, "0b01081f80001002", folder + "/bar", folderId);
    insertSysObject(MAR_1970, "0b01081f80001003", "baz", folder + "/baz",
        "dm_other", folderId);

    // NOTE(review): the checkpoint object id here is the folder *path*, not
    // an r_object_id as in the sibling tests — confirm this is intentional.
    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(FEB_1970, folder),
        makeExpectedDocIds(folder, "foo", "bar"),
        new Checkpoint(MAR_1970, "0b01081f80001002"));
  }
  // A folder whose object type is a subtype of dm_folder is still traversed
  // and pushed like a plain folder.
  @Test
  public void testModifiedDocumentsWithFolderSubtype() throws Exception {
    String parentId = "0b001";
    String parentFolder = "/Folder1";
    insertFolder(EPOCH_1970, parentId, parentFolder);
    String folderId = "0b002";
    String folder = "/Folder1/Folder2";
    // Insert the dm_folder row directly so the sysobject row below can carry
    // the subtype name.
    executeUpdate(String.format(
        "insert into dm_folder(r_object_id, r_folder_path) values('%s', '%s')",
        folderId, folder));
    insertSysObject(FEB_1970, folderId, "Folder2", folder, "dm_folder_subtype",
        parentId);
    insertDocument(FEB_1970, "09001", folder + "/foo", folderId);
    insertDocument(MAR_1970, "09002", folder + "/bar", folderId);

    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, folderId),
        makeExpectedDocIds(folder, "foo", folder, "bar"),
        new Checkpoint(MAR_1970, "09002"));
  }
  // A sysobject whose type is a subtype of dm_document is pushed along with
  // plain dm_document objects.
  @Test
  public void testModifiedDocumentsWithDocumentSubtype() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(FEB_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(MAR_1970, "0b01081f80001002", folder + "/bar", folderId);
    insertSysObject(MAR_1970, "0b01081f80001003", "baz", folder + "/baz",
        "dm_document_subtype", folderId);

    // NOTE(review): the checkpoint object id here is the folder *path*, not
    // an r_object_id as in the sibling tests — confirm this is intentional.
    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(FEB_1970, folder),
        makeExpectedDocIds(folder, "foo", "bar", "baz"),
        new Checkpoint(MAR_1970, "0b01081f80001003"));
  }
  /**
   * Initializes the adaptor with the given documentum.documentTypes config
   * value (comma-joined).
   */
  private void initValidDocumentTypes(DocumentumAdaptor adaptor,
      String... types) throws DfException {
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    config.overrideKey("documentum.documentTypes", Joiner.on(',').join(types));
    adaptor.init(context);
  }
  // Both configured dm_document types validate and are retained in order.
  @Test
  public void testValidateDocumentTypes() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());

    String type1 = "dm_document";
    String type2 = "dm_document_subtype";

    initValidDocumentTypes(adaptor, type1, type2);
    assertEquals(ImmutableList.of(type1, type2),
        adaptor.getValidatedDocumentTypes());
  }
  // Unknown and folder types are filtered out; only document types survive.
  @Test
  public void testValidateDocumentTypesSomeValid() throws DfException {
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());

    String type1 = "dm_document_subtype";
    String type2 = "dm_my_type";
    String type3 = "dm_document";
    String type4 = "dm_folder";
    String type5 = "dm_folder_subtype";

    initValidDocumentTypes(adaptor, type1, type2, type3, type4, type5);
    assertEquals(ImmutableList.of(type1, type3),
        adaptor.getValidatedDocumentTypes());
  }
@Test
public void testValidateDocumentSysobjectSubtype() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type = "dm_sysobject_subtype";
initValidDocumentTypes(adaptor, type);
assertEquals(ImmutableList.of(type),
adaptor.getValidatedDocumentTypes());
}
@Test
public void testValidateDocumentTypesNoneValid() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type1 = "dm_some_type";
String type2 = "dm_my_type";
String type3 = "dm_any_type";
initValidDocumentTypes(adaptor, type1, type2, type3);
assertTrue(adaptor.getValidatedDocumentTypes().isEmpty());
}
@Test(expected = InvalidConfigurationException.class)
public void testValidateDocumentTypesEmpty() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type1 = "";
initValidDocumentTypes(adaptor, type1);
}
private void checkTypedDocIdsPushed(List<String> startPaths, String docTypes,
Checkpoint checkpoint, List<Record> expectedRecords)
throws DfException, IOException, InterruptedException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.src", Joiner.on(",").join(startPaths));
config.overrideKey("documentum.documentTypes", docTypes);
adaptor.init(context);
AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
adaptor.modifiedDocumentsCheckpoint = checkpoint;
adaptor.getModifiedDocIds(pusher);
assertEquals(expectedRecords, pusher.getRecords());
}
private void testCustomType(String docTypes, String... expect)
throws Exception {
String folderId = "0b001";
String folder = "/Folder1";
insertFolder(JAN_1970, folderId, folder);
insertSysObject(MAR_1970, "09001", "foo", folder + "/foo",
"dm_document", folderId);
insertSysObject(MAR_1970, "09002", "bar", folder + "/bar",
"dm_document_subtype", folderId);
insertSysObject(MAR_1970, "09003", "baz", folder + "/baz",
"dm_sysobject_subtype", folderId);
checkTypedDocIdsPushed(startPaths(folder),
docTypes,
new Checkpoint(FEB_1970, folder),
makeExpectedDocIds(folder, expect));
}
@Test
public void testCustomType_all() throws Exception {
testCustomType("dm_document, dm_document_subtype, dm_sysobject_subtype",
"foo", "bar", "baz");
}
@Test
public void testCustomType_skip() throws Exception {
testCustomType("dm_document, dm_document_subtype", "foo", "bar");
}
@Test
public void testCustomType_NonSysobject() throws Exception {
String folderId = "0b001";
String folder = "/Folder1";
insertFolder(JAN_1970, folderId, folder);
insertSysObject(MAR_1970, "09001", "foo", folder + "/foo",
"dm_document", folderId);
insertSysObject(MAR_1970, "09002", "bar", folder + "/bar",
"dm_store", folderId);
checkTypedDocIdsPushed(startPaths(folder),
"dm_document, dm_store",
new Checkpoint(FEB_1970, folder),
makeExpectedDocIds(folder, "foo"));
}
}
// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.adaptor.documentum;
import static com.google.enterprise.adaptor.documentum.JdbcFixture.dropAllObjects;
import static com.google.enterprise.adaptor.documentum.JdbcFixture.executeUpdate;
import static com.google.enterprise.adaptor.documentum.JdbcFixture.getConnection;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;
import com.google.common.collect.UnmodifiableIterator;
import com.google.enterprise.adaptor.Acl;
import com.google.enterprise.adaptor.Acl.InheritanceType;
import com.google.enterprise.adaptor.AdaptorContext;
import com.google.enterprise.adaptor.Config;
import com.google.enterprise.adaptor.DocId;
import com.google.enterprise.adaptor.DocIdEncoder;
import com.google.enterprise.adaptor.DocIdPusher;
import com.google.enterprise.adaptor.DocIdPusher.Record;
import com.google.enterprise.adaptor.GroupPrincipal;
import com.google.enterprise.adaptor.InvalidConfigurationException;
import com.google.enterprise.adaptor.Principal;
import com.google.enterprise.adaptor.Request;
import com.google.enterprise.adaptor.UserPrincipal;
import com.google.enterprise.adaptor.documentum.DocumentumAdaptor.CaseSensitivityType;
import com.google.enterprise.adaptor.documentum.DocumentumAdaptor.Checkpoint;
import com.documentum.com.IDfClientX;
import com.documentum.fc.client.DfPermit;
import com.documentum.fc.client.IDfACL;
import com.documentum.fc.client.IDfClient;
import com.documentum.fc.client.IDfCollection;
import com.documentum.fc.client.IDfEnumeration;
import com.documentum.fc.client.IDfFolder;
import com.documentum.fc.client.IDfGroup;
import com.documentum.fc.client.IDfObjectPath;
import com.documentum.fc.client.IDfPermit;
import com.documentum.fc.client.IDfPermitType;
import com.documentum.fc.client.IDfQuery;
import com.documentum.fc.client.IDfSession;
import com.documentum.fc.client.IDfSessionManager;
import com.documentum.fc.client.IDfSysObject;
import com.documentum.fc.client.IDfType;
import com.documentum.fc.client.IDfUser;
import com.documentum.fc.client.IDfVirtualDocument;
import com.documentum.fc.client.IDfVirtualDocumentNode;
import com.documentum.fc.common.DfException;
import com.documentum.fc.common.IDfAttr;
import com.documentum.fc.common.IDfId;
import com.documentum.fc.common.IDfLoginInfo;
import com.documentum.fc.common.IDfTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
// TODO(bmj): Add tests to test the exception handling.
// TODO(bmj): Add tests that call getDocIds and getModifiedDocIds with
// expected returns for all three items: documents, groups, and ACLs.
/** Unit tests for DocumentAdaptor class. */
public class DocumentumAdaptorTest {
private static enum LocalGroupsOnly { TRUE, FALSE };
  // Formats timestamps for the insert* fixture helpers. SimpleDateFormat is
  // not thread-safe, but these tests run single-threaded.
  private static final SimpleDateFormat dateFormat =
      new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
  // Well-known timestamps used to position objects relative to checkpoints.
  private static final String EPOCH_1970 = "1970-01-01 00:00:00";
  private static final String JAN_1970 = "1970-01-01 02:03:04";
  private static final String FEB_1970 = "1970-02-01 02:03:04";
  private static final String MAR_1970 = "1970-03-01 02:03:04";
  // Default documentum.src value; the folder is created in setUp so init()
  // can validate it.
  private static final String START_PATH = "/Folder1/path1";
  private static final String DEFAULT_ACL = "45DefaultACL";
  // DDL for the H2 tables that emulate the Documentum repository schema.
  // Columns prefixed with mock_ are test-only artifacts, not real
  // Documentum attributes.
  private static final String CREATE_TABLE_ACL = "create table dm_acl "
      + "(r_object_id varchar, r_accessor_name varchar, "
      + "r_accessor_permit int, r_permit_type int, r_is_group boolean)";
  private static final String CREATE_TABLE_AUDITTRAIL =
      "create table dm_audittrail "
      + "(r_object_id varchar, audited_obj_id varchar, chronicle_id varchar, "
      + "event_name varchar, time_stamp_utc timestamp, attribute_list varchar)";
  private static final String CREATE_TABLE_AUDITTRAIL_ACL =
      "create table dm_audittrail_acl "
      + "(r_object_id varchar, chronicle_id varchar, audited_obj_id varchar, "
      + "event_name varchar, time_stamp_utc timestamp)";
  private static final String CREATE_TABLE_CABINET = "create table dm_cabinet "
      + "(r_object_id varchar, r_folder_path varchar, object_name varchar)";
  private static final String CREATE_TABLE_FOLDER = "create table dm_folder "
      // Note: mock_acl_id is ACL id for the folder, and is used to
      // create AclMock.
      + "(r_object_id varchar, r_folder_path varchar, mock_acl_id varchar)";
  private static final String CREATE_TABLE_GROUP = "create table dm_group "
      + "(r_object_id varchar, group_name varchar, group_source varchar, "
      + "groups_names varchar, users_names varchar, r_modify_date timestamp)";
  private static final String CREATE_TABLE_USER = "create table dm_user "
      + "(user_name varchar primary key, user_login_name varchar, "
      + "user_source varchar, user_ldap_dn varchar, r_is_group boolean, "
      + "user_state int DEFAULT 0)";
  private static final String CREATE_TABLE_SYSOBJECT =
      "create table dm_sysobject "
      + "(r_object_id varchar, r_modify_date timestamp, r_object_type varchar, "
      + "object_name varchar, a_content_type varchar, i_folder_id varchar, "
      + "r_is_virtual_doc boolean, "
      // Note: mock_content is an artifact that stores the content as a string,
      // and mock_object_path is an artifact used to emulate FOLDER predicate,
      // and to assist getObjectByPath.
      + "mock_content varchar, mock_object_path varchar, "
      // Note: mock_acl_id is ACL id for the document, and is used to
      // create AclMock in SysObjectMock.
      + "mock_acl_id varchar )";
  /** Creates the mock repository tables and the default start path folder. */
  @Before
  public void setUp() throws Exception {
    Principals.clearCache();
    executeUpdate(CREATE_TABLE_ACL, CREATE_TABLE_AUDITTRAIL,
        CREATE_TABLE_AUDITTRAIL_ACL, CREATE_TABLE_CABINET, CREATE_TABLE_FOLDER,
        CREATE_TABLE_GROUP, CREATE_TABLE_SYSOBJECT, CREATE_TABLE_USER);
    // Force the default test start path to exist, so we pass init().
    insertFolder(EPOCH_1970, "0bStartPath", START_PATH);
  }
  /** Drops all H2 objects so each test starts from an empty repository. */
  @After
  public void tearDown() throws Exception {
    dropAllObjects();
  }
private Config getTestAdaptorConfig() {
return initTestAdaptorConfig(ProxyAdaptorContext.getInstance());
}
private Config initTestAdaptorConfig(AdaptorContext context) {
Config config = context.getConfig();
config.addKey("documentum.username", "testuser");
config.addKey("documentum.password", "testpwd");
config.addKey("documentum.docbaseName", "testdocbase");
config.addKey("documentum.displayUrlPattern", "http://webtop/drl/{0}");
config.addKey("documentum.src", START_PATH);
config.addKey("documentum.src.separator", ",");
config.addKey("documentum.documentTypes", "dm_document");
config.addKey("documentum.excludedAttributes", "");
config.addKey("adaptor.namespace", "globalNS");
config.addKey("documentum.windowsDomain", "");
config.addKey("documentum.pushLocalGroupsOnly", "false");
config.addKey("documentum.maxHtmlSize", "1000");
config.addKey("documentum.cabinetWhereCondition", "");
config.addKey("adaptor.caseSensitivityType", "");
return config;
}
  /**
   * Initializes the adaptor using the proxy clientX and proxy
   * AdaptorContext, and verifies the configured credentials, docbase, and
   * the exact DFC method-call sequence recorded by the proxies.
   *
   * @throws DfException if DFC initialization can't establish connection
   *     to Documentum repository.
   */
  @Test
  public void testDfcConnection() throws DfException {
    InitTestProxies proxyCls = new InitTestProxies();
    DocumentumAdaptor adaptor =
        new DocumentumAdaptor(proxyCls.getProxyClientX());
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    adaptor.init(context);
    assertEquals("testuser", proxyCls.username);
    // MockSensitiveValueDecoder just uppercases the input.
    assertEquals("TESTPWD", proxyCls.password);
    assertEquals("testdocbase", proxyCls.docbaseName);
    assertEquals(1, proxyCls.docbaseLoginInfoMap.size());
    assertEquals(1, proxyCls.docbaseSessionMap.size());
    // init() acquires and releases two sessions; NOTE(review): confirm
    // which init() steps account for each getSession/release pair.
    List<String> expectedMethodCallSequence = Arrays.asList(
        "getLocalClient", "newSessionManager",
        "getLoginInfo", "setIdentity",
        "getSession", "release",
        "getSession", "release"
    );
    assertEquals(expectedMethodCallSequence, proxyCls.methodCallSequence);
    // Order-insensitive calls are collected in a set.
    Set<String> expectedMethodCallSet =
        ImmutableSet.of("setUser", "setPassword", "getDFCVersion",
            "getServerVersion", "getObjectByPath");
    assertEquals(expectedMethodCallSet, proxyCls.methodCalls);
  }
@Test
public void testInitStartPaths() throws DfException {
InitTestProxies proxyCls = new InitTestProxies();
DocumentumAdaptor adaptor =
new DocumentumAdaptor(proxyCls.getProxyClientX());
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.src", "/Folder1/path1, /Folder2/path2,"
+ "/Folder3/path3");
adaptor.init(context);
assertEquals(Arrays.asList("/Folder1/path1", "/Folder2/path2",
"/Folder3/path3"), adaptor.getStartPaths());
}
private class InitTestProxies {
List <String> methodCallSequence = new ArrayList<String>();
Set <String> methodCalls = new HashSet<String>();
String serverVersion = "1.0.0.000 (Mock CS)";
IDfClient client = getProxyClient();
IDfLoginInfo loginInfo = getProxyLoginInfo();
IDfSessionManager sessionManager = getProxySessionManager();
Map<String, IDfLoginInfo> docbaseLoginInfoMap =
new HashMap<String, IDfLoginInfo>();
Map<String, IDfSession> docbaseSessionMap =
new HashMap<String, IDfSession>();
Map<String, String> folderPathIdsMap = new HashMap<String, String>() {
{
put("/Folder1/path1", "0b01081f80078d2a");
put("/Folder2/path2", "0b01081f80078d29");
put("/Folder3/path3", "0b01081f80078d28");
put("/Folder1/path1,/Folder2/path2,/Folder3/path3,/Folder4/path4",
"0b01081f80078d2b");
}
};
String username;
String password;
String docbaseName;
public void setServerVersion(String serverVersion) {
this.serverVersion = serverVersion;
}
public IDfClientX getProxyClientX() {
return Proxies.newProxyInstance(IDfClientX.class, new ClientXMock());
}
private class ClientXMock {
public String getDFCVersion() {
methodCalls.add(Proxies.getMethodName());
return "1.0.0.000 (Mock DFC)";
}
public IDfClient getLocalClient() {
methodCallSequence.add(Proxies.getMethodName());
return client;
}
public IDfLoginInfo getLoginInfo() {
methodCallSequence.add(Proxies.getMethodName());
return loginInfo;
}
}
public IDfClient getProxyClient() {
return Proxies.newProxyInstance(IDfClient.class, new ClientMock());
}
private class ClientMock {
public IDfSessionManager newSessionManager() {
methodCallSequence.add(Proxies.getMethodName());
return sessionManager;
}
}
public IDfLoginInfo getProxyLoginInfo() {
return Proxies.newProxyInstance(IDfLoginInfo.class, new LoginInfoMock());
}
private class LoginInfoMock {
public void setPassword(String password) {
methodCalls.add(Proxies.getMethodName());
InitTestProxies.this.password = password;
}
public void setUser(String username) {
methodCalls.add(Proxies.getMethodName());
InitTestProxies.this.username = username;
}
}
public IDfSessionManager getProxySessionManager() {
return Proxies.newProxyInstance(IDfSessionManager.class,
new SessionManagerMock());
}
private class SessionManagerMock {
public IDfSession getSession(String docbaseName) {
methodCallSequence.add(Proxies.getMethodName());
IDfSession session = docbaseSessionMap.get(docbaseName);
if (session == null) {
session =
Proxies.newProxyInstance(IDfSession.class, new SessionMock());
docbaseSessionMap.put(docbaseName, session);
}
return session;
}
public void release(IDfSession session) {
methodCallSequence.add(Proxies.getMethodName());
// TODO(sveldurthi): remove from the map to release the session
}
public void setIdentity(String docbaseName, IDfLoginInfo loginInfo) {
methodCallSequence.add(Proxies.getMethodName());
InitTestProxies.this.docbaseName = docbaseName;
docbaseLoginInfoMap.put(docbaseName, loginInfo);
}
}
private class SessionMock {
public String getServerVersion() {
methodCalls.add(Proxies.getMethodName());
return serverVersion;
}
public IDfSysObject getObjectByPath(String path) {
methodCalls.add(Proxies.getMethodName());
if (folderPathIdsMap.containsKey(path)) {
return Proxies.newProxyInstance(IDfSysObject.class,
new SysObjectMock(path));
} else {
return null;
}
}
public IDfType getType(String type) {
return Proxies.newProxyInstance(IDfType.class, new TypeMock(type));
}
}
private class SysObjectMock {
private String objectPath;
public SysObjectMock(String objectPath) {
this.objectPath = objectPath;
}
public IDfId getObjectId() {
String id = folderPathIdsMap.get(objectPath);
return Proxies.newProxyInstance(IDfId.class, new IdMock(id));
}
}
private class IdMock {
private String objectId;
public IdMock(String objectId) {
this.objectId = objectId;
}
public String toString() {
return objectId;
}
}
private class TypeMock {
private final String type;
private final ImmutableMap<String, String> superTypes =
ImmutableMap.of("dm_document", "dm_sysobject");
public TypeMock(String type) {
this.type = type;
}
public boolean isTypeOf(String otherType) {
if (type.startsWith(otherType)) {
return true;
}
String parent = superTypes.get(type);
while (parent != null) {
if (superTypes.get(type).startsWith(otherType)) {
return true;
}
parent = superTypes.get(parent);
}
return false;
}
}
}
@Test
public void testParseStartPaths() {
String path1 = "Folder1/path1";
String path2 = "Folder2/path2";
String path3 = "Folder3/path3";
String startPaths = path1 + "," + path2 + "," + path3;
List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
assertEquals(ImmutableList.of(path1, path2, path3), paths);
}
@Test
public void testParseStartPathsSeperator() {
String path1 = "Folder1/path1";
String path2 = "Folder2/path2";
String path3 = "Folder3/path3";
String separator = ":";
String startPaths = path1 + separator + path2 + separator + path3;
List<String> paths =
DocumentumAdaptor.parseStartPaths(startPaths, separator);
assertEquals(ImmutableList.of(path1, path2, path3), paths);
}
@Test
public void testParseStartPathsNotUsingRegExSeparator() {
String path1 = "Folder1/path1";
String path2 = "Folder2/path2";
String path3 = "Folder3/path3";
String startPaths = path1 + ";" + path2 + ":" + path3 + ",";
List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, "[:;,]");
assertEquals(ImmutableList.of(startPaths), paths);
startPaths = path1 + "[:;,]" + path2 + "[:;,]" + path3 + "[:;,]";
paths = DocumentumAdaptor.parseStartPaths(startPaths, "[:;,]");
assertEquals(ImmutableList.of(path1, path2, path3), paths);
}
@Test
public void testParseStartPathsBlankSeparator() {
String path1 = "Folder1/path1";
String path2 = "Folder2/path2";
String path3 = "Folder3/path3";
String startPaths = path1 + "," + path2 + "," + path3;
List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, "");
assertEquals(ImmutableList.of(startPaths), paths);
}
@Test
public void testParseStartPathsSinglePath() {
String path1 = "Folder1/path1";
String startPaths = path1;
List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
assertEquals(ImmutableList.of(path1), paths);
}
@Test
public void testParseStartPathsEmptyPath() {
String path1 = "Folder1/path1";
String path2 = "Folder2/path2";
String path3 = "";
String startPaths = path1 + "," + path2 + "," + path3;
List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
assertEquals(ImmutableList.of(path1, path2), paths);
}
@Test
public void testParseStartPathsWhiteSpacePath() {
String path1 = "Folder 1/path 1";
String path2 = " Folder 2/path 2 ";
String path3 = "Folder 3/ path 3 ";
String startPaths = path1 + "," + path2 + "," + path3;
List<String> paths = DocumentumAdaptor.parseStartPaths(startPaths, ",");
assertEquals(ImmutableList.of(path1.trim(), path2.trim(), path3.trim()),
paths);
}
@Test
public void testSlashAsStartPath() throws Exception {
String root = "/";
DocId docid = DocumentumAdaptor.docIdFromPath(root);
assertEquals(root, DocumentumAdaptor.docIdToPath(docid));
assertEquals("/foo", DocumentumAdaptor.docIdToPath(
DocumentumAdaptor.docIdFromPath(root, "foo")));
}
private void initializeAdaptor(DocumentumAdaptor adaptor, String src,
String separator) throws DfException {
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = context.getConfig();
adaptor.initConfig(config);
config.overrideKey("documentum.username", "testuser");
config.overrideKey("documentum.password", "testpwd");
config.overrideKey("documentum.docbaseName", "testdocbase");
config.overrideKey("documentum.displayUrlPattern",
"http://webtopurl/drl/{0}");
config.overrideKey("documentum.src", src);
if (separator != null) {
config.overrideKey("documentum.src.separator", separator);
}
config.overrideKey("documentum.documentTypes", "dm_document");
adaptor.init(context);
}
@Test
public void testConfigSeparator() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder1/path1";
String path2 = "/Folder2/path2";
String path3 = "/Folder3/path3";
String path4 = "/Folder4/path4";
String startPaths = path1 + ";" + path2 + ";" + path3 + ";" + path4;
initializeAdaptor(adaptor, startPaths, ";");
assertEquals(ImmutableList.of(path1, path2, path3, path4),
adaptor.getStartPaths());
}
@Test
public void testConfigBlankSeparatorValue() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder1/path1";
String path2 = "/Folder2/path2";
String path3 = "/Folder3/path3";
String path4 = "/Folder4/path4";
String startPaths = path1 + "," + path2 + "," + path3 + "," + path4;
initializeAdaptor(adaptor, startPaths, "");
assertEquals(ImmutableList.of(startPaths), adaptor.getStartPaths());
}
@Test
public void testConfigNoSeparatorEntry() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder1/path1";
String path2 = "/Folder2/path2";
String path3 = "/Folder3/path3";
String path4 = "/Folder4/path4";
String startPaths = path1 + "," + path2 + "," + path3 + "," + path4;
initializeAdaptor(adaptor, startPaths, null);
assertEquals(ImmutableList.of(path1, path2, path3, path4),
adaptor.getStartPaths());
}
private void initValidStartPaths(DocumentumAdaptor adaptor,
String... paths) throws DfException {
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.src", Joiner.on(",").join(paths));
adaptor.init(context);
}
@Test
public void testValidateStartPathsRootPath() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/";
initValidStartPaths(adaptor, path1);
assertEquals(ImmutableList.of(path1), adaptor.getValidatedStartPaths());
}
@Test
public void testValidateStartPathsAllValid() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder1/path1";
String path2 = "/Folder2/path2";
String path3 = "/Folder3/path3";
initValidStartPaths(adaptor, path1, path2, path3);
assertEquals(ImmutableList.of(path1, path2, path3),
adaptor.getValidatedStartPaths());
}
@Test
public void testValidateStartPathsSomeValid() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder1/path1";
String path2 = "/Folder2/path2";
String path3 = "/Folder4/path3";
String path4 = "/Folder4/path4";
initValidStartPaths(adaptor, path1, path2, path3, path4);
assertEquals(ImmutableList.of(path1, path2),
adaptor.getValidatedStartPaths());
}
@Test
public void testValidateStartPathsSomeInvalid() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder4/path4";
String path2 = "/Folder5/path5";
String path3 = "/Folder1/path1";
String path4 = "/Folder2/path2";
initValidStartPaths(adaptor, path1, path2, path3, path4);
assertEquals(ImmutableList.of(path3, path4),
adaptor.getValidatedStartPaths());
}
@Test
public void testValidateStartPathsNormalizePaths() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder1/path1/";
String path2 = "Folder2/path2";
String path3 = "Folder3/path3/";
String path4 = "Folder5/path5";
initValidStartPaths(adaptor, path1, path2, path3, path4);
assertEquals(ImmutableList.of("/Folder1/path1", "/Folder2/path2",
"/Folder3/path3"), adaptor.getValidatedStartPaths());
}
@Test(expected = IllegalStateException.class)
public void testValidateStartPathsNoneValid() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
String path1 = "/Folder1/path4";
String path2 = "/Folder2/path5";
String path3 = "/Folder3/path6";
initValidStartPaths(adaptor, path1, path2, path3);
}
private void testValidateDisplayUrlPattern(String pattern)
throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new InitTestProxies().getProxyClientX());
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.displayUrlPattern", pattern);
adaptor.init(context);
}
@Test
public void testValidateDisplayUrlPatternObjectId() throws DfException {
testValidateDisplayUrlPattern("http://webtopurl/drl/{0}");
}
@Test
public void testValidateDisplayUrlPatternPath() throws DfException {
testValidateDisplayUrlPattern("http://webtopurl/drl{1}");
}
@Test(expected = InvalidConfigurationException.class)
public void testValidateDisplayUrlPatternEmptyPattern() throws DfException {
testValidateDisplayUrlPattern("");
}
@Test(expected = InvalidConfigurationException.class)
public void testValidateDisplayUrlPatternBadPattern() throws DfException {
testValidateDisplayUrlPattern("{0}tp://webtop/");
}
@Test(expected = InvalidConfigurationException.class)
public void testValidateDisplayUrlPatternNoSubstitutions()
throws DfException {
testValidateDisplayUrlPattern("http://webtop/");
}
private void testDateToString(String version, String expected)
throws DfException {
InitTestProxies initProxies = new InitTestProxies();
initProxies.setServerVersion(version);
DocumentumAdaptor adaptor =
new DocumentumAdaptor(initProxies.getProxyClientX());
initializeAdaptor(adaptor, "/Folder1/path1", ";");
assertEquals(expected, adaptor.dateToStringFunction);
}
@Test
public void testDateToString_version6() throws DfException {
testDateToString("6.5.0.033 Win32.SQLServer", "DATETOSTRING");
}
@Test
public void testDateToString_version7() throws DfException {
testDateToString("7.2.0000.0155 Win64.SQLServer", "DATETOSTRING_LOCAL");
}
@Test
public void testDateToString_version75() throws DfException {
testDateToString("7.5.0000.0100 Win32.SQLServer", "DATETOSTRING_LOCAL");
}
@Test
public void testDateToString_version8() throws DfException {
testDateToString("8.0.0000.0000 Win64.SQLServer", "DATETOSTRING_LOCAL");
}
@Test
public void testDateToString_version10() throws DfException {
testDateToString("10.0.0000.0010 Win64.SQLServer", "DATETOSTRING_LOCAL");
}
/* Mock proxy classes backed by the H2 database tables. */
private class H2BackedTestProxies {
    /** Returns the top-level IDfClientX proxy backed by these mocks. */
    public IDfClientX getProxyClientX() {
      return Proxies.newProxyInstance(IDfClientX.class, new ClientXMock());
    }
    // Entry-point mock: hands out the client, login info, and query mocks.
    private class ClientXMock {
      public String getDFCVersion() {
        return "1.0.0.000 (Mock DFC)";
      }
      public IDfClient getLocalClient() {
        return Proxies.newProxyInstance(IDfClient.class, new ClientMock());
      }
      public IDfLoginInfo getLoginInfo() {
        return Proxies.newProxyInstance(IDfLoginInfo.class,
            new LoginInfoMock());
      }
      public IDfQuery getQuery() {
        return Proxies.newProxyInstance(IDfQuery.class, new QueryMock());
      }
    }
    private class ClientMock {
      public IDfSessionManager newSessionManager() {
        return Proxies.newProxyInstance(IDfSessionManager.class,
            new SessionManagerMock());
      }
    }
    // Credentials are ignored by the H2-backed fixture; both are no-ops.
    private class LoginInfoMock {
      public void setPassword(String password) {
      }
      public void setUser(String username) {
      }
    }
    // Captures the DQL text and executes it against H2 via CollectionMock.
    private class QueryMock {
      private String query;
      public void setDQL(String query) {
        this.query = query;
      }
      public IDfCollection execute(IDfSession session, int arg1)
          throws DfException {
        return Proxies.newProxyInstance(IDfCollection.class,
            new CollectionMock(query));
      }
    }
private class CollectionMock {
final Statement stmt;
final ResultSet rs;
public CollectionMock(String query) throws DfException {
try {
stmt = getConnection().createStatement();
query = query.replaceAll("DATETOSTRING(_LOCAL)?", "FORMATDATETIME")
.replace("DATE(", "PARSEDATETIME(")
.replace("yyyy-mm-dd hh:mi:ss", "yyyy-MM-dd HH:mm:ss")
.replaceAll("TYPE\\((dm_document_subtype|dm_sysobject_subtype|"
+ "dm_document|dm_folder)\\)", "r_object_type LIKE '$1%'")
.replace("FOLDER(", "(mock_object_path LIKE ")
.replace("',descend", "%'")
.replace("ENABLE(ROW_BASED)", "");
rs = stmt.executeQuery(query);
} catch (SQLException e) {
throw new DfException(e);
}
}
private String[] getRepeatingValue(String colName) throws DfException {
String value = getString(colName);
if (Strings.isNullOrEmpty(value)) {
return new String[0];
}
return value.split(",");
}
public int getValueCount(String colName) throws DfException {
return getRepeatingValue(colName).length;
}
public String getRepeatingString(String colName, int index)
throws DfException {
return getRepeatingValue(colName)[index];
}
public String getString(String colName) throws DfException {
try {
return rs.getString(colName);
} catch (SQLException e) {
throw new DfException(e);
}
}
public boolean next() throws DfException {
try {
return rs.next();
} catch (SQLException e) {
throw new DfException(e);
}
}
public void close() throws DfException {
try {
rs.close();
stmt.close();
} catch (SQLException e) {
throw new DfException(e);
}
}
}
    // Stateless session manager: every session is a fresh SessionMock, and
    // release/setIdentity are no-ops for this fixture.
    private class SessionManagerMock {
      public IDfSession newSession(String docbaseName) {
        return Proxies.newProxyInstance(IDfSession.class, new SessionMock());
      }
      public IDfSession getSession(String docbaseName) {
        return newSession(docbaseName);
      }
      public void release(IDfSession session) {
      }
      public void setIdentity(String docbaseName, IDfLoginInfo loginInfo) {
      }
    }
    // Session mock that resolves DFC object lookups by querying the H2
    // tables directly. Queries interpolate ids/paths with String.format,
    // which is acceptable only because inputs are test-controlled.
    private class SessionMock {
      public String getServerVersion() {
        return "1.0.0.000 (Mock CS)";
      }
      // Looks up an ACL by object id; fails the test if it doesn't exist.
      public IDfACL getObject(IDfId id) throws DfException {
        String query = String.format(
            "SELECT r_object_id FROM dm_acl WHERE r_object_id = '%s'",
            id.toString());
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.first()) {
            return Proxies.newProxyInstance(IDfACL.class,
                new AclMock(id.toString()));
          } else {
            throw new AssertionError("Object ID " + id + " doesn't exist.");
          }
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      // Resolves a repository path via the mock_object_path artifact column,
      // returning a FolderMock for dm_folder* types, else a SysObjectMock.
      public IDfSysObject getObjectByPath(String path) throws DfException {
        String query = String.format(
            "SELECT *, mock_object_path AS r_folder_path "
            + "FROM dm_sysobject WHERE mock_object_path = '%s'", path);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.first()) {
            if (rs.getString("r_object_type").startsWith("dm_folder")) {
              return Proxies.newProxyInstance(IDfFolder.class,
                  new FolderMock(rs));
            } else {
              return Proxies.newProxyInstance(IDfSysObject.class,
                  new SysObjectMock(rs));
            }
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      // Executes a "table qualification" (e.g. "dm_user WHERE ...") and
      // wraps the first row in a user or group mock based on the table name.
      // NOTE(review): toLowerCase() without Locale.ROOT is locale-sensitive;
      // harmless for these ASCII table names, but worth confirming.
      public Object getObjectByQualification(String query) throws DfException {
        if (Strings.isNullOrEmpty(query)) {
          return null;
        }
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery("SELECT * FROM " + query)) {
          if (rs.first()) {
            if (query.toLowerCase().startsWith("dm_user ")) {
              return Proxies.newProxyInstance(IDfUser.class, new UserMock(rs));
            } else if (query.toLowerCase().startsWith("dm_group ")) {
              return
                  Proxies.newProxyInstance(IDfGroup.class, new GroupMock(rs));
            }
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      // Returns an enumeration over the object's parent folder ids, or null
      // if the object id is unknown.
      public IDfEnumeration getObjectPaths(IDfId id) throws DfException {
        String query = String.format("SELECT i_folder_id FROM dm_sysobject "
            + "WHERE r_object_id = '%s'", id.toString());
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.next()) {
            return Proxies.newProxyInstance(IDfEnumeration.class,
                new EnumerationMock(rs.getString("i_folder_id")));
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      // Looks up a folder by object id, joining dm_folder for r_folder_path.
      public IDfFolder getFolderBySpecification(String spec)
          throws DfException {
        if (Strings.isNullOrEmpty(spec)) {
          return null;
        }
        String query = String.format(
            "SELECT s.*, f.r_folder_path FROM dm_sysobject s "
            + "JOIN dm_folder f ON s.r_object_id = f.r_object_id "
            + "WHERE s.r_object_id = '%s'", spec);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.first()) {
            return
                Proxies.newProxyInstance(IDfFolder.class, new FolderMock(rs));
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      public IDfType getType(String type) {
        return Proxies.newProxyInstance(IDfType.class, new TypeMock(type));
      }
    }
private class SysObjectMock {
private final String id;
private final String name;
private final String type;
private final String contentType;
private final String content;
private final String aclId;
private final Date lastModified;
private final boolean isVirtualDocument;
private final Multimap<String, String> attributes;
public SysObjectMock(ResultSet rs) throws SQLException {
id = rs.getString("r_object_id");
name = rs.getString("object_name");
type = rs.getString("r_object_type");
contentType = rs.getString("a_content_type");
content = rs.getString("mock_content");
aclId = rs.getString("mock_acl_id");
lastModified = new Date(rs.getTimestamp("r_modify_date").getTime());
isVirtualDocument = rs.getBoolean("r_is_virtual_doc");
attributes = readAttributes(id);
}
public IDfId getObjectId() {
return Proxies.newProxyInstance(IDfId.class, new IdMock(id));
}
public String getObjectName() {
return name;
}
public String getString(String attrName) {
switch (attrName) {
case "object_name": return name;
case "r_object_id": return id;
default: return null;
}
}
public InputStream getContent() {
if (content == null) {
return null;
}
return new ByteArrayInputStream(content.getBytes(UTF_8));
}
public IDfType getType() {
return Proxies.newProxyInstance(IDfType.class, new TypeMock(type));
}
public String getContentType() {
return contentType;
}
public IDfTime getTime(String attr) {
if (attr.equals("r_modify_date")) {
return Proxies.newProxyInstance(IDfTime.class,
new TimeMock(lastModified));
} else {
return null;
}
}
public boolean isVirtualDocument() {
return isVirtualDocument;
}
public IDfVirtualDocument asVirtualDocument(String lateBinding,
boolean followRootAssembly) {
return Proxies.newProxyInstance(IDfVirtualDocument.class,
new VirtualDocumentMock(id));
}
public Enumeration<IDfAttr> enumAttrs() throws DfException {
Vector<IDfAttr> v = new Vector<IDfAttr>();
for (String name : attributes.keySet()) {
v.add(Proxies.newProxyInstance(IDfAttr.class, new AttrMock(name)));
}
return v.elements();
}
public int getValueCount(String name) {
return attributes.get(name).size();
}
public String getRepeatingString(String name, int index) {
return new ArrayList<String>(attributes.get(name)).get(index);
}
public IDfACL getACL() {
return Proxies.newProxyInstance(IDfACL.class,
new AclMock(aclId.toString()));
}
}
private class VirtualDocumentMock {
private final String vdocId;
public VirtualDocumentMock(String vdocId) {
this.vdocId = vdocId;
}
public IDfVirtualDocumentNode getRootNode() throws DfException {
return Proxies.newProxyInstance(IDfVirtualDocumentNode.class,
new VdocRootNodeMock(vdocId));
}
}
    /**
     * Mock of the root IDfVirtualDocumentNode. Children are the paths of
     * all sysobjects whose i_folder_id is the vdoc's object ID; they are
     * loaded eagerly in the constructor.
     */
    private class VdocRootNodeMock {
      private final ArrayList<String> vdocChildren = new ArrayList<>();
      public VdocRootNodeMock(String vdocId) throws DfException {
        String query = String.format("SELECT mock_object_path "
            + "FROM dm_sysobject WHERE i_folder_id = '%s'", vdocId);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          while (rs.next()) {
            vdocChildren.add(rs.getString("mock_object_path"));
          }
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
      public int getChildCount() {
        return vdocChildren.size();
      }
      /** Wraps the index-th child path in a child-node proxy. */
      public IDfVirtualDocumentNode getChild(int index) {
        return Proxies.newProxyInstance(IDfVirtualDocumentNode.class,
            new VdocChildNodeMock(vdocChildren.get(index)));
      }
    }
    /** Mock of a child IDfVirtualDocumentNode identified by its path. */
    private class VdocChildNodeMock {
      private final String childPath;
      public VdocChildNodeMock(String childPath) {
        this.childPath = childPath;
      }
      /**
       * Resolves the child path through a fresh mock session, releasing
       * the session when done (mirrors the adaptor's own usage pattern).
       */
      public IDfSysObject getSelectedObject() throws DfException {
        IDfSessionManager sessionManager =
            getProxyClientX().getLocalClient().newSessionManager();
        IDfSession session = sessionManager.getSession("foo");
        try {
          return (IDfSysObject) session.getObjectByPath(childPath);
        } finally {
          sessionManager.release(session);
        }
      }
    }
private class FolderMock extends SysObjectMock {
private String[] folderPaths;
public FolderMock(ResultSet rs) throws SQLException {
super(rs);
this.folderPaths = rs.getString("r_folder_path").split(",");
}
public int getFolderPathCount() {
return folderPaths.length;
}
public String getFolderPath(int index) {
return folderPaths[index];
}
public IDfCollection getContents(String colNames) throws DfException {
String query = String.format(
"SELECT %s FROM dm_sysobject WHERE i_folder_id = '%s'",
colNames, getObjectId());
return Proxies.newProxyInstance(IDfCollection.class,
new CollectionMock(query));
}
}
private class TypeMock {
private final String type;
private final ImmutableMap<String, String> superTypes =
ImmutableMap.of("dm_document_subtype", "dm_document",
"dm_document", "dm_sysobject",
"dm_sysobject_subtype", "dm_sysobject",
"dm_folder_subtype", "dm_folder",
"dm_folder", "dm_sysobject");
public TypeMock(String type) {
this.type = type;
}
public boolean isTypeOf(String otherType) {
if (type.startsWith(otherType)) {
return true;
}
String parent = superTypes.get(type);
while (!Strings.isNullOrEmpty(parent)) {
if (parent.startsWith(otherType)) {
return true;
}
parent = superTypes.get(parent);
}
return false;
}
public String getName() {
return type;
}
public IDfType getSuperType() {
if (superTypes.containsKey(type)) {
return Proxies.newProxyInstance(IDfType.class,
new TypeMock(superTypes.get(type)));
} else {
return null;
}
}
}
private class TimeMock {
private final Date date;
public TimeMock(Date date) {
this.date = date;
}
public Date getDate() {
return date;
}
}
private class IdMock {
private final String objectId;
public IdMock(String objectId) {
this.objectId = objectId;
}
public String toString() {
return objectId;
}
}
private class AttrMock {
private final String name;
public AttrMock(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
private class UserMock {
private String loginName;
private String source;
private String ldapDn;
private boolean isGroup;
public UserMock(ResultSet rs) throws SQLException {
loginName = rs.getString("user_login_name");
source = rs.getString("user_source");
ldapDn = rs.getString("user_ldap_dn");
isGroup = rs.getBoolean("r_is_group");
}
public String getUserLoginName() {
return loginName;
}
public String getUserSourceAsString() {
return source;
}
public String getUserDistinguishedLDAPName() {
return ldapDn;
}
public boolean isGroup() {
return isGroup;
}
}
private class GroupMock {
private String source;
public GroupMock(ResultSet rs) throws SQLException {
source = rs.getString("group_source");
}
public String getGroupSource() {
return source;
}
}
private class AccessorInfo {
String name;
int permitType;
int permit;
boolean isGroup;
AccessorInfo(String name, int permitType, int permit, boolean isGroup) {
this.name = name;
this.permitType = permitType;
this.permit = permit;
this.isGroup = isGroup;
}
String getName() {
return name;
}
int getPermitType() {
return permitType;
}
int getPermit() {
return permit;
}
boolean isGroup() {
return isGroup;
}
}
public class AclMock {
private String id;
List<AccessorInfo> accessorList = new ArrayList<AccessorInfo>();
public AclMock(String id) {
this.id = id;
try {
getAccessorInfo();
} catch (SQLException e) {
e.printStackTrace();
}
}
private void getAccessorInfo() throws SQLException {
try (Connection connection = getConnection();
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(
"select r_accessor_name, r_accessor_permit, "
+ "r_permit_type, r_is_group from dm_acl "
+ "where r_object_id = '" + id + "'")) {
while (rs.next()) {
String accessorName = rs.getString("r_accessor_name");
int accessorPermit = rs.getInt("r_accessor_permit");
int accessorPermitType = rs.getInt("r_permit_type");
boolean isGroup = rs.getBoolean("r_is_group");
if (!Strings.isNullOrEmpty(accessorName)) {
accessorList.add(new AccessorInfo(accessorName,
accessorPermitType, accessorPermit, isGroup));
}
}
}
}
public int getAccessorCount() {
return accessorList.size();
}
public String getAccessorName(int n) {
return accessorList.get(n).getName();
}
public int getAccessorPermitType(int n) {
return accessorList.get(n).getPermitType();
}
public int getAccessorPermit(int n) {
return accessorList.get(n).getPermit();
}
public boolean isGroup(int n) {
return accessorList.get(n).isGroup();
}
public IDfId getObjectId() {
return Proxies.newProxyInstance(IDfId.class, new IdMock(id));
}
}
    /**
     * Mock of IDfEnumeration over a comma-separated list of folder IDs;
     * each element is an IDfObjectPath proxy.
     */
    public class EnumerationMock {
      private final UnmodifiableIterator<String> iter;
      public EnumerationMock(String folderIds) {
        // Split on commas, tolerating surrounding whitespace.
        iter = Iterators.forArray(folderIds.split("\\s*,\\s*"));
      }
      public boolean hasMoreElements() throws DfException {
        return iter.hasNext();
      }
      public IDfObjectPath nextElement() throws DfException {
        return Proxies.newProxyInstance(IDfObjectPath.class,
            new ObjectPathMock(iter.next()));
      }
    }
    /**
     * Mock of IDfObjectPath: resolves a folder object ID to its
     * r_folder_path via the mock dm_folder table.
     */
    public class ObjectPathMock {
      private final String id;
      public ObjectPathMock(String id) throws DfException {
        this.id = id;
      }
      /** Returns the folder's first path, or null if the ID is unknown. */
      public String getFullPath() throws DfException {
        //TODO(sveldurthi): Add test for multiple r_folder_path values.
        String query =
            String.format("SELECT r_folder_path "
                + "FROM dm_folder WHERE r_object_id = '%s'", id);
        try (Connection connection = getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
          if (rs.next()) {
            return rs.getString("r_folder_path");
          }
          return null;
        } catch (SQLException e) {
          throw new DfException(e);
        }
      }
    }
}
  /** Creates an initialized adaptor with the default test configuration. */
  private DocumentumAdaptor getObjectUnderTest() throws DfException {
    return getObjectUnderTest(ImmutableMap.<String, String>of());
  }
  /**
   * Creates an initialized adaptor whose config is the default test
   * configuration overridden by the entries of {@code configMap}.
   */
  private DocumentumAdaptor getObjectUnderTest(Map<String, String> configMap)
      throws DfException {
    H2BackedTestProxies proxyCls = new H2BackedTestProxies();
    IDfClientX dmClientX = proxyCls.getProxyClientX();
    DocumentumAdaptor adaptor = new DocumentumAdaptor(dmClientX);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    for (Map.Entry<String, String> entry : configMap.entrySet()) {
      config.overrideKey(entry.getKey(), entry.getValue());
    }
    adaptor.init(context);
    return adaptor;
  }
  /**
   * Inserts mock dm_cabinet rows; each cabinet gets object ID "0c" + name
   * and folder path "/" + name.
   */
  private void insertCabinets(String... cabinets) throws SQLException {
    for (String cabinet : cabinets) {
      executeUpdate(String.format("INSERT INTO dm_cabinet "
          + "(r_object_id, r_folder_path, object_name) VALUES('%s','%s','%s')",
          "0c" + cabinet, "/" + cabinet, cabinet));
    }
  }
private void checkGetRootContent(String whereClause, int maxHtmlLinks,
String... expectedCabinets) throws Exception {
List<String> queries = new ArrayList<>();
Logging.captureLogMessages(DocumentumAdaptor.class,
"Get All Cabinets Query", queries);
String startPath = "/";
AdaptorContext context = ProxyAdaptorContext.getInstance();
DocIdEncoder docidEncoder = context.getDocIdEncoder();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.src", startPath);
config.overrideKey("documentum.maxHtmlSize", "" + maxHtmlLinks);
config.overrideKey("documentum.cabinetWhereCondition", whereClause);
Request request =
new MockRequest(DocumentumAdaptor.docIdFromPath(startPath));
MockResponse response = getDocContent(context, request);
assertEquals(queries.toString(), 1, queries.size());
String query = queries.get(0);
if (whereClause.isEmpty()) {
assertFalse(query, query.contains(" WHERE "));
} else {
assertTrue(query, query.contains(" WHERE " + whereClause));
}
assertEquals("text/html; charset=UTF-8", response.contentType);
String content = response.content.toString(UTF_8.name());
assertEquals(content, maxHtmlLinks == 0 || expectedCabinets.length == 0,
content.indexOf("href") < 0);
assertEquals(response.anchors.toString(),
maxHtmlLinks >= expectedCabinets.length,
response.anchors.isEmpty());
for (String cabinet : expectedCabinets) {
// First look in the HTML links for the cabinet. If not there,
// look in the external anchors.
String link = "<a href=\"" + cabinet + "\">" + cabinet + "</a>";
if (content.indexOf(link) < 0) {
URI uri = docidEncoder.encodeDocId(new DocId(cabinet));
URI anchor = response.anchors.get(cabinet);
assertNotNull("Cabinet " + cabinet + " with URI " + uri + " is missing"
+ " from response:/n" + content + "/n" + response.anchors, anchor);
assertEquals(uri, anchor);
}
}
}
  // No cabinets inserted: listing is empty regardless of the where clause.
  @Test
  public void testGetRootContentNoCabinets() throws Exception {
    checkGetRootContent("1=1", 100);
  }
  // Empty where clause: every cabinet, including System, is listed.
  @Test
  public void testGetRootContentEmptyWhereClause() throws Exception {
    insertCabinets("System", "Cabinet1", "Cabinet2");
    checkGetRootContent("", 100, "System", "Cabinet1", "Cabinet2");
  }
  // HTML budget covers all cabinets: links only, no anchors.
  @Test
  public void testGetRootContentHtmlResponseOnly() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2", "Cabinet3");
    checkGetRootContent("", 100, "Cabinet1", "Cabinet2", "Cabinet3");
  }
  // Zero HTML budget: every cabinet is delivered as an external anchor.
  @Test
  public void testGetRootContentAnchorResponseOnly() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2", "Cabinet3");
    checkGetRootContent("", 0, "Cabinet1", "Cabinet2", "Cabinet3");
  }
  // Budget of 2 for 4 cabinets: a mix of HTML links and anchors.
  @Test
  public void testGetRootContentHtmlAndAnchorResponse() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2", "Cabinet3", "Cabinet4");
    checkGetRootContent("", 2, "Cabinet1", "Cabinet2", "Cabinet3",
        "Cabinet4");
  }
  // An explicit where clause filters out the named system cabinets.
  @Test
  public void testGetRootContentAddedWhereClause() throws Exception {
    insertCabinets("System", "Temp", "Cabinet1", "Cabinet2");
    checkGetRootContent("object_name NOT IN ('System', 'Temp')",
        100, "Cabinet1", "Cabinet2");
  }
  // The adaptor's default where clause excludes the standard system
  // cabinets plus the docbase owner's and install owner's cabinets.
  @Test
  public void testGetRootContentDefaultWhereClause() throws Exception {
    executeUpdate(
        "CREATE TABLE dm_docbase_config (owner_name varchar)",
        "INSERT INTO dm_docbase_config (owner_name) VALUES('Owner')",
        "CREATE TABLE dm_server_config (r_install_owner varchar)",
        "INSERT INTO dm_server_config (r_install_owner) VALUES('Installer')");
    insertCabinets("Integration", "Resources", "System");
    insertCabinets("Temp", "Templates", "Owner", "Installer");
    insertCabinets("Cabinet1", "Cabinet2");
    Config config = ProxyAdaptorContext.getInstance().getConfig();
    new DocumentumAdaptor(null).initConfig(config);
    checkGetRootContent(config.getValue("documentum.cabinetWhereCondition"),
        100, "Cabinet1", "Cabinet2");
  }
  // A syntactically invalid where clause surfaces as IOException
  // wrapping a DfException.
  @Test
  public void testGetRootContentInvalidWhereClause() throws Exception {
    insertCabinets("Cabinet1", "Cabinet2");
    try {
      checkGetRootContent("( xyzzy", 100);
      fail("Expected exception not thrown.");
    } catch (IOException expected) {
      assertTrue(expected.getCause() instanceof DfException);
    }
  }
  /** Inserts a plain-text "Hello World" document at the given path. */
  private void insertDocument(String path) throws SQLException {
    insertDocument(new Date(), path, "text/plain", "Hello World");
  }
  /**
   * Inserts a dm_document row with the given modify date, path, content
   * type, and content. The object ID is "09" + last path segment, and
   * the default ACL is assigned.
   */
  private void insertDocument(Date lastModified, String path,
      String contentType, String content) throws SQLException {
    String name = path.substring(path.lastIndexOf("/") + 1);
    executeUpdate(String.format(
        "insert into dm_sysobject(r_object_id, object_name, mock_object_path, "
        + "r_object_type, a_content_type, mock_content, r_modify_date, "
        + "mock_acl_id) "
        + "values('%s', '%s', '%s', '%s', '%s', '%s', {ts '%s'}, '%s')",
        "09" + name, name, path, "dm_document", contentType, content,
        dateFormat.format(lastModified), DEFAULT_ACL));
  }
  /** Inserts a dm_document row with an explicit ID and parent folder IDs. */
  private void insertDocument(String lastModified, String id, String path,
      String... folderIds) throws SQLException {
    String name = path.substring(path.lastIndexOf("/") + 1);
    insertSysObject(lastModified, id, name, path, "dm_document", folderIds);
  }
  /**
   * Inserts a folder: one dm_folder row holding all paths
   * (comma-separated), plus one dm_sysobject row per path.
   */
  private void insertFolder(String lastModified, String id, String... paths)
      throws SQLException {
    executeUpdate(String.format(
        "insert into dm_folder(r_object_id, r_folder_path) values('%s', '%s')",
        id, Joiner.on(",").join(paths)));
    for (String path : paths) {
      String name = path.substring(path.lastIndexOf("/") + 1);
      insertSysObject(lastModified, id, name, path, "dm_folder");
    }
  }
  /** Points an existing sysobject at a new parent folder. */
  private void setParentFolderId(String id, String parentId)
      throws SQLException {
    executeUpdate(String.format(
        "UPDATE dm_sysobject SET i_folder_id = '%s' WHERE r_object_id = "
        + "'%s'", parentId, id));
  }
  /**
   * Inserts a generic dm_sysobject row; parent folder IDs are stored
   * comma-separated in i_folder_id, and the default ACL is assigned.
   */
  private void insertSysObject(String lastModified, String id, String name,
      String path, String type, String... folderIds) throws SQLException {
    executeUpdate(String.format(
        "insert into dm_sysobject(r_object_id, object_name, mock_object_path, "
        + "r_object_type, i_folder_id, r_modify_date, mock_acl_id) "
        + "values('%s', '%s', '%s', '%s', '%s', {ts '%s'}, '%s')",
        id, name, path, type, Joiner.on(",").join(folderIds), lastModified,
        DEFAULT_ACL));
  }
  /** Reassigns the ACL of the sysobject at the given path. */
  private void setSysObjectACL(String path, String aclId)
      throws SQLException {
    executeUpdate(String.format(
        "UPDATE dm_sysobject SET mock_acl_id = '%s' WHERE mock_object_path = "
        + "'%s'", aclId, path));
  }
  /**
   * Inserts a document with the given modify date, requests it with the
   * given last-crawled time, and asserts whether a not-modified response
   * (no content) or full content is returned.
   */
  private void testDocContent(Date lastCrawled, Date lastModified,
      boolean expectNotModified) throws DfException, IOException, SQLException {
    String path = START_PATH + "/object1";
    String contentType = "crtext/html";
    String content = "<html><body>Hello</body></html>";
    insertDocument(lastModified, path, contentType, content);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path),
        lastCrawled);
    MockResponse response = getDocContent(context, request);
    if (expectNotModified) {
      assertTrue(response.notModified);
      assertNull(response.contentType);
      assertNull(response.content);
    } else {
      assertFalse(response.notModified);
      assertEquals(contentType, response.contentType);
      assertEquals(content, response.content.toString(UTF_8.name()));
    }
  }
  /** Fetches doc content for a path using the default test config. */
  private MockResponse getDocContent(String path)
      throws DfException, IOException {
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    return getDocContent(context, request);
  }
  /** Initializes a fresh adaptor and runs getDocContent for the request. */
  private MockResponse getDocContent(AdaptorContext context, Request request)
      throws DfException, IOException {
    H2BackedTestProxies proxyCls = new H2BackedTestProxies();
    IDfClientX dmClientX = proxyCls.getProxyClientX();
    DocumentumAdaptor adaptor = new DocumentumAdaptor(dmClientX);
    adaptor.init(context);
    MockResponse response = new MockResponse();
    adaptor.getDocContent(request, response);
    return response;
  }
  // First crawl (no last-crawled time): content is always returned.
  @Test
  public void testDocContentInitialCrawl() throws Exception {
    Date lastModified = new Date();
    testDocContent(null, lastModified, false);
  }
  // Modified after the last crawl: content is returned.
  @Test
  public void testDocContentModifiedSinceLastCrawl() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified = new Date(lastCrawled.getTime() + (120 * 1000L));
    testDocContent(lastCrawled, lastModified, false);
  }
  // Just inside the one-day grace window: still treated as modified.
  @Test
  public void testDocContentOneDayBeforeWindowJustShort() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified = new Date( // Two seconds short of a full day.
        lastCrawled.getTime() - (24 * 60 * 60 * 1000L - 2000L));
    testDocContent(lastCrawled, lastModified, false);
  }
  // Just outside the one-day grace window: not-modified response.
  @Test
  public void testDocContentOneDayBeforeWindowJustOver() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified = new Date( // Two seconds more than a full day.
        lastCrawled.getTime() - (24 * 60 * 60 * 1000L + 2000L));
    testDocContent(lastCrawled, lastModified, true);
  }
  @Test
  public void testDocContentRecentlyModified() throws Exception {
    // Even though content was crawled after it was recently
    // modified, don't trust Documentum dates to be UTC, so
    // content should be returned anyway.
    Date lastCrawled = new Date();
    Date lastModified = new Date(lastCrawled.getTime() - (8 * 60 * 60 * 1000L));
    testDocContent(lastModified, lastCrawled, false);
  }
  // Modified well before the last crawl: not-modified response.
  @Test
  public void testDocContentNotRecentlyModified() throws Exception {
    Date lastCrawled = new Date();
    Date lastModified =
        new Date(lastCrawled.getTime() - (72 * 60 * 60 * 1000L));
    testDocContent(lastCrawled, lastModified, true);
  }
  /**
   * Inserts a document at path, fetches it with the given display URL
   * pattern configured, and returns the rendered display URL.
   * Pattern placeholders: {0} = object ID, {1} = path.
   */
  private String getDisplayUrl(String displayUrlPattern, String path)
      throws Exception {
    assertTrue(path, path.startsWith(START_PATH));
    insertDocument(path);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    config.overrideKey("documentum.displayUrlPattern", displayUrlPattern);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    MockResponse response = getDocContent(context, request);
    assertNotNull(response.toString(), response.displayUrl);
    return response.displayUrl.toString();
  }
  // {0} expands to the object ID.
  @Test
  public void testDisplayUrlWithId() throws Exception {
    String path = "/Folder1/path1/object1";
    assertEquals("http://webtopurl/drl/09object1",
        getDisplayUrl("http://webtopurl/drl/{0}", path));
  }
  // {1} expands to the repository path.
  @Test
  public void testDisplayUrlWithPath() throws Exception {
    String path = "/Folder1/path1/object1";
    assertEquals("http://webtopurl/drl//Folder1/path1/object1",
        getDisplayUrl("http://webtopurl/drl/{1}", path));
  }
  // Both placeholders may be combined in any order.
  @Test
  public void testDisplayUrlWithIdAndPath() throws Exception {
    String path = "/Folder1/path1/object1";
    assertEquals("/Folder1/path1/object1-http://webtopurl/09object1/drl/",
        getDisplayUrl("{1}-http://webtopurl/{0}/drl/", path));
  }
  /** Fetches doc content for path and returns its non-null ACL. */
  private Acl getACL(String path) throws Exception {
    assertTrue(path, path.startsWith(START_PATH));
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    initTestAdaptorConfig(context);
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    MockResponse response = getDocContent(context, request);
    assertNotNull(response.toString(), response.acl);
    return response.acl;
  }
  // A document's response ACL inherits from its assigned ACL object.
  @Test
  public void testDocumentACL() throws Exception {
    String path = "/Folder1/path1/object1";
    String documentACL = "45DocumentACL";
    insertDocument(path);
    setSysObjectACL(path, documentACL);
    Acl acl = getACL(path);
    assertEquals(new DocId(documentACL), acl.getInheritFrom());
  }
  // A folder's response ACL inherits from its assigned ACL object.
  @Test
  public void testFolderACL() throws Exception {
    String now = getNowPlusMinutes(0);
    String folderId = "0b01081f80078d29";
    String folder = START_PATH + "/path2";
    String folderACL = "45FolderAcl";
    insertFolder(now, folderId, folder);
    setSysObjectACL(folder, folderACL);
    Acl acl = getACL(folder);
    assertEquals(new DocId(folderACL), acl.getInheritFrom());
  }
/*
* Note that the metadata structure stored in these tests is slightly
* different that Documentum stores them:
*
* DCTM stores the data as:
*
* attr1 attr2 attr3
* ----- ----- -----
* valu1 valuA valuI
* valu2 valuII
* valu3
*
* whereas this table is:
*
* attr1 attr2 attr3
* ----- ----- -----
* valu1
* valu2
* valu3
* valuA
* valuI
* valuII
*
* The difference is insignificant for these tests.
*/
  /**
   * Creates the mock attributes table with one varchar column per
   * attribute name, then inserts one row per attribute value for the
   * given object ID (see the layout note in the comment above).
   */
  private void writeAttributes(String objectId, Multimap<String, String> attrs)
      throws SQLException {
    StringBuilder ddl = new StringBuilder();
    ddl.append("CREATE TABLE attributes (r_object_id varchar");
    for (String attr : attrs.keySet()) {
      ddl.append(", ").append(attr).append(" varchar");
    }
    ddl.append(")");
    executeUpdate(ddl.toString());
    for (String attr : attrs.keySet()) {
      for (String value : attrs.get(attr)) {
        executeUpdate(String.format(
            "INSERT INTO attributes (r_object_id, %s) VALUES ('%s', '%s')",
            attr, objectId, value));
      }
    }
  }
  /**
   * Reads all attribute values for an object from the mock attributes
   * table into a sorted multimap. Returns an empty multimap when the
   * table was never created (no attributes in the test).
   */
  private Multimap<String, String> readAttributes(String objectId)
      throws SQLException {
    Multimap<String, String> attributes = TreeMultimap.create();
    try (Connection connection = getConnection()) {
      DatabaseMetaData dbm = connection.getMetaData();
      try (ResultSet tables = dbm.getTables(null, null, "ATTRIBUTES", null)) {
        if (!tables.next()) {
          // Attributes table does not exist if there are
          // no attributes in the test.
          return attributes;
        }
      }
      // Read all the attributes for our objectId.
      String query = String.format("SELECT * FROM attributes "
          + "WHERE r_object_id = '%s'", objectId);
      try (Statement stmt = connection.createStatement();
          ResultSet rs = stmt.executeQuery(query)) {
        ResultSetMetaData rsm = rs.getMetaData();
        while (rs.next()) {
          for (int i = 1; i <= rsm.getColumnCount(); i++) {
            // H2 uppercases the column names.
            String attr = rsm.getColumnName(i).toLowerCase();
            if (!attr.equals("r_object_id")) {
              String value = rs.getString(attr);
              if (value != null) {
                attributes.put(attr, value);
              }
            }
          }
        }
      }
    }
    return attributes;
  }
  /**
   * Inserts a document with the given attributes, optionally overriding
   * the excluded-attributes config, and asserts the response metadata.
   */
  private void testExcludeMetadata(TreeMultimap<String, String> attrs,
      String excludedAttrs, TreeMultimap<String, String> expected)
      throws Exception {
    String path = START_PATH + "/object1";
    String objectId = "09object1";
    insertDocument(path);
    writeAttributes(objectId, attrs);
    AdaptorContext context = ProxyAdaptorContext.getInstance();
    Config config = initTestAdaptorConfig(context);
    if (excludedAttrs != null) {
      config.overrideKey("documentum.excludedAttributes", excludedAttrs);
    }
    Request request = new MockRequest(DocumentumAdaptor.docIdFromPath(path));
    MockResponse response = getDocContent(context, request);
    assertEquals(expected, response.metadata);
  }
  /** Metadata test with the default exclusion config. */
  private void testMetadata(TreeMultimap<String, String> attrs,
      TreeMultimap<String, String> expected) throws Exception {
    testExcludeMetadata(attrs, null, expected);
  }
  // Single-valued attributes appear unchanged; r_object_id is added.
  @Test
  public void testSingleValueMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr2", "value2");
    attributes.put("attr3", "value3");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    testMetadata(attributes, expected);
  }
  // Repeating attribute values all appear in the metadata.
  @Test
  public void testMultiValueMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr1", "value2");
    attributes.put("attr1", "value3");
    assertEquals(1, attributes.keySet().size());
    assertEquals(3, attributes.get("attr1").size());
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    testMetadata(attributes, expected);
  }
  // Empty attribute values are preserved, not dropped.
  @Test
  public void testEmptyValueMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr2", "value2");
    attributes.put("attr2", "");
    attributes.put("attr3", "");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    testMetadata(attributes, expected);
  }
  // Excluded attributes are filtered out, but r_object_id is always kept.
  @Test
  public void testExcludeAttrMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("attr1", "value1");
    attributes.put("attr2", "value2");
    attributes.put("attr3", "value3");
    attributes.put("foo", "foo1");
    attributes.put("bar", "bar1");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    String excluded = "foo, bar, r_object_id";
    expected.removeAll("foo");
    expected.removeAll("bar");
    testExcludeMetadata(attributes, excluded, expected);
  }
  // r_object_type is expanded to include its supertypes.
  @Test
  public void testObjectTypeMetadata() throws Exception {
    TreeMultimap<String, String> attributes = TreeMultimap.create();
    attributes.put("r_object_type", "dm_document");
    attributes.put("attr2", "value2");
    TreeMultimap<String, String> expected = TreeMultimap.create(attributes);
    expected.put("r_object_id", "09object1");
    expected.removeAll("r_object_type");
    expected.put("r_object_type", "dm_document");
    expected.put("r_object_type", "dm_sysobject");
    testMetadata(attributes, expected);
  }
  /**
   * Inserts a virtual document (r_is_virtual_doc = TRUE) at vdocPath,
   * plus one child document per name parented to the vdoc's object ID.
   */
  private void insertVirtualDocument(String vdocPath, String contentType,
      String content, String... children) throws SQLException {
    String name = vdocPath.substring(vdocPath.lastIndexOf("/") + 1);
    String vdocId = "09" + name;
    String now = getNowPlusMinutes(0);
    executeUpdate(String.format(
        "INSERT INTO dm_sysobject(r_object_id, object_name, mock_object_path, "
        + "r_object_type, r_is_virtual_doc, a_content_type, mock_content, "
        + "r_modify_date, mock_acl_id) "
        + "VALUES('%s', '%s', '%s', '%s', TRUE, '%s', '%s', {ts '%s'}, '%s')",
        vdocId, name, vdocPath, "dm_document_virtual", contentType, content,
        now, DEFAULT_ACL));
    for (String child : children) {
      insertDocument(now, "09" + child, vdocPath + "/" + child, vdocId);
    }
  }
  // A childless vdoc serves its own content with no anchors.
  @Test
  public void testVirtualDocContentNoChildren() throws Exception {
    String path = START_PATH + "/vdoc";
    String objectContentType = "crtext/html";
    String objectContent = "<html><body>Hello</body></html>";
    insertVirtualDocument(path, objectContentType, objectContent);
    MockResponse response = getDocContent(path);
    assertEquals(objectContentType, response.contentType);
    assertEquals(objectContent, response.content.toString(UTF_8.name()));
    assertTrue(response.anchors.isEmpty());
  }
  // A vdoc with children serves its content plus one anchor per child,
  // each anchor URI ending in path/name:objectId.
  @Test
  public void testVirtualDocContentWithChildren() throws Exception {
    String path = START_PATH + "/vdoc";
    String objectContentType = "crtext/html";
    String objectContent = "<html><body>Hello</body></html>";
    insertVirtualDocument(path, objectContentType, objectContent,
        "object1", "object2", "object3");
    MockResponse response = getDocContent(path);
    assertEquals(objectContentType, response.contentType);
    assertEquals(objectContent, response.content.toString(UTF_8.name()));
    // Verify child links.
    assertEquals(3, response.anchors.size());
    for (String name : ImmutableList.of("object1", "object2", "object3")) {
      URI uri = response.anchors.get(name);
      assertNotNull(uri);
      assertTrue(uri.toString().endsWith(path + "/" + name + ":09" + name));
    }
  }
  // A folder is rendered as an HTML listing of its children, with
  // special characters in child names URL-escaped in the hrefs.
  @Test
  public void testFolderDocContent() throws Exception {
    String now = getNowPlusMinutes(0);
    String folderId = "0b01081f80078d29";
    String folder = START_PATH + "/path2";
    insertFolder(now, folderId, folder);
    insertDocument(now, "0901081f80079263", folder + "/file1", folderId);
    insertDocument(now, "0901081f8007926d", folder + "/file2 evil<chars?",
        folderId);
    insertDocument(now, "0901081f80079278", folder + "/file3", folderId);
    StringBuilder expected = new StringBuilder();
    expected.append("<!DOCTYPE html>\n<html><head><title>");
    expected.append("Folder path2");
    expected.append("</title></head><body><h1>");
    expected.append("Folder path2");
    expected.append("</h1>");
    expected.append("<li><a href=\"path2/file1\">file1</a></li>");
    expected.append("<li><a href=\"path2/file2%20evil%3Cchars%3F\">"
        + "file2 evil<chars?</a></li>");
    expected.append("<li><a href=\"path2/file3\">file3</a></li>");
    expected.append("</body></html>");
    MockResponse response = getDocContent(folder);
    assertFalse(response.notFound);
    assertEquals("text/html; charset=UTF-8", response.contentType);
    assertEquals(expected.toString(), response.content.toString(UTF_8.name()));
  }
  // A nonexistent path yields a not-found response.
  @Test
  public void testGetDocContentNotFound() throws Exception {
    String path = START_PATH + "/doesNotExist";
    assertTrue(getDocContent(path).notFound);
  }
  // Paths outside the configured start path are treated as not found.
  @Test
  public void testGetDocContentNotUnderStartPath() throws Exception {
    String now = getNowPlusMinutes(0);
    String path = "/Folder2/path2";
    insertFolder(now, "0b01081f80078d30", path);
    assertFalse(path.startsWith(START_PATH));
    assertTrue(getDocContent(path).notFound);
  }
/**
 * Builds the list of DocId {@code Record}s the Pusher is expected to
 * receive, one per path, in the given order.
 *
 * @param paths document paths to convert to records
 * @return an immutable list of records for the given paths
 */
private List<Record> expectedRecordsFor(String... paths) {
  ImmutableList.Builder<Record> expected = ImmutableList.builder();
  for (String path : paths) {
    expected.add(
        new Record.Builder(DocumentumAdaptor.docIdFromPath(path)).build());
  }
  return expected.build();
}
/**
 * Runs a full getDocIds traversal over the given start paths and
 * asserts that the pusher received exactly the expected records.
 *
 * @param startPaths values joined into the "documentum.src" config key
 * @param expectedRecords the records the pusher must have accumulated
 */
private void testGetDocIds(List<String> startPaths,
    List<Record> expectedRecords)
    throws DfException, IOException, InterruptedException {
  DocumentumAdaptor adaptor = getObjectUnderTest(
      ImmutableMap.of("documentum.src", Joiner.on(",").join(startPaths)));
  AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
  adaptor.getDocIds(pusher);
  assertEquals(expectedRecords, pusher.getRecords());
}
@Test
// The repository root is a valid start path and is pushed as-is.
public void testGetDocIdsRootStartPath() throws Exception {
  testGetDocIds(startPaths("/"), expectedRecordsFor("/"));
}

@Test
// A single configured start path produces a single record.
public void testGetDocIdsSingleStartPath() throws Exception {
  testGetDocIds(startPaths(START_PATH), expectedRecordsFor(START_PATH));
}

@Test
// All configured start paths that exist are pushed, in order.
public void testGetDocIdsMultipleStartPaths() throws Exception {
  String now = getNowPlusMinutes(0);
  String path2 = "/Folder2";
  String path3 = "/Folder3";
  insertFolder(now, "0bFolder2", path2);
  insertFolder(now, "0bFolder3", path3);
  testGetDocIds(startPaths(START_PATH, path2, path3),
      expectedRecordsFor(START_PATH, path2, path3));
}

@Test
// A start path with no backing folder (path2 is never inserted) is
// silently skipped; the remaining paths are still pushed.
public void testGetDocIdsMultipleStartPathsSomeOffline() throws Exception {
  String now = getNowPlusMinutes(0);
  String path2 = "/Folder2";
  String path3 = "/Folder3";
  insertFolder(now, "0bFolder3", path3);
  testGetDocIds(startPaths(START_PATH, path2, path3),
      expectedRecordsFor(START_PATH, path3));
}
/**
 * A traversal action includes an expected input checkpoint, an
 * exception to throw, and a final checkpoint to return. All fields
 * are optional and may be null. Used by {@code testTraverserTemplate}
 * to script one loop iteration of a traverser.
 */
private static class Action {
  // Checkpoint object id the traverser is expected to start from.
  public final String input;
  // Exception fillCollection should throw, or null for success.
  public final DfException error;
  // Checkpoint object id to return; null signals a completed traversal.
  public final String output;

  public Action(String input, DfException error, String output) {
    this.input = input;
    this.error = error;
    this.output = output;
  }
}
/**
 * Tests the traversers by replaying a sequence of actions. An
 * assertion will fail if the traverser loops more or fewer times
 * than the given number of actions, or if the checkpoints or thrown
 * exceptions do not match.
 */
private void testTraverserTemplate(Action... actionArray) throws Exception {
  // The actions are removed from the deque as they are performed.
  final ArrayDeque<Action> actions =
      new ArrayDeque<>(Arrays.asList(actionArray));

  // Scripted TraverserTemplate: fillCollection checks the incoming
  // checkpoint and optionally throws; pushCollection consumes the
  // current action and returns its output checkpoint.
  DocumentumAdaptor adaptor = getObjectUnderTest();
  DocumentumAdaptor.TraverserTemplate template =
      adaptor.new TraverserTemplate(Checkpoint.full()) {
        @Override protected void createCollection() {}

        @Override
        protected boolean fillCollection(IDfSession dmSession,
            Principals principals, Checkpoint checkpoint)
            throws DfException {
          assertEquals(actions.getFirst().input, checkpoint.getObjectId());
          if (actions.getFirst().error != null) {
            throw actions.getFirst().error;
          }
          // A null output means this iteration completes the traversal.
          return actions.getFirst().output == null;
        }

        @Override
        protected Checkpoint pushCollection(DocIdPusher pusher) {
          return new Checkpoint(actions.removeFirst().output);
        }

        @Override protected void sleep() {}
      };

  // We only expect an exception if the last loop iteration throws.
  ArrayList<DfException> expectedExceptions = new ArrayList<>();
  if (actions.getLast().error != null) {
    expectedExceptions.add(actions.getLast().error);
  }

  AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
  ArrayList<DfException> savedExceptions = new ArrayList<>();
  template.run(pusher, savedExceptions);

  // All scripted actions must have been consumed — no extra or
  // missing loop iterations.
  assertTrue(actions.toString(), actions.isEmpty());
  assertEquals(expectedExceptions, savedExceptions);
}
// Checkpoint ids and errors used as fixtures by the traverser
// template tests below.
private static final String C = "non-null checkpoint";
private static final String D = "another checkpoint";
private static final DfException E = new DfException("first error");
private static final DfException F = new DfException("second error");

@Test
// A single iteration that throws without advancing the checkpoint:
// the error is reported and the traverser stops.
public void testTraverserTemplate_noProgress() throws Exception {
  testTraverserTemplate(
      new Action(null, E, null));
}

@Test
// A single clean iteration completes the traversal.
public void testTraverserTemplate_completeTraversal() throws Exception {
  testTraverserTemplate(
      new Action(null, null, null));
}

@Test
public void testTraverserTemplate_impossible() throws Exception {
  // If no exception is thrown, the checkpoint should be null.
  // But if it happens, we expect a second call.
  testTraverserTemplate(
      new Action(null, null, C),
      new Action(C, null, null));
}

@Test
// Two consecutive failures at the same checkpoint: both stop, the
// second error is the one surfaced.
public void testTraverserTemplate_throwThenNoProgress() throws Exception {
  testTraverserTemplate(
      new Action(null, E, C),
      new Action(C, F, C));
}

@Test
// Errors that still advance the checkpoint keep the traversal going
// until a clean completion.
public void testTraverserTemplate_throwThenProgress() throws Exception {
  testTraverserTemplate(
      new Action(null, E, C),
      new Action(C, F, D),
      new Action(D, null, null));
}

@Test
// An error followed by a clean retry from the saved checkpoint
// reports no exception.
public void testTraverserTemplate_throwThenComplete() throws Exception {
  testTraverserTemplate(
      new Action(null, E, C),
      new Action(C, null, null));
}
/** Inserts one dm_user row per name, with login name equal to the name. */
private void insertUsers(String... names) throws SQLException {
  for (String name : names) {
    executeUpdate(String.format(
        "insert into dm_user(user_name, user_login_name) values('%s', '%s')",
        name, name));
  }
}

/** Marks the named dm_user rows as inactive (user_state = 1). */
private void disableUsers(String... names) throws SQLException {
  // TODO(sveldurthi): modify query to use where user_name in ('u1', 'u2')
  for (String name : names) {
    executeUpdate(String.format(
        "UPDATE dm_user SET user_state = 1 WHERE user_name = '%s'", name));
  }
}

/** Inserts a docbase-sourced group with the current timestamp. */
private void insertGroup(String groupName, String... members)
    throws SQLException {
  insertGroupEx(getNowPlusMinutes(0), "", groupName, members);
}

/** Inserts an LDAP-sourced group with the current timestamp. */
private void insertLdapGroup(String groupName, String... members)
    throws SQLException {
  insertGroupEx(getNowPlusMinutes(0), "LDAP", groupName, members);
}
/**
 * Inserts a group as a dm_user row plus one or more dm_group rows.
 * Members whose names start with "group" (case-insensitive) are stored
 * as nested groups; all others are stored as users. LDAP-sourced
 * groups additionally get a "CN=&lt;name&gt;" distinguished name.
 *
 * @param lastModified r_modify_date timestamp string for the group
 * @param source user_source/group_source value ("" or "LDAP")
 * @param groupName the group to create
 * @param members user and nested-group member names
 */
private void insertGroupEx(String lastModified, String source,
    String groupName, String... members) throws SQLException {
  executeUpdate(String.format("INSERT INTO dm_user"
      + "(user_name, user_login_name, user_source, user_ldap_dn, r_is_group) "
      + "VALUES('%s', '%s', '%s', '%s', TRUE)", groupName, groupName,
      source, "LDAP".equals(source) ? ("CN=" + groupName) : ""));
  List<String> users = new ArrayList<String>();
  List<String> groups = new ArrayList<String>();
  for (String member : members) {
    if (member.toLowerCase().startsWith("group")) {
      groups.add(member);
    } else {
      users.add(member);
    }
  }
  // Emulate ROW_BASED retrieval by storing the values that way:
  // one row per (user, group) pair index, padding the shorter of the
  // two lists with NULL, and at least one row even with no members.
  int numRows = Math.max(1, Math.max(users.size(), groups.size()));
  for (int i = 0; i < numRows; i++) {
    executeUpdate(String.format("INSERT INTO dm_group"
        + "(r_object_id, group_name, group_source, r_modify_date, "
        + "users_names, groups_names) VALUES('%s', '%s', '%s', {ts '%s'}, "
        + "%s, %s)",
        "12" + groupName, groupName, source, lastModified,
        (i < users.size()) ? "'" + users.get(i) + "'" : "NULL",
        (i < groups.size()) ? "'" + groups.get(i) + "'" : "NULL"));
  }
}
/** Inserts an empty dm_acl row with the given object id. */
private void createAcl(String id) throws SQLException {
  executeUpdate(String.format(
      "insert into dm_acl(r_object_id) values('%s')", id));
}
/**
 * Looks up whether the named accessor is a group.
 *
 * @param accessorName a user or group name from dm_user
 * @return {@code true} if a dm_user row exists for the name and is
 *     flagged r_is_group; {@code false} for plain users or when no
 *     row exists
 */
private boolean isAccessorGroup(String accessorName) throws SQLException {
  // Use a parameterized query instead of string concatenation so
  // accessor names containing quotes cannot break the SQL. (The TODO
  // at the ACL tests mentions names with quotes in them.)
  try (Connection connection = getConnection();
      java.sql.PreparedStatement stmt = connection.prepareStatement(
          "select r_is_group from dm_user where user_name = ?")) {
    stmt.setString(1, accessorName);
    try (ResultSet rs = stmt.executeQuery()) {
      if (rs.next()) {
        return rs.getBoolean(1);
      }
    }
  }
  return false;
}
/**
 * Records a permit on an ACL as a dm_acl row, looking up whether the
 * accessor is a group so r_is_group is populated.
 */
private void grantPermit(String id, IDfPermit permit) throws SQLException {
  executeUpdate(String.format(
      "insert into dm_acl(r_object_id, r_accessor_name, "
      + "r_accessor_permit, r_permit_type, r_is_group) values("
      + "'%s', '%s', '%s', '%s', '%s')",
      id, permit.getAccessorName(), permit.getPermitValueInt(),
      permit.getPermitType(), isAccessorGroup(permit.getAccessorName())));
}

/**
 * Builds an access permit of the given type/level and grants it on
 * the ACL. Shared by the allow and deny helpers below, which only
 * differ in the permit type they pass.
 */
private void addAccessPermitToAcl(String id, String accessorName,
    int permitType, int permit) throws SQLException {
  IDfPermit permitobj = new DfPermit();
  permitobj.setAccessorName(accessorName);
  permitobj.setPermitType(permitType);
  permitobj.setPermitValue(Integer.toString(permit));
  grantPermit(id, permitobj);
}

/** Grants an ACCESS_PERMIT (allow) at the given permit level. */
private void addAllowPermitToAcl(String id, String accessorName, int permit)
    throws SQLException {
  addAccessPermitToAcl(id, accessorName, IDfPermitType.ACCESS_PERMIT, permit);
}

/** Grants an ACCESS_RESTRICTION (deny) at the given permit level. */
private void addDenyPermitToAcl(String id, String accessorName, int permit)
    throws SQLException {
  addAccessPermitToAcl(id, accessorName, IDfPermitType.ACCESS_RESTRICTION,
      permit);
}
/** Runs a full traversal with no Windows domain and returns all ACLs. */
private Map<DocId, Acl> getAllAcls() throws Exception {
  return getAllAcls("");
}

/**
 * Runs a full getDocIds traversal and returns the named resources
 * (ACLs) pushed. Users get the global "NS" namespace; groups get the
 * docbase-local namespace (docbase "Local" here, so "NS_Local" in
 * the assertions).
 *
 * @param windowsDomain value for documentum.windowsDomain, possibly empty
 */
private Map<DocId, Acl> getAllAcls(String windowsDomain)
    throws DfException, IOException, InterruptedException {
  DocumentumAdaptor adaptor = getObjectUnderTest(
      ImmutableMap.<String, String>builder()
      .put("documentum.windowsDomain", windowsDomain)
      .put("adaptor.namespace", "NS")
      .put("documentum.docbaseName", "Local") // Local Namespace
      .build());
  AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
  adaptor.getDocIds(pusher);
  return pusher.getNamedResources();
}
// Tests for ACLs.
// TODO: (Srinivas) - Add a unit test and perform manual test of
// user and group names with quotes in them.
@Test
// Every dm_acl row becomes one named resource.
public void testAcls() throws Exception {
  createAcl("4501081f80000100");
  createAcl("4501081f80000101");
  createAcl("4501081f80000102");
  Map<DocId, Acl> namedResources = getAllAcls();
  assertEquals(3, namedResources.size());
}

@Test
public void testAllowAcls() throws Exception {
  insertUsers("User1", "User2", "User3", "User4", "User5");
  String id = "4501081f80000100";
  createAcl(id);
  addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
  addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
  addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
  addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
  Map<DocId, Acl> namedResources = getAllAcls();
  Acl acl = namedResources.get(new DocId(id));
  // READ-or-better allows become permit users; only the BROWSE-level
  // deny becomes a deny user. Users denied only DELETE/WRITE (User1,
  // User3) appear in neither set.
  assertEquals(ImmutableSet.of(new UserPrincipal("User4", "NS"),
      new UserPrincipal("User5", "NS")),
      acl.getPermitUsers());
  assertEquals(ImmutableSet.of(new UserPrincipal("User2", "NS")),
      acl.getDenyUsers());
  assertEquals(ImmutableSet.of(), acl.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl.getDenyGroups());
}

@Test
// NOTE(review): this test is currently identical to testAllowAcls;
// presumably it was meant to exercise different (browse-level) permit
// values — confirm intent and differentiate.
public void testBrowseAcls() throws Exception {
  insertUsers("User1", "User2", "User3", "User4", "User5");
  String id = "4501081f80000100";
  createAcl(id);
  addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
  addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
  addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
  addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
  Map<DocId, Acl> namedResources = getAllAcls();
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(new UserPrincipal("User4", "NS"),
      new UserPrincipal("User5", "NS")),
      acl.getPermitUsers());
  assertEquals(ImmutableSet.of(new UserPrincipal("User2", "NS")),
      acl.getDenyUsers());
  assertEquals(ImmutableSet.of(), acl.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl.getDenyGroups());
}
@Test
// Group accessors are emitted as GroupPrincipals in the docbase-local
// namespace ("NS_Local"), while user accessors stay in the global "NS".
public void testGroupAcls() throws Exception {
  insertUsers("User1", "User2");
  insertGroup("Group1", "User2", "User3");
  insertGroup("Group2", "User4", "User5");
  insertGroup("Group3", "User6", "User7");
  String id = "4501081f80000101";
  createAcl(id);
  addAllowPermitToAcl(id, "User1", IDfACL.DF_PERMIT_WRITE);
  addAllowPermitToAcl(id, "User2", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
  addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
  Map<DocId, Acl> namedResources = getAllAcls();
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
      new GroupPrincipal("Group2", "NS_Local")),
      acl.getPermitGroups());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
      acl.getDenyGroups());
  assertEquals(ImmutableSet.of(new UserPrincipal("User1", "NS"),
      new UserPrincipal("User2", "NS")),
      acl.getPermitUsers());
  assertEquals(ImmutableSet.of(), acl.getDenyUsers());
}
@Test
// Disabled users (user_state = 1) are dropped from both the permit
// and deny sides of the ACL.
public void testDisabledUserAcls() throws Exception {
  insertUsers("User2", "User3", "User4", "User5");
  disableUsers("User2", "User4");
  String id = "4501081f80000100";
  createAcl(id);
  addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
  addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_READ);
  Map<DocId, Acl> namedResources = getAllAcls();
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(new UserPrincipal("User5", "NS")),
      acl.getPermitUsers());
  assertEquals(ImmutableSet.of(new UserPrincipal("User3", "NS")),
      acl.getDenyUsers());
  assertEquals(ImmutableSet.of(), acl.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl.getDenyGroups());
}

@Test
// Groups disabled via the same user_state flag on their dm_user row
// are likewise dropped from both sides of the ACL.
public void testDisabledGroupAcls() throws Exception {
  insertGroup("Group1", "User2", "User3");
  insertGroup("Group2", "User4", "User5");
  insertGroup("Group3", "User6", "User7");
  insertGroup("Group4", "User8", "User9");
  disableUsers("Group2", "Group3");
  String id = "4501081f80000101";
  createAcl(id);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
  addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "Group4", IDfACL.DF_PERMIT_READ);
  Map<DocId, Acl> namedResources = getAllAcls();
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local")),
      acl.getPermitGroups());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group4", "NS_Local")),
      acl.getDenyGroups());
  assertEquals(ImmutableSet.of(), acl.getPermitUsers());
  assertEquals(ImmutableSet.of(), acl.getDenyUsers());
}
@Test
// dm_world is kept as a permit group even though Group1's BROWSE-only
// allow is dropped; the WRITE-level deny on User3 produces no deny
// principal, leaving only User1's READ deny.
public void testGroupDmWorldAcl() throws Exception {
  insertUsers("User1", "User3");
  insertGroup("Group1", "User2", "User3");
  String id = "4501081f80000102";
  createAcl(id);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_BROWSE);
  addAllowPermitToAcl(id, "dm_world", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
  Map<DocId, Acl> namedResources = getAllAcls();
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(new GroupPrincipal("dm_world", "NS_Local")),
      acl.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl.getDenyGroups());
  assertEquals(ImmutableSet.of(), acl.getPermitUsers());
  assertEquals(ImmutableSet.of(new UserPrincipal("User1", "NS")),
      acl.getDenyUsers());
}
@Test
// With documentum.windowsDomain set, user principals are prefixed
// with "domain\".
public void testDomainForAclUser() throws Exception {
  insertUsers("User1", "User2", "User3", "User4", "User5");
  String id = "4501081f80000100";
  createAcl(id);
  addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
  addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
  addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
  addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
  Map<DocId, Acl> namedResources = getAllAcls("ajax");
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(new UserPrincipal("ajax\\User4", "NS"),
      new UserPrincipal("ajax\\User5", "NS")),
      acl.getPermitUsers());
  assertEquals(ImmutableSet.of(new UserPrincipal("ajax\\User2", "NS")),
      acl.getDenyUsers());
}

@Test
// A DNS-style domain is used verbatim as the principal prefix.
public void testDnsDomainForAclUser() throws Exception {
  insertUsers("User1", "User2", "User3", "User4", "User5");
  String id = "4501081f80000100";
  createAcl(id);
  addAllowPermitToAcl(id, "User4", IDfACL.DF_PERMIT_WRITE);
  addAllowPermitToAcl(id, "User5", IDfACL.DF_PERMIT_READ);
  addDenyPermitToAcl(id, "User1", IDfACL.DF_PERMIT_DELETE);
  addDenyPermitToAcl(id, "User2", IDfACL.DF_PERMIT_BROWSE);
  addDenyPermitToAcl(id, "User3", IDfACL.DF_PERMIT_WRITE);
  Map<DocId, Acl> namedResources = getAllAcls("ajax.example.com");
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(
      new UserPrincipal("ajax.example.com\\User4", "NS"),
      new UserPrincipal("ajax.example.com\\User5", "NS")),
      acl.getPermitUsers());
  assertEquals(ImmutableSet.of(
      new UserPrincipal("ajax.example.com\\User2", "NS")),
      acl.getDenyUsers());
}

@Test
// Group principals are NOT domain-prefixed, even when a Windows
// domain is configured.
public void testDomainForAclGroup() throws Exception {
  insertUsers("User1", "User2");
  insertGroup("Group1", "User2", "User3");
  insertGroup("Group2", "User4", "User5");
  insertGroup("Group3", "User6", "User7");
  String id = "4501081f80000101";
  createAcl(id);
  addAllowPermitToAcl(id, "User1", IDfACL.DF_PERMIT_WRITE);
  addAllowPermitToAcl(id, "User2", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
  addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
  Map<DocId, Acl> namedResources = getAllAcls("ajax");
  Acl acl = namedResources.get(new DocId(id));
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
      new GroupPrincipal("Group2", "NS_Local")),
      acl.getPermitGroups());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
      acl.getDenyGroups());
}
// Tests for required groups and required group sets.

/**
 * Grants a required-group-style permit (no permit value) of the given
 * type on the ACL. Shared by the two helpers below, which only differ
 * in the permit type constant they pass.
 */
private void addRequiredPermitToAcl(String id, String accessorName,
    int permitType) throws SQLException {
  IDfPermit permitobj = new DfPermit();
  permitobj.setAccessorName(accessorName);
  permitobj.setPermitType(permitType);
  grantPermit(id, permitobj);
}

/** Grants a REQUIRED_GROUP_SET permit on the ACL. */
private void addRequiredGroupSetToAcl(String id, String accessorName)
    throws SQLException {
  addRequiredPermitToAcl(id, accessorName, IDfPermitType.REQUIRED_GROUP_SET);
}

/** Grants a REQUIRED_GROUP permit on the ACL. */
private void addRequiredGroupToAcl(String id, String accessorName)
    throws SQLException {
  addRequiredPermitToAcl(id, accessorName, IDfPermitType.REQUIRED_GROUP);
}
@Test
// Required group sets produce an extra "<aclId>_reqGroupSet" ACL with
// AND_BOTH_PERMIT inheritance; the base ACL inherits from it with
// PARENT_OVERRIDES.
public void testRequiredGroupSetAcl() throws Exception {
  insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
  insertGroup("Group1", "User2", "User3");
  insertGroup("Group2", "User4", "User5");
  insertGroup("Group3", "User6", "User7");
  insertGroup("GroupSet1", "Group1", "Group2");
  insertGroup("GroupSet2", "Group2", "Group3");

  String id = "45Acl0";
  createAcl(id);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
  addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
  addRequiredGroupSetToAcl(id, "GroupSet1");
  addRequiredGroupSetToAcl(id, "GroupSet2");

  Map<DocId, Acl> namedResources = getAllAcls();
  assertEquals(2, namedResources.size());

  // Both required group sets land in the single _reqGroupSet ACL.
  Acl acl1 = namedResources.get(new DocId("45Acl0_reqGroupSet"));
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("GroupSet1", "NS_Local"),
      new GroupPrincipal("GroupSet2", "NS_Local")),
      acl1.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl1.getDenyGroups());

  Acl acl2 = namedResources.get(new DocId(id));
  assertEquals(new DocId("45Acl0_reqGroupSet"),
      acl2.getInheritFrom());
  assertEquals(InheritanceType.PARENT_OVERRIDES, acl2.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
      new GroupPrincipal("Group2", "NS_Local")),
      acl2.getPermitGroups());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
      acl2.getDenyGroups());
}

@Test
// Each required group becomes its own "<aclId>_<group>" ACL; they are
// chained via inheritFrom (Group4 <- Group5 <- Group6) and the base
// ACL inherits from the last link in the chain.
public void testRequiredGroupsAcl() throws Exception {
  insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
  insertGroup("Group1", "User2", "User3");
  insertGroup("Group2", "User4", "User5");
  insertGroup("Group3", "User6", "User7");
  insertGroup("Group4", "User2", "User3");
  insertGroup("Group5", "User4", "User5");
  insertGroup("Group6", "User6", "User7");

  String id = "45Acl0";
  createAcl(id);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
  addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
  addRequiredGroupToAcl(id, "Group4");
  addRequiredGroupToAcl(id, "Group5");
  addRequiredGroupToAcl(id, "Group6");

  Map<DocId, Acl> namedResources = getAllAcls();
  assertEquals(4, namedResources.size());

  Acl acl1 = namedResources.get(new DocId("45Acl0_Group6"));
  assertEquals(new DocId("45Acl0_Group5"), acl1.getInheritFrom());
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group6", "NS_Local")),
      acl1.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl1.getDenyGroups());

  Acl acl2 = namedResources.get(new DocId("45Acl0_Group5"));
  assertEquals(new DocId("45Acl0_Group4"), acl2.getInheritFrom());
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl2.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group5", "NS_Local")),
      acl2.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl2.getDenyGroups());

  // Head of the chain: no inheritFrom assertion is made here.
  Acl acl3 = namedResources.get(new DocId("45Acl0_Group4"));
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl3.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group4", "NS_Local")),
      acl3.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl3.getDenyGroups());

  Acl acl4 = namedResources.get(new DocId(id));
  assertEquals(new DocId("45Acl0_Group6"), acl4.getInheritFrom());
  assertEquals(InheritanceType.PARENT_OVERRIDES, acl4.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
      new GroupPrincipal("Group2", "NS_Local")),
      acl4.getPermitGroups());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
      acl4.getDenyGroups());
}
@Test
// Combined case: the required-group chain (Group4 <- Group5 <- Group6)
// is built first, the _reqGroupSet ACL inherits from the end of that
// chain, and the base ACL inherits from the _reqGroupSet ACL.
public void testRequiredGroupsAndSetsAcl() throws Exception {
  insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
  insertGroup("Group1", "User2", "User3");
  insertGroup("Group2", "User4", "User5");
  insertGroup("Group3", "User6", "User7");
  insertGroup("Group4", "User2", "User3");
  insertGroup("Group5", "User4", "User5");
  insertGroup("Group6", "User6", "User7");
  insertGroup("GroupSet1", "Group1", "Group2");
  insertGroup("GroupSet2", "Group5", "Group6");

  String id = "45Acl0";
  createAcl(id);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
  addAllowPermitToAcl(id, "Group2", IDfACL.DF_PERMIT_WRITE);
  addDenyPermitToAcl(id, "Group3", IDfACL.DF_PERMIT_READ);
  addRequiredGroupToAcl(id, "Group4");
  addRequiredGroupToAcl(id, "Group5");
  addRequiredGroupToAcl(id, "Group6");
  addRequiredGroupSetToAcl(id, "GroupSet1");
  addRequiredGroupSetToAcl(id, "GroupSet2");

  Map<DocId, Acl> namedResources = getAllAcls();
  assertEquals(5, namedResources.size());

  Acl acl1 = namedResources.get(new DocId("45Acl0_Group6"));
  assertEquals(new DocId("45Acl0_Group5"), acl1.getInheritFrom());
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group6", "NS_Local")),
      acl1.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl1.getDenyGroups());

  Acl acl2 = namedResources.get(new DocId("45Acl0_Group5"));
  assertEquals(new DocId("45Acl0_Group4"), acl2.getInheritFrom());
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl2.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group5", "NS_Local")),
      acl2.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl2.getDenyGroups());

  Acl acl3 = namedResources.get(new DocId("45Acl0_Group4"));
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl3.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group4", "NS_Local")),
      acl3.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl3.getDenyGroups());

  Acl acl4 = namedResources.get(new DocId("45Acl0_reqGroupSet"));
  assertEquals(new DocId("45Acl0_Group6"), acl4.getInheritFrom());
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl4.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("GroupSet1", "NS_Local"),
      new GroupPrincipal("GroupSet2", "NS_Local")),
      acl4.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl4.getDenyGroups());

  Acl acl5 = namedResources.get(new DocId(id));
  assertEquals(new DocId("45Acl0_reqGroupSet"),
      acl5.getInheritFrom());
  assertEquals(InheritanceType.PARENT_OVERRIDES, acl5.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
      new GroupPrincipal("Group2", "NS_Local")),
      acl5.getPermitGroups());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group3", "NS_Local")),
      acl5.getDenyGroups());
}
// TODO(srinivas): we should check whether we have a test of non-existent
// users and groups in permits and denies.
@Test
// A required group that has no dm_user/dm_group rows still produces
// its chained ACL, but contributes no principals to it.
public void testMissingRequiredGroup() throws Exception {
  insertUsers("User1", "User2", "User3");
  insertGroup("Group1", "User2", "User3");

  String id = "45Acl0";
  createAcl(id);
  addAllowPermitToAcl(id, "Group1", IDfACL.DF_PERMIT_READ);
  addRequiredGroupToAcl(id, "GroupNotExists");

  Map<DocId, Acl> namedResources = getAllAcls();
  assertEquals(2, namedResources.size());

  Acl acl1 = namedResources.get(new DocId("45Acl0_GroupNotExists"));
  assertEquals(InheritanceType.AND_BOTH_PERMIT, acl1.getInheritanceType());
  assertEquals(ImmutableSet.of(), acl1.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl1.getDenyGroups());

  // Verify GroupNotExists group is not in permit or deny groups.
  Acl acl2 = namedResources.get(new DocId(id));
  assertEquals(new DocId("45Acl0_GroupNotExists"),
      acl2.getInheritFrom());
  assertEquals(InheritanceType.PARENT_OVERRIDES, acl2.getInheritanceType());
  assertEquals(ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local")),
      acl2.getPermitGroups());
  assertEquals(ImmutableSet.of(), acl2.getDenyGroups());
}
/**
 * Builds an adaptor with an optional adaptor.caseSensitivityType
 * config override, runs a full traversal, and returns the pushed ACLs.
 *
 * @param caseSensitivityType config override, or null for the default
 */
private Map<DocId, Acl> getAclMap(CaseSensitivityType caseSensitivityType)
    throws Exception {
  IDfClientX dmClientX = new H2BackedTestProxies().getProxyClientX();
  DocumentumAdaptor adaptor = new DocumentumAdaptor(dmClientX);
  AdaptorContext context = ProxyAdaptorContext.getInstance();
  initTestAdaptorConfig(context);
  if (caseSensitivityType != null) {
    context.getConfig().overrideKey("adaptor.caseSensitivityType",
        caseSensitivityType.toString());
  }
  adaptor.init(context);
  AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
  adaptor.getDocIds(pusher);
  return pusher.getNamedResources();
}

/** Returns all ACLs using the default case sensitivity configuration. */
private Map<DocId, Acl> getAclMap() throws Exception {
  return getAclMap(null);
}
@Test
// Default configuration yields case-sensitive ACLs.
public void testAclCaseSensitivity_basic() throws Exception {
  createAcl("4501081f80000100");
  Map<DocId, Acl> aclMap = getAclMap();
  Acl acl = aclMap.get(new DocId("4501081f80000100"));
  assertTrue("Expected everything-case-sensitive",
      acl.isEverythingCaseSensitive());
}

@Test
// Chained required-group ACLs also default to case-sensitive.
public void testAclCaseSensitivity_required() throws Exception {
  insertUsers("User1", "User2", "User3");
  insertGroup("Group1", "User2", "User3");
  createAcl("4501081f80000100");
  addRequiredGroupToAcl("4501081f80000100", "Group1");
  Map<DocId, Acl> aclMap = getAclMap();
  Acl acl = aclMap.get(new DocId("4501081f80000100_Group1"));
  assertTrue("Expected everything-case-sensitive",
      acl.isEverythingCaseSensitive());
}

@Test
// An explicit EVERYTHING_CASE_SENSITIVE override is honored.
public void testAclCaseSensitivity_sensitive() throws Exception {
  createAcl("4501081f80000100");
  Map<DocId, Acl> aclMap =
      getAclMap(CaseSensitivityType.EVERYTHING_CASE_SENSITIVE);
  Acl acl = aclMap.get(new DocId("4501081f80000100"));
  assertTrue("Expected everything-case-sensitive",
      acl.isEverythingCaseSensitive());
}

@Test
// An EVERYTHING_CASE_INSENSITIVE override flips plain ACLs.
public void testAclCaseSensitivity_insensitive() throws Exception {
  createAcl("4501081f80000100");
  Map<DocId, Acl> aclMap =
      getAclMap(CaseSensitivityType.EVERYTHING_CASE_INSENSITIVE);
  Acl acl = aclMap.get(new DocId("4501081f80000100"));
  assertTrue("Expected everything-case-insensitive",
      acl.isEverythingCaseInsensitive());
}

@Test
// ...and flips chained required-group ACLs too.
public void testAclCaseSensitivity_required_insensitive() throws Exception {
  insertUsers("User1", "User2", "User3");
  insertGroup("Group1", "User2", "User3");
  createAcl("4501081f80000100");
  addRequiredGroupToAcl("4501081f80000100", "Group1");
  Map<DocId, Acl> aclMap =
      getAclMap(CaseSensitivityType.EVERYTHING_CASE_INSENSITIVE);
  Acl acl = aclMap.get(new DocId("4501081f80000100_Group1"));
  assertTrue("Expected everything-case-insensitive",
      acl.isEverythingCaseInsensitive());
}
/**
 * Inserts a dm_audittrail_acl row recording an ACL event.
 *
 * @param id audit record object id
 * @param auditObjId object id of the audited ACL
 * @param eventName e.g. "dm_save", "dm_saveasnew", "dm_destroy"
 * @param date UTC timestamp string for time_stamp_utc
 */
private void insertAclAudit(String id, String auditObjId,
    String eventName, String date) throws SQLException {
  executeUpdate(String.format(
      "insert into dm_audittrail_acl(r_object_id, audited_obj_id, "
      + "event_name, time_stamp_utc) "
      + "values('%s', '%s', '%s', {ts '%s'})",
      id, auditObjId, eventName, date));
}

/**
 * Returns date string for the given number of minutes into the future
 * or past.
 *
 * @param minutes minutes to add; may be negative for a past time.
 * @return date in string format.
 */
private String getNowPlusMinutes(int minutes) {
  Calendar calendar = Calendar.getInstance();
  calendar.add(Calendar.MINUTE, minutes);
  return dateFormat.format(calendar.getTime());
}
/**
 * Runs the incremental modified-ACL traversal starting from the given
 * checkpoint, asserts the traverser advanced to the expected
 * checkpoint, and returns the ACLs pushed.
 */
private Map<DocId, Acl> testUpdateAcls(Checkpoint checkpoint,
    Checkpoint expectedCheckpoint)
    throws DfException, IOException, InterruptedException {
  DocumentumAdaptor adaptor = getObjectUnderTest();
  AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
  adaptor.modifiedAclTraverser.setCheckpoint(checkpoint);
  adaptor.getModifiedDocIds(pusher);
  assertEquals(expectedCheckpoint,
      adaptor.modifiedAclTraverser.getCheckpoint());
  return pusher.getNamedResources();
}

/**
 * Convenience overload that additionally asserts exactly which ACL
 * DocIds were pushed.
 */
private void testUpdateAcls(Checkpoint checkpoint, Set<DocId> expectedAclIds,
    Checkpoint expectedCheckpoint)
    throws DfException, IOException, InterruptedException {
  Map<DocId, Acl> aclMap = testUpdateAcls(checkpoint, expectedCheckpoint);
  assertEquals(expectedAclIds, aclMap.keySet());
}

/**
 * Asserts that {@code actual} contains exactly the expected users,
 * each wrapped as a UserPrincipal in the "globalNS" namespace.
 */
private void assertUsers(Set<UserPrincipal> actual, String... expected) {
  ImmutableSet.Builder<UserPrincipal> builder = ImmutableSet.builder();
  for (String user : expected) {
    builder.add(new UserPrincipal(user, "globalNS"));
  }
  assertEquals(builder.build(), actual);
}
@Test
public void testUpdateAcls() throws Exception {
createAcl("4501081f80000100");
createAcl("4501081f80000101");
String dateStr = getNowPlusMinutes(5);
insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
insertAclAudit("125", "4501081f80000102", "dm_destroy", dateStr);
testUpdateAcls(Checkpoint.incremental(),
ImmutableSet.of(
new DocId("4501081f80000100"),
new DocId("4501081f80000101"),
new DocId("4501081f80000102")),
new Checkpoint(dateStr, "125"));
}
@Test
public void testUpdateAclsPrincipals() throws Exception {
insertUsers("User1", "User2", "User3", "User4", "User5", "User6");
String aclId1 = "4501081f80000100";
String aclId2 = "4501081f80000101";
createAcl(aclId1);
addAllowPermitToAcl(aclId1, "User1", IDfACL.DF_PERMIT_WRITE);
addAllowPermitToAcl(aclId1, "User2", IDfACL.DF_PERMIT_READ);
addDenyPermitToAcl(aclId1, "User3", IDfACL.DF_PERMIT_READ);
createAcl(aclId2);
addAllowPermitToAcl(aclId2, "User4", IDfACL.DF_PERMIT_WRITE);
addAllowPermitToAcl(aclId2, "User5", IDfACL.DF_PERMIT_READ);
addDenyPermitToAcl(aclId2, "User6", IDfACL.DF_PERMIT_READ);
String dateStr = getNowPlusMinutes(5);
insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
insertAclAudit("125", "4501081f80000102", "dm_destroy", dateStr);
Map<DocId, Acl> aclMap = testUpdateAcls(Checkpoint.incremental(),
new Checkpoint(dateStr, "125"));
Acl acl1 = aclMap.get(new DocId(aclId1));
assertUsers(acl1.getPermitUsers(), "User1", "User2");
assertUsers(acl1.getDenyUsers(), "User3");
Acl acl2 = aclMap.get(new DocId(aclId2));
assertUsers(acl2.getPermitUsers(), "User4", "User5");
assertUsers(acl2.getDenyUsers(), "User6");
Acl acl3 = aclMap.get(new DocId("4501081f80000102"));
assertTrue(acl3.getPermitUsers().toString(),
acl3.getPermitUsers().isEmpty());
assertTrue(acl3.getDenyUsers().toString(), acl3.getDenyUsers().isEmpty());
assertTrue(acl3.getPermitGroups().toString(),
acl3.getPermitGroups().isEmpty());
assertTrue(acl3.getDenyGroups().toString(), acl3.getDenyGroups().isEmpty());
}
  // Multiple audit events on the same ACL object id collapse into a single
  // DocId; the checkpoint still advances to the last audit row ("125").
  @Test
  public void testUpdateAclsWithSameObjectId() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    String dateStr = getNowPlusMinutes(6);
    insertAclAudit("123", "4501081f80000100", "dm_saveasnew", dateStr);
    insertAclAudit("124", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("125", "4501081f80000100", "dm_destroy", dateStr);
    testUpdateAcls(Checkpoint.incremental(),
        ImmutableSet.of(new DocId("4501081f80000100")),
        new Checkpoint(dateStr, "125"));
  }
  // Audit events older than the supplied checkpoint are not pushed again,
  // and the checkpoint is left unchanged.
  @Test
  public void testPreviouslyUpdatedAcls() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    String dateStr = getNowPlusMinutes(-10);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
    insertAclAudit("125", "4501081f80000102", "dm_destroy", dateStr);
    Checkpoint checkpoint = new Checkpoint(getNowPlusMinutes(0), "0");
    testUpdateAcls(checkpoint, ImmutableSet.<DocId>of(), checkpoint);
  }
  // Two successive incremental passes: the second pass, started from the
  // first pass's checkpoint, picks up only the later audit events.
  @Test
  public void testMultiUpdateAcls() throws Exception {
    createAcl("4501081f80000100");
    createAcl("4501081f80000101");
    createAcl("4501081f80000102");
    createAcl("4501081f80000103");
    String dateStr = getNowPlusMinutes(10);
    insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "4501081f80000101", "dm_saveasnew", dateStr);
    insertAclAudit("125", "4501081f80000102", "dm_saveasnew", dateStr);
    Checkpoint firstCheckpoint = new Checkpoint(dateStr, "125");
    testUpdateAcls(Checkpoint.incremental(),
        ImmutableSet.of(
            new DocId("4501081f80000100"),
            new DocId("4501081f80000101"),
            new DocId("4501081f80000102")),
        firstCheckpoint);
    dateStr = getNowPlusMinutes(15);
    insertAclAudit("126", "4501081f80000103", "dm_saveasnew", dateStr);
    insertAclAudit("127", "4501081f80000104", "dm_destroy", dateStr);
    testUpdateAcls(firstCheckpoint,
        ImmutableSet.of(
            new DocId("4501081f80000103"),
            new DocId("4501081f80000104")),
        new Checkpoint(dateStr, "127"));
  }
  // A second pass started from the returned checkpoint finds no new events
  // and leaves the checkpoint where it was.
  @Test
  public void testMultiUpdateAclsWithNoResults() throws Exception {
    createAcl("4501081f80000106");
    createAcl("4501081f80000107");
    String dateStr = getNowPlusMinutes(20);
    insertAclAudit("128", "4501081f80000106", "dm_saveasnew", dateStr);
    insertAclAudit("129", "4501081f80000107", "dm_saveasnew", dateStr);
    Checkpoint expectedCheckpoint = new Checkpoint(dateStr, "129");
    testUpdateAcls(Checkpoint.incremental(),
        ImmutableSet.of(
            new DocId("4501081f80000106"),
            new DocId("4501081f80000107")),
        expectedCheckpoint);
    testUpdateAcls(expectedCheckpoint, ImmutableSet.<DocId>of(),
        expectedCheckpoint);
  }
  /*
   * TODO(jlacey): A hack of sizeable proportions. To mimic an
   * exception thrown from the loop in getUpdateAcls, we create a
   * non-destroy audit event with no corresponding ACL. The mock
   * getObject throws an AssertionError.
   *
   * Returns the DocumentumAcls so that callers can inspect where the
   * checkpoint was left after the failure.
   */
  private DocumentumAcls getUpdateAclsAndFail() throws Exception {
    H2BackedTestProxies proxyCls = new H2BackedTestProxies();
    IDfClientX dmClientX = proxyCls.getProxyClientX();
    IDfSessionManager dmSessionManager =
        dmClientX.getLocalClient().newSessionManager();
    IDfSession dmSession = dmSessionManager.getSession("testdocbase");
    DocumentumAcls dctmAcls = new DocumentumAcls(dmClientX, dmSession,
        new Principals(dmSession, "localNS", "globalNS", null),
        CaseSensitivityType.EVERYTHING_CASE_INSENSITIVE);
    Map<DocId, Acl> aclMap = new HashMap<>();
    try {
      dctmAcls.getUpdateAcls(Checkpoint.incremental(), aclMap);
      fail("Expected an AssertionError");
    } catch (AssertionError expected) {
      // Expected: thrown by the mock getObject for the missing ACL.
    }
    return dctmAcls;
  }
  // A failure on the very first audit row leaves the checkpoint untouched.
  @Test
  public void testUpdateAclsFirstRowFailure() throws Exception {
    String dateStr = getNowPlusMinutes(5);
    insertAclAudit("123", "234", "4501081f80000100", "dm_save", dateStr);
    DocumentumAcls dctmAcls = getUpdateAclsAndFail();
    assertEquals(Checkpoint.incremental(), dctmAcls.getCheckpoint());
  }
  // A failure on the second audit row advances the checkpoint only as far
  // as the last successfully processed row ("123").
  @Test
  public void testUpdateAclsSecondRowFailure() throws Exception {
    createAcl("4501081f80000100");
    String dateStr = getNowPlusMinutes(3);
    insertAclAudit("123", "234", "4501081f80000100", "dm_save", dateStr);
    insertAclAudit("124", "235", "4501081f80000101", "dm_saveasnew",
        getNowPlusMinutes(5));
    DocumentumAcls dctmAcls = getUpdateAclsAndFail();
    assertEquals(new Checkpoint(dateStr, "123"), dctmAcls.getCheckpoint());
  }
  /** Inserts one row into the dm_audittrail table with the given values. */
  private void insertAuditTrailEvent(String date, String id, String eventName,
      String attributeList, String auditObjId, String chronicleId)
      throws SQLException {
    executeUpdate(String.format(
        "insert into dm_audittrail(time_stamp_utc, r_object_id, event_name, "
        + "attribute_list, audited_obj_id, chronicle_id) "
        + "values({ts '%s'},'%s', '%s', '%s', '%s', '%s')", date, id,
        eventName, attributeList, auditObjId, chronicleId));
  }
private void insertAuditTrailAclEvent(String date, String id,
String auditObjId) throws SQLException {
insertAuditTrailEvent(date, id, "dm_save", "acl_name=", auditObjId,
auditObjId);
}
  /** Inserts a dm_save ACL audit event with an explicit chronicle id. */
  private void insertAuditTrailAclEvent(String date, String id,
      String auditObjId, String chronicleId) throws SQLException {
    insertAuditTrailEvent(date, id, "dm_save", "acl_name=", auditObjId,
        chronicleId);
  }
  /**
   * Seeds the adaptor with the given document and permissions checkpoints,
   * runs an incremental pass, and verifies both the pushed records and the
   * resulting permissions checkpoint.
   *
   * @param docCheckpoint starting checkpoint for modified documents
   * @param permissionsCheckpoint starting checkpoint for permission changes
   * @param expectedDocIdlist records the pusher is expected to receive
   * @param expectedCheckpoint expected permissions checkpoint afterwards
   */
  private void testUpdatedPermissions(Checkpoint docCheckpoint,
      Checkpoint permissionsCheckpoint, List<Record> expectedDocIdlist,
      Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest();
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.modifiedDocumentsCheckpoint = docCheckpoint;
    adaptor.modifiedPermissionsCheckpoint = permissionsCheckpoint;
    adaptor.getModifiedDocIds(pusher);
    assertEquals(expectedDocIdlist, pusher.getRecords());
    assertEquals(expectedCheckpoint, adaptor.modifiedPermissionsCheckpoint);
  }
  /**
   * Inserts the start-path folder with three documents, backdated so they
   * are not reported as document updates, and returns a matching
   * document checkpoint.
   */
  private Checkpoint insertTestDocuments() throws SQLException {
    String folderId = "0bd29";
    String folder = START_PATH;
    // To skip doc updates, set time for document creation 5 min earlier.
    String dateStr = getNowPlusMinutes(-5);
    insertFolder(dateStr, folderId, folder);
    insertDocument(dateStr, "09514", folder + "/file1", folderId);
    insertDocument(dateStr, "09515", folder + "/file2", folderId);
    insertDocument(dateStr, "09516", folder + "/file3", folderId);
    return new Checkpoint(dateStr, folderId);
  }
  // One ACL audit event per document: all three documents are re-pushed.
  @Test
  public void testUpdatedPermissions() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516");
    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1", "file2", "file3"),
        new Checkpoint(dateStr, "5f125"));
  }
  // A checkpoint already pointing at the first audit event ("5f123")
  // skips that event; only the later documents are re-pushed.
  @Test
  public void testUpdatedPermissions_ModifiedCheckpoint() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516");
    testUpdatedPermissions(docCheckpoint, new Checkpoint(dateStr, "5f123"),
        makeExpectedDocIds(START_PATH, "file2", "file3"),
        new Checkpoint(dateStr, "5f125"));
  }
  // Several audit events against the same document ("09514") produce a
  // single record for that document.
  @Test
  public void testUpdatedPermissions_MultipleUpdates() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(getNowPlusMinutes(3), "5f123", "09514");
    insertAuditTrailAclEvent(getNowPlusMinutes(4), "5f124", "09514");
    insertAuditTrailAclEvent(getNowPlusMinutes(5), "5f125", "09514");
    insertAuditTrailAclEvent(getNowPlusMinutes(5), "5f126", "09515");
    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1", "file2"),
        new Checkpoint(dateStr, "5f126"));
  }
  // Events sharing one chronicle id ("09234") are deduplicated: only the
  // first audited document is pushed, though the checkpoint still advances.
  @Test
  public void testUpdatedPermissions_SameChronicleId() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514", "09234");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515", "09234");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516", "09234");
    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1"),
        new Checkpoint(dateStr, "5f125"));
  }
  // A checkpoint already at the last event yields no records and an
  // unchanged checkpoint.
  @Test
  public void testUpdatedPermissions_EmptyResults() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    insertAuditTrailAclEvent(dateStr, "5f124", "09515");
    insertAuditTrailAclEvent(dateStr, "5f125", "09516");
    testUpdatedPermissions(docCheckpoint, new Checkpoint(dateStr, "5f125"),
        makeExpectedDocIds(START_PATH),
        new Checkpoint(dateStr, "5f125"));
  }
  // A document linked into three folders under the start path is pushed
  // once per path.
  @Test
  public void testUpdatedPermissions_MultiplePaths() throws Exception {
    // To skip doc updates, set time for document creation 5 min earlier.
    String min5back = getNowPlusMinutes(-5);
    insertFolder(min5back, "0bd30", START_PATH + "/folder1");
    insertFolder(min5back, "0bd31", START_PATH + "/folder2");
    insertFolder(min5back, "0bd32", START_PATH + "/folder/folder3");
    insertSysObject(min5back, "09514", "file1", START_PATH + "/folder1/file1,"
        + START_PATH + "/folder2/file1," + START_PATH + "/folder/folder3/file1",
        "dm_document", "0bd30", "0bd31", "0bd32");
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    testUpdatedPermissions(new Checkpoint(min5back, "0bd32"),
        Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "folder1/file1", "folder2/file1",
            "folder/folder3/file1"), new Checkpoint(dateStr, "5f123"));
  }
  // Paths outside the configured start path ("/Folder2/...") are excluded
  // from the pushed records.
  @Test
  public void testUpdatedPermissions_InvalidStartPath() throws Exception {
    // To skip doc updates, set time for document creation 5 min earlier.
    String min5back = getNowPlusMinutes(-5);
    insertFolder(min5back, "0bd30", START_PATH + "/folder1");
    insertFolder(min5back, "0bd31", START_PATH + "/folder2");
    insertFolder(min5back, "0bd32", "/Folder2/folder3");
    insertSysObject(min5back, "09514", "file1", START_PATH + "/folder1/file1,"
        + START_PATH + "/folder2/file1," + "/Folder2/folder3/file1",
        "dm_document", "0bd30", "0bd31", "0bd32");
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    testUpdatedPermissions(new Checkpoint(min5back, "0bd32"),
        Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "folder1/file1", "folder2/file1"),
        new Checkpoint(dateStr, "5f123"));
  }
  // A document that is both newly modified and has an ACL audit event is
  // pushed twice: once by the document pass and once by the permissions pass.
  @Test
  public void testUpdatedPermissions_DocAndPermissions() throws Exception {
    Checkpoint docCheckpoint = new Checkpoint(getNowPlusMinutes(-5), "5f125");
    String dateStr = getNowPlusMinutes(5);
    String folderId = "0bd29";
    String folder = START_PATH;
    insertFolder(getNowPlusMinutes(-5), folderId, folder);
    insertSysObject(dateStr, "09514", "file1", START_PATH + "/file1",
        "dm_document", "0bd29");
    insertAuditTrailAclEvent(dateStr, "5f123", "09514");
    testUpdatedPermissions(docCheckpoint, Checkpoint.incremental(),
        makeExpectedDocIds(START_PATH, "file1", "file1"),
        new Checkpoint(dateStr, "5f123"));
  }
  // Only dm_save events whose attribute list names acl_name count as
  // permission changes; dm_link and object_name events are ignored.
  @Test
  public void testUpdatedPermissions_AclNonAclEvents() throws Exception {
    Checkpoint docCheckpoint = insertTestDocuments();
    String dateStr = getNowPlusMinutes(5);
    insertAuditTrailEvent(dateStr, "5f123", "dm_save", "acl_name=",
        "09514", "09514");
    insertAuditTrailEvent(dateStr, "5f124", "dm_link", "acl_name=",
        "09515", "09515");
    insertAuditTrailEvent(dateStr, "5f125", "dm_save", "object_name=",
        "09516", "09516");
    insertAuditTrailEvent(dateStr, "5f126", "dm_link", "object_name=",
        "09517", "09517");
    Checkpoint checkPoint = Checkpoint.incremental();
    testUpdatedPermissions(docCheckpoint, checkPoint,
        makeExpectedDocIds(START_PATH, "file1"),
        new Checkpoint(dateStr, "5f123"));
  }
  // Exercises Checkpoint accessors and the equals contract (reflexive,
  // equal-by-value, not equal to null or to differing checkpoints).
  @Test
  public void testCheckpoint() throws Exception {
    Checkpoint checkpoint = Checkpoint.incremental();
    assertEquals("0", checkpoint.getObjectId());
    assertNotNull(checkpoint.getLastModified());
    assertTrue(checkpoint.equals(checkpoint));
    checkpoint = new Checkpoint("foo", "bar");
    assertEquals("foo", checkpoint.getLastModified());
    assertEquals("bar", checkpoint.getObjectId());
    assertTrue(checkpoint.equals(checkpoint));
    assertTrue(checkpoint.equals(new Checkpoint("foo", "bar")));
    assertFalse(checkpoint.equals(null));
    assertFalse(checkpoint.equals(Checkpoint.incremental()));
    assertFalse(checkpoint.equals(new Checkpoint("foo", "xyzzy")));
  }
private Map<GroupPrincipal, ? extends Collection<Principal>> getGroups()
throws Exception {
return getGroups(LocalGroupsOnly.FALSE, "");
}
  /**
   * Runs a full getDocIds pass with the given group-related configuration
   * and returns the groups collected by the pusher.
   *
   * @param localGroupsOnly whether to push only docbase-local groups
   * @param windowsDomain domain prepended to user names; "" for none
   */
  private Map<GroupPrincipal, ? extends Collection<Principal>> getGroups(
      LocalGroupsOnly localGroupsOnly, String windowsDomain)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.<String, String>builder()
        .put("documentum.pushLocalGroupsOnly", localGroupsOnly.toString())
        .put("documentum.windowsDomain", windowsDomain)
        .put("adaptor.namespace", "NS")
        .put("documentum.docbaseName", "Local") // Local Namespace
        .build());
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.getDocIds(pusher);
    return pusher.getGroups();
  }
/* Filters the 'dm_world' group out of the map of groups. */
private <T> Map<GroupPrincipal, T> filterDmWorld(Map<GroupPrincipal, T> map) {
return Maps.filterKeys(map, new Predicate<GroupPrincipal>() {
public boolean apply(GroupPrincipal principal) {
return !"dm_world".equals(principal.getName());
}
});
}
  @Test
  public void testGetGroupsDmWorldOnly() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    // The only group should be the virtual group, dm_world, which consists
    // of all users.
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("dm_world", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));
    assertEquals(expected, getGroups());
  }
  // Groups with only user members are pushed in the local namespace
  // ("NS_Local") with user principals in the global namespace ("NS").
  @Test
  public void testGetGroupsUserMembersOnly() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "User3", "User4", "User5");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User3", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // Members that do not exist in dm_user (User2, User4) are dropped from
  // the pushed group memberships.
  @Test
  public void testGetGroupsInvalidMembers() throws Exception {
    insertUsers("User1", "User3", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "User3", "User4", "User5");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User3", "NS"),
                new UserPrincipal("User5", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // Disabled users are dropped from memberships, and a disabled group
  // (Group2) is not pushed at all.
  @Test
  public void testGetGroupsDisabledMembers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5", "User6", "User7");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "User3", "User4", "User5");
    insertGroup("Group3", "User5", "User6", "User7");
    disableUsers("User2", "User4", "User6", "Group2");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group3", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User5", "NS"),
                new UserPrincipal("User7", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // A group with no members is still pushed, with an empty principal set.
  @Test
  public void testGetGroupsEmptyGroup() throws Exception {
    insertUsers("User1", "User3", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.<Principal>of());
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // A group member is represented as a nested GroupPrincipal, not expanded
  // into its users.
  @Test
  public void testGetGroupsUserAndGroupMembers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // Users are pushed under user_login_name ("UserTres"), not user_name.
  @Test
  public void testGetGroupsDifferentMemberLoginName() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate("insert into dm_user(user_name, user_login_name) "
        + "values('User3', 'UserTres')");
    insertGroup("Group1", "User1", "User2", "User3");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("UserTres", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // Groups are pushed under their user_login_name ("GroupUno"), not the
  // dm_group group_name.
  @Test
  public void testGetGroupsDifferentGroupLoginName() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate(
        "insert into dm_user(user_name, user_login_name, r_is_group) "
        + "values('Group1', 'GroupUno', TRUE)");
    executeUpdate("insert into dm_group(group_name, users_names) "
        + "values ('Group1', 'User1'), ('Group1', 'User2')");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("GroupUno", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // An LDAP-sourced member is named from its DN as domain\\user
  // ("test\\User3").
  @Test
  public void testGetGroupsMemberLdapDn() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate("insert into dm_user(user_name, user_login_name, "
        + "user_source, user_ldap_dn, r_is_group) values('User3', 'User3', "
        + "'LDAP', 'cn=User3,dc=test,dc=com', TRUE)");
    insertGroup("Group1", "User1", "User2", "User3");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("test\\User3", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // An LDAP-sourced group is named from its DN ("test\\Group1") and goes
  // into the global namespace ("NS") rather than the local one.
  @Test
  public void testGetGroupsGroupLdapDn() throws Exception {
    insertUsers("User1", "User2");
    executeUpdate("insert into dm_user(user_name, user_login_name, "
        + "user_source, user_ldap_dn) values('Group1', 'Group1', 'LDAP', "
        + "'cn=Group1,dc=test,dc=com')");
    executeUpdate("insert into dm_group(group_name, group_source, users_names) "
        + "values ('Group1', 'LDAP', 'User1'), ('Group1', 'LDAP', 'User2')");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected =
        ImmutableMap.of(new GroupPrincipal("test\\Group1", "NS"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // With a windowsDomain configured, user principals get the domain prefix
  // ("TEST\\User1") but group principals do not.
  @Test
  public void testGetGroupsWindowsDomainUsers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("TEST\\User1", "NS"),
                new UserPrincipal("TEST\\User2", "NS"),
                new UserPrincipal("TEST\\User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new GroupPrincipal("Group1", "NS_Local"),
                new UserPrincipal("TEST\\User4", "NS"),
                new UserPrincipal("TEST\\User5", "NS")));
    Map<GroupPrincipal, ? extends Collection<Principal>> groups =
        getGroups(LocalGroupsOnly.FALSE, "TEST");
    assertEquals(expected, filterDmWorld(groups));
  }
  // Local groups land in the local namespace ("NS_Local"); LDAP groups
  // land in the global namespace ("NS").
  @Test
  public void testGetGroupsLocalAndGlobalGroups() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertGroup("Group1", "User1", "User2", "User3");
    insertLdapGroup("Group2", "User3", "User4", "User5");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS"),
            ImmutableSet.of(new UserPrincipal("User3", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // An LDAP group nested inside a local group keeps its global namespace
  // ("NS") in the membership of the local group.
  @Test
  public void testGetGroupsGlobalGroupMembers() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertLdapGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS"),
            ImmutableSet.of(new UserPrincipal("User1", "NS"),
                new UserPrincipal("User2", "NS"),
                new UserPrincipal("User3", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new GroupPrincipal("Group1", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));
    assertEquals(expected, filterDmWorld(getGroups()));
  }
  // With pushLocalGroupsOnly, the LDAP group itself is not pushed, though
  // it still appears as a member of the local group.
  @Test
  public void testGetGroupsLocalGroupsOnly() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    insertLdapGroup("Group1", "User1", "User2", "User3");
    insertGroup("Group2", "Group1", "User4", "User5");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new GroupPrincipal("Group1", "NS"),
                new UserPrincipal("User4", "NS"),
                new UserPrincipal("User5", "NS")));
    Map<GroupPrincipal, ? extends Collection<Principal>> groups =
        getGroups(LocalGroupsOnly.TRUE, "");
    assertEquals(expected, filterDmWorld(groups));
  }
  /** Inserts a non-LDAP group stamped with the given modification time. */
  private void insertModifiedGroup(String lastModified, String groupName,
      String... members) throws SQLException {
    insertGroupEx(lastModified, "", groupName, members);
  }
  /**
   * Runs an incremental pass starting from the given group checkpoint and
   * verifies both the groups pushed and the traverser's final checkpoint.
   *
   * @param localGroupsOnly whether to push only docbase-local groups
   * @param checkpoint starting checkpoint for the group traverser
   * @param expectedGroups groups the pusher is expected to receive
   * @param expectedCheckpoint expected traverser checkpoint afterwards
   */
  private void checkModifiedGroupsPushed(LocalGroupsOnly localGroupsOnly,
      Checkpoint checkpoint,
      Map<GroupPrincipal, ? extends Collection<? extends Principal>>
      expectedGroups, Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.<String, String>builder()
        .put("documentum.pushLocalGroupsOnly", localGroupsOnly.toString())
        .put("adaptor.namespace", "NS")
        .put("documentum.docbaseName", "Local") // Local Namespace
        .build());
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.modifiedGroupTraverser.setCheckpoint(checkpoint);
    adaptor.getModifiedDocIds(pusher);
    assertEquals(expectedGroups, pusher.getGroups());
    assertEquals(expectedCheckpoint,
        adaptor.modifiedGroupTraverser.getCheckpoint());
  }
  @Test
  public void testGetGroupUpdatesNoDmWorld() throws Exception {
    insertUsers("User1", "User2", "User3", "User4", "User5");
    // The virtual group, dm_world, should not be pushed for updates.
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.<GroupPrincipal, Collection<Principal>>of();
    Checkpoint checkpoint = Checkpoint.incremental();
    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE, checkpoint, expected,
        checkpoint);
  }
  // All groups modified after the checkpoint are pushed; the new checkpoint
  // records the latest group ("12Group1" — presumably a type tag plus the
  // group name; TODO confirm against insertGroupEx).
  @Test
  public void testGetGroupUpdatesAllNew() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertModifiedGroup(MAR_1970, "Group1", "User1");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));
    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE,
        new Checkpoint(JAN_1970, "0"), expected,
        new Checkpoint(MAR_1970, "12Group1"));
  }
  // A group at the checkpoint itself (Group0) is skipped; only strictly
  // newer groups are pushed.
  @Test
  public void testGetGroupUpdatesSomeNew() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(JAN_1970, "Group0", "User2");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertModifiedGroup(MAR_1970, "Group1", "User1");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group1", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User1", "NS")),
            new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));
    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE,
        new Checkpoint(JAN_1970, "12Group0"), expected,
        new Checkpoint(MAR_1970, "12Group1"));
  }
  // A checkpoint at the latest group yields no pushes and an unchanged
  // checkpoint.
  @Test
  public void testGetGroupUpdatesNoneNew() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertModifiedGroup(MAR_1970, "Group1", "User1");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.<GroupPrincipal, Collection<Principal>>of();
    Checkpoint checkpoint = new Checkpoint(MAR_1970, "12Group1");
    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE, checkpoint, expected,
        checkpoint);
  }
  // LDAP groups are included in updates (global namespace "NS") when
  // pushLocalGroupsOnly is false.
  @Test
  public void testGetGroupUpdatesSomeLdapGroups() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(JAN_1970, "Group1", "User1");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertGroupEx(MAR_1970, "LDAP", "GroupLDAP", "User2");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")),
            new GroupPrincipal("GroupLDAP", "NS"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));
    checkModifiedGroupsPushed(LocalGroupsOnly.FALSE,
        new Checkpoint(JAN_1970, "12Group1"), expected,
        new Checkpoint(MAR_1970, "12GroupLDAP"));
  }
  // With pushLocalGroupsOnly, the LDAP group is excluded and the checkpoint
  // stops at the last local group ("12Group2").
  @Test
  public void testGetGroupUpdatesLocalGroupsOnly() throws Exception {
    insertUsers("User1", "User2");
    insertModifiedGroup(JAN_1970, "Group1", "User1");
    insertModifiedGroup(FEB_1970, "Group2", "User2");
    insertGroupEx(MAR_1970, "LDAP", "GroupLDAP", "User2");
    ImmutableMap<GroupPrincipal, ? extends Collection<? extends Principal>>
        expected = ImmutableMap.of(new GroupPrincipal("Group2", "NS_Local"),
            ImmutableSet.of(new UserPrincipal("User2", "NS")));
    checkModifiedGroupsPushed(LocalGroupsOnly.TRUE,
        new Checkpoint(JAN_1970, "12Group1"), expected,
        new Checkpoint(FEB_1970, "12Group2"));
  }
  /**
   * Builds a list of expected DocId Records that the Pusher should receive.
   * Each record is built with crawl-immediately set.
   *
   * @param folderPath the full path to a folder.
   * @param objectNames objects within that folder that should be added to the
   *     expected list. If the full folderPath is included among the
   *     object names, the folder itself is included in the expected results.
   */
  private List<Record> makeExpectedDocIds(String folderPath,
      String... objectNames) {
    ImmutableList.Builder<Record> builder = ImmutableList.builder();
    for (String name : objectNames) {
      if (name.equals(folderPath)) {
        name = null;
      }
      DocId docid = DocumentumAdaptor.docIdFromPath(folderPath, name);
      builder.add(
          new Record.Builder(docid).setCrawlImmediately(true).build());
    }
    return builder.build();
  }
/** Convenience method to assemble a list of start paths for readability. */
private List<String> startPaths(String... paths) {
return ImmutableList.copyOf(paths);
}
  /**
   * Configures the adaptor with the given start paths, runs an incremental
   * pass from the given document checkpoint, and verifies both the pushed
   * records and the resulting document checkpoint.
   */
  private void checkModifiedDocIdsPushed(List<String> startPaths,
      Checkpoint checkpoint, List<Record> expectedRecords,
      Checkpoint expectedCheckpoint)
      throws DfException, IOException, InterruptedException {
    DocumentumAdaptor adaptor = getObjectUnderTest(
        ImmutableMap.of("documentum.src", Joiner.on(",").join(startPaths)));
    AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
    adaptor.modifiedDocumentsCheckpoint = checkpoint;
    adaptor.getModifiedDocIds(pusher);
    assertEquals(expectedRecords, pusher.getRecords());
    assertEquals(expectedCheckpoint, adaptor.modifiedDocumentsCheckpoint);
  }
  // An empty folder produces no records and leaves the checkpoint alone.
  @Test
  public void testNoDocuments() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    Checkpoint startCheckpoint = Checkpoint.incremental();
    checkModifiedDocIdsPushed(startPaths(folder), startCheckpoint,
        ImmutableList.<Record>of(), startCheckpoint);
  }
  // Documents not modified since the checkpoint produce no records.
  @Test
  public void testNoModifiedDocuments() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(JAN_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(JAN_1970, "0b01081f80001002", folder + "/bar", folderId);
    Checkpoint startCheckpoint = Checkpoint.incremental();
    checkModifiedDocIdsPushed(startPaths(folder), startCheckpoint,
        ImmutableList.<Record>of(), startCheckpoint);
  }
  // With a checkpoint object id of "0", the modified folder itself and its
  // modified children are all pushed.
  @Test
  public void testModifiedDocumentsNoCheckpointObjId() throws Exception {
    String parentId = "0b001";
    String parentFolder = "/Folder1";
    insertFolder(EPOCH_1970, parentId, parentFolder);
    String folderId = "0b002";
    String folder = "/Folder1/Folder2";
    insertFolder(JAN_1970, folderId, folder);
    setParentFolderId(folderId, parentId);
    insertDocument(FEB_1970, "09001", folder + "/foo", folderId);
    insertDocument(FEB_1970, "09002", folder + "/bar", folderId);
    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0"),
        makeExpectedDocIds(folder, folder, "foo", "bar"),
        new Checkpoint(FEB_1970, "09002"));
  }
  // At the same timestamp as the checkpoint, only objects with ids after
  // the checkpoint's object id are pushed.
  @Test
  public void testModifiedDocumentsSameCheckpointTime() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(JAN_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(JAN_1970, "0b01081f80001002", folder + "/bar", folderId);
    insertDocument(FEB_1970, "0b01081f80001003", folder + "/baz", folderId);
    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0b01081f80001001"),
        makeExpectedDocIds(folder, "bar", "baz"),
        new Checkpoint(FEB_1970, "0b01081f80001003"));
  }
  // Documents strictly newer than the checkpoint time are pushed; the one
  // at the checkpoint's own object id is not repeated.
  @Test
  public void testModifiedDocumentsNewerModifyDate() throws Exception {
    String folderId = "0b01081f80001000";
    String folder = "/Folder1";
    insertFolder(JAN_1970, folderId, folder);
    insertDocument(JAN_1970, "0b01081f80001001", folder + "/foo", folderId);
    insertDocument(FEB_1970, "0b01081f80001002", folder + "/bar", folderId);
    insertDocument(MAR_1970, "0b01081f80001003", folder + "/baz", folderId);
    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0b01081f80001001"),
        makeExpectedDocIds(folder, "bar", "baz"),
        new Checkpoint(MAR_1970, "0b01081f80001003"));
  }
  // A modified folder with no modified children is pushed as itself.
  @Test
  public void testModifiedFolder() throws Exception {
    String parentId = "0b001";
    String parentFolder = "/Folder1";
    insertFolder(EPOCH_1970, parentId, parentFolder);
    String folderId = "0b002";
    String folder = "/Folder1/Folder2";
    insertFolder(FEB_1970, folderId, folder);
    setParentFolderId(folderId, parentId);
    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "0b003"),
        makeExpectedDocIds(folder, folder),
        new Checkpoint(FEB_1970, folderId));
  }
  // When the folder is modified after some of its children, the modified
  // children and the folder itself are pushed, in modification order.
  @Test
  public void testModifiedFolderNewerThanChildren() throws Exception {
    String parentId = "0b001";
    String parentFolder = "/Folder1";
    insertFolder(EPOCH_1970, parentId, parentFolder);
    String folderId = "0b002";
    String folder = "/Folder1/Folder2";
    insertFolder(MAR_1970, folderId, folder);
    setParentFolderId(folderId, parentId);
    insertDocument(JAN_1970, "09001", folder + "/foo", folderId);
    insertDocument(FEB_1970, "09002", folder + "/bar", folderId);
    insertDocument(MAR_1970, "09003", folder + "/baz", folderId);
    checkModifiedDocIdsPushed(startPaths(folder),
        new Checkpoint(JAN_1970, "09001"),
        makeExpectedDocIds(folder, "bar", "baz", folder),
        new Checkpoint(MAR_1970, "0b002"));
  }
@Test
public void testModifiedDocumentsOutsideStartPath() throws Exception {
String folder1Id = "0b01081f80001000";
String folder1 = "/Folder1";
insertFolder(JAN_1970, folder1Id, folder1);
insertDocument(FEB_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
insertDocument(FEB_1970, "0b01081f80001002", folder1 + "/bar", folder1Id);
String folder2Id = "0b01081f80002000";
String folder2 = "/Folder2";
insertFolder(JAN_1970, folder2Id, folder2);
insertDocument(FEB_1970, "0b01081f80002001", folder2 + "/baz", folder2Id);
checkModifiedDocIdsPushed(startPaths(folder1),
new Checkpoint(JAN_1970, folder1Id),
makeExpectedDocIds(folder1, "foo", "bar"),
new Checkpoint(FEB_1970, "0b01081f80001002"));
}
@Test
public void testModifiedDocumentsOneParentOutsideStartPath()
throws Exception {
String folder1Id = "0b01081f80001000";
String folder1 = "/Folder1";
insertFolder(JAN_1970, folder1Id, folder1);
String folder2Id = "0b01081f80002000";
String folder2 = "/Folder2";
insertFolder(JAN_1970, folder2Id, folder2);
insertDocument(FEB_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
insertDocument(FEB_1970, "0b01081f80001002", folder1 + "/bar", folder1Id,
folder2Id);
checkModifiedDocIdsPushed(startPaths(folder1),
new Checkpoint(JAN_1970, folder1Id),
makeExpectedDocIds(folder1, "foo", "bar"),
new Checkpoint(FEB_1970, "0b01081f80001002"));
}
@Test
public void testModifiedDocumentsMultipleParentsInStartPaths()
throws Exception {
String folder1Id = "0b01081f80001000";
String folder1 = "/Folder1";
insertFolder(JAN_1970, folder1Id, folder1);
String folder2Id = "0b01081f80002000";
String folder2 = "/Folder2";
insertFolder(JAN_1970, folder2Id, folder2);
insertDocument(FEB_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
insertDocument(FEB_1970, "0b01081f80001002", folder1 + "/bar", folder1Id,
folder2Id);
checkModifiedDocIdsPushed(startPaths(folder1, folder2),
new Checkpoint(FEB_1970, folder1Id),
new ImmutableList.Builder<Record>()
.addAll(makeExpectedDocIds(folder1, "foo", "bar"))
.addAll(makeExpectedDocIds(folder2, "bar"))
.build(),
new Checkpoint(FEB_1970, "0b01081f80001002"));
}
@Test
public void testModifiedDocumentsMultipleStartPaths() throws Exception {
String folder1Id = "0b01081f80001000";
String folder1 = "/Folder1";
insertFolder(JAN_1970, folder1Id, folder1);
insertDocument(MAR_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
insertDocument(MAR_1970, "0b01081f80001002", folder1 + "/bar", folder1Id);
String folder2Id = "0b01081f80002000";
String folder2 = "/Folder2";
insertFolder(JAN_1970, folder2Id, folder2);
insertDocument(MAR_1970, "0b01081f80002001", folder2 + "/baz", folder2Id);
checkModifiedDocIdsPushed(startPaths(folder1, folder2),
new Checkpoint(FEB_1970, folder1Id),
new ImmutableList.Builder<Record>()
.addAll(makeExpectedDocIds(folder1, "foo", "bar"))
.addAll(makeExpectedDocIds(folder2, "baz"))
.build(),
new Checkpoint(MAR_1970, "0b01081f80002001"));
}
@Test
public void testModifiedDocumentsInSubfolder() throws Exception {
String folder1Id = "0b01081f80001000";
String folder1 = "/Folder1";
insertFolder(JAN_1970, folder1Id, folder1);
insertDocument(MAR_1970, "0b01081f80001001", folder1 + "/foo", folder1Id);
insertDocument(MAR_1970, "0b01081f80001002", folder1 + "/bar", folder1Id);
String folder2Id = "0b01081f80002000";
String folder2 = "/Folder1/Folder2";
insertFolder(JAN_1970, folder2Id, folder2);
insertDocument(MAR_1970, "0b01081f80002001", folder2 + "/baz", folder2Id);
checkModifiedDocIdsPushed(startPaths(folder1),
new Checkpoint(FEB_1970, folder1Id),
new ImmutableList.Builder<Record>()
.addAll(makeExpectedDocIds(folder1, "foo", "bar"))
.addAll(makeExpectedDocIds(folder2, "baz"))
.build(),
new Checkpoint(MAR_1970, "0b01081f80002001"));
}
@Test
public void testModifiedDocumentsNotDocumentOrFolder() throws Exception {
String folderId = "0b01081f80001000";
String folder = "/Folder1";
insertFolder(JAN_1970, folderId, folder);
insertDocument(FEB_1970, "0b01081f80001001", folder + "/foo", folderId);
insertDocument(MAR_1970, "0b01081f80001002", folder + "/bar", folderId);
insertSysObject(MAR_1970, "0b01081f80001003", "baz", folder + "/baz",
"dm_other", folderId);
checkModifiedDocIdsPushed(startPaths(folder),
new Checkpoint(FEB_1970, folder),
makeExpectedDocIds(folder, "foo", "bar"),
new Checkpoint(MAR_1970, "0b01081f80001002"));
}
@Test
public void testModifiedDocumentsWithFolderSubtype() throws Exception {
String parentId = "0b001";
String parentFolder = "/Folder1";
insertFolder(EPOCH_1970, parentId, parentFolder);
String folderId = "0b002";
String folder = "/Folder1/Folder2";
executeUpdate(String.format(
"insert into dm_folder(r_object_id, r_folder_path) values('%s', '%s')",
folderId, folder));
insertSysObject(FEB_1970, folderId, "Folder2", folder, "dm_folder_subtype",
parentId);
insertDocument(FEB_1970, "09001", folder + "/foo", folderId);
insertDocument(MAR_1970, "09002", folder + "/bar", folderId);
checkModifiedDocIdsPushed(startPaths(folder),
new Checkpoint(JAN_1970, folderId),
makeExpectedDocIds(folder, "foo", folder, "bar"),
new Checkpoint(MAR_1970, "09002"));
}
@Test
public void testModifiedDocumentsWithDocumentSubtype() throws Exception {
String folderId = "0b01081f80001000";
String folder = "/Folder1";
insertFolder(JAN_1970, folderId, folder);
insertDocument(FEB_1970, "0b01081f80001001", folder + "/foo", folderId);
insertDocument(MAR_1970, "0b01081f80001002", folder + "/bar", folderId);
insertSysObject(MAR_1970, "0b01081f80001003", "baz", folder + "/baz",
"dm_document_subtype", folderId);
checkModifiedDocIdsPushed(startPaths(folder),
new Checkpoint(FEB_1970, folder),
makeExpectedDocIds(folder, "foo", "bar", "baz"),
new Checkpoint(MAR_1970, "0b01081f80001003"));
}
private void initValidDocumentTypes(DocumentumAdaptor adaptor,
String... types) throws DfException {
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.documentTypes", Joiner.on(',').join(types));
adaptor.init(context);
}
@Test
public void testValidateDocumentTypes() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type1 = "dm_document";
String type2 = "dm_document_subtype";
initValidDocumentTypes(adaptor, type1, type2);
assertEquals(ImmutableList.of(type1, type2),
adaptor.getValidatedDocumentTypes());
}
@Test
public void testValidateDocumentTypesSomeValid() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type1 = "dm_document_subtype";
String type2 = "dm_my_type";
String type3 = "dm_document";
String type4 = "dm_folder";
String type5 = "dm_folder_subtype";
initValidDocumentTypes(adaptor, type1, type2, type3, type4, type5);
assertEquals(ImmutableList.of(type1, type3),
adaptor.getValidatedDocumentTypes());
}
@Test
public void testValidateDocumentSysobjectSubtype() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type = "dm_sysobject_subtype";
initValidDocumentTypes(adaptor, type);
assertEquals(ImmutableList.of(type),
adaptor.getValidatedDocumentTypes());
}
@Test
public void testValidateDocumentTypesNoneValid() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type1 = "dm_some_type";
String type2 = "dm_my_type";
String type3 = "dm_any_type";
initValidDocumentTypes(adaptor, type1, type2, type3);
assertTrue(adaptor.getValidatedDocumentTypes().isEmpty());
}
@Test(expected = InvalidConfigurationException.class)
public void testValidateDocumentTypesEmpty() throws DfException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
String type1 = "";
initValidDocumentTypes(adaptor, type1);
}
private void checkTypedDocIdsPushed(List<String> startPaths, String docTypes,
Checkpoint checkpoint, List<Record> expectedRecords)
throws DfException, IOException, InterruptedException {
DocumentumAdaptor adaptor =
new DocumentumAdaptor(new H2BackedTestProxies().getProxyClientX());
AdaptorContext context = ProxyAdaptorContext.getInstance();
Config config = initTestAdaptorConfig(context);
config.overrideKey("documentum.src", Joiner.on(",").join(startPaths));
config.overrideKey("documentum.documentTypes", docTypes);
adaptor.init(context);
AccumulatingDocIdPusher pusher = new AccumulatingDocIdPusher();
adaptor.modifiedDocumentsCheckpoint = checkpoint;
adaptor.getModifiedDocIds(pusher);
assertEquals(expectedRecords, pusher.getRecords());
}
private void testCustomType(String docTypes, String... expect)
throws Exception {
String folderId = "0b001";
String folder = "/Folder1";
insertFolder(JAN_1970, folderId, folder);
insertSysObject(MAR_1970, "09001", "foo", folder + "/foo",
"dm_document", folderId);
insertSysObject(MAR_1970, "09002", "bar", folder + "/bar",
"dm_document_subtype", folderId);
insertSysObject(MAR_1970, "09003", "baz", folder + "/baz",
"dm_sysobject_subtype", folderId);
checkTypedDocIdsPushed(startPaths(folder),
docTypes,
new Checkpoint(FEB_1970, folder),
makeExpectedDocIds(folder, expect));
}
@Test
public void testCustomType_all() throws Exception {
testCustomType("dm_document, dm_document_subtype, dm_sysobject_subtype",
"foo", "bar", "baz");
}
@Test
public void testCustomType_skip() throws Exception {
testCustomType("dm_document, dm_document_subtype", "foo", "bar");
}
@Test
public void testCustomType_NonSysobject() throws Exception {
String folderId = "0b001";
String folder = "/Folder1";
insertFolder(JAN_1970, folderId, folder);
insertSysObject(MAR_1970, "09001", "foo", folder + "/foo",
"dm_document", folderId);
insertSysObject(MAR_1970, "09002", "bar", folder + "/bar",
"dm_store", folderId);
checkTypedDocIdsPushed(startPaths(folder),
"dm_document, dm_store",
new Checkpoint(FEB_1970, folder),
makeExpectedDocIds(folder, "foo"));
}
}
| Fix broken tests.
Fix tests broken in rebase master patch set of previous commit 947a674.
Code review: https://codereview.appspot.com/309550043/
| test/com/google/enterprise/adaptor/documentum/DocumentumAdaptorTest.java | Fix broken tests. | <ide><path>est/com/google/enterprise/adaptor/documentum/DocumentumAdaptorTest.java
<ide> @Test
<ide> public void testUpdateAclsFirstRowFailure() throws Exception {
<ide> String dateStr = getNowPlusMinutes(5);
<del> insertAclAudit("123", "234", "4501081f80000100", "dm_save", dateStr);
<add> insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
<ide>
<ide> DocumentumAcls dctmAcls = getUpdateAclsAndFail();
<ide> assertEquals(Checkpoint.incremental(), dctmAcls.getCheckpoint());
<ide> public void testUpdateAclsSecondRowFailure() throws Exception {
<ide> createAcl("4501081f80000100");
<ide> String dateStr = getNowPlusMinutes(3);
<del> insertAclAudit("123", "234", "4501081f80000100", "dm_save", dateStr);
<del> insertAclAudit("124", "235", "4501081f80000101", "dm_saveasnew",
<add> insertAclAudit("123", "4501081f80000100", "dm_save", dateStr);
<add> insertAclAudit("124", "4501081f80000101", "dm_saveasnew",
<ide> getNowPlusMinutes(5));
<ide>
<ide> DocumentumAcls dctmAcls = getUpdateAclsAndFail(); |
|
Java | apache-2.0 | error: pathspec 'pax-web-tomcat/src/main/java/org/ops4j/pax/web/service/tomcat/OSGiMemoryRealm.java' did not match any file(s) known to git
| 93a2ae621b2e9f666ad0a53291a5aedd99c74f5c | 1 | lostiniceland/org.ops4j.pax.web,lostiniceland/org.ops4j.pax.web | package org.ops4j.pax.web.service.tomcat;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Enumeration;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleState;
import org.apache.catalina.realm.MemoryRealm;
import org.apache.tomcat.util.digester.Digester;
public class OSGiMemoryRealm extends MemoryRealm {
@Override
protected void startInternal() throws LifecycleException {
if (getPathname().startsWith("classpath")) {
String pathName = getPathname();
try {
URL pathUrl = new URL(pathName);
pathName = pathUrl.getHost();
} catch (MalformedURLException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
ClassLoader classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
.getResourceAsStream(pathName);
if (inputStream == null) {
Enumeration<URL> resources;
try {
resources = classLoader.getResources(
pathName);
while (resources.hasMoreElements()) {
URL nextElement = resources.nextElement();
inputStream = nextElement.openStream();
continue;
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
Digester digester = getDigester();
try {
synchronized (digester) {
digester.push(this);
digester.parse(inputStream);
}
} catch (Exception e) {
throw new LifecycleException(
sm.getString("memoryRealm.readXml"), e);
} finally {
digester.reset();
}
// Create a MessageDigest instance for credentials, if desired
if (digest != null) {
try {
md = MessageDigest.getInstance(digest);
} catch (NoSuchAlgorithmException e) {
throw new LifecycleException(sm.getString(
"realmBase.algorithm", digest), e);
}
}
setState(LifecycleState.STARTING);
} else {
super.startInternal();
}
}
}
| pax-web-tomcat/src/main/java/org/ops4j/pax/web/service/tomcat/OSGiMemoryRealm.java | [PAXWEB-601] - TOMCAT: SecurityConstraingMapping needs to be set ...
| pax-web-tomcat/src/main/java/org/ops4j/pax/web/service/tomcat/OSGiMemoryRealm.java | [PAXWEB-601] - TOMCAT: SecurityConstraingMapping needs to be set ... | <ide><path>ax-web-tomcat/src/main/java/org/ops4j/pax/web/service/tomcat/OSGiMemoryRealm.java
<add>package org.ops4j.pax.web.service.tomcat;
<add>
<add>import java.io.IOException;
<add>import java.io.InputStream;
<add>import java.net.MalformedURLException;
<add>import java.net.URL;
<add>import java.security.MessageDigest;
<add>import java.security.NoSuchAlgorithmException;
<add>import java.util.Enumeration;
<add>
<add>import org.apache.catalina.LifecycleException;
<add>import org.apache.catalina.LifecycleState;
<add>import org.apache.catalina.realm.MemoryRealm;
<add>import org.apache.tomcat.util.digester.Digester;
<add>
<add>public class OSGiMemoryRealm extends MemoryRealm {
<add>
<add> @Override
<add> protected void startInternal() throws LifecycleException {
<add>
<add> if (getPathname().startsWith("classpath")) {
<add>
<add> String pathName = getPathname();
<add> try {
<add> URL pathUrl = new URL(pathName);
<add> pathName = pathUrl.getHost();
<add> } catch (MalformedURLException e1) {
<add> // TODO Auto-generated catch block
<add> e1.printStackTrace();
<add> }
<add>
<add>
<add> ClassLoader classLoader = getClass().getClassLoader();
<add> InputStream inputStream = classLoader
<add> .getResourceAsStream(pathName);
<add>
<add> if (inputStream == null) {
<add> Enumeration<URL> resources;
<add> try {
<add> resources = classLoader.getResources(
<add> pathName);
<add> while (resources.hasMoreElements()) {
<add> URL nextElement = resources.nextElement();
<add> inputStream = nextElement.openStream();
<add> continue;
<add> }
<add>
<add>
<add> } catch (IOException e) {
<add> // TODO Auto-generated catch block
<add> e.printStackTrace();
<add> }
<add> }
<add>
<add> Digester digester = getDigester();
<add> try {
<add> synchronized (digester) {
<add> digester.push(this);
<add> digester.parse(inputStream);
<add> }
<add> } catch (Exception e) {
<add> throw new LifecycleException(
<add> sm.getString("memoryRealm.readXml"), e);
<add> } finally {
<add> digester.reset();
<add> }
<add>
<add> // Create a MessageDigest instance for credentials, if desired
<add> if (digest != null) {
<add> try {
<add> md = MessageDigest.getInstance(digest);
<add> } catch (NoSuchAlgorithmException e) {
<add> throw new LifecycleException(sm.getString(
<add> "realmBase.algorithm", digest), e);
<add> }
<add> }
<add>
<add> setState(LifecycleState.STARTING);
<add> } else {
<add> super.startInternal();
<add> }
<add>
<add> }
<add>
<add>} |
|
Java | apache-2.0 | 63828ad32813f63ac94ee2bcf93c5217e0fa8d0c | 0 | AKSW/sml-converters,AKSW/sml-converters | package org.aksw.sml.converters.r2rml2sml;
import java.util.HashSet;
import java.util.Set;
import org.aksw.sml.converters.vocabs.RR;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
public class TriplesMap {
private Model model;
private Resource subject;
/**
* @author sherif
*/
public TriplesMap(Model model, Resource subject) {
super();
this.model = model;
this.subject = subject;
}
/**
* @return the model
*/
public Model getModel() {
return model;
}
/**
* @return the subject
*/
@Deprecated
public Resource getSubject() {
return subject;
}
/**
* Returns the actual triples map resource
* @return the actual triples map resource
*/
public Resource getResource() {
return subject;
}
public Set<PredicateObjectMap> getPredicateObjectMaps() {
Set<PredicateObjectMap> result = new HashSet<PredicateObjectMap>();
// list all predicate object maps
Set<RDFNode> objects = model.listObjectsOfProperty(subject, RR.predicateObjectMap).toSet();
for(RDFNode object : objects) {
Resource r = (Resource)object;
PredicateObjectMap item = new PredicateObjectMap(model, r);
result.add(item);
}
return result;
}
public LogicalTable getLogicalTable() {
Set<RDFNode> objects = model.listObjectsOfProperty(subject, RR.logicalTable).toSet();
Resource resource = RRUtils.getResourceFromSet(objects);
LogicalTable result = new LogicalTable(model, resource);
return result;
}
public SubjectMap getSubjectMap() {
Set<RDFNode> objects = model.listObjectsOfProperty(subject, RR.subjectMap).toSet();
Resource resource = RRUtils.getResourceFromSet(objects);
SubjectMap result = new SubjectMap(model, resource);
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((model == null) ? 0 : model.hashCode());
result = prime * result + ((subject == null) ? 0 : subject.hashCode());
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
TriplesMap other = (TriplesMap) obj;
if (model == null) {
if (other.model != null) return false;
} else if (!model.equals(other.model)) return false;
if (subject == null) {
if (other.subject != null) return false;
} else if (!subject.equals(other.subject)) return false;
return true;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "" + subject;
}
}
| src/main/java/org/aksw/sml/converters/r2rml2sml/TriplesMap.java | package org.aksw.sml.converters.r2rml2sml;
import java.util.HashSet;
import java.util.Set;
import org.aksw.sml.converters.vocabs.RR;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
public class TriplesMap {
private Model model;
private Resource subject;
/**
* @author sherif
*/
public TriplesMap(Model model, Resource subject) {
super();
this.model = model;
this.subject = subject;
}
/**
* @return the model
*/
public Model getModel() {
return model;
}
/**
* @return the subject
*/
public Resource getSubject() {
return subject;
}
public Set<PredicateObjectMap> getPredicateObjectMaps() {
Set<PredicateObjectMap> result = new HashSet<PredicateObjectMap>();
// list all predicate object maps
Set<RDFNode> objects = model.listObjectsOfProperty(subject, RR.predicateObjectMap).toSet();
for(RDFNode object : objects) {
Resource r = (Resource)object;
PredicateObjectMap item = new PredicateObjectMap(model, r);
result.add(item);
}
return result;
}
public LogicalTable getLogicalTable() {
Set<RDFNode> objects = model.listObjectsOfProperty(subject, RR.logicalTable).toSet();
Resource resource = RRUtils.getResourceFromSet(objects);
LogicalTable result = new LogicalTable(model, resource);
return result;
}
public SubjectMap getSubjectMap() {
Set<RDFNode> objects = model.listObjectsOfProperty(subject, RR.subjectMap).toSet();
Resource resource = RRUtils.getResourceFromSet(objects);
SubjectMap result = new SubjectMap(model, resource);
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((model == null) ? 0 : model.hashCode());
result = prime * result + ((subject == null) ? 0 : subject.hashCode());
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
TriplesMap other = (TriplesMap) obj;
if (model == null) {
if (other.model != null) return false;
} else if (!model.equals(other.model)) return false;
if (subject == null) {
if (other.subject != null) return false;
} else if (!subject.equals(other.subject)) return false;
return true;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "" + subject;
}
}
| method renaming in TriplesMap
- declared getSubject method to be deprecated since I find it
misleading: when calling triplesMap.getSubject() one could expect to get
the subject map
- introduced a method named getResource being a copy of getSubject hoping
that calling triplesMap.getResource() better reflects that the triples
map resource will be returned
| src/main/java/org/aksw/sml/converters/r2rml2sml/TriplesMap.java | method renaming in TriplesMap | <ide><path>rc/main/java/org/aksw/sml/converters/r2rml2sml/TriplesMap.java
<ide> /**
<ide> * @return the subject
<ide> */
<add> @Deprecated
<ide> public Resource getSubject() {
<add> return subject;
<add> }
<add>
<add> /**
<add> * Returns the actual triples map resource
<add> * @return the actual triples map resource
<add> */
<add> public Resource getResource() {
<ide> return subject;
<ide> }
<ide> |
|
Java | apache-2.0 | dda5ae7dfe37540d8f09407a8b55c328ec49b22c | 0 | hhshuai/concurrentlinkedhashmap,sdslnmd/concurrentlinkedhashmap,ben-manes/concurrentlinkedhashmap,songchuanyuan66/concurrentlinkedhashmap,mapiman/concurrentlinkedhashmap,rzel/concurrentlinkedhashmap,easyfmxu/concurrentlinkedhashmap,Tradeshift/concurrentlinkedhashmap | /*
* Copyright 2011 Benjamin Manes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.concurrentlinkedhashmap;
import static com.googlecode.concurrentlinkedhashmap.ConcurrentTestHarness.timeTasks;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap.Builder;
import org.apache.commons.lang.time.DurationFormatUtils;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.text.NumberFormat;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
/**
* A unit-test to assert that the cache does not have a memory leak by not being
* able to drain the buffers fast enough.
*
* @author [email protected] (Ben Manes)
*/
@Test(groups = "memoryLeak")
public final class MemoryLeakTest {
private static final long DAY = 86400000;
private static final long SECONDS = 1000;
private static final long TIME_OUT = 1 * DAY;
private static final long STATUS_INTERVAL = 5 * SECONDS;
private static final int THREADS = 250;
private ConcurrentLinkedHashMap<Long, Long> map;
private ScheduledExecutorService statusExecutor;
private ExecutorService catchupExecutor;
@BeforeMethod
public void beforeMemoryLeakTest() {
ThreadFactory threadFactory = new ThreadFactoryBuilder()
.setPriority(Thread.MAX_PRIORITY)
.setDaemon(true)
.build();
catchupExecutor = Executors.newSingleThreadExecutor(threadFactory);
statusExecutor = Executors.newSingleThreadScheduledExecutor(threadFactory);
statusExecutor.scheduleAtFixedRate(newStatusTask(),
STATUS_INTERVAL, STATUS_INTERVAL, TimeUnit.MILLISECONDS);
map = new Builder<Long, Long>()
.maximumWeightedCapacity(THREADS)
//.catchup(catchupExecutor)
.build();
}
@AfterMethod
public void afterMemoryLeakTest() {
catchupExecutor.shutdownNow();
statusExecutor.shutdownNow();
}
@Test(timeOut = TIME_OUT)
public void memoryLeak() throws InterruptedException {
timeTasks(THREADS, new Runnable() {
@Override public void run() {
Long id = Thread.currentThread().getId();
map.put(id, id);
for (;;) {
map.get(id);
Thread.yield();
}
}
});
}
private Runnable newStatusTask() {
return new Runnable() {
int runningTime;
@Override public void run() {
long pending = 0;
for (int i = 0; i < map.buffers.length; i++) {
pending += map.bufferLengths.get(i);
}
runningTime += STATUS_INTERVAL;
String elapsedTime = DurationFormatUtils.formatDuration(runningTime, "H:mm:ss");
String pendingReads = NumberFormat.getInstance().format(pending);
System.out.printf("---------- %s ----------\n", elapsedTime);
System.out.printf("Pending tasks = %s\n", pendingReads);
System.out.printf("Drain status = %s\n", map.drainStatus);
}
};
}
}
| unittest/src/java/com/googlecode/concurrentlinkedhashmap/MemoryLeakTest.java | /*
* Copyright 2011 Benjamin Manes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.concurrentlinkedhashmap;
import static com.googlecode.concurrentlinkedhashmap.ConcurrentTestHarness.timeTasks;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap.Builder;
import org.apache.commons.lang.time.DurationFormatUtils;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.text.NumberFormat;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
/**
* A unit-test to assert that the cache does not have a memory leak by not being
* able to drain the buffers fast enough.
*
* @author [email protected] (Ben Manes)
*/
@Test(groups = "memoryLeak")
public final class MemoryLeakTest {
private static final long DAY = 86400000;
private static final long SECONDS = 1000;
private static final long TIME_OUT = 1 * DAY;
private static final long STATUS_INTERVAL = 5 * SECONDS;
private static final int THREADS = 250;
private ConcurrentLinkedHashMap<Long, Long> map;
private ScheduledExecutorService statusExecutor;
@BeforeMethod
public void beforeMemoryLeakTest() {
ThreadFactory threadFactory = new ThreadFactoryBuilder()
.setPriority(Thread.MAX_PRIORITY)
.setDaemon(true)
.build();
statusExecutor = Executors.newSingleThreadScheduledExecutor(threadFactory);
statusExecutor.scheduleAtFixedRate(newStatusTask(),
STATUS_INTERVAL, STATUS_INTERVAL, TimeUnit.MILLISECONDS);
map = new Builder<Long, Long>()
.maximumWeightedCapacity(THREADS)
.build();
}
@AfterMethod
public void afterMemoryLeakTest() {
statusExecutor.shutdownNow();
}
@Test(timeOut = TIME_OUT)
public void memoryLeak() throws InterruptedException {
timeTasks(THREADS, new Runnable() {
@Override public void run() {
Long id = Thread.currentThread().getId();
map.put(id, id);
for (;;) {
map.get(id);
Thread.yield();
}
}
});
}
private Runnable newStatusTask() {
return new Runnable() {
int runningTime;
@Override public void run() {
long pending = 0;
for (int i = 0; i < map.buffers.length; i++) {
pending += map.bufferLengths.get(i);
}
runningTime += STATUS_INTERVAL;
String elapsedTime = DurationFormatUtils.formatDuration(runningTime, "H:mm:ss");
String pendingReads = NumberFormat.getInstance().format(pending);
System.out.printf("---------- %s ----------\n", elapsedTime);
System.out.printf("Pending tasks = %s\n", pendingReads);
System.out.printf("Drain status = %s\n", map.drainStatus);
}
};
}
}
| Add catchup to test, but disabled. Reminder for investigating why it can't keep up.
git-svn-id: 907e4e34b61c8b36c3c46852ca75613c7936718f@592 52670e78-85c4-11dd-9ffd-cdf95c2cae98
| unittest/src/java/com/googlecode/concurrentlinkedhashmap/MemoryLeakTest.java | Add catchup to test, but disabled. Reminder for investigating why it can't keep up. | <ide><path>nittest/src/java/com/googlecode/concurrentlinkedhashmap/MemoryLeakTest.java
<ide> import org.testng.annotations.Test;
<ide>
<ide> import java.text.NumberFormat;
<add>import java.util.concurrent.ExecutorService;
<ide> import java.util.concurrent.Executors;
<ide> import java.util.concurrent.ScheduledExecutorService;
<ide> import java.util.concurrent.ThreadFactory;
<ide>
<ide> private ConcurrentLinkedHashMap<Long, Long> map;
<ide> private ScheduledExecutorService statusExecutor;
<add> private ExecutorService catchupExecutor;
<ide>
<ide> @BeforeMethod
<ide> public void beforeMemoryLeakTest() {
<ide> .setPriority(Thread.MAX_PRIORITY)
<ide> .setDaemon(true)
<ide> .build();
<add> catchupExecutor = Executors.newSingleThreadExecutor(threadFactory);
<ide> statusExecutor = Executors.newSingleThreadScheduledExecutor(threadFactory);
<ide> statusExecutor.scheduleAtFixedRate(newStatusTask(),
<ide> STATUS_INTERVAL, STATUS_INTERVAL, TimeUnit.MILLISECONDS);
<ide> map = new Builder<Long, Long>()
<ide> .maximumWeightedCapacity(THREADS)
<add> //.catchup(catchupExecutor)
<ide> .build();
<ide> }
<ide>
<ide> @AfterMethod
<ide> public void afterMemoryLeakTest() {
<add> catchupExecutor.shutdownNow();
<ide> statusExecutor.shutdownNow();
<ide> }
<ide> |
|
Java | apache-2.0 | 2c72b14e9f75dca6433f90322ba0c64c53edbaa6 | 0 | researchgate/kafka-metamorph | package net.researchgate.kafka.metamorph.kafka09;
import net.researchgate.kafka.metamorph.AbstractKafkaPartitionConsumerTest;
import net.researchgate.kafka.metamorph.KafkaTestContext;
import net.researchgate.kafka.metamorph.PartitionConsumer;
import net.researchgate.kafka.metamorph.kafka09.utils.Kafka09TestContext;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
public class Kafka09PartitionConsumerTest extends AbstractKafkaPartitionConsumerTest {
@Override
protected KafkaTestContext getContext() {
return new Kafka09TestContext();
}
private KafkaProducer<String, String> createProducer() {
return createProducer(StringSerializer.class, StringSerializer.class);
}
private <K,V> KafkaProducer<K,V> createProducer(Class keySerializerClass , Class valueSerializerClass) {
Properties props = new Properties();
props.put(org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, context.getBootstrapServerString());
props.put(org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializerClass);
props.put(org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializerClass);
return new KafkaProducer<>(props);
}
@Override
protected void produceMessagesOrdered(String topic, int messageNum) throws ExecutionException, InterruptedException {
KafkaProducer<String, String> producer = createProducer();
for (int i = 0; i < messageNum; i++) {
Future<RecordMetadata> future = producer.send(new ProducerRecord<>(topic, "test-key-" + i, "test-value-" + i));
future.get();
}
producer.close();
}
@Override
protected void produceMessagesUnordered(String topic, int messageNum) throws ExecutionException, InterruptedException {
KafkaProducer<String, String> producer = createProducer();
List<Future<RecordMetadata>> futures = new ArrayList<>();
for (int i = 0; i < messageNum; i++) {
Future<RecordMetadata> future = producer.send(new ProducerRecord<>(topic, "test-key-" + i, "test-value"));
futures.add(future);
}
for (Future f : futures) {
f.get();
}
producer.close();
}
@Override
protected PartitionConsumer<String,String> initializeUnitUnderTest() {
Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, context.getBootstrapServerString());
return new Kafka09PartitionConsumer<>(props, new StringDeserializer(), new StringDeserializer());
}
}
| metamorph-kafka-09/src/test/java/net/researchgate/kafka/metamorph/kafka09/Kafka09PartitionConsumerTest.java | package net.researchgate.kafka.metamorph.kafka09;
import net.researchgate.kafka.metamorph.AbstractKafkaPartitionConsumerTest;
import net.researchgate.kafka.metamorph.KafkaTestContext;
import net.researchgate.kafka.metamorph.PartitionConsumer;
import net.researchgate.kafka.metamorph.kafka09.utils.Kafka09TestContext;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
public class Kafka09PartitionConsumerTest extends AbstractKafkaPartitionConsumerTest {
@Override
protected KafkaTestContext getContext() {
return new Kafka09TestContext();
}
@Override
protected void produceMessagesOrdered(String topic, int messageNum) throws ExecutionException, InterruptedException {
}
@Override
protected void produceMessagesUnordered(String topic, int messageNum) throws ExecutionException, InterruptedException {
}
@Override
protected PartitionConsumer<String,String> initializeUnitUnderTest() {
Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, context.getBootstrapServerString());
return new Kafka09PartitionConsumer<>(props, new StringDeserializer(), new StringDeserializer());
}
}
| Copied implementation for Kafka 09 producer
| metamorph-kafka-09/src/test/java/net/researchgate/kafka/metamorph/kafka09/Kafka09PartitionConsumerTest.java | Copied implementation for Kafka 09 producer | <ide><path>etamorph-kafka-09/src/test/java/net/researchgate/kafka/metamorph/kafka09/Kafka09PartitionConsumerTest.java
<ide> import net.researchgate.kafka.metamorph.PartitionConsumer;
<ide> import net.researchgate.kafka.metamorph.kafka09.utils.Kafka09TestContext;
<ide> import org.apache.kafka.clients.consumer.ConsumerConfig;
<add>import org.apache.kafka.clients.producer.KafkaProducer;
<add>import org.apache.kafka.clients.producer.ProducerRecord;
<add>import org.apache.kafka.clients.producer.RecordMetadata;
<ide> import org.apache.kafka.common.serialization.StringDeserializer;
<add>import org.apache.kafka.common.serialization.StringSerializer;
<ide>
<add>import java.util.ArrayList;
<add>import java.util.List;
<ide> import java.util.Properties;
<ide> import java.util.concurrent.ExecutionException;
<add>import java.util.concurrent.Future;
<ide>
<ide> public class Kafka09PartitionConsumerTest extends AbstractKafkaPartitionConsumerTest {
<ide>
<ide> protected KafkaTestContext getContext() {
<ide> return new Kafka09TestContext();
<ide> }
<add> private KafkaProducer<String, String> createProducer() {
<add> return createProducer(StringSerializer.class, StringSerializer.class);
<add> }
<add>
<add> private <K,V> KafkaProducer<K,V> createProducer(Class keySerializerClass , Class valueSerializerClass) {
<add> Properties props = new Properties();
<add> props.put(org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, context.getBootstrapServerString());
<add> props.put(org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializerClass);
<add> props.put(org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializerClass);
<add> return new KafkaProducer<>(props);
<add> }
<ide>
<ide> @Override
<ide> protected void produceMessagesOrdered(String topic, int messageNum) throws ExecutionException, InterruptedException {
<add> KafkaProducer<String, String> producer = createProducer();
<ide>
<add> for (int i = 0; i < messageNum; i++) {
<add> Future<RecordMetadata> future = producer.send(new ProducerRecord<>(topic, "test-key-" + i, "test-value-" + i));
<add> future.get();
<add> }
<add>
<add> producer.close();
<ide> }
<ide>
<ide> @Override
<ide> protected void produceMessagesUnordered(String topic, int messageNum) throws ExecutionException, InterruptedException {
<add> KafkaProducer<String, String> producer = createProducer();
<ide>
<add> List<Future<RecordMetadata>> futures = new ArrayList<>();
<add> for (int i = 0; i < messageNum; i++) {
<add> Future<RecordMetadata> future = producer.send(new ProducerRecord<>(topic, "test-key-" + i, "test-value"));
<add> futures.add(future);
<add> }
<add>
<add> for (Future f : futures) {
<add> f.get();
<add> }
<add>
<add> producer.close();
<ide> }
<ide>
<ide> @Override |
|
Java | apache-2.0 | 640e54f3517b47a8ef1e088539ca6541742bee8b | 0 | wisgood/hive,asonipsl/hive,asonipsl/hive,WANdisco/amplab-hive,asonipsl/hive,wisgood/hive,winningsix/hive,WANdisco/hive,asonipsl/hive,WANdisco/amplab-hive,WANdisco/hive,winningsix/hive,WANdisco/amplab-hive,WANdisco/hive,WANdisco/hive,WANdisco/amplab-hive,wisgood/hive,winningsix/hive,wisgood/hive,asonipsl/hive,winningsix/hive,wisgood/hive,WANdisco/amplab-hive,asonipsl/hive,asonipsl/hive,WANdisco/amplab-hive,WANdisco/hive,wisgood/hive,winningsix/hive,wisgood/hive,WANdisco/amplab-hive,wisgood/hive,WANdisco/amplab-hive,winningsix/hive,winningsix/hive,WANdisco/hive,asonipsl/hive,winningsix/hive,WANdisco/hive,WANdisco/hive | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.jdbc;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.service.HiveClient;
import org.apache.hadoop.hive.service.HiveInterface;
import org.apache.hadoop.hive.service.HiveServer;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
/**
* HiveConnection.
*
*/
public class HiveConnection implements java.sql.Connection {
private TTransport transport;
private HiveInterface client;
private boolean isClosed = true;
private SQLWarning warningChain = null;
private static final String URI_PREFIX = "jdbc:hive://";
/**
* Create a connection to a local Hive
*
* @param hiveConf
* @throws SQLException
*/
public HiveConnection(HiveConf hiveConf) throws SQLException {
try {
client = new HiveServer.HiveServerHandler(hiveConf);
} catch (MetaException e) {
throw new SQLException("Error accessing Hive metastore: "
+ e.getMessage(), "08S01",e);
}
isClosed = false;
configureConnection();
}
/**
* TODO: - parse uri (use java.net.URI?).
*/
public HiveConnection(String uri, Properties info) throws SQLException {
if (!uri.startsWith(URI_PREFIX)) {
throw new SQLException("Invalid URL: " + uri, "08S01");
}
// remove prefix
uri = uri.substring(URI_PREFIX.length());
// If uri is not specified, use local mode.
if (uri.isEmpty()) {
try {
client = new HiveServer.HiveServerHandler();
} catch (MetaException e) {
throw new SQLException("Error accessing Hive metastore: "
+ e.getMessage(), "08S01",e);
}
} else {
// parse uri
// form: hostname:port/databasename
String[] parts = uri.split("/");
String[] hostport = parts[0].split(":");
int port = 10000;
String host = hostport[0];
try {
port = Integer.parseInt(hostport[1]);
} catch (Exception e) {
}
transport = new TSocket(host, port);
TProtocol protocol = new TBinaryProtocol(transport);
client = new HiveClient(protocol);
try {
transport.open();
} catch (TTransportException e) {
throw new SQLException("Could not establish connection to "
+ uri + ": " + e.getMessage(), "08S01");
}
}
isClosed = false;
configureConnection();
}
private void configureConnection() throws SQLException {
Statement stmt = createStatement();
stmt.execute(
"set hive.fetch.output.serde = org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
stmt.close();
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#clearWarnings()
*/
public void clearWarnings() throws SQLException {
warningChain = null;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#close()
*/
public void close() throws SQLException {
if (!isClosed) {
try {
client.clean();
} catch (TException e) {
throw new SQLException("Error while cleaning up the server resources", e);
} finally {
isClosed = true;
if (transport != null) {
transport.close();
}
}
}
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#commit()
*/
public void commit() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createArrayOf(java.lang.String,
* java.lang.Object[])
*/
public Array createArrayOf(String arg0, Object[] arg1) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createBlob()
*/
public Blob createBlob() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createClob()
*/
public Clob createClob() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createNClob()
*/
public NClob createNClob() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createSQLXML()
*/
public SQLXML createSQLXML() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/**
* Creates a Statement object for sending SQL statements to the database.
*
* @throws SQLException
* if a database access error occurs.
* @see java.sql.Connection#createStatement()
*/
public Statement createStatement() throws SQLException {
if (isClosed) {
throw new SQLException("Can't create Statement, connection is closed");
}
return new HiveStatement(client);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createStatement(int, int)
*/
public Statement createStatement(int resultSetType, int resultSetConcurrency)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createStatement(int, int, int)
*/
public Statement createStatement(int resultSetType, int resultSetConcurrency,
int resultSetHoldability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createStruct(java.lang.String, java.lang.Object[])
*/
public Struct createStruct(String typeName, Object[] attributes)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getAutoCommit()
*/
public boolean getAutoCommit() throws SQLException {
return true;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getCatalog()
*/
public String getCatalog() throws SQLException {
return "";
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getClientInfo()
*/
public Properties getClientInfo() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getClientInfo(java.lang.String)
*/
public String getClientInfo(String name) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getHoldability()
*/
public int getHoldability() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getMetaData()
*/
public DatabaseMetaData getMetaData() throws SQLException {
return new HiveDatabaseMetaData(client);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getTransactionIsolation()
*/
public int getTransactionIsolation() throws SQLException {
return Connection.TRANSACTION_NONE;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getTypeMap()
*/
public Map<String, Class<?>> getTypeMap() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getWarnings()
*/
public SQLWarning getWarnings() throws SQLException {
return warningChain;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#isClosed()
*/
public boolean isClosed() throws SQLException {
return isClosed;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#isReadOnly()
*/
public boolean isReadOnly() throws SQLException {
return false;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#isValid(int)
*/
public boolean isValid(int timeout) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#nativeSQL(java.lang.String)
*/
public String nativeSQL(String sql) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareCall(java.lang.String)
*/
public CallableStatement prepareCall(String sql) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareCall(java.lang.String, int, int)
*/
public CallableStatement prepareCall(String sql, int resultSetType,
int resultSetConcurrency) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareCall(java.lang.String, int, int, int)
*/
public CallableStatement prepareCall(String sql, int resultSetType,
int resultSetConcurrency, int resultSetHoldability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String)
*/
public PreparedStatement prepareStatement(String sql) throws SQLException {
return new HivePreparedStatement(client, sql);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int)
*/
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
throws SQLException {
return new HivePreparedStatement(client, sql);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int[])
*/
public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String,
* java.lang.String[])
*/
public PreparedStatement prepareStatement(String sql, String[] columnNames)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int, int)
*/
public PreparedStatement prepareStatement(String sql, int resultSetType,
int resultSetConcurrency) throws SQLException {
return new HivePreparedStatement(client, sql);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int, int, int)
*/
public PreparedStatement prepareStatement(String sql, int resultSetType,
int resultSetConcurrency, int resultSetHoldability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#releaseSavepoint(java.sql.Savepoint)
*/
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#rollback()
*/
public void rollback() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#rollback(java.sql.Savepoint)
*/
public void rollback(Savepoint savepoint) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setAutoCommit(boolean)
*/
public void setAutoCommit(boolean autoCommit) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setCatalog(java.lang.String)
*/
public void setCatalog(String catalog) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setClientInfo(java.util.Properties)
*/
public void setClientInfo(Properties properties)
throws SQLClientInfoException {
// TODO Auto-generated method stub
throw new SQLClientInfoException("Method not supported", null);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setClientInfo(java.lang.String, java.lang.String)
*/
public void setClientInfo(String name, String value)
throws SQLClientInfoException {
// TODO Auto-generated method stub
throw new SQLClientInfoException("Method not supported", null);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setHoldability(int)
*/
public void setHoldability(int holdability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setReadOnly(boolean)
*/
public void setReadOnly(boolean readOnly) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setSavepoint()
*/
public Savepoint setSavepoint() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setSavepoint(java.lang.String)
*/
public Savepoint setSavepoint(String name) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setTransactionIsolation(int)
*/
public void setTransactionIsolation(int level) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setTypeMap(java.util.Map)
*/
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
*/
public boolean isWrapperFor(Class<?> iface) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Wrapper#unwrap(java.lang.Class)
*/
public <T> T unwrap(Class<T> iface) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
}
| jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.jdbc;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.service.HiveClient;
import org.apache.hadoop.hive.service.HiveInterface;
import org.apache.hadoop.hive.service.HiveServer;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
/**
* HiveConnection.
*
*/
public class HiveConnection implements java.sql.Connection {
private TTransport transport;
private HiveInterface client;
private boolean isClosed = true;
private SQLWarning warningChain = null;
private static final String URI_PREFIX = "jdbc:hive://";
/**
* TODO: - parse uri (use java.net.URI?).
*/
public HiveConnection(String uri, Properties info) throws SQLException {
if (!uri.startsWith(URI_PREFIX)) {
throw new SQLException("Invalid URL: " + uri, "08S01");
}
// remove prefix
uri = uri.substring(URI_PREFIX.length());
// If uri is not specified, use local mode.
if (uri.isEmpty()) {
try {
client = new HiveServer.HiveServerHandler();
} catch (MetaException e) {
throw new SQLException("Error accessing Hive metastore: "
+ e.getMessage(), "08S01",e);
}
} else {
// parse uri
// form: hostname:port/databasename
String[] parts = uri.split("/");
String[] hostport = parts[0].split(":");
int port = 10000;
String host = hostport[0];
try {
port = Integer.parseInt(hostport[1]);
} catch (Exception e) {
}
transport = new TSocket(host, port);
TProtocol protocol = new TBinaryProtocol(transport);
client = new HiveClient(protocol);
try {
transport.open();
} catch (TTransportException e) {
throw new SQLException("Could not establish connection to "
+ uri + ": " + e.getMessage(), "08S01");
}
}
isClosed = false;
configureConnection();
}
private void configureConnection() throws SQLException {
Statement stmt = createStatement();
stmt.execute(
"set hive.fetch.output.serde = org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
stmt.close();
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#clearWarnings()
*/
public void clearWarnings() throws SQLException {
warningChain = null;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#close()
*/
public void close() throws SQLException {
if (!isClosed) {
try {
client.clean();
} catch (TException e) {
throw new SQLException("Error while cleaning up the server resources", e);
} finally {
isClosed = true;
if (transport != null) {
transport.close();
}
}
}
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#commit()
*/
public void commit() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createArrayOf(java.lang.String,
* java.lang.Object[])
*/
public Array createArrayOf(String arg0, Object[] arg1) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createBlob()
*/
public Blob createBlob() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createClob()
*/
public Clob createClob() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createNClob()
*/
public NClob createNClob() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createSQLXML()
*/
public SQLXML createSQLXML() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/**
* Creates a Statement object for sending SQL statements to the database.
*
* @throws SQLException
* if a database access error occurs.
* @see java.sql.Connection#createStatement()
*/
public Statement createStatement() throws SQLException {
if (isClosed) {
throw new SQLException("Can't create Statement, connection is closed");
}
return new HiveStatement(client);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createStatement(int, int)
*/
public Statement createStatement(int resultSetType, int resultSetConcurrency)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createStatement(int, int, int)
*/
public Statement createStatement(int resultSetType, int resultSetConcurrency,
int resultSetHoldability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#createStruct(java.lang.String, java.lang.Object[])
*/
public Struct createStruct(String typeName, Object[] attributes)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getAutoCommit()
*/
public boolean getAutoCommit() throws SQLException {
return true;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getCatalog()
*/
public String getCatalog() throws SQLException {
return "";
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getClientInfo()
*/
public Properties getClientInfo() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getClientInfo(java.lang.String)
*/
public String getClientInfo(String name) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getHoldability()
*/
public int getHoldability() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getMetaData()
*/
public DatabaseMetaData getMetaData() throws SQLException {
return new HiveDatabaseMetaData(client);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getTransactionIsolation()
*/
public int getTransactionIsolation() throws SQLException {
return Connection.TRANSACTION_NONE;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getTypeMap()
*/
public Map<String, Class<?>> getTypeMap() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#getWarnings()
*/
public SQLWarning getWarnings() throws SQLException {
return warningChain;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#isClosed()
*/
public boolean isClosed() throws SQLException {
return isClosed;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#isReadOnly()
*/
public boolean isReadOnly() throws SQLException {
return false;
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#isValid(int)
*/
public boolean isValid(int timeout) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#nativeSQL(java.lang.String)
*/
public String nativeSQL(String sql) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareCall(java.lang.String)
*/
public CallableStatement prepareCall(String sql) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareCall(java.lang.String, int, int)
*/
public CallableStatement prepareCall(String sql, int resultSetType,
int resultSetConcurrency) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareCall(java.lang.String, int, int, int)
*/
public CallableStatement prepareCall(String sql, int resultSetType,
int resultSetConcurrency, int resultSetHoldability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String)
*/
public PreparedStatement prepareStatement(String sql) throws SQLException {
return new HivePreparedStatement(client, sql);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int)
*/
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
throws SQLException {
return new HivePreparedStatement(client, sql);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int[])
*/
public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String,
* java.lang.String[])
*/
public PreparedStatement prepareStatement(String sql, String[] columnNames)
throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int, int)
*/
public PreparedStatement prepareStatement(String sql, int resultSetType,
int resultSetConcurrency) throws SQLException {
return new HivePreparedStatement(client, sql);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#prepareStatement(java.lang.String, int, int, int)
*/
public PreparedStatement prepareStatement(String sql, int resultSetType,
int resultSetConcurrency, int resultSetHoldability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#releaseSavepoint(java.sql.Savepoint)
*/
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#rollback()
*/
public void rollback() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#rollback(java.sql.Savepoint)
*/
public void rollback(Savepoint savepoint) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setAutoCommit(boolean)
*/
public void setAutoCommit(boolean autoCommit) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setCatalog(java.lang.String)
*/
public void setCatalog(String catalog) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setClientInfo(java.util.Properties)
*/
public void setClientInfo(Properties properties)
throws SQLClientInfoException {
// TODO Auto-generated method stub
throw new SQLClientInfoException("Method not supported", null);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setClientInfo(java.lang.String, java.lang.String)
*/
public void setClientInfo(String name, String value)
throws SQLClientInfoException {
// TODO Auto-generated method stub
throw new SQLClientInfoException("Method not supported", null);
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setHoldability(int)
*/
public void setHoldability(int holdability) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setReadOnly(boolean)
*/
public void setReadOnly(boolean readOnly) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setSavepoint()
*/
public Savepoint setSavepoint() throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setSavepoint(java.lang.String)
*/
public Savepoint setSavepoint(String name) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setTransactionIsolation(int)
*/
public void setTransactionIsolation(int level) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Connection#setTypeMap(java.util.Map)
*/
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
*/
public boolean isWrapperFor(Class<?> iface) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
/*
* (non-Javadoc)
*
* @see java.sql.Wrapper#unwrap(java.lang.Class)
*/
public <T> T unwrap(Class<T> iface) throws SQLException {
// TODO Auto-generated method stub
throw new SQLException("Method not supported");
}
}
| HIVE-2577: Expose the HiveConf in HiveConnection API (Nicolas Lalevee via Ashutosh Chauhan)
git-svn-id: c2303eb81cb646bce052f55f7f0d14f181a5956c@1304068 13f79535-47bb-0310-9956-ffa450edef68
| jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java | HIVE-2577: Expose the HiveConf in HiveConnection API (Nicolas Lalevee via Ashutosh Chauhan) | <ide><path>dbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
<ide>
<ide> package org.apache.hadoop.hive.jdbc;
<ide>
<add>import org.apache.hadoop.hive.conf.HiveConf;
<ide> import org.apache.hadoop.hive.metastore.api.MetaException;
<ide> import org.apache.hadoop.hive.service.HiveClient;
<ide> import org.apache.hadoop.hive.service.HiveInterface;
<ide> private SQLWarning warningChain = null;
<ide>
<ide> private static final String URI_PREFIX = "jdbc:hive://";
<add>
<add> /**
<add> * Create a connection to a local Hive
<add> *
<add> * @param hiveConf
<add> * @throws SQLException
<add> */
<add> public HiveConnection(HiveConf hiveConf) throws SQLException {
<add> try {
<add> client = new HiveServer.HiveServerHandler(hiveConf);
<add> } catch (MetaException e) {
<add> throw new SQLException("Error accessing Hive metastore: "
<add> + e.getMessage(), "08S01",e);
<add> }
<add> isClosed = false;
<add> configureConnection();
<add> }
<ide>
<ide> /**
<ide> * TODO: - parse uri (use java.net.URI?). |
|
JavaScript | agpl-3.0 | 7d24c5fda24abccd712a6df8002b62955d2f83a4 | 0 | grafana/grafana,grafana/grafana,grafana/grafana,grafana/grafana,grafana/grafana,grafana/grafana,grafana/grafana | define([
'jquery',
'lodash',
],
function($, _) {
'use strict';
var kbn = {};
kbn.valueFormats = {};
///// HELPER FUNCTIONS /////
kbn.round_interval = function(interval) {
switch (true) {
// 0.5s
case (interval <= 500):
return 100; // 0.1s
// 5s
case (interval <= 5000):
return 1000; // 1s
// 7.5s
case (interval <= 7500):
return 5000; // 5s
// 15s
case (interval <= 15000):
return 10000; // 10s
// 45s
case (interval <= 45000):
return 30000; // 30s
// 3m
case (interval <= 180000):
return 60000; // 1m
// 9m
case (interval <= 450000):
return 300000; // 5m
// 20m
case (interval <= 1200000):
return 600000; // 10m
// 45m
case (interval <= 2700000):
return 1800000; // 30m
// 2h
case (interval <= 7200000):
return 3600000; // 1h
// 6h
case (interval <= 21600000):
return 10800000; // 3h
// 24h
case (interval <= 86400000):
return 43200000; // 12h
// 48h
case (interval <= 172800000):
return 86400000; // 24h
// 1w
case (interval <= 604800000):
return 86400000; // 24h
// 3w
case (interval <= 1814400000):
return 604800000; // 1w
// 2y
case (interval < 3628800000):
return 2592000000; // 30d
default:
return 31536000000; // 1y
}
};
kbn.secondsToHms = function(seconds) {
var numyears = Math.floor(seconds / 31536000);
if(numyears){
return numyears + 'y';
}
var numdays = Math.floor((seconds % 31536000) / 86400);
if(numdays){
return numdays + 'd';
}
var numhours = Math.floor(((seconds % 31536000) % 86400) / 3600);
if(numhours){
return numhours + 'h';
}
var numminutes = Math.floor((((seconds % 31536000) % 86400) % 3600) / 60);
if(numminutes){
return numminutes + 'm';
}
var numseconds = Math.floor((((seconds % 31536000) % 86400) % 3600) % 60);
if(numseconds){
return numseconds + 's';
}
var nummilliseconds = Math.floor(seconds * 1000.0);
if(nummilliseconds){
return nummilliseconds + 'ms';
}
return 'less then a millisecond'; //'just now' //or other string you like;
};
kbn.to_percent = function(number,outof) {
return Math.floor((number/outof)*10000)/100 + "%";
};
kbn.addslashes = function(str) {
str = str.replace(/\\/g, '\\\\');
str = str.replace(/\'/g, '\\\'');
str = str.replace(/\"/g, '\\"');
str = str.replace(/\0/g, '\\0');
return str;
};
kbn.interval_regex = /(\d+(?:\.\d+)?)([Mwdhmsy])/;
// histogram & trends
kbn.intervals_in_seconds = {
y: 31536000,
M: 2592000,
w: 604800,
d: 86400,
h: 3600,
m: 60,
s: 1
};
kbn.calculateInterval = function(range, resolution, userInterval) {
var lowLimitMs = 1; // 1 millisecond default low limit
var intervalMs, lowLimitInterval;
if (userInterval) {
if (userInterval[0] === '>') {
lowLimitInterval = userInterval.slice(1);
lowLimitMs = kbn.interval_to_ms(lowLimitInterval);
}
else {
return userInterval;
}
}
intervalMs = kbn.round_interval((range.to.valueOf() - range.from.valueOf()) / resolution);
if (lowLimitMs > intervalMs) {
intervalMs = lowLimitMs;
}
return kbn.secondsToHms(intervalMs / 1000);
};
kbn.describe_interval = function (string) {
var matches = string.match(kbn.interval_regex);
if (!matches || !_.has(kbn.intervals_in_seconds, matches[2])) {
throw new Error('Invalid interval string, expexcting a number followed by one of "Mwdhmsy"');
} else {
return {
sec: kbn.intervals_in_seconds[matches[2]],
type: matches[2],
count: parseInt(matches[1], 10)
};
}
};
kbn.interval_to_ms = function(string) {
var info = kbn.describe_interval(string);
return info.sec * 1000 * info.count;
};
kbn.interval_to_seconds = function (string) {
var info = kbn.describe_interval(string);
return info.sec * info.count;
};
kbn.query_color_dot = function (color, diameter) {
return '<div class="icon-circle" style="' + [
'display:inline-block',
'color:' + color,
'font-size:' + diameter + 'px',
].join(';') + '"></div>';
};
kbn.slugifyForUrl = function(str) {
return str
.toLowerCase()
.replace(/[^\w ]+/g,'')
.replace(/ +/g,'-');
};
kbn.exportSeriesListToCsv = function(seriesList) {
var text = 'Series;Time;Value\n';
_.each(seriesList, function(series) {
_.each(series.datapoints, function(dp) {
text += series.alias + ';' + new Date(dp[1]).toISOString() + ';' + dp[0] + '\n';
});
});
var blob = new Blob([text], { type: "text/csv;charset=utf-8" });
window.saveAs(blob, 'grafana_data_export.csv');
};
kbn.stringToJsRegex = function(str) {
if (str[0] !== '/') {
return new RegExp(str);
}
var match = str.match(new RegExp('^/(.*?)/(g?i?m?y?)$'));
return new RegExp(match[1], match[2]);
};
kbn.toFixed = function(value, decimals) {
if (value === null) {
return "";
}
var factor = decimals ? Math.pow(10, Math.max(0, decimals)) : 1;
var formatted = String(Math.round(value * factor) / factor);
// if exponent return directly
if (formatted.indexOf('e') !== -1 || value === 0) {
return formatted;
}
// If tickDecimals was specified, ensure that we have exactly that
// much precision; otherwise default to the value's own precision.
if (decimals != null) {
var decimalPos = formatted.indexOf(".");
var precision = decimalPos === -1 ? 0 : formatted.length - decimalPos - 1;
if (precision < decimals) {
return (precision ? formatted : formatted + ".") + (String(factor)).substr(1, decimals - precision);
}
}
return formatted;
};
kbn.toFixedScaled = function(value, decimals, scaledDecimals, additionalDecimals, ext) {
if (scaledDecimals === null) {
return kbn.toFixed(value, decimals) + ext;
} else {
return kbn.toFixed(value, scaledDecimals + additionalDecimals) + ext;
}
};
kbn.roundValue = function (num, decimals) {
if (num === null) { return null; }
var n = Math.pow(10, decimals);
return Math.round((n * num).toFixed(decimals)) / n;
};
///// FORMAT FUNCTION CONSTRUCTORS /////
kbn.formatBuilders = {};
// Formatter which always appends a fixed unit string to the value. No
// scaling of the value is performed.
kbn.formatBuilders.fixedUnit = function(unit, separator) {
return function(size, decimals) {
if (size === null) { return ""; }
return kbn.toFixed(size, decimals) + (separator || ' ') + unit;
};
};
kbn.formatFuncCreator = function(factor, extArray) {
return function(size, decimals, scaledDecimals) {
if (size === null) {
return "";
}
var steps = 0;
var limit = extArray.length;
while (Math.abs(size) >= factor) {
steps++;
size /= factor;
if (steps >= limit) { return "NA"; }
}
if (steps > 0 && scaledDecimals !== null) {
decimals = scaledDecimals + (3 * steps);
}
return kbn.toFixed(size, decimals) + extArray[steps];
};
};
///// VALUE FORMATS /////
// Dimensionless Units
kbn.valueFormats.none = kbn.toFixed;
kbn.valueFormats.short = kbn.formatFuncCreator(1000, ['', ' K', ' Mil', ' Bil', ' Tri', ' Quadr', ' Quint', ' Sext', ' Sept']);
kbn.valueFormats.ppm = kbn.formatBuilders.fixedUnit('ppm');
kbn.valueFormats.percent = kbn.formatBuilders.fixedUnit('%', '');
// Data
kbn.valueFormats.bits = kbn.formatFuncCreator(1024, [' b', ' Kib', ' Mib', ' Gib', ' Tib', ' Pib', ' Eib', ' Zib', ' Yib']);
kbn.valueFormats.bytes = kbn.formatFuncCreator(1024, [' B', ' KiB', ' MiB', ' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
kbn.valueFormats.kbytes = kbn.formatFuncCreator(1024, [' KiB', ' MiB', ' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
kbn.valueFormats.mbytes = kbn.formatFuncCreator(1024, [' MiB', ' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
kbn.valueFormats.gbytes = kbn.formatFuncCreator(1024, [' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
// Data Rate
kbn.valueFormats.pps = kbn.formatFuncCreator(1000, [' pps', ' Kpps', ' Mpps', ' Gpps', ' Tpps', ' Ppps', ' Epps', ' Zpps', ' Ypps']);
kbn.valueFormats.bps = kbn.formatFuncCreator(1000, [' bps', ' Kbps', ' Mbps', ' Gbps', ' Tbps', ' Pbps', ' Ebps', ' Zbps', ' Ybps']);
kbn.valueFormats.Bps = kbn.formatFuncCreator(1000, [' Bps', ' KBps', ' MBps', ' GBps', ' TBps', ' PBps', ' EBps', ' ZBps', ' YBps']);
// Energy
kbn.valueFormats.watt = kbn.formatFuncCreator(1000, [' W', ' kW', ' MW', ' GW', ' TW', ' PW', ' EW', ' ZW', ' YW']);
kbn.valueFormats.kwatt = kbn.formatFuncCreator(1000, [' kW', ' MW', ' GW', ' TW', ' PW', ' EW', ' ZW', ' YW']);
kbn.valueFormats.watth = kbn.formatFuncCreator(1000, [' Wh', ' kWh', ' MWh', ' GWh', ' TWh', ' PWh', ' EWh', ' ZWh', ' YWh']);
kbn.valueFormats.kwatth = kbn.formatFuncCreator(1000, [' kWh', ' MWh', ' GWh', ' TWh', ' PWh', ' EWh', ' ZWh', ' YWh']);
kbn.valueFormats.joule = kbn.formatFuncCreator(1000, [' J', ' kJ', ' MJ', ' GJ', ' TJ', ' PJ', ' EJ', ' ZJ', ' YJ']);
kbn.valueFormats.ev = kbn.formatFuncCreator(1000, [' eV', ' keV', ' MeV', 'GeV', 'TeV', 'PeV', 'EeV', 'ZeV', 'YeV']);
kbn.valueFormats.amp = kbn.formatFuncCreator(1000, [' A', ' kA', ' MA', ' GA', ' TA', ' PA', ' EA', ' ZA', ' YA']);
kbn.valueFormats.volt = kbn.formatFuncCreator(1000, [' V', ' kV', ' MV', ' GV', ' TV', ' PV', ' EV', ' ZV', ' YV']);
// Temperature
kbn.valueFormats.celsius = kbn.formatBuilders.fixedUnit('°C');
kbn.valueFormats.farenheit = kbn.formatBuilders.fixedUnit('°F');
kbn.valueFormats.humidity = kbn.formatBuilders.fixedUnit('%H');
// Pressure
kbn.valueFormats.pressurembar = kbn.formatBuilders.fixedUnit('mbar');
kbn.valueFormats.pressurehpa = kbn.formatBuilders.fixedUnit('hPa');
// Velocity
kbn.valueFormats.velocityms = kbn.formatBuilders.fixedUnit('m/s');
kbn.valueFormats.velocitykmh = kbn.formatBuilders.fixedUnit('km/h');
kbn.valueFormats.velocitymph = kbn.formatBuilders.fixedUnit('mph');
kbn.valueFormats.velocityknot = kbn.formatBuilders.fixedUnit('kn');
// Time
kbn.valueFormats.hertz = kbn.formatFuncCreator(1000, [' Hz', ' kHz', ' MHz', ' GHz', ' THz', ' PHz', ' EHz', ' ZHz', ' YHz']);
kbn.valueFormats.ms = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 1000) {
return kbn.toFixed(size, decimals) + " ms";
}
// Less than 1 min
else if (Math.abs(size) < 60000) {
return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, " s");
}
// Less than 1 hour, devide in minutes
else if (Math.abs(size) < 3600000) {
return kbn.toFixedScaled(size / 60000, decimals, scaledDecimals, 5, " min");
}
// Less than one day, devide in hours
else if (Math.abs(size) < 86400000) {
return kbn.toFixedScaled(size / 3600000, decimals, scaledDecimals, 7, " hour");
}
// Less than one year, devide in days
else if (Math.abs(size) < 31536000000) {
return kbn.toFixedScaled(size / 86400000, decimals, scaledDecimals, 8, " day");
}
return kbn.toFixedScaled(size / 31536000000, decimals, scaledDecimals, 10, " year");
};
kbn.valueFormats.s = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 600) {
return kbn.toFixed(size, decimals) + " s";
}
// Less than 1 hour, devide in minutes
else if (Math.abs(size) < 3600) {
return kbn.toFixedScaled(size / 60, decimals, scaledDecimals, 1, " min");
}
// Less than one day, devide in hours
else if (Math.abs(size) < 86400) {
return kbn.toFixedScaled(size / 3600, decimals, scaledDecimals, 4, " hour");
}
// Less than one week, devide in days
else if (Math.abs(size) < 604800) {
return kbn.toFixedScaled(size / 86400, decimals, scaledDecimals, 5, " day");
}
// Less than one year, devide in week
else if (Math.abs(size) < 31536000) {
return kbn.toFixedScaled(size / 604800, decimals, scaledDecimals, 6, " week");
}
return kbn.toFixedScaled(size / 3.15569e7, decimals, scaledDecimals, 7, " year");
};
kbn.valueFormats['µs'] = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 1000) {
return kbn.toFixed(size, decimals) + " µs";
}
else if (Math.abs(size) < 1000000) {
return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, " ms");
}
else {
return kbn.toFixedScaled(size / 1000000, decimals, scaledDecimals, 6, " s");
}
};
kbn.valueFormats.ns = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 1000) {
return kbn.toFixed(size, decimals) + " ns";
}
else if (Math.abs(size) < 1000000) {
return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, " µs");
}
else if (Math.abs(size) < 1000000000) {
return kbn.toFixedScaled(size / 1000000, decimals, scaledDecimals, 6, " ms");
}
else if (Math.abs(size) < 60000000000){
return kbn.toFixedScaled(size / 1000000000, decimals, scaledDecimals, 9, " s");
}
else {
return kbn.toFixedScaled(size / 60000000000, decimals, scaledDecimals, 12, " min");
}
};
///// FORMAT MENU /////
kbn.getUnitFormats = function() {
return [
{
text: 'none',
submenu: [
{text: 'none' , value: 'none'},
{text: 'short', value: 'short'},
{text: 'percent', value: 'percent'},
{text: 'ppm', value: 'ppm'},
{text: 'dB', value: 'dB'},
]
},
{
text: 'duration',
submenu: [
{text: 'nanoseconds (ns)' , value: 'ns'},
{text: 'microseconds (µs)', value: 'µs'},
{text: 'milliseconds (ms)', value: 'ms'},
{text: 'seconds (s)', value: 's'},
{text: 'Hertz (1/s)', value: 'hertz'},
]
},
{
text: 'data',
submenu: [
{text: 'bits', value: 'bits'},
{text: 'bytes', value: 'bytes'},
{text: 'kilobytes', value: 'kbytes'},
{text: 'megabytes', value: 'mbytes'},
{text: 'gigabytes', value: 'gbytes'},
]
},
{
text: 'data rate',
submenu: [
{text: 'packets/sec', value: 'pps'},
{text: 'bits/sec', value: 'bps'},
{text: 'bytes/sec', value: 'Bps'},
]
},
{
text: 'energy',
submenu: [
{text: 'watt (W)', value: 'watt'},
{text: 'kilowatt (kW)', value: 'kwatt'},
{text: 'watt-hour (Wh)', value: 'watth'},
{text: 'kilowatt-hour (kWh)', value: 'kwatth'},
{text: 'joule (J)', value: 'joule'},
{text: 'electron volt (eV)', value: 'ev'},
{text: 'Ampere (A)', value: 'amp'},
{text: 'Volt (V)', value: 'volt'},
]
},
{
text: 'weather',
submenu: [
{text: 'Celcius (°C)', value: 'celsius' },
{text: 'Farenheit (°F)', value: 'farenheit'},
{text: 'Humidity (%H)', value: 'humidity' },
{text: 'Pressure (mbar)', value: 'pressurembar' },
{text: 'Pressure (hPa)', value: 'pressurehpa' },
]
},
{
text: 'velocity',
submenu: [
{text: 'm/s', value: 'velocityms' },
{text: 'km/h', value: 'velocitykmh' },
{text: 'mph', value: 'velocitymph' },
{text: 'knot (kn)', value: 'velocityknot' },
]
},
];
};
return kbn;
});
| public/app/components/kbn.js | define([
'jquery',
'lodash',
],
function($, _) {
'use strict';
var kbn = {};
kbn.valueFormats = {};
///// HELPER FUNCTIONS /////
kbn.round_interval = function(interval) {
switch (true) {
// 0.5s
case (interval <= 500):
return 100; // 0.1s
// 5s
case (interval <= 5000):
return 1000; // 1s
// 7.5s
case (interval <= 7500):
return 5000; // 5s
// 15s
case (interval <= 15000):
return 10000; // 10s
// 45s
case (interval <= 45000):
return 30000; // 30s
// 3m
case (interval <= 180000):
return 60000; // 1m
// 9m
case (interval <= 450000):
return 300000; // 5m
// 20m
case (interval <= 1200000):
return 600000; // 10m
// 45m
case (interval <= 2700000):
return 1800000; // 30m
// 2h
case (interval <= 7200000):
return 3600000; // 1h
// 6h
case (interval <= 21600000):
return 10800000; // 3h
// 24h
case (interval <= 86400000):
return 43200000; // 12h
// 48h
case (interval <= 172800000):
return 86400000; // 24h
// 1w
case (interval <= 604800000):
return 86400000; // 24h
// 3w
case (interval <= 1814400000):
return 604800000; // 1w
// 2y
case (interval < 3628800000):
return 2592000000; // 30d
default:
return 31536000000; // 1y
}
};
kbn.secondsToHms = function(seconds) {
var numyears = Math.floor(seconds / 31536000);
if(numyears){
return numyears + 'y';
}
var numdays = Math.floor((seconds % 31536000) / 86400);
if(numdays){
return numdays + 'd';
}
var numhours = Math.floor(((seconds % 31536000) % 86400) / 3600);
if(numhours){
return numhours + 'h';
}
var numminutes = Math.floor((((seconds % 31536000) % 86400) % 3600) / 60);
if(numminutes){
return numminutes + 'm';
}
var numseconds = Math.floor((((seconds % 31536000) % 86400) % 3600) % 60);
if(numseconds){
return numseconds + 's';
}
var nummilliseconds = Math.floor(seconds * 1000.0);
if(nummilliseconds){
return nummilliseconds + 'ms';
}
return 'less then a millisecond'; //'just now' //or other string you like;
};
kbn.to_percent = function(number,outof) {
return Math.floor((number/outof)*10000)/100 + "%";
};
kbn.addslashes = function(str) {
str = str.replace(/\\/g, '\\\\');
str = str.replace(/\'/g, '\\\'');
str = str.replace(/\"/g, '\\"');
str = str.replace(/\0/g, '\\0');
return str;
};
kbn.interval_regex = /(\d+(?:\.\d+)?)([Mwdhmsy])/;
// histogram & trends
kbn.intervals_in_seconds = {
y: 31536000,
M: 2592000,
w: 604800,
d: 86400,
h: 3600,
m: 60,
s: 1
};
kbn.calculateInterval = function(range, resolution, userInterval) {
var lowLimitMs = 1; // 1 millisecond default low limit
var intervalMs, lowLimitInterval;
if (userInterval) {
if (userInterval[0] === '>') {
lowLimitInterval = userInterval.slice(1);
lowLimitMs = kbn.interval_to_ms(lowLimitInterval);
}
else {
return userInterval;
}
}
intervalMs = kbn.round_interval((range.to.valueOf() - range.from.valueOf()) / resolution);
if (lowLimitMs > intervalMs) {
intervalMs = lowLimitMs;
}
return kbn.secondsToHms(intervalMs / 1000);
};
kbn.describe_interval = function (string) {
var matches = string.match(kbn.interval_regex);
if (!matches || !_.has(kbn.intervals_in_seconds, matches[2])) {
throw new Error('Invalid interval string, expexcting a number followed by one of "Mwdhmsy"');
} else {
return {
sec: kbn.intervals_in_seconds[matches[2]],
type: matches[2],
count: parseInt(matches[1], 10)
};
}
};
kbn.interval_to_ms = function(string) {
var info = kbn.describe_interval(string);
return info.sec * 1000 * info.count;
};
kbn.interval_to_seconds = function (string) {
var info = kbn.describe_interval(string);
return info.sec * info.count;
};
kbn.query_color_dot = function (color, diameter) {
return '<div class="icon-circle" style="' + [
'display:inline-block',
'color:' + color,
'font-size:' + diameter + 'px',
].join(';') + '"></div>';
};
kbn.slugifyForUrl = function(str) {
return str
.toLowerCase()
.replace(/[^\w ]+/g,'')
.replace(/ +/g,'-');
};
kbn.exportSeriesListToCsv = function(seriesList) {
var text = 'Series;Time;Value\n';
_.each(seriesList, function(series) {
_.each(series.datapoints, function(dp) {
text += series.alias + ';' + new Date(dp[1]).toISOString() + ';' + dp[0] + '\n';
});
});
var blob = new Blob([text], { type: "text/csv;charset=utf-8" });
window.saveAs(blob, 'grafana_data_export.csv');
};
kbn.stringToJsRegex = function(str) {
if (str[0] !== '/') {
return new RegExp(str);
}
var match = str.match(new RegExp('^/(.*?)/(g?i?m?y?)$'));
return new RegExp(match[1], match[2]);
};
kbn.toFixed = function(value, decimals) {
if (value === null) {
return "";
}
var factor = decimals ? Math.pow(10, Math.max(0, decimals)) : 1;
var formatted = String(Math.round(value * factor) / factor);
// if exponent return directly
if (formatted.indexOf('e') !== -1 || value === 0) {
return formatted;
}
// If tickDecimals was specified, ensure that we have exactly that
// much precision; otherwise default to the value's own precision.
if (decimals != null) {
var decimalPos = formatted.indexOf(".");
var precision = decimalPos === -1 ? 0 : formatted.length - decimalPos - 1;
if (precision < decimals) {
return (precision ? formatted : formatted + ".") + (String(factor)).substr(1, decimals - precision);
}
}
return formatted;
};
kbn.toFixedScaled = function(value, decimals, scaledDecimals, additionalDecimals, ext) {
if (scaledDecimals === null) {
return kbn.toFixed(value, decimals) + ext;
} else {
return kbn.toFixed(value, scaledDecimals + additionalDecimals) + ext;
}
};
kbn.roundValue = function (num, decimals) {
if (num === null) { return null; }
var n = Math.pow(10, decimals);
return Math.round((n * num).toFixed(decimals)) / n;
};
///// FORMAT FUNCTION CONSTRUCTORS /////
kbn.formatFuncCreator = function(factor, extArray) {
return function(size, decimals, scaledDecimals) {
if (size === null) {
return "";
}
var steps = 0;
var limit = extArray.length;
while (Math.abs(size) >= factor) {
steps++;
size /= factor;
if (steps >= limit) { return "NA"; }
}
if (steps > 0 && scaledDecimals !== null) {
decimals = scaledDecimals + (3 * steps);
}
return kbn.toFixed(size, decimals) + extArray[steps];
};
};
///// VALUE FORMATS /////
// Dimensionless Units
kbn.valueFormats.none = kbn.toFixed;
kbn.valueFormats.short = kbn.formatFuncCreator(1000, ['', ' K', ' Mil', ' Bil', ' Tri', ' Quadr', ' Quint', ' Sext', ' Sept']);
kbn.valueFormats.ppm = function(value, decimals) { return kbn.toFixed(value, decimals) + ' ppm'; };
kbn.valueFormats.percent = function(size, decimals) {
if (size == null) { return ""; }
return kbn.toFixed(size, decimals) + '%';
};
// Data
kbn.valueFormats.bits = kbn.formatFuncCreator(1024, [' b', ' Kib', ' Mib', ' Gib', ' Tib', ' Pib', ' Eib', ' Zib', ' Yib']);
kbn.valueFormats.bytes = kbn.formatFuncCreator(1024, [' B', ' KiB', ' MiB', ' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
kbn.valueFormats.kbytes = kbn.formatFuncCreator(1024, [' KiB', ' MiB', ' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
kbn.valueFormats.mbytes = kbn.formatFuncCreator(1024, [' MiB', ' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
kbn.valueFormats.gbytes = kbn.formatFuncCreator(1024, [' GiB', ' TiB', ' PiB', ' EiB', ' ZiB', ' YiB']);
// Data Rate
kbn.valueFormats.pps = kbn.formatFuncCreator(1000, [' pps', ' Kpps', ' Mpps', ' Gpps', ' Tpps', ' Ppps', ' Epps', ' Zpps', ' Ypps']);
kbn.valueFormats.bps = kbn.formatFuncCreator(1000, [' bps', ' Kbps', ' Mbps', ' Gbps', ' Tbps', ' Pbps', ' Ebps', ' Zbps', ' Ybps']);
kbn.valueFormats.Bps = kbn.formatFuncCreator(1000, [' Bps', ' KBps', ' MBps', ' GBps', ' TBps', ' PBps', ' EBps', ' ZBps', ' YBps']);
// Energy
kbn.valueFormats.watt = kbn.formatFuncCreator(1000, [' W', ' kW', ' MW', ' GW', ' TW', ' PW', ' EW', ' ZW', ' YW']);
kbn.valueFormats.kwatt = kbn.formatFuncCreator(1000, [' kW', ' MW', ' GW', ' TW', ' PW', ' EW', ' ZW', ' YW']);
kbn.valueFormats.watth = kbn.formatFuncCreator(1000, [' Wh', ' kWh', ' MWh', ' GWh', ' TWh', ' PWh', ' EWh', ' ZWh', ' YWh']);
kbn.valueFormats.kwatth = kbn.formatFuncCreator(1000, [' kWh', ' MWh', ' GWh', ' TWh', ' PWh', ' EWh', ' ZWh', ' YWh']);
kbn.valueFormats.joule = kbn.formatFuncCreator(1000, [' J', ' kJ', ' MJ', ' GJ', ' TJ', ' PJ', ' EJ', ' ZJ', ' YJ']);
kbn.valueFormats.ev = kbn.formatFuncCreator(1000, [' eV', ' keV', ' MeV', 'GeV', 'TeV', 'PeV', 'EeV', 'ZeV', 'YeV']);
kbn.valueFormats.amp = kbn.formatFuncCreator(1000, [' A', ' kA', ' MA', ' GA', ' TA', ' PA', ' EA', ' ZA', ' YA']);
kbn.valueFormats.volt = kbn.formatFuncCreator(1000, [' V', ' kV', ' MV', ' GV', ' TV', ' PV', ' EV', ' ZV', ' YV']);
// Temperature
kbn.valueFormats.celsius = function(value, decimals) { return kbn.toFixed(value, decimals) + ' °C'; };
kbn.valueFormats.farenheit = function(value, decimals) { return kbn.toFixed(value, decimals) + ' °F'; };
kbn.valueFormats.humidity = function(value, decimals) { return kbn.toFixed(value, decimals) + ' %H'; };
// Pressure
kbn.valueFormats.pressurembar = function(value, decimals) { return kbn.toFixed(value, decimals) + ' mbar'; };
kbn.valueFormats.pressurehpa = function(value, decimals) { return kbn.toFixed(value, decimals) + ' hPa'; };
// Velocity
kbn.valueFormats.velocityms = function(value, decimals) { return kbn.toFixed(value, decimals) + ' m/s'; };
kbn.valueFormats.velocitykmh = function(value, decimals) { return kbn.toFixed(value, decimals) + ' km/h'; };
kbn.valueFormats.velocitymph = function(value, decimals) { return kbn.toFixed(value, decimals) + ' mph'; };
kbn.valueFormats.velocityknot = function(value, decimals) { return kbn.toFixed(value, decimals) + ' kn'; };
// Time
kbn.valueFormats.hertz = kbn.formatFuncCreator(1000, [' Hz', ' kHz', ' MHz', ' GHz', ' THz', ' PHz', ' EHz', ' ZHz', ' YHz']);
kbn.valueFormats.ms = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 1000) {
return kbn.toFixed(size, decimals) + " ms";
}
// Less than 1 min
else if (Math.abs(size) < 60000) {
return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, " s");
}
// Less than 1 hour, devide in minutes
else if (Math.abs(size) < 3600000) {
return kbn.toFixedScaled(size / 60000, decimals, scaledDecimals, 5, " min");
}
// Less than one day, devide in hours
else if (Math.abs(size) < 86400000) {
return kbn.toFixedScaled(size / 3600000, decimals, scaledDecimals, 7, " hour");
}
// Less than one year, devide in days
else if (Math.abs(size) < 31536000000) {
return kbn.toFixedScaled(size / 86400000, decimals, scaledDecimals, 8, " day");
}
return kbn.toFixedScaled(size / 31536000000, decimals, scaledDecimals, 10, " year");
};
kbn.valueFormats.s = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 600) {
return kbn.toFixed(size, decimals) + " s";
}
// Less than 1 hour, devide in minutes
else if (Math.abs(size) < 3600) {
return kbn.toFixedScaled(size / 60, decimals, scaledDecimals, 1, " min");
}
// Less than one day, devide in hours
else if (Math.abs(size) < 86400) {
return kbn.toFixedScaled(size / 3600, decimals, scaledDecimals, 4, " hour");
}
// Less than one week, devide in days
else if (Math.abs(size) < 604800) {
return kbn.toFixedScaled(size / 86400, decimals, scaledDecimals, 5, " day");
}
// Less than one year, devide in week
else if (Math.abs(size) < 31536000) {
return kbn.toFixedScaled(size / 604800, decimals, scaledDecimals, 6, " week");
}
return kbn.toFixedScaled(size / 3.15569e7, decimals, scaledDecimals, 7, " year");
};
kbn.valueFormats['µs'] = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 1000) {
return kbn.toFixed(size, decimals) + " µs";
}
else if (Math.abs(size) < 1000000) {
return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, " ms");
}
else {
return kbn.toFixedScaled(size / 1000000, decimals, scaledDecimals, 6, " s");
}
};
kbn.valueFormats.ns = function(size, decimals, scaledDecimals) {
if (size === null) { return ""; }
if (Math.abs(size) < 1000) {
return kbn.toFixed(size, decimals) + " ns";
}
else if (Math.abs(size) < 1000000) {
return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, " µs");
}
else if (Math.abs(size) < 1000000000) {
return kbn.toFixedScaled(size / 1000000, decimals, scaledDecimals, 6, " ms");
}
else if (Math.abs(size) < 60000000000){
return kbn.toFixedScaled(size / 1000000000, decimals, scaledDecimals, 9, " s");
}
else {
return kbn.toFixedScaled(size / 60000000000, decimals, scaledDecimals, 12, " min");
}
};
///// FORMAT MENU /////
kbn.getUnitFormats = function() {
return [
{
text: 'none',
submenu: [
{text: 'none' , value: 'none'},
{text: 'short', value: 'short'},
{text: 'percent', value: 'percent'},
{text: 'ppm', value: 'ppm'},
{text: 'dB', value: 'dB'},
]
},
{
text: 'duration',
submenu: [
{text: 'nanoseconds (ns)' , value: 'ns'},
{text: 'microseconds (µs)', value: 'µs'},
{text: 'milliseconds (ms)', value: 'ms'},
{text: 'seconds (s)', value: 's'},
{text: 'Hertz (1/s)', value: 'hertz'},
]
},
{
text: 'data',
submenu: [
{text: 'bits', value: 'bits'},
{text: 'bytes', value: 'bytes'},
{text: 'kilobytes', value: 'kbytes'},
{text: 'megabytes', value: 'mbytes'},
{text: 'gigabytes', value: 'gbytes'},
]
},
{
text: 'data rate',
submenu: [
{text: 'packets/sec', value: 'pps'},
{text: 'bits/sec', value: 'bps'},
{text: 'bytes/sec', value: 'Bps'},
]
},
{
text: 'energy',
submenu: [
{text: 'watt (W)', value: 'watt'},
{text: 'kilowatt (kW)', value: 'kwatt'},
{text: 'watt-hour (Wh)', value: 'watth'},
{text: 'kilowatt-hour (kWh)', value: 'kwatth'},
{text: 'joule (J)', value: 'joule'},
{text: 'electron volt (eV)', value: 'ev'},
{text: 'Ampere (A)', value: 'amp'},
{text: 'Volt (V)', value: 'volt'},
]
},
{
text: 'weather',
submenu: [
{text: 'Celcius (°C)', value: 'celsius' },
{text: 'Farenheit (°F)', value: 'farenheit'},
{text: 'Humidity (%H)', value: 'humidity' },
{text: 'Pressure (mbar)', value: 'pressurembar' },
{text: 'Pressure (hPa)', value: 'pressurehpa' },
]
},
{
text: 'velocity',
submenu: [
{text: 'm/s', value: 'velocityms' },
{text: 'km/h', value: 'velocitykmh' },
{text: 'mph', value: 'velocitymph' },
{text: 'knot (kn)', value: 'velocityknot' },
]
},
];
};
return kbn;
});
| Add fixedUnit format builder.
| public/app/components/kbn.js | Add fixedUnit format builder. | <ide><path>ublic/app/components/kbn.js
<ide>
<ide> ///// FORMAT FUNCTION CONSTRUCTORS /////
<ide>
<add> kbn.formatBuilders = {};
<add>
<add> // Formatter which always appends a fixed unit string to the value. No
<add> // scaling of the value is performed.
<add> kbn.formatBuilders.fixedUnit = function(unit, separator) {
<add> return function(size, decimals) {
<add> if (size === null) { return ""; }
<add> return kbn.toFixed(size, decimals) + (separator || ' ') + unit;
<add> };
<add> };
<add>
<ide> kbn.formatFuncCreator = function(factor, extArray) {
<ide> return function(size, decimals, scaledDecimals) {
<ide> if (size === null) {
<ide> ///// VALUE FORMATS /////
<ide>
<ide> // Dimensionless Units
<del> kbn.valueFormats.none = kbn.toFixed;
<del> kbn.valueFormats.short = kbn.formatFuncCreator(1000, ['', ' K', ' Mil', ' Bil', ' Tri', ' Quadr', ' Quint', ' Sext', ' Sept']);
<del> kbn.valueFormats.ppm = function(value, decimals) { return kbn.toFixed(value, decimals) + ' ppm'; };
<del>
<del> kbn.valueFormats.percent = function(size, decimals) {
<del> if (size == null) { return ""; }
<del> return kbn.toFixed(size, decimals) + '%';
<del> };
<add> kbn.valueFormats.none = kbn.toFixed;
<add> kbn.valueFormats.short = kbn.formatFuncCreator(1000, ['', ' K', ' Mil', ' Bil', ' Tri', ' Quadr', ' Quint', ' Sext', ' Sept']);
<add> kbn.valueFormats.ppm = kbn.formatBuilders.fixedUnit('ppm');
<add> kbn.valueFormats.percent = kbn.formatBuilders.fixedUnit('%', '');
<ide>
<ide> // Data
<ide> kbn.valueFormats.bits = kbn.formatFuncCreator(1024, [' b', ' Kib', ' Mib', ' Gib', ' Tib', ' Pib', ' Eib', ' Zib', ' Yib']);
<ide> kbn.valueFormats.volt = kbn.formatFuncCreator(1000, [' V', ' kV', ' MV', ' GV', ' TV', ' PV', ' EV', ' ZV', ' YV']);
<ide>
<ide> // Temperature
<del> kbn.valueFormats.celsius = function(value, decimals) { return kbn.toFixed(value, decimals) + ' °C'; };
<del> kbn.valueFormats.farenheit = function(value, decimals) { return kbn.toFixed(value, decimals) + ' °F'; };
<del> kbn.valueFormats.humidity = function(value, decimals) { return kbn.toFixed(value, decimals) + ' %H'; };
<add> kbn.valueFormats.celsius = kbn.formatBuilders.fixedUnit('°C');
<add> kbn.valueFormats.farenheit = kbn.formatBuilders.fixedUnit('°F');
<add> kbn.valueFormats.humidity = kbn.formatBuilders.fixedUnit('%H');
<ide>
<ide> // Pressure
<del> kbn.valueFormats.pressurembar = function(value, decimals) { return kbn.toFixed(value, decimals) + ' mbar'; };
<del> kbn.valueFormats.pressurehpa = function(value, decimals) { return kbn.toFixed(value, decimals) + ' hPa'; };
<add> kbn.valueFormats.pressurembar = kbn.formatBuilders.fixedUnit('mbar');
<add> kbn.valueFormats.pressurehpa = kbn.formatBuilders.fixedUnit('hPa');
<ide>
<ide> // Velocity
<del> kbn.valueFormats.velocityms = function(value, decimals) { return kbn.toFixed(value, decimals) + ' m/s'; };
<del> kbn.valueFormats.velocitykmh = function(value, decimals) { return kbn.toFixed(value, decimals) + ' km/h'; };
<del> kbn.valueFormats.velocitymph = function(value, decimals) { return kbn.toFixed(value, decimals) + ' mph'; };
<del> kbn.valueFormats.velocityknot = function(value, decimals) { return kbn.toFixed(value, decimals) + ' kn'; };
<add> kbn.valueFormats.velocityms = kbn.formatBuilders.fixedUnit('m/s');
<add> kbn.valueFormats.velocitykmh = kbn.formatBuilders.fixedUnit('km/h');
<add> kbn.valueFormats.velocitymph = kbn.formatBuilders.fixedUnit('mph');
<add> kbn.valueFormats.velocityknot = kbn.formatBuilders.fixedUnit('kn');
<ide>
<ide> // Time
<ide> kbn.valueFormats.hertz = kbn.formatFuncCreator(1000, [' Hz', ' kHz', ' MHz', ' GHz', ' THz', ' PHz', ' EHz', ' ZHz', ' YHz']); |
|
JavaScript | isc | dadd060a7cf45ad64b264447aba67b05b50ebfb2 | 0 | bhj/karaoke-forever,bhj/karaoke-forever | const db = require('sqlite')
const squel = require('squel')
const debug = require('debug')
const log = debug('app:library:get')
/**
 * Gets all artists/songs without detailed media info
 * (suitable for pushing out to non-admin users)
 *
 * @return {[object]} Results normalized by artist and song
 */
async function getLibrary () {
  const artists = {
    result: [],
    entities: {}
  }
  const songs = {
    result: [],
    entities: {}
  }

  // First query: artists
  try {
    const q = squel.select()
      .from('artists')
      .order('name')

    // log(q.toString())
    const { text, values } = q.toParam()
    const rows = await db.all(text, values)

    // normalize results
    for (const row of rows) {
      artists.result.push(row.artistId)
      artists.entities[row.artistId] = row
      // prep LUT for songIds
      artists.entities[row.artistId].songIds = []
    }
  } catch (err) {
    log(err.message)
    return Promise.reject(err)
  }

  // Second query: media/songs
  //
  // This could probably be improved but writing a single query that properly
  // returns the right media item while handling enabled/disabled providers,
  // provider priority, per-song preferred media (with fallback if preferred
  // media's provider is disabled), etc. proved difficult. At the very least
  // this query avoids subqueries and should be relatively performant.
  try {
    const q = squel.select()
      .field('media.mediaId, media.songId, media.duration, media.provider, media.isPreferred')
      .field('songs.artistId, songs.title')
      .field('COUNT(stars.userId) AS numStars')
      .from('media')
      .join('providers ON (providers.name = media.provider)')
      .join('songs USING(songId)')
      .left_join('stars USING(songId)')
      .where('providers.isEnabled = 1')
      .group('mediaId')
      .order('providers.priority')
      .order('songs.title')

    const { text, values } = q.toParam()
    const rows = await db.all(text, values)

    // normalize and process results
    for (const row of rows) {
      const cur = songs.entities[row.songId]

      if (typeof cur === 'undefined') {
        // first media item seen for this song; add songId to artist's LUT
        artists.entities[row.artistId].songIds.push(row.songId)

        songs.result.push(row.songId)
        row.numMedia = 1
        songs.entities[row.songId] = row
      } else {
        // The query returns media ordered by provider priority, so we only
        // overwrite an existing entry if this media item's isPreferred flag
        // is set (carrying over the running media count).
        // (Fixed: the flag was previously misspelled 'isPreffered', so the
        // overwrite never happened; the overwrite also used to reset numMedia.)
        if (row.isPreferred === 1) {
          row.numMedia = cur.numMedia
          songs.entities[row.songId] = row
        }

        // count this additional media item for the song
        // (previously the very first item was counted twice)
        ++songs.entities[row.songId].numMedia
      }
    }
  } catch (err) {
    log(err.message)
    return Promise.reject(err)
  }

  log('%s songs by %s artists', songs.result.length, artists.result.length)
  return { artists, songs }
}
module.exports = getLibrary
| server/lib/getLibrary.js | const db = require('sqlite')
const squel = require('squel')
const debug = require('debug')
const log = debug('app:library:get')
/**
 * Gets artists and songs matching the given criteria, normalized as
 * `{ result: [ids], entities: { id: row } }` for each of the two.
 *
 * @param  {object}  find         column/value filters; `artistId` is handled
 *                                specially, and a nested `providerData` object
 *                                is matched per-key via SQLite json_extract
 *                                against the providerData column
 * @param  {boolean} providerData when true, include each song's providerData
 *                                (parsed from its stored JSON); off by default
 *                                since it requires extra processing
 * @return {object}  `{ artists, songs }` (rejects with the db error on failure)
 */
async function getLibrary (find = {}, providerData = false) {
  let artists = {
    result: [],
    entities: {}
  }
  let songs = {
    result: [],
    entities: {}
  }

  // first query: songs
  try {
    const q = squel.select()
      .field('songId, artistId, title, duration, provider')
      .field('COUNT(stars.userId) AS stars')
      .from('songs')
      .left_join('stars USING(songId)')
      .group('songId')
      .order('title')

    // off by default since it requires extra processing
    if (providerData) {
      q.field('providerData')
    }

    // artistId filter
    if (typeof find.artistId !== 'undefined') {
      q.where('artistId = ?', find.artistId)
    }

    // other filters (each remaining key becomes an equality WHERE clause;
    // providerData sub-keys are matched inside the JSON column)
    Object.keys(find).map(key => {
      if (key === 'providerData' && typeof find.providerData === 'object') {
        Object.keys(find.providerData).map(i => {
          q.where(`json_extract(providerData, '$.${i}') = ?`, find.providerData[i])
        })
      } else if (key !== 'artistId') {
        q.where(`${key} = ?`, find[key])
      }
    })

    // log(q.toString())
    const { text, values } = q.toParam()
    const rows = await db.all(text, values)

    // normalize results
    rows.forEach(function (row) {
      if (providerData) {
        row.providerData = JSON.parse(row.providerData)
      }

      songs.result.push(row.songId)
      songs.entities[row.songId] = row

      // used in library view as parent/child LUT
      if (typeof artists.entities[row.artistId] === 'undefined') {
        artists.entities[row.artistId] = {
          songIds: [],
        }
      }

      artists.entities[row.artistId].songIds.push(row.songId)
    })
  } catch (err) {
    log(err.message)
    return Promise.reject(err)
  }

  // second query: artists
  try {
    const q = squel.select()
      .from('artists')
      .order('name')

    // artistId filter (deleted afterwards so the check below only sees
    // the remaining, non-artistId filters)
    if (typeof find.artistId !== 'undefined') {
      q.where('artistId = ?', find.artistId)
      delete find.artistId
    }

    // if other filters are present we'll need to
    // only include songs returned in first query
    if (Object.keys(find).length) {
      q.where('artistId IN ?', songs.result.map(songId => songs.entities[songId].artistId))
    }

    // log(q.toString())
    const { text, values } = q.toParam()
    const rows = await db.all(text, values)

    // normalize results
    rows.forEach(function (row) {
      artists.result.push(row.artistId)
      // merge with the LUT from first query
      artists.entities[row.artistId] = Object.assign(artists.entities[row.artistId], row)
    })
  } catch (err) {
    log(err.message)
    return Promise.reject(err)
  }

  log('%s songs by %s artists', songs.result.length, artists.result.length)
  return { artists, songs }
}
module.exports = getLibrary
| Initial rewrite of getLibrary for the new media table
| server/lib/getLibrary.js | Initial rewrite of getLibrary for the new media table | <ide><path>erver/lib/getLibrary.js
<ide> const debug = require('debug')
<ide> const log = debug('app:library:get')
<ide>
<del>async function getLibrary (find = {}, providerData = false) {
<del> let artists = {
<add>/**
<add> * Gets all artists/songs without detailed media info
<add> * (suitable for pushing out to non-admin users)
<add> *
<add> * @return {[object]} Results normalized by artist and song
<add> */
<add>async function getLibrary () {
<add> const artists = {
<ide> result: [],
<ide> entities: {}
<ide> }
<del> let songs = {
<add> const songs = {
<ide> result: [],
<ide> entities: {}
<ide> }
<ide>
<del> // first query: songs
<add> // First query: artists
<ide> try {
<ide> const q = squel.select()
<del> .field('songId, artistId, title, duration, provider')
<del> .field('COUNT(stars.userId) AS stars')
<del> .from('songs')
<del> .left_join('stars USING(songId)')
<del> .group('songId')
<del> .order('title')
<del>
<del> // off by default since it requires extra processing
<del> if (providerData) {
<del> q.field('providerData')
<del> }
<del>
<del> // artistId filter
<del> if (typeof find.artistId !== 'undefined') {
<del> q.where('artistId = ?', find.artistId)
<del> }
<del>
<del> // other filters
<del> Object.keys(find).map(key => {
<del> if (key === 'providerData' && typeof find.providerData === 'object') {
<del> Object.keys(find.providerData).map(i => {
<del> q.where(`json_extract(providerData, '$.${i}') = ?`, find.providerData[i])
<del> })
<del> } else if (key !== 'artistId') {
<del> q.where(`${key} = ?`, find[key])
<del> }
<del> })
<add> .from('artists')
<add> .order('name')
<ide>
<ide> // log(q.toString())
<ide> const { text, values } = q.toParam()
<ide> const rows = await db.all(text, values)
<ide>
<ide> // normalize results
<del> rows.forEach(function (row) {
<del> if (providerData) {
<del> row.providerData = JSON.parse(row.providerData)
<del> }
<del>
<del> songs.result.push(row.songId)
<del> songs.entities[row.songId] = row
<del>
<del> // used in library view as parent/child LUT
<del> if (typeof artists.entities[row.artistId] === 'undefined') {
<del> artists.entities[row.artistId] = {
<del> songIds: [],
<del> }
<del> }
<del>
<del> artists.entities[row.artistId].songIds.push(row.songId)
<del> })
<add> for (const row of rows) {
<add> artists.result.push(row.artistId)
<add> artists.entities[row.artistId] = row
<add> // prep LUT for songIds
<add> artists.entities[row.artistId].songIds = []
<add> }
<ide> } catch (err) {
<ide> log(err.message)
<ide> return Promise.reject(err)
<ide> }
<ide>
<del> // second query: artists
<add> // Second query: media/songs
<add> //
<add> // This could probably be improved but writing a single query that properly
<add> // returns the right media item while handling enabled/disabled providers,
<add> // provider priority, per-song preferred media (with fallback if preferred
<add> // media's provider is disabled), etc. proved difficult. At the very least
<add> // this query avoids subqueries and should be relatively performant.
<ide> try {
<ide> const q = squel.select()
<del> .from('artists')
<del> .order('name')
<add> .field('media.mediaId, media.songId, media.duration, media.provider, media.isPreferred')
<add> .field('songs.artistId, songs.title')
<add> .field('COUNT(stars.userId) AS numStars')
<add> .from('media')
<add> .join('providers ON (providers.name = media.provider)')
<add> .join('songs USING(songId)')
<add> .left_join('stars USING(songId)')
<add> .where('providers.isEnabled = 1')
<add> .group('mediaId')
<add> .order('providers.priority')
<add> .order('songs.title')
<ide>
<del> // artistId filter
<del> if (typeof find.artistId !== 'undefined') {
<del> q.where('artistId = ?', find.artistId)
<del> delete find.artistId
<del> }
<del>
<del> // if other filters are present we'll need to
<del> // only include songs returned in first query
<del> if (Object.keys(find).length) {
<del> q.where('artistId IN ?', songs.result.map(songId => songs.entities[songId].artistId))
<del> }
<del>
<del> // log(q.toString())
<ide> const { text, values } = q.toParam()
<ide> const rows = await db.all(text, values)
<ide>
<del> // normalize results
<del> rows.forEach(function (row) {
<del> artists.result.push(row.artistId)
<del> // merge with the LUT from first query
<del> artists.entities[row.artistId] = Object.assign(artists.entities[row.artistId], row)
<del> })
<add> // normalize and process results
<add> for (const row of rows) {
<add> if (typeof songs.entities[row.songId] === 'undefined') {
<add> // add songId to artist's LUT
<add> artists.entities[row.artistId].songIds.push(row.songId)
<add>
<add> songs.result.push(row.songId)
<add> songs.entities[row.songId] = row
<add> songs.entities[row.songId].numMedia = 1
<add> } else if (row.isPreffered === 1) {
<add> // The previous query should have returned media ordered by the
<add> // correct provider priority, so we only want to overwrite an
<add> // exisiting entry if this media's isPreferred flag is set.
<add> songs.entities[row.songId] = row
<add> }
<add>
<add> // increment song's media item count
<add> ++songs.entities[row.songId].numMedia
<add> }
<ide> } catch (err) {
<ide> log(err.message)
<ide> return Promise.reject(err) |
|
Java | apache-2.0 | 15fb9c2d495f9c1f5f21bdd02aca815800f230a2 | 0 | hortonworks/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak | package com.sequenceiq.cloudbreak.service.stack.connector.gcc;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.google.api.services.compute.model.Operation;
import com.sequenceiq.cloudbreak.logger.MDCBuilder;
import com.sequenceiq.cloudbreak.service.StatusCheckerTask;
@Component
public class GccResourceCheckerStatus implements StatusCheckerTask<GccResourceReadyPollerObject> {

    private static final Logger LOGGER = LoggerFactory.getLogger(GccResourceCheckerStatus.class);

    // GCC zone operations report progress as a percentage; 100 means done.
    private static final int FINISHED = 100;

    /**
     * Polls the GCC zone operation tracked by the poller object.
     *
     * @return true once the operation reports 100% progress
     * @throws GccResourceCreationException if the operation reports an HTTP error,
     *         or if the status request itself fails
     */
    @Override
    public boolean checkStatus(GccResourceReadyPollerObject gccResourceReadyPollerObject) {
        MDCBuilder.buildMdcContext(gccResourceReadyPollerObject.getStack());
        LOGGER.info("Checking status of Gcc resource '{}'.", gccResourceReadyPollerObject.getName());
        try {
            Operation operation = gccResourceReadyPollerObject.getZoneOperations().execute();
            return analyzeOperation(operation, gccResourceReadyPollerObject);
        } catch (IOException e) {
            // Use the exception's message here: when execute() itself throws there is
            // no Operation to read from (the previous code dereferenced null here).
            throw new GccResourceCreationException(String.format(
                    "Something went wrong. Resource in Gcc '%s' with '%s' operation failed on '%s' stack with %s message.",
                    gccResourceReadyPollerObject.getName(),
                    gccResourceReadyPollerObject.getOperationName(),
                    gccResourceReadyPollerObject.getStack().getId(),
                    e.getMessage()));
        }
    }

    /** Signals a polling timeout by raising a {@link GccResourceCreationException}. */
    @Override
    public void handleTimeout(GccResourceReadyPollerObject gccResourceReadyPollerObject) {
        throw new GccResourceCreationException(String.format(
                "Something went wrong. Resource in Gcc '%s' with '%s' operation not started in a reasonable timeframe on '%s' stack.",
                gccResourceReadyPollerObject.getName(), gccResourceReadyPollerObject.getOperationName(), gccResourceReadyPollerObject.getStack().getId()));
    }

    @Override
    public String successMessage(GccResourceReadyPollerObject gccResourceReadyPollerObject) {
        MDCBuilder.buildMdcContext(gccResourceReadyPollerObject.getStack());
        return String.format("Gcc resource '%s' is ready on '%s' stack",
                gccResourceReadyPollerObject.getName(), gccResourceReadyPollerObject.getStack().getId());
    }

    /**
     * Inspects a completed status poll: fails fast with a detailed, per-error message
     * when the operation carries an HTTP error, otherwise reports completion by progress.
     */
    private boolean analyzeOperation(Operation operation, GccResourceReadyPollerObject gccResourceReadyPollerObject) {
        MDCBuilder.buildMdcContext(gccResourceReadyPollerObject.getStack());
        if (operation.getHttpErrorStatusCode() == null) {
            return operation.getProgress().intValue() == FINISHED;
        }
        StringBuilder error = new StringBuilder();
        // iterating an empty error list is harmless, so no explicit size check is needed
        if (operation.getError() != null && operation.getError().getErrors() != null) {
            for (Operation.Error.Errors errors : operation.getError().getErrors()) {
                error.append(String.format("code: %s -> message: %s %s", errors.getCode(), errors.getMessage(), System.lineSeparator()));
            }
        }
        throw new GccResourceCreationException(String.format(
                "Something went wrong. Resource in Gcc '%s' with '%s' operation failed on '%s' stack with %s message: %s",
                gccResourceReadyPollerObject.getName(),
                gccResourceReadyPollerObject.getOperationName(),
                gccResourceReadyPollerObject.getStack().getId(),
                operation.getHttpErrorMessage(),
                error.toString()));
    }
}
| src/main/java/com/sequenceiq/cloudbreak/service/stack/connector/gcc/GccResourceCheckerStatus.java | package com.sequenceiq.cloudbreak.service.stack.connector.gcc;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.google.api.services.compute.model.Operation;
import com.sequenceiq.cloudbreak.logger.MDCBuilder;
import com.sequenceiq.cloudbreak.service.StatusCheckerTask;
@Component
public class GccResourceCheckerStatus implements StatusCheckerTask<GccResourceReadyPollerObject> {

    private static final Logger LOGGER = LoggerFactory.getLogger(GccResourceCheckerStatus.class);

    // GCC zone operations report progress as a percentage; 100 means done.
    private static final int FINISHED = 100;

    /**
     * Polls the GCC zone operation tracked by the poller object.
     *
     * @return true once the operation reports 100% progress
     * @throws GccResourceCreationException if the operation reports an HTTP error,
     *         or if the status request itself fails
     */
    @Override
    public boolean checkStatus(GccResourceReadyPollerObject gccResourceReadyPollerObject) {
        MDCBuilder.buildMdcContext(gccResourceReadyPollerObject.getStack());
        LOGGER.info("Checking status of Gcc resource '{}'.", gccResourceReadyPollerObject.getName());
        try {
            Operation execute = gccResourceReadyPollerObject.getZoneOperations().execute();
            if (execute.getHttpErrorStatusCode() != null) {
                throw new GccResourceCreationException(String.format(
                        "Something went wrong. Resource in Gcc '%s' with '%s' operation failed on '%s' stack with %s message.",
                        gccResourceReadyPollerObject.getName(),
                        gccResourceReadyPollerObject.getOperationName(),
                        gccResourceReadyPollerObject.getStack().getId(),
                        execute.getHttpErrorMessage()));
            }
            return execute.getProgress().intValue() == FINISHED;
        } catch (IOException e) {
            // Use the exception's message here: when execute() itself throws there is
            // no Operation to read from (the previous code dereferenced null here).
            throw new GccResourceCreationException(String.format(
                    "Something went wrong. Resource in Gcc '%s' with '%s' operation failed on '%s' stack with %s message.",
                    gccResourceReadyPollerObject.getName(),
                    gccResourceReadyPollerObject.getOperationName(),
                    gccResourceReadyPollerObject.getStack().getId(),
                    e.getMessage()));
        }
    }

    /** Signals a polling timeout by raising a {@link GccResourceCreationException}. */
    @Override
    public void handleTimeout(GccResourceReadyPollerObject gccResourceReadyPollerObject) {
        throw new GccResourceCreationException(String.format(
                "Something went wrong. Resource in Gcc '%s' with '%s' operation not started in a reasonable timeframe on '%s' stack.",
                gccResourceReadyPollerObject.getName(), gccResourceReadyPollerObject.getOperationName(), gccResourceReadyPollerObject.getStack().getId()));
    }

    @Override
    public String successMessage(GccResourceReadyPollerObject gccResourceReadyPollerObject) {
        MDCBuilder.buildMdcContext(gccResourceReadyPollerObject.getStack());
        return String.format("Gcc resource '%s' is ready on '%s' stack",
                gccResourceReadyPollerObject.getName(), gccResourceReadyPollerObject.getStack().getId());
    }
}
| CLOUD-306 fixed opertaion analyze
| src/main/java/com/sequenceiq/cloudbreak/service/stack/connector/gcc/GccResourceCheckerStatus.java | CLOUD-306 fixed opertaion analyze | <ide><path>rc/main/java/com/sequenceiq/cloudbreak/service/stack/connector/gcc/GccResourceCheckerStatus.java
<ide> Operation execute = null;
<ide> try {
<ide> execute = gccResourceReadyPollerObject.getZoneOperations().execute();
<del>
<del> if (execute.getHttpErrorStatusCode() != null) {
<del> throw new GccResourceCreationException(String.format(
<del> "Something went wrong. Resource in Gcc '%s' with '%s' operation failed on '%s' stack with %s message.",
<del> gccResourceReadyPollerObject.getName(),
<del> gccResourceReadyPollerObject.getOperationName(),
<del> gccResourceReadyPollerObject.getStack().getId(),
<del> execute.getHttpErrorMessage()));
<del> } else {
<del> Integer progress = execute.getProgress();
<del> return (progress.intValue() != FINISHED) ? false : true;
<del> }
<add> return analyzeOperation(execute, gccResourceReadyPollerObject);
<ide> } catch (IOException e) {
<ide> throw new GccResourceCreationException(String.format(
<ide> "Something went wrong. Resource in Gcc '%s' with '%s' operation failed on '%s' stack with %s message.",
<ide> return String.format("Gcc resource '%s' is ready on '%s' stack",
<ide> gccResourceReadyPollerObject.getName(), gccResourceReadyPollerObject.getStack().getId());
<ide> }
<add>
<add>
<add> private boolean analyzeOperation(Operation operation, GccResourceReadyPollerObject gccResourceReadyPollerObject) {
<add> MDCBuilder.buildMdcContext(gccResourceReadyPollerObject.getStack());
<add> if (operation.getHttpErrorStatusCode() != null) {
<add> StringBuilder error = new StringBuilder();
<add> if (operation.getError() != null) {
<add> if (operation.getError().getErrors() != null && operation.getError().getErrors().size() > 0) {
<add> for (Operation.Error.Errors errors : operation.getError().getErrors()) {
<add> error.append(String.format("code: %s -> message: %s %s", errors.getCode(), errors.getMessage(), System.lineSeparator()));
<add> }
<add> }
<add> }
<add> throw new GccResourceCreationException(String.format(
<add> "Something went wrong. Resource in Gcc '%s' with '%s' operation failed on '%s' stack with %s message: %s",
<add> gccResourceReadyPollerObject.getName(),
<add> gccResourceReadyPollerObject.getOperationName(),
<add> gccResourceReadyPollerObject.getStack().getId(),
<add> operation.getHttpErrorMessage(),
<add> error.toString()));
<add> } else {
<add> Integer progress = operation.getProgress();
<add> return (progress.intValue() != FINISHED) ? false : true;
<add> }
<add>
<add> }
<ide> } |
|
Java | apache-2.0 | 1dace9037fa38babe3048bc05669951a654b08be | 0 | digipost/digg | /**
* Copyright (C) Posten Norge AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.digipost.jdbc;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.Optional;

import static java.util.Optional.empty;
import static java.util.Optional.ofNullable;
/**
 * Various predefined mappers used to extract result(s) from a {@link ResultSet}.
 * <p>
 * The {@code getNullableXxx} variants yield an empty {@link Optional} when the
 * read column was SQL {@code NULL} instead of the driver's zero/null default.
 *
 * @see ColumnMapper
 * @see RowMapper
 */
public final class Mappers {

    /** @see ResultSet#getBoolean(String) */
    public static final BasicColumnMapper<Boolean> getBoolean = (name, rs) -> rs.getBoolean(name);

    /** @see ResultSet#getBoolean(String) */
    public static final NullableColumnMapper<Boolean> getNullableBoolean = (name, rs) -> nullableOf(rs.getBoolean(name), rs);

    /** @see ResultSet#getByte(String) */
    public static final BasicColumnMapper<Byte> getByte = (name, rs) -> rs.getByte(name);

    /** @see ResultSet#getByte(String) */
    public static final NullableColumnMapper<Byte> getNullableByte = (name, rs) -> nullableOf(rs.getByte(name), rs);

    /** @see ResultSet#getInt(String) */
    public static final BasicColumnMapper<Integer> getInt = (name, rs) -> rs.getInt(name);

    /** @see ResultSet#getInt(String) */
    public static final NullableColumnMapper<Integer> getNullableInt = (name, rs) -> nullableOf(rs.getInt(name), rs);

    /** @see ResultSet#getLong(String) */
    public static final BasicColumnMapper<Long> getLong = (name, rs) -> rs.getLong(name);

    /** @see ResultSet#getLong(String) */
    public static final NullableColumnMapper<Long> getNullableLong = (name, rs) -> nullableOf(rs.getLong(name), rs);

    /** @see ResultSet#getFloat(String) */
    public static final BasicColumnMapper<Float> getFloat = (name, rs) -> rs.getFloat(name);

    /** @see ResultSet#getFloat(String) */
    public static final NullableColumnMapper<Float> getNullableFloat = (name, rs) -> nullableOf(rs.getFloat(name), rs);

    /** @see ResultSet#getDouble(String) */
    public static final BasicColumnMapper<Double> getDouble = (name, rs) -> rs.getDouble(name);

    /** @see ResultSet#getDouble(String) */
    public static final NullableColumnMapper<Double> getNullableDouble = (name, rs) -> nullableOf(rs.getDouble(name), rs);

    /** @see ResultSet#getBigDecimal(String) */
    public static final BasicColumnMapper<BigDecimal> getBigDecimal = (name, rs) -> rs.getBigDecimal(name);

    /** @see ResultSet#getBigDecimal(String) */
    public static final NullableColumnMapper<BigDecimal> getNullableBigDecimal = (name, rs) -> ofNullable(rs.getBigDecimal(name));

    /** @see ResultSet#getBytes(String) */
    public static final BasicColumnMapper<byte[]> getBytes = (name, rs) -> rs.getBytes(name);

    /** @see ResultSet#getBytes(String) */
    public static final NullableColumnMapper<byte[]> getNullableBytes = (name, rs) -> ofNullable(rs.getBytes(name));

    /** @see ResultSet#getString(String) */
    public static final BasicColumnMapper<String> getString = (name, rs) -> rs.getString(name);

    /** @see ResultSet#getString(String) */
    public static final NullableColumnMapper<String> getNullableString = (name, rs) -> ofNullable(rs.getString(name));

    /** @see ResultSet#getURL(String) */
    public static final BasicColumnMapper<URL> getURL = (name, rs) -> rs.getURL(name);

    /** @see ResultSet#getURL(String) */
    public static final NullableColumnMapper<URL> getNullableURL = (name, rs) -> ofNullable(rs.getURL(name));

    /** @see ResultSet#getDate(String) */
    public static final BasicColumnMapper<Date> getDate = (name, rs) -> rs.getDate(name);

    /** @see ResultSet#getDate(String) */
    public static final NullableColumnMapper<Date> getNullableDate = (name, rs) -> ofNullable(rs.getDate(name));

    /** @see ResultSet#getTimestamp(String) */
    public static final BasicColumnMapper<Timestamp> getTimestamp = (name, rs) -> rs.getTimestamp(name);

    /** @see ResultSet#getTimestamp(String) */
    public static final NullableColumnMapper<Timestamp> getNullableTimestamp = (name, rs) -> ofNullable(rs.getTimestamp(name));

    /**
     * Combination of {@link #getTimestamp} and a conversion to an {@link Instant} using {@link Timestamp#toInstant()}.
     */
    public static final BasicColumnMapper<Instant> getInstant = getTimestamp.andThen(Timestamp::toInstant);

    /**
     * Combination of {@link #getNullableTimestamp} and a conversion to an {@link Instant}
     * using {@link Timestamp#toInstant()}.
     */
    public static final NullableColumnMapper<Instant> getNullableInstant = getNullableTimestamp.andThen(Timestamp::toInstant);

    /** @see ResultSet#getAsciiStream(String) */
    public static final BasicColumnMapper<InputStream> getAsciiStream = (name, rs) -> rs.getAsciiStream(name);

    /** @see ResultSet#getAsciiStream(String) */
    public static final NullableColumnMapper<InputStream> getNullableAsciiStream = (name, rs) -> ofNullable(rs.getAsciiStream(name));

    /** @see ResultSet#getBinaryStream(String) */
    public static final BasicColumnMapper<InputStream> getBinaryStream = (name, rs) -> rs.getBinaryStream(name);

    /** @see ResultSet#getBinaryStream(String) */
    public static final NullableColumnMapper<InputStream> getNullableBinaryStream = (name, rs) -> ofNullable(rs.getBinaryStream(name));

    /** @see ResultSet#getCharacterStream(String) */
    public static final BasicColumnMapper<Reader> getCharacterStream = (name, rs) -> rs.getCharacterStream(name);

    /** @see ResultSet#getCharacterStream(String) */
    public static final NullableColumnMapper<Reader> getNullableCharacterStream = (name, rs) -> ofNullable(rs.getCharacterStream(name));

    /**
     * Wraps a value just read from the given {@link ResultSet} in an {@link Optional},
     * which is empty if the read column was SQL {@code NULL}. Must be invoked immediately
     * after reading the column (before any other column access), since it consults
     * {@link ResultSet#wasNull()} which only reflects the <em>last</em> column read.
     */
    private static <T> Optional<T> nullableOf(T value, ResultSet rs) throws SQLException {
        if (rs.wasNull()) {
            return empty();
        }
        return Optional.of(value);
    }

    private Mappers() {}
}
| src/main/java/no/digipost/jdbc/Mappers.java | /**
* Copyright (C) Posten Norge AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.digipost.jdbc;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.time.Instant;
import static java.util.Optional.ofNullable;
/**
* Various predefined mappers used to extract result(s) from a {@link ResultSet}.
*
* @see ColumnMapper
* @see RowMapper
*/
public final class Mappers {
/** @see ResultSet#getBoolean(String) */
public static final BasicColumnMapper<Boolean> getBoolean = (name, rs) -> rs.getBoolean(name);
/** @see ResultSet#getByte(String) */
public static final BasicColumnMapper<Byte> getByte = (name, rs) -> rs.getByte(name);
/** @see ResultSet#getInt(String) */
public static final BasicColumnMapper<Integer> getInt = (name, rs) -> rs.getInt(name);
/** @see ResultSet#getLong(String) */
public static final BasicColumnMapper<Long> getLong = (name, rs) -> rs.getLong(name);
/** @see ResultSet#getFloat(String) */
public static final BasicColumnMapper<Float> getFloat = (name, rs) -> rs.getFloat(name);
/** @see ResultSet#getDouble(String) */
public static final BasicColumnMapper<Double> getDouble = (name, rs) -> rs.getDouble(name);
/** @see ResultSet#getBigDecimal(String) */
public static final BasicColumnMapper<BigDecimal> getBigDecimal = (name, rs) -> rs.getBigDecimal(name);
/** @see ResultSet#getBigDecimal(String) */
public static final NullableColumnMapper<BigDecimal> getNullableBigDecimal = (name, rs) -> ofNullable(rs.getBigDecimal(name));
/** @see ResultSet#getBytes(String) */
public static final BasicColumnMapper<byte[]> getBytes = (name, rs) -> rs.getBytes(name);
/** @see ResultSet#getBytes(String) */
public static final NullableColumnMapper<byte[]> getNullableBytes = (name, rs) -> ofNullable(rs.getBytes(name));
/** @see ResultSet#getString(String) */
public static final BasicColumnMapper<String> getString = (name, rs) -> rs.getString(name);
/** @see ResultSet#getString(String) */
public static final NullableColumnMapper<String> getNullableString = (name, rs) -> ofNullable(rs.getString(name));
/** @see ResultSet#getURL(String) */
public static final BasicColumnMapper<URL> getURL = (name, rs) -> rs.getURL(name);
/** @see ResultSet#getURL(String) */
public static final NullableColumnMapper<URL> getNullableURL = (name, rs) -> ofNullable(rs.getURL(name));
/** @see ResultSet#getDate(String) */
public static final BasicColumnMapper<Date> getDate = (name, rs) -> rs.getDate(name);
/** @see ResultSet#getDate(String) */
public static final NullableColumnMapper<Date> getNullableDate = (name, rs) -> ofNullable(rs.getDate(name));
/** @see ResultSet#getTimestamp(String) */
public static final BasicColumnMapper<Timestamp> getTimestamp = (name, rs) -> rs.getTimestamp(name);
/** @see ResultSet#getTimestamp(String) */
public static final NullableColumnMapper<Timestamp> getNullableTimestamp = (name, rs) -> ofNullable(rs.getTimestamp(name));
/**
* Combination of {@link #getTimestamp} and a conversion to an {@link Instant} using {@link Timestamp#toInstant()}.
*/
public static final BasicColumnMapper<Instant> getInstant = getTimestamp.andThen(Timestamp::toInstant);
/**
* Combination of {@link #getNullableTimestamp} and a conversion to an {@link Instant}
* using {@link Timestamp#toInstant()}.
*/
public static final NullableColumnMapper<Instant> getNullableInstant = getNullableTimestamp.andThen(Timestamp::toInstant);
/** @see ResultSet#getAsciiStream(String) */
public static final BasicColumnMapper<InputStream> getAsciiStream = (name, rs) -> rs.getAsciiStream(name);
/** @see ResultSet#getAsciiStream(String) */
public static final NullableColumnMapper<InputStream> getNullableAsciiStream = (name, rs) -> ofNullable(rs.getAsciiStream(name));
/** @see ResultSet#getBinaryStream(String) */
public static final BasicColumnMapper<InputStream> getBinaryStream = (name, rs) -> rs.getBinaryStream(name);
/** @see ResultSet#getBinaryStream(String) */
public static final NullableColumnMapper<InputStream> getNullableBinaryStream = (name, rs) -> ofNullable(rs.getBinaryStream(name));
/** @see ResultSet#getCharacterStream(String) */
public static final BasicColumnMapper<Reader> getCharacterStream = (name, rs) -> rs.getCharacterStream(name);
/** @see ResultSet#getCharacterStream(String) */
public static final NullableColumnMapper<Reader> getNullableCharacterStream = (name, rs) -> ofNullable(rs.getCharacterStream(name));
private Mappers() {}
}
| Adds mappers for nullable (`Optional`) primitive types
| src/main/java/no/digipost/jdbc/Mappers.java | Adds mappers for nullable (`Optional`) primitive types | <ide><path>rc/main/java/no/digipost/jdbc/Mappers.java
<ide> import java.sql.ResultSet;
<ide> import java.sql.Timestamp;
<ide> import java.time.Instant;
<add>import java.util.Optional;
<ide>
<add>import static java.util.Optional.empty;
<ide> import static java.util.Optional.ofNullable;
<ide>
<ide> /**
<ide> /** @see ResultSet#getBoolean(String) */
<ide> public static final BasicColumnMapper<Boolean> getBoolean = (name, rs) -> rs.getBoolean(name);
<ide>
<add> /** @see ResultSet#getBoolean(String) */
<add> public static final NullableColumnMapper<Boolean> getNullableBoolean = (name, rs) -> {
<add> boolean value = rs.getBoolean(name);
<add> if (rs.wasNull()) {
<add> return empty();
<add> } else {
<add> return Optional.of(value);
<add> }
<add> };
<add>
<ide> /** @see ResultSet#getByte(String) */
<ide> public static final BasicColumnMapper<Byte> getByte = (name, rs) -> rs.getByte(name);
<add>
<add> /** @see ResultSet#getByte(String) */
<add> public static final NullableColumnMapper<Byte> getNullableByte = (name, rs) -> {
<add> byte value = rs.getByte(name);
<add> if (rs.wasNull()) {
<add> return empty();
<add> } else {
<add> return Optional.of(value);
<add> }
<add> };
<ide>
<ide> /** @see ResultSet#getInt(String) */
<ide> public static final BasicColumnMapper<Integer> getInt = (name, rs) -> rs.getInt(name);
<ide>
<add> /** @see ResultSet#getInt(String) */
<add> public static final NullableColumnMapper<Integer> getNullableInt = (name, rs) -> {
<add> int value = rs.getInt(name);
<add> if (rs.wasNull()) {
<add> return empty();
<add> } else {
<add> return Optional.of(value);
<add> }
<add> };
<add>
<ide> /** @see ResultSet#getLong(String) */
<ide> public static final BasicColumnMapper<Long> getLong = (name, rs) -> rs.getLong(name);
<add>
<add> /** @see ResultSet#getLong(String) */
<add> public static final NullableColumnMapper<Long> getNullableLong = (name, rs) -> {
<add> long value = rs.getLong(name);
<add> if (rs.wasNull()) {
<add> return empty();
<add> } else {
<add> return Optional.of(value);
<add> }
<add> };
<ide>
<ide> /** @see ResultSet#getFloat(String) */
<ide> public static final BasicColumnMapper<Float> getFloat = (name, rs) -> rs.getFloat(name);
<ide>
<add> /** @see ResultSet#getFloat(String) */
<add> public static final NullableColumnMapper<Float> getNullableFloat = (name, rs) -> {
<add> float value = rs.getFloat(name);
<add> if (rs.wasNull()) {
<add> return empty();
<add> } else {
<add> return Optional.of(value);
<add> }
<add> };
<add>
<ide> /** @see ResultSet#getDouble(String) */
<ide> public static final BasicColumnMapper<Double> getDouble = (name, rs) -> rs.getDouble(name);
<add>
<add> /** @see ResultSet#getDouble(String) */
<add> public static final NullableColumnMapper<Double> getNullableDouble = (name, rs) -> {
<add> double value = rs.getDouble(name);
<add> if (rs.wasNull()) {
<add> return empty();
<add> } else {
<add> return Optional.of(value);
<add> }
<add> };
<ide>
<ide>
<ide> /** @see ResultSet#getBigDecimal(String) */ |
|
JavaScript | epl-1.0 | 7dcd6a4cdbdb1c3d93cd46c5ca4a143b4312ce30 | 0 | codenvy/codenvy,R-Brain/codenvy,codenvy/codenvy,codenvy/codenvy,codenvy/codenvy,codenvy/codenvy,R-Brain/codenvy,R-Brain/codenvy,R-Brain/codenvy,R-Brain/codenvy,codenvy/codenvy,R-Brain/codenvy | /*
* CODENVY CONFIDENTIAL
* __________________
*
* [2012] - [2013] Codenvy, S.A.
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Codenvy S.A. and its suppliers,
* if any. The intellectual and technical concepts contained
* herein are proprietary to Codenvy S.A.
* and its suppliers and may be covered by U.S. and Foreign Patents,
* patents in process, and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Codenvy S.A..
*/
(function(window){
var _gaq = _gaq || [];
define(["jquery","json", "models/tenant","models/profile","cookies"],function($,JSON,Tenant,Profile){
/*
AccountError is used to report errors through error callback function
(see details below ). Example usage:
new AccountError("password","Your password is too short")
*/
var userProfile = userProfile || {}; // user Profile to store user's data from server
var showSupportLink = function(isPaid){
if (isPaid){
var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true;
uv.src = ('https:' === document.location.protocol ? 'https://' : 'http://') + 'widget.uservoice.com/wfZmoiHoOptcKkBgu238zw.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s);
}else {
var el = $("footer").find("ul");
el.append('<li><a class="footer-link" href="http://helpdesk.codenvy.com">Feedback & support<b></b></a></li>');
}
};
var AccountError = function(fieldName, errorDescription){
return {
getFieldName : function(){
return fieldName;
},
getErrorDescription : function(){
return errorDescription;
}
};
};
var isBadGateway = function(jqXHR){
return jqXHR.status === 502;
};
var getQueryParameterByName = function(name){
name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
var regex = new RegExp("[\\?&]" + name + "=([^&#]*)");
var results = regex.exec(window.location.search);
if(results){
return decodeURIComponent(results[1].replace(/\+/g, " "));
}
};
/*START paid support tab*/
/*global ActiveXObject: false */
// Verify subscriptions for Organization
function checkSubscriptionFor(orgId) {
var request;
var plansArray = ["PremiumWorkspace", "TrackedFactory"];
var url = "/api/account/" + orgId + "/subscriptions";
if (window.XMLHttpRequest) {// code for IE7+, Firefox, Chrome, Opera, Safari
request = new XMLHttpRequest();
} else {// code for IE6, IE5
request = new ActiveXObject("Microsoft.XMLHTTP");
}
request.onreadystatechange = function () {
var response;
var paid = false;
if (request.readyState === 4 && request.status === 200) {
try {
response = JSON.parse(request.responseText);
if (response.length) {
if (response.some(function (sub) {
return (plansArray.indexOf(sub.serviceId) >= 0);
})) {
paid = true;
}
}
showSupportLink(paid);
} catch(err) {
showSupportLink(false);
}
}
};
request.open("GET", url, true);
request.send();
}
// get Organizations where user has owner role
function getOrganizations(user) {
var request;
//var url = "/api/workspace";
var url = "/api/account";
if (window.XMLHttpRequest) {// code for IE7+, Firefox, Chrome, Opera, Safari
request = new XMLHttpRequest();
} else {// code for IE6, IE5
request = new ActiveXObject("Microsoft.XMLHTTP");
}
request.onreadystatechange = function () {
var response;
if (request.readyState === 4 && request.status === 200) {
try {
response = JSON.parse(request.responseText);
response.forEach(function (org) {
if (org.owner === user) {
checkSubscriptionFor(org.id);
}
});
} catch(err) {
showSupportLink(false);
}
}
};
request.open("GET", url, true);
request.send();
}
// Sets Info for Premium User
function setPremiumUserInfo() {
var request;
var url = "/api/user";
if (window.XMLHttpRequest) {// code for IE7+, Firefox, Chrome, Opera, Safari
request = new XMLHttpRequest();
} else {// code for IE6, IE5
request = new ActiveXObject("Microsoft.XMLHTTP");
}
request.onreadystatechange = function () {
var response;
if (request.readyState === 4 && request.status === 200) {
try {
response = JSON.parse(request.responseText);
getOrganizations(response.id); //params: userId
} catch(err) {
showSupportLink(false);
}
}
};
request.open("GET", url, true);
request.send();
}
/*END paid support tab*/
/*
Filling the Profile page
*/
function onReceiveUserProfileInfo(response)
{
userProfile = response.attributes;
var profileAttributes = userProfile.attributes;
var attributes = {};
profileAttributes.forEach(
function(attribute){
// Get attributes only for Profile page
var profilePageAttributes = ["firstName","lastName","phone","employer","jobtitle","email"];
if (profilePageAttributes.indexOf(attribute.name)>=0){
Object.defineProperty(attributes, attribute.name,{value:attribute.value});
}
});
document.getElementById("account_value").innerHTML = attributes.email || "";
document.getElementsByName("first_name")[0].value = attributes.firstName || "";
document.getElementsByName("last_name")[0].value = attributes.lastName || "";
document.getElementsByName("phone_work")[0].value = attributes.phone || "";
document.getElementsByName("company")[0].value = attributes.employer || "";
document.getElementsByName("title")[0].value = attributes.jobtitle || "";
}
var loginWithGoogle = function(page,callback){
if (isWebsocketEnabled()) {
_gaq.push(['_trackEvent', 'Regisration', 'Google registration', page]);
var url = "/api/oauth/authenticate?oauth_provider=google&mode=federated_login" +
"&scope=https://www.googleapis.com/auth/userinfo.profile&scope=https://www.googleapis.com/auth/userinfo.email"+
"&redirect_after_login=" + encodeURIComponent(window.location.protocol + "//" + window.location.host +
"/api/oauth?" + window.location.search.substring(1) + "&oauth_provider=google");
if(typeof callback !== 'undefined'){
callback(url);
}
}
};
var loginWithGithub = function(page,callback){
if (isWebsocketEnabled()) {
_gaq.push(['_trackEvent', 'Regisration', 'GitHub registration', page]);
var url = "/api/oauth/authenticate?oauth_provider=github&mode=federated_login&scope=user&scope=repo" +
"&redirect_after_login=" + encodeURIComponent(window.location.protocol + "//" + window.location.host +
"/api/oauth?" + window.location.search.substring(1) + "&oauth_provider=github");
if(typeof callback !== 'undefined'){
callback(url);
}
}
};
/*
Every method accepts 0 or more data values and two callbacks (success and error)
Success callback spec:
function success(data){
// data includes any additonal data you want to pass back
}
Error callback spec:
function error(errors){
// errors is a list of AccountError instances
}
*/
var isWebsocketEnabled = function(){
var USER_AGENT = navigator.userAgent.toLowerCase();
var IS_WS_SUPPORTED = ("WebSocket" in window);
if (!IS_WS_SUPPORTED || (USER_AGENT.indexOf("chrome") === -1 && USER_AGENT.indexOf("firefox") === -1 && USER_AGENT.indexOf("safari") === -1)) {
window.location = "/site/error/browser-not-supported#show";
return false;
}
return true;
};
var removeCookie = function(cookie){
if ($.cookie(cookie)){
$.cookie(cookie, null);
}
};
return {
removeCookie : removeCookie,
isWebsocketEnabled :isWebsocketEnabled,
getQueryParameterByName : getQueryParameterByName,
AccountError : AccountError,
isValidDomain : function(domain){
return (/^[a-z0-9][a-z0-9_.-]{2,19}$/).exec(domain) !== null ;
},
isValidEmail : function(email){
return (/^[^\+\/]+$/).test(email);
},
login : function(email, password, redirect_url, success, error){
if (isWebsocketEnabled()){
var loginUrl = "/api/auth/login?" + window.location.search.substring(1);
var selectWsUrl = "../site/private/select-tenant?cookiePresent&" + window.location.search.substring(1);
var data = {username: email, password: password};
$.ajax({
url : loginUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
if (redirect_url) {
success({url: redirect_url});
} else {
success({url: selectWsUrl});
}
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
}
},
adminLogin : function(email, password, redirect_url, success, error){
if (isWebsocketEnabled()){
var loginUrl = "/api/auth/login?" + window.location.search.substring(1);
var selectWsUrl = "../site/private/select-tenant?cookiePresent&" + window.location.search.substring(1);
var data = {username: email, password: password, realm:"sysldap"};
$.ajax({
url : loginUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
if (redirect_url) {
success({url: redirect_url});
} else {
success({url: selectWsUrl});
}
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
}
},
loginWithGoogle : loginWithGoogle,
loginWithGithub : loginWithGithub,
createTenant : function(email,domain,success,error){
var data = {email: email.toLowerCase(), workspacename: domain.toLowerCase()};
var emailValidateUrl = "/api/internal/token/validate?" + window.location.search.substring(1);
$.ajax({
url : emailValidateUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
success({url: '../site/thank-you'});
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
createWorkspace : function(username,bearertoken,workspace,redirect_url,success,error){
var data = {username: username.toLowerCase(), token: bearertoken};
var destinationUrl = window.location.protocol + "//" + window.location.host + "/ide/" + workspace + "?" +
window.location.search.substring(1);
var waitUrl = "../wait-for-tenant?type=create&tenantName=" + workspace + "&redirect_url=" + encodeURIComponent(destinationUrl);
var workspaceName = {name: workspace};
var authenticateUrl = "/api/internal/token/authenticate";
var createWSUrl = "/api/workspace/create";
var selectWsUrl = "/site/private/select-tenant";
$.ajax({
url : authenticateUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
if (workspace){
$.ajax({
url : createWSUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(workspaceName),
success : function(){
success({url: waitUrl});
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
} else {
if (redirect_url) {
success({url: redirect_url});
} else {
success({url: selectWsUrl});
}
}
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
joinWorkspace : function(username,bearertoken,workspace,success,error){
var data = {username: username.toLowerCase(), token: bearertoken};
var destinationUrl = window.location.protocol + "//" + window.location.host + "/ide/" + workspace;
var waitUrl = "../wait-for-tenant?type=start&tenantName=" + workspace + "&redirect_url=" + encodeURIComponent(destinationUrl);
//var workspaceName = {name: workspace};
var authenticateUrl = "/api/internal/token/authenticate";
$.ajax({
url : authenticateUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
success({url: waitUrl});
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
recoverPassword : function(email,success,error){
//implementation based on this:
//https://github.com/codenvy/cloud-ide/blob/master/cloud-ide-war/src/main/webapp/js/recover-password.js
var passwordRecoveryUrl = "/api/password/recover/" + email;
$.ajax({
url : passwordRecoveryUrl,
type : "POST",
data: {},
success : function(output, status, xhr){
success({message: xhr.responseText});
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
confirmSetupPassword : function(success,error){
// implementation based on this:
// https://github.com/codenvy/cloud-ide/blob/master/cloud-ide-war/src/main/webapp/js/setup-password.js
// just like with setupPassword, we expect the id to be in the url:
// https://codenvy.com/pages/setup-password?id=df3c62fe-1459-48af-a4a0-d0c1cc17614a
var confirmSetupPasswordUrl = "/api/password/verify",
id = getQueryParameterByName("id");
if(typeof id === 'undefined'){
error([
new AccountError(null,"Invalid password reset url")
]);
return;
}
$.ajax({
url : confirmSetupPasswordUrl + "/" + id,
type : "GET",
success : function(output, status, xhr){
success({ email : xhr.responseText });
},
error : function(xhr /*,status , err*/){
setTimeout(function(){window.location = "/site/recover-password";}, 10000);
error([
new AccountError(null,xhr.responseText + ".<br>You will be redirected in 10 sec")
]);
}
});
},
setupPassword : function(password,success,error){
// implementation based on this:
// https://github.com/codenvy/cloud-ide/blob/master/cloud-ide-war/src/main/webapp/js/setup-password.js
// We assume that uid is part of the url :
// https://codenvy.com/pages/setup-password?id=df3c62fe-1459-48af-a4a0-d0c1cc17614a
var setupPasswordUrl = "/api/password/setup",
id = getQueryParameterByName("id");
$.ajax({
url : setupPasswordUrl,
type : "POST",
data : { uuid : id, password : password },
success : function(){
success({url: "/site/login"});
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
// change password in Profile page
changePassword : function(password,success,error){
var changePasswordUrl = "/api/user/password";
$.ajax({
url : changePasswordUrl,
type : "POST",
data : "password="+password,
success : function(){
success({url: "/"});
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
// update User`s profile in Profile page
updateProfile : function(userAttributes,success,error){
// userProfile.attributes = body;//Updating profile attributes
Object.getOwnPropertyNames(userAttributes).forEach(function(prop){
var newAttribute = true;
userProfile.attributes.forEach(function(attribute){
if (attribute.name === prop) {
attribute.value = userAttributes[prop];
newAttribute = false;
}
});
if (newAttribute){
var el = {};
el["name"] = prop;
el["value"] = userAttributes[prop];
userProfile.attributes.push(el);
}
});
var data = JSON.stringify(userProfile.attributes);
$.ajax({
url : "/api/profile",
type : "POST",
data : data,
contentType: "application/json; charset=utf-8",
success : function(){
success();
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
// get User`s profile in Profile page
getUserProfile : function(success,error){
$.when(Profile.getUser()).done(function(user){
onReceiveUserProfileInfo(user);
}).fail(function(msg){
error([
new AccountError(null,msg)
]);
});
},
/**
* Encode all special characters including ~!*()'. Replace " " on "+"
* @see http://xkr.us/articles/javascript/encode-compare/
* @param {Object} string
*/
encodeSpecialSymbolsForPost: function (string)
{
if (string)
{
string = encodeURIComponent(string);
string = string.replace(/~/g, escape("~"));
string = string.replace(/!/g, escape("!"));
string = string.replace(/\*/g, escape("*"));
string = string.replace(/[(]/g, escape("("));
string = string.replace(/[)]/g, escape(")"));
string = string.replace(/'/g, escape("'"));
string = string.replace(/%20/g, escape("+"));
}
return string;
},
/**
* Escape special symbols from user input
* @param string
* @returns
*/
escapeSpecialSymbols : function (string)
{
if (string)
{
string = string.replace(/\n/g, "\\n");
string = string.replace(/\r/g, "\\r");
string = string.replace(/\t/g, "\\t");
string = string.replace(/[\b]/g, "\\b");
string = string.replace(/\f/g, "\\f");
string = string.replace(/\\/g, "\\\\");
string = string.replace(/\"/g, "\\\"");
}
return string;
},
getTenants : function(success,error,redirect){
$.when(Tenant.getTenants()).done(function(tenants){
switch (tenants.length) {
case 0: redirect({url:"/site/create-account"});
break;
case 1: redirect({url:"/ide/" + tenants[0].toJSON().name});
break;
default:
$.when(Profile.getUser()).done(function(user){
success(tenants,user);
}).fail(function(msg){
error([
new AccountError(null,msg)
]);
});
}
}).fail(function(msg){
error([
new AccountError(null,msg)
]);
});
},
// Returns true if User has WS with tariff plan
supportTab : function(){
var getAccountUrl = "/api/account/subscriptions";
var paid = false;
$.ajax({
url : getAccountUrl,
type : "GET",
async : false,
success : function(subscriptions){
if (typeof(subscriptions)==='object'){
subscriptions.forEach(
function(subscription){
if (subscription.serviceId){
paid = true;
}
});
}
},
error : function(){
}
});
if($.cookie("logged_in")){
setPremiumUserInfo();
} else {
showSupportLink(false);
}
},
// Changing login page behavior if authtype=ldap
isAuthtypeLdap : function() {
var type = getQueryParameterByName("authtype");
return type;
},
waitForTenant : function(success, error){
//based on : https://github.com/codenvy/cloud-ide/blob/8fe1e50cc6434899dfdfd7b2e85c82008a39a880/cloud-ide-war/src/main/webapp/js/wait-tenant-creation.js
var type = getQueryParameterByName("type");//create OR start
var redirectUrl = getQueryParameterByName("redirect_url");
var tenantName = getQueryParameterByName("tenantName");
if(typeof tenantName === 'undefined'){
error([
new AccountError(null,"This is not a valid url")
]);
}
var MAX_WAIT_TIME_SECONDS = 180,
PING_TIMEOUT_MILLISECONDS = 500,
endTime = new Date().getTime() + MAX_WAIT_TIME_SECONDS * 1000;
function buildRedirectUrl(){ return redirectUrl; }
function hitServer(){
if(new Date().getTime() >= endTime){
// removing autologin cookie if exist
removeCookie("autologin");
if (type === "create"){
error([
new AccountError(
null,
"Workspace creation delayed. We'll email you the credentials after your workspace is created."
)
]);
} else if (type === "factory"){
window.location = "/site/error/error-factory-creation";
}else{
error([
new AccountError(
null,
"The requested workspace <strong>'" + tenantName + "'</strong> is not available. Please, contact support."
)
]);
}
return;
}
$.ajax({
url : "/cloud-admin/rest/cloud-admin/tenant-service/tenant-state/" + tenantName,
type : "GET",
success : function(output,status, xhr){
if(xhr.responseText === "ONLINE"){
success({
url : buildRedirectUrl()
});
} else if(xhr.responseText === "CREATION_FAIL"){
success({
url : "/site/error/error-create-tenant"
});
}else{
setTimeout(hitServer,PING_TIMEOUT_MILLISECONDS);
}
},
error : function(xhr){
if(isBadGateway(xhr)){
error([
new AccountError(null,"The requested workspace is not available. Please, contact support.")
]);
} else {
error([
new AccountError(null,xhr.responseText)
]);
}
}
});
}
hitServer();
}
};
});
}(window));
| app/site/scripts/models/account.js | /*
* CODENVY CONFIDENTIAL
* __________________
*
* [2012] - [2013] Codenvy, S.A.
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Codenvy S.A. and its suppliers,
* if any. The intellectual and technical concepts contained
* herein are proprietary to Codenvy S.A.
* and its suppliers and may be covered by U.S. and Foreign Patents,
* patents in process, and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Codenvy S.A..
*/
(function(window){
var _gaq = _gaq || [];
define(["jquery","json", "models/tenant","models/profile","cookies"],function($,JSON,Tenant,Profile){
/*
AccountError is used to report errors through error callback function
(see details below ). Example usage:
new AccountError("password","Your password is too short")
*/
var userProfile = userProfile || {}; // user Profile to store user's data from server
var showSupportLink = function(isPaid){
if (isPaid){
var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true;
uv.src = ('https:' === document.location.protocol ? 'https://' : 'http://') + 'widget.uservoice.com/wfZmoiHoOptcKkBgu238zw.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s);
}else {
var el = $("footer").find("ul");
el.append('<li><a class="footer-link" href="http://helpdesk.codenvy.com">Feedback & support<b></b></a></li>');
}
};
var AccountError = function(fieldName, errorDescription){
return {
getFieldName : function(){
return fieldName;
},
getErrorDescription : function(){
return errorDescription;
}
};
};
var isBadGateway = function(jqXHR){
return jqXHR.status === 502;
};
var getQueryParameterByName = function(name){
name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
var regex = new RegExp("[\\?&]" + name + "=([^&#]*)");
var results = regex.exec(window.location.search);
if(results){
return decodeURIComponent(results[1].replace(/\+/g, " "));
}
};
/*START paid support tab*/
/*global ActiveXObject: false */
// Verify subscriptions for Organization
function checkSubscriptionFor(orgId) {
var request;
var plansArray = ["PremiumWorkspace", "TrackedFactory"];
var url = "/api/account/" + orgId + "/subscriptions";
if (window.XMLHttpRequest) {// code for IE7+, Firefox, Chrome, Opera, Safari
request = new XMLHttpRequest();
} else {// code for IE6, IE5
request = new ActiveXObject("Microsoft.XMLHTTP");
}
request.onreadystatechange = function () {
var response;
var paid = false;
if (request.readyState === 4 && request.status === 200) {
try {
response = JSON.parse(request.responseText);
if (response.length) {
if (response.some(function (sub) {
return (plansArray.indexOf(sub.serviceId) >= 0);
})) {
paid = true;
}
}
showSupportLink(paid);
} catch(err) {
showSupportLink(false);
}
}
};
request.open("GET", url, true);
request.send();
}
// get Organizations where user has owner role
function getOrganizations(user) {
var request;
//var url = "/api/workspace";
var url = "/api/account";
if (window.XMLHttpRequest) {// code for IE7+, Firefox, Chrome, Opera, Safari
request = new XMLHttpRequest();
} else {// code for IE6, IE5
request = new ActiveXObject("Microsoft.XMLHTTP");
}
request.onreadystatechange = function () {
var response;
if (request.readyState === 4 && request.status === 200) {
try {
response = JSON.parse(request.responseText);
response.forEach(function (org) {
if (org.owner === user) {
checkSubscriptionFor(org.id);
}
});
} catch(err) {
showSupportLink(false);
}
}
};
request.open("GET", url, true);
request.send();
}
// Sets Info for Premium User
function setPremiumUserInfo() {
var request;
var url = "/api/user";
if (window.XMLHttpRequest) {// code for IE7+, Firefox, Chrome, Opera, Safari
request = new XMLHttpRequest();
} else {// code for IE6, IE5
request = new ActiveXObject("Microsoft.XMLHTTP");
}
request.onreadystatechange = function () {
var response;
if (request.readyState === 4 && request.status === 200) {
try {
response = JSON.parse(request.responseText);
getOrganizations(response.id); //params: userId
} catch(err) {
showSupportLink(false);
}
}
};
request.open("GET", url, true);
request.send();
}
/*END paid support tab*/
/*
Filling the Profile page
*/
function onReceiveUserProfileInfo(response)
{
userProfile = response.attributes;
var profileAttributes = userProfile.attributes;
var attributes = {};
profileAttributes.forEach(
function(attribute){
// Get attributes only for Profile page
var profilePageAttributes = ["firstName","lastName","phone","employer","jobtitle","email"];
if (profilePageAttributes.indexOf(attribute.name)>=0){
Object.defineProperty(attributes, attribute.name,{value:attribute.value});
}
});
document.getElementById("account_value").innerHTML = attributes.email || "";
document.getElementsByName("first_name")[0].value = attributes.firstName || "";
document.getElementsByName("last_name")[0].value = attributes.lastName || "";
document.getElementsByName("phone_work")[0].value = attributes.phone || "";
document.getElementsByName("company")[0].value = attributes.employer || "";
document.getElementsByName("title")[0].value = attributes.jobtitle || "";
}
var loginWithGoogle = function(page,callback){
if (isWebsocketEnabled()) {
_gaq.push(['_trackEvent', 'Regisration', 'Google registration', page]);
var url = "/api/oauth/authenticate?oauth_provider=google&mode=federated_login" +
"&scope=https://www.googleapis.com/auth/userinfo.profile&scope=https://www.googleapis.com/auth/userinfo.email"+
"&redirect_after_login=" + encodeURIComponent(window.location.protocol + "//" + window.location.host +
"/api/oauth?" + window.location.search.substring(1) + "&oauth_provider=google");
if(typeof callback !== 'undefined'){
callback(url);
}
}
};
var loginWithGithub = function(page,callback){
if (isWebsocketEnabled()) {
_gaq.push(['_trackEvent', 'Regisration', 'GitHub registration', page]);
var url = "/api/oauth/authenticate?oauth_provider=github&mode=federated_login&scope=user&scope=repo" +
"&redirect_after_login=" + encodeURIComponent(window.location.protocol + "//" + window.location.host +
"/api/oauth?" + window.location.search.substring(1) + "&oauth_provider=github");
if(typeof callback !== 'undefined'){
callback(url);
}
}
};
/*
Every method accepts 0 or more data values and two callbacks (success and error)
Success callback spec:
function success(data){
// data includes any additonal data you want to pass back
}
Error callback spec:
function error(errors){
// errors is a list of AccountError instances
}
*/
var isWebsocketEnabled = function(){
var USER_AGENT = navigator.userAgent.toLowerCase();
var IS_WS_SUPPORTED = ("WebSocket" in window);
if (!IS_WS_SUPPORTED || (USER_AGENT.indexOf("chrome") === -1 && USER_AGENT.indexOf("firefox") === -1 && USER_AGENT.indexOf("safari") === -1)) {
window.location = "/site/error/browser-not-supported#show";
return false;
}
return true;
};
var removeCookie = function(cookie){
if ($.cookie(cookie)){
$.cookie(cookie, null);
}
};
return {
removeCookie : removeCookie,
isWebsocketEnabled :isWebsocketEnabled,
getQueryParameterByName : getQueryParameterByName,
AccountError : AccountError,
isValidDomain : function(domain){
return (/^[a-z0-9][a-z0-9_.-]{2,19}$/).exec(domain) !== null ;
},
isValidEmail : function(email){
return (/^[^\+\/]+$/).test(email);
},
login : function(email, password, redirect_url, success, error){
if (isWebsocketEnabled()){
var loginUrl = "/api/auth/login?" + window.location.search.substring(1);
var selectWsUrl = "../site/private/select-tenant?cookiePresent&" + window.location.search.substring(1);
var data = {username: email, password: password};
$.ajax({
url : loginUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
if (redirect_url) {
success({url: redirect_url});
} else {
success({url: selectWsUrl});
}
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
}
},
adminLogin : function(email, password, redirect_url, success, error){
if (isWebsocketEnabled()){
var loginUrl = "/api/auth/login?" + window.location.search.substring(1);
var selectWsUrl = "../site/private/select-tenant?cookiePresent&" + window.location.search.substring(1);
var data = {username: email, password: password, realm:"sysldap"};
$.ajax({
url : loginUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
if (redirect_url) {
success({url: redirect_url});
} else {
success({url: selectWsUrl});
}
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
}
},
loginWithGoogle : loginWithGoogle,
loginWithGithub : loginWithGithub,
createTenant : function(email,domain,success,error){
var data = {email: email.toLowerCase(), workspacename: domain.toLowerCase()};
var emailValidateUrl = "/api/internal/token/validate?" + window.location.search.substring(1);
$.ajax({
url : emailValidateUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
success({url: '../site/thank-you'});
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
createWorkspace : function(username,bearertoken,workspace,redirect_url,success,error){
var data = {username: username.toLowerCase(), token: bearertoken};
var destinationUrl = window.location.protocol + "//" + window.location.host + "/ide/" + workspace + "?" +
window.location.search.substring(1);
var waitUrl = "../wait-for-tenant?type=create&tenantName=" + workspace + "&redirect_url=" + encodeURIComponent(destinationUrl);
var workspaceName = {name: workspace};
var authenticateUrl = "/api/internal/token/authenticate";
var createWSUrl = "/api/workspace/create";
var selectWsUrl = "/site/private/select-tenant";
$.ajax({
url : authenticateUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
if (workspace){
$.ajax({
url : createWSUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(workspaceName),
success : function(){
success({url: waitUrl});
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
} else {
if (redirect_url) {
success({url: redirect_url});
} else {
success({url: selectWsUrl});
}
}
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
joinWorkspace : function(username,bearertoken,workspace,success,error){
var data = {username: username.toLowerCase(), token: bearertoken};
var destinationUrl = window.location.protocol + "//" + window.location.host + "/ide/" + workspace;
var waitUrl = "../wait-for-tenant?type=start&tenantName=" + workspace + "&redirect_url=" + encodeURIComponent(destinationUrl);
//var workspaceName = {name: workspace};
var authenticateUrl = "/api/internal/token/authenticate";
$.ajax({
url : authenticateUrl,
type : "POST",
contentType: "application/json",
data: JSON.stringify(data),
success : function(){
success({url: waitUrl});
},
error : function(xhr/*, status , err*/){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
recoverPassword : function(email,success,error){
//implementation based on this:
//https://github.com/codenvy/cloud-ide/blob/master/cloud-ide-war/src/main/webapp/js/recover-password.js
var passwordRecoveryUrl = "/api/password/recover/" + email;
$.ajax({
url : passwordRecoveryUrl,
type : "POST",
data: {},
success : function(output, status, xhr){
success({message: xhr.responseText});
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
confirmSetupPassword : function(success,error){
// implementation based on this:
// https://github.com/codenvy/cloud-ide/blob/master/cloud-ide-war/src/main/webapp/js/setup-password.js
// just like with setupPassword, we expect the id to be in the url:
// https://codenvy.com/pages/setup-password?id=df3c62fe-1459-48af-a4a0-d0c1cc17614a
var confirmSetupPasswordUrl = "/api/password/verify",
id = getQueryParameterByName("id");
if(typeof id === 'undefined'){
error([
new AccountError(null,"Invalid password reset url")
]);
return;
}
$.ajax({
url : confirmSetupPasswordUrl + "/" + id,
type : "GET",
success : function(output, status, xhr){
success({ email : xhr.responseText });
},
error : function(xhr /*,status , err*/){
setTimeout(function(){window.location = "/site/recover-password";}, 10000);
error([
new AccountError(null,xhr.responseText + ".<br>You will be redirected in 10 sec")
]);
}
});
},
setupPassword : function(password,success,error){
// implementation based on this:
// https://github.com/codenvy/cloud-ide/blob/master/cloud-ide-war/src/main/webapp/js/setup-password.js
// We assume that uid is part of the url :
// https://codenvy.com/pages/setup-password?id=df3c62fe-1459-48af-a4a0-d0c1cc17614a
var setupPasswordUrl = "/api/password/setup",
id = getQueryParameterByName("id");
$.ajax({
url : setupPasswordUrl,
type : "POST",
data : { uuid : id, password : password },
success : function(){
success({url: "/site/login"});
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
// change password in Profile page
changePassword : function(password,success,error){
var changePasswordUrl = "/api/user/password";
$.ajax({
url : changePasswordUrl,
type : "POST",
data : "password="+password,
success : function(){
success({url: "/"});
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
// update User`s profile in Profile page
updateProfile : function(userAttributes,success,error){
// userProfile.attributes = body;//Updating profile attributes
Object.getOwnPropertyNames(userAttributes).forEach(function(prop){
var newAttribute = true;
userProfile.attributes.forEach(function(attribute){
if (attribute.name === prop) {
attribute.value = userAttributes[prop];
newAttribute = false;
}
});
if (newAttribute){
var el = {};
el["name"] = prop;
el["value"] = userAttributes[prop];
userProfile.attributes.push(el);
}
});
var data = JSON.stringify(userProfile.attributes);
$.ajax({
url : "/api/profile",
type : "POST",
data : data,
contentType: "application/json; charset=utf-8",
success : function(){
success();
},
error : function(xhr){
error([
new AccountError(null,xhr.responseText)
]);
}
});
},
// get User`s profile in Profile page
getUserProfile : function(success,error){
$.when(Profile.getUser()).done(function(user){
onReceiveUserProfileInfo(user);
}).fail(function(msg){
error([
new AccountError(null,msg)
]);
});
},
/**
* Encode all special characters including ~!*()'. Replace " " on "+"
* @see http://xkr.us/articles/javascript/encode-compare/
* @param {Object} string
*/
encodeSpecialSymbolsForPost: function (string)
{
if (string)
{
string = encodeURIComponent(string);
string = string.replace(/~/g, escape("~"));
string = string.replace(/!/g, escape("!"));
string = string.replace(/\*/g, escape("*"));
string = string.replace(/[(]/g, escape("("));
string = string.replace(/[)]/g, escape(")"));
string = string.replace(/'/g, escape("'"));
string = string.replace(/%20/g, escape("+"));
}
return string;
},
/**
* Escape special symbols from user input
* @param string
* @returns
*/
escapeSpecialSymbols : function (string)
{
if (string)
{
string = string.replace(/\n/g, "\\n");
string = string.replace(/\r/g, "\\r");
string = string.replace(/\t/g, "\\t");
string = string.replace(/[\b]/g, "\\b");
string = string.replace(/\f/g, "\\f");
string = string.replace(/\\/g, "\\\\");
string = string.replace(/\"/g, "\\\"");
}
return string;
},
getTenants : function(success,error,redirect){
$.when(Tenant.getTenants()).done(function(tenants){
switch (tenants.length) {
case 0: redirect({url:"/site/create-account"});
break;
case 1: redirect({url:"/ide/" + tenants[0].toJSON().name});
break;
default:
$.when(Profile.getUser()).done(function(user){
success(tenants,user);
}).fail(function(msg){
error([
new AccountError(null,msg)
]);
});
}
}).fail(function(msg){
error([
new AccountError(null,msg)
]);
});
},
// Returns true if User has WS with tariff plan
supportTab : function(){
<<<<<<< HEAD
var getAccountUrl = "/api/account/subscriptions";
var paid = false;
$.ajax({
url : getAccountUrl,
type : "GET",
async : false,
success : function(subscriptions){
if (typeof(subscriptions)==='object'){
subscriptions.forEach(
function(subscription){
if (subscription.serviceId){
paid = true;
}
});
}
},
error : function(){
}
});
showSupportLink(paid);
=======
if($.cookie("logged_in")){
setPremiumUserInfo();
} else {
showSupportLink(false);
}
>>>>>>> b2ae6222f30a06a6231ea37e6deb37c1c3e84e46
},
// Changing login page behavior if authtype=ldap
isAuthtypeLdap : function() {
var type = getQueryParameterByName("authtype");
return type;
},
waitForTenant : function(success, error){
//based on : https://github.com/codenvy/cloud-ide/blob/8fe1e50cc6434899dfdfd7b2e85c82008a39a880/cloud-ide-war/src/main/webapp/js/wait-tenant-creation.js
var type = getQueryParameterByName("type");//create OR start
var redirectUrl = getQueryParameterByName("redirect_url");
var tenantName = getQueryParameterByName("tenantName");
if(typeof tenantName === 'undefined'){
error([
new AccountError(null,"This is not a valid url")
]);
}
var MAX_WAIT_TIME_SECONDS = 180,
PING_TIMEOUT_MILLISECONDS = 500,
endTime = new Date().getTime() + MAX_WAIT_TIME_SECONDS * 1000;
function buildRedirectUrl(){ return redirectUrl; }
function hitServer(){
if(new Date().getTime() >= endTime){
// removing autologin cookie if exist
removeCookie("autologin");
if (type === "create"){
error([
new AccountError(
null,
"Workspace creation delayed. We'll email you the credentials after your workspace is created."
)
]);
} else if (type === "factory"){
window.location = "/site/error/error-factory-creation";
}else{
error([
new AccountError(
null,
"The requested workspace <strong>'" + tenantName + "'</strong> is not available. Please, contact support."
)
]);
}
return;
}
$.ajax({
url : "/cloud-admin/rest/cloud-admin/tenant-service/tenant-state/" + tenantName,
type : "GET",
success : function(output,status, xhr){
if(xhr.responseText === "ONLINE"){
success({
url : buildRedirectUrl()
});
} else if(xhr.responseText === "CREATION_FAIL"){
success({
url : "/site/error/error-create-tenant"
});
}else{
setTimeout(hitServer,PING_TIMEOUT_MILLISECONDS);
}
},
error : function(xhr){
if(isBadGateway(xhr)){
error([
new AccountError(null,"The requested workspace is not available. Please, contact support.")
]);
} else {
error([
new AccountError(null,xhr.responseText)
]);
}
}
});
}
hitServer();
}
};
});
}(window));
| IDEX-541; fix acount;
| app/site/scripts/models/account.js | IDEX-541; fix acount; | <ide><path>pp/site/scripts/models/account.js
<ide>
<ide> // Returns true if User has WS with tariff plan
<ide> supportTab : function(){
<del><<<<<<< HEAD
<ide> var getAccountUrl = "/api/account/subscriptions";
<ide> var paid = false;
<ide> $.ajax({
<ide> error : function(){
<ide> }
<ide> });
<del> showSupportLink(paid);
<del>=======
<ide> if($.cookie("logged_in")){
<ide> setPremiumUserInfo();
<ide> } else {
<ide> showSupportLink(false);
<ide> }
<del>
<del>>>>>>>> b2ae6222f30a06a6231ea37e6deb37c1c3e84e46
<ide> },
<ide>
<ide> // Changing login page behavior if authtype=ldap |
|
Java | apache-2.0 | 0d21a2b7a8d250972b46dfa07ff15ff65ab5542a | 0 | ogcs/Okra-Ax | /*
* Copyright 2016 - 2026 TinyZ.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ogcs.ax.component;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.*;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Ax参数
*
* @author TinyZ
* @since 1.0
*/
public class AxProperties {
private static final Logger LOG = LogManager.getLogger(AxProperties.class);
private static final AtomicInteger ATOMIC_INTEGER = new AtomicInteger(0);
private static final String filePath = new File("").getAbsolutePath() + "/conf/" + File.separator + "ax.properties";
// zookeeper setting
public static String axZkConnectString = "127.0.0.1:2181";
public static int axZkTimeout = 5000;
public static String axZkRootPath = "/ax";
public static String[] axZkWatches = new String[]{};
// 组件配置
public static String axModule = "remote/chess";
public static long axId = 1;
public static String axHost = "127.0.0.1";
public static int axPort = 9000;
public static int axBind = 0;
public static String axInnerAuth = "password";
public static int axLoginPort = 0;
static {
Properties props = new Properties();
try {
InputStream in = new BufferedInputStream(new FileInputStream(filePath));
props.load(in);
// set
axZkConnectString = props.getProperty("ax.zookeeper.connectString", axZkConnectString);
axZkRootPath = props.getProperty("ax.zookeeper.root", axZkRootPath);
axZkTimeout = Integer.parseInt(props.getProperty("ax.zookeeper.timeout", String.valueOf(axZkTimeout)));
String watches = props.getProperty("ax.zookeeper.watches").toLowerCase();
if (!watches.isEmpty()) {
axZkWatches = watches.split(",");
}
axModule = props.getProperty("ax.module", axModule).toLowerCase();
axId = Long.valueOf(props.getProperty("ax.id", String.valueOf(axId)));
axHost = props.getProperty("ax.host", axHost).toLowerCase();
axPort = Integer.parseInt(props.getProperty("ax.port", String.valueOf(axPort)));
axBind = Integer.parseInt(props.getProperty("ax.bind", String.valueOf(axBind)));
axInnerAuth = props.getProperty("ax.inner.auth", axInnerAuth).toLowerCase();
axLoginPort = Integer.parseInt(props.getProperty("ax.login.port", String.valueOf(axLoginPort)));
LOG.info("Okra-Ax properties load success.");
} catch (IOException e) {
LOG.warn("Okra-Ax properties load failed.", e);
}
}
/**
* 生成全服唯一ID
*/
public static long id() {
return (
(AxProperties.axId & 0xFFFF) << 48)
| (((System.currentTimeMillis() / 1000) & 0x00000000FFFFFFFFL) << 16)
| (ATOMIC_INTEGER.getAndIncrement() & 0x0000FFFF
);
}
}
| okra-ax-inner/src/main/java/org/ogcs/ax/component/AxProperties.java | /*
* Copyright 2016 - 2026 TinyZ.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ogcs.ax.component;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.*;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Ax参数
*
* @author TinyZ
* @since 1.0
*/
public class AxProperties {
private static final Logger LOG = LogManager.getLogger(AxProperties.class);
private static final AtomicInteger ATOMIC_INTEGER = new AtomicInteger(0);
private static final String filePath = new File("").getAbsolutePath() + "/conf/" + File.separator + "ax.properties";
// zookeeper setting
public static String axZkConnectString = "127.0.0.1:2181";
public static int axZkTimeout = 5000;
public static String axZkRootPath = "/ax";
public static String[] axZkWatches = new String[]{};
// 组件配置
public static String axModule = "remote/chess";
public static long axId = 1;
public static String axHost = "127.0.0.1";
public static int axPort = 9000;
public static int axBind = 0;
public static String axInnerAuth = "password";
public static int axLoginPort = 0;
static {
Properties props = new Properties();
try {
InputStream in = new BufferedInputStream(new FileInputStream(filePath));
props.load(in);
// set
axZkConnectString = props.getProperty("ax.zookeeper.connectString", axZkConnectString);
axZkRootPath = props.getProperty("ax.zookeeper.root", axZkRootPath);
axZkTimeout = Integer.parseInt(props.getProperty("ax.zookeeper.timeout", String.valueOf(axZkTimeout)));
String watches = props.getProperty("ax.zookeeper.watches").toLowerCase();
if (!watches.isEmpty()) {
axZkWatches = watches.split(",");
}
axModule = props.getProperty("ax.module", axModule).toLowerCase();
axId = Long.valueOf(props.getProperty("ax.id", String.valueOf(axId)));
axHost = props.getProperty("ax.host", axHost).toLowerCase();
axPort = Integer.parseInt(props.getProperty("ax.port", String.valueOf(axPort)));
axBind = Integer.parseInt(props.getProperty("ax.bind", String.valueOf(axBind)));
axInnerAuth = props.getProperty("ax.inner.auth", axInnerAuth).toLowerCase();
axLoginPort = Integer.parseInt(props.getProperty("ax.login.port", String.valueOf(axLoginPort)));
LOG.info("Okra-Ax properties load success.");
} catch (IOException e) {
LOG.warn("Okra-Ax properties load failed.", e);
}
}
/**
* 生成全服唯一ID
*/
public static long id() {
return ((AxProperties.axId & 0xFFFF) << 48) | (((System.currentTimeMillis() / 1000) & 0x00000000FFFFFFFFL) << 16) | (ATOMIC_INTEGER.getAndIncrement() & 0x0000FFFF);
}
}
| 调整AxProperties结构
| okra-ax-inner/src/main/java/org/ogcs/ax/component/AxProperties.java | 调整AxProperties结构 | <ide><path>kra-ax-inner/src/main/java/org/ogcs/ax/component/AxProperties.java
<ide> * 生成全服唯一ID
<ide> */
<ide> public static long id() {
<del> return ((AxProperties.axId & 0xFFFF) << 48) | (((System.currentTimeMillis() / 1000) & 0x00000000FFFFFFFFL) << 16) | (ATOMIC_INTEGER.getAndIncrement() & 0x0000FFFF);
<add> return (
<add> (AxProperties.axId & 0xFFFF) << 48)
<add> | (((System.currentTimeMillis() / 1000) & 0x00000000FFFFFFFFL) << 16)
<add> | (ATOMIC_INTEGER.getAndIncrement() & 0x0000FFFF
<add> );
<ide> }
<ide> } |
|
Java | apache-2.0 | 9df8dac295c875ee3395d78a2bab3cda0c3c9ddd | 0 | sijie/bookkeeper,ivankelly/bookkeeper,sijie/bookkeeper,sijie/bookkeeper,sijie/bookkeeper,apache/bookkeeper,ivankelly/bookkeeper,sijie/bookkeeper,sijie/bookkeeper,sijie/bookkeeper,apache/bookkeeper,sijie/bookkeeper,ivankelly/bookkeeper,apache/bookkeeper,apache/bookkeeper,apache/bookkeeper,apache/bookkeeper | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.proto;
import static com.google.common.base.Charsets.UTF_8;
import static org.apache.bookkeeper.util.SafeRunnable.safeRun;
import com.google.common.collect.Lists;
import com.google.protobuf.ExtensionRegistry;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.Recycler;
import io.netty.util.Recycler.Handle;
import io.netty.util.concurrent.DefaultThreadFactory;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.bookkeeper.auth.AuthProviderFactoryFactory;
import org.apache.bookkeeper.auth.ClientAuthProvider;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BookieInfoReader.BookieInfo;
import org.apache.bookkeeper.common.util.SafeRunnable;
import org.apache.bookkeeper.conf.ClientConfiguration;
import org.apache.bookkeeper.net.BookieSocketAddress;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GetBookieInfoCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.ReadEntryCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.ReadLacCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.WriteCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.WriteLacCallback;
import org.apache.bookkeeper.stats.NullStatsLogger;
import org.apache.bookkeeper.stats.StatsLogger;
import org.apache.bookkeeper.tls.SecurityException;
import org.apache.bookkeeper.tls.SecurityHandlerFactory;
import org.apache.bookkeeper.util.ByteBufList;
import org.apache.bookkeeper.util.OrderedSafeExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implements the client-side part of the BookKeeper protocol.
*
*/
public class BookieClient implements PerChannelBookieClientFactory {
static final Logger LOG = LoggerFactory.getLogger(BookieClient.class);
// This is global state that should be across all BookieClients
AtomicLong totalBytesOutstanding = new AtomicLong();
OrderedSafeExecutor executor;
ScheduledExecutorService scheduler;
ScheduledFuture<?> timeoutFuture;
EventLoopGroup eventLoopGroup;
final ConcurrentHashMap<BookieSocketAddress, PerChannelBookieClientPool> channels =
new ConcurrentHashMap<BookieSocketAddress, PerChannelBookieClientPool>();
private final ClientAuthProvider.Factory authProviderFactory;
private final ExtensionRegistry registry;
private final ClientConfiguration conf;
private volatile boolean closed;
private final ReentrantReadWriteLock closeLock;
private final StatsLogger statsLogger;
private final int numConnectionsPerBookie;
private final long bookieErrorThresholdPerInterval;
public BookieClient(ClientConfiguration conf, EventLoopGroup eventLoopGroup,
OrderedSafeExecutor executor, ScheduledExecutorService scheduler,
StatsLogger statsLogger) throws IOException {
this.conf = conf;
this.eventLoopGroup = eventLoopGroup;
this.executor = executor;
this.closed = false;
this.closeLock = new ReentrantReadWriteLock();
this.registry = ExtensionRegistry.newInstance();
this.authProviderFactory = AuthProviderFactoryFactory.newClientAuthProviderFactory(conf);
this.statsLogger = statsLogger;
this.numConnectionsPerBookie = conf.getNumChannelsPerBookie();
this.bookieErrorThresholdPerInterval = conf.getBookieErrorThresholdPerInterval();
this.scheduler = scheduler;
if (conf.getAddEntryTimeout() > 0 || conf.getReadEntryTimeout() > 0) {
SafeRunnable monitor = safeRun(() -> {
monitorPendingOperations();
});
this.timeoutFuture = this.scheduler.scheduleAtFixedRate(monitor,
conf.getTimeoutMonitorIntervalSec(),
conf.getTimeoutMonitorIntervalSec(),
TimeUnit.SECONDS);
}
}
private int getRc(int rc) {
if (BKException.Code.OK == rc) {
return rc;
} else {
if (closed) {
return BKException.Code.ClientClosedException;
} else {
return rc;
}
}
}
public List<BookieSocketAddress> getFaultyBookies() {
List<BookieSocketAddress> faultyBookies = Lists.newArrayList();
for (PerChannelBookieClientPool channelPool : channels.values()) {
if (channelPool instanceof DefaultPerChannelBookieClientPool) {
DefaultPerChannelBookieClientPool pool = (DefaultPerChannelBookieClientPool) channelPool;
if (pool.errorCounter.getAndSet(0) >= bookieErrorThresholdPerInterval) {
faultyBookies.add(pool.address);
}
}
}
return faultyBookies;
}
@Override
public PerChannelBookieClient create(BookieSocketAddress address, PerChannelBookieClientPool pcbcPool,
SecurityHandlerFactory shFactory) throws SecurityException {
return new PerChannelBookieClient(conf, executor, eventLoopGroup, address, statsLogger,
authProviderFactory, registry, pcbcPool, shFactory);
}
public PerChannelBookieClientPool lookupClient(BookieSocketAddress addr) {
PerChannelBookieClientPool clientPool = channels.get(addr);
if (null == clientPool) {
closeLock.readLock().lock();
try {
if (closed) {
return null;
}
PerChannelBookieClientPool newClientPool =
new DefaultPerChannelBookieClientPool(conf, this, addr, numConnectionsPerBookie);
PerChannelBookieClientPool oldClientPool = channels.putIfAbsent(addr, newClientPool);
if (null == oldClientPool) {
clientPool = newClientPool;
// initialize the pool only after we put the pool into the map
clientPool.intialize();
} else {
clientPool = oldClientPool;
newClientPool.close(false);
}
} catch (SecurityException e) {
LOG.error("Security Exception in creating new default PCBC pool: ", e);
return null;
} finally {
closeLock.readLock().unlock();
}
}
return clientPool;
}
public void writeLac(final BookieSocketAddress addr, final long ledgerId, final byte[] masterKey,
final long lac, final ByteBufList toSend, final WriteLacCallback cb, final Object ctx) {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.writeLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
ledgerId, addr, ctx);
return;
}
toSend.retain();
client.obtain((rc, pcbc) -> {
if (rc != BKException.Code.OK) {
try {
executor.submitOrdered(ledgerId, safeRun(() -> {
cb.writeLacComplete(rc, ledgerId, addr, ctx);
}));
} catch (RejectedExecutionException re) {
cb.writeLacComplete(getRc(BKException.Code.InterruptedException), ledgerId, addr, ctx);
}
} else {
pcbc.writeLac(ledgerId, masterKey, lac, toSend, cb, ctx);
}
toSend.release();
}, ledgerId);
}
private void completeAdd(final int rc,
final long ledgerId,
final long entryId,
final BookieSocketAddress addr,
final WriteCallback cb,
final Object ctx) {
try {
executor.submitOrdered(ledgerId, new SafeRunnable() {
@Override
public void safeRun() {
cb.writeComplete(rc, ledgerId, entryId, addr, ctx);
}
@Override
public String toString() {
return String.format("CompleteWrite(ledgerId=%d, entryId=%d, addr=%s)", ledgerId, entryId, addr);
}
});
} catch (RejectedExecutionException ree) {
cb.writeComplete(getRc(BKException.Code.InterruptedException), ledgerId, entryId, addr, ctx);
}
}
public void addEntry(final BookieSocketAddress addr,
final long ledgerId,
final byte[] masterKey,
final long entryId,
final ByteBufList toSend,
final WriteCallback cb,
final Object ctx,
final int options) {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
completeAdd(getRc(BKException.Code.BookieHandleNotAvailableException),
ledgerId, entryId, addr, cb, ctx);
return;
}
// Retain the buffer, since the connection could be obtained after
// the PendingApp might have already failed
toSend.retain();
client.obtain(ChannelReadyForAddEntryCallback.create(
this, toSend, ledgerId, entryId, addr,
ctx, cb, options, masterKey),
ledgerId);
}
private void completeRead(final int rc,
final long ledgerId,
final long entryId,
final ByteBuf entry,
final ReadEntryCallback cb,
final Object ctx) {
try {
executor.submitOrdered(ledgerId, new SafeRunnable() {
@Override
public void safeRun() {
cb.readEntryComplete(rc, ledgerId, entryId, entry, ctx);
}
});
} catch (RejectedExecutionException ree) {
cb.readEntryComplete(getRc(BKException.Code.InterruptedException),
ledgerId, entryId, entry, ctx);
}
}
    /**
     * Recyclable callback used by {@code addEntry} to hand a pending add off to
     * a {@link PerChannelBookieClient} once a channel becomes available.
     *
     * <p>Instances are pooled through a Netty {@link Recycler}: obtain one via
     * {@link #create} and do not retain a reference after
     * {@code operationComplete} returns — the instance recycles itself there.
     */
    private static class ChannelReadyForAddEntryCallback
        implements GenericCallback<PerChannelBookieClient> {
        private final Handle<ChannelReadyForAddEntryCallback> recyclerHandle;

        // Per-request state: populated by create(), cleared by recycle().
        private BookieClient bookieClient;
        private ByteBufList toSend;
        private long ledgerId;
        private long entryId;
        private BookieSocketAddress addr;
        private Object ctx;
        private WriteCallback cb;
        private int options;
        private byte[] masterKey;

        // Fetches a pooled (or new) instance and fills in the request state.
        static ChannelReadyForAddEntryCallback create(
                BookieClient bookieClient, ByteBufList toSend, long ledgerId,
                long entryId, BookieSocketAddress addr, Object ctx,
                WriteCallback cb, int options, byte[] masterKey) {
            ChannelReadyForAddEntryCallback callback = RECYCLER.get();
            callback.bookieClient = bookieClient;
            callback.toSend = toSend;
            callback.ledgerId = ledgerId;
            callback.entryId = entryId;
            callback.addr = addr;
            callback.ctx = ctx;
            callback.cb = cb;
            callback.options = options;
            callback.masterKey = masterKey;
            return callback;
        }

        @Override
        public void operationComplete(final int rc,
                                      PerChannelBookieClient pcbc) {
            if (rc != BKException.Code.OK) {
                // Channel could not be obtained: fail the add via the
                // ordered-executor completion path.
                bookieClient.completeAdd(rc, ledgerId, entryId, addr, cb, ctx);
            } else {
                pcbc.addEntry(ledgerId, masterKey, entryId,
                              toSend, cb, ctx, options);
            }
            // Balances the retain() done by the caller before obtaining the
            // channel; then return this instance to the recycler pool.
            toSend.release();
            recycle();
        }

        private ChannelReadyForAddEntryCallback(
                Handle<ChannelReadyForAddEntryCallback> recyclerHandle) {
            this.recyclerHandle = recyclerHandle;
        }

        private static final Recycler<ChannelReadyForAddEntryCallback> RECYCLER =
            new Recycler<ChannelReadyForAddEntryCallback>() {
                protected ChannelReadyForAddEntryCallback newObject(
                        Recycler.Handle<ChannelReadyForAddEntryCallback> recyclerHandle) {
                    return new ChannelReadyForAddEntryCallback(recyclerHandle);
                }
            };

        // Clears all per-request references (so pooled instances do not pin
        // buffers/callbacks) and returns this object to the recycler.
        public void recycle() {
            bookieClient = null;
            toSend = null;
            ledgerId = -1;
            entryId = -1;
            addr = null;
            ctx = null;
            cb = null;
            options = -1;
            masterKey = null;
            recyclerHandle.recycle(this);
        }
    }
public void readLac(final BookieSocketAddress addr, final long ledgerId, final ReadLacCallback cb,
final Object ctx) {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.readLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException), ledgerId, null, null,
ctx);
return;
}
client.obtain((rc, pcbc) -> {
if (rc != BKException.Code.OK) {
try {
executor.submitOrdered(ledgerId, safeRun(() -> {
cb.readLacComplete(rc, ledgerId, null, null, ctx);
}));
} catch (RejectedExecutionException re) {
cb.readLacComplete(getRc(BKException.Code.InterruptedException),
ledgerId, null, null, ctx);
}
} else {
pcbc.readLac(ledgerId, cb, ctx);
}
}, ledgerId);
}
    /**
     * Reads an entry from a bookie without supplying a master key.
     * Equivalent to the full {@code readEntry} variant with a {@code null}
     * master key.
     */
    public void readEntry(BookieSocketAddress addr, long ledgerId, long entryId,
                          ReadEntryCallback cb, Object ctx, int flags) {
        readEntry(addr, ledgerId, entryId, cb, ctx, flags, null);
    }
public void readEntry(final BookieSocketAddress addr, final long ledgerId, final long entryId,
final ReadEntryCallback cb, final Object ctx, int flags, byte[] masterKey) {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.readEntryComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
ledgerId, entryId, null, ctx);
return;
}
client.obtain((rc, pcbc) -> {
if (rc != BKException.Code.OK) {
completeRead(rc, ledgerId, entryId, null, cb, ctx);
} else {
pcbc.readEntry(ledgerId, entryId, cb, ctx, flags, masterKey);
}
}, ledgerId);
}
public void readEntryWaitForLACUpdate(final BookieSocketAddress addr,
final long ledgerId,
final long entryId,
final long previousLAC,
final long timeOutInMillis,
final boolean piggyBackEntry,
final ReadEntryCallback cb,
final Object ctx) {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
completeRead(BKException.Code.BookieHandleNotAvailableException,
ledgerId, entryId, null, cb, ctx);
return;
}
client.obtain((rc, pcbc) -> {
if (rc != BKException.Code.OK) {
completeRead(rc, ledgerId, entryId, null, cb, ctx);
} else {
pcbc.readEntryWaitForLACUpdate(ledgerId, entryId, previousLAC, timeOutInMillis, piggyBackEntry, cb,
ctx);
}
}, ledgerId);
}
public void getBookieInfo(final BookieSocketAddress addr, final long requested, final GetBookieInfoCallback cb,
final Object ctx) {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.getBookieInfoComplete(getRc(BKException.Code.BookieHandleNotAvailableException), new BookieInfo(),
ctx);
return;
}
client.obtain((rc, pcbc) -> {
if (rc != BKException.Code.OK) {
try {
executor.submit(safeRun(() -> {
cb.getBookieInfoComplete(rc, new BookieInfo(), ctx);
}));
} catch (RejectedExecutionException re) {
cb.getBookieInfoComplete(getRc(BKException.Code.InterruptedException),
new BookieInfo(), ctx);
}
} else {
pcbc.getBookieInfo(requested, cb, ctx);
}
}, requested);
}
private void monitorPendingOperations() {
for (PerChannelBookieClientPool clientPool : channels.values()) {
clientPool.checkTimeoutOnPendingOperations();
}
}
    /**
     * Returns whether {@link #close()} has been invoked on this client.
     * Reads the volatile {@code closed} flag, so no locking is needed.
     */
    public boolean isClosed() {
        return closed;
    }
public void close() {
closeLock.writeLock().lock();
try {
closed = true;
for (PerChannelBookieClientPool pool : channels.values()) {
pool.close(true);
}
channels.clear();
authProviderFactory.close();
if (timeoutFuture != null) {
timeoutFuture.cancel(false);
}
} finally {
closeLock.writeLock().unlock();
}
}
    /**
     * Simple synchronized in-flight counter used by {@link #main} to await
     * outstanding writes.
     *
     * <p>NOTE(review): {@code wait(int)} overloads the inherited
     * {@link Object#wait(long)}; an {@code int} argument resolves to this
     * method, while a {@code long} would hit the inherited one. Keep passing
     * {@code int} literals at call sites, or consider renaming if the
     * callers can be updated together.
     */
    private static class Counter {
        int i;     // operations currently outstanding
        int total; // operations ever started
        synchronized void inc() {
            i++;
            total++;
        }
        synchronized void dec() {
            i--;
            notifyAll(); // wake any thread blocked in wait(int)
        }
        // Blocks until the outstanding count drops to <= limit.
        synchronized void wait(int limit) throws InterruptedException {
            // while-loop guards against spurious wakeups from Object.wait().
            while (i > limit) {
                wait();
            }
        }
        synchronized int total() {
            return total;
        }
    }
/**
* @param args
* @throws IOException
* @throws NumberFormatException
* @throws InterruptedException
*/
public static void main(String[] args) throws NumberFormatException, IOException, InterruptedException {
if (args.length != 3) {
System.err.println("USAGE: BookieClient bookieHost port ledger#");
return;
}
WriteCallback cb = new WriteCallback() {
public void writeComplete(int rc, long ledger, long entry, BookieSocketAddress addr, Object ctx) {
Counter counter = (Counter) ctx;
counter.dec();
if (rc != 0) {
System.out.println("rc = " + rc + " for " + entry + "@" + ledger);
}
}
};
Counter counter = new Counter();
byte hello[] = "hello".getBytes(UTF_8);
long ledger = Long.parseLong(args[2]);
EventLoopGroup eventLoopGroup = new NioEventLoopGroup(1);
OrderedSafeExecutor executor = OrderedSafeExecutor.newBuilder()
.name("BookieClientWorker")
.numThreads(1)
.build();
ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(
new DefaultThreadFactory("BookKeeperClientScheduler"));
BookieClient bc = new BookieClient(new ClientConfiguration(), eventLoopGroup, executor,
scheduler, NullStatsLogger.INSTANCE);
BookieSocketAddress addr = new BookieSocketAddress(args[0], Integer.parseInt(args[1]));
for (int i = 0; i < 100000; i++) {
counter.inc();
bc.addEntry(addr, ledger, new byte[0], i, ByteBufList.get(Unpooled.wrappedBuffer(hello)), cb, counter, 0);
}
counter.wait(0);
System.out.println("Total = " + counter.total());
scheduler.shutdown();
eventLoopGroup.shutdownGracefully();
executor.shutdown();
}
}
| bookkeeper-server/src/main/java/org/apache/bookkeeper/proto/BookieClient.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.proto;
import static com.google.common.base.Charsets.UTF_8;
import com.google.common.collect.Lists;
import com.google.protobuf.ExtensionRegistry;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.Recycler;
import io.netty.util.Recycler.Handle;
import io.netty.util.concurrent.DefaultThreadFactory;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.bookkeeper.auth.AuthProviderFactoryFactory;
import org.apache.bookkeeper.auth.ClientAuthProvider;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BookieInfoReader.BookieInfo;
import org.apache.bookkeeper.conf.ClientConfiguration;
import org.apache.bookkeeper.net.BookieSocketAddress;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GetBookieInfoCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.ReadEntryCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.ReadLacCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.WriteCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.WriteLacCallback;
import org.apache.bookkeeper.stats.NullStatsLogger;
import org.apache.bookkeeper.stats.StatsLogger;
import org.apache.bookkeeper.tls.SecurityException;
import org.apache.bookkeeper.tls.SecurityHandlerFactory;
import org.apache.bookkeeper.util.ByteBufList;
import org.apache.bookkeeper.util.OrderedSafeExecutor;
import org.apache.bookkeeper.util.SafeRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implements the client-side part of the BookKeeper protocol.
*
*/
public class BookieClient implements PerChannelBookieClientFactory {
static final Logger LOG = LoggerFactory.getLogger(BookieClient.class);
// This is global state that should be across all BookieClients
AtomicLong totalBytesOutstanding = new AtomicLong();
OrderedSafeExecutor executor;
ScheduledExecutorService scheduler;
ScheduledFuture<?> timeoutFuture;
EventLoopGroup eventLoopGroup;
final ConcurrentHashMap<BookieSocketAddress, PerChannelBookieClientPool> channels =
new ConcurrentHashMap<BookieSocketAddress, PerChannelBookieClientPool>();
private final ClientAuthProvider.Factory authProviderFactory;
private final ExtensionRegistry registry;
private final ClientConfiguration conf;
private volatile boolean closed;
private final ReentrantReadWriteLock closeLock;
private final StatsLogger statsLogger;
private final int numConnectionsPerBookie;
private final long bookieErrorThresholdPerInterval;
public BookieClient(ClientConfiguration conf, EventLoopGroup eventLoopGroup,
OrderedSafeExecutor executor, ScheduledExecutorService scheduler,
StatsLogger statsLogger) throws IOException {
this.conf = conf;
this.eventLoopGroup = eventLoopGroup;
this.executor = executor;
this.closed = false;
this.closeLock = new ReentrantReadWriteLock();
this.registry = ExtensionRegistry.newInstance();
this.authProviderFactory = AuthProviderFactoryFactory.newClientAuthProviderFactory(conf);
this.statsLogger = statsLogger;
this.numConnectionsPerBookie = conf.getNumChannelsPerBookie();
this.bookieErrorThresholdPerInterval = conf.getBookieErrorThresholdPerInterval();
this.scheduler = scheduler;
if (conf.getAddEntryTimeout() > 0 || conf.getReadEntryTimeout() > 0) {
SafeRunnable monitor = new SafeRunnable() {
@Override
public void safeRun() {
monitorPendingOperations();
}
};
this.timeoutFuture = this.scheduler.scheduleAtFixedRate(monitor,
conf.getTimeoutMonitorIntervalSec(),
conf.getTimeoutMonitorIntervalSec(),
TimeUnit.SECONDS);
}
}
private int getRc(int rc) {
if (BKException.Code.OK == rc) {
return rc;
} else {
if (closed) {
return BKException.Code.ClientClosedException;
} else {
return rc;
}
}
}
public List<BookieSocketAddress> getFaultyBookies() {
List<BookieSocketAddress> faultyBookies = Lists.newArrayList();
for (PerChannelBookieClientPool channelPool : channels.values()) {
if (channelPool instanceof DefaultPerChannelBookieClientPool) {
DefaultPerChannelBookieClientPool pool = (DefaultPerChannelBookieClientPool) channelPool;
if (pool.errorCounter.getAndSet(0) >= bookieErrorThresholdPerInterval) {
faultyBookies.add(pool.address);
}
}
}
return faultyBookies;
}
@Override
public PerChannelBookieClient create(BookieSocketAddress address, PerChannelBookieClientPool pcbcPool,
SecurityHandlerFactory shFactory) throws SecurityException {
return new PerChannelBookieClient(conf, executor, eventLoopGroup, address, statsLogger,
authProviderFactory, registry, pcbcPool, shFactory);
}
public PerChannelBookieClientPool lookupClient(BookieSocketAddress addr) {
PerChannelBookieClientPool clientPool = channels.get(addr);
if (null == clientPool) {
closeLock.readLock().lock();
try {
if (closed) {
return null;
}
PerChannelBookieClientPool newClientPool =
new DefaultPerChannelBookieClientPool(conf, this, addr, numConnectionsPerBookie);
PerChannelBookieClientPool oldClientPool = channels.putIfAbsent(addr, newClientPool);
if (null == oldClientPool) {
clientPool = newClientPool;
// initialize the pool only after we put the pool into the map
clientPool.intialize();
} else {
clientPool = oldClientPool;
newClientPool.close(false);
}
} catch (SecurityException e) {
LOG.error("Security Exception in creating new default PCBC pool: ", e);
return null;
} finally {
closeLock.readLock().unlock();
}
}
return clientPool;
}
public void writeLac(final BookieSocketAddress addr, final long ledgerId, final byte[] masterKey,
final long lac, final ByteBufList toSend, final WriteLacCallback cb, final Object ctx) {
closeLock.readLock().lock();
try {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.writeLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
ledgerId, addr, ctx);
return;
}
toSend.retain();
client.obtain(new GenericCallback<PerChannelBookieClient>() {
@Override
public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
if (rc != BKException.Code.OK) {
try {
executor.submitOrdered(ledgerId, new SafeRunnable() {
@Override
public void safeRun() {
cb.writeLacComplete(rc, ledgerId, addr, ctx);
}
});
} catch (RejectedExecutionException re) {
cb.writeLacComplete(getRc(BKException.Code.InterruptedException), ledgerId, addr, ctx);
}
} else {
pcbc.writeLac(ledgerId, masterKey, lac, toSend, cb, ctx);
}
toSend.release();
}
}, ledgerId);
} finally {
closeLock.readLock().unlock();
}
}
private void completeAdd(final int rc,
final long ledgerId,
final long entryId,
final BookieSocketAddress addr,
final WriteCallback cb,
final Object ctx) {
try {
executor.submitOrdered(ledgerId, new SafeRunnable() {
@Override
public void safeRun() {
cb.writeComplete(rc, ledgerId, entryId, addr, ctx);
}
@Override
public String toString() {
return String.format("CompleteWrite(ledgerId=%d, entryId=%d, addr=%s)", ledgerId, entryId, addr);
}
});
} catch (RejectedExecutionException ree) {
cb.writeComplete(getRc(BKException.Code.InterruptedException), ledgerId, entryId, addr, ctx);
}
}
public void addEntry(final BookieSocketAddress addr,
final long ledgerId,
final byte[] masterKey,
final long entryId,
final ByteBufList toSend,
final WriteCallback cb,
final Object ctx,
final int options) {
closeLock.readLock().lock();
try {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
completeAdd(getRc(BKException.Code.BookieHandleNotAvailableException),
ledgerId, entryId, addr, cb, ctx);
return;
}
// Retain the buffer, since the connection could be obtained after
// the PendingApp might have already failed
toSend.retain();
client.obtain(ChannelReadyForAddEntryCallback.create(
this, toSend, ledgerId, entryId, addr,
ctx, cb, options, masterKey),
ledgerId);
} finally {
closeLock.readLock().unlock();
}
}
private void completeRead(final int rc,
final long ledgerId,
final long entryId,
final ByteBuf entry,
final ReadEntryCallback cb,
final Object ctx) {
try {
executor.submitOrdered(ledgerId, new SafeRunnable() {
@Override
public void safeRun() {
cb.readEntryComplete(rc, ledgerId, entryId, entry, ctx);
}
});
} catch (RejectedExecutionException ree) {
cb.readEntryComplete(getRc(BKException.Code.InterruptedException),
ledgerId, entryId, entry, ctx);
}
}
private static class ChannelReadyForAddEntryCallback
implements GenericCallback<PerChannelBookieClient> {
private final Handle<ChannelReadyForAddEntryCallback> recyclerHandle;
private BookieClient bookieClient;
private ByteBufList toSend;
private long ledgerId;
private long entryId;
private BookieSocketAddress addr;
private Object ctx;
private WriteCallback cb;
private int options;
private byte[] masterKey;
static ChannelReadyForAddEntryCallback create(
BookieClient bookieClient, ByteBufList toSend, long ledgerId,
long entryId, BookieSocketAddress addr, Object ctx,
WriteCallback cb, int options, byte[] masterKey) {
ChannelReadyForAddEntryCallback callback = RECYCLER.get();
callback.bookieClient = bookieClient;
callback.toSend = toSend;
callback.ledgerId = ledgerId;
callback.entryId = entryId;
callback.addr = addr;
callback.ctx = ctx;
callback.cb = cb;
callback.options = options;
callback.masterKey = masterKey;
return callback;
}
@Override
public void operationComplete(final int rc,
PerChannelBookieClient pcbc) {
if (rc != BKException.Code.OK) {
bookieClient.completeAdd(rc, ledgerId, entryId, addr, cb, ctx);
} else {
pcbc.addEntry(ledgerId, masterKey, entryId,
toSend, cb, ctx, options);
}
toSend.release();
recycle();
}
private ChannelReadyForAddEntryCallback(
Handle<ChannelReadyForAddEntryCallback> recyclerHandle) {
this.recyclerHandle = recyclerHandle;
}
private static final Recycler<ChannelReadyForAddEntryCallback> RECYCLER =
new Recycler<ChannelReadyForAddEntryCallback>() {
protected ChannelReadyForAddEntryCallback newObject(
Recycler.Handle<ChannelReadyForAddEntryCallback> recyclerHandle) {
return new ChannelReadyForAddEntryCallback(recyclerHandle);
}
};
public void recycle() {
bookieClient = null;
toSend = null;
ledgerId = -1;
entryId = -1;
addr = null;
ctx = null;
cb = null;
options = -1;
masterKey = null;
recyclerHandle.recycle(this);
}
}
public void readLac(final BookieSocketAddress addr, final long ledgerId, final ReadLacCallback cb,
final Object ctx) {
closeLock.readLock().lock();
try {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.readLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException), ledgerId, null, null,
ctx);
return;
}
client.obtain(new GenericCallback<PerChannelBookieClient>() {
@Override
public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
if (rc != BKException.Code.OK) {
try {
executor.submitOrdered(ledgerId, new SafeRunnable() {
@Override
public void safeRun() {
cb.readLacComplete(rc, ledgerId, null, null, ctx);
}
});
} catch (RejectedExecutionException re) {
cb.readLacComplete(getRc(BKException.Code.InterruptedException),
ledgerId, null, null, ctx);
}
return;
}
pcbc.readLac(ledgerId, cb, ctx);
}
}, ledgerId);
} finally {
closeLock.readLock().unlock();
}
}
public void readEntry(BookieSocketAddress addr, long ledgerId, long entryId,
ReadEntryCallback cb, Object ctx, int flags) {
readEntry(addr, ledgerId, entryId, cb, ctx, flags, null);
}
public void readEntry(final BookieSocketAddress addr, final long ledgerId, final long entryId,
final ReadEntryCallback cb, final Object ctx, int flags, byte[] masterKey) {
closeLock.readLock().lock();
try {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.readEntryComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
ledgerId, entryId, null, ctx);
return;
}
client.obtain(new GenericCallback<PerChannelBookieClient>() {
@Override
public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
if (rc != BKException.Code.OK) {
completeRead(rc, ledgerId, entryId, null, cb, ctx);
return;
}
pcbc.readEntry(ledgerId, entryId, cb, ctx, flags, masterKey);
}
}, ledgerId);
} finally {
closeLock.readLock().unlock();
}
}
public void readEntryWaitForLACUpdate(final BookieSocketAddress addr,
final long ledgerId,
final long entryId,
final long previousLAC,
final long timeOutInMillis,
final boolean piggyBackEntry,
final ReadEntryCallback cb,
final Object ctx) {
closeLock.readLock().lock();
try {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
completeRead(BKException.Code.BookieHandleNotAvailableException,
ledgerId, entryId, null, cb, ctx);
return;
}
client.obtain(new GenericCallback<PerChannelBookieClient>() {
@Override
public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
if (rc != BKException.Code.OK) {
completeRead(rc, ledgerId, entryId, null, cb, ctx);
return;
}
pcbc.readEntryWaitForLACUpdate(ledgerId, entryId, previousLAC, timeOutInMillis, piggyBackEntry, cb,
ctx);
}
}, ledgerId);
} finally {
closeLock.readLock().unlock();
}
}
public void getBookieInfo(final BookieSocketAddress addr, final long requested, final GetBookieInfoCallback cb,
final Object ctx) {
closeLock.readLock().lock();
try {
final PerChannelBookieClientPool client = lookupClient(addr);
if (client == null) {
cb.getBookieInfoComplete(getRc(BKException.Code.BookieHandleNotAvailableException), new BookieInfo(),
ctx);
return;
}
client.obtain(new GenericCallback<PerChannelBookieClient>() {
@Override
public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
if (rc != BKException.Code.OK) {
try {
executor.submit(new SafeRunnable() {
@Override
public void safeRun() {
cb.getBookieInfoComplete(rc, new BookieInfo(), ctx);
}
});
} catch (RejectedExecutionException re) {
cb.getBookieInfoComplete(getRc(BKException.Code.InterruptedException),
new BookieInfo(), ctx);
}
return;
}
pcbc.getBookieInfo(requested, cb, ctx);
}
}, requested);
} finally {
closeLock.readLock().unlock();
}
}
private void monitorPendingOperations() {
for (PerChannelBookieClientPool clientPool : channels.values()) {
clientPool.checkTimeoutOnPendingOperations();
}
}
public boolean isClosed() {
return closed;
}
public void close() {
closeLock.writeLock().lock();
try {
closed = true;
for (PerChannelBookieClientPool pool : channels.values()) {
pool.close(true);
}
channels.clear();
authProviderFactory.close();
if (timeoutFuture != null) {
timeoutFuture.cancel(false);
}
} finally {
closeLock.writeLock().unlock();
}
}
private static class Counter {
int i;
int total;
synchronized void inc() {
i++;
total++;
}
synchronized void dec() {
i--;
notifyAll();
}
synchronized void wait(int limit) throws InterruptedException {
while (i > limit) {
wait();
}
}
synchronized int total() {
return total;
}
}
/**
* @param args
* @throws IOException
* @throws NumberFormatException
* @throws InterruptedException
*/
public static void main(String[] args) throws NumberFormatException, IOException, InterruptedException {
if (args.length != 3) {
System.err.println("USAGE: BookieClient bookieHost port ledger#");
return;
}
WriteCallback cb = new WriteCallback() {
public void writeComplete(int rc, long ledger, long entry, BookieSocketAddress addr, Object ctx) {
Counter counter = (Counter) ctx;
counter.dec();
if (rc != 0) {
System.out.println("rc = " + rc + " for " + entry + "@" + ledger);
}
}
};
Counter counter = new Counter();
byte hello[] = "hello".getBytes(UTF_8);
long ledger = Long.parseLong(args[2]);
EventLoopGroup eventLoopGroup = new NioEventLoopGroup(1);
OrderedSafeExecutor executor = OrderedSafeExecutor.newBuilder()
.name("BookieClientWorker")
.numThreads(1)
.build();
ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(
new DefaultThreadFactory("BookKeeperClientScheduler"));
BookieClient bc = new BookieClient(new ClientConfiguration(), eventLoopGroup, executor,
scheduler, NullStatsLogger.INSTANCE);
BookieSocketAddress addr = new BookieSocketAddress(args[0], Integer.parseInt(args[1]));
for (int i = 0; i < 100000; i++) {
counter.inc();
bc.addEntry(addr, ledger, new byte[0], i, ByteBufList.get(Unpooled.wrappedBuffer(hello)), cb, counter, 0);
}
counter.wait(0);
System.out.println("Total = " + counter.total());
scheduler.shutdown();
eventLoopGroup.shutdownGracefully();
executor.shutdown();
}
}
| Avoid acquiring closeLock.readLock() on every add/read operation
In the `BookieClient`, we are always acquiring a readlock when grabbing a connection to use for sending a write/read request.
The lock is the `closeLock` and it's only acquired in "write" mode when the `BookKeeper` instance is closed.
The problem with the read lock is that it introduces contention between the threads that are acquiring it (even if all of them acquire it in read mode). Multiple threads can be in read mode in the critical section at the same time, though they still contend when entering/exiting the section.
Additionally, the Java implementation of read/write lock is creating and destroying a lot of objects when that contention happens.
My understanding of the code is that we don't need to acquire the read lock at that point. The reason is that we are already acquiring the lock in the `lookupClient()` method, although only if the pool is null. Additionally, when `BookKeeper.close()` is invoked all PCBCs will be set to closed as well, so it will not be possible to create a new connection.
All the line changes in the patch are just removing the readLock acquire and try/finally, and reducing the indentation level.
Author: Matteo Merli <[email protected]>
Reviewers: Ivan Kelly <[email protected]>, Enrico Olivelli <[email protected]>, Jia Zhai <None>, Sijie Guo <[email protected]>
This closes #1292 from merlimat/bookie-client-rw-lock and squashes the following commits:
2104a3aa7 [Matteo Merli] Converted anonymous classes into lambdas
cabad14e5 [Matteo Merli] Avoid acquiring closeLock.readLock() on every add/read operation
| bookkeeper-server/src/main/java/org/apache/bookkeeper/proto/BookieClient.java | Avoid acquiring closeLock.readLock() on every add/read operation | <ide><path>ookkeeper-server/src/main/java/org/apache/bookkeeper/proto/BookieClient.java
<ide> package org.apache.bookkeeper.proto;
<ide>
<ide> import static com.google.common.base.Charsets.UTF_8;
<add>import static org.apache.bookkeeper.util.SafeRunnable.safeRun;
<ide>
<ide> import com.google.common.collect.Lists;
<ide> import com.google.protobuf.ExtensionRegistry;
<ide> import org.apache.bookkeeper.auth.ClientAuthProvider;
<ide> import org.apache.bookkeeper.client.BKException;
<ide> import org.apache.bookkeeper.client.BookieInfoReader.BookieInfo;
<add>import org.apache.bookkeeper.common.util.SafeRunnable;
<ide> import org.apache.bookkeeper.conf.ClientConfiguration;
<ide> import org.apache.bookkeeper.net.BookieSocketAddress;
<ide> import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback;
<ide> import org.apache.bookkeeper.tls.SecurityHandlerFactory;
<ide> import org.apache.bookkeeper.util.ByteBufList;
<ide> import org.apache.bookkeeper.util.OrderedSafeExecutor;
<del>import org.apache.bookkeeper.util.SafeRunnable;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide>
<ide>
<ide> this.scheduler = scheduler;
<ide> if (conf.getAddEntryTimeout() > 0 || conf.getReadEntryTimeout() > 0) {
<del> SafeRunnable monitor = new SafeRunnable() {
<del> @Override
<del> public void safeRun() {
<del> monitorPendingOperations();
<del> }
<del> };
<add> SafeRunnable monitor = safeRun(() -> {
<add> monitorPendingOperations();
<add> });
<ide> this.timeoutFuture = this.scheduler.scheduleAtFixedRate(monitor,
<ide> conf.getTimeoutMonitorIntervalSec(),
<ide> conf.getTimeoutMonitorIntervalSec(),
<ide>
<ide> public void writeLac(final BookieSocketAddress addr, final long ledgerId, final byte[] masterKey,
<ide> final long lac, final ByteBufList toSend, final WriteLacCallback cb, final Object ctx) {
<del> closeLock.readLock().lock();
<del> try {
<del> final PerChannelBookieClientPool client = lookupClient(addr);
<del> if (client == null) {
<del> cb.writeLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
<del> ledgerId, addr, ctx);
<del> return;
<del> }
<del>
<del> toSend.retain();
<del> client.obtain(new GenericCallback<PerChannelBookieClient>() {
<del> @Override
<del> public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
<del> if (rc != BKException.Code.OK) {
<del> try {
<del> executor.submitOrdered(ledgerId, new SafeRunnable() {
<del> @Override
<del> public void safeRun() {
<del> cb.writeLacComplete(rc, ledgerId, addr, ctx);
<del> }
<del> });
<del> } catch (RejectedExecutionException re) {
<del> cb.writeLacComplete(getRc(BKException.Code.InterruptedException), ledgerId, addr, ctx);
<del> }
<del> } else {
<del> pcbc.writeLac(ledgerId, masterKey, lac, toSend, cb, ctx);
<del> }
<del>
<del> toSend.release();
<del> }
<del> }, ledgerId);
<del> } finally {
<del> closeLock.readLock().unlock();
<del> }
<add> final PerChannelBookieClientPool client = lookupClient(addr);
<add> if (client == null) {
<add> cb.writeLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
<add> ledgerId, addr, ctx);
<add> return;
<add> }
<add>
<add> toSend.retain();
<add> client.obtain((rc, pcbc) -> {
<add> if (rc != BKException.Code.OK) {
<add> try {
<add> executor.submitOrdered(ledgerId, safeRun(() -> {
<add> cb.writeLacComplete(rc, ledgerId, addr, ctx);
<add> }));
<add> } catch (RejectedExecutionException re) {
<add> cb.writeLacComplete(getRc(BKException.Code.InterruptedException), ledgerId, addr, ctx);
<add> }
<add> } else {
<add> pcbc.writeLac(ledgerId, masterKey, lac, toSend, cb, ctx);
<add> }
<add>
<add> toSend.release();
<add> }, ledgerId);
<ide> }
<ide>
<ide> private void completeAdd(final int rc,
<ide> final WriteCallback cb,
<ide> final Object ctx,
<ide> final int options) {
<del> closeLock.readLock().lock();
<del> try {
<del> final PerChannelBookieClientPool client = lookupClient(addr);
<del> if (client == null) {
<del> completeAdd(getRc(BKException.Code.BookieHandleNotAvailableException),
<del> ledgerId, entryId, addr, cb, ctx);
<del> return;
<del> }
<del>
<del> // Retain the buffer, since the connection could be obtained after
<del> // the PendingApp might have already failed
<del> toSend.retain();
<del>
<del> client.obtain(ChannelReadyForAddEntryCallback.create(
<del> this, toSend, ledgerId, entryId, addr,
<del> ctx, cb, options, masterKey),
<del> ledgerId);
<del> } finally {
<del> closeLock.readLock().unlock();
<del> }
<add> final PerChannelBookieClientPool client = lookupClient(addr);
<add> if (client == null) {
<add> completeAdd(getRc(BKException.Code.BookieHandleNotAvailableException),
<add> ledgerId, entryId, addr, cb, ctx);
<add> return;
<add> }
<add>
<add> // Retain the buffer, since the connection could be obtained after
<add> // the PendingApp might have already failed
<add> toSend.retain();
<add>
<add> client.obtain(ChannelReadyForAddEntryCallback.create(
<add> this, toSend, ledgerId, entryId, addr,
<add> ctx, cb, options, masterKey),
<add> ledgerId);
<ide> }
<ide>
<ide> private void completeRead(final int rc,
<ide>
<ide> public void readLac(final BookieSocketAddress addr, final long ledgerId, final ReadLacCallback cb,
<ide> final Object ctx) {
<del> closeLock.readLock().lock();
<del> try {
<del> final PerChannelBookieClientPool client = lookupClient(addr);
<del> if (client == null) {
<del> cb.readLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException), ledgerId, null, null,
<del> ctx);
<del> return;
<del> }
<del> client.obtain(new GenericCallback<PerChannelBookieClient>() {
<del> @Override
<del> public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
<del> if (rc != BKException.Code.OK) {
<del> try {
<del> executor.submitOrdered(ledgerId, new SafeRunnable() {
<del> @Override
<del> public void safeRun() {
<del> cb.readLacComplete(rc, ledgerId, null, null, ctx);
<del> }
<del> });
<del> } catch (RejectedExecutionException re) {
<del> cb.readLacComplete(getRc(BKException.Code.InterruptedException),
<del> ledgerId, null, null, ctx);
<del> }
<del> return;
<del> }
<del> pcbc.readLac(ledgerId, cb, ctx);
<del> }
<del> }, ledgerId);
<del> } finally {
<del> closeLock.readLock().unlock();
<del> }
<add> final PerChannelBookieClientPool client = lookupClient(addr);
<add> if (client == null) {
<add> cb.readLacComplete(getRc(BKException.Code.BookieHandleNotAvailableException), ledgerId, null, null,
<add> ctx);
<add> return;
<add> }
<add> client.obtain((rc, pcbc) -> {
<add> if (rc != BKException.Code.OK) {
<add> try {
<add> executor.submitOrdered(ledgerId, safeRun(() -> {
<add> cb.readLacComplete(rc, ledgerId, null, null, ctx);
<add> }));
<add> } catch (RejectedExecutionException re) {
<add> cb.readLacComplete(getRc(BKException.Code.InterruptedException),
<add> ledgerId, null, null, ctx);
<add> }
<add> } else {
<add> pcbc.readLac(ledgerId, cb, ctx);
<add> }
<add> }, ledgerId);
<ide> }
<ide>
<ide> public void readEntry(BookieSocketAddress addr, long ledgerId, long entryId,
<ide>
<ide> public void readEntry(final BookieSocketAddress addr, final long ledgerId, final long entryId,
<ide> final ReadEntryCallback cb, final Object ctx, int flags, byte[] masterKey) {
<del> closeLock.readLock().lock();
<del> try {
<del> final PerChannelBookieClientPool client = lookupClient(addr);
<del> if (client == null) {
<del> cb.readEntryComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
<del> ledgerId, entryId, null, ctx);
<del> return;
<del> }
<del>
<del> client.obtain(new GenericCallback<PerChannelBookieClient>() {
<del> @Override
<del> public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
<del> if (rc != BKException.Code.OK) {
<del> completeRead(rc, ledgerId, entryId, null, cb, ctx);
<del> return;
<del> }
<del> pcbc.readEntry(ledgerId, entryId, cb, ctx, flags, masterKey);
<del> }
<del> }, ledgerId);
<del> } finally {
<del> closeLock.readLock().unlock();
<del> }
<add> final PerChannelBookieClientPool client = lookupClient(addr);
<add> if (client == null) {
<add> cb.readEntryComplete(getRc(BKException.Code.BookieHandleNotAvailableException),
<add> ledgerId, entryId, null, ctx);
<add> return;
<add> }
<add>
<add> client.obtain((rc, pcbc) -> {
<add> if (rc != BKException.Code.OK) {
<add> completeRead(rc, ledgerId, entryId, null, cb, ctx);
<add> } else {
<add> pcbc.readEntry(ledgerId, entryId, cb, ctx, flags, masterKey);
<add> }
<add> }, ledgerId);
<ide> }
<ide>
<ide>
<ide> final boolean piggyBackEntry,
<ide> final ReadEntryCallback cb,
<ide> final Object ctx) {
<del> closeLock.readLock().lock();
<del> try {
<del> final PerChannelBookieClientPool client = lookupClient(addr);
<del> if (client == null) {
<del> completeRead(BKException.Code.BookieHandleNotAvailableException,
<del> ledgerId, entryId, null, cb, ctx);
<del> return;
<del> }
<del>
<del> client.obtain(new GenericCallback<PerChannelBookieClient>() {
<del> @Override
<del> public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
<del>
<del> if (rc != BKException.Code.OK) {
<del> completeRead(rc, ledgerId, entryId, null, cb, ctx);
<del> return;
<del> }
<del> pcbc.readEntryWaitForLACUpdate(ledgerId, entryId, previousLAC, timeOutInMillis, piggyBackEntry, cb,
<del> ctx);
<del> }
<del> }, ledgerId);
<del> } finally {
<del> closeLock.readLock().unlock();
<del> }
<add> final PerChannelBookieClientPool client = lookupClient(addr);
<add> if (client == null) {
<add> completeRead(BKException.Code.BookieHandleNotAvailableException,
<add> ledgerId, entryId, null, cb, ctx);
<add> return;
<add> }
<add>
<add> client.obtain((rc, pcbc) -> {
<add> if (rc != BKException.Code.OK) {
<add> completeRead(rc, ledgerId, entryId, null, cb, ctx);
<add> } else {
<add> pcbc.readEntryWaitForLACUpdate(ledgerId, entryId, previousLAC, timeOutInMillis, piggyBackEntry, cb,
<add> ctx);
<add> }
<add> }, ledgerId);
<ide> }
<ide>
<ide> public void getBookieInfo(final BookieSocketAddress addr, final long requested, final GetBookieInfoCallback cb,
<ide> final Object ctx) {
<del> closeLock.readLock().lock();
<del> try {
<del> final PerChannelBookieClientPool client = lookupClient(addr);
<del> if (client == null) {
<del> cb.getBookieInfoComplete(getRc(BKException.Code.BookieHandleNotAvailableException), new BookieInfo(),
<del> ctx);
<del> return;
<del> }
<del> client.obtain(new GenericCallback<PerChannelBookieClient>() {
<del> @Override
<del> public void operationComplete(final int rc, PerChannelBookieClient pcbc) {
<del> if (rc != BKException.Code.OK) {
<del> try {
<del> executor.submit(new SafeRunnable() {
<del> @Override
<del> public void safeRun() {
<del> cb.getBookieInfoComplete(rc, new BookieInfo(), ctx);
<del> }
<del> });
<del> } catch (RejectedExecutionException re) {
<del> cb.getBookieInfoComplete(getRc(BKException.Code.InterruptedException),
<del> new BookieInfo(), ctx);
<del> }
<del> return;
<del> }
<del> pcbc.getBookieInfo(requested, cb, ctx);
<del> }
<del> }, requested);
<del> } finally {
<del> closeLock.readLock().unlock();
<del> }
<add> final PerChannelBookieClientPool client = lookupClient(addr);
<add> if (client == null) {
<add> cb.getBookieInfoComplete(getRc(BKException.Code.BookieHandleNotAvailableException), new BookieInfo(),
<add> ctx);
<add> return;
<add> }
<add> client.obtain((rc, pcbc) -> {
<add> if (rc != BKException.Code.OK) {
<add> try {
<add> executor.submit(safeRun(() -> {
<add> cb.getBookieInfoComplete(rc, new BookieInfo(), ctx);
<add> }));
<add> } catch (RejectedExecutionException re) {
<add> cb.getBookieInfoComplete(getRc(BKException.Code.InterruptedException),
<add> new BookieInfo(), ctx);
<add> }
<add> } else {
<add> pcbc.getBookieInfo(requested, cb, ctx);
<add> }
<add> }, requested);
<ide> }
<ide>
<ide> private void monitorPendingOperations() { |
|
Java | mit | f9b11cdd609e12849df15bbb08792f10fc8cc7d0 | 0 | braintree/braintree_android,braintree/braintree_android,braintree/braintree_android,braintree/braintree_android | package com.braintreepayments.api;
import android.app.Activity;
import android.os.SystemClock;
import android.support.test.espresso.web.webdriver.Locator;
import android.support.test.runner.AndroidJUnit4;
import android.test.suitebuilder.annotation.LargeTest;
import android.test.suitebuilder.annotation.MediumTest;
import com.braintreepayments.api.exceptions.AuthorizationException;
import com.braintreepayments.api.interfaces.BraintreeCancelListener;
import com.braintreepayments.api.interfaces.BraintreeErrorListener;
import com.braintreepayments.api.interfaces.PaymentMethodNonceCreatedListener;
import com.braintreepayments.api.models.CardBuilder;
import com.braintreepayments.api.models.CardNonce;
import com.braintreepayments.api.models.PaymentMethodNonce;
import com.braintreepayments.api.test.TestActivity;
import com.braintreepayments.testutils.BraintreeActivityTestRule;
import com.braintreepayments.testutils.TestClientTokenBuilder;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.Espresso.pressBack;
import static android.support.test.espresso.action.ViewActions.click;
import static android.support.test.espresso.matcher.ViewMatchers.withContentDescription;
import static android.support.test.espresso.web.sugar.Web.onWebView;
import static android.support.test.espresso.web.webdriver.DriverAtoms.findElement;
import static android.support.test.espresso.web.webdriver.DriverAtoms.webClick;
import static android.support.test.espresso.web.webdriver.DriverAtoms.webKeys;
import static com.braintreepayments.testutils.TestTokenizationKey.TOKENIZATION_KEY;
import static com.braintreepayments.testutils.ui.Matchers.withId;
import static com.braintreepayments.testutils.ui.ViewHelper.waitForView;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
@RunWith(AndroidJUnit4.class)
public class ThreeDSecureVerificationTest {
private static final String TEST_AMOUNT = "1";
@Rule
public final BraintreeActivityTestRule<TestActivity> mActivityTestRule =
new BraintreeActivityTestRule<>(TestActivity.class);
private Activity mActivity;
private CountDownLatch mCountDownLatch;
@Before
public void setUp() {
mActivity = mActivityTestRule.getActivity();
mCountDownLatch = new CountDownLatch(1);
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_callsCancelListenerWhenUpIsPressed()
throws InterruptedException {
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeCancelListener() {
@Override
public void onCancel(int requestCode) {
assertEquals(ThreeDSecure.THREE_D_SECURE_REQUEST_CODE, requestCode);
mCountDownLatch.countDown();
}
});
ThreeDSecure.performVerification(getFragment(), cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onView(withContentDescription("Navigate up")).perform(click());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_callsCancelListenerWhenBackIsPressedOnFirstPage()
throws InterruptedException {
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeCancelListener() {
@Override
public void onCancel(int requestCode) {
assertEquals(ThreeDSecure.THREE_D_SECURE_REQUEST_CODE, requestCode);
mCountDownLatch.countDown();
}
});
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
pressBack();
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_callsCancelListenerWhenUserGoesOnePageDeepAndPressesBack()
throws InterruptedException {
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeCancelListener() {
@Override
public void onCancel(int requestCode) {
assertEquals(ThreeDSecure.THREE_D_SECURE_REQUEST_CODE, requestCode);
mCountDownLatch.countDown();
}
});
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.LINK_TEXT, "New User / Forgot your password?"))
.perform(webClick());
SystemClock.sleep(2000);
pressBack();
SystemClock.sleep(2000);
pressBack();
pressBack();
mCountDownLatch.await();
}
@Test(timeout = 10000)
@MediumTest
public void performVerification_doesALookupAndReturnsACardAndANullACSUrlWhenAuthenticationIsNotRequired()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
CardNonce cardNonce = (CardNonce) paymentMethodNonce;
assertEquals("51", cardNonce.getLastTwo());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShifted());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShiftPossible());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000051")
.expirationDate("12/20");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 10000)
@MediumTest
public void performVerification_failsWithATokenizationKey() throws InterruptedException {
BraintreeFragment fragment = BraintreeFragmentTestUtils.getFragment(mActivity,
TOKENIZATION_KEY);
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertTrue(error instanceof AuthorizationException);
assertEquals(
"Client key authorization not allowed for this endpoint. Please use an authentication method with upgraded permissions",
error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000051")
.expirationDate("12/20");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 10000)
@MediumTest
public void performVerification_doesALookupAndReturnsACardWhenThereIsALookupError()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
assertEquals("77", ((CardNonce) paymentMethodNonce).getLastTwo());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000077")
.expirationDate("12/20");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_requestsAuthenticationWhenRequired()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
CardNonce cardNonce = (CardNonce) paymentMethodNonce;
assertEquals("02", cardNonce.getLastTwo());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShifted());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShiftPossible());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAnErrorWhenAuthenticationFails()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("Failed to authenticate, please try a different form of payment",
error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000028")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
onWebView().withElement(findElement(Locator.NAME, "Submit"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsASuccessfulAuthenticationWhenIssuerDoesNotParticipate()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
CardNonce cardNonce = (CardNonce) paymentMethodNonce;
assertEquals("01", cardNonce.getLastTwo());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShifted());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShiftPossible());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000101")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAFailedAuthenticationWhenSignatureVerificationFails()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("Failed to authenticate, please try a different form of payment",
error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000010")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAnUnexpectedErrorWhenIssuerIsDown() throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("An unexpected error occurred", error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000036")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "Submit"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAnErrorWhenCardinalReturnsError()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("An unexpected error occurred", error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000093")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
mCountDownLatch.await();
}
/* helpers */
private BraintreeFragment getFragment() {
String clientToken = new TestClientTokenBuilder().withThreeDSecure().build();
return BraintreeFragmentTestUtils.getFragment(mActivity, clientToken);
}
}
| BraintreeApi/src/androidTest/java/com/braintreepayments/api/ThreeDSecureVerificationTest.java | package com.braintreepayments.api;
import android.app.Activity;
import android.app.KeyguardManager;
import android.content.Context;
import android.os.SystemClock;
import android.support.test.espresso.web.webdriver.Locator;
import android.support.test.runner.AndroidJUnit4;
import android.test.suitebuilder.annotation.LargeTest;
import android.test.suitebuilder.annotation.MediumTest;
import com.braintreepayments.api.exceptions.AuthorizationException;
import com.braintreepayments.api.interfaces.BraintreeCancelListener;
import com.braintreepayments.api.interfaces.BraintreeErrorListener;
import com.braintreepayments.api.interfaces.PaymentMethodNonceCreatedListener;
import com.braintreepayments.api.models.CardBuilder;
import com.braintreepayments.api.models.CardNonce;
import com.braintreepayments.api.models.PaymentMethodNonce;
import com.braintreepayments.testutils.BraintreeActivityTestRule;
import com.braintreepayments.api.test.TestActivity;
import com.braintreepayments.testutils.TestClientTokenBuilder;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import static android.support.test.InstrumentationRegistry.getTargetContext;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.Espresso.pressBack;
import static android.support.test.espresso.action.ViewActions.click;
import static android.support.test.espresso.matcher.ViewMatchers.withContentDescription;
import static android.support.test.espresso.web.sugar.Web.onWebView;
import static android.support.test.espresso.web.webdriver.DriverAtoms.findElement;
import static android.support.test.espresso.web.webdriver.DriverAtoms.webClick;
import static android.support.test.espresso.web.webdriver.DriverAtoms.webKeys;
import static com.braintreepayments.testutils.TestTokenizationKey.TOKENIZATION_KEY;
import static com.braintreepayments.testutils.ui.Matchers.withId;
import static com.braintreepayments.testutils.ui.ViewHelper.waitForView;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
@RunWith(AndroidJUnit4.class)
public class ThreeDSecureVerificationTest {
private static final String TEST_AMOUNT = "1";
@Rule
public final BraintreeActivityTestRule<TestActivity> mActivityTestRule =
new BraintreeActivityTestRule<>(TestActivity.class);
private Activity mActivity;
private CountDownLatch mCountDownLatch;
@Before
public void setUp() {
((KeyguardManager) getTargetContext().getSystemService(Context.KEYGUARD_SERVICE))
.newKeyguardLock("ThreeDSecureVerificationTest")
.disableKeyguard();
mActivity = mActivityTestRule.getActivity();
mCountDownLatch = new CountDownLatch(1);
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_callsCancelListenerWhenUpIsPressed()
throws InterruptedException {
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeCancelListener() {
@Override
public void onCancel(int requestCode) {
assertEquals(ThreeDSecure.THREE_D_SECURE_REQUEST_CODE, requestCode);
mCountDownLatch.countDown();
}
});
ThreeDSecure.performVerification(getFragment(), cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onView(withContentDescription("Navigate up")).perform(click());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_callsCancelListenerWhenBackIsPressedOnFirstPage()
throws InterruptedException {
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeCancelListener() {
@Override
public void onCancel(int requestCode) {
assertEquals(ThreeDSecure.THREE_D_SECURE_REQUEST_CODE, requestCode);
mCountDownLatch.countDown();
}
});
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
pressBack();
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_callsCancelListenerWhenUserGoesOnePageDeepAndPressesBack()
throws InterruptedException {
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeCancelListener() {
@Override
public void onCancel(int requestCode) {
assertEquals(ThreeDSecure.THREE_D_SECURE_REQUEST_CODE, requestCode);
mCountDownLatch.countDown();
}
});
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.LINK_TEXT, "New User / Forgot your password?"))
.perform(webClick());
SystemClock.sleep(2000);
pressBack();
SystemClock.sleep(2000);
pressBack();
pressBack();
mCountDownLatch.await();
}
@Test(timeout = 10000)
@MediumTest
public void performVerification_doesALookupAndReturnsACardAndANullACSUrlWhenAuthenticationIsNotRequired()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
CardNonce cardNonce = (CardNonce) paymentMethodNonce;
assertEquals("51", cardNonce.getLastTwo());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShifted());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShiftPossible());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000051")
.expirationDate("12/20");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 10000)
@MediumTest
public void performVerification_failsWithATokenizationKey() throws InterruptedException {
BraintreeFragment fragment = BraintreeFragmentTestUtils.getFragment(mActivity,
TOKENIZATION_KEY);
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertTrue(error instanceof AuthorizationException);
assertEquals(
"Client key authorization not allowed for this endpoint. Please use an authentication method with upgraded permissions",
error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000051")
.expirationDate("12/20");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 10000)
@MediumTest
public void performVerification_doesALookupAndReturnsACardWhenThereIsALookupError()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
assertEquals("77", ((CardNonce) paymentMethodNonce).getLastTwo());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000077")
.expirationDate("12/20");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_requestsAuthenticationWhenRequired()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
CardNonce cardNonce = (CardNonce) paymentMethodNonce;
assertEquals("02", cardNonce.getLastTwo());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShifted());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShiftPossible());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000002")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAnErrorWhenAuthenticationFails()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("Failed to authenticate, please try a different form of payment",
error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000028")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
onWebView().withElement(findElement(Locator.NAME, "Submit"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsASuccessfulAuthenticationWhenIssuerDoesNotParticipate()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new PaymentMethodNonceCreatedListener() {
@Override
public void onPaymentMethodNonceCreated(PaymentMethodNonce paymentMethodNonce) {
CardNonce cardNonce = (CardNonce) paymentMethodNonce;
assertEquals("01", cardNonce.getLastTwo());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShifted());
assertTrue(cardNonce.getThreeDSecureInfo().isLiabilityShiftPossible());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000101")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAFailedAuthenticationWhenSignatureVerificationFails()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("Failed to authenticate, please try a different form of payment",
error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000010")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAnUnexpectedErrorWhenIssuerIsDown() throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("An unexpected error occurred", error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000036")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "Submit"))
.perform(webClick());
mCountDownLatch.await();
}
@Test(timeout = 30000)
@LargeTest
public void performVerification_returnsAnErrorWhenCardinalReturnsError()
throws InterruptedException {
BraintreeFragment fragment = getFragment();
fragment.addListener(new BraintreeErrorListener() {
@Override
public void onError(Exception error) {
assertEquals("An unexpected error occurred", error.getMessage());
mCountDownLatch.countDown();
}
});
CardBuilder cardBuilder = new CardBuilder()
.cardNumber("4000000000000093")
.expirationDate("12/30");
ThreeDSecure.performVerification(fragment, cardBuilder, TEST_AMOUNT);
waitForView(withId(android.R.id.widget_frame));
onWebView().withElement(findElement(Locator.NAME, "external.field.password"))
.perform(webKeys("1234"));
onWebView().withElement(findElement(Locator.NAME, "UsernamePasswordEntry"))
.perform(webClick());
mCountDownLatch.await();
}
/* helpers */
private BraintreeFragment getFragment() {
String clientToken = new TestClientTokenBuilder().withThreeDSecure().build();
return BraintreeFragmentTestUtils.getFragment(mActivity, clientToken);
}
}
| Remove unnecessary call to disable keyguard in test
| BraintreeApi/src/androidTest/java/com/braintreepayments/api/ThreeDSecureVerificationTest.java | Remove unnecessary call to disable keyguard in test | <ide><path>raintreeApi/src/androidTest/java/com/braintreepayments/api/ThreeDSecureVerificationTest.java
<ide> package com.braintreepayments.api;
<ide>
<ide> import android.app.Activity;
<del>import android.app.KeyguardManager;
<del>import android.content.Context;
<ide> import android.os.SystemClock;
<ide> import android.support.test.espresso.web.webdriver.Locator;
<ide> import android.support.test.runner.AndroidJUnit4;
<ide> import com.braintreepayments.api.models.CardBuilder;
<ide> import com.braintreepayments.api.models.CardNonce;
<ide> import com.braintreepayments.api.models.PaymentMethodNonce;
<add>import com.braintreepayments.api.test.TestActivity;
<ide> import com.braintreepayments.testutils.BraintreeActivityTestRule;
<del>import com.braintreepayments.api.test.TestActivity;
<ide> import com.braintreepayments.testutils.TestClientTokenBuilder;
<ide>
<ide> import org.junit.Before;
<ide>
<ide> import java.util.concurrent.CountDownLatch;
<ide>
<del>import static android.support.test.InstrumentationRegistry.getTargetContext;
<ide> import static android.support.test.espresso.Espresso.onView;
<ide> import static android.support.test.espresso.Espresso.pressBack;
<ide> import static android.support.test.espresso.action.ViewActions.click;
<ide>
<ide> @Before
<ide> public void setUp() {
<del> ((KeyguardManager) getTargetContext().getSystemService(Context.KEYGUARD_SERVICE))
<del> .newKeyguardLock("ThreeDSecureVerificationTest")
<del> .disableKeyguard();
<del>
<ide> mActivity = mActivityTestRule.getActivity();
<ide> mCountDownLatch = new CountDownLatch(1);
<ide> } |
|
Java | mit | error: pathspec 'src/main/java/org/zalando/logbook/Obfuscator.java' did not match any file(s) known to git
| dac9da328d09923028c7e84a26a0f2e4f565aee4 | 1 | zalando/logbook,zalando/logbook,zalando/logbook | package org.zalando.logbook;
import java.util.Objects;
import java.util.function.BiPredicate;
import java.util.function.Predicate;
// TODO implement
// TODO find a nice way for body obfuscation
@FunctionalInterface
public interface Obfuscator {

    /**
     * Maps a key/value pair to the value that should be exposed, e.g. in log output.
     *
     * @param key the name the value is associated with, e.g. a header name
     * @param value the raw value
     * @return the (possibly replaced) value to expose
     */
    String obfuscate(final String key, final String value);

    /**
     * @return an obfuscator that exposes every value unchanged
     */
    static Obfuscator none() {
        return (key, value) -> value;
    }

    /**
     * @param keyPredicate matches keys whose values must be hidden
     * @param replacement the substitute value used for matching keys
     * @return an obfuscator replacing the value whenever the key matches
     */
    static Obfuscator obfuscate(final Predicate<String> keyPredicate, final String replacement) {
        return (key, value) -> {
            if (keyPredicate.test(key)) {
                return replacement;
            }
            return value;
        };
    }

    /**
     * @param predicate matches key/value pairs whose values must be hidden
     * @param replacement the substitute value used for matching pairs
     * @return an obfuscator replacing the value whenever the pair matches
     */
    static Obfuscator obfuscate(final BiPredicate<String, String> predicate, final String replacement) {
        return (key, value) -> {
            if (predicate.test(key, value)) {
                return replacement;
            }
            return value;
        };
    }

    /**
     * Chains several obfuscators; the first one that actually changes the value wins.
     *
     * @param obfuscators the candidates, consulted in order
     * @return an obfuscator delegating to the first candidate that alters the value,
     *         or exposing the original value when none of them does
     */
    static Obfuscator compound(final Obfuscator... obfuscators) {
        return (key, value) -> {
            for (final Obfuscator delegate : obfuscators) {
                final String candidate = delegate.obfuscate(key, value);
                if (Objects.equals(candidate, value)) {
                    continue;
                }
                return candidate;
            }
            return value;
        };
    }

    /**
     * @return an obfuscator masking {@code Authorization} header values (case-insensitive) with {@code XXX}
     */
    static Obfuscator authorization() {
        return obfuscate("Authorization"::equalsIgnoreCase, "XXX");
    }

}
| src/main/java/org/zalando/logbook/Obfuscator.java | Added obfuscator interface
| src/main/java/org/zalando/logbook/Obfuscator.java | Added obfuscator interface | <ide><path>rc/main/java/org/zalando/logbook/Obfuscator.java
<add>package org.zalando.logbook;
<add>
<add>import java.util.Objects;
<add>import java.util.function.BiPredicate;
<add>import java.util.function.Predicate;
<add>
<add>// TODO implement
<add>// TODO find a nice way for body obfuscation
<add>@FunctionalInterface
<add>public interface Obfuscator {
<add>
<add> String obfuscate(final String key, final String value);
<add>
<add> static Obfuscator none() {
<add> return (key, value) -> value;
<add> }
<add>
<add> static Obfuscator obfuscate(final Predicate<String> keyPredicate, final String replacement) {
<add> return (key, value) -> keyPredicate.test(key) ? replacement : value;
<add> }
<add>
<add> static Obfuscator obfuscate(final BiPredicate<String, String> predicate, final String replacement) {
<add> return (key, value) -> predicate.test(key, value) ? replacement : value;
<add> }
<add>
<add> static Obfuscator compound(final Obfuscator... obfuscators) {
<add> return (key, value) -> {
<add> for (Obfuscator obfuscator : obfuscators) {
<add> final String replacement = obfuscator.obfuscate(key, value);
<add> if (!Objects.equals(replacement, value)) {
<add> return replacement;
<add> }
<add> }
<add>
<add> return value;
<add> };
<add> }
<add>
<add> static Obfuscator authorization() {
<add> return obfuscate("Authorization"::equalsIgnoreCase, "XXX");
<add> }
<add>
<add>} |
|
Java | mit | 159ed5ef8b069329031784a3a185651c0ad9a465 | 0 | SpongePowered/Sponge,SpongePowered/SpongeCommon,clienthax/SpongeCommon,Grinch/SpongeCommon,clienthax/SpongeCommon,SpongePowered/SpongeCommon,kenzierocks/SpongeCommon,DDoS/SpongeCommon,Grinch/SpongeCommon,SpongePowered/Sponge,kenzierocks/SpongeCommon,JBYoshi/SpongeCommon,SpongePowered/Sponge,sanman00/SpongeCommon,DDoS/SpongeCommon,JBYoshi/SpongeCommon,sanman00/SpongeCommon | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.mixin.core.entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.ai.EntityAITasks;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.world.GameRules;
import net.minecraft.world.World;
import org.objectweb.asm.Opcodes;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.data.manipulator.DataManipulator;
import org.spongepowered.api.data.manipulator.mutable.entity.AgentData;
import org.spongepowered.api.data.value.mutable.Value;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.entity.ai.Goal;
import org.spongepowered.api.entity.ai.GoalType;
import org.spongepowered.api.entity.ai.GoalTypes;
import org.spongepowered.api.entity.ai.task.AITask;
import org.spongepowered.api.entity.living.Agent;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.cause.NamedCause;
import org.spongepowered.api.event.entity.LeashEntityEvent;
import org.spongepowered.api.event.entity.UnleashEntityEvent;
import org.spongepowered.api.event.entity.ai.AITaskEvent;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Constant;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.ModifyConstant;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.data.manipulator.mutable.entity.SpongeAgentData;
import org.spongepowered.common.data.value.mutable.SpongeValue;
import org.spongepowered.common.interfaces.ai.IMixinEntityAIBase;
import org.spongepowered.common.interfaces.ai.IMixinEntityAITasks;
import org.spongepowered.common.interfaces.entity.IMixinEntity;
import org.spongepowered.common.interfaces.entity.IMixinGriefer;
import org.spongepowered.common.interfaces.world.IMixinWorld;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
/**
 * Mixin into vanilla {@code EntityLiving} implementing the SpongeAPI {@code Agent}
 * interface: exposes the entity's AI goal/task lists, fires leash/unleash and
 * AI-task events, and substitutes Sponge's per-world despawn configuration for
 * vanilla's hard-coded despawn constants.
 */
@Mixin(EntityLiving.class)
public abstract class MixinEntityLiving extends MixinEntityLivingBase implements Agent {

    // Vanilla members made accessible to this mixin via @Shadow.
    @Shadow @Final private EntityAITasks tasks;
    @Shadow @Final private EntityAITasks targetTasks;
    @Shadow private boolean canPickUpLoot;
    @Shadow @Nullable private EntityLivingBase attackTarget;
    @Shadow public abstract boolean isAIDisabled();
    @Shadow protected abstract void setNoAI(boolean p_94061_1_);
    @Shadow @Nullable public abstract net.minecraft.entity.Entity getLeashedToEntity();
    @Shadow public abstract void setLeashedToEntity(net.minecraft.entity.Entity entityIn, boolean sendAttachNotification);
    @Shadow public abstract ItemStack getHeldItem();
    @Shadow protected abstract boolean canDespawn();

    // Runs at the end of EntityLiving's constructor: tags both AI task lists with
    // their owning entity and their Sponge goal type (NORMAL vs TARGET).
    @Inject(method = "<init>", at = @At(value = "RETURN"))
    public void onConstruct(CallbackInfo ci) {
        ((IMixinEntityAITasks) this.tasks).setOwner((EntityLiving) (Object) this);
        ((IMixinEntityAITasks) this.tasks).setType(GoalTypes.NORMAL);
        ((IMixinEntityAITasks) this.targetTasks).setOwner((EntityLiving) (Object) this);
        ((IMixinEntityAITasks) this.targetTasks).setType(GoalTypes.TARGET);
    }

    // Fires the AITaskEvent.Add notifications that were deferred until construction completed.
    @Override
    public void firePostConstructEvents() {
        super.firePostConstructEvents();
        handleDelayedTaskEventFiring((IMixinEntityAITasks) this.tasks);
        handleDelayedTaskEventFiring((IMixinEntityAITasks) this.targetTasks);
    }

    // Posts an AITaskEvent.Add for every task already registered on the given goal.
    // Cancelled tasks are detached from their goal and removed from the task list.
    @SuppressWarnings("unchecked")
    private void handleDelayedTaskEventFiring(IMixinEntityAITasks tasks) {
        Iterator<EntityAITasks.EntityAITaskEntry> taskItr = tasks.getTasksUnsafe().iterator();
        while (taskItr.hasNext()) {
            EntityAITasks.EntityAITaskEntry task = taskItr.next();
            final AITaskEvent.Add event = SpongeEventFactory.createAITaskEventAdd(Cause.of(NamedCause.source(Sponge.getGame())),
                    task.priority, task.priority, (Goal<? extends Agent>) tasks, this, (AITask<?>) task.action);
            SpongeImpl.postEvent(event);
            if (event.isCancelled()) {
                ((IMixinEntityAIBase) task.action).setGoal(null);
                taskItr.remove();
            }
        }
    }

    // Fires a cancellable LeashEntityEvent just before vanilla attaches the lead
    // (server side only). Cancelling the event aborts the vanilla interaction.
    @Inject(method = "interactFirst", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/EntityLiving;setLeashedToEntity(Lnet/minecraft/entity/Entity;Z)V"), locals = LocalCapture.CAPTURE_FAILEXCEPTION, cancellable = true)
    public void callLeashEvent(EntityPlayer playerIn, CallbackInfoReturnable<Boolean> ci, ItemStack itemstack) {
        if (!playerIn.worldObj.isRemote) {
            Entity leashedEntity = this;
            final LeashEntityEvent event = SpongeEventFactory.createLeashEntityEvent(Cause.of(NamedCause.source(playerIn)), leashedEntity);
            SpongeImpl.postEvent(event);
            if(event.isCancelled()) {
                ci.cancel();
            }
        }
    }

    // Fires a cancellable UnleashEntityEvent right before vanilla clears the leash
    // flag (server side only). The cause is the leash holder, or this entity itself
    // when no holder exists.
    @Inject(method = "clearLeashed", at = @At(value = "FIELD", target = "Lnet/minecraft/entity/EntityLiving;isLeashed:Z", opcode = Opcodes.PUTFIELD), cancellable = true)
    public void callUnleashEvent(boolean sendPacket, boolean dropLead, CallbackInfo ci) {
        net.minecraft.entity.Entity entity = getLeashedToEntity();
        if (!this.worldObj.isRemote) {
            UnleashEntityEvent event = SpongeEventFactory.createUnleashEntityEvent(entity == null ? Cause.of(NamedCause.of("Self", this))
                    : Cause.of(NamedCause.source(entity)), this);
            SpongeImpl.postEvent(event);
            if(event.isCancelled()) {
                ci.cancel();
            }
        }
    }

    // Maps a Sponge goal type onto the matching vanilla task list; empty for
    // unknown types.
    @SuppressWarnings("unchecked")
    @Override
    public <T extends Agent> Optional<Goal<T>> getGoal(GoalType type) {
        if (GoalTypes.NORMAL.equals(type)) {
            return Optional.of((Goal<T>) this.tasks);
        } else if (GoalTypes.TARGET.equals(type)) {
            return Optional.of((Goal<T>) this.targetTasks);
        }
        return Optional.empty();
    }

    // Replaces vanilla's hard-coded 16384 (= 128^2) hard-despawn distance with the
    // squared value from the world config (server side only).
    @ModifyConstant(method = "despawnEntity", constant = @Constant(doubleValue = 16384.0D))
    private double getHardDespawnRange(double value) {
        if (!this.worldObj.isRemote) {
            return Math.pow(((IMixinWorld) this.worldObj).getWorldConfig().getConfig().getEntity().getHardDespawnRange(), 2);
        }
        return value;
    }

    // Replaces vanilla's 1024 (= 32^2) soft-despawn distance with the squared
    // config value (server side only).
    // Note that this should inject twice.
    @ModifyConstant(method = "despawnEntity", constant = @Constant(doubleValue = 1024.0D), expect = 2)
    private double getSoftDespawnRange(double value) {
        if (!this.worldObj.isRemote) {
            return Math.pow(((IMixinWorld) this.worldObj).getWorldConfig().getConfig().getEntity().getSoftDespawnRange(), 2);
        }
        return value;
    }

    // Replaces vanilla's 600-tick minimum age before despawning with the configured
    // minimum life (seconds, converted to ticks at 20 ticks/second).
    @ModifyConstant(method = "despawnEntity", constant = @Constant(intValue = 600))
    private int getMinimumLifetime(int value) {
        if (!this.worldObj.isRemote) {
            return ((IMixinWorld) this.worldObj).getWorldConfig().getConfig().getEntity().getMinimumLife() * 20;
        }
        return value;
    }

    // Redirects the despawn check's closest-player lookup to the variant that only
    // considers players who affect mob spawning.
    @Nullable
    @Redirect(method = "despawnEntity", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/World;getClosestPlayerToEntity(Lnet/minecraft/entity/Entity;D)Lnet/minecraft/entity/player/EntityPlayer;"))
    public EntityPlayer onDespawnEntity(World world, net.minecraft.entity.Entity entity, double distance) {
        return ((IMixinWorld) world).getClosestPlayerToEntityWhoAffectsSpawning(entity, distance);
    }

    @Override
    public Optional<Entity> getTarget() {
        return Optional.ofNullable((Entity) this.attackTarget);
    }

    // Accepts only living targets; anything else clears the attack target.
    @Override
    public void setTarget(@Nullable Entity target) {
        if (target instanceof EntityLivingBase) {
            this.attackTarget = (EntityLivingBase) target;
        } else {
            this.attackTarget = null;
        }
    }

    /**
     * @author gabizou - January 4th, 2016
     *
     * This is to instill the check that if the entity is invisible, check whether they're untargetable
     * as well.
     *
     * @param entitylivingbaseIn The entity living base coming in
     */
    @Inject(method = "setAttackTarget", at = @At("HEAD"), cancellable = true)
    public void onSetAttackTarget(@Nullable EntityLivingBase entitylivingbaseIn, CallbackInfo ci) {
        if (entitylivingbaseIn != null && ((IMixinEntity) entitylivingbaseIn).isVanished()
                && ((IMixinEntity) entitylivingbaseIn).isUntargetable()) {
            this.attackTarget = null;
            ci.cancel();
        }
    }

    /**
     * @author gabizou - January 4th, 2016
     * @reason This will still check if the current attack target
     * is invisible and is untargetable.
     *
     * @return The current attack target, if not null
     */
    @Nullable
    @Overwrite
    public EntityLivingBase getAttackTarget() {
        if (this.attackTarget != null) {
            if (((IMixinEntity) this.attackTarget).isVanished() && ((IMixinEntity) this.attackTarget).isUntargetable()) {
                this.attackTarget = null;
            }
        }
        return this.attackTarget;
    }

    // Honors Sponge's per-entity grief permission in addition to the vanilla
    // mobGriefing game rule during onLivingUpdate.
    @Redirect(method = "onLivingUpdate", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/GameRules;getBoolean(Ljava/lang/String;)Z"))
    private boolean onCanGrief(GameRules gameRules, String rule) {
        return gameRules.getBoolean(rule) && ((IMixinGriefer) this).canGrief();
    }

    // Data delegated methods

    @Override
    public AgentData getAgentData() {
        return new SpongeAgentData(!this.isAIDisabled());
    }

    @Override
    public Value<Boolean> aiEnabled() {
        return new SpongeValue<>(Keys.AI_ENABLED, true, !this.isAIDisabled());
    }

    // Contributes this entity's AgentData to the data manipulators supplied to callers.
    @Override
    public void supplyVanillaManipulators(List<DataManipulator<?, ?>> manipulators) {
        super.supplyVanillaManipulators(manipulators);
        manipulators.add(getAgentData());
    }
}
| src/main/java/org/spongepowered/common/mixin/core/entity/MixinEntityLiving.java | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.mixin.core.entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.ai.EntityAITasks;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.world.GameRules;
import net.minecraft.world.World;
import org.objectweb.asm.Opcodes;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.data.manipulator.DataManipulator;
import org.spongepowered.api.data.manipulator.mutable.entity.AgentData;
import org.spongepowered.api.data.value.mutable.Value;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.entity.ai.Goal;
import org.spongepowered.api.entity.ai.GoalType;
import org.spongepowered.api.entity.ai.GoalTypes;
import org.spongepowered.api.entity.ai.task.AITask;
import org.spongepowered.api.entity.living.Agent;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.cause.NamedCause;
import org.spongepowered.api.event.entity.LeashEntityEvent;
import org.spongepowered.api.event.entity.UnleashEntityEvent;
import org.spongepowered.api.event.entity.ai.AITaskEvent;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Constant;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.ModifyConstant;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.data.manipulator.mutable.entity.SpongeAgentData;
import org.spongepowered.common.data.value.mutable.SpongeValue;
import org.spongepowered.common.interfaces.ai.IMixinEntityAIBase;
import org.spongepowered.common.interfaces.ai.IMixinEntityAITasks;
import org.spongepowered.common.interfaces.entity.IMixinEntity;
import org.spongepowered.common.interfaces.entity.IMixinGriefer;
import org.spongepowered.common.interfaces.world.IMixinWorld;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
@Mixin(EntityLiving.class)
public abstract class MixinEntityLiving extends MixinEntityLivingBase implements Agent {
private static final String WORLD_FIELD = "Lnet/minecraft/entity/EntityLiving;worldObj:Lnet/minecraft/world/World;";
@Shadow @Final private EntityAITasks tasks;
@Shadow @Final private EntityAITasks targetTasks;
@Shadow private boolean canPickUpLoot;
@Shadow @Nullable private EntityLivingBase attackTarget;
@Shadow public abstract boolean isAIDisabled();
@Shadow protected abstract void setNoAI(boolean p_94061_1_);
@Shadow @Nullable public abstract net.minecraft.entity.Entity getLeashedToEntity();
@Shadow public abstract void setLeashedToEntity(net.minecraft.entity.Entity entityIn, boolean sendAttachNotification);
@Shadow public abstract ItemStack getHeldItem();
@Shadow protected abstract boolean canDespawn();
@Inject(method = "<init>", at = @At(value = "RETURN"))
public void onConstruct(CallbackInfo ci) {
((IMixinEntityAITasks) this.tasks).setOwner((EntityLiving) (Object) this);
((IMixinEntityAITasks) this.tasks).setType(GoalTypes.NORMAL);
((IMixinEntityAITasks) this.targetTasks).setOwner((EntityLiving) (Object) this);
((IMixinEntityAITasks) this.targetTasks).setType(GoalTypes.TARGET);
}
@Override
public void firePostConstructEvents() {
super.firePostConstructEvents();
handleDelayedTaskEventFiring((IMixinEntityAITasks) this.tasks);
handleDelayedTaskEventFiring((IMixinEntityAITasks) this.targetTasks);
}
@SuppressWarnings("unchecked")
private void handleDelayedTaskEventFiring(IMixinEntityAITasks tasks) {
Iterator<EntityAITasks.EntityAITaskEntry> taskItr = tasks.getTasksUnsafe().iterator();
while (taskItr.hasNext()) {
EntityAITasks.EntityAITaskEntry task = taskItr.next();
final AITaskEvent.Add event = SpongeEventFactory.createAITaskEventAdd(Cause.of(NamedCause.source(Sponge.getGame())),
task.priority, task.priority, (Goal<? extends Agent>) tasks, this, (AITask<?>) task.action);
SpongeImpl.postEvent(event);
if (event.isCancelled()) {
((IMixinEntityAIBase) task.action).setGoal(null);
taskItr.remove();
}
}
}
@Inject(method = "interactFirst", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/EntityLiving;setLeashedToEntity(Lnet/minecraft/entity/Entity;Z)V"), locals = LocalCapture.CAPTURE_FAILEXCEPTION, cancellable = true)
public void callLeashEvent(EntityPlayer playerIn, CallbackInfoReturnable<Boolean> ci, ItemStack itemstack) {
if (!playerIn.worldObj.isRemote) {
Entity leashedEntity = this;
final LeashEntityEvent event = SpongeEventFactory.createLeashEntityEvent(Cause.of(NamedCause.source(playerIn)), leashedEntity);
SpongeImpl.postEvent(event);
if(event.isCancelled()) {
ci.cancel();
}
}
}
@Inject(method = "clearLeashed", at = @At(value = "FIELD", target = "Lnet/minecraft/entity/EntityLiving;isLeashed:Z", opcode = Opcodes.PUTFIELD), cancellable = true)
public void callUnleashEvent(boolean sendPacket, boolean dropLead, CallbackInfo ci) {
net.minecraft.entity.Entity entity = getLeashedToEntity();
if (!this.worldObj.isRemote) {
UnleashEntityEvent event = SpongeEventFactory.createUnleashEntityEvent(entity == null ? Cause.of(NamedCause.of("Self", this))
: Cause.of(NamedCause.source(entity)), this);
SpongeImpl.postEvent(event);
if(event.isCancelled()) {
ci.cancel();
}
}
}
@SuppressWarnings("unchecked")
@Override
public <T extends Agent> Optional<Goal<T>> getGoal(GoalType type) {
if (GoalTypes.NORMAL.equals(type)) {
return Optional.of((Goal<T>) this.tasks);
} else if (GoalTypes.TARGET.equals(type)) {
return Optional.of((Goal<T>) this.targetTasks);
}
return Optional.empty();
}
@Inject(method = "despawnEntity", at = @At(value = "FIELD", target = WORLD_FIELD, ordinal = 0), cancellable = true)
private void checkCanDespawnBeforeGettingPlayer(CallbackInfo callbackInfo) {
if (!this.canDespawn()) {
callbackInfo.cancel();
}
}
@ModifyConstant(method = "despawnEntity", constant = @Constant(doubleValue = 16384.0D))
private double getHardDespawnRange(double value) {
if (!this.worldObj.isRemote) {
return Math.pow(((IMixinWorld) this.worldObj).getWorldConfig().getConfig().getEntity().getHardDespawnRange(), 2);
}
return value;
}
// Note that this should inject twice.
@ModifyConstant(method = "despawnEntity", constant = @Constant(doubleValue = 1024.0D), expect = 2)
private double getSoftDespawnRange(double value) {
if (!this.worldObj.isRemote) {
return Math.pow(((IMixinWorld) this.worldObj).getWorldConfig().getConfig().getEntity().getSoftDespawnRange(), 2);
}
return value;
}
@ModifyConstant(method = "despawnEntity", constant = @Constant(intValue = 600))
private int getMinimumLifetime(int value) {
if (!this.worldObj.isRemote) {
return ((IMixinWorld) this.worldObj).getWorldConfig().getConfig().getEntity().getMinimumLife() * 20;
}
return value;
}
@Nullable
@Redirect(method = "despawnEntity", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/World;getClosestPlayerToEntity(Lnet/minecraft/entity/Entity;D)Lnet/minecraft/entity/player/EntityPlayer;"))
public EntityPlayer onDespawnEntity(World world, net.minecraft.entity.Entity entity, double distance) {
return ((IMixinWorld) world).getClosestPlayerToEntityWhoAffectsSpawning(entity, distance);
}
@Override
public Optional<Entity> getTarget() {
return Optional.ofNullable((Entity) this.attackTarget);
}
@Override
public void setTarget(@Nullable Entity target) {
if (target instanceof EntityLivingBase) {
this.attackTarget = (EntityLivingBase) target;
} else {
this.attackTarget = null;
}
}
/**
* @author gabizou - January 4th, 2016
*
* This is to instill the check that if the entity is invisible, check whether they're untargetable
* as well.
*
* @param entitylivingbaseIn The entity living base coming in
*/
@Inject(method = "setAttackTarget", at = @At("HEAD"), cancellable = true)
public void onSetAttackTarget(@Nullable EntityLivingBase entitylivingbaseIn, CallbackInfo ci) {
if (entitylivingbaseIn != null && ((IMixinEntity) entitylivingbaseIn).isVanished()
&& ((IMixinEntity) entitylivingbaseIn).isUntargetable()) {
this.attackTarget = null;
ci.cancel();
}
}
/**
* @author gabizou - January 4th, 2016
* @reason This will still check if the current attack target
* is invisible and is untargetable.
*
* @return The current attack target, if not null
*/
@Nullable
@Overwrite
public EntityLivingBase getAttackTarget() {
if (this.attackTarget != null) {
if (((IMixinEntity) this.attackTarget).isVanished() && ((IMixinEntity) this.attackTarget).isUntargetable()) {
this.attackTarget = null;
}
}
return this.attackTarget;
}
@Redirect(method = "onLivingUpdate", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/GameRules;getBoolean(Ljava/lang/String;)Z"))
private boolean onCanGrief(GameRules gameRules, String rule) {
return gameRules.getBoolean(rule) && ((IMixinGriefer) this).canGrief();
}
// Data delegated methods
@Override
public AgentData getAgentData() {
return new SpongeAgentData(!this.isAIDisabled());
}
@Override
public Value<Boolean> aiEnabled() {
return new SpongeValue<>(Keys.AI_ENABLED, true, !this.isAIDisabled());
}
@Override
public void supplyVanillaManipulators(List<DataManipulator<?, ?>> manipulators) {
super.supplyVanillaManipulators(manipulators);
manipulators.add(getAgentData());
}
}
| Fix living entities not moving.
| src/main/java/org/spongepowered/common/mixin/core/entity/MixinEntityLiving.java | Fix living entities not moving. | <ide><path>rc/main/java/org/spongepowered/common/mixin/core/entity/MixinEntityLiving.java
<ide> @Mixin(EntityLiving.class)
<ide> public abstract class MixinEntityLiving extends MixinEntityLivingBase implements Agent {
<ide>
<del> private static final String WORLD_FIELD = "Lnet/minecraft/entity/EntityLiving;worldObj:Lnet/minecraft/world/World;";
<ide> @Shadow @Final private EntityAITasks tasks;
<ide> @Shadow @Final private EntityAITasks targetTasks;
<ide> @Shadow private boolean canPickUpLoot;
<ide> return Optional.of((Goal<T>) this.targetTasks);
<ide> }
<ide> return Optional.empty();
<del> }
<del>
<del> @Inject(method = "despawnEntity", at = @At(value = "FIELD", target = WORLD_FIELD, ordinal = 0), cancellable = true)
<del> private void checkCanDespawnBeforeGettingPlayer(CallbackInfo callbackInfo) {
<del> if (!this.canDespawn()) {
<del> callbackInfo.cancel();
<del> }
<ide> }
<ide>
<ide> @ModifyConstant(method = "despawnEntity", constant = @Constant(doubleValue = 16384.0D)) |
|
JavaScript | agpl-3.0 | c18a5c4a33eca4997e8304f8c3faebfff2791415 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 72e24740-2e63-11e5-9284-b827eb9e62be | helloWorld.js | 72dc8ec2-2e63-11e5-9284-b827eb9e62be | 72e24740-2e63-11e5-9284-b827eb9e62be | helloWorld.js | 72e24740-2e63-11e5-9284-b827eb9e62be | <ide><path>elloWorld.js
<del>72dc8ec2-2e63-11e5-9284-b827eb9e62be
<add>72e24740-2e63-11e5-9284-b827eb9e62be |
|
JavaScript | mit | dd70ab33889a64630a271c92ded7f29ef9e773dc | 0 | ColorfulCakeChen/query-submit-canvas,ColorfulCakeChen/query-submit-canvas | export { Base, Root };
import * as Pool from "../Pool.js";
/**
* The base class representing a object could be recycled (i.e. disposed without release its main object memory for re-using in the
* future).
*
* Every sub-class of this Recyclable.Base MUST define a static propery named Pool which is usually an instance of Pool.Base:
* <pre>
* class SomeClass extends Recyclable.Base {
*
* static Pool = new XxxPool();
*
* }
* </pre>
*
* Or,
* <pre>
* class SomeClass extends Recyclable.Base {
*
* ...
*
* }
*
* SomeClass.Pool = new XxxPool();
*
* </pre>
*
*
*/
let Base = ( ParentClass = Object ) => class Base extends ParentClass {

  /**
   * Dispose this object (when it defines disposeResources()) and then hand it
   * back to the class's static Pool for later re-use.
   *
   * Steps, in order:
   *   1. this.disposeResources() — only when the sub-class provides one.
   *   2. this.constructor.Pool.recycle( this )
   *
   * Sub-classes must NOT override this method (and must never call
   * super.disposeResources_and_recycleToPool()). After this call, treat the
   * object as dead: it must not be operated on again.
   */
  disposeResources_and_recycleToPool() {
    const disposer = this.disposeResources;
    if ( disposer instanceof Function ) {
      // Release held resources before the object goes back into the pool.
      this.disposeResources();
    }
    this.constructor.Pool.recycle( this );
  }
}
/**
* Almost the same as Recyclable.Base class except its parent class is fixed to Object. In other words, caller can not specify the
* parent class of Recyclable.Root (so it is named "Root" which can not have parent class).
*/
// Concrete recyclable whose parent class is fixed to Object (hence "Root":
// it cannot be given another parent; use Base( SomeClass ) for that).
class Root extends Base() {
}
| CNN/util/Recyclable/Recyclable_Base.js | export { Base, Root };
import * as Pool from "../Pool.js";
/**
* The base class representing a object could be recycled (i.e. disposed without release its main object memory for re-using in the
* future).
*
* Every sub-class of this Recyclable.Base MUST define a static propery named Pool which is usually an instance of Pool.Base:
* <pre>
* class SomeClass extends Recyclable.Base {
*
* static Pool = new XxxPool();
*
* }
* </pre>
*
* Or,
* <pre>
* class SomeClass extends Recyclable.Base {
*
* ...
*
* }
*
* SomeClass.Pool = new XxxPool();
*
* </pre>
*
*
*/
let Base = ( ParentClass = Object ) => class Base extends ParentClass {
/**
* This method will do the following in sequence:
* - call this.disposeResources() (if exists)
* - call this.constructor.Pool.recycle()
*
* Sub-class should NEVER override this method (so NEVER call super.disposeResources_and_recycleToPool()).
*
*
* After calling this method, this object should be viewed as disposed and should not be operated again.
*/
disposeResources_and_recycleToPool() {
if ( this.disposeResources instanceof Function ) { // If this object needs disposing, do it before being recyled.
this.disposeResources();
}
this.constructor.Pool.recycle( this );
}
}
/**
* Almost the same as Recyclable.Base class except its parent class is fixed to Object. In other words, caller can not specify the
* parent class of Recyclable.Root (so it is named "Root" which can not have parent class).
*/
class Root extends Base {
}
| Update Recyclable_Base.js | CNN/util/Recyclable/Recyclable_Base.js | Update Recyclable_Base.js | <ide><path>NN/util/Recyclable/Recyclable_Base.js
<ide> * Almost the same as Recyclable.Base class except its parent class is fixed to Object. In other words, caller can not specify the
<ide> * parent class of Recyclable.Root (so it is named "Root" which can not have parent class).
<ide> */
<del>class Root extends Base {
<add>class Root extends Base() {
<ide> }
<ide> |
|
Java | apache-2.0 | 153cab14101625739623a77d82cdb80329869783 | 0 | DenverM80/ds3_java_sdk,SpectraLogic/ds3_java_sdk,DenverM80/ds3_java_sdk,rpmoore/ds3_java_sdk,SpectraLogic/ds3_java_sdk,SpectraLogic/ds3_java_sdk,RachelTucker/ds3_java_sdk,rpmoore/ds3_java_sdk,rpmoore/ds3_java_sdk,DenverM80/ds3_java_sdk,DenverM80/ds3_java_sdk,RachelTucker/ds3_java_sdk,RachelTucker/ds3_java_sdk,RachelTucker/ds3_java_sdk,rpmoore/ds3_java_sdk,SpectraLogic/ds3_java_sdk | /*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
package com.spectralogic.ds3client.utils;
import com.google.common.escape.Escaper;
import com.google.common.net.PercentEscaper;
import com.spectralogic.ds3client.commands.interfaces.Ds3Request;
import java.util.Date;
/**
 * Safely converts a value to a string and percent-encodes it for use in
 * DS3 request paths and query parameter values. All methods are null-safe.
 */
public final class SafeStringManipulation {

    private static final String DS3_URL_PATH_FRAGMENT_SAFE_CHARS =
            "-._~" + // Google escaper URL_PATH_OTHER_SAFE_CHARS_LACKING_PLUS
            "!$'()*,&=" + // removed ; (so it will be escaped) and added / (so it will not)
            "@:/"; // Their urlFragmentEscaper uses URL_PATH_OTHER_SAFE_CHARS_LACKING_PLUS + "+/?"+

    /**
     * Specified as query safe characters in spec https://tools.ietf.org/html/rfc3986#section-3.4
     * with the following exceptions:
     *   Encoding: "&", ":", "+", "=", ";" as they have special meaning
     *   Not Encoding: "/"
     */
    private static final String DS3_QUERY_PARAM_SAFE_CHARS = "-._~!$'()*,@/";

    private static final Escaper DS3_URL_FRAGMENT_ESCAPER =
            new PercentEscaper(DS3_URL_PATH_FRAGMENT_SAFE_CHARS, false);

    private static final Escaper DS3_QUERY_PARAM_ESCAPER =
            new PercentEscaper(DS3_QUERY_PARAM_SAFE_CHARS, false);

    private SafeStringManipulation() {
        // utility class: no instances
    }

    /**
     * Percent encodes user-provided query parameter values.
     *
     * @param obj the value to encode; may be {@code null}
     * @return the encoded value, or {@code null} when {@code obj} is {@code null}
     */
    public static <T> String safeQueryParamEscape(final T obj) {
        if (obj == null) {
            return null;
        }
        return DS3_QUERY_PARAM_ESCAPER.escape(safeToString(obj));
    }

    /**
     * Percent encodes a value for use within a DS3 URL path fragment.
     *
     * @param obj the value to encode; may be {@code null}
     * @return the encoded value, or {@code null} when {@code obj} is {@code null}
     */
    public static <T> String safeUrlEscape(final T obj) {
        if (obj == null) {
            return null;
        }
        return getDs3Escaper().escape(safeToString(obj));
    }

    /**
     * Null-safe string conversion. {@link Date} values are rendered as their
     * epoch-millisecond timestamp; everything else uses {@code toString()}.
     *
     * @param obj the value to convert; may be {@code null}
     * @return the string form, or {@code null} when {@code obj} is {@code null}
     */
    public static <T> String safeToString(final T obj) {
        if (obj == null) {
            return null;
        }
        // The former obj.getClass().isPrimitive() branch was dead code: a reference
        // receiver is always a boxed type, so getClass() never returns a primitive
        // Class and isPrimitive() never returned true. Removing it changes nothing.
        if (obj instanceof Date) {
            return Long.toString(((Date) obj).getTime());
        }
        return obj.toString();
    }

    /**
     * @return the escaper used for DS3 URL path and query parameter value segments
     */
    public static Escaper getDs3Escaper() {
        return DS3_URL_FRAGMENT_ESCAPER;
    }

    /**
     * Escapes the path of the given request.
     *
     * @param request the request whose path should be escaped; may be {@code null}
     * @return the escaped path, or an empty string when the request or its path is {@code null}
     */
    public static String getEscapedRequestPath(final Ds3Request request) {
        if (request == null || request.getPath() == null) {
            return "";
        }
        return getDs3Escaper().escape(request.getPath());
    }
}
| ds3-sdk/src/main/java/com/spectralogic/ds3client/utils/SafeStringManipulation.java | /*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
package com.spectralogic.ds3client.utils;
import com.google.common.escape.Escaper;
import com.google.common.net.PercentEscaper;
import com.spectralogic.ds3client.commands.interfaces.Ds3Request;
import java.util.Date;
/**
* Safely converts a type to a string. This is used when adding
* objects to query params.
*/
public final class SafeStringManipulation {

    /**
     * Characters that remain unescaped in URL path/fragment segments. Mirrors
     * Guava's URL_PATH_OTHER_SAFE_CHARS_LACKING_PLUS, except ";" is escaped and
     * "/" is left alone.
     */
    private static final String URL_FRAGMENT_SAFE_CHARS = "-._~!$'()*,&=@:/";

    /**
     * Query-safe characters per https://tools.ietf.org/html/rfc3986#section-3.4,
     * except that "&", ":", "+" and "=" are escaped (they carry special meaning)
     * while "/" is not.
     */
    private static final String QUERY_PARAM_SAFE_CHARS = "-._~!$'()*,;@/";

    private static final Escaper URL_FRAGMENT_ESCAPER =
            new PercentEscaper(URL_FRAGMENT_SAFE_CHARS, false);

    private static final Escaper QUERY_PARAM_ESCAPER =
            new PercentEscaper(QUERY_PARAM_SAFE_CHARS, false);

    /** Utility class; never instantiated. */
    private SafeStringManipulation() {
    }

    /**
     * Percent encodes user-provided query parameter values.
     *
     * @param obj the value to encode; may be null
     * @return the encoded string, or null when {@code obj} is null
     */
    public static <T> String safeQueryParamEscape(final T obj) {
        final String asString = safeToString(obj);
        return asString == null ? null : QUERY_PARAM_ESCAPER.escape(asString);
    }

    /**
     * Percent encodes a value for use in a URL path segment.
     *
     * @param obj the value to encode; may be null
     * @return the encoded string, or null when {@code obj} is null
     */
    public static <T> String safeUrlEscape(final T obj) {
        final String asString = safeToString(obj);
        return asString == null ? null : getDs3Escaper().escape(asString);
    }

    /**
     * Null-safe string conversion; Dates become their epoch-millisecond value,
     * everything else falls back to {@link Object#toString()}.
     *
     * @param obj the value to convert; may be null
     * @return the string form, or null when {@code obj} is null
     */
    public static <T> String safeToString(final T obj) {
        if (obj == null) {
            return null;
        }
        if (obj.getClass().isPrimitive()) {
            return String.valueOf(obj);
        }
        return obj instanceof Date
                ? Long.toString(((Date) obj).getTime())
                : obj.toString();
    }

    /**
     * @return the escaper applied to DS3 path and query parameter value segments
     */
    public static Escaper getDs3Escaper() {
        return URL_FRAGMENT_ESCAPER;
    }

    /**
     * @return the escaped path of the request, or "" when the request or its path is null
     */
    public static String getEscapedRequestPath(final Ds3Request request) {
        return (request == null || request.getPath() == null)
                ? ""
                : getDs3Escaper().escape(request.getPath());
    }
}
| Updated query param encoding to encode semi-colon
| ds3-sdk/src/main/java/com/spectralogic/ds3client/utils/SafeStringManipulation.java | Updated query param encoding to encode semi-colon | <ide><path>s3-sdk/src/main/java/com/spectralogic/ds3client/utils/SafeStringManipulation.java
<ide> /**
<ide> * Specified as query safe characters in spec https://tools.ietf.org/html/rfc3986#section-3.4
<ide> * with the following exceptions:
<del> * Encoding: "&", ":", "+", "=" as they have special meaning
<add> * Encoding: "&", ":", "+", "=", ";" as they have special meaning
<ide> * Not Encoding: "/"
<ide> */
<del> private static final String DS3_QUERY_PARAM_SAFE_CHARS = "-._~!$'()*,;@/";
<add> private static final String DS3_QUERY_PARAM_SAFE_CHARS = "-._~!$'()*,@/";
<ide>
<ide> private static final Escaper DS3_URL_FRAGMENT_ESCAPER =
<ide> new PercentEscaper(DS3_URL_PATH_FRAGMENT_SAFE_CHARS, false); |
|
Java | lgpl-2.1 | 682679c9312a8fe75ab95f2ad0053c9ad8b856f8 | 0 | netarchivesuite/netarchivesuite-svngit-migration,netarchivesuite/netarchivesuite-svngit-migration,netarchivesuite/netarchivesuite-svngit-migration,netarchivesuite/netarchivesuite-svngit-migration,netarchivesuite/netarchivesuite-svngit-migration,netarchivesuite/netarchivesuite-svngit-migration | /* File: $Id$
* Revision: $Revision$
* Author: $Author$
* Date: $Date$
*
* The Netarchive Suite - Software to harvest and preserve websites
* Copyright 2004-2010 Det Kongelige Bibliotek and Statsbiblioteket, Denmark
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package dk.netarkivet.archive.bitarchive.distribute;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import dk.netarkivet.archive.distribute.ArchiveMessage;
import dk.netarkivet.archive.distribute.ArchiveMessageVisitor;
import dk.netarkivet.common.distribute.ChannelID;
import dk.netarkivet.common.distribute.Channels;
import dk.netarkivet.common.exceptions.ArgumentNotValid;
import dk.netarkivet.common.utils.batch.FileBatchJob;
/**
* Container for batch jobs. Messages of this class should be sent to a
* BAMON queue from where they are collected by a BitarchiveMonitorServer.
* The BitarchiveMonitorServer also creates instances of this class and sends
* them to the individual bitarchive machines.
*
* The response to this message comes in the form of a BatchReplyMessage
* placed on the senders queue.
*/
public class BatchMessage extends ArchiveMessage {
    // NOTE(review): instances are exchanged over JMS channels and so are
    // presumably Java-serialized in transit; keep field names/types stable and
    // consider declaring an explicit serialVersionUID -- TODO confirm.

    /** The batch job, this message is sent to initiate. */
    private FileBatchJob job;

    /** The id of this replica. */
    private String replicaId;

    /** The list of arguments for the batchjob. */
    private List<String> args;

    /** The ID for the batch process. Empty means "fall back to the message id";
     * see {@link #getBatchID()}. */
    private String batchID;

    /**
     * Creates a BatchMessage object which can be used to initiate a batch
     * job. This is used by BitarchiveMonitorServer to create the message
     * sent to the bitarchive machines.
     *
     * Note: The id for the batchjob is the empty string, which removes the
     * possibility of terminating the batchjob remotely while it is running.
     *
     * @param to The channel to which the batch message is to be sent
     * @param job The batch job to be executed
     * @param replicaId id of this replica.
     */
    public BatchMessage(ChannelID to, FileBatchJob job, String replicaId) {
        // Replies from this variant are routed to the error channel; no batch
        // id and no batchjob arguments are supplied.
        this(to, Channels.getError(), job, replicaId, "", new String[]{});
    }

    /**
     * Creates a BatchMessage object which can be used to initiate a batch
     * job.
     *
     * Note: The id for the batchjob is the empty string, which removes the
     * possibility of terminating the batchjob remotely while it is running.
     *
     * @param to The channel to which the batch message is to be sent
     * @param replyTo The channel whereto the reply to this message is sent.
     * @param job The batch job to be executed
     * @param replicaId id of this replica.
     * @param arguments The arguments for initialising the batchjob.
     * @throws ArgumentNotValid If the job is null, or the replica is either
     * null or the empty string.
     */
    public BatchMessage(ChannelID to, ChannelID replyTo, FileBatchJob job,
            String replicaId, String ... arguments) {
        this(to, replyTo, job, replicaId, "", arguments);
    }

    /**
     * Creates a BatchMessage object which can be used to initiate a batch
     * job.
     *
     * @param to The channel to which the batch message is to be sent
     * @param replyTo The channel whereto the reply to this message is sent.
     * @param job The batch job to be executed
     * @param replicaId id of this replica.
     * @param batchId The id for the process which runs the batchjob.
     * @param arguments The arguments for initialising the batchjob. This is
     * allowed to be null.
     * @throws ArgumentNotValid If the job is null, or the replica is either
     * null or the empty string.
     */
    public BatchMessage(ChannelID to, ChannelID replyTo, FileBatchJob job,
            String replicaId, String batchId, String ... arguments)
            throws ArgumentNotValid {
        super(to, replyTo);
        ArgumentNotValid.checkNotNull(job, "job");
        ArgumentNotValid.checkNotNullOrEmpty(replicaId, "String replicaId");
        ArgumentNotValid.checkNotNull(batchId, "String batchId");
        this.job = job;
        this.replicaId = replicaId;
        this.batchID = batchId;
        this.args = new ArrayList<String>();
        // Defensive copy of the varargs array; null or empty leaves args empty.
        if(arguments != null && !(arguments.length == 0)) {
            Collections.addAll(this.args, arguments);
        }
    }

    /**
     * Retrieves batch job.
     * @return Batch job
     */
    public FileBatchJob getJob() {
        return job;
    }

    /**
     * Returns the replica id.
     * @return the replica id
     */
    public String getReplicaId() {
        return replicaId;
    }

    /**
     * Returns the arguments for the batchjob.
     * @return The arguments for the batchjob.
     */
    public List<String> getArgs() {
        return args;
    }

    /**
     * Returns the predefined ID for the batch process. If no Id is available,
     * then the message id is returned.
     * @return The ID for the batch process, or the message id, if no specific
     * batch id has been declared.
     */
    public String getBatchID() {
        // if the batchId is empty, then use the message id as process id.
        if(batchID.isEmpty()) {
            return super.getID();
        }
        return batchID;
    }

    /**
     * Part of the ArchiveMessage visitor pattern: dispatches this message to
     * the visitor's overload for BatchMessage.
     *
     * @param v A message visitor
     */
    public void accept(ArchiveMessageVisitor v) {
        v.visit(this);
    }

    /**
     * Retrieval of a string representation of this object.
     *
     * @return A string representation of the instance of class.
     * @see dk.netarkivet.common.distribute.NetarkivetMessage#toString()
     */
    public String toString() {
        return super.toString() + " Job: " + job.getClass().getName()
            + ", on filename-pattern: " + job.getFilenamePattern()
            + ", for replica: " + replicaId;
    }
}
| src/dk/netarkivet/archive/bitarchive/distribute/BatchMessage.java | /* File: $Id$
* Revision: $Revision$
* Author: $Author$
* Date: $Date$
*
* The Netarchive Suite - Software to harvest and preserve websites
* Copyright 2004-2010 Det Kongelige Bibliotek and Statsbiblioteket, Denmark
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package dk.netarkivet.archive.bitarchive.distribute;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import dk.netarkivet.archive.distribute.ArchiveMessage;
import dk.netarkivet.archive.distribute.ArchiveMessageVisitor;
import dk.netarkivet.common.distribute.ChannelID;
import dk.netarkivet.common.distribute.Channels;
import dk.netarkivet.common.exceptions.ArgumentNotValid;
import dk.netarkivet.common.utils.batch.FileBatchJob;
/**
* Container for batch jobs. Messages of this class should be sent to a
* BAMON queue from where they are collected by a BitarchiveMonitorServer.
* The BitarchiveMonitorServer also creates instances of this class and sends
* them to the individual bitarchive machines.
*
* The response to this message comes in the form of a BatchReplyMessage
* placed on the senders queue.
*/
public class BatchMessage extends ArchiveMessage {
    // NOTE(review): instances are exchanged over JMS channels and so are
    // presumably Java-serialized in transit; keep field names/types stable and
    // consider declaring an explicit serialVersionUID -- TODO confirm.

    /** The batch job, this message is sent to initiate. */
    private FileBatchJob job;

    /** The id of this replica. */
    private String replicaId;

    /** The list of arguments for the batchjob. */
    private List<String> args;

    /** The ID for the batch process. Empty means "fall back to the message id";
     * see {@link #getBatchID()}. */
    private String batchID;

    /**
     * Creates a BatchMessage object which can be used to initiate a batch
     * job. This is used by BitarchiveMonitorServer to create the message
     * sent to the bitarchive machines.
     *
     * Note: The id for the batchjob is the empty string, which removes the
     * possibility of terminating the batchjob remotely while it is running.
     *
     * @param to The channel to which the batch message is to be sent
     * @param job The batch job to be executed
     * @param replicaId id of this replica.
     */
    public BatchMessage(ChannelID to, FileBatchJob job, String replicaId) {
        // Replies from this variant are routed to the error channel; no batch
        // id and no batchjob arguments are supplied.
        this(to, Channels.getError(), job, replicaId, "", new String[]{});
    }

    /**
     * Creates a BatchMessage object which can be used to initiate a batch
     * job.
     *
     * Note: The id for the batchjob is the empty string, which removes the
     * possibility of terminating the batchjob remotely while it is running.
     *
     * @param to The channel to which the batch message is to be sent
     * @param replyTo The channel whereto the reply to this message is sent.
     * @param job The batch job to be executed
     * @param replicaId id of this replica.
     * @param arguments The arguments for initialising the batchjob.
     * @throws ArgumentNotValid If the job is null, or the replica is either
     * null or the empty string.
     */
    public BatchMessage(ChannelID to, ChannelID replyTo, FileBatchJob job,
            String replicaId, String ... arguments) {
        this(to, replyTo, job, replicaId, "", arguments);
    }

    /**
     * Creates a BatchMessage object which can be used to initiate a batch
     * job.
     *
     * @param to The channel to which the batch message is to be sent
     * @param replyTo The channel whereto the reply to this message is sent.
     * @param job The batch job to be executed
     * @param replicaId id of this replica.
     * @param batchId The id for the process which runs the batchjob.
     * @param arguments The arguments for initialising the batchjob. This is
     * allowed to be null.
     * @throws ArgumentNotValid If the job is null, or the replica is either
     * null or the empty string.
     */
    public BatchMessage(ChannelID to, ChannelID replyTo, FileBatchJob job,
            String replicaId, String batchId, String ... arguments)
            throws ArgumentNotValid {
        super(to, replyTo);
        ArgumentNotValid.checkNotNull(job, "job");
        ArgumentNotValid.checkNotNullOrEmpty(replicaId, "String replicaId");
        ArgumentNotValid.checkNotNull(batchId, "String batchId");
        this.job = job;
        this.replicaId = replicaId;
        this.batchID = batchId;
        this.args = new ArrayList<String>();
        // Defensive copy of the varargs array; null or empty leaves args empty.
        if(arguments != null && !(arguments.length == 0)) {
            Collections.addAll(this.args, arguments);
        }
    }

    /**
     * Retrieves batch job.
     * @return Batch job
     */
    public FileBatchJob getJob() {
        return job;
    }

    /**
     * Returns the replica id.
     * @return the replica id
     */
    public String getReplicaId() {
        return replicaId;
    }

    /**
     * Returns the arguments for the batchjob.
     * @return The arguments for the batchjob.
     */
    public List<String> getArgs() {
        return args;
    }

    /**
     * Returns the predefined ID for the batch process. If no Id is available,
     * then the message id is returned.
     * @return The ID for the batch process, or the message id, if no specific
     * batch id has been declared.
     */
    public String getBatchID() {
        // if the batchId is empty, then use the message id as process id.
        if(batchID.isEmpty()) {
            return super.getID();
        }
        return batchID;
    }

    /**
     * Part of the ArchiveMessage visitor pattern: dispatches this message to
     * the visitor's overload for BatchMessage.
     *
     * @param v A message visitor
     */
    public void accept(ArchiveMessageVisitor v) {
        v.visit(this);
    }

    /**
     * Retrieval of a string representation of this object.
     *
     * @return A string representation of the instance of class.
     * @see dk.netarkivet.common.distribute.NetarkivetMessage#toString()
     */
    public String toString() {
        return super.toString() + " Job: " + job.getClass().getName()
            + ", on filename-pattern: " + job.getFilenamePattern()
            + ", for replica: " + replicaId;
    }
}
| Remove superfluous comment line
| src/dk/netarkivet/archive/bitarchive/distribute/BatchMessage.java | Remove superfluous comment line | <ide><path>rc/dk/netarkivet/archive/bitarchive/distribute/BatchMessage.java
<ide> * @param job The batch job to be executed
<ide> * @param replicaId id of this replica.
<ide> * @param batchId The id for the process which runs the batchjob.
<del> * @param arguments The arguments for initialising the batchjob.
<ide> * @param arguments The arguments for initialising the batchjob. This is
<ide> * allowed to be null.
<ide> * @throws ArgumentNotValid If the job is null, or the replica is either |
|
Java | mit | ab41a4b90bf5ace0850e683ee5a3bc4e5befebcd | 0 | coopernurse/barrister-maven | package com.bitmechanic.barrister.maven;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Author: James Cooper <[email protected]>
* Date: 7/4/13
*
* @goal idl2java
*/
public class Idl2JavaMaven extends AbstractMojo {
/**
* Path to barrister Python script
*
* @parameter expression="${idl2java.barristerScript}" default-value="barrister"
*/
private String barristerScript;
/**
* Comma separated list of IDL filenames. If the name is a directory, all
* files ending in .idl contained in that directory will be processed.
*
* @parameter expression="${idl2java.idlFiles}" default-value="${basedir}/src/main/resources/barrister/"
*/
private String idlFiles;
/**
* Comma separated list of filenames to exclude.
*
* @parameter expression="${idl2java.exclude}"
*/
private String exclude;
/**
* If true, generated struct classes will be immutable
*
* @parameter expression="${idl2java.immutable}" default-value="false"
*/
private String immutable;
/**
* Name of base Java package to write generated files into.
* Each IDL file will be generated into a separate package under this base package
* based on the IDL filename.
*
* @parameter expression="${idl2java.basePackage}" default-value="${project.groupId}.${project.artifactId}.generated"
*/
private String basePackage;
/**
* Base source directory to write .java files to
*
* @parameter expression="${idl2java.outputDirectory}" default-value="${basedir}/src/main/java"
*/
private String outputDirectory;
/**
* If true, the base directory: outputDirectory + basePackage will be cleaned (all files removed, recursively)
*
* @parameter expression="${idl2java.clean}" default-value="false"
*/
private String clean;
private File outputDirectoryPlusPackage;
public void execute() throws MojoExecutionException, MojoFailureException {
basePackage = sanitizeForJava(basePackage);
outputDirectoryPlusPackage = new File((outputDirectory + File.separator + basePackageDir()).replace("/", File.separator));
if (cleanBool()) {
getLog().info("Cleaning output dir: " + outputDirectoryPlusPackage);
if (outputDirectoryPlusPackage.exists()) {
delete(outputDirectoryPlusPackage);
}
}
else {
getLog().info("Using output dir: " + outputDirectoryPlusPackage + " - consider setting <clean>true</clean> to ensure this directory is clean on build");
}
if (!outputDirectoryPlusPackage.isDirectory() && !outputDirectoryPlusPackage.mkdirs()) {
throw new MojoExecutionException("Unable to create base output directory: " + outputDirectoryPlusPackage);
}
getLog().info("Using Barrister script: " + barristerScript);
for (File idlFile : allIdlFiles()) {
try {
translateIdlToJava(idlFile);
}
catch (IOException e) {
throw new MojoExecutionException("Error processing: " + idlFile, e);
}
}
}
private Set<String> excludeFiles() {
HashSet<String> set = new HashSet<String>();
if (this.exclude != null) {
for (String fname : this.exclude.split(",")) {
set.add(fname);
}
}
return set;
}
private Set<File> allIdlFiles() throws MojoExecutionException {
getLog().debug("Tokenizing idlFiles=" + idlFiles);
Set<String> excludeFiles = excludeFiles();
HashSet<File> set = new HashSet<File>();
for (String frag : this.idlFiles.split(",")) {
frag = frag.replace("/", File.separator);
File fileOrDir = new File(frag);
if (!fileOrDir.exists()) {
throw new MojoExecutionException("File not found: " + frag);
}
idlFilesRecur(set, fileOrDir, excludeFiles);
}
if (set.isEmpty()) {
getLog().info("No IDL files found in: " + idlFiles);
}
return set;
}
private void idlFilesRecur(HashSet<File> set, File fileOrDir, Set<String> excludeFiles) {
if (fileOrDir.isDirectory()) {
for (File child : fileOrDir.listFiles()) {
if (child.isDirectory()) {
idlFilesRecur(set, child, excludeFiles);
}
else if (child.getName().endsWith(".idl")) {
if (excludeFiles.contains(child.getName())) {
getLog().debug("Excluding file: " + child);
}
else {
set.add(child);
}
}
}
}
else {
set.add(fileOrDir);
}
}
private String basePackageDir() {
return basePackage.replace(".", "/").replace("/", File.separator);
}
private void translateIdlToJava(File idlFile) throws IOException, MojoExecutionException {
File jsonFile = new File(outputDirectoryPlusPackage, idlFile.getName().replace(".idl", ".json"));
jsonFile.getParentFile().mkdirs();
getLog().info("Translating: " + idlFile + " to: " + jsonFile);
translateIdlToJson(idlFile, jsonFile);
try {
new com.bitmechanic.barrister.Idl2Java(jsonFile.getAbsolutePath(),
idlFileToBasePackage(idlFile.getName()),
basePackage,
outputDirectory,
immutableBool());
}
catch (Exception e) {
throw new MojoExecutionException("Error running idl2java with params: " + jsonFile.getAbsolutePath() +
idlFileToBasePackage(idlFile.getName()) +
basePackage +
"src/main/java".replace("/", File.separator) +
immutableBool(), e);
}
}
private void translateIdlToJson(File idlFile, File jsonFile) throws IOException {
if (barristerScript.startsWith("http://") || barristerScript.startsWith("https://")) {
StringBuilder postStr = new StringBuilder();
int i = 0;
for (File f : idlFilesInDir(idlFile)) {
byte[] contents = readFile(f);
if (i > 0) postStr.append("&");
postStr.append("idl." + i + ".filename=").append(URLEncoder.encode(f.getName(), "utf-8"))
.append("&idl." + i + ".content=").append(URLEncoder.encode(new String(contents, "utf-8"), "utf-8"));
i++;
}
byte jsonData[] = httpPost(barristerScript, postStr.toString().getBytes("utf-8"));
if (jsonData == null || jsonData.length == 0 || jsonData[0] != '[') {
throw new IOException("Unexpected response from: " + barristerScript + " response: " + new String(jsonData, "utf-8"));
}
writeFile(jsonData, jsonFile);
}
else {
exec(barristerScript, "-j", jsonFile.getAbsolutePath(), idlFile.getAbsolutePath());
}
}
private String idlFileToBasePackage(String filename) {
return basePackage + "." + sanitizeForJava(filename.replace(".idl", ""));
}
private String sanitizeForJava(String s) {
return s.replaceAll("[^A-Za-z0-9_\\.]", "");
}
private boolean immutableBool() {
return this.immutable != null && this.immutable.trim().equals("true");
}
private boolean cleanBool() {
return this.clean != null && this.clean.trim().equals("true");
}
private void delete(File f) throws MojoExecutionException {
if (f.isDirectory()) {
for (File child : f.listFiles()) {
delete(child);
}
}
if (f.exists() && !f.delete()) {
throw new MojoExecutionException("Unable to delete: " + f.getAbsolutePath());
}
}
private byte[] httpPost(String endpointUrl, byte[] postData) throws IOException {
URL url = new URL(endpointUrl);
HttpURLConnection conn = (HttpURLConnection)url.openConnection();
conn.setDoInput(true);
conn.setDoOutput(true);
conn.addRequestProperty("Content-Length", String.valueOf(postData.length));
conn.addRequestProperty("Content-Type", "application/x-www-form-urlencoded");
OutputStream os = null;
InputStream is = null;
InputStream err = null;
try {
os = conn.getOutputStream();
os.write(postData);
os.flush();
is = conn.getInputStream();
return streamToBytes(is);
}
catch (IOException e) {
if (conn.getResponseCode() == 500) {
err = conn.getErrorStream();
throw new IOException("Error translating IDL: " + new String(streamToBytes(err), "utf-8"));
}
else {
throw e;
}
}
finally {
closeQuietly(os);
closeQuietly(is);
closeQuietly(err);
}
}
private byte[] readFile(File file) throws IOException {
FileInputStream fis = null;
try {
fis = new FileInputStream(file);
return streamToBytes(fis);
}
finally {
closeQuietly(fis);
}
}
private void writeFile(byte[] data, File file) throws IOException {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
fos.write(data);
fos.flush();
}
finally {
closeQuietly(fos);
}
}
private byte[] streamToBytes(InputStream is) throws IOException {
byte[] buffer = new byte[2048];
int numRead = 0;
ByteArrayOutputStream os = new ByteArrayOutputStream();
while((numRead = is.read(buffer)) > 0) {
os.write(buffer, 0, numRead);
}
return os.toByteArray();
}
private void closeQuietly(Closeable c) {
if (c != null) {
try { c.close(); }
catch (Exception e) { }
}
}
private List<File> idlFilesInDir(File firstFile) {
List<File> files = new ArrayList<File>();
files.add(firstFile);
for (File f : firstFile.getParentFile().listFiles()) {
if (f.isFile() && !f.getAbsolutePath().equals(firstFile.getAbsolutePath())) {
files.add(f);
}
}
return files;
}
private void exec(String... args) throws IOException {
String s;
Process p = Runtime.getRuntime().exec(args);
new DrainStream(p.getInputStream(), false).start();
new DrainStream(p.getErrorStream(), true).start();
try {
if (p.waitFor() != 0) {
throw new IOException("Command returned non-zero exit code: " + args);
}
}
catch (InterruptedException e) {
throw new IOException(e);
}
}
class DrainStream implements Runnable {
BufferedReader reader;
boolean logErr;
DrainStream(InputStream is, boolean logErr) {
this.reader = new BufferedReader(new InputStreamReader(is));
this.logErr = logErr;
}
public void start() {
Thread t = new Thread(this);
t.start();
}
public void run() {
String s;
try {
while ((s = reader.readLine()) != null) {
if (logErr) {
getLog().error(s);
}
else {
getLog().info(s);
}
}
}
catch (IOException e) {
getLog().error(e);
}
}
}
}
| src/main/java/com/bitmechanic/barrister/maven/Idl2JavaMaven.java | package com.bitmechanic.barrister.maven;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Author: James Cooper <[email protected]>
* Date: 7/4/13
*
* @goal idl2java
*/
public class Idl2JavaMaven extends AbstractMojo {
/**
* Path to barrister Python script
*
* @parameter expression="${idl2java.barristerScript}" default-value="barrister"
*/
private String barristerScript;
/**
* Comma separated list of IDL filenames. If the name is a directory, all
* files ending in .idl contained in that directory will be processed.
*
* @parameter expression="${idl2java.idlFiles}" default-value="${basedir}/src/main/resources/barrister/"
*/
private String idlFiles;
/**
* Comma separated list of filenames to exclude.
*
* @parameter expression="${idl2java.exclude}"
*/
private String exclude;
/**
* If true, generated struct classes will be immutable
*
* @parameter expression="${idl2java.immutable}" default-value="false"
*/
private String immutable;
/**
* Name of base Java package to write generated files into.
* Each IDL file will be generated into a separate package under this base package
* based on the IDL filename.
*
* @parameter expression="${idl2java.basePackage}" default-value="${project.groupId}.${project.artifactId}.generated"
*/
private String basePackage;
/**
* Base source directory to write .java files to
*
* @parameter expression="${idl2java.outputDirectory}" default-value="${basedir}/src/main/java"
*/
private String outputDirectory;
/**
* If true, the base directory: outputDirectory + basePackage will be cleaned (all files removed, recursively)
*
* @parameter expression="${idl2java.clean}" default-value="false"
*/
private String clean;
private File outputDirectoryPlusPackage;
    /**
     * Plugin entry point: sanitizes the base package, optionally cleans the
     * output directory, then translates every discovered IDL file to JSON and
     * generates Java sources from it.
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        basePackage = sanitizeForJava(basePackage);
        // resolve outputDirectory + basePackage into a platform-specific path
        outputDirectoryPlusPackage = new File((outputDirectory + File.separator + basePackageDir()).replace("/", File.separator));
        if (cleanBool()) {
            getLog().info("Cleaning output dir: " + outputDirectoryPlusPackage);
            if (outputDirectoryPlusPackage.exists()) {
                delete(outputDirectoryPlusPackage);
            }
        }
        else {
            getLog().info("Using output dir: " + outputDirectoryPlusPackage + " - consider setting <clean>true</clean> to ensure this directory is clean on build");
        }
        // recreate the output directory if cleaning (or a fresh checkout) removed it
        if (!outputDirectoryPlusPackage.isDirectory() && !outputDirectoryPlusPackage.mkdirs()) {
            throw new MojoExecutionException("Unable to create base output directory: " + outputDirectoryPlusPackage);
        }
        getLog().info("Using Barrister script: " + barristerScript);
        for (File idlFile : allIdlFiles()) {
            try {
                translateIdlToJava(idlFile);
            }
            catch (IOException e) {
                throw new MojoExecutionException("Error processing: " + idlFile, e);
            }
        }
    }
private Set<String> excludeFiles() {
HashSet<String> set = new HashSet<String>();
for (String fname : this.exclude.split(",")) {
set.add(fname);
}
return set;
}
    /**
     * Resolves the 'idlFiles' parameter into a concrete set of IDL files,
     * recursing into directories and honoring the exclude list.
     *
     * @return the IDL files to process; may be empty (a log message is emitted)
     * @throws MojoExecutionException if a configured file or directory does not exist
     */
    private Set<File> allIdlFiles() throws MojoExecutionException {
        getLog().debug("Tokenizing idlFiles=" + idlFiles);
        Set<String> excludeFiles = excludeFiles();
        HashSet<File> set = new HashSet<File>();
        for (String frag : this.idlFiles.split(",")) {
            // normalize configured paths to the platform separator
            frag = frag.replace("/", File.separator);
            File fileOrDir = new File(frag);
            if (!fileOrDir.exists()) {
                throw new MojoExecutionException("File not found: " + frag);
            }
            idlFilesRecur(set, fileOrDir, excludeFiles);
        }
        if (set.isEmpty()) {
            getLog().info("No IDL files found in: " + idlFiles);
        }
        return set;
    }
/**
 * Recursively collects {@code *.idl} files under {@code fileOrDir} into
 * {@code set}, skipping any file whose name is in {@code excludeFiles}.
 * A plain file argument is added unconditionally (even if its name is
 * excluded or lacks the .idl suffix), matching the explicit-file case.
 */
private void idlFilesRecur(HashSet<File> set, File fileOrDir, Set<String> excludeFiles) {
    if (fileOrDir.isDirectory()) {
        File[] children = fileOrDir.listFiles();
        // listFiles() returns null on I/O errors or permission problems;
        // previously this caused a NullPointerException in the for loop.
        if (children == null) {
            getLog().warn("Unable to list directory: " + fileOrDir);
            return;
        }
        for (File child : children) {
            if (child.isDirectory()) {
                idlFilesRecur(set, child, excludeFiles);
            }
            else if (child.getName().endsWith(".idl")) {
                if (excludeFiles.contains(child.getName())) {
                    getLog().debug("Excluding file: " + child);
                }
                else {
                    set.add(child);
                }
            }
        }
    }
    else {
        set.add(fileOrDir);
    }
}
/**
 * Converts the base package (e.g. "com.example") into a relative directory
 * path using the platform file separator.
 */
private String basePackageDir() {
return basePackage.replace(".", "/").replace("/", File.separator);
}
/**
 * Translates a single IDL file: first to its JSON representation (placed
 * beside the generated sources), then runs the Barrister Idl2Java generator
 * over the JSON to produce Java source files.
 *
 * @param idlFile the .idl file to translate
 * @throws IOException            if the IDL-to-JSON translation fails
 * @throws MojoExecutionException if the idl2java code generation fails
 */
private void translateIdlToJava(File idlFile) throws IOException, MojoExecutionException {
    File jsonFile = new File(outputDirectoryPlusPackage, idlFile.getName().replace(".idl", ".json"));
    jsonFile.getParentFile().mkdirs();
    getLog().info("Translating: " + idlFile + " to: " + jsonFile);
    translateIdlToJson(idlFile, jsonFile);
    try {
        new com.bitmechanic.barrister.Idl2Java(jsonFile.getAbsolutePath(),
            idlFileToBasePackage(idlFile.getName()),
            basePackage,
            outputDirectory,
            immutableBool());
    }
    catch (Exception e) {
        // Report the parameters that were actually passed to Idl2Java.  The
        // previous message hard-coded "src/main/java" instead of the real
        // outputDirectory, and concatenated the values with no separators.
        throw new MojoExecutionException("Error running idl2java with params: " + jsonFile.getAbsolutePath() +
            ", " + idlFileToBasePackage(idlFile.getName()) +
            ", " + basePackage +
            ", " + outputDirectory +
            ", " + immutableBool(), e);
    }
}
/**
 * Translates an IDL file to JSON either by POSTing it (plus its sibling
 * files, which may be included by it) to a remote barrister HTTP endpoint,
 * or by shelling out to a local barrister script.
 *
 * @param idlFile  the source .idl file
 * @param jsonFile where the JSON output is written
 * @throws IOException on translation or transport failure
 */
private void translateIdlToJson(File idlFile, File jsonFile) throws IOException {
    if (barristerScript.startsWith("http://") || barristerScript.startsWith("https://")) {
        // Remote translation: build a form-encoded payload containing every
        // file in the IDL's directory so cross-file includes resolve.
        StringBuilder postStr = new StringBuilder();
        int i = 0;
        for (File f : idlFilesInDir(idlFile)) {
            byte[] contents = readFile(f);
            if (i > 0) {
                postStr.append("&");
            }
            postStr.append("idl." + i + ".filename=").append(URLEncoder.encode(f.getName(), "utf-8"))
                .append("&idl." + i + ".content=").append(URLEncoder.encode(new String(contents, "utf-8"), "utf-8"));
            i++;
        }
        byte jsonData[] = httpPost(barristerScript, postStr.toString().getBytes("utf-8"));
        // Handle the null/empty cases before dereferencing: the previous code
        // built the error message with "new String(jsonData, ...)" even when
        // jsonData was null, throwing NullPointerException instead of the
        // intended IOException.
        if (jsonData == null || jsonData.length == 0) {
            throw new IOException("Unexpected empty response from: " + barristerScript);
        }
        if (jsonData[0] != '[') {
            throw new IOException("Unexpected response from: " + barristerScript + " response: " + new String(jsonData, "utf-8"));
        }
        writeFile(jsonData, jsonFile);
    }
    else {
        // Local translation: invoke the barrister script directly.
        exec(barristerScript, "-j", jsonFile.getAbsolutePath(), idlFile.getAbsolutePath());
    }
}
/**
 * Maps an IDL file name to the Java package its generated classes live in:
 * the base package plus the sanitised file name (without the .idl suffix).
 */
private String idlFileToBasePackage(String filename) {
    String idlName = sanitizeForJava(filename.replace(".idl", ""));
    return basePackage + "." + idlName;
}
/**
 * Removes every character that is not alphanumeric, underscore or dot so
 * the result is safe to use in a Java package or class name.
 */
private String sanitizeForJava(String s) {
return s.replaceAll("[^A-Za-z0-9_\\.]", "");
}
/**
 * @return true when the optional "immutable" plugin parameter is the
 *         (trimmed) string "true"; false otherwise, including when unset
 */
private boolean immutableBool() {
    String value = this.immutable;
    return value != null && "true".equals(value.trim());
}
/**
 * @return true when the optional "clean" plugin parameter is the (trimmed)
 *         string "true"; false otherwise, including when unset
 */
private boolean cleanBool() {
    String value = this.clean;
    return value != null && "true".equals(value.trim());
}
/**
 * Recursively deletes a file or directory tree.
 *
 * @param f the file or directory to delete
 * @throws MojoExecutionException if any path cannot be deleted
 */
private void delete(File f) throws MojoExecutionException {
    if (f.isDirectory()) {
        File[] children = f.listFiles();
        // listFiles() returns null on I/O errors or permission problems;
        // previously this caused a NullPointerException in the for loop.
        if (children != null) {
            for (File child : children) {
                delete(child);
            }
        }
    }
    if (f.exists() && !f.delete()) {
        throw new MojoExecutionException("Unable to delete: " + f.getAbsolutePath());
    }
}
/**
 * POSTs form-encoded data to the given URL and returns the raw response
 * body. When the server responds with HTTP 500 the error stream is read and
 * surfaced as the exception message (the remote barrister endpoint reports
 * IDL translation errors this way).
 *
 * @param endpointUrl the URL to POST to
 * @param postData    the already form-encoded request body
 * @return the response body bytes
 * @throws IOException on connection failure or a non-500 HTTP error
 */
private byte[] httpPost(String endpointUrl, byte[] postData) throws IOException {
URL url = new URL(endpointUrl);
HttpURLConnection conn = (HttpURLConnection)url.openConnection();
conn.setDoInput(true);
conn.setDoOutput(true);
conn.addRequestProperty("Content-Length", String.valueOf(postData.length));
conn.addRequestProperty("Content-Type", "application/x-www-form-urlencoded");
OutputStream os = null;
InputStream is = null;
InputStream err = null;
try {
os = conn.getOutputStream();
os.write(postData);
os.flush();
is = conn.getInputStream();
return streamToBytes(is);
}
catch (IOException e) {
// A 500 carries the server-side translation error in the error stream;
// re-throw it with that text. Other failures propagate unchanged.
if (conn.getResponseCode() == 500) {
err = conn.getErrorStream();
throw new IOException("Error translating IDL: " + new String(streamToBytes(err), "utf-8"));
}
else {
throw e;
}
}
finally {
// Close all streams without masking any in-flight exception.
closeQuietly(os);
closeQuietly(is);
closeQuietly(err);
}
}
/**
 * Reads the entire contents of a file into a byte array, closing the
 * stream afterwards.
 *
 * @param file the file to read
 * @return the file contents
 * @throws IOException if the file cannot be read
 */
private byte[] readFile(File file) throws IOException {
FileInputStream fis = null;
try {
fis = new FileInputStream(file);
return streamToBytes(fis);
}
finally {
closeQuietly(fis);
}
}
/**
 * Writes the given bytes to a file, replacing any existing content, and
 * closes the stream afterwards.
 *
 * @param data the bytes to write
 * @param file the destination file
 * @throws IOException if the file cannot be written
 */
private void writeFile(byte[] data, File file) throws IOException {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
fos.write(data);
fos.flush();
}
finally {
closeQuietly(fos);
}
}
/**
 * Fully drains the given stream into a byte array. The caller retains
 * responsibility for closing the stream.
 *
 * @param is the stream to drain
 * @return all bytes read before end-of-stream
 * @throws IOException if reading fails
 */
private byte[] streamToBytes(InputStream is) throws IOException {
    ByteArrayOutputStream collected = new ByteArrayOutputStream();
    byte[] chunk = new byte[2048];
    for (int count = is.read(chunk); count > 0; count = is.read(chunk)) {
        collected.write(chunk, 0, count);
    }
    return collected.toByteArray();
}
/**
 * Closes the given resource, swallowing any exception; null-safe.
 */
private void closeQuietly(Closeable c) {
    if (c == null) {
        return;
    }
    try {
        c.close();
    }
    catch (Exception ignored) {
        // best-effort close; failures are deliberately ignored
    }
}
/**
 * Returns {@code firstFile} followed by every other plain file in the same
 * directory (these siblings may be referenced by IDL includes).
 *
 * @param firstFile the primary IDL file; always first in the result
 * @return the primary file plus its sibling files
 */
private List<File> idlFilesInDir(File firstFile) {
    List<File> files = new ArrayList<File>();
    files.add(firstFile);
    File[] siblings = firstFile.getParentFile().listFiles();
    // listFiles() returns null on I/O errors or permission problems;
    // previously this caused a NullPointerException in the for loop.
    if (siblings != null) {
        for (File f : siblings) {
            if (f.isFile() && !f.getAbsolutePath().equals(firstFile.getAbsolutePath())) {
                files.add(f);
            }
        }
    }
    return files;
}
/**
 * Runs an external command, streaming its stdout/stderr to the Maven log,
 * and fails if the process exits with a non-zero code.
 *
 * @param args the command and its arguments
 * @throws IOException if the process cannot start, exits non-zero, or the
 *                     wait is interrupted
 */
private void exec(String... args) throws IOException {
    Process p = Runtime.getRuntime().exec(args);
    new DrainStream(p.getInputStream(), false).start();
    new DrainStream(p.getErrorStream(), true).start();
    try {
        if (p.waitFor() != 0) {
            // Join the argument list so the failing command is readable
            // (concatenating the array printed only its identity hash).
            StringBuilder cmd = new StringBuilder();
            for (String arg : args) {
                if (cmd.length() > 0) {
                    cmd.append(' ');
                }
                cmd.append(arg);
            }
            throw new IOException("Command returned non-zero exit code: " + cmd);
        }
    }
    catch (InterruptedException e) {
        // Restore the interrupt flag before translating to IOException.
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
/**
 * Pumps a child process output stream onto the Maven log on a background
 * thread, preventing the child process from blocking on a full pipe buffer.
 */
class DrainStream implements Runnable {
// reader over the process stdout or stderr stream
BufferedReader reader;
// when true, lines are logged at ERROR level; otherwise INFO
boolean logErr;
DrainStream(InputStream is, boolean logErr) {
this.reader = new BufferedReader(new InputStreamReader(is));
this.logErr = logErr;
}
/** Starts a new daemon-style thread that drains the stream until EOF. */
public void start() {
Thread t = new Thread(this);
t.start();
}
public void run() {
String s;
try {
// Read until the process closes its end of the pipe.
while ((s = reader.readLine()) != null) {
if (logErr) {
getLog().error(s);
}
else {
getLog().info(s);
}
}
}
catch (IOException e) {
getLog().error(e);
}
}
}
}
| null guard on this.exclude
| src/main/java/com/bitmechanic/barrister/maven/Idl2JavaMaven.java | null guard on this.exclude | <ide><path>rc/main/java/com/bitmechanic/barrister/maven/Idl2JavaMaven.java
<ide>
<ide> private Set<String> excludeFiles() {
<ide> HashSet<String> set = new HashSet<String>();
<del> for (String fname : this.exclude.split(",")) {
<del> set.add(fname);
<add> if (this.exclude != null) {
<add> for (String fname : this.exclude.split(",")) {
<add> set.add(fname);
<add> }
<ide> }
<ide> return set;
<ide> } |
|
Java | epl-1.0 | 0ac08a172e1d71608e326dec48534de5928b289c | 0 | junit-team/junit-lambda,sbrannen/junit-lambda | /*
* Copyright 2015-2016 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v1.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.junit.gen5.api;
import static java.util.Spliterator.ORDERED;
import static java.util.Spliterators.spliteratorUnknownSize;
import static org.junit.gen5.commons.meta.API.Usage.Experimental;
import java.util.Iterator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.junit.gen5.commons.meta.API;
import org.junit.gen5.commons.util.Preconditions;
import org.junit.gen5.commons.util.ToStringBuilder;
/**
* A {@code DynamicTest} is a test case generated at runtime.
*
* <p>It is composed of a {@linkplain #getDisplayName display name} and an
* {@link #getExecutable Executable}.
*
* <p>Instances of {@code DynamicTest} must be generated by factory methods
* annotated with {@link TestFactory @TestFactory}.
*
* <p>Note that dynamic tests are quite different from standard {@link Test @Test}
* cases since callbacks such as {@link BeforeEach @BeforeEach} and
* {@link AfterEach @AfterEach} methods are not executed for dynamic tests.
*
* @since 5.0
* @see Test
* @see TestFactory
* @see Executable
*/
@API(Experimental)
public class DynamicTest {
/**
 * Generates a stream of dynamic tests, one per element produced by
 * {@code inputGenerator}. Each test's display name and executable are
 * derived from the input via the supplied functions. All arguments must be
 * non-null; the stream is lazy, so the generators run only on consumption.
 */
public static <T> Stream<DynamicTest> stream(Iterator<T> inputGenerator,
Function<? super T, String> displayNameGenerator, Consumer<? super T> testGenerator) {
Preconditions.notNull(inputGenerator, "inputGenerator must not be null");
Preconditions.notNull(displayNameGenerator, "displayNameGenerator must not be null");
Preconditions.notNull(testGenerator, "testGenerator must not be null");
// @formatter:off
return StreamSupport.stream(spliteratorUnknownSize(inputGenerator, ORDERED), false)
.map(input -> new DynamicTest(displayNameGenerator.apply(input), () -> testGenerator.accept(input)));
// @formatter:on
}
// display name shown for this test case in reports
private final String displayName;
// the code executed when the test runs
private final Executable executable;
/**
 * Creates a dynamic test; validation happens eagerly so invalid tests fail
 * at creation time rather than when the engine later executes them.
 */
public DynamicTest(String displayName, Executable executable) {
this.displayName = Preconditions.notBlank(displayName, "displayName must not be null or empty");
this.executable = Preconditions.notNull(executable, "executable must not be null");
}
/** @return the display name of this test case; never null or blank */
public String getDisplayName() {
return this.displayName;
}
/** @return the executable backing this test case; never null */
public Executable getExecutable() {
return this.executable;
}
@Override
public String toString() {
return new ToStringBuilder(this).append("displayName", displayName).toString();
}
}
| junit5-api/src/main/java/org/junit/gen5/api/DynamicTest.java | /*
* Copyright 2015-2016 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v1.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.junit.gen5.api;
import static java.util.Spliterator.ORDERED;
import static java.util.Spliterators.spliteratorUnknownSize;
import static org.junit.gen5.commons.meta.API.Usage.Experimental;
import java.util.Iterator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.junit.gen5.commons.meta.API;
import org.junit.gen5.commons.util.Preconditions;
import org.junit.gen5.commons.util.ToStringBuilder;
/**
* A {@code DynamicTest} is a test case generated at runtime.
*
* <p>It is composed of a {@linkplain #getDisplayName display name} and an
* {@link #getExecutable Executable}.
*
* <p>Instances of {@code DynamicTest} must be generated by factory methods
* annotated with {@link TestFactory @TestFactory}.
*
* <p>Note that dynamic tests are quite different from standard {@link Test @Test}
* cases since callbacks such as {@link BeforeEach @BeforeEach} and
* {@link AfterEach @AfterEach} methods are not executed for dynamic tests.
*
* @since 5.0
* @see Test
* @see TestFactory
* @see Executable
*/
@API(Experimental)
public class DynamicTest {

    /**
     * Generates a stream of dynamic tests, one per element produced by
     * {@code inputGenerator}. Each test's display name and executable are
     * derived from the input via the supplied functions. All arguments must
     * be non-null; the stream is lazy, so generators run only on consumption.
     */
    public static <T> Stream<DynamicTest> stream(Iterator<T> inputGenerator,
            Function<? super T, String> displayNameGenerator, Consumer<? super T> testGenerator) {

        Preconditions.notNull(inputGenerator, "inputGenerator must not be null");
        Preconditions.notNull(displayNameGenerator, "displayNameGenerator must not be null");
        Preconditions.notNull(testGenerator, "testGenerator must not be null");

        // @formatter:off
        return StreamSupport.stream(spliteratorUnknownSize(inputGenerator, ORDERED), false)
                .map(input -> new DynamicTest(displayNameGenerator.apply(input), () -> testGenerator.accept(input)));
        // @formatter:on
    }

    // display name shown for this test case in reports
    private final String displayName;

    // the code executed when the test runs
    private final Executable executable;

    /**
     * Creates a dynamic test. The constructor previously accepted null or
     * blank arguments, deferring failures to execution time; validating
     * eagerly here makes invalid tests fail at creation instead.
     */
    public DynamicTest(String displayName, Executable executable) {
        this.displayName = Preconditions.notBlank(displayName, "displayName must not be null or empty");
        this.executable = Preconditions.notNull(executable, "executable must not be null");
    }

    /** @return the display name of this test case; never null or blank */
    public String getDisplayName() {
        return this.displayName;
    }

    /** @return the executable backing this test case; never null */
    public Executable getExecutable() {
        return this.executable;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this).append("displayName", displayName).toString();
    }
}
| Add missing preconditions to DynamicTest()
| junit5-api/src/main/java/org/junit/gen5/api/DynamicTest.java | Add missing preconditions to DynamicTest() | <ide><path>unit5-api/src/main/java/org/junit/gen5/api/DynamicTest.java
<ide> private final Executable executable;
<ide>
<ide> public DynamicTest(String displayName, Executable executable) {
<del> this.displayName = displayName;
<del> this.executable = executable;
<add> this.displayName = Preconditions.notBlank(displayName, "displayName must not be null or empty");
<add> this.executable = Preconditions.notNull(executable, "executable must not be null");
<ide> }
<ide>
<ide> public String getDisplayName() { |
|
JavaScript | mit | d033b1acc16e1395aacfb55dad9566e3ed9b214b | 0 | milankinen/react-combinators | import {Observable, Subject} from "rx"
import React from "react"
import {keys, values, zip, zipObject} from "../util"
// Creates a React component whose render output is driven by Rx observables:
// each prop becomes an observable stream, renderFn maps those streams to a
// stream of virtual DOM, and the component re-renders on each emission.
export default function createComponent(renderFn) {
return React.createClass({
getInitialState() {
// One Subject per initial prop; componentWillReceiveProps pushes new
// values through these into the observable prop streams.
const propsSubjects = zipObject(keys(this.props), values(this.props).map(() => new Subject()))
// Each prop stream starts with the initial value, de-duplicates
// consecutive values, and is shared across subscribers inside renderFn.
const propsS =
zipObject(keys(this.props), zip(values(propsSubjects), values(this.props)).map(([subject, initial]) => (
subject.startWith(initial).distinctUntilChanged().share()
)))
return {
propsSubjects,
vdomS: renderFn(propsS),
vdom: null
}
},
componentWillMount() {
const updateVDOM = vdom => this.setState({vdom})
if (process.browser) {
// In the browser keep subscribing for the component's lifetime.
this.setState({ subscription: this.state.vdomS.subscribe(updateVDOM) })
} else {
// On the server render only the first emitted virtual DOM.
this.state.vdomS.take(1).subscribe(updateVDOM)
}
},
componentWillReceiveProps(nextProps) {
// Route each incoming prop value to its subject; props that were not
// present at creation time have no stream and are ignored with a warning.
keys(nextProps).forEach(propName => {
const subject = this.state.propsSubjects[propName]
if (!subject) {
console.warn(
`Trying to pass property "${propName}" that is not set during the component creation.`,
`Ignoring this property.`
)
} else {
subject.onNext(nextProps[propName])
}
})
},
shouldComponentUpdate(nextProps, nextState) {
// Only re-render when the observable pipeline produced new virtual DOM.
return nextState.vdom !== this.state.vdom
},
componentWillUnmount() {
// Dispose the Rx subscription to avoid setState after unmount and leaks.
const {subscription} = this.state
if (subscription) {
subscription.dispose()
}
},
render() {
return this.state.vdom
}
})
}
| src/rx/createComponent.js | import {Observable, Subject} from "rx"
import React from "react"
import {keys, values, zip, zipObject} from "../util"
export default function createComponent(renderFn) {
return React.createClass({
getInitialState() {
const propsSubjects = zipObject(keys(this.props), values(this.props).map(() => new Subject()))
const propsS =
zipObject(keys(this.props), zip(values(propsSubjects), values(this.props)).map(([subject, initial]) => (
subject.startWith(initial).distinctUntilChanged()
)))
return {
propsSubjects,
vdomS: renderFn(propsS),
vdom: null
}
},
componentWillMount() {
const updateVDOM = vdom => this.setState({vdom})
if (process.browser) {
this.setState({ subscription: this.state.vdomS.subscribe(updateVDOM) })
} else {
this.state.vdomS.take(1).subscribe(updateVDOM)
}
},
componentWillReceiveProps(nextProps) {
keys(nextProps).forEach(propName => {
const subject = this.state.propsSubjects[propName]
if (!subject) {
console.warn(
`Trying to pass property "${propName}" that is not set during the component creation.`,
`Ignoring this property.`
)
} else {
subject.onNext(nextProps[propName])
}
})
},
shouldComponentUpdate(nextProps, nextState) {
return nextState.vdom !== this.state.vdom
},
componentWillUnmount() {
const {subscription} = this.state
if (subscription) {
subscription.dispose()
}
},
render() {
return this.state.vdom
}
})
}
| Share Rx component props | src/rx/createComponent.js | Share Rx component props | <ide><path>rc/rx/createComponent.js
<ide>
<ide> const propsS =
<ide> zipObject(keys(this.props), zip(values(propsSubjects), values(this.props)).map(([subject, initial]) => (
<del> subject.startWith(initial).distinctUntilChanged()
<add> subject.startWith(initial).distinctUntilChanged().share()
<ide> )))
<ide>
<ide> return { |
|
Java | apache-2.0 | c3ae7406ce932a49dbde53fa9a79f5e4c49f7d27 | 0 | gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom | /*
* Copyright 2017 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.dashboard.client.query;
import stroom.alert.client.event.AlertEvent;
import stroom.core.client.LocationManager;
import stroom.dashboard.client.HasSelection;
import stroom.dashboard.client.main.AbstractComponentPresenter;
import stroom.dashboard.client.main.Component;
import stroom.dashboard.client.main.ComponentRegistry.ComponentType;
import stroom.dashboard.client.main.Components;
import stroom.dashboard.client.main.DashboardUUID;
import stroom.dashboard.client.main.DataSourceFieldsMap;
import stroom.dashboard.client.main.IndexLoader;
import stroom.dashboard.client.main.Queryable;
import stroom.dashboard.client.main.SearchBus;
import stroom.dashboard.client.main.SearchModel;
import stroom.dashboard.client.table.TimeZones;
import stroom.dashboard.shared.Automate;
import stroom.dashboard.shared.ComponentConfig;
import stroom.dashboard.shared.ComponentSelectionHandler;
import stroom.dashboard.shared.ComponentSettings;
import stroom.dashboard.shared.DashboardDoc;
import stroom.dashboard.shared.DashboardQueryKey;
import stroom.dashboard.shared.DashboardResource;
import stroom.dashboard.shared.DashboardSearchRequest;
import stroom.dashboard.shared.DownloadQueryRequest;
import stroom.dashboard.shared.QueryComponentSettings;
import stroom.datasource.api.v2.AbstractField;
import stroom.dispatch.client.ExportFileCompleteUtil;
import stroom.dispatch.client.Rest;
import stroom.dispatch.client.RestFactory;
import stroom.docref.DocRef;
import stroom.document.client.event.DirtyEvent;
import stroom.document.client.event.DirtyEvent.DirtyHandler;
import stroom.document.client.event.HasDirtyHandlers;
import stroom.explorer.client.presenter.EntityChooser;
import stroom.pipeline.client.event.CreateProcessorEvent;
import stroom.pipeline.shared.PipelineDoc;
import stroom.processor.shared.CreateProcessFilterRequest;
import stroom.processor.shared.Limits;
import stroom.processor.shared.ProcessorFilter;
import stroom.processor.shared.ProcessorFilterResource;
import stroom.processor.shared.QueryData;
import stroom.query.api.v2.ExpressionOperator;
import stroom.query.api.v2.ExpressionOperator.Op;
import stroom.query.api.v2.ExpressionUtil;
import stroom.query.client.ExpressionTreePresenter;
import stroom.query.client.ExpressionUiHandlers;
import stroom.security.client.api.ClientSecurityContext;
import stroom.security.shared.DocumentPermissionNames;
import stroom.security.shared.PermissionNames;
import stroom.svg.client.SvgPreset;
import stroom.svg.client.SvgPresets;
import stroom.ui.config.client.UiConfigCache;
import stroom.util.shared.EqualsBuilder;
import stroom.util.shared.ModelStringUtil;
import stroom.util.shared.ResourceGeneration;
import stroom.widget.button.client.ButtonView;
import stroom.widget.menu.client.presenter.IconMenuItem;
import stroom.widget.menu.client.presenter.Item;
import stroom.widget.menu.client.presenter.MenuListPresenter;
import stroom.widget.popup.client.event.HidePopupEvent;
import stroom.widget.popup.client.event.ShowPopupEvent;
import stroom.widget.popup.client.presenter.PopupPosition;
import stroom.widget.popup.client.presenter.PopupSize;
import stroom.widget.popup.client.presenter.PopupUiHandlers;
import stroom.widget.popup.client.presenter.PopupView.PopupType;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.user.client.Timer;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.web.bindery.event.shared.EventBus;
import com.google.web.bindery.event.shared.HandlerRegistration;
import com.gwtplatform.mvp.client.HasUiHandlers;
import com.gwtplatform.mvp.client.View;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
public class QueryPresenter extends AbstractComponentPresenter<QueryPresenter.QueryView>
implements QueryUiHandlers, HasDirtyHandlers, Queryable {
private static final DashboardResource DASHBOARD_RESOURCE = GWT.create(DashboardResource.class);
private static final ProcessorFilterResource PROCESSOR_FILTER_RESOURCE = GWT.create(ProcessorFilterResource.class);
public static final ComponentType TYPE = new ComponentType(0, "query", "Query");
static final int TEN_SECONDS = 10000;
private final ExpressionTreePresenter expressionPresenter;
private final QueryHistoryPresenter historyPresenter;
private final QueryFavouritesPresenter favouritesPresenter;
private final Provider<EntityChooser> pipelineSelection;
private final Provider<QueryInfoPresenter> queryInfoPresenterProvider;
private final ProcessorLimitsPresenter processorLimitsPresenter;
private final MenuListPresenter menuListPresenter;
private final RestFactory restFactory;
private final LocationManager locationManager;
private final IndexLoader indexLoader;
private final SearchModel searchModel;
private final ButtonView addOperatorButton;
private final ButtonView addTermButton;
private final ButtonView disableItemButton;
private final ButtonView deleteItemButton;
private final ButtonView historyButton;
private final ButtonView favouriteButton;
private final ButtonView downloadQueryButton;
private final ButtonView warningsButton;
private String params;
private String currentWarnings;
private ButtonView processButton;
private long defaultProcessorTimeLimit;
private long defaultProcessorRecordLimit;
private boolean initialised;
private Timer autoRefreshTimer;
private String lastUsedQueryInfo;
private boolean queryOnOpen;
/**
 * Constructs the query component presenter. Wires the expression editor
 * into the view, creates the toolbar buttons, builds the search model and
 * caches the default processor limits from the UI configuration. All
 * collaborators are injected.
 */
@Inject
public QueryPresenter(final EventBus eventBus,
final QueryView view,
final SearchBus searchBus,
final Provider<QuerySettingsPresenter> settingsPresenterProvider,
final ExpressionTreePresenter expressionPresenter,
final QueryHistoryPresenter historyPresenter,
final QueryFavouritesPresenter favouritesPresenter,
final Provider<EntityChooser> pipelineSelection,
final Provider<QueryInfoPresenter> queryInfoPresenterProvider,
final ProcessorLimitsPresenter processorLimitsPresenter,
final MenuListPresenter menuListPresenter,
final RestFactory restFactory,
final ClientSecurityContext securityContext,
final UiConfigCache clientPropertyCache,
final LocationManager locationManager,
final TimeZones timeZones) {
super(eventBus, view, settingsPresenterProvider);
this.expressionPresenter = expressionPresenter;
this.historyPresenter = historyPresenter;
this.favouritesPresenter = favouritesPresenter;
this.pipelineSelection = pipelineSelection;
this.queryInfoPresenterProvider = queryInfoPresenterProvider;
this.processorLimitsPresenter = processorLimitsPresenter;
this.menuListPresenter = menuListPresenter;
this.restFactory = restFactory;
this.locationManager = locationManager;
view.setExpressionView(expressionPresenter.getView());
view.setUiHandlers(this);
// Expression edits mark the component dirty; its "search" action starts a query.
expressionPresenter.setUiHandlers(new ExpressionUiHandlers() {
@Override
public void fireDirty() {
setDirty(true);
}
@Override
public void search() {
start();
}
});
// Toolbar buttons, in display order.
addTermButton = view.addButton(SvgPresets.ADD);
addTermButton.setTitle("Add Term");
addOperatorButton = view.addButton(SvgPresets.OPERATOR);
disableItemButton = view.addButton(SvgPresets.DISABLE);
deleteItemButton = view.addButton(SvgPresets.DELETE);
historyButton = view.addButton(SvgPresets.HISTORY.enabled(true));
favouriteButton = view.addButton(SvgPresets.FAVOURITES.enabled(true));
downloadQueryButton = view.addButton(SvgPresets.DOWNLOAD);
// Only users permitted to manage processors get the process button.
if (securityContext.hasAppPermission(PermissionNames.MANAGE_PROCESSORS_PERMISSION)) {
processButton = view.addButton(SvgPresets.PROCESS.enabled(true));
}
// Hidden until a search produces warnings (see setErrors).
warningsButton = view.addButton(SvgPresets.ALERT.title("Show Warnings"));
warningsButton.setVisible(false);
indexLoader = new IndexLoader(getEventBus(), restFactory);
searchModel = new SearchModel(searchBus, this, indexLoader, timeZones);
// Cache default processor limits from the UI config; failures are surfaced to the user.
clientPropertyCache.get()
.onSuccess(result -> {
defaultProcessorTimeLimit = result.getProcess().getDefaultTimeLimit();
defaultProcessorRecordLimit = result.getProcess().getDefaultRecordLimit();
})
.onFailure(caught -> AlertEvent.fireError(QueryPresenter.this, caught.getMessage(), null));
}
/**
 * Registers UI event handlers: expression selection/context menu, toolbar
 * button clicks (left mouse button only), data source load completion and
 * query download.
 */
@Override
protected void onBind() {
super.onBind();
// Keep button enablement in sync with the expression tree selection.
registerHandler(expressionPresenter.addDataSelectionHandler(event -> setButtonsEnabled()));
registerHandler(expressionPresenter.addContextMenuHandler(event -> {
final List<Item> menuItems = addExpressionActionsToMenu();
if (menuItems.size() > 0) {
final PopupPosition popupPosition = new PopupPosition(event.getX(), event.getY());
showMenu(popupPosition, menuItems);
}
}));
// Each click handler reacts only to the left mouse button.
registerHandler(addOperatorButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
addOperator();
}
}));
registerHandler(addTermButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
addTerm();
}
}));
registerHandler(disableItemButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
disable();
}
}));
registerHandler(deleteItemButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
delete();
}
}));
registerHandler(historyButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
historyPresenter.show(QueryPresenter.this, getComponents().getDashboard().getUuid());
}
}));
registerHandler(favouriteButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
final ExpressionOperator root = expressionPresenter.write();
favouritesPresenter.show(
QueryPresenter.this,
getComponents().getDashboard().getUuid(),
getQuerySettings().getDataSource(),
root);
}
}));
// processButton is only created for users with processor permissions.
if (processButton != null) {
registerHandler(processButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
choosePipeline();
}
}));
}
registerHandler(warningsButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
showWarnings();
}
}));
// When the index loader finishes, push the loaded fields into the editor.
registerHandler(indexLoader.addChangeDataHandler(event ->
loadedDataSource(indexLoader.getLoadedDataSourceRef(), indexLoader.getDataSourceFieldsMap())));
registerHandler(downloadQueryButton.addClickHandler(event -> downloadQuery()));
}
/**
 * Registers a component change handler so that selection changes in other
 * dashboard components (e.g. tables) re-run this query with the selection
 * substituted into any enabled, matching selection-handler expressions.
 */
@Override
public void setComponents(final Components components) {
super.setComponents(components);
registerHandler(components.addComponentChangeHandler(event -> {
// Ignore change events until this component has been initialised.
if (initialised) {
final Component component = event.getComponent();
if (component instanceof HasSelection) {
final HasSelection hasSelection = (HasSelection) component;
final List<Map<String, String>> selection = hasSelection.getSelection();
final List<ComponentSelectionHandler> selectionHandlers = getQuerySettings().getSelectionHandlers();
if (selectionHandlers != null) {
// A handler matches when enabled and either unscoped or scoped to this component.
final List<ComponentSelectionHandler> matchingHandlers = selectionHandlers
.stream()
.filter(ComponentSelectionHandler::isEnabled)
.filter(selectionHandler -> selectionHandler.getComponentId() == null ||
selectionHandler.getComponentId().equals(component.getId()))
.collect(Collectors.toList());
if (matchingHandlers.size() > 0) {
// Decorator ANDs the base expression with an OR of every
// handler expression evaluated against each selected row.
final Function<ExpressionOperator, ExpressionOperator> decorator = (in) -> {
final ExpressionOperator.Builder innerBuilder = ExpressionOperator
.builder();
boolean added = false;
for (final ComponentSelectionHandler selectionHandler : matchingHandlers) {
for (final Map<String, String> params : selection) {
ExpressionOperator ex = selectionHandler.getExpression();
ex = ExpressionUtil.replaceExpressionParameters(ex, params);
innerBuilder.addOperator(ex);
if (!added) {
added = true;
} else {
innerBuilder.op(Op.OR);
}
}
}
if (added) {
return ExpressionOperator
.builder()
.addOperator(in)
.addOperator(innerBuilder.build())
.build();
}
return in;
};
// this.params = params;
// lastUsedQueryInfo = null;
// Restart the search with the decorated expression.
stop();
run(true, true, decorator);
}
}
}
}
// NOTE(review): the commented-out blocks below look like retained
// reference code from related presenters - consider removing.
// if (component instanceof HasAbstractFields) {
// final VisPresenter visPresenter = (VisPresenter) component;
// final List<Map<String, String>> selection = visPresenter.getCurrentSelection();
// String params = "";
// if (selection != null) {
// for (final Map<String, String> map : selection) {
// for (final Entry<String, String> entry : map.entrySet()) {
// params += entry.getKey() + "=" + entry.getValue() + " ";
// }
// }
// }
// onQuery(params, null);
// }
// if (getTextSettings().getTableId() == null) {
// if (component instanceof TablePresenter) {
// currentTablePresenter = (TablePresenter) component;
// update(currentTablePresenter);
// }
// } else if (EqualsUtil.isEquals(getTextSettings().getTableId(), event.getComponentId())) {
// if (component instanceof TablePresenter) {
// currentTablePresenter = (TablePresenter) component;
// update(currentTablePresenter);
// }
// }
// }
}));
}
/**
 * Records warnings produced by a search and toggles the warnings button's
 * visibility accordingly (hidden when there are none).
 */
public void setErrors(final String errors) {
    currentWarnings = errors;
    final boolean hasWarnings = currentWarnings != null && !currentWarnings.isEmpty();
    warningsButton.setVisible(hasWarnings);
}
/**
 * Enables/disables the expression-editing and download buttons to match
 * the current expression selection and the chosen data source.
 */
private void setButtonsEnabled() {
    // The disable and delete buttons were previously toggled by two
    // separate, duplicated null checks; a single check keeps them in sync.
    final stroom.query.client.Item selectedItem = getSelectedItem();
    if (selectedItem == null) {
        disableItemButton.setEnabled(false);
        disableItemButton.setTitle("");
        deleteItemButton.setEnabled(false);
        deleteItemButton.setTitle("");
    } else {
        disableItemButton.setEnabled(true);
        disableItemButton.setTitle(getEnableDisableText());
        deleteItemButton.setEnabled(true);
        deleteItemButton.setTitle("Delete");
    }

    // Downloading a query only makes sense once a data source is chosen.
    final DocRef dataSourceRef = getQuerySettings().getDataSource();
    if (dataSourceRef == null) {
        downloadQueryButton.setEnabled(false);
        downloadQueryButton.setTitle("");
    } else {
        downloadQueryButton.setEnabled(true);
        downloadQueryButton.setTitle("Download Query");
    }
}
/**
 * Kicks off an asynchronous load of the given data source's fields;
 * completion is delivered via the index loader's change event, which
 * invokes {@code loadedDataSource}.
 */
private void loadDataSource(final DocRef dataSourceRef) {
    final IndexLoader loader = searchModel.getIndexLoader();
    loader.loadDataSource(dataSourceRef);
    setButtonsEnabled();
}
/**
 * Called when a data source finishes loading. Populates the expression
 * editor with the queryable fields (sorted case-insensitively by name),
 * records the data source in the component settings if it changed, and
 * enables the view only when searching is actually possible.
 */
private void loadedDataSource(final DocRef dataSourceRef, final DataSourceFieldsMap dataSourceFieldsMap) {
    // Create a list of index fields.
    final List<AbstractField> fields = new ArrayList<>();
    if (dataSourceFieldsMap != null) {
        for (final AbstractField field : dataSourceFieldsMap.values()) {
            // getQueryable() may be null when the default value (false) was
            // omitted from the serialised JSON; treat null as not queryable.
            if (Boolean.TRUE.equals(field.getQueryable())) {
                fields.add(field);
            }
        }
    }
    fields.sort(Comparator.comparing(AbstractField::getName, String.CASE_INSENSITIVE_ORDER));
    expressionPresenter.init(restFactory, dataSourceRef, fields);

    // Persist the data source into the settings only when it actually changed.
    final EqualsBuilder builder = new EqualsBuilder();
    builder.append(getQuerySettings().getDataSource(), dataSourceRef);
    if (!builder.isEquals()) {
        setSettings(getQuerySettings()
            .copy()
            .dataSource(dataSourceRef)
            .build());
        setDirty(true);
    }

    // Only allow searching if we have a data source and have loaded fields from it successfully.
    getView().setEnabled(dataSourceRef != null && fields.size() > 0);

    init();
    setButtonsEnabled();
}
/**
 * Adds a new operator node to the expression tree at the current selection.
 */
private void addOperator() {
expressionPresenter.addOperator();
}
/**
 * Adds a new term to the expression tree, warning the user instead when no
 * data source has been chosen yet.
 */
private void addTerm() {
    if (getQuerySettings().getDataSource() != null) {
        expressionPresenter.addTerm();
    } else {
        warnNoDataSource();
    }
}
/**
 * Alerts the user that a search cannot be built without a chosen data source.
 */
private void warnNoDataSource() {
AlertEvent.fireWarn(this, "No data source has been chosen to search", null);
}
/**
 * Toggles the enabled state of the selected expression item, then refreshes
 * the buttons (the disable button's title reflects the new state).
 */
private void disable() {
expressionPresenter.disable();
setButtonsEnabled();
}
/**
 * Deletes the selected expression item.
 * NOTE(review): unlike disable(), this does not call setButtonsEnabled();
 * presumably the presenter fires a selection change that refreshes the
 * buttons via the data selection handler - confirm.
 */
private void delete() {
expressionPresenter.delete();
}
/**
 * Opens a pipeline chooser so the user can pick a pipeline to process the
 * current query's results with; on selection, continues to the processor
 * limits dialog.
 */
private void choosePipeline() {
expressionPresenter.clearSelection();
// Write expression.
final ExpressionOperator root = expressionPresenter.write();
// Capture the current data source and expression as the processor query.
final QueryData queryData = new QueryData();
queryData.setDataSource(getQuerySettings().getDataSource());
queryData.setExpression(root);
final EntityChooser chooser = pipelineSelection.get();
chooser.setCaption("Choose Pipeline To Process Results With");
chooser.setIncludedTypes(PipelineDoc.DOCUMENT_TYPE);
chooser.setRequiredPermissions(DocumentPermissionNames.USE);
chooser.addDataSelectionHandler(event -> {
final DocRef pipeline = chooser.getSelectedEntityReference();
if (pipeline != null) {
setProcessorLimits(queryData, pipeline);
}
});
chooser.show();
}
    /**
     * Shows a dialog allowing the user to set record/time limits for the
     * processor that will be created, then opens the processor filter editor.
     *
     * @param queryData the query the new processor filter will run
     * @param pipeline  the pipeline chosen to process the results
     */
    private void setProcessorLimits(final QueryData queryData, final DocRef pipeline) {
        // Seed the dialog with the system-wide default limits.
        processorLimitsPresenter.setTimeLimitMins(defaultProcessorTimeLimit);
        processorLimitsPresenter.setRecordLimit(defaultProcessorRecordLimit);
        final PopupSize popupSize = new PopupSize(321, 102, false);
        ShowPopupEvent.fire(this, processorLimitsPresenter, PopupType.OK_CANCEL_DIALOG, popupSize,
                "Process Search Results", new PopupUiHandlers() {
                    @Override
                    public void onHideRequest(final boolean autoClose, final boolean ok) {
                        if (ok) {
                            // Only limits the user actually supplied are set.
                            final Limits limits = new Limits();
                            if (processorLimitsPresenter.getRecordLimit() != null) {
                                limits.setEventCount(processorLimitsPresenter.getRecordLimit());
                            }
                            if (processorLimitsPresenter.getTimeLimitMins() != null) {
                                // Convert minutes to milliseconds.
                                limits.setDurationMs(processorLimitsPresenter.getTimeLimitMins() * 60 * 1000);
                            }
                            queryData.setLimits(limits);
                            openEditor(queryData, pipeline);
                        }
                        HidePopupEvent.fire(QueryPresenter.this, processorLimitsPresenter);
                    }

                    @Override
                    public void onHide(final boolean autoClose, final boolean ok) {
                        // Nothing to do on hide.
                    }
                });
    }
    /**
     * Creates a processor filter for the given pipeline/query via the REST API
     * and fires an event so the new filter can be shown.
     */
    private void openEditor(final QueryData queryData, final DocRef pipeline) {
        // Now create the processor filter using the find stream criteria.
        // NOTE(review): the literal args (1, false, true) map to positional
        // parameters of CreateProcessFilterRequest — confirm against its ctor.
        final CreateProcessFilterRequest request = new CreateProcessFilterRequest(pipeline, queryData, 1, false, true);
        final Rest<ProcessorFilter> rest = restFactory.create();
        rest
                .onSuccess(streamProcessorFilter -> {
                    if (streamProcessorFilter != null) {
                        // Notify listeners that a processor filter now exists.
                        CreateProcessorEvent.fire(QueryPresenter.this, streamProcessorFilter);
                    } else {
                        AlertEvent.fireInfo(this, "Created batch processor", null);
                    }
                })
                .call(PROCESSOR_FILTER_RESOURCE)
                .create(request);
    }
private void showWarnings() {
if (currentWarnings != null && !currentWarnings.isEmpty()) {
AlertEvent.fireWarn(this, "The following warnings have been created while running this search:",
currentWarnings, null);
}
}
    /**
     * Called when an external component requests that this query is run with
     * the supplied parameters.
     *
     * @param params    the parameter string to apply to the expression
     * @param queryInfo optional info recorded against the query
     */
    @Override
    public void onQuery(final String params, final String queryInfo) {
        this.params = params;
        lastUsedQueryInfo = queryInfo;
        if (initialised) {
            // Restart any running search with the new parameters.
            stop();
            run(true, true);
        }
    }
    /**
     * Sets whether this query should run automatically when the component opens.
     */
    @Override
    public void setQueryOnOpen(final boolean queryOnOpen) {
        this.queryOnOpen = queryOnOpen;
    }
    /**
     * Starts the query. If no search is currently active the user may first be
     * prompted for query info before the search runs; otherwise the search is
     * simply re-run.
     */
    @Override
    public void start() {
        if (SearchModel.Mode.INACTIVE.equals(searchModel.getMode())) {
            queryInfoPresenterProvider.get().show(lastUsedQueryInfo, state -> {
                if (state.isOk()) {
                    // Remember the info the user entered for subsequent runs.
                    lastUsedQueryInfo = state.getQueryInfo();
                    run(true, true);
                }
            });
        } else {
            run(true, true);
        }
    }
@Override
public void stop() {
if (autoRefreshTimer != null) {
autoRefreshTimer.cancel();
autoRefreshTimer = null;
}
searchModel.destroy();
}
    /**
     * Runs the current query without decorating the written expression.
     */
    private void run(final boolean incremental,
                     final boolean storeHistory) {
        run(incremental, storeHistory, Function.identity());
    }
    /**
     * Runs the current query, warning instead if no data source is configured.
     *
     * @param incremental         passed through to the search model
     * @param storeHistory        passed through to the search model
     * @param expressionDecorator applied to the written expression before the
     *                            search starts (e.g. to add selection filters)
     */
    private void run(final boolean incremental,
                     final boolean storeHistory,
                     final Function<ExpressionOperator, ExpressionOperator> expressionDecorator) {
        final DocRef dataSourceRef = getQuerySettings().getDataSource();
        if (dataSourceRef == null) {
            warnNoDataSource();
        } else {
            // Reset warning state from any previous run.
            currentWarnings = null;
            expressionPresenter.clearSelection();
            warningsButton.setVisible(false);

            // Write expression.
            final ExpressionOperator root = expressionPresenter.write();
            final ExpressionOperator decorated = expressionDecorator.apply(root);

            // Start search.
            searchModel.search(decorated, params, incremental, storeHistory, lastUsedQueryInfo);
        }
    }
    /**
     * Restores this component from the supplied config, defaulting the
     * settings and automation objects when absent, then loads the data source
     * and expression.
     */
    @Override
    public void read(final ComponentConfig componentConfig) {
        super.read(componentConfig);

        final ComponentSettings settings = componentConfig.getSettings();
        if (!(settings instanceof QueryComponentSettings)) {
            // Missing or wrong-typed settings - start from defaults.
            setSettings(QueryComponentSettings.builder()
                    .build());
        }

        if (getQuerySettings().getAutomate() == null) {
            // Ensure automation settings always exist so later code can rely
            // on a non-null Automate object.
            final Automate automate = Automate.builder().build();
            setSettings(getQuerySettings()
                    .copy()
                    .automate(automate)
                    .build());
        }

        // Create and register the search model.
        final DashboardDoc dashboard = getComponents().getDashboard();
        final DashboardUUID dashboardUUID = new DashboardUUID(dashboard.getUuid(),
                dashboard.getName(),
                getComponentConfig().getId());
        searchModel.setDashboardUUID(dashboardUUID);

        // Read data source.
        loadDataSource(getQuerySettings().getDataSource());

        // Read expression.
        ExpressionOperator root = getQuerySettings().getExpression();
        if (root == null) {
            root = ExpressionOperator.builder().build();
        }
        setExpression(root);
    }
    /**
     * Persists the current expression into the component settings before
     * delegating serialisation to the superclass.
     */
    @Override
    public ComponentConfig write() {
        // Write expression.
        setSettings(getQuerySettings()
                .copy()
                .expression(expressionPresenter.write())
                .build());
        return super.write();
    }
    /**
     * Returns the component settings cast to the query-specific type; read()
     * guarantees the settings are a QueryComponentSettings instance.
     */
    private QueryComponentSettings getQuerySettings() {
        return (QueryComponentSettings) getSettings();
    }
    /**
     * Stops any running search and resets the initialised flag when this
     * component is removed from the dashboard.
     */
    @Override
    public void onRemove() {
        super.onRemove();
        stop();
        initialised = false;
    }
    /**
     * No-op: the query component has no linking behaviour.
     */
    @Override
    public void link() {
    }
    /**
     * Marks the component as fully initialised (called once the data source
     * has loaded) and kicks off an automatic search if requested.
     */
    private void init() {
        if (!initialised) {
            initialised = true;
            // An auto search can only commence if the UI has fully loaded and the data source has also
            // loaded from the server.
            final Automate automate = getQuerySettings().getAutomate();
            if (queryOnOpen || automate.isOpen()) {
                run(true, false);
            }
        }
    }
    /**
     * Reloads the data source after the user changes the component settings,
     * since the settings may point at a different data source.
     */
    @Override
    public void changeSettings() {
        super.changeSettings();
        loadDataSource(getQuerySettings().getDataSource());
    }
    /**
     * Registers a handler to be notified when this component becomes dirty.
     */
    @Override
    public HandlerRegistration addDirtyHandler(final DirtyHandler handler) {
        return addHandlerToSource(DirtyEvent.getType(), handler);
    }
    /**
     * Returns the dashboard component type descriptor for queries.
     */
    @Override
    public ComponentType getType() {
        return TYPE;
    }
    /**
     * Returns the search model backing this query component.
     */
    public SearchModel getSearchModel() {
        return searchModel;
    }
    /**
     * Loads the supplied expression tree into the expression editor.
     */
    public void setExpression(final ExpressionOperator root) {
        expressionPresenter.read(root);
    }
public void setMode(final SearchModel.Mode mode) {
getView().setMode(mode);
// If this is the end of a query then schedule a refresh.
if (SearchModel.Mode.INACTIVE.equals(mode)) {
scheduleRefresh();
}
}
private void scheduleRefresh() {
// Schedule auto refresh after a query has finished.
if (autoRefreshTimer != null) {
autoRefreshTimer.cancel();
}
autoRefreshTimer = null;
final Automate automate = getQuerySettings().getAutomate();
if (initialised && automate.isRefresh()) {
try {
final String interval = automate.getRefreshInterval();
int millis = ModelStringUtil.parseDurationString(interval).intValue();
// Ensure that the refresh interval is not less than 10 seconds.
millis = Math.max(millis, TEN_SECONDS);
autoRefreshTimer = new Timer() {
@Override
public void run() {
if (!initialised) {
stop();
} else {
// Make sure search is currently inactive before we attempt to execute a new query.
if (SearchModel.Mode.INACTIVE.equals(searchModel.getMode())) {
QueryPresenter.this.run(false, false);
}
}
}
};
autoRefreshTimer.schedule(millis);
} catch (final RuntimeException e) {
// Ignore as we cannot display this error now.
}
}
}
private List<Item> addExpressionActionsToMenu() {
final stroom.query.client.Item selectedItem = getSelectedItem();
final boolean hasSelection = selectedItem != null;
final List<Item> menuItems = new ArrayList<>();
menuItems.add(new IconMenuItem(1, SvgPresets.ADD, SvgPresets.ADD, "Add Term", null, true, this::addTerm));
menuItems.add(new IconMenuItem(2, SvgPresets.OPERATOR, SvgPresets.OPERATOR, "Add Operator", null,
true, this::addOperator));
menuItems.add(new IconMenuItem(3, SvgPresets.DISABLE, SvgPresets.DISABLE, getEnableDisableText(),
null, hasSelection, this::disable));
menuItems.add(new IconMenuItem(4, SvgPresets.DELETE, SvgPresets.DELETE, "Delete", null,
hasSelection, this::delete));
return menuItems;
}
private String getEnableDisableText() {
final stroom.query.client.Item selectedItem = getSelectedItem();
if (selectedItem != null && !selectedItem.isEnabled()) {
return "Enable";
}
return "Disable";
}
private stroom.query.client.Item getSelectedItem() {
if (expressionPresenter.getSelectionModel() != null) {
return expressionPresenter.getSelectionModel().getSelectedObject();
}
return null;
}
    /**
     * Shows the expression context menu at the given position. The popup
     * simply hides itself when a hide is requested.
     */
    private void showMenu(final PopupPosition popupPosition, final List<Item> menuItems) {
        menuListPresenter.setData(menuItems);

        final PopupUiHandlers popupUiHandlers = new PopupUiHandlers() {
            @Override
            public void onHideRequest(final boolean autoClose, final boolean ok) {
                HidePopupEvent.fire(QueryPresenter.this, menuListPresenter);
            }

            @Override
            public void onHide(final boolean autoClose, final boolean ok) {
                // Nothing to do on hide.
            }
        };
        ShowPopupEvent.fire(this, menuListPresenter, PopupType.POPUP, popupPosition, popupUiHandlers);
    }
    /**
     * Downloads the current query as a file via the dashboard REST resource.
     * Does nothing if no data source has been configured.
     */
    private void downloadQuery() {
        if (getQuerySettings().getDataSource() != null) {
            // Build a search request from the current expression and params.
            final DashboardSearchRequest searchRequest = searchModel.createDownloadQueryRequest(
                    expressionPresenter.write(),
                    params,
                    false,
                    false,
                    null);

            // Identify this query by dashboard and component.
            final DashboardDoc dashboard = getComponents().getDashboard();
            final DashboardUUID dashboardUUID = new DashboardUUID(
                    dashboard.getUuid(),
                    dashboard.getName(),
                    getComponentConfig().getId());
            final DashboardQueryKey dashboardQueryKey = new DashboardQueryKey(
                    dashboardUUID.getUUID(),
                    dashboard.getUuid(),
                    dashboardUUID.getComponentId());

            final Rest<ResourceGeneration> rest = restFactory.create();
            rest
                    .onSuccess(result ->
                            ExportFileCompleteUtil.onSuccess(locationManager, null, result))
                    .call(DASHBOARD_RESOURCE)
                    .downloadQuery(new DownloadQueryRequest(dashboardQueryKey, searchRequest));
        }
    }
    /**
     * View contract for the query component.
     */
    public interface QueryView extends View, HasUiHandlers<QueryUiHandlers> {

        // Adds a toolbar button with the given icon preset and returns it.
        ButtonView addButton(SvgPreset preset);

        // Sets the widget used to display the expression tree.
        void setExpressionView(View view);

        // Reflects the current search mode in the UI.
        void setMode(SearchModel.Mode mode);

        // Enables or disables the whole query UI.
        void setEnabled(boolean enabled);
    }
}
| stroom-dashboard/stroom-dashboard-client/src/main/java/stroom/dashboard/client/query/QueryPresenter.java | /*
* Copyright 2017 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.dashboard.client.query;
import stroom.alert.client.event.AlertEvent;
import stroom.core.client.LocationManager;
import stroom.dashboard.client.HasSelection;
import stroom.dashboard.client.main.AbstractComponentPresenter;
import stroom.dashboard.client.main.Component;
import stroom.dashboard.client.main.ComponentRegistry.ComponentType;
import stroom.dashboard.client.main.Components;
import stroom.dashboard.client.main.DashboardUUID;
import stroom.dashboard.client.main.DataSourceFieldsMap;
import stroom.dashboard.client.main.IndexLoader;
import stroom.dashboard.client.main.Queryable;
import stroom.dashboard.client.main.SearchBus;
import stroom.dashboard.client.main.SearchModel;
import stroom.dashboard.client.table.TimeZones;
import stroom.dashboard.shared.Automate;
import stroom.dashboard.shared.ComponentConfig;
import stroom.dashboard.shared.ComponentSelectionHandler;
import stroom.dashboard.shared.ComponentSettings;
import stroom.dashboard.shared.DashboardDoc;
import stroom.dashboard.shared.DashboardQueryKey;
import stroom.dashboard.shared.DashboardResource;
import stroom.dashboard.shared.DashboardSearchRequest;
import stroom.dashboard.shared.DownloadQueryRequest;
import stroom.dashboard.shared.QueryComponentSettings;
import stroom.datasource.api.v2.AbstractField;
import stroom.dispatch.client.ExportFileCompleteUtil;
import stroom.dispatch.client.Rest;
import stroom.dispatch.client.RestFactory;
import stroom.docref.DocRef;
import stroom.document.client.event.DirtyEvent;
import stroom.document.client.event.DirtyEvent.DirtyHandler;
import stroom.document.client.event.HasDirtyHandlers;
import stroom.explorer.client.presenter.EntityChooser;
import stroom.pipeline.client.event.CreateProcessorEvent;
import stroom.pipeline.shared.PipelineDoc;
import stroom.processor.shared.CreateProcessFilterRequest;
import stroom.processor.shared.Limits;
import stroom.processor.shared.ProcessorFilter;
import stroom.processor.shared.ProcessorFilterResource;
import stroom.processor.shared.QueryData;
import stroom.query.api.v2.ExpressionOperator;
import stroom.query.api.v2.ExpressionOperator.Op;
import stroom.query.api.v2.ExpressionUtil;
import stroom.query.client.ExpressionTreePresenter;
import stroom.query.client.ExpressionUiHandlers;
import stroom.security.client.api.ClientSecurityContext;
import stroom.security.shared.DocumentPermissionNames;
import stroom.security.shared.PermissionNames;
import stroom.svg.client.SvgPreset;
import stroom.svg.client.SvgPresets;
import stroom.ui.config.client.UiConfigCache;
import stroom.util.shared.EqualsBuilder;
import stroom.util.shared.ModelStringUtil;
import stroom.util.shared.ResourceGeneration;
import stroom.widget.button.client.ButtonView;
import stroom.widget.menu.client.presenter.IconMenuItem;
import stroom.widget.menu.client.presenter.Item;
import stroom.widget.menu.client.presenter.MenuListPresenter;
import stroom.widget.popup.client.event.HidePopupEvent;
import stroom.widget.popup.client.event.ShowPopupEvent;
import stroom.widget.popup.client.presenter.PopupPosition;
import stroom.widget.popup.client.presenter.PopupSize;
import stroom.widget.popup.client.presenter.PopupUiHandlers;
import stroom.widget.popup.client.presenter.PopupView.PopupType;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.user.client.Timer;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.web.bindery.event.shared.EventBus;
import com.google.web.bindery.event.shared.HandlerRegistration;
import com.gwtplatform.mvp.client.HasUiHandlers;
import com.gwtplatform.mvp.client.View;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
public class QueryPresenter extends AbstractComponentPresenter<QueryPresenter.QueryView>
implements QueryUiHandlers, HasDirtyHandlers, Queryable {
private static final DashboardResource DASHBOARD_RESOURCE = GWT.create(DashboardResource.class);
private static final ProcessorFilterResource PROCESSOR_FILTER_RESOURCE = GWT.create(ProcessorFilterResource.class);
public static final ComponentType TYPE = new ComponentType(0, "query", "Query");
static final int TEN_SECONDS = 10000;
private final ExpressionTreePresenter expressionPresenter;
private final QueryHistoryPresenter historyPresenter;
private final QueryFavouritesPresenter favouritesPresenter;
private final Provider<EntityChooser> pipelineSelection;
private final Provider<QueryInfoPresenter> queryInfoPresenterProvider;
private final ProcessorLimitsPresenter processorLimitsPresenter;
private final MenuListPresenter menuListPresenter;
private final RestFactory restFactory;
private final LocationManager locationManager;
private final IndexLoader indexLoader;
private final SearchModel searchModel;
private final ButtonView addOperatorButton;
private final ButtonView addTermButton;
private final ButtonView disableItemButton;
private final ButtonView deleteItemButton;
private final ButtonView historyButton;
private final ButtonView favouriteButton;
private final ButtonView downloadQueryButton;
private final ButtonView warningsButton;
private String params;
private String currentWarnings;
private ButtonView processButton;
private long defaultProcessorTimeLimit;
private long defaultProcessorRecordLimit;
private boolean initialised;
private Timer autoRefreshTimer;
private String lastUsedQueryInfo;
private boolean queryOnOpen;
@Inject
public QueryPresenter(final EventBus eventBus,
final QueryView view,
final SearchBus searchBus,
final Provider<QuerySettingsPresenter> settingsPresenterProvider,
final ExpressionTreePresenter expressionPresenter,
final QueryHistoryPresenter historyPresenter,
final QueryFavouritesPresenter favouritesPresenter,
final Provider<EntityChooser> pipelineSelection,
final Provider<QueryInfoPresenter> queryInfoPresenterProvider,
final ProcessorLimitsPresenter processorLimitsPresenter,
final MenuListPresenter menuListPresenter,
final RestFactory restFactory,
final ClientSecurityContext securityContext,
final UiConfigCache clientPropertyCache,
final LocationManager locationManager,
final TimeZones timeZones) {
super(eventBus, view, settingsPresenterProvider);
this.expressionPresenter = expressionPresenter;
this.historyPresenter = historyPresenter;
this.favouritesPresenter = favouritesPresenter;
this.pipelineSelection = pipelineSelection;
this.queryInfoPresenterProvider = queryInfoPresenterProvider;
this.processorLimitsPresenter = processorLimitsPresenter;
this.menuListPresenter = menuListPresenter;
this.restFactory = restFactory;
this.locationManager = locationManager;
view.setExpressionView(expressionPresenter.getView());
view.setUiHandlers(this);
expressionPresenter.setUiHandlers(new ExpressionUiHandlers() {
@Override
public void fireDirty() {
setDirty(true);
}
@Override
public void search() {
start();
}
});
addTermButton = view.addButton(SvgPresets.ADD);
addTermButton.setTitle("Add Term");
addOperatorButton = view.addButton(SvgPresets.OPERATOR);
disableItemButton = view.addButton(SvgPresets.DISABLE);
deleteItemButton = view.addButton(SvgPresets.DELETE);
historyButton = view.addButton(SvgPresets.HISTORY.enabled(true));
favouriteButton = view.addButton(SvgPresets.FAVOURITES.enabled(true));
downloadQueryButton = view.addButton(SvgPresets.DOWNLOAD);
if (securityContext.hasAppPermission(PermissionNames.MANAGE_PROCESSORS_PERMISSION)) {
processButton = view.addButton(SvgPresets.PROCESS.enabled(true));
}
warningsButton = view.addButton(SvgPresets.ALERT.title("Show Warnings"));
warningsButton.setVisible(false);
indexLoader = new IndexLoader(getEventBus(), restFactory);
searchModel = new SearchModel(searchBus, this, indexLoader, timeZones);
clientPropertyCache.get()
.onSuccess(result -> {
defaultProcessorTimeLimit = result.getProcess().getDefaultTimeLimit();
defaultProcessorRecordLimit = result.getProcess().getDefaultRecordLimit();
})
.onFailure(caught -> AlertEvent.fireError(QueryPresenter.this, caught.getMessage(), null));
}
@Override
protected void onBind() {
super.onBind();
registerHandler(expressionPresenter.addDataSelectionHandler(event -> setButtonsEnabled()));
registerHandler(expressionPresenter.addContextMenuHandler(event -> {
final List<Item> menuItems = addExpressionActionsToMenu();
if (menuItems.size() > 0) {
final PopupPosition popupPosition = new PopupPosition(event.getX(), event.getY());
showMenu(popupPosition, menuItems);
}
}));
registerHandler(addOperatorButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
addOperator();
}
}));
registerHandler(addTermButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
addTerm();
}
}));
registerHandler(disableItemButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
disable();
}
}));
registerHandler(deleteItemButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
delete();
}
}));
registerHandler(historyButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
historyPresenter.show(QueryPresenter.this, getComponents().getDashboard().getUuid());
}
}));
registerHandler(favouriteButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
final ExpressionOperator root = expressionPresenter.write();
favouritesPresenter.show(
QueryPresenter.this,
getComponents().getDashboard().getUuid(),
getQuerySettings().getDataSource(),
root);
}
}));
if (processButton != null) {
registerHandler(processButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
choosePipeline();
}
}));
}
registerHandler(warningsButton.addClickHandler(event -> {
if ((event.getNativeButton() & NativeEvent.BUTTON_LEFT) != 0) {
showWarnings();
}
}));
registerHandler(indexLoader.addChangeDataHandler(event ->
loadedDataSource(indexLoader.getLoadedDataSourceRef(), indexLoader.getDataSourceFieldsMap())));
registerHandler(downloadQueryButton.addClickHandler(event -> downloadQuery()));
}
@Override
public void setComponents(final Components components) {
super.setComponents(components);
registerHandler(components.addComponentChangeHandler(event -> {
if (initialised) {
final Component component = event.getComponent();
if (component instanceof HasSelection) {
final HasSelection hasSelection = (HasSelection) component;
final List<Map<String, String>> selection = hasSelection.getSelection();
final List<ComponentSelectionHandler> selectionHandlers = getQuerySettings().getSelectionHandlers();
if (selectionHandlers != null) {
final List<ComponentSelectionHandler> matchingHandlers = selectionHandlers
.stream()
.filter(ComponentSelectionHandler::isEnabled)
.filter(selectionHandler -> selectionHandler.getComponentId() == null ||
selectionHandler.getComponentId().equals(component.getId()))
.collect(Collectors.toList());
if (matchingHandlers.size() > 0) {
final Function<ExpressionOperator, ExpressionOperator> decorator = (in) -> {
final ExpressionOperator.Builder innerBuilder = ExpressionOperator
.builder();
boolean added = false;
for (final ComponentSelectionHandler selectionHandler : matchingHandlers) {
for (final Map<String, String> params : selection) {
ExpressionOperator ex = selectionHandler.getExpression();
ex = ExpressionUtil.replaceExpressionParameters(ex, params);
innerBuilder.addOperator(ex);
if (!added) {
added = true;
} else {
innerBuilder.op(Op.OR);
}
}
}
if (added) {
return ExpressionOperator
.builder()
.addOperator(in)
.addOperator(innerBuilder.build())
.build();
}
return in;
};
// this.params = params;
// lastUsedQueryInfo = null;
stop();
run(true, true, decorator);
}
}
}
}
// if (component instanceof HasAbstractFields) {
// final VisPresenter visPresenter = (VisPresenter) component;
// final List<Map<String, String>> selection = visPresenter.getCurrentSelection();
// String params = "";
// if (selection != null) {
// for (final Map<String, String> map : selection) {
// for (final Entry<String, String> entry : map.entrySet()) {
// params += entry.getKey() + "=" + entry.getValue() + " ";
// }
// }
// }
// onQuery(params, null);
// }
// if (getTextSettings().getTableId() == null) {
// if (component instanceof TablePresenter) {
// currentTablePresenter = (TablePresenter) component;
// update(currentTablePresenter);
// }
// } else if (EqualsUtil.isEquals(getTextSettings().getTableId(), event.getComponentId())) {
// if (component instanceof TablePresenter) {
// currentTablePresenter = (TablePresenter) component;
// update(currentTablePresenter);
// }
// }
// }
}));
}
public void setErrors(final String errors) {
currentWarnings = errors;
warningsButton.setVisible(currentWarnings != null && !currentWarnings.isEmpty());
}
private void setButtonsEnabled() {
final stroom.query.client.Item selectedItem = getSelectedItem();
if (selectedItem == null) {
disableItemButton.setEnabled(false);
disableItemButton.setTitle("");
} else {
disableItemButton.setEnabled(true);
disableItemButton.setTitle(getEnableDisableText());
}
if (selectedItem == null) {
deleteItemButton.setEnabled(false);
deleteItemButton.setTitle("");
} else {
deleteItemButton.setEnabled(true);
deleteItemButton.setTitle("Delete");
}
final DocRef dataSourceRef = getQuerySettings().getDataSource();
if (dataSourceRef == null) {
downloadQueryButton.setEnabled(false);
downloadQueryButton.setTitle("");
} else {
downloadQueryButton.setEnabled(true);
downloadQueryButton.setTitle("Download Query");
}
}
    /**
     * Asynchronously loads the given data source; loadedDataSource() is
     * invoked via the index loader's change handler once it arrives.
     */
    private void loadDataSource(final DocRef dataSourceRef) {
        searchModel.getIndexLoader().loadDataSource(dataSourceRef);
        setButtonsEnabled();
    }
private void loadedDataSource(final DocRef dataSourceRef, final DataSourceFieldsMap dataSourceFieldsMap) {
// Create a list of index fields.
final List<AbstractField> fields = new ArrayList<>();
if (dataSourceFieldsMap != null) {
for (final AbstractField field : dataSourceFieldsMap.values()) {
// Protection from default values of false not being in the serialised json
if (field.getQueryable() != null
? field.getQueryable()
: false) {
fields.add(field);
}
}
}
fields.sort(Comparator.comparing(AbstractField::getName));
expressionPresenter.init(restFactory, dataSourceRef, fields);
final EqualsBuilder builder = new EqualsBuilder();
builder.append(getQuerySettings().getDataSource(), dataSourceRef);
if (!builder.isEquals()) {
setSettings(getQuerySettings()
.copy()
.dataSource(dataSourceRef)
.build());
setDirty(true);
}
// Only allow searching if we have a data source and have loaded fields from it successfully.
getView().setEnabled(dataSourceRef != null && fields.size() > 0);
init();
setButtonsEnabled();
}
private void addOperator() {
expressionPresenter.addOperator();
}
private void addTerm() {
final DocRef dataSourceRef = getQuerySettings().getDataSource();
if (dataSourceRef == null) {
warnNoDataSource();
} else {
expressionPresenter.addTerm();
}
}
private void warnNoDataSource() {
AlertEvent.fireWarn(this, "No data source has been chosen to search", null);
}
private void disable() {
expressionPresenter.disable();
setButtonsEnabled();
}
private void delete() {
expressionPresenter.delete();
}
private void choosePipeline() {
expressionPresenter.clearSelection();
// Write expression.
final ExpressionOperator root = expressionPresenter.write();
final QueryData queryData = new QueryData();
queryData.setDataSource(getQuerySettings().getDataSource());
queryData.setExpression(root);
final EntityChooser chooser = pipelineSelection.get();
chooser.setCaption("Choose Pipeline To Process Results With");
chooser.setIncludedTypes(PipelineDoc.DOCUMENT_TYPE);
chooser.setRequiredPermissions(DocumentPermissionNames.USE);
chooser.addDataSelectionHandler(event -> {
final DocRef pipeline = chooser.getSelectedEntityReference();
if (pipeline != null) {
setProcessorLimits(queryData, pipeline);
}
});
chooser.show();
}
private void setProcessorLimits(final QueryData queryData, final DocRef pipeline) {
processorLimitsPresenter.setTimeLimitMins(defaultProcessorTimeLimit);
processorLimitsPresenter.setRecordLimit(defaultProcessorRecordLimit);
final PopupSize popupSize = new PopupSize(321, 102, false);
ShowPopupEvent.fire(this, processorLimitsPresenter, PopupType.OK_CANCEL_DIALOG, popupSize,
"Process Search Results", new PopupUiHandlers() {
@Override
public void onHideRequest(final boolean autoClose, final boolean ok) {
if (ok) {
final Limits limits = new Limits();
if (processorLimitsPresenter.getRecordLimit() != null) {
limits.setEventCount(processorLimitsPresenter.getRecordLimit());
}
if (processorLimitsPresenter.getTimeLimitMins() != null) {
limits.setDurationMs(processorLimitsPresenter.getTimeLimitMins() * 60 * 1000);
}
queryData.setLimits(limits);
openEditor(queryData, pipeline);
}
HidePopupEvent.fire(QueryPresenter.this, processorLimitsPresenter);
}
@Override
public void onHide(final boolean autoClose, final boolean ok) {
}
});
}
private void openEditor(final QueryData queryData, final DocRef pipeline) {
// Now create the processor filter using the find stream criteria.
final CreateProcessFilterRequest request = new CreateProcessFilterRequest(pipeline, queryData, 1, false, true);
final Rest<ProcessorFilter> rest = restFactory.create();
rest
.onSuccess(streamProcessorFilter -> {
if (streamProcessorFilter != null) {
CreateProcessorEvent.fire(QueryPresenter.this, streamProcessorFilter);
} else {
AlertEvent.fireInfo(this, "Created batch processor", null);
}
})
.call(PROCESSOR_FILTER_RESOURCE)
.create(request);
}
private void showWarnings() {
if (currentWarnings != null && !currentWarnings.isEmpty()) {
AlertEvent.fireWarn(this, "The following warnings have been created while running this search:",
currentWarnings, null);
}
}
@Override
public void onQuery(final String params, final String queryInfo) {
this.params = params;
lastUsedQueryInfo = queryInfo;
if (initialised) {
stop();
run(true, true);
}
}
@Override
public void setQueryOnOpen(final boolean queryOnOpen) {
this.queryOnOpen = queryOnOpen;
}
@Override
public void start() {
if (SearchModel.Mode.INACTIVE.equals(searchModel.getMode())) {
queryInfoPresenterProvider.get().show(lastUsedQueryInfo, state -> {
if (state.isOk()) {
lastUsedQueryInfo = state.getQueryInfo();
run(true, true);
}
});
} else {
run(true, true);
}
}
@Override
public void stop() {
if (autoRefreshTimer != null) {
autoRefreshTimer.cancel();
autoRefreshTimer = null;
}
searchModel.destroy();
}
private void run(final boolean incremental,
final boolean storeHistory) {
run(incremental, storeHistory, Function.identity());
}
private void run(final boolean incremental,
final boolean storeHistory,
final Function<ExpressionOperator, ExpressionOperator> expressionDecorator) {
final DocRef dataSourceRef = getQuerySettings().getDataSource();
if (dataSourceRef == null) {
warnNoDataSource();
} else {
currentWarnings = null;
expressionPresenter.clearSelection();
warningsButton.setVisible(false);
// Write expression.
final ExpressionOperator root = expressionPresenter.write();
final ExpressionOperator decorated = expressionDecorator.apply(root);
// Start search.
searchModel.search(decorated, params, incremental, storeHistory, lastUsedQueryInfo);
}
}
@Override
public void read(final ComponentConfig componentConfig) {
super.read(componentConfig);
final ComponentSettings settings = componentConfig.getSettings();
if (!(settings instanceof QueryComponentSettings)) {
setSettings(QueryComponentSettings.builder()
.build());
}
if (getQuerySettings().getAutomate() == null) {
final Automate automate = Automate.builder().build();
setSettings(getQuerySettings()
.copy()
.automate(automate)
.build());
}
// Create and register the search model.
final DashboardDoc dashboard = getComponents().getDashboard();
final DashboardUUID dashboardUUID = new DashboardUUID(dashboard.getUuid(),
dashboard.getName(),
getComponentConfig().getId());
searchModel.setDashboardUUID(dashboardUUID);
// Read data source.
loadDataSource(getQuerySettings().getDataSource());
// Read expression.
ExpressionOperator root = getQuerySettings().getExpression();
if (root == null) {
root = ExpressionOperator.builder().build();
}
setExpression(root);
}
@Override
public ComponentConfig write() {
// Write expression.
setSettings(getQuerySettings()
.copy()
.expression(expressionPresenter.write())
.build());
return super.write();
}
private QueryComponentSettings getQuerySettings() {
return (QueryComponentSettings) getSettings();
}
@Override
public void onRemove() {
super.onRemove();
stop();
initialised = false;
}
@Override
public void link() {
}
private void init() {
if (!initialised) {
initialised = true;
// An auto search can only commence if the UI has fully loaded and the data source has also
// loaded from the server.
final Automate automate = getQuerySettings().getAutomate();
if (queryOnOpen || automate.isOpen()) {
run(true, false);
}
}
}
@Override
public void changeSettings() {
super.changeSettings();
loadDataSource(getQuerySettings().getDataSource());
}
@Override
public HandlerRegistration addDirtyHandler(final DirtyHandler handler) {
return addHandlerToSource(DirtyEvent.getType(), handler);
}
@Override
public ComponentType getType() {
return TYPE;
}
public SearchModel getSearchModel() {
return searchModel;
}
public void setExpression(final ExpressionOperator root) {
expressionPresenter.read(root);
}
public void setMode(final SearchModel.Mode mode) {
getView().setMode(mode);
// If this is the end of a query then schedule a refresh.
if (SearchModel.Mode.INACTIVE.equals(mode)) {
scheduleRefresh();
}
}
private void scheduleRefresh() {
// Schedule auto refresh after a query has finished.
if (autoRefreshTimer != null) {
autoRefreshTimer.cancel();
}
autoRefreshTimer = null;
final Automate automate = getQuerySettings().getAutomate();
if (initialised && automate.isRefresh()) {
try {
final String interval = automate.getRefreshInterval();
int millis = ModelStringUtil.parseDurationString(interval).intValue();
// Ensure that the refresh interval is not less than 10 seconds.
millis = Math.max(millis, TEN_SECONDS);
autoRefreshTimer = new Timer() {
@Override
public void run() {
if (!initialised) {
stop();
} else {
// Make sure search is currently inactive before we attempt to execute a new query.
if (SearchModel.Mode.INACTIVE.equals(searchModel.getMode())) {
QueryPresenter.this.run(false, false);
}
}
}
};
autoRefreshTimer.schedule(millis);
} catch (final RuntimeException e) {
// Ignore as we cannot display this error now.
}
}
}
private List<Item> addExpressionActionsToMenu() {
final stroom.query.client.Item selectedItem = getSelectedItem();
final boolean hasSelection = selectedItem != null;
final List<Item> menuItems = new ArrayList<>();
menuItems.add(new IconMenuItem(1, SvgPresets.ADD, SvgPresets.ADD, "Add Term", null, true, this::addTerm));
menuItems.add(new IconMenuItem(2, SvgPresets.OPERATOR, SvgPresets.OPERATOR, "Add Operator", null,
true, this::addOperator));
menuItems.add(new IconMenuItem(3, SvgPresets.DISABLE, SvgPresets.DISABLE, getEnableDisableText(),
null, hasSelection, this::disable));
menuItems.add(new IconMenuItem(4, SvgPresets.DELETE, SvgPresets.DELETE, "Delete", null,
hasSelection, this::delete));
return menuItems;
}
private String getEnableDisableText() {
final stroom.query.client.Item selectedItem = getSelectedItem();
if (selectedItem != null && !selectedItem.isEnabled()) {
return "Enable";
}
return "Disable";
}
private stroom.query.client.Item getSelectedItem() {
if (expressionPresenter.getSelectionModel() != null) {
return expressionPresenter.getSelectionModel().getSelectedObject();
}
return null;
}
private void showMenu(final PopupPosition popupPosition, final List<Item> menuItems) {
menuListPresenter.setData(menuItems);
final PopupUiHandlers popupUiHandlers = new PopupUiHandlers() {
@Override
public void onHideRequest(final boolean autoClose, final boolean ok) {
HidePopupEvent.fire(QueryPresenter.this, menuListPresenter);
}
@Override
public void onHide(final boolean autoClose, final boolean ok) {
}
};
ShowPopupEvent.fire(this, menuListPresenter, PopupType.POPUP, popupPosition, popupUiHandlers);
}
private void downloadQuery() {
if (getQuerySettings().getDataSource() != null) {
final DashboardSearchRequest searchRequest = searchModel.createDownloadQueryRequest(
expressionPresenter.write(),
params,
false,
false,
null);
final DashboardDoc dashboard = getComponents().getDashboard();
final DashboardUUID dashboardUUID = new DashboardUUID(
dashboard.getUuid(),
dashboard.getName(),
getComponentConfig().getId());
final DashboardQueryKey dashboardQueryKey = new DashboardQueryKey(
dashboardUUID.getUUID(),
dashboard.getUuid(),
dashboardUUID.getComponentId());
final Rest<ResourceGeneration> rest = restFactory.create();
rest
.onSuccess(result ->
ExportFileCompleteUtil.onSuccess(locationManager, null, result))
.call(DASHBOARD_RESOURCE)
.downloadQuery(new DownloadQueryRequest(dashboardQueryKey, searchRequest));
}
}
public interface QueryView extends View, HasUiHandlers<QueryUiHandlers> {
ButtonView addButton(SvgPreset preset);
void setExpressionView(View view);
void setMode(SearchModel.Mode mode);
void setEnabled(boolean enabled);
}
}
| Sort query field picker in case-insensitive order
| stroom-dashboard/stroom-dashboard-client/src/main/java/stroom/dashboard/client/query/QueryPresenter.java | Sort query field picker in case-insensitive order | <ide><path>troom-dashboard/stroom-dashboard-client/src/main/java/stroom/dashboard/client/query/QueryPresenter.java
<ide> }
<ide> }
<ide> }
<del> fields.sort(Comparator.comparing(AbstractField::getName));
<add> fields.sort(Comparator.comparing(AbstractField::getName, String.CASE_INSENSITIVE_ORDER));
<ide> expressionPresenter.init(restFactory, dataSourceRef, fields);
<ide>
<ide> final EqualsBuilder builder = new EqualsBuilder(); |
|
Java | mit | 41a04b0017fa623b71dfd081495f082bda07210f | 0 | Ordinastie/MalisisCore | /*
* The MIT License (MIT)
*
* Copyright (c) 2014 Ordinastie
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.malisis.core.util.multiblock;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import net.malisis.core.MalisisCore;
import net.malisis.core.block.IBlockComponent;
import net.malisis.core.block.component.DirectionalComponent;
import net.malisis.core.util.BlockPosUtils;
import net.malisis.core.util.EnumFacingUtils;
import net.malisis.core.util.MBlockState;
import net.malisis.core.util.blockdata.BlockDataHandler;
import net.minecraft.block.properties.PropertyDirection;
import net.minecraft.block.state.IBlockState;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
/**
* @author Ordinastie
*
*/
public abstract class MultiBlock implements Iterable<MBlockState>
{
public static String ORIGIN_BLOCK_DATA = MalisisCore.modid + ":multiBlockOrigin";
protected Map<BlockPos, MBlockState> states = new HashMap<>();
protected BlockPos offset = new BlockPos(0, 0, 0);
protected PropertyDirection property = DirectionalComponent.HORIZONTAL;
private int rotation;
private boolean bulkPlace;
private boolean bulkBreak;
public void setOffset(BlockPos offset)
{
this.offset = offset;
buildStates();
}
public void setPropertyDirection(PropertyDirection property)
{
this.property = property;
}
public void setRotation(IBlockState state)
{
if (state == null || !state.getProperties().containsKey(property))
rotation = 0;
else
{
EnumFacing direction = (EnumFacing) state.getValue(property);
rotation = EnumFacingUtils.getRotationCount(direction);
}
}
public int getRotation()
{
return rotation;
}
public void setBulkProcess(boolean bulkPlace, boolean bulkBreak)
{
this.bulkPlace = bulkPlace;
this.bulkBreak = bulkBreak;
}
public boolean isBulkPlace()
{
return bulkPlace;
}
public boolean isBulkBreak()
{
return bulkBreak;
}
public boolean isFromMultiblock(World world, BlockPos pos)
{
BlockPos origin = getOrigin(world, pos);
if (origin == null)
return false;
IBlockState state = world.getBlockState(origin);
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(pos);
if (mstate.getPos().equals(pos))
return true;
}
return false;
}
public MBlockState getState(BlockPos pos)
{
pos = BlockPosUtils.rotate(pos, 4 - rotation);
return states.get(pos);
}
public boolean canPlaceBlockAt(World world, BlockPos pos, IBlockState state, boolean placeOrigin)
{
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(pos);
if ((!mstate.getPos().equals(pos) || placeOrigin)
&& !world.getBlockState(mstate.getPos()).getBlock().isReplaceable(world, mstate.getPos()))
return false;
}
return true;
}
public void placeBlocks(World world, BlockPos pos, IBlockState state, boolean placeOrigin)
{
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(pos);
if (!mstate.getPos().equals(pos) || placeOrigin)
{
BlockDataHandler.setData(ORIGIN_BLOCK_DATA, world, mstate.getPos(), pos);
mstate.placeBlock(world, 2);
}
}
BlockDataHandler.setData(ORIGIN_BLOCK_DATA, world, pos, pos);
}
public void breakBlocks(World world, BlockPos pos, IBlockState state)
{
BlockPos origin = getOrigin(world, pos);
if (origin == null)
{
world.setBlockToAir(pos);
return;
}
if (!pos.equals(origin))
{
breakBlocks(world, origin, world.getBlockState(origin));
return;
}
BlockDataHandler.removeData(ORIGIN_BLOCK_DATA, world, origin);
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(origin);
if (mstate.matchesWorld(world))
{
mstate.breakBlock(world, 2);
BlockDataHandler.removeData(ORIGIN_BLOCK_DATA, world, mstate.getPos());
}
}
}
public boolean isComplete(World world, BlockPos pos)
{
return isComplete(world, pos, null);
}
public boolean isComplete(World world, BlockPos pos, MBlockState newState)
{
setRotation(world.getBlockState(pos));
MultiBlockAccess mba = new MultiBlockAccess(this, world);
for (MBlockState mstate : this)
{
mstate = new MBlockState(mba, mstate.getPos())/*.rotate(rotation)*/.offset(pos);
boolean matches = mstate.matchesWorld(world);
if (!matches)
mstate.matchesWorld(world);
if (!matches && (newState == null || !mstate.equals(newState)))
return false;
}
return true;
}
@Override
public Iterator<MBlockState> iterator()
{
return states.values().iterator();
}
protected abstract void buildStates();
public static void registerBlockData()
{
BlockDataHandler.registerBlockData(ORIGIN_BLOCK_DATA, BlockPosUtils::fromBytes, BlockPosUtils::toBytes);
}
public static BlockPos getOrigin(IBlockAccess world, BlockPos pos)
{
BlockPos origin = BlockDataHandler.getData(ORIGIN_BLOCK_DATA, world, pos);
if (origin != null && IBlockComponent.getComponent(MultiBlockComponent.class, world.getBlockState(origin).getBlock()) == null)
{
origin = null;
BlockDataHandler.removeData(ORIGIN_BLOCK_DATA, world, pos);
}
return world != null && pos != null ? origin : null;
}
public static boolean isOrigin(IBlockAccess world, BlockPos pos)
{
return world != null && pos != null && pos.equals(getOrigin(world, pos));
}
}
| src/main/java/net/malisis/core/util/multiblock/MultiBlock.java | /*
* The MIT License (MIT)
*
* Copyright (c) 2014 Ordinastie
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.malisis.core.util.multiblock;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import net.malisis.core.MalisisCore;
import net.malisis.core.block.component.DirectionalComponent;
import net.malisis.core.util.BlockPosUtils;
import net.malisis.core.util.EnumFacingUtils;
import net.malisis.core.util.MBlockState;
import net.malisis.core.util.blockdata.BlockDataHandler;
import net.minecraft.block.properties.PropertyDirection;
import net.minecraft.block.state.IBlockState;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
/**
* @author Ordinastie
*
*/
public abstract class MultiBlock implements Iterable<MBlockState>
{
public static String ORIGIN_BLOCK_DATA = MalisisCore.modid + ":multiBlockOrigin";
protected Map<BlockPos, MBlockState> states = new HashMap<>();
protected BlockPos offset = new BlockPos(0, 0, 0);
protected PropertyDirection property = DirectionalComponent.HORIZONTAL;
private int rotation;
private boolean bulkPlace;
private boolean bulkBreak;
public void setOffset(BlockPos offset)
{
this.offset = offset;
buildStates();
}
public void setPropertyDirection(PropertyDirection property)
{
this.property = property;
}
public void setRotation(IBlockState state)
{
if (state == null || !state.getProperties().containsKey(property))
rotation = 0;
else
{
EnumFacing direction = (EnumFacing) state.getValue(property);
rotation = EnumFacingUtils.getRotationCount(direction);
}
}
public int getRotation()
{
return rotation;
}
public void setBulkProcess(boolean bulkPlace, boolean bulkBreak)
{
this.bulkPlace = bulkPlace;
this.bulkBreak = bulkBreak;
}
public boolean isBulkPlace()
{
return bulkPlace;
}
public boolean isBulkBreak()
{
return bulkBreak;
}
public boolean isFromMultiblock(World world, BlockPos pos)
{
BlockPos origin = getOrigin(world, pos);
if (origin == null)
return false;
IBlockState state = world.getBlockState(origin);
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(pos);
if (mstate.getPos().equals(pos))
return true;
}
return false;
}
public MBlockState getState(BlockPos pos)
{
pos = BlockPosUtils.rotate(pos, 4 - rotation);
return states.get(pos);
}
public boolean canPlaceBlockAt(World world, BlockPos pos, IBlockState state, boolean placeOrigin)
{
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(pos);
if ((!mstate.getPos().equals(pos) || placeOrigin)
&& !world.getBlockState(mstate.getPos()).getBlock().isReplaceable(world, mstate.getPos()))
return false;
}
return true;
}
public void placeBlocks(World world, BlockPos pos, IBlockState state, boolean placeOrigin)
{
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(pos);
if (!mstate.getPos().equals(pos) || placeOrigin)
{
BlockDataHandler.setData(ORIGIN_BLOCK_DATA, world, mstate.getPos(), pos);
mstate.placeBlock(world, 2);
}
}
BlockDataHandler.setData(ORIGIN_BLOCK_DATA, world, pos, pos);
}
public void breakBlocks(World world, BlockPos pos, IBlockState state)
{
BlockPos origin = getOrigin(world, pos);
if (origin == null)
{
world.setBlockToAir(pos);
return;
}
if (!pos.equals(origin))
{
breakBlocks(world, origin, world.getBlockState(origin));
return;
}
BlockDataHandler.removeData(ORIGIN_BLOCK_DATA, world, origin);
setRotation(state);
for (MBlockState mstate : this)
{
mstate = mstate.rotate(rotation).offset(origin);
if (mstate.matchesWorld(world))
{
mstate.breakBlock(world, 2);
BlockDataHandler.removeData(ORIGIN_BLOCK_DATA, world, mstate.getPos());
}
}
}
public boolean isComplete(World world, BlockPos pos)
{
return isComplete(world, pos, null);
}
public boolean isComplete(World world, BlockPos pos, MBlockState newState)
{
setRotation(world.getBlockState(pos));
MultiBlockAccess mba = new MultiBlockAccess(this, world);
for (MBlockState mstate : this)
{
mstate = new MBlockState(mba, mstate.getPos())/*.rotate(rotation)*/.offset(pos);
boolean matches = mstate.matchesWorld(world);
if (!matches)
mstate.matchesWorld(world);
if (!matches && (newState == null || !mstate.equals(newState)))
return false;
}
return true;
}
@Override
public Iterator<MBlockState> iterator()
{
return states.values().iterator();
}
protected abstract void buildStates();
public static void registerBlockData()
{
BlockDataHandler.registerBlockData(ORIGIN_BLOCK_DATA, BlockPosUtils::fromBytes, BlockPosUtils::toBytes);
}
public static BlockPos getOrigin(IBlockAccess world, BlockPos pos)
{
return world != null && pos != null ? BlockDataHandler.getData(ORIGIN_BLOCK_DATA, world, pos) : null;
}
public static boolean isOrigin(IBlockAccess world, BlockPos pos)
{
return world != null && pos != null && pos.equals(getOrigin(world, pos));
}
}
| Added cleanup of ORIGIN data if no Multiblock is found at the origin
position | src/main/java/net/malisis/core/util/multiblock/MultiBlock.java | Added cleanup of ORIGIN data if no Multiblock is found at the origin position | <ide><path>rc/main/java/net/malisis/core/util/multiblock/MultiBlock.java
<ide> import java.util.Map;
<ide>
<ide> import net.malisis.core.MalisisCore;
<add>import net.malisis.core.block.IBlockComponent;
<ide> import net.malisis.core.block.component.DirectionalComponent;
<ide> import net.malisis.core.util.BlockPosUtils;
<ide> import net.malisis.core.util.EnumFacingUtils;
<ide>
<ide> public static BlockPos getOrigin(IBlockAccess world, BlockPos pos)
<ide> {
<del> return world != null && pos != null ? BlockDataHandler.getData(ORIGIN_BLOCK_DATA, world, pos) : null;
<add> BlockPos origin = BlockDataHandler.getData(ORIGIN_BLOCK_DATA, world, pos);
<add> if (origin != null && IBlockComponent.getComponent(MultiBlockComponent.class, world.getBlockState(origin).getBlock()) == null)
<add> {
<add> origin = null;
<add> BlockDataHandler.removeData(ORIGIN_BLOCK_DATA, world, pos);
<add> }
<add> return world != null && pos != null ? origin : null;
<ide> }
<ide>
<ide> public static boolean isOrigin(IBlockAccess world, BlockPos pos) |
|
Java | agpl-3.0 | e8fd450392d807f399f90afb48dae452ff3505a2 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 76f1a28e-2e60-11e5-9284-b827eb9e62be | hello.java | 76ec3ed4-2e60-11e5-9284-b827eb9e62be | 76f1a28e-2e60-11e5-9284-b827eb9e62be | hello.java | 76f1a28e-2e60-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>76ec3ed4-2e60-11e5-9284-b827eb9e62be
<add>76f1a28e-2e60-11e5-9284-b827eb9e62be |
|
JavaScript | isc | 9630d92e0a567176730721b6ccba450dbd3a27c0 | 0 | wilsonianb/ripple-lib,wilsonianb/ripple-lib,ripple/ripple-lib,ripple/ripple-lib,darkdarkdragon/ripple-lib,ripple/ripple-lib,darkdarkdragon/ripple-lib,wilsonianb/ripple-lib,ripple/ripple-lib,darkdarkdragon/ripple-lib | #!/usr/bin/env node
var Transaction = require('../src/js/ripple/transaction').Transaction;
var argv = process.argv.slice(2);
var verbose;
var secret;
var tx_json;
if (~argv.indexOf('-v')){
argv.splice(argv.indexOf('-v'), 1);
verbose = true;
}
secret = argv.shift();
tx_json = argv.shift();
if (tx_json === '-') {
read_input(ready);
} else {
ready();
}
function read_input(callback) {
tx_json = '';
process.stdin.on('data', function(data) { tx_json += data; });
process.stdin.on('end', callback);
process.stdin.resume();
}
function ready() {
var valid_arguments = secret && tx_json;
if (!valid_arguments) {
console.error('Invalid arguments\n');
print_usage();
} else {
var valid_json = true;
try {
tx_json = JSON.parse(tx_json);
} catch(exception) {
valid_json = false;
}
if (!valid_json) {
console.error('Invalid JSON\n');
print_usage();
} else {
sign_transaction();
}
}
}
function print_usage() {
console.log(
'Usage: rsign.js <secret> <json>\n\n',
'Example: rsign.js ssq55ueDob4yV3kPVnNQLHB6icwpC',
JSON.stringify({
TransactionType: 'Payment',
Account: 'r3P9vH81KBayazSTrQj6S25jW6kDb779Gi',
Destination: 'r3kmLJN5D28dHuH8vZNUZpMC43pEHpaocV',
Amount: '200000000',
Fee: '10',
Sequence: 1
})
);
};
function sign_transaction() {
var tx = new Transaction();
tx.tx_json = tx_json;
tx._secret = secret;
tx.complete();
var unsigned_blob = tx.serialize().to_hex();
tx.sign();
if (verbose) {
var sim = { };
sim.tx_blob = tx.serialize().to_hex();
sim.tx_json = tx.tx_json;
sim.tx_signing_hash = tx.signing_hash().to_hex();
sim.tx_unsigned = unsigned_blob;
console.log(JSON.stringify(sim, null, 2));
} else {
console.log(tx.serialize().to_hex());
}
};
// vim:sw=2:sts=2:ts=8:et
| bin/rsign.js | #!/usr/bin/env node
var Transaction = require('../src/js/ripple/transaction').Transaction;
var argv = process.argv.slice(2);
var verbose;
var secret;
var tx_json;
if (~argv.indexOf('-v')){
argv.splice(argv.indexOf('-v'));
verbose = true;
}
secret = argv.shift();
tx_json = argv.shift();
if (tx_json === '-') {
read_input(ready);
} else {
ready();
}
function read_input(callback) {
tx_json = '';
process.stdin.on('data', function(data) { tx_json += data; });
process.stdin.on('end', callback);
process.stdin.resume();
}
function ready() {
var valid_arguments = secret && tx_json;
if (!valid_arguments) {
console.error('Invalid arguments\n');
print_usage();
} else {
var valid_json = true;
try {
tx_json = JSON.parse(tx_json);
} catch(exception) {
valid_json = false;
}
if (!valid_json) {
console.error('Invalid JSON\n');
print_usage();
} else {
sign_transaction();
}
}
}
function print_usage() {
console.log(
'Usage: rsign.js <secret> <json>\n\n',
'Example: rsign.js ssq55ueDob4yV3kPVnNQLHB6icwpC',
JSON.stringify({
TransactionType: 'Payment',
Account: 'r3P9vH81KBayazSTrQj6S25jW6kDb779Gi',
Destination: 'r3kmLJN5D28dHuH8vZNUZpMC43pEHpaocV',
Amount: '200000000',
Fee: '10',
Sequence: '1'
})
);
};
function sign_transaction() {
var tx = new Transaction();
tx.tx_json = tx_json;
tx._secret = secret;
tx.complete();
var unsigned_blob = tx.serialize().to_hex();
tx.sign();
if (verbose) {
var sim = { };
sim.tx_blob = tx.serialize().to_hex();
sim.tx_json = tx.tx_json;
sim.tx_signing_hash = tx.signing_hash().to_hex();
sim.tx_unsigned = unsigned_blob;
console.log(JSON.stringify(sim, null, 2));
} else {
console.log(tx.serialize().to_hex());
}
};
// vim:sw=2:sts=2:ts=8:et
| Fix bin/rsign example sequence
| bin/rsign.js | Fix bin/rsign example sequence | <ide><path>in/rsign.js
<ide> var tx_json;
<ide>
<ide> if (~argv.indexOf('-v')){
<del> argv.splice(argv.indexOf('-v'));
<add> argv.splice(argv.indexOf('-v'), 1);
<ide> verbose = true;
<ide> }
<ide>
<ide> Destination: 'r3kmLJN5D28dHuH8vZNUZpMC43pEHpaocV',
<ide> Amount: '200000000',
<ide> Fee: '10',
<del> Sequence: '1'
<add> Sequence: 1
<ide> })
<ide> );
<ide> }; |
|
JavaScript | bsd-3-clause | 7e032ea97408ceaf7e23b489151473520f803298 | 0 | LocalData/localdata-dashboard,LocalData/localdata-dashboard | /*jslint nomen: true */
/*globals define: true */
define([
// Libraries
'jquery',
'lib/lodash',
'backbone',
'lib/leaflet/leaflet.google',
'moment',
'lib/tinypubsub',
// LocalData
'settings',
'api',
// Models
'models/responses'
],
function($, _, Backbone, L, moment, events, settings, api, Responses) {
'use strict';
var MapView = Backbone.View.extend({
map: null,
responses: null,
surveyId: null,
paginationView: null,
selectedLayer: null,
filtered: false,
selectedObject: {},
markers: {},
    // Set up the map view: wire collection events, create the Leaflet map,
    // its base layers, and the feature groups that hold rendered parcels.
    // options.responses: a Responses collection; its 'reset' event triggers
    // a full re-render.
    initialize: function(options) {
      console.log("Init map view");
      // NOTE(review): 'styleBy' is bound here but no such method is defined
      // on this view in this file -- confirm it still exists.
      _.bindAll(this, 'render', 'selectObject', 'renderObject', 'renderObjects', 'getResponsesInBounds', 'updateMapStyleBasedOnZoom', 'updateObjectStyles', 'styleBy');

      this.responses = options.responses;
      this.responses.on('reset', this.render, this);

      // We track the results on the map using these two groups
      this.parcelIdsOnTheMap = {};
      this.parcelsLayerGroup = new L.FeatureGroup();
      this.doneMarkersLayerGroup = new L.FeatureGroup();
      this.defaultStyle = settings.farZoomStyle;

      this.$el.html(_.template($('#map-view').html(), {}));

      // Initialize the map
      this.map = new L.map('map');

      // Don't think this is needed: this.markers = {};

      // Set up the base map; add the parcels and done markers
      this.googleLayer = new L.Google("TERRAIN");
      this.map.addLayer(this.googleLayer);
      this.map.addLayer(this.doneMarkersLayerGroup); // no longer used??
      this.map.addLayer(this.parcelsLayerGroup);

      this.map.setView([42.374891,-83.069504], 17); // default center

      this.map.on('zoomend', this.updateMapStyleBasedOnZoom);

      this.render();
    },
    // Re-draw all responses on the map; publishes 'loading' events around
    // the work so the surrounding UI can show a progress indicator.
    render: function() {
      // TODO: better message passing
      events.publish('loading', [true]);
      this.mapResponses();
      events.publish('loading', [false]);
    },
// Map all the responses on the map
// Optional paramemters: "question", [answers]
// If given, the each result on the map will be styled by answers to
// question.
mapResponses: function(question, answers) {
var indexOfColorToUse;
var color;
var style;
if(question !== undefined) {
this.filtered = true;
}
// Clear out all the old results
this.parcelsLayerGroup.clearLayers();
this.parcelIdsOnTheMap = {};
_.each(this.responses.models, function(response){
// Skip old records that don't have geo_info
var geoInfo = response.get("geo_info");
if (geoInfo === undefined) {
console.log("Skipping geo object");
return;
}
// Make sure were have the geometry for this parcel
if(_.has(geoInfo, "geometry")) {
var toRender = {
parcelId: response.get("parcel_id"),
geometry: response.get("geo_info").geometry
};
style = this.defaultStyle;
// Color the results if necessary
// TODO: lots of optimization possible here!
if (this.filtered) {
var questions = response.get("responses");
var answerToQuestion = questions[question];
// Figure out what color to use
indexOfColorToUse = _.indexOf(answers, answerToQuestion);
color = settings.colorRange[indexOfColorToUse + 1];
if (indexOfColorToUse == -1) {
console.log(settings.colorRange[0]);
color = settings.colorRange[0];
}
style = settings.styleTemplate;
style.color = color;
style.fillColor = color;
}
// Use that style!
this.renderObject(toRender, style);
}
}, this);
// fitBounds fails if there aren't any results, hence this test:
try {
this.map.fitBounds(this.parcelsLayerGroup.getBounds());
}
catch (e) {
console.log(e);
}
},
    // Re-style every parcel layer currently on the map in one pass.
    // style: a Leaflet path-style options object.
    updateObjectStyles: function(style) {
      this.parcelsLayerGroup.setStyle(style);
    },
// Expects an object with properties
// obj.parcelId: ID of the given object
// obj.geometry: GeoJSON geometry object
renderObject: function(obj, style) {
if(style === undefined) {
style = this.defaultStyle;
}
// We don't want to re-draw parcels that are already on the map
// So we keep a hash map with the layers so we can unrender them
if(! _.has(this.parcelIdsOnTheMap, obj.parcelId)){
// Make sure the format fits Leaflet's geoJSON expectations
obj.type = "Feature";
// Create a new geojson layer and style it.
var geojsonLayer = new L.GeoJSON();
geojsonLayer.addData(obj);
geojsonLayer.setStyle(style);
geojsonLayer.on('click', this.selectObject);
// Add the layer to the layergroup and the hashmap
this.parcelsLayerGroup.addLayer(geojsonLayer);
this.parcelIdsOnTheMap[obj.parcelId] = geojsonLayer;
}
},
renderObjects: function(results) {
_.each(results, function(elt) {
this.renderObject(elt);
}, this);
},
updateMapStyleBasedOnZoom: function(e) {
console.log("Map style update triggered");
// Don't update the styles if there's a filter in place
if(this.filtered) {
return;
}
// _kmq.push(['record', "Map zoomed"]);
var zoom = this.map.getZoom();
// Objects should be more detailed close up (zoom 10+)
if(zoom > 10) {
// If we're in pretty close, show the satellite view
if(zoom > 14) {
if(this.googleLayer._type !== "HYBRID") {
this.map.removeLayer(this.googleLayer);
this.googleLayer = new L.Google("HYBRID");
this.map.addLayer(this.googleLayer);
}
if(this.defaultStyle !== settings.closeZoomStyle) {
this.defaultStyle = settings.closeZoomStyle;
this.updateObjectStyles(settings.closeZoomStyle);
}
} else {
// Mid zoom (11-14)
// We're not that close, show the mid zoom styles
if(this.defaultStyle !== settings.midZoomStyle) {
this.defaultStyle = settings.closeZoomStyle;
this.updateObjectStyles(settings.closeZoomStyle);
}
// And use the terrain map
if (this.googleLayer._type !== "TERRAIN") {
// Show a more abstract map when zoomed out
this.map.removeLayer(this.googleLayer);
this.googleLayer = new L.Google("TERRAIN");
this.map.addLayer(this.googleLayer);
}
}
}else {
// Far zoom (>14)
// Show a more abstract map when zoomed out
if (this.googleLayer._type !== "TERRAIN") {
this.map.removeLayer(this.googleLayer);
this.googleLayer = new L.Google("TERRAIN");
this.map.addLayer(this.googleLayer);
this.defaultStyle = settings.farZoomStyle;
this.updateObjectStyles(settings.farZoomStyle);
}
}
// If a parcel is selected, make sure it says visually selected
if (this.selectedLayer !== null) {
this.selectedLayer.setStyle(settings.selectedStyle);
}
},
getParcelsInBounds: function() {
// Don't add any parcels if the zoom is really far out.
var zoom = this.map.getZoom();
if(zoom < 16) {
return;
}
// If there are a lot of objects, let's clear them out
// to improve performance
if( _.size(this.parcelIdsOnTheMap) > 1250 ) {
this.parcelsLayerGroup.clearLayers();
this.parcelIdsOnTheMap = {};
}
// Get parcel data in the bounds
api.getObjectsInBounds(this.map.getBounds(), this.renderObjects);
},
// TODO
// Adds a checkbox marker to the given point
// addDoneMarker: function(latlng, id) {
// // Only add markers if they aren't already on the map.
// // if (true){ //this.markers[id] == undefined
// // var doneIcon = new this.CheckIcon();
// // var doneMarker = new L.Marker(latlng, {icon: doneIcon});
// // this.doneMarkersLayerGroup.addLayer(doneMarker);
// // this.markers[id] = doneMarker;
// // }
// },
// addResultsToMap: function(results){
// _.each(results, function(elt) {
// var point = new L.LatLng(elt.geo_info.centroid[0], elt.geo_info.centroid[1// ]);
// var id = elt.parcel_id;
// this.addDoneMarker(point, id);
// }, this);
// },
    // Get all the responses in the current viewport and hand them to the
    // results callback. Skipped at zoom levels below 17.
    // NOTE(review): this.addResultsToMap is commented out above, so the
    // callback passed here appears to be undefined -- confirm whether this
    // method is still reachable.
    getResponsesInBounds: function(){
      console.log("Getting responses in the map");

      // Don't add any markers if the zoom is really far out.
      var zoom = this.map.getZoom();
      if(zoom < 17) {
        return;
      }

      // Get the objects in the bounds
      // And add them to the map
      api.getResponsesInBounds(this.map.getBounds(), this.addResultsToMap);
    },
selectObject: function(event) {
// _kmq.push(['record', "Map object selected"]);
if (this.selectedLayer !== null) {
this.selectedLayer.setStyle(this.defaultStyle);
}
// Select the current layer
this.selectedLayer = event.layer;
this.selectedLayer.setStyle(settings.selectedStyle);
// Let's show some info about this object.
this.details(this.selectedLayer.feature.parcelId);
},
// When a parcel is clicked, show details for just that parcel.
details: function(parcelId) {
console.log("Finding parcels " + parcelId);
this.sel = new Responses.Collection(this.responses.where({'parcel_id': parcelId}));
var selectedSingleObject = this.sel.toJSON()[0];
selectedSingleObject.createdHumanized = moment(selectedSingleObject.created, "YYYY-MM-DDThh:mm:ss.SSSZ").format("MMM Do h:mma");
$("#individual-result-container").html(_.template($('#indivdual-result').html(), {r: selectedSingleObject}));
// Button to close the details view
$("#individual-result-container .close").click(function(e) {
e.preventDefault();
$("#individual-result-container").html("");
});
}
});
return MapView;
}); | js/views/map.js | /*jslint nomen: true */
/*globals define: true */
define([
// Libraries
'jquery',
'lib/lodash',
'backbone',
'lib/leaflet/leaflet.google',
'moment',
'lib/tinypubsub',
// LocalData
'settings',
'api',
// Models
'models/responses'
],
function($, _, Backbone, L, moment, events, settings, api, Responses) {
'use strict';
var MapView = Backbone.View.extend({
map: null,
responses: null,
surveyId: null,
paginationView: null,
selectedLayer: null,
filtered: false,
selectedObject: {},
markers: {},
    // Set up the map view: wire collection events, create the Leaflet map,
    // its base layers, and the feature groups that hold rendered parcels.
    // options.responses: a Responses collection; its 'reset' event triggers
    // a full re-render.
    initialize: function(options) {
      console.log("Init map view");
      // NOTE(review): 'styleBy' is bound here but no such method is defined
      // on this view in this file -- confirm it still exists.
      _.bindAll(this, 'render', 'selectObject', 'renderObject', 'renderObjects', 'getResponsesInBounds', 'updateMapStyleBasedOnZoom', 'updateObjectStyles', 'styleBy');

      this.responses = options.responses;
      this.responses.on('reset', this.render, this);

      // We track the results on the map using these two groups
      this.parcelIdsOnTheMap = {};
      this.parcelsLayerGroup = new L.FeatureGroup();
      this.doneMarkersLayerGroup = new L.FeatureGroup();
      this.defaultStyle = settings.farZoomStyle;

      this.$el.html(_.template($('#map-view').html(), {}));

      // Initialize the map
      this.map = new L.map('map');

      // Don't think this is needed: this.markers = {};

      // Set up the base map; add the parcels and done markers
      this.googleLayer = new L.Google("TERRAIN");
      this.map.addLayer(this.googleLayer);
      this.map.addLayer(this.doneMarkersLayerGroup); // no longer used??
      this.map.addLayer(this.parcelsLayerGroup);

      this.map.setView([42.374891,-83.069504], 17); // default center

      this.map.on('zoomend', this.updateMapStyleBasedOnZoom);

      this.render();
    },
    // Re-draw all responses on the map; publishes 'loading' events around
    // the work so the surrounding UI can show a progress indicator.
    render: function() {
      // TODO: better message passing
      events.publish('loading', [true]);
      this.mapResponses();
      events.publish('loading', [false]);
    },
// Map all the responses on the map
// Optional paramemters: "question", [answers]
// If given, the each result on the map will be styled by answers to
// question.
mapResponses: function(question, answers) {
var indexOfColorToUse;
var color;
var style;
if(question !== undefined) {
this.filtered = true;
}
// Clear out all the old results
this.parcelsLayerGroup.clearLayers();
this.parcelIdsOnTheMap = {};
_.each(this.responses.models, function(response){
// Skip old records that don't have geo_info
var geoInfo = response.get("geo_info");
if (geoInfo === undefined) {
console.log("Skipping geo object");
return;
}
// Make sure were have the geometry for this parcel
if(_.has(geoInfo, "geometry")) {
var toRender = {
parcelId: response.get("parcel_id"),
geometry: response.get("geo_info").geometry
};
style = this.defaultStyle;
// Color the results if necessary
// TODO: lots of optimization possible here!
if (this.filtered) {
var questions = response.get("responses");
var answerToQuestion = questions[question];
// Figure out what color to use
indexOfColorToUse = _.indexOf(answers, answerToQuestion);
color = settings.colorRange[indexOfColorToUse + 1];
if (indexOfColorToUse == -1) {
console.log(settings.colorRange[0]);
color = settings.colorRange[0];
}
style = settings.styleTemplate;
style.color = color;
style.fillColor = color;
}
// Use that style!
this.renderObject(toRender, style);
}
}, this);
// fitBounds fails if there aren't any results, hence this test:
try {
console.log("Fitting bounds");
console.log(this.map);
console.log(this.parcelsLayerGroup);
this.map.fitBounds(this.parcelsLayerGroup.getBounds());
}
catch (e) {
// statements to handle any exceptions
console.log(e); // pass exception object to error handler
}
},
updateObjectStyles: function(style) {
this.parcelsLayerGroup.setStyle(style);
},
renderObject: function(obj, style) {
// Expects an object with properties
// obj.parcelId: ID of the given object
// obj.geometry: GeoJSON geometry object
if(style === undefined) {
style = this.defaultStyle;
}
// We don't want to re-draw parcels that are already on the map
// So we keep a hash map with the layers so we can unrender them
if(! _.has(this.parcelIdsOnTheMap, obj.parcelId)){
// Make sure the format fits Leaflet's geoJSON expectations
obj.type = "Feature";
// Create a new geojson layer and style it.
var geojsonLayer = new L.GeoJSON();
geojsonLayer.addData(obj);
geojsonLayer.setStyle(style);
geojsonLayer.on('click', this.selectObject);
// Add the layer to the layergroup and the hashmap
this.parcelsLayerGroup.addLayer(geojsonLayer);
this.parcelIdsOnTheMap[obj.parcelId] = geojsonLayer;
}
},
renderObjects: function(results) {
_.each(results, function(elt) {
this.renderObject(elt);
}, this);
},
updateMapStyleBasedOnZoom: function(e) {
console.log("Map style update triggered");
// Don't update the styles if there's a filter in place
if(this.filtered) {
return;
}
// _kmq.push(['record', "Map zoomed"]);
var zoom = this.map.getZoom();
// Objects should be more detailed close up (zoom 14+) ...................
if(zoom > 10) {
// If we're in pretty close, show the satellite view
if(zoom > 14) {
if(this.googleLayer._type !== "HYBRID") {
this.map.removeLayer(this.googleLayer);
this.googleLayer = new L.Google("HYBRID");
this.map.addLayer(this.googleLayer);
}
if(this.defaultStyle !== settings.closeZoomStyle) {
this.defaultStyle = settings.closeZoomStyle;
this.updateObjectStyles(settings.closeZoomStyle);
}
} else {
// Mid zoom (11-14)...................................................
// We're not that close, show the mid zoom styles
if(this.defaultStyle !== settings.midZoomStyle) {
this.defaultStyle = settings.closeZoomStyle;
this.updateObjectStyles(settings.closeZoomStyle);
}
// And use the terrain map
if (this.googleLayer._type !== "TERRAIN") {
// Show a more abstract map when zoomed out
this.map.removeLayer(this.googleLayer);
this.googleLayer = new L.Google("TERRAIN");
this.map.addLayer(this.googleLayer);
}
}
}else {
// Far zoom (>14) ......................................................
if (this.googleLayer._type !== "TERRAIN") {
// Show a more abstract map when zoomed out
this.map.removeLayer(this.googleLayer);
this.googleLayer = new L.Google("TERRAIN");
this.map.addLayer(this.googleLayer);
// Objects should be more abstract far out
this.defaultStyle = settings.farZoomStyle;
this.updateObjectStyles(settings.farZoomStyle);
}
}
// If a parcel is selected, make sure it says visually selected
if (this.selectedLayer !== null) {
this.selectedLayer.setStyle(settings.selectedStyle);
}
},
getParcelsInBounds: function() {
// Don't add any parcels if the zoom is really far out.
var zoom = this.map.getZoom();
if(zoom < 16) {
return;
}
// If there are a lot of objects, let's reset.
if( _.size(this.parcelIdsOnTheMap) > 1250 ) {
this.parcelsLayerGroup.clearLayers();
this.parcelIdsOnTheMap = {};
}
// Get parcel data in the bounds
api.getObjectsInBounds(this.map.getBounds(), this.renderObjects);
},
// TODO
// Adds a checkbox marker to the given point
// addDoneMarker: function(latlng, id) {
// // Only add markers if they aren't already on the map.
// // if (true){ //this.markers[id] == undefined
// // var doneIcon = new this.CheckIcon();
// // var doneMarker = new L.Marker(latlng, {icon: doneIcon});
// // this.doneMarkersLayerGroup.addLayer(doneMarker);
// // this.markers[id] = doneMarker;
// // }
// },
// addResultsToMap: function(results){
// _.each(results, function(elt) {
// var point = new L.LatLng(elt.geo_info.centroid[0], elt.geo_info.centroid[1// ]);
// var id = elt.parcel_id;
// this.addDoneMarker(point, id);
// }, this);
// },
// Get all the responses in a map
getResponsesInBounds: function(){
console.log("Getting responses in the map");
// Don't add any markers if the zoom is really far out.
var zoom = this.map.getZoom();
if(zoom < 17) {
return;
}
// Get the objects in the bounds
// And add them to the map
api.getResponsesInBounds(this.map.getBounds(), this.addResultsToMap);
},
selectObject: function(event) {
// _kmq.push(['record', "Map object selected"]);
if (this.selectedLayer !== null) {
this.selectedLayer.setStyle(this.defaultStyle);
}
// Select the current layer
this.selectedLayer = event.layer;
this.selectedLayer.setStyle(settings.selectedStyle);
// Let's show some info about this object.
this.details(this.selectedLayer.feature.parcelId);
// TODO
// Let other parts of the app know that we've selected something.
// $.publish("objectSelected");
},
// When a parcel is clicked, show details for just that parcel.
details: function(parcelId) {
console.log("Finding parcels " + parcelId);
this.sel = new Responses.Collection(this.responses.where({'parcel_id': parcelId}));
var selectedSingleObject = this.sel.toJSON()[0];
selectedSingleObject.createdHumanized = moment(selectedSingleObject.created, "YYYY-MM-DDThh:mm:ss.SSSZ").format("MMM Do h:mma");
$("#individual-result-container").html(_.template($('#indivdual-result').html(), {r: selectedSingleObject}));
// Button to close the details view
$("#individual-result-container .close").click(function(e) {
e.preventDefault();
$("#individual-result-container").html("");
});
}
});
return MapView;
}); | Style / comment improvements
| js/views/map.js | Style / comment improvements | <ide><path>s/views/map.js
<ide>
<ide> }, this);
<ide>
<del>
<ide> // fitBounds fails if there aren't any results, hence this test:
<ide> try {
<del> console.log("Fitting bounds");
<del> console.log(this.map);
<del> console.log(this.parcelsLayerGroup);
<del>
<ide> this.map.fitBounds(this.parcelsLayerGroup.getBounds());
<ide> }
<ide> catch (e) {
<del> // statements to handle any exceptions
<del> console.log(e); // pass exception object to error handler
<add> console.log(e);
<ide> }
<ide>
<ide> },
<ide> this.parcelsLayerGroup.setStyle(style);
<ide> },
<ide>
<add> // Expects an object with properties
<add> // obj.parcelId: ID of the given object
<add> // obj.geometry: GeoJSON geometry object
<ide> renderObject: function(obj, style) {
<del> // Expects an object with properties
<del> // obj.parcelId: ID of the given object
<del> // obj.geometry: GeoJSON geometry object
<ide>
<ide> if(style === undefined) {
<ide> style = this.defaultStyle;
<ide> // _kmq.push(['record', "Map zoomed"]);
<ide> var zoom = this.map.getZoom();
<ide>
<del> // Objects should be more detailed close up (zoom 14+) ...................
<add> // Objects should be more detailed close up (zoom 10+)
<ide> if(zoom > 10) {
<ide>
<ide> // If we're in pretty close, show the satellite view
<ide> }
<ide>
<ide> } else {
<del> // Mid zoom (11-14)...................................................
<add> // Mid zoom (11-14)
<ide> // We're not that close, show the mid zoom styles
<ide> if(this.defaultStyle !== settings.midZoomStyle) {
<ide> this.defaultStyle = settings.closeZoomStyle;
<ide> }
<ide>
<ide> }else {
<del> // Far zoom (>14) ......................................................
<add> // Far zoom (>14)
<add> // Show a more abstract map when zoomed out
<ide> if (this.googleLayer._type !== "TERRAIN") {
<del> // Show a more abstract map when zoomed out
<ide> this.map.removeLayer(this.googleLayer);
<ide> this.googleLayer = new L.Google("TERRAIN");
<ide> this.map.addLayer(this.googleLayer);
<ide>
<del> // Objects should be more abstract far out
<ide> this.defaultStyle = settings.farZoomStyle;
<ide> this.updateObjectStyles(settings.farZoomStyle);
<ide> }
<ide> return;
<ide> }
<ide>
<del> // If there are a lot of objects, let's reset.
<add> // If there are a lot of objects, let's clear them out
<add> // to improve performance
<ide> if( _.size(this.parcelIdsOnTheMap) > 1250 ) {
<ide> this.parcelsLayerGroup.clearLayers();
<ide> this.parcelIdsOnTheMap = {};
<ide> // }, this);
<ide> // },
<ide>
<del> // Get all the responses in a map
<add> // Get all the responses in the current viewport
<ide> getResponsesInBounds: function(){
<ide> console.log("Getting responses in the map");
<ide>
<ide>
<ide> // Let's show some info about this object.
<ide> this.details(this.selectedLayer.feature.parcelId);
<del>
<del> // TODO
<del> // Let other parts of the app know that we've selected something.
<del> // $.publish("objectSelected");
<ide> },
<ide>
<ide> // When a parcel is clicked, show details for just that parcel. |
|
Java | bsd-3-clause | 24dc9fa2711cf139daa212716a923a6bb3911107 | 0 | hoijui/JavaOSC | /*
* Copyright (C) 2001, C. Ramakrishnan / Illposed Software.
* All rights reserved.
*
* This code is licensed under the BSD 3-Clause license.
* See file LICENSE (or LICENSE.html) for more information.
*/
package com.illposed.osc.transport.udp;
import com.illposed.osc.OSCBundle;
import com.illposed.osc.OSCMessage;
import com.illposed.osc.OSCPacket;
import com.illposed.osc.SimpleOSCMessageListener;
import com.illposed.osc.messageselector.OSCPatternAddressMessageSelector;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class OSCPortTest {
private static final long WAIT_FOR_SOCKET_CLOSE = 30;
@Rule
public ExpectedException expectedException = ExpectedException.none();
private OSCPortOut sender;
private OSCPortIn receiver;
@Before
public void setUp() throws Exception {
sender = new OSCPortOut();
receiver = new OSCPortIn(OSCPort.defaultSCOSCPort());
}
@After
public void tearDown() throws Exception {
receiver.close();
sender.close();
// wait a bit after closing the receiver,
// because (some) operating systems need some time
// to actually close the underlying socket
Thread.sleep(WAIT_FOR_SOCKET_CLOSE);
}
@Test
public void testSocketClose() throws Exception {
// close the underlying sockets
receiver.close();
sender.close();
// make sure the old receiver is gone for good
Thread.sleep(WAIT_FOR_SOCKET_CLOSE);
// check if the underlying sockets were closed
// NOTE We can have many (out-)sockets sending
// on the same address and port,
// but only one receiving per each such tuple.
sender = new OSCPortOut();
receiver = new OSCPortIn(OSCPort.defaultSCOSCPort());
}
@Test
public void testSocketAutoClose() throws Exception {
// DANGEROUS! here we forget to close the underlying sockets!
receiver = null;
sender = null;
// make sure the old receiver is gone for good
System.gc();
Thread.sleep(WAIT_FOR_SOCKET_CLOSE);
// check if the underlying sockets were closed
// NOTE We can have many (out-)sockets sending
// on the same address and port,
// but only one receiving per each such tuple.
sender = new OSCPortOut();
receiver = new OSCPortIn(OSCPort.defaultSCOSCPort());
}
@Test
public void testPorts() throws Exception {
Assert.assertEquals("Bad default SuperCollider OSC port",
57110, OSCPort.defaultSCOSCPort());
Assert.assertEquals("Bad default SuperCollider Language OSC port",
57120, OSCPort.defaultSCLangOSCPort());
Assert.assertEquals("Bad default port with ctor()",
57110, sender.getPort());
sender.close();
sender = new OSCPortOut(InetAddress.getLocalHost());
Assert.assertEquals("Bad default port with ctor(address)",
57110, sender.getPort());
sender.close();
sender = new OSCPortOut(InetAddress.getLocalHost(), 12345);
Assert.assertEquals("Bad port with ctor(address, port)",
12345, sender.getPort());
}
@Test
public void testStart() throws Exception {
OSCMessage mesg = new OSCMessage("/sc/stop");
sender.send(mesg);
}
@Test
public void testMessageWithArgs() throws Exception {
List<Object> args = new ArrayList<Object>(2);
args.add(3);
args.add("hello");
OSCMessage mesg = new OSCMessage("/foo/bar", args);
sender.send(mesg);
}
@Test
public void testBundle() throws Exception {
List<Object> args = new ArrayList<Object>(2);
args.add(3);
args.add("hello");
List<OSCPacket> msgs = new ArrayList<OSCPacket>(1);
msgs.add(new OSCMessage("/foo/bar", args));
OSCBundle bundle = new OSCBundle(msgs);
sender.send(bundle);
}
@Test
public void testBundle2() throws Exception {
final List<Object> arguments = new ArrayList<Object>(2);
arguments.add(3);
arguments.add("hello");
final OSCMessage mesg = new OSCMessage("/foo/bar", arguments);
OSCBundle bundle = new OSCBundle();
bundle.addPacket(mesg);
sender.send(bundle);
}
@Test
public void testReceiving() throws Exception {
OSCMessage mesg = new OSCMessage("/message/receiving");
SimpleOSCMessageListener listener = new SimpleOSCMessageListener();
receiver.getDispatcher().addListener(new OSCPatternAddressMessageSelector("/message/receiving"),
listener);
receiver.startListening();
sender.send(mesg);
Thread.sleep(100); // wait a bit
receiver.stopListening();
if (!listener.isMessageReceived()) {
Assert.fail("Message was not received");
}
}
@Test
public void testBundleReceiving() throws Exception {
OSCBundle bundle = new OSCBundle();
bundle.addPacket(new OSCMessage("/bundle/receiving"));
SimpleOSCMessageListener listener = new SimpleOSCMessageListener();
receiver.getDispatcher().addListener(new OSCPatternAddressMessageSelector("/bundle/receiving"),
listener);
receiver.startListening();
sender.send(bundle);
Thread.sleep(100); // wait a bit
receiver.stopListening();
if (!listener.isMessageReceived()) {
Assert.fail("Message was not received");
}
if (!listener.getReceivedTimestamp().equals(bundle.getTimestamp())) {
Assert.fail("Message should have timestamp " + bundle.getTimestamp()
+ " but has " + listener.getReceivedTimestamp());
}
}
}
| modules/core/src/test/java/com/illposed/osc/transport/udp/OSCPortTest.java | /*
* Copyright (C) 2001, C. Ramakrishnan / Illposed Software.
* All rights reserved.
*
* This code is licensed under the BSD 3-Clause license.
* See file LICENSE (or LICENSE.html) for more information.
*/
package com.illposed.osc.transport.udp;
import com.illposed.osc.OSCBundle;
import com.illposed.osc.OSCMessage;
import com.illposed.osc.OSCPacket;
import com.illposed.osc.SimpleOSCMessageListener;
import com.illposed.osc.messageselector.OSCPatternAddressMessageSelector;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class OSCPortTest {
private static final long WAIT_FOR_SOCKET_CLOSE = 30;
@Rule
public ExpectedException expectedException = ExpectedException.none();
private OSCPortOut sender;
private OSCPortIn receiver;
@Before
public void setUp() throws Exception {
sender = new OSCPortOut();
receiver = new OSCPortIn(OSCPort.defaultSCOSCPort());
}
@After
public void tearDown() throws Exception {
receiver.close();
sender.close();
// wait a bit after closing the receiver,
// because (some) operating systems need some time
// to actually close the underlying socket
Thread.sleep(WAIT_FOR_SOCKET_CLOSE);
}
@Test
public void testSocketClose() throws Exception {
// close the underlying sockets
receiver.close();
sender.close();
// make sure the old receiver is gone for good
Thread.sleep(WAIT_FOR_SOCKET_CLOSE);
// check if the underlying sockets were closed
// NOTE We can have many (out-)sockets sending
// on the same address and port,
// but only one receiving per each such tuple.
sender = new OSCPortOut();
receiver = new OSCPortIn(OSCPort.defaultSCOSCPort());
}
@Test
public void testSocketAutoClose() throws Exception {
// DANGEROUS! here we forget to close the underlying sockets!
receiver = null;
sender = null;
// make sure the old receiver is gone for good
System.gc();
Thread.sleep(WAIT_FOR_SOCKET_CLOSE);
// check if the underlying sockets were closed
// NOTE We can have many (out-)sockets sending
// on the same address and port,
// but only one receiving per each such tuple.
sender = new OSCPortOut();
receiver = new OSCPortIn(OSCPort.defaultSCOSCPort());
}
@Test
public void testPorts() throws Exception {
Assert.assertEquals("Bad default SuperCollider OSC port",
57110, OSCPort.defaultSCOSCPort());
Assert.assertEquals("Bad default SuperCollider Language OSC port",
57120, OSCPort.defaultSCLangOSCPort());
Assert.assertEquals("Bad default port with ctor()",
57110, sender.getPort());
sender.close();
sender = new OSCPortOut(InetAddress.getLocalHost());
Assert.assertEquals("Bad default port with ctor(address)",
57110, sender.getPort());
sender.close();
sender = new OSCPortOut(InetAddress.getLocalHost(), 12345);
Assert.assertEquals("Bad port with ctor(address, port)",
12345, sender.getPort());
}
@Test
public void testStart() throws Exception {
OSCMessage mesg = new OSCMessage("/sc/stop");
sender.send(mesg);
}
@Test
public void testMessageWithArgs() throws Exception {
List<Object> args = new ArrayList<Object>(2);
args.add(3);
args.add("hello");
OSCMessage mesg = new OSCMessage("/foo/bar", args);
sender.send(mesg);
}
@Test
public void testBundle() throws Exception {
List<Object> args = new ArrayList<Object>(2);
args.add(3);
args.add("hello");
List<OSCPacket> msgs = new ArrayList<OSCPacket>(1);
msgs.add(new OSCMessage("/foo/bar", args));
OSCBundle bundle = new OSCBundle(msgs);
sender.send(bundle);
}
@Test
public void testBundle2() throws Exception {
final List<Object> arguments = new ArrayList<Object>(2);
arguments.add(3);
arguments.add("hello");
final OSCMessage mesg = new OSCMessage("/foo/bar", arguments);
OSCBundle bundle = new OSCBundle();
bundle.addPacket(mesg);
sender.send(bundle);
}
@Test
public void testReceiving() throws Exception {
OSCMessage mesg = new OSCMessage("/message/receiving");
SimpleOSCMessageListener listener = new SimpleOSCMessageListener();
receiver.getDispatcher().addListener(new OSCPatternAddressMessageSelector("/message/receiving"),
listener);
receiver.startListening();
sender.send(mesg);
Thread.sleep(100); // wait a bit
receiver.stopListening();
if (!listener.isMessageReceived()) {
Assert.fail("Message was not received");
}
}
@Test
public void testBundleReceiving() throws Exception {
OSCBundle bundle = new OSCBundle();
bundle.addPacket(new OSCMessage("/bundle/receiving"));
SimpleOSCMessageListener listener = new SimpleOSCMessageListener();
receiver.getDispatcher().addListener(new OSCPatternAddressMessageSelector("/bundle/receiving"),
listener);
receiver.startListening();
sender.send(bundle);
Thread.sleep(100); // wait a bit
receiver.stopListening();
if (!listener.isMessageReceived()) {
Assert.fail("Message was not received");
}
if (!listener.getReceivedTimestamp().equals(bundle.getTimestamp())) {
Assert.fail("Message should have timestamp " + bundle.getTimestamp()
+ " but has " + listener.getReceivedTimestamp());
}
}
}
| OSCPortTest: fix formatting [minor] [test]
| modules/core/src/test/java/com/illposed/osc/transport/udp/OSCPortTest.java | OSCPortTest: fix formatting [minor] [test] | <ide><path>odules/core/src/test/java/com/illposed/osc/transport/udp/OSCPortTest.java
<ide>
<ide> @After
<ide> public void tearDown() throws Exception {
<add>
<ide> receiver.close();
<ide> sender.close();
<add>
<ide> // wait a bit after closing the receiver,
<ide> // because (some) operating systems need some time
<ide> // to actually close the underlying socket
<ide>
<ide> @Test
<ide> public void testStart() throws Exception {
<add>
<ide> OSCMessage mesg = new OSCMessage("/sc/stop");
<ide> sender.send(mesg);
<ide> }
<ide>
<ide> @Test
<ide> public void testMessageWithArgs() throws Exception {
<add>
<ide> List<Object> args = new ArrayList<Object>(2);
<ide> args.add(3);
<ide> args.add("hello");
<ide>
<ide> @Test
<ide> public void testBundle() throws Exception {
<add>
<ide> List<Object> args = new ArrayList<Object>(2);
<ide> args.add(3);
<ide> args.add("hello");
<ide>
<ide> @Test
<ide> public void testBundle2() throws Exception {
<add>
<ide> final List<Object> arguments = new ArrayList<Object>(2);
<ide> arguments.add(3);
<ide> arguments.add("hello");
<ide>
<ide> @Test
<ide> public void testReceiving() throws Exception {
<add>
<ide> OSCMessage mesg = new OSCMessage("/message/receiving");
<ide> SimpleOSCMessageListener listener = new SimpleOSCMessageListener();
<ide> receiver.getDispatcher().addListener(new OSCPatternAddressMessageSelector("/message/receiving"),
<ide>
<ide> @Test
<ide> public void testBundleReceiving() throws Exception {
<add>
<ide> OSCBundle bundle = new OSCBundle();
<ide> bundle.addPacket(new OSCMessage("/bundle/receiving"));
<ide> SimpleOSCMessageListener listener = new SimpleOSCMessageListener(); |
|
Java | bsd-3-clause | e6f20c3150ef89ced3b3ed646e328efd8a22e7ae | 0 | Monsters-308/FRC2017,Monsters-308/FRC2017 | package org.usfirst.frc308.FRC2017.commands;
import org.usfirst.frc308.FRC2017.Robot;
import org.usfirst.frc308.FRC2017.RobotConstants;
import org.usfirst.frc308.FRC2017.utils.MathUtils;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.networktables.NetworkTable;
public class AutonomousRotateToTarget extends Command {
private double rot;
public AutonomousRotateToTarget(double rotation){
rot = rotation;
requires(Robot.chassis);
}
@Override
protected void initialize() {
super.initialize();
Robot.chassis.setupDrive();
}
@Override
protected void execute() {
super.execute();
Robot.chassis.arcadeDrive(0, rot);
}
@Override
protected void end() {
super.end();
Robot.chassis.arcadeDrive(0, rot);
}
@Override
protected void interrupted() {
super.interrupted();
end();
}
@Override
protected boolean isFinished() {
//Enhance this method
if (NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerX", new double[0]).length > 0) {
double[] array = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerX", new double[0]);
int index = MathUtils.getLargestIndex(array);
double centerX = RobotConstants.x / 2;
if(MathUtils.getDiffrence(centerX, array[index]) < RobotConstants.visionTolerance){
return true;
}
else{
//Maybe we need to adjust the rotation value here ...
//but it wasn't required to do that now
}
}
return false;
}
}
| src/org/usfirst/frc308/FRC2017/commands/AutonomousRotateToTarget.java | package org.usfirst.frc308.FRC2017.commands;
import org.usfirst.frc308.FRC2017.Robot;
import org.usfirst.frc308.FRC2017.RobotConstants;
import org.usfirst.frc308.FRC2017.utils.MathUtils;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.networktables.NetworkTable;
public class AutonomousRotateToTarget extends Command {
boolean shouldRetry = false;
boolean isFinished = false;
private Timer timer;
public AutonomousRotateToTarget() {
//Is needed to turn the robot to the right direction
requires(Robot.chassis);
}
@Override
protected void execute() {
super.execute();
aim();
}
@Override
protected void initialize() {
super.initialize();
Robot.chassis.setupDrive();
Robot.chassis.setRotatePIDZero();
}
@Override
protected boolean isFinished() {
return isFinished;
}
@Override
protected void end() {
super.end();
RobotConstants.isAutonomousAiming = false;
}
@Override
protected void interrupted() {
super.interrupted();
end();
}
private void aim(){
//Make sure, that aim() is called
System.out.println("Aim() got called");
isFinished = false;
//Make clear, that the robot is trying to aim the target
RobotConstants.isAutonomousAiming = true;
//Getting GRIP data from NetworkTable
double[] targets = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerX", new double[0]);
double[] targets2 = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerY", new double[0]);
double[] targets3 = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("area", new double[0]);
//Find the biggest target
int biggestTarget = MathUtils.getLargestIndex(targets3);
//Create a new timer
//timer = new Timer();
//No target can be seen
if(targets.length == 0){
//turn until you can see the target
//Suggestion: Turn always 10, to make sure that we don't miss anything
double suggestedAngle = 10.0; //Amount in degrees
//Check if this get called
//Robot.chassis.arcadeDrive(0, MathUtils.degToDriveDouble(suggestedAngle));
Robot.chassis.setRotatePID(suggestedAngle);
}
else if(targets.length > 0){
//Target can be seen now
//Check if there is a biggest target
if(biggestTarget != -1){
//Get X and Y values
double x = targets[biggestTarget];
double y = targets[biggestTarget];
if(isInCenter(x, y))
{
//Done with rotating to the target
isFinished = true;
}
else{
//Calculate off and readjusts
double diffrence = x - 160.0;
double angleToTurn = MathUtils.pxToDeg(diffrence);
Robot.chassis.setRotatePID(angleToTurn);
}
}
else{
//Retry it
biggestTarget = MathUtils.getLargestIndex(targets3);
}
}
//If the robot didn't finished
if(isFinished == false){
//Restart
//TODO This might create some problems
//aim();
}
else{
//Your are now done with the whole process
//Enjoy it and do nothing
}
}
/**
*
* @author Alexander Kaschta
* @param x
* @param y
* @return if the coordinates are in the center of the camera
*/
private boolean isInCenter(double x, double y){
//Get the coordinates of the center of the camera
int centerX = RobotConstants.x / 2;
//If the difference in x is smaller than the allowed tolerance
if(MathUtils.getDiffrence(centerX, x) < RobotConstants.visionTolerance){
//Don't care about y-axis
return true;
}
return false;
}
}
| Vision update
:money_with_wings:
| src/org/usfirst/frc308/FRC2017/commands/AutonomousRotateToTarget.java | Vision update | <ide><path>rc/org/usfirst/frc308/FRC2017/commands/AutonomousRotateToTarget.java
<ide>
<ide> public class AutonomousRotateToTarget extends Command {
<ide>
<del> boolean shouldRetry = false;
<del> boolean isFinished = false;
<del>
<del> private Timer timer;
<add> private double rot;
<add>
<add> public AutonomousRotateToTarget(double rotation){
<add> rot = rotation;
<add> requires(Robot.chassis);
<add> }
<add>
<add> @Override
<add> protected void initialize() {
<add> super.initialize();
<add> Robot.chassis.setupDrive();
<add> }
<add>
<add> @Override
<add> protected void execute() {
<add> super.execute();
<add> Robot.chassis.arcadeDrive(0, rot);
<add> }
<ide>
<del> public AutonomousRotateToTarget() {
<del>
<del> //Is needed to turn the robot to the right direction
<del> requires(Robot.chassis);
<del> }
<del>
<del> @Override
<del> protected void execute() {
<del> super.execute();
<del> aim();
<del>
<del> }
<del>
<del> @Override
<del> protected void initialize() {
<del> super.initialize();
<del> Robot.chassis.setupDrive();
<del> Robot.chassis.setRotatePIDZero();
<del> }
<add> @Override
<add> protected void end() {
<add> super.end();
<add> Robot.chassis.arcadeDrive(0, rot);
<add> }
<add>
<add>
<add> @Override
<add> protected void interrupted() {
<add> super.interrupted();
<add> end();
<add> }
<ide>
<del> @Override
<del> protected boolean isFinished() {
<del> return isFinished;
<del> }
<del>
<del> @Override
<del> protected void end() {
<del> super.end();
<del> RobotConstants.isAutonomousAiming = false;
<del> }
<del>
<del> @Override
<del> protected void interrupted() {
<del> super.interrupted();
<del> end();
<del> }
<del>
<del> private void aim(){
<del> //Make sure, that aim() is called
<del> System.out.println("Aim() got called");
<del>
<del> isFinished = false;
<del> //Make clear, that the robot is trying to aim the target
<del> RobotConstants.isAutonomousAiming = true;
<del>
<del>
<del> //Getting GRIP data from NetworkTable
<del> double[] targets = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerX", new double[0]);
<del> double[] targets2 = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerY", new double[0]);
<del> double[] targets3 = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("area", new double[0]);
<del>
<del> //Find the biggest target
<del> int biggestTarget = MathUtils.getLargestIndex(targets3);
<del>
<del> //Create a new timer
<del> //timer = new Timer();
<del>
<del> //No target can be seen
<del> if(targets.length == 0){
<del> //turn until you can see the target
<del>
<del> //Suggestion: Turn always 10, to make sure that we don't miss anything
<del> double suggestedAngle = 10.0; //Amount in degrees
<del>
<del> //Check if this get called
<del> //Robot.chassis.arcadeDrive(0, MathUtils.degToDriveDouble(suggestedAngle));
<del> Robot.chassis.setRotatePID(suggestedAngle);
<add> @Override
<add> protected boolean isFinished() {
<add> //Enhance this method
<add> if (NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerX", new double[0]).length > 0) {
<add> double[] array = NetworkTable.getTable("GRIP/myContoursReport").getNumberArray("centerX", new double[0]);
<add> int index = MathUtils.getLargestIndex(array);
<add> double centerX = RobotConstants.x / 2;
<add> if(MathUtils.getDiffrence(centerX, array[index]) < RobotConstants.visionTolerance){
<add> return true;
<add> }
<add> else{
<add> //Maybe we need to adjust the rotation value here ...
<add> //but it wasn't required to do that now
<add> }
<ide>
<ide> }
<del> else if(targets.length > 0){
<del> //Target can be seen now
<del>
<del> //Check if there is a biggest target
<del> if(biggestTarget != -1){
<del>
<del> //Get X and Y values
<del> double x = targets[biggestTarget];
<del> double y = targets[biggestTarget];
<del>
<del>
<del> if(isInCenter(x, y))
<del> {
<del> //Done with rotating to the target
<del> isFinished = true;
<del> }
<del> else{
<del> //Calculate off and readjusts
<del> double diffrence = x - 160.0;
<del>
<del> double angleToTurn = MathUtils.pxToDeg(diffrence);
<del>
<del> Robot.chassis.setRotatePID(angleToTurn);
<del>
<del>
<del> }
<del>
<del> }
<del> else{
<del> //Retry it
<del> biggestTarget = MathUtils.getLargestIndex(targets3);
<del> }
<del>
<del> }
<del>
<del> //If the robot didn't finished
<del> if(isFinished == false){
<del> //Restart
<del> //TODO This might create some problems
<del> //aim();
<del> }
<del> else{
<del> //Your are now done with the whole process
<del> //Enjoy it and do nothing
<del> }
<del>
<del> }
<del>
<del>
<del> /**
<del> *
<del> * @author Alexander Kaschta
<del> * @param x
<del> * @param y
<del> * @return if the coordinates are in the center of the camera
<del> */
<del> private boolean isInCenter(double x, double y){
<del>
<del> //Get the coordinates of the center of the camera
<del> int centerX = RobotConstants.x / 2;
<del>
<del> //If the difference in x is smaller than the allowed tolerance
<del> if(MathUtils.getDiffrence(centerX, x) < RobotConstants.visionTolerance){
<del> //Don't care about y-axis
<del> return true;
<del> }
<del>
<del> return false;
<del> }
<del>
<add> return false;
<add> }
<ide> } |
|
Java | apache-2.0 | 563aee685218abbfe71873de6c1be9c11ff14f72 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actions.searcheverywhere;
import com.google.common.collect.Lists;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.find.findUsages.PsiElement2UsageTargetAdapter;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.ElementsChooser;
import com.intellij.ide.util.gotoByName.QuickSearchComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.JBPopupListener;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiElement;
import com.intellij.psi.codeStyle.MinusculeMatcher;
import com.intellij.psi.codeStyle.NameUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBList;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.fields.ExtendableTextField;
import com.intellij.ui.popup.PopupUpdateProcessor;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.*;
import com.intellij.util.Alarm;
import com.intellij.util.text.MatcherHolder;
import com.intellij.util.ui.DialogUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.components.BorderLayoutPanel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author Konstantin Bulenkov
* @author Mikhail.Sokolov
*/
public class SearchEverywhereUI extends BorderLayoutPanel implements Disposable, DataProvider, QuickSearchComponent {
private static final Logger LOG = Logger.getInstance(SearchEverywhereUI.class);
public static final int SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT = 30;
public static final int MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT = 15;
private final List<SearchEverywhereContributor> myServiceContributors;
private final List<SearchEverywhereContributor> myShownContributors;
private final Map<String, SearchEverywhereContributorFilter<?>> myContributorFilters;
private final Project myProject;
private SETab mySelectedTab;
private final JTextField mySearchField;
private final JCheckBox myNonProjectCB;
private final List<SETab> myTabs = new ArrayList<>();
private boolean nonProjectCheckBoxLocked;
private final JBList<Object> myResultsList = new JBList<>();
private final SearchListModel myListModel = new SearchListModel(); //todo using in different threads? #UX-1
private JBPopup myHint;
private CalcThread myCalcThread; //todo using in different threads? #UX-1
private volatile ActionCallback myCurrentWorker = ActionCallback.DONE;
private int myCalcThreadRestartRequestId = 0;
private final Object myWorkerRestartRequestLock = new Object();
private final Alarm listOperationsAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, ApplicationManager.getApplication());
private Runnable searchFinishedHandler = () -> {};
public SearchEverywhereUI(Project project,
List<SearchEverywhereContributor> serviceContributors,
List<SearchEverywhereContributor> contributors,
Map<String, SearchEverywhereContributorFilter<?>> filters) {
withMinimumWidth(670);
withPreferredWidth(670);
withBackground(JBUI.CurrentTheme.SearchEverywhere.dialogBackground());
myProject = project;
myServiceContributors = serviceContributors;
myShownContributors = contributors;
myContributorFilters = filters;
myNonProjectCB = new JBCheckBox();
myNonProjectCB.setOpaque(false);
myNonProjectCB.setFocusable(false);
JPanel contributorsPanel = createTabPanel(contributors);
JPanel settingsPanel = createSettingsPanel();
mySearchField = createSearchField();
JPanel suggestionsPanel = createSuggestionsPanel();
myResultsList.setModel(myListModel);
myResultsList.setFocusable(false);
myResultsList.setCellRenderer(new CompositeCellRenderer());
myResultsList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
ScrollingUtil.installActions(myResultsList, getSearchField());
JPanel topPanel = new JPanel(new BorderLayout());
topPanel.setOpaque(false);
topPanel.add(contributorsPanel, BorderLayout.WEST);
topPanel.add(settingsPanel, BorderLayout.EAST);
topPanel.add(mySearchField, BorderLayout.SOUTH);
WindowMoveListener moveListener = new WindowMoveListener(this);
topPanel.addMouseListener(moveListener);
topPanel.addMouseMotionListener(moveListener);
addToTop(topPanel);
addToCenter(suggestionsPanel);
initSearchActions();
}
private JPanel createSuggestionsPanel() {
JPanel pnl = new JPanel(new BorderLayout());
pnl.setOpaque(false);
pnl.setBorder(JBUI.Borders.customLine(JBUI.CurrentTheme.SearchEverywhere.searchFieldBorderColor(), 1, 0, 0, 0));
JScrollPane resultsScroll = new JBScrollPane(myResultsList);
resultsScroll.setBorder(null);
resultsScroll.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
resultsScroll.setPreferredSize(JBUI.size(670, JBUI.CurrentTheme.SearchEverywhere.maxListHeght()));
pnl.add(resultsScroll, BorderLayout.CENTER);
String hint = IdeBundle.message("searcheverywhere.history.shortcuts.hint",
KeymapUtil.getKeystrokeText(SearchTextField.ALT_SHOW_HISTORY_KEYSTROKE),
KeymapUtil.getKeystrokeText(SearchTextField.SHOW_HISTORY_KEYSTROKE));
JLabel hintLabel = HintUtil.createAdComponent(hint, JBUI.Borders.empty(), SwingConstants.LEFT);
hintLabel.setOpaque(false);
hintLabel.setForeground(JBColor.GRAY);
pnl.add(hintLabel, BorderLayout.SOUTH);
return pnl;
}
public JTextField getSearchField() {
return mySearchField;
}
public void setUseNonProjectItems(boolean use) {
myNonProjectCB.setSelected(use);
nonProjectCheckBoxLocked = true;
}
public boolean isUseNonProjectItems() {
return myNonProjectCB.isSelected();
}
public void switchToContributor(String contributorID) {
SETab selectedTab = myTabs.stream()
.filter(tab -> tab.getID().equals(contributorID))
.findAny()
.orElseThrow(() -> new IllegalArgumentException(String.format("Contributor %s is not supported", contributorID)));
switchToTab(selectedTab);
}
private void switchToNextTab() {
int currentIndex = myTabs.indexOf(mySelectedTab);
SETab nextTab = currentIndex == myTabs.size() - 1 ? myTabs.get(0) : myTabs.get(currentIndex + 1);
switchToTab(nextTab);
}
private void switchToPrevTab() {
int currentIndex = myTabs.indexOf(mySelectedTab);
SETab prevTab = currentIndex == 0 ? myTabs.get(myTabs.size() - 1) : myTabs.get(currentIndex - 1);
switchToTab(prevTab);
}
private void switchToTab(SETab tab) {
mySelectedTab = tab;
String checkBoxText = tab.getContributor()
.map(SearchEverywhereContributor::includeNonProjectItemsText)
.orElse(IdeBundle.message("checkbox.include.non.project.items", IdeUICustomization.getInstance().getProjectConceptName()));
if (checkBoxText.indexOf(UIUtil.MNEMONIC) != -1) {
DialogUtil.setTextWithMnemonic(myNonProjectCB, checkBoxText);
} else {
myNonProjectCB.setText(checkBoxText);
myNonProjectCB.setDisplayedMnemonicIndex(-1);
myNonProjectCB.setMnemonic(0);
}
myNonProjectCB.setSelected(false);
nonProjectCheckBoxLocked = false;
myResultsList.getEmptyText().setText(getEmptyText());
repaint();
rebuildList();
}
public void setSearchFinishedHandler(@NotNull Runnable searchFinishedHandler) {
this.searchFinishedHandler = searchFinishedHandler;
}
public String getSelectedContributorID() {
return mySelectedTab.getID();
}
@Override
public void dispose() {
stopSearching();
}
@Nullable
@Override
public Object getData(String dataId) {
//common data section---------------------
//todo
//item-specific data section--------------
int index = myResultsList.getSelectedIndex();
if (index < 0 || myListModel.isMoreElement(index)) {
return null;
}
SearchEverywhereContributor contributor = myListModel.getContributorForIndex(index);
return contributor.getDataForItem(myListModel.getElementAt(index), dataId);
}
@Override
public void registerHint(JBPopup h) {
if (myHint != null && myHint.isVisible() && myHint != h) {
myHint.cancel();
}
myHint = h;
}
@Override
public void unregisterHint() {
registerHint(null);
}
@Override
public Component asComponent() {
return this;
}
private void hideHint() {
if (myHint != null && myHint.isVisible()) {
myHint.cancel();
}
}
private void updateHint(Object element) {
if (myHint == null || !myHint.isVisible()) return;
final PopupUpdateProcessor updateProcessor = myHint.getUserData(PopupUpdateProcessor.class);
if (updateProcessor != null) {
updateProcessor.updatePopup(element);
}
}
private boolean isAllTabSelected() {
return SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID.equals(getSelectedContributorID());
}
private JTextField createSearchField() {
ExtendableTextField searchField = new ExtendableTextField() {
@Override
public Dimension getPreferredSize() {
Dimension size = super.getPreferredSize();
size.height = JBUI.scale(29);
return size;
}
};
ExtendableTextField.Extension searchExtension = new ExtendableTextField.Extension() {
@Override
public Icon getIcon(boolean hovered) {
return AllIcons.Actions.Search;
}
@Override
public boolean isIconBeforeText() {
return true;
}
};
ExtendableTextField.Extension hintExtension = new ExtendableTextField.Extension() {
private final TextIcon icon;
{
icon = new TextIcon(IdeBundle.message("searcheverywhere.switch.scope.hint"), JBColor.GRAY, null, 0);
icon.setFont(RelativeFont.SMALL.derive(getFont()));
}
@Override
public Icon getIcon(boolean hovered) {
return icon;
}
};
searchField.setExtensions(searchExtension, hintExtension);
//todo gap between icon and text #UX-1
Insets insets = JBUI.CurrentTheme.SearchEverywhere.searchFieldInsets();
Border empty = JBUI.Borders.empty(insets.top, insets.left, insets.bottom, insets.right);
Border topLine = JBUI.Borders.customLine(JBUI.CurrentTheme.SearchEverywhere.searchFieldBorderColor(), 1, 0, 0, 0);
searchField.setBorder(JBUI.Borders.merge(empty, topLine, true));
searchField.setBackground(JBUI.CurrentTheme.SearchEverywhere.searchFieldBackground());
searchField.setFocusTraversalKeysEnabled(false);
return searchField;
}
private JPanel createSettingsPanel() {
JPanel res = new JPanel();
BoxLayout bl = new BoxLayout(res, BoxLayout.X_AXIS);
res.setLayout(bl);
res.setOpaque(false);
res.add(myNonProjectCB);
res.add(Box.createHorizontalStrut(JBUI.scale(19)));
DefaultActionGroup actionGroup = new DefaultActionGroup();
actionGroup.addAction(new ShowInFindToolWindowAction());
actionGroup.addAction(new ShowFilterAction());
ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar("search.everywhere.toolbar", actionGroup, true);
JComponent toolbarComponent = toolbar.getComponent();
toolbarComponent.setOpaque(false);
res.add(toolbarComponent);
return res;
}
@NotNull
private JPanel createTabPanel(List<SearchEverywhereContributor> contributors) {
JPanel contributorsPanel = new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0));
contributorsPanel.setOpaque(false);
SETab allTab = new SETab(null);
contributorsPanel.add(allTab);
myTabs.add(allTab);
contributors.forEach(contributor -> {
SETab tab = new SETab(contributor);
contributorsPanel.add(tab);
myTabs.add(tab);
});
switchToTab(allTab);
return contributorsPanel;
}
private class SETab extends JLabel {
private final SearchEverywhereContributor myContributor;
public SETab(SearchEverywhereContributor contributor) {
super(contributor == null ? IdeBundle.message("searcheverywhere.allelements.tab.name") : contributor.getGroupName());
myContributor = contributor;
Insets insets = JBUI.CurrentTheme.SearchEverywhere.tabInsets();
setBorder(JBUI.Borders.empty(insets.top, insets.left, insets.bottom, insets.right));
addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e) {
switchToTab(SETab.this);
}
});
}
public String getID() {
return getContributor()
.map(SearchEverywhereContributor::getSearchProviderId)
.orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID);
}
public Optional<SearchEverywhereContributor> getContributor() {
return Optional.ofNullable(myContributor);
}
@Override
public Dimension getPreferredSize() {
Dimension size = super.getPreferredSize();
size.height = JBUI.scale(29);
return size;
}
@Override
public boolean isOpaque() {
return mySelectedTab == this;
}
@Override
public Color getBackground() {
return mySelectedTab == this
? JBUI.CurrentTheme.SearchEverywhere.selectedTabColor()
: super.getBackground();
}
}
private void rebuildList() {
assert EventQueue.isDispatchThread() : "Must be EDT";
if (myCalcThread != null && !myCurrentWorker.isProcessed()) {
myCurrentWorker = myCalcThread.cancel();
}
if (myCalcThread != null && !myCalcThread.isCanceled()) {
myCalcThread.cancel();
}
String pattern = getSearchPattern();
String matcherString = mySelectedTab.getContributor()
.map(contributor -> contributor.filterControlSymbols(pattern))
.orElse(pattern);
MinusculeMatcher matcher = NameUtil.buildMatcher("*" + matcherString, NameUtil.MatchingCaseSensitivity.NONE);
MatcherHolder.associateMatcher(myResultsList, matcher);
//assert project != null;
//myRenderer.myProject = project;
synchronized (myWorkerRestartRequestLock) { // this lock together with RestartRequestId should be enough to prevent two CalcThreads running at the same time
final int currentRestartRequest = ++myCalcThreadRestartRequestId;
myCurrentWorker.doWhenProcessed(() -> {
synchronized (myWorkerRestartRequestLock) {
if (currentRestartRequest != myCalcThreadRestartRequestId) {
return;
}
myCalcThread = new CalcThread(pattern, null);
myCurrentWorker = myCalcThread.start();
}
});
}
}
private String getSearchPattern() {
return mySearchField != null ? mySearchField.getText() : "";
}
private void initSearchActions() {
mySearchField.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
if (e.getKeyCode() == KeyEvent.VK_TAB) {
if (e.getModifiers() == 0) {
switchToNextTab();
e.consume();
} else if (e.getModifiers() == InputEvent.SHIFT_MASK) {
switchToPrevTab();
e.consume();
}
}
if (e.isShiftDown()) {
if (e.getKeyCode() == KeyEvent.VK_DOWN) {
//ScrollingUtil.moveDown(myResultsList, e.getModifiersEx());
myResultsList.dispatchEvent(e);
e.consume();
}
if (e.getKeyCode() == KeyEvent.VK_UP) {
//ScrollingUtil.moveUp(myResultsList, e.getModifiersEx());
myResultsList.dispatchEvent(e);
e.consume();
}
}
int[] indices = myResultsList.getSelectedIndices();
if (e.getKeyCode() == KeyEvent.VK_ENTER && indices.length != 0) {
elementsSelected(indices, e.getModifiers());
}
}
});
AnAction escape = ActionManager.getInstance().getAction("EditorEscape");
DumbAwareAction.create(__ -> {
stopSearching();
searchFinishedHandler.run();
}).registerCustomShortcutSet(escape == null ? CommonShortcuts.ESCAPE : escape.getShortcutSet(), this);
mySearchField.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
protected void textChanged(DocumentEvent e) {
nonProjectCheckBoxLocked = false;
rebuildList();
}
});
myNonProjectCB.addItemListener(e -> rebuildList());
myNonProjectCB.addActionListener(e -> nonProjectCheckBoxLocked = true);
myResultsList.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
boolean multiSelectMode = e.isShiftDown() || e.isControlDown();
if (e.getButton() == MouseEvent.BUTTON1 && !multiSelectMode) {
e.consume();
final int i = myResultsList.locationToIndex(e.getPoint());
if (i > -1) {
myResultsList.setSelectedIndex(i);
elementsSelected(new int[]{i}, e.getModifiers());
}
}
}
});
myResultsList.addListSelectionListener(e -> {
Object selectedValue = myResultsList.getSelectedValue();
if (selectedValue != null && myHint != null && myHint.isVisible()) {
updateHint(selectedValue);
}
});
myProject.getMessageBus().connect(this).subscribe(DumbService.DUMB_MODE, new DumbService.DumbModeListener() {
@Override
public void exitDumbMode() {
ApplicationManager.getApplication().invokeLater(() -> rebuildList());
}
});
}
private void elementsSelected(int[] indexes, int modifiers) {
if (indexes.length == 1 && myListModel.isMoreElement(indexes[0])) {
SearchEverywhereContributor contributor = myListModel.getContributorForIndex(indexes[0]);
showMoreElements(contributor);
return;
}
indexes = Arrays.stream(indexes)
.filter(i -> !myListModel.isMoreElement(i))
.toArray();
boolean closePopup = false;
for (int i: indexes) {
SearchEverywhereContributor contributor = myListModel.getContributorForIndex(i);
Object value = myListModel.getElementAt(i);
closePopup |= contributor.processSelectedItem(value, modifiers, getSearchPattern());
}
if (closePopup) {
stopSearching();
searchFinishedHandler.run();
} else {
myResultsList.repaint();
}
}
private void showMoreElements(SearchEverywhereContributor contributor) {
synchronized (myWorkerRestartRequestLock) { // this lock together with RestartRequestId should be enough to prevent two CalcThreads running at the same time
final int currentRestartRequest = ++myCalcThreadRestartRequestId;
myCurrentWorker.doWhenProcessed(() -> {
synchronized (myWorkerRestartRequestLock) {
if (currentRestartRequest != myCalcThreadRestartRequestId) {
return;
}
myCalcThread = new CalcThread(getSearchPattern(), contributor);
myCurrentWorker = myCalcThread.start();
}
});
}
}
private void gotoSelectedItem(Object value, SearchEverywhereContributor contributor, int modifiers, String searchText) {
boolean closePopup = contributor.processSelectedItem(value, modifiers, searchText);
if (closePopup) {
stopSearching();
searchFinishedHandler.run();
} else {
myResultsList.repaint();
}
}
private void stopSearching() {
listOperationsAlarm.cancelAllRequests();
if (myCalcThread != null && !myCalcThread.isCanceled()) {
myCalcThread.cancel();
}
}
private void handleEmptyResults() {
ApplicationManager.getApplication().invokeLater(() -> {
if (!nonProjectCheckBoxLocked && !isUseNonProjectItems() && !getSearchPattern().isEmpty()) {
setUseNonProjectItems(true);
return;
}
hideHint();
});
}
@SuppressWarnings("Duplicates") //todo remove suppress #UX-1
private class CalcThread implements Runnable {
private final String pattern;
private final ProgressIndicator myProgressIndicator = new ProgressIndicatorBase();
private final ActionCallback myDone = new ActionCallback();
private final SearchEverywhereContributor contributorToExpand;
public CalcThread(@NotNull String pattern, @Nullable SearchEverywhereContributor expand) {
this.pattern = pattern;
contributorToExpand = expand;
}
@Override
public void run() {
try {
check();
if (contributorToExpand == null) {
resetList();
} else {
showMore(contributorToExpand);
}
}
catch (ProcessCanceledException ignore) {
myDone.setRejected();
}
catch (Exception e) {
LOG.error(e);
myDone.setRejected();
}
finally {
if (!isCanceled()) {
listOperationsAlarm.addRequest(() -> myResultsList.getEmptyText().setText(getEmptyText()), 0);
}
if (!myDone.isProcessed()) {
myDone.setDone();
}
}
}
private void resetList() {
listOperationsAlarm.cancelAllRequests();
listOperationsAlarm.addRequest(() -> {
Dimension oldSize = getPreferredSize();
myResultsList.getEmptyText().setText(IdeBundle.message("label.choosebyname.searching"));
myListModel.clear();
Dimension newSize = getPreferredSize();
firePropertyChange("preferredSize", oldSize, newSize);
}, 200);
boolean anyFound = false;
SearchEverywhereContributor selectedContributor = mySelectedTab.getContributor().orElse(null);
if (selectedContributor != null) {
anyFound = addContributorItems(selectedContributor, SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT, true);
} else {
boolean clearBefore = true;
for (SearchEverywhereContributor contributor : getUsedContributors()) {
int count = myServiceContributors.contains(contributor) ? -1 : MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT; //show ALL items for service contributors
anyFound |= addContributorItems(contributor, count, clearBefore);
clearBefore = false;
}
}
if (!anyFound) {
handleEmptyResults();
}
}
private void showMore(SearchEverywhereContributor contributor) {
int delta = isAllTabSelected() ? MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT : SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT;
int size = myListModel.getItemsForContributor(contributor) + delta;
addContributorItems(contributor, size, false);
}
private boolean addContributorItems(SearchEverywhereContributor contributor, int count, boolean clearBefore) {
ContributorSearchResult<Object> results =
contributor.search(pattern, isUseNonProjectItems(), myContributorFilters.get(contributor.getSearchProviderId()), myProgressIndicator, count);
boolean found = !results.isEmpty();
if (clearBefore) {
listOperationsAlarm.cancelAllRequests();
}
listOperationsAlarm.addRequest(() -> {
if (isCanceled()) {
return;
}
Dimension oldSize = getPreferredSize();
if (clearBefore) {
myListModel.clear();
}
List<Object> itemsToAdd = results.getItems().stream()
.filter(o -> !myListModel.contains(o))
.collect(Collectors.toList());
if (!itemsToAdd.isEmpty()) {
myListModel.addElements(itemsToAdd, contributor, results.hasMoreItems());
ScrollingUtil.ensureSelectionExists(myResultsList);
}
firePropertyChange("preferredSize", oldSize, getPreferredSize());
}, 0);
return found;
}
protected void check() {
myProgressIndicator.checkCanceled();
if (myDone.isRejected()) throw new ProcessCanceledException();
assert myCalcThread == this : "There are two CalcThreads running before one of them was cancelled";
}
private boolean isCanceled() {
return myProgressIndicator.isCanceled() || myDone.isRejected();
}
public ActionCallback cancel() {
myProgressIndicator.cancel();
//myDone.setRejected();
return myDone;
}
public ActionCallback start() {
ApplicationManager.getApplication().executeOnPooledThread(this);
return myDone;
}
}
@NotNull
private List<SearchEverywhereContributor> getUsedContributors() {
SearchEverywhereContributorFilter<String> contributorsFilter =
(SearchEverywhereContributorFilter<String>) myContributorFilters.get(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID);
List<SearchEverywhereContributor> contributors = new ArrayList<>(myServiceContributors);
myShownContributors.stream()
.filter(contributor -> contributorsFilter.isSelected(contributor.getSearchProviderId()))
.forEach(contributor -> contributors.add(contributor));
return contributors;
}
private class CompositeCellRenderer implements ListCellRenderer<Object> {
@Override
public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
if (myListModel.isMoreElement(index)) {
Component cmp = moreRenderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
return wrap(cmp, 1, 7);
}
SearchEverywhereContributor contributor = myListModel.getContributorForIndex(index);
Component component = contributor.getElementsRenderer(myResultsList)
.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
if (isAllTabSelected() && myListModel.isGroupFirstItem(index)) {
component = groupTitleRenderer.withDisplayedData(contributor.getGroupName(), component);
}
return wrap(component, 1, 0);
}
private Component wrap(Component cmp, int verticalGap, int hotizontalGap) {
JPanel panel = new JPanel(new BorderLayout());
panel.setOpaque(cmp.isOpaque());
if (cmp.isOpaque()) {
panel.setBackground(cmp.getBackground());
}
panel.add(cmp, BorderLayout.CENTER);
panel.setBorder(JBUI.Borders.empty(verticalGap, hotizontalGap));
return panel;
}
}
private final MoreRenderer moreRenderer = new MoreRenderer();
public static class MoreRenderer extends JPanel implements ListCellRenderer<Object> {
final JLabel label;
private MoreRenderer() {
super(new BorderLayout());
label = groupInfoLabel("... more");
add(label, BorderLayout.CENTER);
}
@Override
public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
setBackground(UIUtil.getListBackground(isSelected));
return this;
}
}
private final GroupTitleRenderer groupTitleRenderer = new GroupTitleRenderer();
public static class GroupTitleRenderer extends JPanel {
private final JLabel titleLabel;
private final BorderLayout myLayout = new BorderLayout();
public GroupTitleRenderer() {
setLayout(myLayout);
setBackground(UIUtil.getListBackground(false));
titleLabel = groupInfoLabel("Group");
SeparatorComponent separatorComponent =
new SeparatorComponent(titleLabel.getPreferredSize().height / 2, JBUI.CurrentTheme.SearchEverywhere.listSeparatorColor(),null);
JPanel topPanel = JBUI.Panels.simplePanel(5, 0)
.addToCenter(separatorComponent)
.addToLeft(titleLabel)
.withBorder(JBUI.Borders.empty(0, 7))
.withBackground(UIUtil.getListBackground());
add(topPanel, BorderLayout.NORTH);
}
public GroupTitleRenderer withDisplayedData(String title, Component itemContent) {
titleLabel.setText(title);
Component prevContent = myLayout.getLayoutComponent(BorderLayout.CENTER);
if (prevContent != null) {
remove(prevContent);
}
add(itemContent, BorderLayout.CENTER);
return this;
}
}
private static class SearchListModel extends AbstractListModel<Object> {
private static final Object MORE_ELEMENT = new Object();
private final List<Pair<Object, SearchEverywhereContributor>> listElements = new ArrayList<>();
@Override
public int getSize() {
return listElements.size();
}
@Override
public Object getElementAt(int index) {
return listElements.get(index).first;
}
public Collection<Object> getFoundItems(SearchEverywhereContributor contributor) {
return listElements.stream()
.filter(pair -> pair.second == contributor && pair.first != MORE_ELEMENT)
.map(pair -> pair.getFirst())
.collect(Collectors.toList());
}
public boolean hasMoreElements(SearchEverywhereContributor contributor) {
return listElements.stream()
.anyMatch(pair -> pair.first == MORE_ELEMENT && pair.second == contributor);
}
public void addElements(List<Object> items, SearchEverywhereContributor contributor, boolean hasMore) {
if (items.isEmpty()) {
return;
}
List<Pair<Object, SearchEverywhereContributor>> pairsToAdd = items.stream()
.map(o -> Pair.create(o, contributor))
.collect(Collectors.toList());
int insertPoint = contributors().lastIndexOf(contributor);
int startIndex;
int endIndex;
if (insertPoint < 0) {
// no items of this contributor
startIndex = listElements.size();
listElements.addAll(pairsToAdd);
if (hasMore) {
listElements.add(Pair.create(MORE_ELEMENT, contributor));
}
endIndex = listElements.size() - 1;
} else {
// contributor elements already exists in list
if (isMoreElement(insertPoint)) {
listElements.remove(insertPoint);
} else {
insertPoint += 1;
}
startIndex = insertPoint;
endIndex = startIndex + pairsToAdd.size();
listElements.addAll(insertPoint, pairsToAdd);
if (hasMore) {
listElements.add(insertPoint + pairsToAdd.size(), Pair.create(MORE_ELEMENT, contributor));
endIndex += 1;
}
}
fireIntervalAdded(this, startIndex, endIndex);
}
public void clear() {
int index = listElements.size() - 1;
listElements.clear();
if (index >= 0) {
fireIntervalRemoved(this, 0, index);
}
}
public boolean contains(Object val) {
return values().contains(val);
}
public boolean isMoreElement(int index) {
return listElements.get(index).first == MORE_ELEMENT;
}
public SearchEverywhereContributor getContributorForIndex(int index) {
return listElements.get(index).second;
}
public boolean isGroupFirstItem(int index) {
return index == 0
|| listElements.get(index).second != listElements.get(index - 1).second;
}
public int getItemsForContributor(SearchEverywhereContributor contributor) {
List<SearchEverywhereContributor> contributorsList = contributors();
int first = contributorsList.indexOf(contributor);
int last = contributorsList.lastIndexOf(contributor);
if (isMoreElement(last)) {
last -= 1;
}
return last - first + 1;
}
@NotNull
private List<SearchEverywhereContributor> contributors() {
return Lists.transform(listElements, pair -> pair.getSecond());
}
@NotNull
private List<Object> values() {
return Lists.transform(listElements, pair -> pair.getFirst());
}
}
private class ShowInFindToolWindowAction extends DumbAwareAction {
public ShowInFindToolWindowAction() {
super(IdeBundle.message("searcheverywhere.show.in.find.window.button.name"),
IdeBundle.message("searcheverywhere.show.in.find.window.button.name"), AllIcons.General.Pin_tab);
}
@Override
public void actionPerformed(AnActionEvent e) {
stopSearching();
Collection<SearchEverywhereContributor> contributors = isAllTabSelected() ? getUsedContributors() : Collections.singleton(mySelectedTab.getContributor().get());
contributors = contributors.stream()
.filter(SearchEverywhereContributor::showInFindResults)
.collect(Collectors.toList());
if (contributors.isEmpty()) {
return;
}
String searchText = getSearchPattern();
boolean everywhere = isUseNonProjectItems();
String contributorsString = contributors.stream()
.map(SearchEverywhereContributor::getGroupName)
.collect(Collectors.joining(", "));
UsageViewPresentation presentation = new UsageViewPresentation();
String tabCaptionText = IdeBundle.message("searcheverywhere.found.matches.title", searchText, contributorsString);
presentation.setCodeUsagesString(tabCaptionText);
presentation.setUsagesInGeneratedCodeString(IdeBundle.message("searcheverywhere.found.matches.generated.code.title", searchText, contributorsString));
presentation.setTargetsNodeText(IdeBundle.message("searcheverywhere.found.targets.title", searchText, contributorsString));
presentation.setTabName(tabCaptionText);
presentation.setTabText(tabCaptionText);
Collection<Usage> usages = new LinkedHashSet<>();
Collection<PsiElement> targets = new LinkedHashSet<>();
Collection<Object> cached = contributors.stream()
.flatMap(contributor -> myListModel.getFoundItems(contributor).stream())
.collect(Collectors.toList());
fillUsages(cached, usages, targets);
Collection<SearchEverywhereContributor> contributorsForAdditionalSearch;
contributorsForAdditionalSearch = contributors.stream()
.filter(contributor -> myListModel.hasMoreElements(contributor))
.collect(Collectors.toList());
searchFinishedHandler.run();
if (!contributorsForAdditionalSearch.isEmpty()) {
ProgressManager.getInstance().run(new Task.Modal(myProject, tabCaptionText, true) {
private final ProgressIndicator progressIndicator = new ProgressIndicatorBase();
@Override
public void run(@NotNull ProgressIndicator indicator) {
contributorsForAdditionalSearch.forEach(contributor -> {
if (!progressIndicator.isCanceled()) {
//todo overflow #UX-1
List<Object> foundElements =
contributor.search(searchText, everywhere, myContributorFilters.get(contributor.getSearchProviderId()), progressIndicator);
fillUsages(foundElements, usages, targets);
}
});
}
@Override
public void onCancel() {
progressIndicator.cancel();
}
@Override
public void onSuccess() {
showInFindWindow(targets, usages, presentation);
}
@Override
public void onThrowable(@NotNull Throwable error) {
progressIndicator.cancel();
}
});
} else {
showInFindWindow(targets, usages, presentation);
}
}
private void fillUsages(Collection<Object> foundElements, Collection<Usage> usages, Collection<PsiElement> targets) {
foundElements.stream()
.filter(o -> o instanceof PsiElement)
.forEach(o -> {
PsiElement element = (PsiElement)o;
if (element.getTextRange() != null) {
UsageInfo usageInfo = new UsageInfo(element);
usages.add(new UsageInfo2UsageAdapter(usageInfo));
}
else {
targets.add(element);
}
});
}
/** Opens the Find tool window populated with the collected targets and usages. */
private void showInFindWindow(Collection<PsiElement> targets, Collection<Usage> usages, UsageViewPresentation presentation) {
  UsageTarget[] targetsArray;
  if (targets.isEmpty()) {
    targetsArray = UsageTarget.EMPTY_ARRAY;
  }
  else {
    targetsArray = PsiElement2UsageTargetAdapter.convert(PsiUtilCore.toPsiElementArray(targets));
  }
  Usage[] usagesArray = usages.toArray(Usage.EMPTY_ARRAY);
  UsageViewManager.getInstance(myProject).showUsages(targetsArray, usagesArray, presentation);
}
}
/**
 * Toolbar toggle that shows a popup for narrowing the current tab's results
 * (e.g. filtering files by type). The toggle reads as "selected" while the
 * popup is open; its icon gets a "live" indicator when the filter actually
 * excludes something.
 */
private class ShowFilterAction extends ToggleAction implements DumbAware {
  // Non-null only while the filter popup is showing.
  private JBPopup myFilterPopup;

  public ShowFilterAction() {
    super("Filter", "Filter files by type", AllIcons.General.Filter);
  }

  @Override
  public boolean isSelected(final AnActionEvent e) {
    // "Selected" exactly while the popup is alive.
    return myFilterPopup != null && !myFilterPopup.isDisposed();
  }

  @Override
  public void setSelected(final AnActionEvent e, final boolean state) {
    if (state) {
      showPopup(e.getInputEvent().getComponent());
    }
    else {
      if (myFilterPopup != null && !myFilterPopup.isDisposed()) {
        myFilterPopup.cancel();
      }
    }
  }

  @Override
  public void update(@NotNull AnActionEvent e) {
    Icon icon = getTemplatePresentation().getIcon();
    // Mark the icon when the filter is narrowing results.
    e.getPresentation().setIcon(isActive() ? ExecutionUtil.getLiveIndicator(icon) : icon);
    // Only tabs that registered a filter can open the popup.
    e.getPresentation().setEnabled(myContributorFilters.get(getSelectedContributorID()) != null);
    e.getPresentation().putClientProperty(SELECTED_PROPERTY, isSelected(e));
  }

  // True when at least one element of the current tab's filter is deselected.
  private boolean isActive() {
    String contributorID = getSelectedContributorID();
    SearchEverywhereContributorFilter<?> filter = myContributorFilters.get(contributorID);
    if (filter == null) {
      return false;
    }
    return filter.getAllElements().size() != filter.getSelectedElements().size();
  }

  // Builds and shows the filter popup under the given component; no-op while one is already open.
  private void showPopup(Component anchor) {
    if (myFilterPopup != null) {
      return;
    }
    JBPopupListener popupCloseListener = new JBPopupListener() {
      @Override
      public void onClosed(LightweightWindowEvent event) {
        // Forget the popup so isSelected() reports "off" and it can be re-opened.
        myFilterPopup = null;
      }
    };
    myFilterPopup = JBPopupFactory.getInstance()
      .createComponentPopupBuilder(createFilterPanel(), null)
      .setModalContext(false)
      .setFocusable(false)
      .setResizable(true)
      .setCancelOnClickOutside(false)
      .setMinSize(new Dimension(200, 200))
      .setDimensionServiceKey(myProject, "Search_Everywhere_Filter_Popup", false)
      .addListener(popupCloseListener)
      .createPopup();
    // Tie the popup's lifetime to the whole Search Everywhere UI.
    Disposer.register(SearchEverywhereUI.this, myFilterPopup);
    myFilterPopup.showUnderneathOf(anchor);
  }

  // Chooser with the filter's elements plus All / None / Invert convenience buttons.
  private JComponent createFilterPanel() {
    SearchEverywhereContributorFilter<?> filter = myContributorFilters.get(getSelectedContributorID());
    ElementsChooser<?> chooser = createChooser(filter);

    JPanel panel = new JPanel();
    panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
    panel.add(chooser);
    JPanel buttons = new JPanel();
    JButton all = new JButton("All");
    all.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        chooser.setAllElementsMarked(true);
      }
    });
    buttons.add(all);
    JButton none = new JButton("None");
    none.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        chooser.setAllElementsMarked(false);
      }
    });
    buttons.add(none);
    JButton invert = new JButton("Invert");
    invert.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        chooser.invertSelection();
      }
    });
    buttons.add(invert);
    panel.add(buttons);

    return panel;
  }

  // Wires an ElementsChooser to the filter: text/icon come from the filter,
  // and every (de)selection is pushed back and restarts the search.
  private <T> ElementsChooser<T> createChooser(SearchEverywhereContributorFilter<T> filter) {
    ElementsChooser<T> res = new ElementsChooser<T>(filter.getAllElements(), false) {
      @Override
      protected String getItemText(@NotNull T value) {
        return filter.getElementText(value);
      }

      @Nullable
      @Override
      protected Icon getItemIcon(@NotNull T value) {
        return filter.getElementIcon(value);
      }
    };
    res.markElements(filter.getSelectedElements());
    ElementsChooser.ElementsMarkListener<T> listener = (element, isMarked) -> {
      filter.setSelected(element, isMarked);
      rebuildList();
    };
    res.addElementsMarkListener(listener);
    return res;
  }
}
/** Creates the small, grayed-out, transparent label used for group titles and hints. */
private static JLabel groupInfoLabel(String text) {
  JLabel result = new JLabel(text);
  result.setOpaque(false);
  result.setFont(UIUtil.getLabelFont().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.SMALL)));
  result.setForeground(UIUtil.getLabelDisabledForeground());
  return result;
}
/** Text for the empty results list, specialized for a single-contributor tab if one is selected. */
private String getEmptyText() {
  Optional<SearchEverywhereContributor> contributor = mySelectedTab.getContributor();
  if (contributor.isPresent()) {
    return IdeBundle.message("searcheverywhere.nothing.found.for.contributor.anywhere", contributor.get().getGroupName());
  }
  return IdeBundle.message("searcheverywhere.nothing.found.for.all.anywhere");
}
}
| platform/lang-impl/src/com/intellij/ide/actions/searcheverywhere/SearchEverywhereUI.java | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actions.searcheverywhere;
import com.google.common.collect.Lists;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.find.findUsages.PsiElement2UsageTargetAdapter;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.ElementsChooser;
import com.intellij.ide.util.gotoByName.QuickSearchComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.JBPopupListener;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiElement;
import com.intellij.psi.codeStyle.MinusculeMatcher;
import com.intellij.psi.codeStyle.NameUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBList;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.fields.ExtendableTextField;
import com.intellij.ui.popup.PopupUpdateProcessor;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.*;
import com.intellij.util.Alarm;
import com.intellij.util.text.MatcherHolder;
import com.intellij.util.ui.DialogUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.components.BorderLayoutPanel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author Konstantin Bulenkov
* @author Mikhail.Sokolov
*/
public class SearchEverywhereUI extends BorderLayoutPanel implements Disposable, DataProvider, QuickSearchComponent {
private static final Logger LOG = Logger.getInstance(SearchEverywhereUI.class);

// How many results are shown per contributor before the "...more" row appears.
public static final int SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT = 30;
public static final int MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT = 15;

// Contributors always queried in the "All" tab but without a tab of their own.
private final List<SearchEverywhereContributor> myServiceContributors;
// Contributors that get their own tab.
private final List<SearchEverywhereContributor> myShownContributors;
// Optional result filters, keyed by contributor ID.
private final Map<String, SearchEverywhereContributorFilter<?>> myContributorFilters;
private final Project myProject;

private SETab mySelectedTab;
private final JTextField mySearchField;
// "Include non-project items" checkbox; its text depends on the selected tab.
private final JCheckBox myNonProjectCB;
private final List<SETab> myTabs = new ArrayList<>();

// Set when the user toggled the checkbox by hand, so auto-widening on empty
// results (handleEmptyResults) does not override the explicit choice.
private boolean nonProjectCheckBoxLocked;
private final JBList<Object> myResultsList = new JBList<>();
private final SearchListModel myListModel = new SearchListModel(); //todo using in different threads? #UX-1

// Popup with additional info for the selected item, if any is registered.
private JBPopup myHint;

// Current background search pass and its completion callback; guarded by
// myWorkerRestartRequestLock together with myCalcThreadRestartRequestId.
private CalcThread myCalcThread; //todo using in different threads? #UX-1
private volatile ActionCallback myCurrentWorker = ActionCallback.DONE;
private int myCalcThreadRestartRequestId = 0;
private final Object myWorkerRestartRequestLock = new Object();

// Serializes all list-model mutations onto the EDT.
private final Alarm listOperationsAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, ApplicationManager.getApplication());

// Invoked when the popup should close (item chosen, Escape, ...).
private Runnable searchFinishedHandler = () -> {};
/**
 * Builds the whole popup content: tab row (west), settings row with checkbox and
 * toolbar (east), search field (south of the top panel) and the results list below.
 *
 * @param project             current project, used for search scope and message-bus subscriptions
 * @param serviceContributors contributors always queried but without their own tab
 * @param contributors        contributors shown as separate tabs
 * @param filters             per-contributor result filters, keyed by contributor ID
 */
public SearchEverywhereUI(Project project,
                          List<SearchEverywhereContributor> serviceContributors,
                          List<SearchEverywhereContributor> contributors,
                          Map<String, SearchEverywhereContributorFilter<?>> filters) {
  withMinimumWidth(670);
  withPreferredWidth(670);
  withBackground(JBUI.CurrentTheme.SearchEverywhere.dialogBackground());

  myProject = project;
  myServiceContributors = serviceContributors;
  myShownContributors = contributors;
  myContributorFilters = filters;

  myNonProjectCB = new JBCheckBox();
  myNonProjectCB.setOpaque(false);
  myNonProjectCB.setFocusable(false);

  JPanel contributorsPanel = createTabPanel(contributors);
  JPanel settingsPanel = createSettingsPanel();
  mySearchField = createSearchField();
  JPanel suggestionsPanel = createSuggestionsPanel();

  myResultsList.setModel(myListModel);
  myResultsList.setCellRenderer(new CompositeCellRenderer());
  // Let arrow/page keys typed in the search field drive the results list selection.
  ScrollingUtil.installActions(myResultsList, getSearchField());

  JPanel topPanel = new JPanel(new BorderLayout());
  topPanel.setOpaque(false);
  topPanel.add(contributorsPanel, BorderLayout.WEST);
  topPanel.add(settingsPanel, BorderLayout.EAST);
  topPanel.add(mySearchField, BorderLayout.SOUTH);

  // The popup has no title bar; dragging the top panel moves the whole window.
  WindowMoveListener moveListener = new WindowMoveListener(this);
  topPanel.addMouseListener(moveListener);
  topPanel.addMouseMotionListener(moveListener);

  addToTop(topPanel);
  addToCenter(suggestionsPanel);

  initSearchActions();
}
/**
 * Panel below the search field: scrollable results list plus a bottom "ad" label
 * advertising the search-history shortcuts.
 */
private JPanel createSuggestionsPanel() {
  JPanel pnl = new JPanel(new BorderLayout());
  pnl.setOpaque(false);
  pnl.setBorder(JBUI.Borders.customLine(JBUI.CurrentTheme.SearchEverywhere.searchFieldBorderColor(), 1, 0, 0, 0));

  JScrollPane resultsScroll = new JBScrollPane(myResultsList);
  resultsScroll.setBorder(null);
  // Rows can be wide; never show a horizontal scroll bar.
  resultsScroll.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
  resultsScroll.setPreferredSize(JBUI.size(670, JBUI.CurrentTheme.SearchEverywhere.maxListHeght()));
  pnl.add(resultsScroll, BorderLayout.CENTER);

  String hint = IdeBundle.message("searcheverywhere.history.shortcuts.hint",
                                  KeymapUtil.getKeystrokeText(SearchTextField.ALT_SHOW_HISTORY_KEYSTROKE),
                                  KeymapUtil.getKeystrokeText(SearchTextField.SHOW_HISTORY_KEYSTROKE));
  JLabel hintLabel = HintUtil.createAdComponent(hint, JBUI.Borders.empty(), SwingConstants.LEFT);
  hintLabel.setOpaque(false);
  hintLabel.setForeground(JBColor.GRAY);
  pnl.add(hintLabel, BorderLayout.SOUTH);

  return pnl;
}
/** @return the text field that receives the search query (also the focus target of the popup) */
public JTextField getSearchField() {
  return mySearchField;
}
/**
 * Programmatically toggles "include non-project items" and locks the checkbox so the
 * automatic empty-result widening does not change it again.
 */
public void setUseNonProjectItems(boolean use) {
  myNonProjectCB.setSelected(use);
  nonProjectCheckBoxLocked = true;
}
/** @return whether search currently includes non-project items */
public boolean isUseNonProjectItems() {
  return myNonProjectCB.isSelected();
}
/**
 * Activates the tab belonging to the given contributor ID.
 *
 * @throws IllegalArgumentException if no tab matches the ID
 */
public void switchToContributor(String contributorID) {
  for (SETab tab : myTabs) {
    if (tab.getID().equals(contributorID)) {
      switchToTab(tab);
      return;
    }
  }
  throw new IllegalArgumentException(String.format("Contributor %s is not supported", contributorID));
}
// Cycles forward through the tabs, wrapping from the last back to the first.
private void switchToNextTab() {
  int next = (myTabs.indexOf(mySelectedTab) + 1) % myTabs.size();
  switchToTab(myTabs.get(next));
}
// Cycles backward through the tabs, wrapping from the first to the last.
private void switchToPrevTab() {
  int current = myTabs.indexOf(mySelectedTab);
  if (current == 0) {
    switchToTab(myTabs.get(myTabs.size() - 1));
  }
  else {
    switchToTab(myTabs.get(current - 1));
  }
}
/**
 * Makes the given tab active: refreshes the non-project checkbox text (with or without
 * mnemonic), resets its state, updates the empty-list text and restarts the search.
 */
private void switchToTab(SETab tab) {
  mySelectedTab = tab;
  String checkBoxText = tab.getContributor()
                           .map(SearchEverywhereContributor::includeNonProjectItemsText)
                           .orElse(IdeBundle.message("checkbox.include.non.project.items", IdeUICustomization.getInstance().getProjectConceptName()));
  if (checkBoxText.indexOf(UIUtil.MNEMONIC) != -1) {
    DialogUtil.setTextWithMnemonic(myNonProjectCB, checkBoxText);
  } else {
    // No mnemonic marker in the text — clear any mnemonic left over from the previous tab.
    myNonProjectCB.setText(checkBoxText);
    myNonProjectCB.setDisplayedMnemonicIndex(-1);
    myNonProjectCB.setMnemonic(0);
  }
  // Each tab starts with project-only scope and an unlocked checkbox.
  myNonProjectCB.setSelected(false);
  nonProjectCheckBoxLocked = false;
  myResultsList.getEmptyText().setText(getEmptyText());
  repaint();
  rebuildList();
}
/** Sets the callback invoked when the popup should close (item chosen, Escape, ...). */
public void setSearchFinishedHandler(@NotNull Runnable searchFinishedHandler) {
  this.searchFinishedHandler = searchFinishedHandler;
}
/** @return the contributor ID of the active tab (or the "all" group ID) */
public String getSelectedContributorID() {
  return mySelectedTab.getID();
}
@Override
public void dispose() {
  // Cancel any background search still running when the popup is disposed.
  stopSearching();
}
/**
 * {@link DataProvider} implementation: delegates the data request to the contributor
 * that produced the currently selected item. Returns {@code null} when nothing is
 * selected or the selection is the "...more" placeholder row.
 */
@Nullable
@Override
public Object getData(String dataId) {
  //common data section---------------------
  //todo
  //item-specific data section--------------
  int index = myResultsList.getSelectedIndex();
  if (index < 0 || myListModel.isMoreElement(index)) {
    return null;
  }
  SearchEverywhereContributor contributor = myListModel.getContributorForIndex(index);
  return contributor.getDataForItem(myListModel.getElementAt(index), dataId);
}
/** Registers the info-hint popup for the current selection, cancelling any previous one. */
@Override
public void registerHint(JBPopup h) {
  if (myHint != null && myHint.isVisible() && myHint != h) {
    myHint.cancel();
  }
  myHint = h;
}
@Override
public void unregisterHint() {
  // Registering null cancels the visible hint and clears the reference.
  registerHint(null);
}
/** {@link QuickSearchComponent} implementation: this panel is the component itself. */
@Override
public Component asComponent() {
  return this;
}
// Cancels the info-hint popup if one is currently showing.
private void hideHint() {
  JBPopup hint = myHint;
  if (hint == null) {
    return;
  }
  if (hint.isVisible()) {
    hint.cancel();
  }
}
// Pushes the newly selected element into the visible hint popup, if it supports updates.
private void updateHint(Object element) {
  if (myHint == null || !myHint.isVisible()) return;
  final PopupUpdateProcessor updateProcessor = myHint.getUserData(PopupUpdateProcessor.class);
  if (updateProcessor != null) {
    updateProcessor.updatePopup(element);
  }
}
/** @return whether the aggregate "All" tab is the active one */
private boolean isAllTabSelected() {
  return SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID.equals(getSelectedContributorID());
}
/**
 * Builds the query field: fixed-height extendable text field with a leading
 * magnifier icon and a trailing grayed "switch scope" hint.
 */
private JTextField createSearchField() {
  ExtendableTextField searchField = new ExtendableTextField() {
    @Override
    public Dimension getPreferredSize() {
      // Force the same height as the tab row.
      Dimension size = super.getPreferredSize();
      size.height = JBUI.scale(29);
      return size;
    }
  };

  // Magnifier icon shown before the text.
  ExtendableTextField.Extension searchExtension = new ExtendableTextField.Extension() {
    @Override
    public Icon getIcon(boolean hovered) {
      return AllIcons.Actions.Search;
    }

    @Override
    public boolean isIconBeforeText() {
      return true;
    }
  };
  // Gray textual hint rendered as an icon at the trailing edge.
  ExtendableTextField.Extension hintExtension = new ExtendableTextField.Extension() {
    private final TextIcon icon;
    {
      icon = new TextIcon(IdeBundle.message("searcheverywhere.switch.scope.hint"), JBColor.GRAY, null, 0);
      icon.setFont(RelativeFont.SMALL.derive(getFont()));
    }

    @Override
    public Icon getIcon(boolean hovered) {
      return icon;
    }
  };
  searchField.setExtensions(searchExtension, hintExtension);

  //todo gap between icon and text #UX-1
  Insets insets = JBUI.CurrentTheme.SearchEverywhere.searchFieldInsets();
  Border empty = JBUI.Borders.empty(insets.top, insets.left, insets.bottom, insets.right);
  Border topLine = JBUI.Borders.customLine(JBUI.CurrentTheme.SearchEverywhere.searchFieldBorderColor(), 1, 0, 0, 0);
  searchField.setBorder(JBUI.Borders.merge(empty, topLine, true));
  searchField.setBackground(JBUI.CurrentTheme.SearchEverywhere.searchFieldBackground());
  // Tab is used for switching tabs (see initSearchActions), not focus traversal.
  searchField.setFocusTraversalKeysEnabled(false);

  return searchField;
}
/**
 * Right-hand settings strip: the non-project checkbox plus a toolbar with the
 * "show in Find window" and "filter" actions.
 */
private JPanel createSettingsPanel() {
  JPanel res = new JPanel();
  BoxLayout bl = new BoxLayout(res, BoxLayout.X_AXIS);
  res.setLayout(bl);
  res.setOpaque(false);

  res.add(myNonProjectCB);
  res.add(Box.createHorizontalStrut(JBUI.scale(19)));

  DefaultActionGroup actionGroup = new DefaultActionGroup();
  actionGroup.addAction(new ShowInFindToolWindowAction());
  actionGroup.addAction(new ShowFilterAction());

  ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar("search.everywhere.toolbar", actionGroup, true);
  JComponent toolbarComponent = toolbar.getComponent();
  toolbarComponent.setOpaque(false);
  res.add(toolbarComponent);

  return res;
}
/**
 * Creates the tab row: the aggregate "All" tab first, then one tab per contributor.
 * The "All" tab is selected initially.
 */
@NotNull
private JPanel createTabPanel(List<SearchEverywhereContributor> contributors) {
  JPanel contributorsPanel = new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0));
  contributorsPanel.setOpaque(false);

  // null contributor == the aggregate "All" tab.
  SETab allTab = new SETab(null);
  contributorsPanel.add(allTab);
  myTabs.add(allTab);

  contributors.forEach(contributor -> {
    SETab tab = new SETab(contributor);
    contributorsPanel.add(tab);
    myTabs.add(tab);
  });

  switchToTab(allTab);

  return contributorsPanel;
}
/**
 * A single tab in the header row. Backed by a contributor, or by {@code null}
 * for the aggregate "All" tab. Selection state is derived from
 * {@code mySelectedTab}, which drives opacity and background.
 */
private class SETab extends JLabel {
  private final SearchEverywhereContributor myContributor;

  public SETab(SearchEverywhereContributor contributor) {
    super(contributor == null ? IdeBundle.message("searcheverywhere.allelements.tab.name") : contributor.getGroupName());
    myContributor = contributor;
    Insets insets = JBUI.CurrentTheme.SearchEverywhere.tabInsets();
    setBorder(JBUI.Borders.empty(insets.top, insets.left, insets.bottom, insets.right));
    addMouseListener(new MouseAdapter() {
      @Override
      public void mousePressed(MouseEvent e) {
        switchToTab(SETab.this);
      }
    });
  }

  // Contributor's provider ID, or the shared "all" group ID for the aggregate tab.
  public String getID() {
    return getContributor()
      .map(SearchEverywhereContributor::getSearchProviderId)
      .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID);
  }

  public Optional<SearchEverywhereContributor> getContributor() {
    return Optional.ofNullable(myContributor);
  }

  @Override
  public Dimension getPreferredSize() {
    // Match the search field height.
    Dimension size = super.getPreferredSize();
    size.height = JBUI.scale(29);
    return size;
  }

  @Override
  public boolean isOpaque() {
    // Only the selected tab paints its background.
    return mySelectedTab == this;
  }

  @Override
  public Color getBackground() {
    return mySelectedTab == this
           ? JBUI.CurrentTheme.SearchEverywhere.selectedTabColor()
           : super.getBackground();
  }
}
/**
 * Cancels the running search pass (if any) and schedules a fresh {@link CalcThread}
 * for the current pattern once the old one has fully finished. Must be called on
 * the EDT. The restart-request counter discards stale restart requests so at most
 * one CalcThread is ever started per burst of calls.
 */
private void rebuildList() {
  assert EventQueue.isDispatchThread() : "Must be EDT";
  if (myCalcThread != null && !myCurrentWorker.isProcessed()) {
    myCurrentWorker = myCalcThread.cancel();
  }
  if (myCalcThread != null && !myCalcThread.isCanceled()) {
    myCalcThread.cancel();
  }

  String pattern = getSearchPattern();
  // Let the selected contributor strip its own control symbols before matching.
  String matcherString = mySelectedTab.getContributor()
                                      .map(contributor -> contributor.filterControlSymbols(pattern))
                                      .orElse(pattern);
  MinusculeMatcher matcher = NameUtil.buildMatcher("*" + matcherString, NameUtil.MatchingCaseSensitivity.NONE);
  // Renderers read the matcher from the list to highlight matched fragments.
  MatcherHolder.associateMatcher(myResultsList, matcher);

  //assert project != null;
  //myRenderer.myProject = project;
  synchronized (myWorkerRestartRequestLock) { // this lock together with RestartRequestId should be enough to prevent two CalcThreads running at the same time
    final int currentRestartRequest = ++myCalcThreadRestartRequestId;
    myCurrentWorker.doWhenProcessed(() -> {
      synchronized (myWorkerRestartRequestLock) {
        if (currentRestartRequest != myCalcThreadRestartRequestId) {
          // A newer rebuild request superseded this one — drop it.
          return;
        }
        myCalcThread = new CalcThread(pattern, null);
        myCurrentWorker = myCalcThread.start();
      }
    });
  }
}
// Current query text; empty string while the field is not yet constructed.
private String getSearchPattern() {
  if (mySearchField == null) {
    return "";
  }
  return mySearchField.getText();
}
/**
 * Wires keyboard and mouse behavior: Tab/Shift+Tab cycles tabs, Enter/click selects
 * an item, Escape closes, typing restarts the search, and leaving dumb mode
 * re-runs the search automatically.
 */
private void initSearchActions() {
  mySearchField.addKeyListener(new KeyAdapter() {
    @Override
    public void keyPressed(KeyEvent e) {
      if (e.getKeyCode() == KeyEvent.VK_TAB) {
        if (e.getModifiers() == 0) {
          switchToNextTab();
          e.consume();
        } else if (e.getModifiers() == InputEvent.SHIFT_MASK) {
          switchToPrevTab();
          e.consume();
        }
      }

      int index = myResultsList.getSelectedIndex();
      if (e.getKeyCode() == KeyEvent.VK_ENTER && index >= 0) {
        elementSelected(index, e.getModifiers());
      }
    }
  });

  AnAction escape = ActionManager.getInstance().getAction("EditorEscape");
  DumbAwareAction.create(__ -> {
    stopSearching();
    searchFinishedHandler.run();
  }).registerCustomShortcutSet(escape == null ? CommonShortcuts.ESCAPE : escape.getShortcutSet(), this);

  mySearchField.getDocument().addDocumentListener(new DocumentAdapter() {
    @Override
    protected void textChanged(DocumentEvent e) {
      // A new query invalidates any manual checkbox lock and restarts the search.
      nonProjectCheckBoxLocked = false;
      rebuildList();
    }
  });

  myNonProjectCB.addItemListener(e -> rebuildList());
  // A direct click on the checkbox is a deliberate user choice — lock it.
  myNonProjectCB.addActionListener(e -> nonProjectCheckBoxLocked = true);

  myResultsList.addMouseListener(new MouseAdapter() {
    @Override
    public void mouseClicked(MouseEvent e) {
      if (e.getButton() == MouseEvent.BUTTON1) {
        e.consume();
        final int i = myResultsList.locationToIndex(e.getPoint());
        if (i > -1) {
          myResultsList.setSelectedIndex(i);
          elementSelected(i, e.getModifiers());
        }
      }
    }
  });

  myResultsList.addListSelectionListener(e -> {
    // Keep the info hint in sync with the selection.
    Object selectedValue = myResultsList.getSelectedValue();
    if (selectedValue != null && myHint != null && myHint.isVisible()) {
      updateHint(selectedValue);
    }
  });

  myProject.getMessageBus().connect(this).subscribe(DumbService.DUMB_MODE, new DumbService.DumbModeListener() {
    @Override
    public void exitDumbMode() {
      // More results may be available once indexing finishes.
      ApplicationManager.getApplication().invokeLater(() -> rebuildList());
    }
  });
}
// Reacts to Enter/click on a row: either expands the "...more" placeholder
// or navigates to the chosen item.
private void elementSelected(int index, int modifiers) {
  SearchEverywhereContributor contributor = myListModel.getContributorForIndex(index);
  if (myListModel.isMoreElement(index)) {
    showMoreElements(contributor);
    return;
  }
  gotoSelectedItem(myListModel.getElementAt(index), contributor, modifiers, getSearchPattern());
}
/**
 * Starts a new search pass that appends additional results for the given
 * contributor (the "...more" expansion), using the same single-CalcThread
 * restart protocol as {@link #rebuildList()}.
 */
private void showMoreElements(SearchEverywhereContributor contributor) {
  synchronized (myWorkerRestartRequestLock) { // this lock together with RestartRequestId should be enough to prevent two CalcThreads running at the same time
    final int currentRestartRequest = ++myCalcThreadRestartRequestId;
    myCurrentWorker.doWhenProcessed(() -> {
      synchronized (myWorkerRestartRequestLock) {
        if (currentRestartRequest != myCalcThreadRestartRequestId) {
          // A newer request superseded this one — drop it.
          return;
        }
        myCalcThread = new CalcThread(getSearchPattern(), contributor);
        myCurrentWorker = myCalcThread.start();
      }
    });
  }
}
// Delegates the selection to its contributor; closes the popup when the
// contributor says so, otherwise just repaints the list.
private void gotoSelectedItem(Object value, SearchEverywhereContributor contributor, int modifiers, String searchText) {
  if (contributor.processSelectedItem(value, modifiers, searchText)) {
    stopSearching();
    searchFinishedHandler.run();
  }
  else {
    myResultsList.repaint();
  }
}
// Cancels pending list updates and the running background search pass, if any.
private void stopSearching() {
  listOperationsAlarm.cancelAllRequests();
  if (myCalcThread != null && !myCalcThread.isCanceled()) {
    myCalcThread.cancel();
  }
}
/**
 * Called when a search pass found nothing. First tries widening the scope by
 * auto-enabling "non-project items" (unless the user set the checkbox manually —
 * see {@code nonProjectCheckBoxLocked}); otherwise just hides the hint popup.
 */
private void handleEmptyResults() {
  ApplicationManager.getApplication().invokeLater(() -> {
    if (!nonProjectCheckBoxLocked && !isUseNonProjectItems() && !getSearchPattern().isEmpty()) {
      // Widening triggers a rebuild, which will search again with the larger scope.
      setUseNonProjectItems(true);
      return;
    }

    hideHint();
  });
}
/**
 * One background search pass. Runs on a pooled thread; all mutations of the list
 * model are posted back to the EDT via {@code listOperationsAlarm}. Either rebuilds
 * the whole list ({@code contributorToExpand == null}) or appends more items for a
 * single contributor. Completion/cancellation is signalled through {@code myDone}.
 */
@SuppressWarnings("Duplicates") //todo remove suppress #UX-1
private class CalcThread implements Runnable {
  private final String pattern;
  private final ProgressIndicator myProgressIndicator = new ProgressIndicatorBase();
  // Resolved (done or rejected) when this pass has fully finished.
  private final ActionCallback myDone = new ActionCallback();
  // Non-null when this pass only expands one contributor's "...more" row.
  private final SearchEverywhereContributor contributorToExpand;

  public CalcThread(@NotNull String pattern, @Nullable SearchEverywhereContributor expand) {
    this.pattern = pattern;
    contributorToExpand = expand;
  }

  @Override
  public void run() {
    try {
      check();

      if (contributorToExpand == null) {
        resetList();
      } else {
        showMore(contributorToExpand);
      }
    }
    catch (ProcessCanceledException ignore) {
      myDone.setRejected();
    }
    catch (Exception e) {
      LOG.error(e);
      myDone.setRejected();
    }
    finally {
      if (!isCanceled()) {
        // Restore the regular "nothing found" text after the pass completes.
        listOperationsAlarm.addRequest(() -> myResultsList.getEmptyText().setText(getEmptyText()), 0);
      }
      if (!myDone.isProcessed()) {
        myDone.setDone();
      }
    }
  }

  // Full rebuild: clears the list (after a short delay to avoid flicker), then
  // queries either the selected contributor or all used contributors.
  private void resetList() {
    listOperationsAlarm.cancelAllRequests();
    listOperationsAlarm.addRequest(() -> {
      Dimension oldSize = getPreferredSize();
      myResultsList.getEmptyText().setText(IdeBundle.message("label.choosebyname.searching"));
      myListModel.clear();
      Dimension newSize = getPreferredSize();
      firePropertyChange("preferredSize", oldSize, newSize);
    }, 200);

    boolean anyFound = false;
    SearchEverywhereContributor selectedContributor = mySelectedTab.getContributor().orElse(null);
    if (selectedContributor != null) {
      anyFound = addContributorItems(selectedContributor, SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT, true);
    } else {
      boolean clearBefore = true;
      for (SearchEverywhereContributor contributor : getUsedContributors()) {
        int count = myServiceContributors.contains(contributor) ? -1 : MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT; //show ALL items for service contributors
        anyFound |= addContributorItems(contributor, count, clearBefore);
        clearBefore = false;
      }
    }

    if (!anyFound) {
      handleEmptyResults();
    }
  }

  // "...more" expansion: ask for everything already shown plus one page.
  private void showMore(SearchEverywhereContributor contributor) {
    int delta = isAllTabSelected() ? MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT : SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT;
    int size = myListModel.getItemsForContributor(contributor) + delta;
    addContributorItems(contributor, size, false);
  }

  /**
   * Queries one contributor (on this background thread) and posts the new items to
   * the model on the EDT. Returns whether anything was found.
   */
  private boolean addContributorItems(SearchEverywhereContributor contributor, int count, boolean clearBefore) {
    ContributorSearchResult<Object> results =
      contributor.search(pattern, isUseNonProjectItems(), myContributorFilters.get(contributor.getSearchProviderId()), myProgressIndicator, count);
    boolean found = !results.isEmpty();

    if (clearBefore) {
      // Also drops the delayed clear scheduled by resetList().
      listOperationsAlarm.cancelAllRequests();
    }

    listOperationsAlarm.addRequest(() -> {
      if (isCanceled()) {
        return;
      }

      Dimension oldSize = getPreferredSize();
      if (clearBefore) {
        myListModel.clear();
      }
      // De-duplicate against items already in the list.
      List<Object> itemsToAdd = results.getItems().stream()
                                       .filter(o -> !myListModel.contains(o))
                                       .collect(Collectors.toList());
      if (!itemsToAdd.isEmpty()) {
        myListModel.addElements(itemsToAdd, contributor, results.hasMoreItems());
        ScrollingUtil.ensureSelectionExists(myResultsList);
      }
      firePropertyChange("preferredSize", oldSize, getPreferredSize());
    }, 0);

    return found;
  }

  // Throws ProcessCanceledException if this pass was cancelled or superseded.
  protected void check() {
    myProgressIndicator.checkCanceled();
    if (myDone.isRejected()) throw new ProcessCanceledException();
    assert myCalcThread == this : "There are two CalcThreads running before one of them was cancelled";
  }

  private boolean isCanceled() {
    return myProgressIndicator.isCanceled() || myDone.isRejected();
  }

  // Requests cancellation; callers wait on the returned callback for full shutdown.
  public ActionCallback cancel() {
    myProgressIndicator.cancel();
    //myDone.setRejected();
    return myDone;
  }

  public ActionCallback start() {
    ApplicationManager.getApplication().executeOnPooledThread(this);
    return myDone;
  }
}
/**
 * Contributors queried by the "All" tab: all service contributors plus those
 * shown contributors the user left enabled in the "All" tab's filter.
 */
@NotNull
private List<SearchEverywhereContributor> getUsedContributors() {
  SearchEverywhereContributorFilter<String> contributorsFilter =
    (SearchEverywhereContributorFilter<String>) myContributorFilters.get(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID);
  List<SearchEverywhereContributor> result = new ArrayList<>(myServiceContributors);
  for (SearchEverywhereContributor contributor : myShownContributors) {
    if (contributorsFilter.isSelected(contributor.getSearchProviderId())) {
      result.add(contributor);
    }
  }
  return result;
}
/**
 * Renderer that dispatches each row to the appropriate delegate: the "...more"
 * renderer for placeholder rows, otherwise the owning contributor's renderer —
 * prefixed with a group title for the first row of each group in the "All" tab.
 *
 * Fix: the misspelled local parameter {@code hotizontalGap} is renamed to
 * {@code horizontalGap} (private helper, no external callers affected).
 */
private class CompositeCellRenderer implements ListCellRenderer<Object> {

  @Override
  public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
    if (myListModel.isMoreElement(index)) {
      Component cmp = moreRenderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
      return wrap(cmp, 1, 7);
    }

    SearchEverywhereContributor contributor = myListModel.getContributorForIndex(index);
    Component component = contributor.getElementsRenderer(myResultsList)
      .getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);

    if (isAllTabSelected() && myListModel.isGroupFirstItem(index)) {
      component = groupTitleRenderer.withDisplayedData(contributor.getGroupName(), component);
    }

    return wrap(component, 1, 0);
  }

  // Wraps the delegate component in a panel that adds vertical/horizontal padding
  // while preserving its background and opacity.
  private Component wrap(Component cmp, int verticalGap, int horizontalGap) {
    JPanel panel = new JPanel(new BorderLayout());
    panel.setOpaque(cmp.isOpaque());
    if (cmp.isOpaque()) {
      panel.setBackground(cmp.getBackground());
    }
    panel.add(cmp, BorderLayout.CENTER);
    panel.setBorder(JBUI.Borders.empty(verticalGap, horizontalGap));
    return panel;
  }
}
private final MoreRenderer moreRenderer = new MoreRenderer();
/** Renderer for the "... more" placeholder row at the end of a truncated group. */
public static class MoreRenderer extends JPanel implements ListCellRenderer<Object> {
  final JLabel label;

  private MoreRenderer() {
    super(new BorderLayout());
    label = groupInfoLabel("... more");
    add(label, BorderLayout.CENTER);
  }

  @Override
  public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
    // Only the background changes with selection; the text is constant.
    setBackground(UIUtil.getListBackground(isSelected));
    return this;
  }
}
private final GroupTitleRenderer groupTitleRenderer = new GroupTitleRenderer();
/**
 * Decorator that prepends a group title (label + separator line) above an item's
 * rendered component. A single shared instance is reused; {@link #withDisplayedData}
 * swaps in the title text and item component for each paint.
 */
public static class GroupTitleRenderer extends JPanel {

  private final JLabel titleLabel;
  private final BorderLayout myLayout = new BorderLayout();

  public GroupTitleRenderer() {
    setLayout(myLayout);
    setBackground(UIUtil.getListBackground(false));
    titleLabel = groupInfoLabel("Group");
    SeparatorComponent separatorComponent =
      new SeparatorComponent(titleLabel.getPreferredSize().height / 2, JBUI.CurrentTheme.SearchEverywhere.listSeparatorColor(),null);

    JPanel topPanel = JBUI.Panels.simplePanel(5, 0)
      .addToCenter(separatorComponent)
      .addToLeft(titleLabel)
      .withBorder(JBUI.Borders.empty(0, 7))
      .withBackground(UIUtil.getListBackground());

    add(topPanel, BorderLayout.NORTH);
  }

  public GroupTitleRenderer withDisplayedData(String title, Component itemContent) {
    titleLabel.setText(title);
    // Replace the previously shown item component — the instance is reused across rows.
    Component prevContent = myLayout.getLayoutComponent(BorderLayout.CENTER);
    if (prevContent != null) {
      remove(prevContent);
    }
    add(itemContent, BorderLayout.CENTER);

    return this;
  }
}
/**
 * List model keeping found items grouped by the contributor that produced them.
 * A group that was truncated ends with a {@link #MORE_ELEMENT} placeholder row.
 *
 * Fix: in {@link #addElements}, the "contributor already present" branch fired
 * {@code fireIntervalAdded} with an inclusive end index one past the actually
 * inserted range ({@code startIndex + pairsToAdd.size()} instead of
 * {@code ... - 1}); the first branch already computed it correctly.
 */
private static class SearchListModel extends AbstractListModel<Object> {

  // Sentinel stored in place of a real item for the "...more" row.
  private static final Object MORE_ELEMENT = new Object();

  // Items in display order, each paired with its producing contributor.
  private final List<Pair<Object, SearchEverywhereContributor>> listElements = new ArrayList<>();

  @Override
  public int getSize() {
    return listElements.size();
  }

  @Override
  public Object getElementAt(int index) {
    return listElements.get(index).first;
  }

  /** Real items (excluding the "...more" placeholder) already found for the contributor. */
  public Collection<Object> getFoundItems(SearchEverywhereContributor contributor) {
    return listElements.stream()
                       .filter(pair -> pair.second == contributor && pair.first != MORE_ELEMENT)
                       .map(pair -> pair.getFirst())
                       .collect(Collectors.toList());
  }

  /** Whether the contributor's group currently ends with a "...more" placeholder. */
  public boolean hasMoreElements(SearchEverywhereContributor contributor) {
    return listElements.stream()
                       .anyMatch(pair -> pair.first == MORE_ELEMENT && pair.second == contributor);
  }

  /**
   * Inserts items at the end of the contributor's group (or appends a new group),
   * replacing any existing "...more" placeholder and re-adding one when
   * {@code hasMore} is set. Fires a single interval-added event for the new rows.
   */
  public void addElements(List<Object> items, SearchEverywhereContributor contributor, boolean hasMore) {
    if (items.isEmpty()) {
      return;
    }

    List<Pair<Object, SearchEverywhereContributor>> pairsToAdd = items.stream()
                                                                      .map(o -> Pair.create(o, contributor))
                                                                      .collect(Collectors.toList());

    int insertPoint = contributors().lastIndexOf(contributor);
    int startIndex;
    int endIndex;
    if (insertPoint < 0) {
      // No items of this contributor yet — append a new group at the end.
      startIndex = listElements.size();
      listElements.addAll(pairsToAdd);
      if (hasMore) {
        listElements.add(Pair.create(MORE_ELEMENT, contributor));
      }
      endIndex = listElements.size() - 1;
    } else {
      // Contributor group already exists — insert after its last element,
      // dropping the old "...more" placeholder if present.
      if (isMoreElement(insertPoint)) {
        listElements.remove(insertPoint);
      } else {
        insertPoint += 1;
      }
      startIndex = insertPoint;
      listElements.addAll(insertPoint, pairsToAdd);
      // Inclusive index of the last freshly inserted row.
      endIndex = startIndex + pairsToAdd.size() - 1;
      if (hasMore) {
        listElements.add(insertPoint + pairsToAdd.size(), Pair.create(MORE_ELEMENT, contributor));
        endIndex += 1;
      }
    }
    fireIntervalAdded(this, startIndex, endIndex);
  }

  public void clear() {
    int index = listElements.size() - 1;
    listElements.clear();
    if (index >= 0) {
      fireIntervalRemoved(this, 0, index);
    }
  }

  public boolean contains(Object val) {
    return values().contains(val);
  }

  public boolean isMoreElement(int index) {
    return listElements.get(index).first == MORE_ELEMENT;
  }

  public SearchEverywhereContributor getContributorForIndex(int index) {
    return listElements.get(index).second;
  }

  /** True for the first row of each contributor group (used to draw group titles). */
  public boolean isGroupFirstItem(int index) {
    return index == 0
           || listElements.get(index).second != listElements.get(index - 1).second;
  }

  /**
   * Number of real items shown for the contributor (the "...more" row excluded).
   * Assumes the contributor is present in the model.
   */
  public int getItemsForContributor(SearchEverywhereContributor contributor) {
    List<SearchEverywhereContributor> contributorsList = contributors();
    int first = contributorsList.indexOf(contributor);
    int last = contributorsList.lastIndexOf(contributor);
    if (isMoreElement(last)) {
      last -= 1;
    }
    return last - first + 1;
  }

  // Live view of the contributor column of listElements.
  @NotNull
  private List<SearchEverywhereContributor> contributors() {
    return Lists.transform(listElements, pair -> pair.getSecond());
  }

  // Live view of the item column of listElements.
  @NotNull
  private List<Object> values() {
    return Lists.transform(listElements, pair -> pair.getFirst());
  }
}
private class ShowInFindToolWindowAction extends DumbAwareAction {
public ShowInFindToolWindowAction() {
super(IdeBundle.message("searcheverywhere.show.in.find.window.button.name"),
IdeBundle.message("searcheverywhere.show.in.find.window.button.name"), AllIcons.General.Pin_tab);
}
@Override
public void actionPerformed(AnActionEvent e) {
stopSearching();
Collection<SearchEverywhereContributor> contributors = isAllTabSelected() ? getUsedContributors() : Collections.singleton(mySelectedTab.getContributor().get());
contributors = contributors.stream()
.filter(SearchEverywhereContributor::showInFindResults)
.collect(Collectors.toList());
if (contributors.isEmpty()) {
return;
}
String searchText = getSearchPattern();
boolean everywhere = isUseNonProjectItems();
String contributorsString = contributors.stream()
.map(SearchEverywhereContributor::getGroupName)
.collect(Collectors.joining(", "));
UsageViewPresentation presentation = new UsageViewPresentation();
String tabCaptionText = IdeBundle.message("searcheverywhere.found.matches.title", searchText, contributorsString);
presentation.setCodeUsagesString(tabCaptionText);
presentation.setUsagesInGeneratedCodeString(IdeBundle.message("searcheverywhere.found.matches.generated.code.title", searchText, contributorsString));
presentation.setTargetsNodeText(IdeBundle.message("searcheverywhere.found.targets.title", searchText, contributorsString));
presentation.setTabName(tabCaptionText);
presentation.setTabText(tabCaptionText);
Collection<Usage> usages = new LinkedHashSet<>();
Collection<PsiElement> targets = new LinkedHashSet<>();
Collection<Object> cached = contributors.stream()
.flatMap(contributor -> myListModel.getFoundItems(contributor).stream())
.collect(Collectors.toList());
fillUsages(cached, usages, targets);
Collection<SearchEverywhereContributor> contributorsForAdditionalSearch;
contributorsForAdditionalSearch = contributors.stream()
.filter(contributor -> myListModel.hasMoreElements(contributor))
.collect(Collectors.toList());
searchFinishedHandler.run();
if (!contributorsForAdditionalSearch.isEmpty()) {
ProgressManager.getInstance().run(new Task.Modal(myProject, tabCaptionText, true) {
private final ProgressIndicator progressIndicator = new ProgressIndicatorBase();
@Override
public void run(@NotNull ProgressIndicator indicator) {
contributorsForAdditionalSearch.forEach(contributor -> {
if (!progressIndicator.isCanceled()) {
//todo overflow #UX-1
List<Object> foundElements =
contributor.search(searchText, everywhere, myContributorFilters.get(contributor.getSearchProviderId()), progressIndicator);
fillUsages(foundElements, usages, targets);
}
});
}
@Override
public void onCancel() {
progressIndicator.cancel();
}
@Override
public void onSuccess() {
showInFindWindow(targets, usages, presentation);
}
@Override
public void onThrowable(@NotNull Throwable error) {
progressIndicator.cancel();
}
});
} else {
showInFindWindow(targets, usages, presentation);
}
}
private void fillUsages(Collection<Object> foundElements, Collection<Usage> usages, Collection<PsiElement> targets) {
foundElements.stream()
.filter(o -> o instanceof PsiElement)
.forEach(o -> {
PsiElement element = (PsiElement)o;
if (element.getTextRange() != null) {
UsageInfo usageInfo = new UsageInfo(element);
usages.add(new UsageInfo2UsageAdapter(usageInfo));
}
else {
targets.add(element);
}
});
}
private void showInFindWindow(Collection<PsiElement> targets, Collection<Usage> usages, UsageViewPresentation presentation) {
UsageTarget[] targetsArray = targets.isEmpty() ? UsageTarget.EMPTY_ARRAY : PsiElement2UsageTargetAdapter.convert(PsiUtilCore.toPsiElementArray(targets));
Usage[] usagesArray = usages.toArray(Usage.EMPTY_ARRAY);
UsageViewManager.getInstance(myProject).showUsages(targetsArray, usagesArray, presentation);
}
}
private class ShowFilterAction extends ToggleAction implements DumbAware {
private JBPopup myFilterPopup;
public ShowFilterAction() {
super("Filter", "Filter files by type", AllIcons.General.Filter);
}
@Override
public boolean isSelected(final AnActionEvent e) {
return myFilterPopup != null && !myFilterPopup.isDisposed();
}
@Override
public void setSelected(final AnActionEvent e, final boolean state) {
if (state) {
showPopup(e.getInputEvent().getComponent());
}
else {
if (myFilterPopup != null && !myFilterPopup.isDisposed()) {
myFilterPopup.cancel();
}
}
}
@Override
public void update(@NotNull AnActionEvent e) {
Icon icon = getTemplatePresentation().getIcon();
e.getPresentation().setIcon(isActive() ? ExecutionUtil.getLiveIndicator(icon) : icon);
e.getPresentation().setEnabled(myContributorFilters.get(getSelectedContributorID()) != null);
e.getPresentation().putClientProperty(SELECTED_PROPERTY, isSelected(e));
}
private boolean isActive() {
String contributorID = getSelectedContributorID();
SearchEverywhereContributorFilter<?> filter = myContributorFilters.get(contributorID);
if (filter == null) {
return false;
}
return filter.getAllElements().size() != filter.getSelectedElements().size();
}
private void showPopup(Component anchor) {
if (myFilterPopup != null) {
return;
}
JBPopupListener popupCloseListener = new JBPopupListener() {
@Override
public void onClosed(LightweightWindowEvent event) {
myFilterPopup = null;
}
};
myFilterPopup = JBPopupFactory.getInstance()
.createComponentPopupBuilder(createFilterPanel(), null)
.setModalContext(false)
.setFocusable(false)
.setResizable(true)
.setCancelOnClickOutside(false)
.setMinSize(new Dimension(200, 200))
.setDimensionServiceKey(myProject, "Search_Everywhere_Filter_Popup", false)
.addListener(popupCloseListener)
.createPopup();
Disposer.register(SearchEverywhereUI.this, myFilterPopup);
myFilterPopup.showUnderneathOf(anchor);
}
private JComponent createFilterPanel() {
SearchEverywhereContributorFilter<?> filter = myContributorFilters.get(getSelectedContributorID());
ElementsChooser<?> chooser = createChooser(filter);
JPanel panel = new JPanel();
panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
panel.add(chooser);
JPanel buttons = new JPanel();
JButton all = new JButton("All");
all.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
chooser.setAllElementsMarked(true);
}
});
buttons.add(all);
JButton none = new JButton("None");
none.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
chooser.setAllElementsMarked(false);
}
});
buttons.add(none);
JButton invert = new JButton("Invert");
invert.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
chooser.invertSelection();
}
});
buttons.add(invert);
panel.add(buttons);
return panel;
}
private <T> ElementsChooser<T> createChooser(SearchEverywhereContributorFilter<T> filter) {
ElementsChooser<T> res = new ElementsChooser<T>(filter.getAllElements(), false) {
@Override
protected String getItemText(@NotNull T value) {
return filter.getElementText(value);
}
@Nullable
@Override
protected Icon getItemIcon(@NotNull T value) {
return filter.getElementIcon(value);
}
};
res.markElements(filter.getSelectedElements());
ElementsChooser.ElementsMarkListener<T> listener = (element, isMarked) -> {
filter.setSelected(element, isMarked);
rebuildList();
};
res.addElementsMarkListener(listener);
return res;
}
}
private static JLabel groupInfoLabel(String text) {
JLabel label = new JLabel(text);
label.setForeground(UIUtil.getLabelDisabledForeground());
label.setFont(UIUtil.getLabelFont().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.SMALL)));
label.setOpaque(false);
return label;
}
private String getEmptyText() {
return mySelectedTab.getContributor()
.map(c -> IdeBundle.message("searcheverywhere.nothing.found.for.contributor.anywhere", c.getGroupName()))
.orElse(IdeBundle.message("searcheverywhere.nothing.found.for.all.anywhere"));
}
}
| IDEA-193398
| platform/lang-impl/src/com/intellij/ide/actions/searcheverywhere/SearchEverywhereUI.java | IDEA-193398 | <ide><path>latform/lang-impl/src/com/intellij/ide/actions/searcheverywhere/SearchEverywhereUI.java
<ide> JPanel suggestionsPanel = createSuggestionsPanel();
<ide>
<ide> myResultsList.setModel(myListModel);
<add> myResultsList.setFocusable(false);
<ide> myResultsList.setCellRenderer(new CompositeCellRenderer());
<add> myResultsList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
<ide>
<ide> ScrollingUtil.installActions(myResultsList, getSearchField());
<ide>
<ide> }
<ide> }
<ide>
<del> int index = myResultsList.getSelectedIndex();
<del> if (e.getKeyCode() == KeyEvent.VK_ENTER && index >= 0) {
<del> elementSelected(index, e.getModifiers());
<add> if (e.isShiftDown()) {
<add> if (e.getKeyCode() == KeyEvent.VK_DOWN) {
<add> //ScrollingUtil.moveDown(myResultsList, e.getModifiersEx());
<add> myResultsList.dispatchEvent(e);
<add> e.consume();
<add> }
<add> if (e.getKeyCode() == KeyEvent.VK_UP) {
<add> //ScrollingUtil.moveUp(myResultsList, e.getModifiersEx());
<add> myResultsList.dispatchEvent(e);
<add> e.consume();
<add> }
<add> }
<add>
<add> int[] indices = myResultsList.getSelectedIndices();
<add> if (e.getKeyCode() == KeyEvent.VK_ENTER && indices.length != 0) {
<add> elementsSelected(indices, e.getModifiers());
<ide> }
<ide> }
<ide> });
<ide> myResultsList.addMouseListener(new MouseAdapter() {
<ide> @Override
<ide> public void mouseClicked(MouseEvent e) {
<del> if (e.getButton() == MouseEvent.BUTTON1) {
<add> boolean multiSelectMode = e.isShiftDown() || e.isControlDown();
<add> if (e.getButton() == MouseEvent.BUTTON1 && !multiSelectMode) {
<ide> e.consume();
<ide> final int i = myResultsList.locationToIndex(e.getPoint());
<ide> if (i > -1) {
<ide> myResultsList.setSelectedIndex(i);
<del> elementSelected(i, e.getModifiers());
<add> elementsSelected(new int[]{i}, e.getModifiers());
<ide> }
<ide> }
<ide> }
<ide> });
<ide> }
<ide>
<del> private void elementSelected(int i, int modifiers) {
<del> SearchEverywhereContributor contributor = myListModel.getContributorForIndex(i);
<del> if (myListModel.isMoreElement(i)) {
<add> private void elementsSelected(int[] indexes, int modifiers) {
<add> if (indexes.length == 1 && myListModel.isMoreElement(indexes[0])) {
<add> SearchEverywhereContributor contributor = myListModel.getContributorForIndex(indexes[0]);
<ide> showMoreElements(contributor);
<add> return;
<add> }
<add>
<add> indexes = Arrays.stream(indexes)
<add> .filter(i -> !myListModel.isMoreElement(i))
<add> .toArray();
<add>
<add> boolean closePopup = false;
<add> for (int i: indexes) {
<add> SearchEverywhereContributor contributor = myListModel.getContributorForIndex(i);
<add> Object value = myListModel.getElementAt(i);
<add> closePopup |= contributor.processSelectedItem(value, modifiers, getSearchPattern());
<add> }
<add>
<add> if (closePopup) {
<add> stopSearching();
<add> searchFinishedHandler.run();
<ide> } else {
<del> gotoSelectedItem(myListModel.getElementAt(i), contributor, modifiers, getSearchPattern());
<add> myResultsList.repaint();
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 3de5f8834255dd6b8edb760f1467cf3a87bb28ce | 0 | sculptor/sculptor,sculptor/sculptor,sculptor/sculptor | package org.sculptor.example.helloworld.milkyway.serviceapi;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import org.sculptor.example.helloworld.milkyway.domain.Moon;
import org.sculptor.example.helloworld.milkyway.domain.Planet;
import org.sculptor.example.helloworld.milkyway.exception.PlanetNotFoundException;
import org.sculptor.framework.domain.PagedResult;
import org.sculptor.framework.domain.PagingParameter;
import org.sculptor.framework.test.AbstractDbUnitJpaTests;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Spring based transactional test with DbUnit support.
*/
public class PlanetServiceTest extends AbstractDbUnitJpaTests implements PlanetServiceTestBase {
@Autowired
private PlanetService planetService;
@Test(expected=PlanetNotFoundException.class)
public void testSayHello() throws Exception {
String greeting = planetService.sayHello(getServiceContext(), "Earth");
assertEquals("Hello from Earth", greeting);
}
@Test
public void testSayHelloError() throws Exception {
planetService.sayHello(getServiceContext(), "Pluto");
}
@Test
public void testGetPlanet() throws Exception {
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
assertNotNull(earth);
assertEquals("Earth", earth.getName());
}
@Test
public void testFindById() throws Exception {
Planet earth = planetService.findById(getServiceContext(), 11L);
assertEquals("Earth", earth.getName());
}
@Test
public void testFindAll() throws Exception {
PagingParameter pagingParameter = PagingParameter.pageAccess(10);
PagedResult<Planet> result = planetService.findAll(getServiceContext(), pagingParameter);
assertEquals(2, result.getValues().size());
}
@Test
public void testSave() throws Exception {
int moonsBefore = countRowsInTable(Moon.class);
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
earth.addMoon(new Moon("Moon2"));
planetService.save(getServiceContext(), earth);
int moonsAfter = countRowsInTable(Moon.class);
assertEquals(moonsBefore + 1, moonsAfter);
}
@Test
public void testDeleteOrphan() throws Exception {
int moonsBefore = countRowsInTable(Moon.class);
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
// delete orphan
earth.removeAllMoons();
planetService.save(getServiceContext(), earth);
int moonsAfter = countRowsInTable(Moon.class);
assertEquals(moonsBefore - 1, moonsAfter);
}
@Test
public void testDelete() throws Exception {
int planetsBefore = countRowsInTable(Planet.class);
int moonsBefore = countRowsInTable(Moon.class);
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
planetService.delete(getServiceContext(), earth);
int planetsAfter = countRowsInTable(Planet.class);
int moonsAfter = countRowsInTable(Moon.class);
assertEquals(planetsBefore - 1, planetsAfter);
assertEquals(moonsBefore - 1, moonsAfter);
}
@Test
public void testPopulateAssociations() throws Exception {
// planet.moons has lazy="false", due to same aggregate
}
}
| sculptor-example/helloworld-example/helloworld/src/test/java/org/sculptor/example/helloworld/milkyway/serviceapi/PlanetServiceTest.java | package org.sculptor.example.helloworld.milkyway.serviceapi;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import org.sculptor.example.helloworld.milkyway.domain.Moon;
import org.sculptor.example.helloworld.milkyway.domain.Planet;
import org.sculptor.example.helloworld.milkyway.exception.PlanetNotFoundException;
import org.sculptor.framework.domain.PagedResult;
import org.sculptor.framework.domain.PagingParameter;
import org.sculptor.framework.test.AbstractDbUnitJpaTests;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.ExpectedException;
/**
* Spring based transactional test with DbUnit support.
*/
public class PlanetServiceTest extends AbstractDbUnitJpaTests implements PlanetServiceTestBase {
@Autowired
private PlanetService planetService;
@Test
public void testSayHello() throws Exception {
String greeting = planetService.sayHello(getServiceContext(), "Earth");
assertEquals("Hello from Earth", greeting);
}
@Test
@ExpectedException(PlanetNotFoundException.class)
public void testSayHelloError() throws Exception {
planetService.sayHello(getServiceContext(), "Pluto");
}
@Test
public void testGetPlanet() throws Exception {
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
assertNotNull(earth);
assertEquals("Earth", earth.getName());
}
@Test
public void testFindById() throws Exception {
Planet earth = planetService.findById(getServiceContext(), 11L);
assertEquals("Earth", earth.getName());
}
@Test
public void testFindAll() throws Exception {
PagingParameter pagingParameter = PagingParameter.pageAccess(10);
PagedResult<Planet> result = planetService.findAll(getServiceContext(), pagingParameter);
assertEquals(2, result.getValues().size());
}
@Test
public void testSave() throws Exception {
int moonsBefore = countRowsInTable(Moon.class);
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
earth.addMoon(new Moon("Moon2"));
planetService.save(getServiceContext(), earth);
int moonsAfter = countRowsInTable(Moon.class);
assertEquals(moonsBefore + 1, moonsAfter);
}
@Test
public void testDeleteOrphan() throws Exception {
int moonsBefore = countRowsInTable(Moon.class);
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
// delete orphan
earth.removeAllMoons();
planetService.save(getServiceContext(), earth);
int moonsAfter = countRowsInTable(Moon.class);
assertEquals(moonsBefore - 1, moonsAfter);
}
@Test
public void testDelete() throws Exception {
int planetsBefore = countRowsInTable(Planet.class);
int moonsBefore = countRowsInTable(Moon.class);
Planet earth = planetService.getPlanet(getServiceContext(), "Earth");
planetService.delete(getServiceContext(), earth);
int planetsAfter = countRowsInTable(Planet.class);
int moonsAfter = countRowsInTable(Moon.class);
assertEquals(planetsBefore - 1, planetsAfter);
assertEquals(moonsBefore - 1, moonsAfter);
}
@Test
public void testPopulateAssociations() throws Exception {
// planet.moons has lazy="false", due to same aggregate
}
}
| refactors expected exception into Junit annotation
| sculptor-example/helloworld-example/helloworld/src/test/java/org/sculptor/example/helloworld/milkyway/serviceapi/PlanetServiceTest.java | refactors expected exception into Junit annotation | <ide><path>culptor-example/helloworld-example/helloworld/src/test/java/org/sculptor/example/helloworld/milkyway/serviceapi/PlanetServiceTest.java
<ide> import org.sculptor.framework.domain.PagingParameter;
<ide> import org.sculptor.framework.test.AbstractDbUnitJpaTests;
<ide> import org.springframework.beans.factory.annotation.Autowired;
<del>import org.springframework.test.annotation.ExpectedException;
<ide>
<ide> /**
<ide> * Spring based transactional test with DbUnit support.
<ide> @Autowired
<ide> private PlanetService planetService;
<ide>
<del> @Test
<add> @Test(expected=PlanetNotFoundException.class)
<ide> public void testSayHello() throws Exception {
<ide> String greeting = planetService.sayHello(getServiceContext(), "Earth");
<ide> assertEquals("Hello from Earth", greeting);
<ide> }
<ide>
<ide> @Test
<del> @ExpectedException(PlanetNotFoundException.class)
<ide> public void testSayHelloError() throws Exception {
<ide> planetService.sayHello(getServiceContext(), "Pluto");
<ide> } |
|
JavaScript | apache-2.0 | 32ee37d723c6fa97aaa53a3062f0c5d66a2f92bd | 0 | wenhuizhang/flowsim,flowgrammable/flowsim,wenhuizhang/flowsim,flowgrammable/flowsim | var assert = require('assert');
var msg = require('../msg.js');
var testAdapter = require('./testAdapter.js');
var controller = require('../controller.js')(testAdapter);
var events = require('../../../events.js');
//-----------------------------------------------------------------------------
// Create Packet Controller Tests
describe('===> Testing create packet controller: \n', function(){
it('Test if name not provided', function(done){
var testId = 'testerID1';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { name: '' };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingPacketName()));
done();
});
controller.module.auth.create(session, 'POST', {}, data, '127.0.0.1', testId);
});
it('Test if method is not POST', function(done){
var testId = 'testerID2';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {name:'something'};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.create(session,'GET', {}, data, '127.0.0.1', testId);
});
});
describe('===> Testing list packet controller: \n', function(){
it('Test if method is not GET', function(done){
var testId = 'testerID3';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.list(session,'POST', {}, data, '127.0.0.1', testId);
});
});
describe('===> Testing detail packet controller: \n', function(){
it('Test if packet_id not provided', function(done){
var testId = 'testerID4';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingId()));
done();
});
controller.module.auth.detail(session, 'GET', {}, data, '127.0.0.1', testId);
});
it('Test if method is not GET', function(done){
var testId = 'testerID5';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.detail(session,'POST', {}, data, '127.0.0.1', testId);
});
});
describe('===> Testing update packet controller: \n', function(){
it('Test if id not provided', function(done){
var testId = 'testerID6';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { id: '', name: 'something' };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingId()));
done();
});
controller.module.auth.update(session, 'PUT', {}, data, '127.0.0.1', testId);
});
it('Test if name not provided', function(done){
var testId = 'testerID7';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { id: 1, name: '' };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingPacketName()));
done();
});
controller.module.auth.update(session, 'PUT', {}, data, '127.0.0.1', testId);
});
it('Test if subscriber_id provided', function(done){
var testId = 'testerID8';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { id: 1, name: 'something', subscriber_id: 123 };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.notAuthorized()));
done();
});
controller.module.auth.update(session, 'PUT', {}, data, '127.0.0.1', testId);
});
it('Test if method is not PUT', function(done){
var testId = 'testerID9';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {name:'something'};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.update(session,'GET', {}, data, '127.0.0.1', testId);
});
});
describe('===> Testing destroy packet controller: \n', function(){
it('Test if packet_id not provided', function(done){
var testId = 'testerID10';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingId()));
done();
});
controller.module.auth.destroy(session, 'DEL', {}, data, '127.0.0.1', testId);
});
it('Test if method is not DEL', function(done){
var testId = 'testerID11';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.destroy(session,'POST', {}, data, '127.0.0.1', testId);
});
});
| src/backend/rest/packet/test/controllerTest.js | var assert = require('assert');
var msg = require('../msg.js');
var testAdapter = require('./testAdapter.js');
var controller = require('../controller.js')(testAdapter);
var events = require('../../../events.js');
//-----------------------------------------------------------------------------
// Create Packet Controller Tests
describe('===> Testing create packet controller: \n', function(){
it('Test if name not provided', function(done){
var testId = 'testerID1';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { name: '' };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingPacketName()));
done();
});
controller.module.auth.create(session, 'POST', {}, data, '127.0.0.1', testId);
});
it('Test if method is not POST', function(done){
var testId = 'testerID2';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {name:'something'};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.create(session,'GET', {}, data, '127.0.0.1', testId);
});
});
describe('===> Testing list packet controller: \n', function(){
it('Test if method is not GET', function(done){
var testId = 'testerID3';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.list(session,'POST', {}, data, '127.0.0.1', testId);
});
});
describe('===> Testing detail packet controller: \n', function(){
it('Test if packet_id not provided', function(done){
var testId = 'testerID4';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingId()));
done();
});
controller.module.auth.detail(session, 'GET', {}, data, '127.0.0.1', testId);
});
it('Test if method is not GET', function(done){
var testId = 'testerID5';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.detail(session,'POST', {}, data, '127.0.0.1', testId);
});
});
describe('===> Testing update packet controller: \n', function(){
it('Test if id not provided', function(done){
var testId = 'testerID6';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { id: '', name: 'something' };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingId()));
done();
});
controller.module.auth.update(session, 'PUT', {}, data, '127.0.0.1', testId);
});
it('Test if name not provided', function(done){
var testId = 'testerID7';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { id: 1, name: '' };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.missingPacketName()));
done();
});
controller.module.auth.update(session, 'PUT', {}, data, '127.0.0.1', testId);
});
it('Test if subscriber_id provided', function(done){
var testId = 'testerID8';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = { id: 1, name: 'something', subscriber_id: 123 };
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result), JSON.stringify(msg.notAuthorized()));
done();
});
controller.module.auth.update(session, 'PUT', {}, data, '127.0.0.1', testId);
});
it('Test if method is not PUT', function(done){
var testId = 'testerID2';
var session = { subscriber_id: 1, key: '', timeout: '' };
var data = {name:'something'};
events.Emitter.once(testId, function(result){
assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
done();
});
controller.module.auth.update(session,'GET', {}, data, '127.0.0.1', testId);
});
});
| Adding destroy test
| src/backend/rest/packet/test/controllerTest.js | Adding destroy test | <ide><path>rc/backend/rest/packet/test/controllerTest.js
<ide> controller.module.auth.update(session, 'PUT', {}, data, '127.0.0.1', testId);
<ide> });
<ide> it('Test if method is not PUT', function(done){
<del> var testId = 'testerID2';
<add> var testId = 'testerID9';
<ide> var session = { subscriber_id: 1, key: '', timeout: '' };
<ide> var data = {name:'something'};
<ide> events.Emitter.once(testId, function(result){
<ide> });
<ide> });
<ide>
<add>describe('===> Testing destroy packet controller: \n', function(){
<add> it('Test if packet_id not provided', function(done){
<add> var testId = 'testerID10';
<add> var session = { subscriber_id: 1, key: '', timeout: '' };
<add> var data = {};
<add> events.Emitter.once(testId, function(result){
<add> assert.equal(JSON.stringify(result), JSON.stringify(msg.missingId()));
<add> done();
<add> });
<add> controller.module.auth.destroy(session, 'DEL', {}, data, '127.0.0.1', testId);
<add> });
<add> it('Test if method is not DEL', function(done){
<add> var testId = 'testerID11';
<add> var session = { subscriber_id: 1, key: '', timeout: '' };
<add> var data = {};
<add> events.Emitter.once(testId, function(result){
<add> assert.equal(JSON.stringify(result),JSON.stringify(msg.methodNotSupported()));
<add> done();
<add> });
<add> controller.module.auth.destroy(session,'POST', {}, data, '127.0.0.1', testId);
<add> });
<add>});
<add> |
|
JavaScript | mit | 44642b52c002f825b4cb4bbe3e6bad1e0a7b0afe | 0 | yoannisj/gulp-wires | 'use-strict';
// require dependencies
// --------------------
// node modules
var path = require('path');
var stripPath = require('strip-path');
var _ = require('lodash');
var expander = require('expander');
var globule = require('globule');
var arrayify = require('array-ify');
var isGlob = require('is-glob');
var globjoin = require('globjoin');
// gulp
var gulp = require('gulp');
var loadPlugins = require('gulp-load-plugins');
var gutil = require('gulp-util');
var gulpif = require('gulp-if');
var debug = require('gulp-debug');
var plumber = require('gulp-plumber');
// Todo: Throw Warnings if options.debug is set to true
// Todo: Implement a 'data' method to share data through task functions
// Todo: If config is given as filepath, use config's base path as default 'build' path
// ¿ Todo: move options to config ?
// ¿ Todo: rename 'files.src' to 'files.main' ?
// ¿ Todo: rename 'dir.src' to 'dir.base' ?
var wires = {};
// =Utilities
// -----------
// Shortcuts to functionality from gulp-util and other utility plugins
// =gutil
wires.util = gutil;
wires.log = gutil.log;
wires.env = gutil.env;
// =gulpif
wires.if = gulpif;
wires.unless = function(condition, no, yes) {
// allow omitting the 'yes' argument
yes = yes || function() {};
return gulpif(condition, yes, no);
}
// =gulpdebug
wires.debug = function(options) {
// allow passing a string, used as title option
if (typeof options == 'string') {
options = { title: options };
}
return debug(options);
};
// =plumber
wires.plumber = plumber;
// =Singleton Class & Config
// -------------------------
// Exports a function that returns a unique _instance
// of the helper class (singleton/pseudo static class)
var _isSetup = false,
_currConfig,
_getFilename,
_getKeyname,
_defaults = {
tasksPath: './tasks',
optionsPath: './options',
filename: 'kebab-case',
keyname: 'kebab-case',
debug: wires.env.debug,
monkeyPatch: true,
loadTasks: true,
loadPlugins: {
config: path.join(process.cwd(), 'package.json'),
pattern: ['gulp-*', 'gulp.*', '!gulp-wires'],
camelize: false,
debug: '<%= debug %>'
},
plumber: {},
root: {
src: './src',
dest: './dest'
},
tasks: {}
};
// =main
// - injects default options
// - loads configuration and tasks
// - optionally monkey-patches gulp methods
// - returns static `wires` API object
// @param config => path to configuration file or configuration hash
module.exports = function(config) {
  // (re)initialize on the first call, or whenever a different config
  // than the previously-loaded one is passed in
  if (!_isSetup || (config && _currConfig != config)) {
    _isSetup = true;
    // load configuration
    wires.loadConfig(config);
    // load plugins (lazy-loaded by gulp-load-plugins)
    wires.plugins = loadPlugins(wires.config.loadPlugins);
    // monkey patch gulp methods
    if (wires.config.monkeyPatch) {
      _monkeyPatchGulp();
    }
    // load tasks
    if (wires.config.loadTasks) {
      wires.loadTasks();
    }
  }
  // return static helper object
  return wires;
};
// =loadConfig
// - loads a config file or hash
// @param config => path to config file or hash of config options
// @param imports => variables/modules that are available in the config's lodash templates
// - keys are variable names, values are values/functions
wires.loadConfig = function(config) {
  // default config filepath
  if (!config) config = './build/config.js';
  // store reference to latest config that was loaded.
  // This way, running loadConfig() with the same argument
  // twice in a row won't do the job twice
  _currConfig = config;
  // allow passing a filepath as config
  // (resolved against the process cwd, not against `buildPath`)
  if (typeof config == 'string') {
    config = require(path.join(process.cwd(), config));
  }
  // if resulting config is not an object
  if (typeof config !== 'object' || Array.isArray(config)) {
    // throw error: 'config must be an object, or a path to a node module that exports an object.'
    _throw(9, 'argument `config` must be a path to a config module, or a hash of configuration settings.');
  }
  // inject default configuration options
  config = _.merge({}, _defaults, config || {});
  // get default buildPath
  if (!config.buildPath)
  {
    // get config path's dirname
    if (typeof _currConfig == 'string') {
      config.buildPath = path.dirname(_currConfig);
    }
    // or default to process's cwd
    else {
      config.buildPath = process.cwd();
    }
  }
  // make sure buildPath is an absolute path
  if (!path.isAbsolute(config.buildPath)) {
    config.buildPath = path.join(process.cwd(), config.buildPath);
  }
  // get filename transform function
  _getFilename = _getTransformFunction(config.filename);
  if (!_getFilename) {
    _throw(9, '`filename` setting must be either "kebab-case", "camel-case", "snake-case" or a function.');
  }
  // get keyname transform function
  _getKeyname = _getTransformFunction(config.keyname);
  if (!_getKeyname) {
    _throw(9, '`keyname` setting must be either "kebab-case", "camel-case", "snake-case" or a function.');
  }
  // temporarily delete 'imports' from config for expander to work correctly
  var imports = _.clone(config.imports);
  delete config.imports;
  // expand and store configuration object
  // (expander resolves the '<%= ... %>' lodash templates across the whole hash)
  wires.config = expander.interface(config, {
    // make base modules available in config templates
    imports: _.assign({
      '_': _,
      'path': path,
      'env': wires.env
    }, imports || {})
  })();
  // restore imports setting
  wires.config.imports = imports;
  // chaining
  return wires;
};
// =_getTransformFunction
// - helper to get the correct string transformation function for keynames/filenames
// @param transform => the transform setting (either, 'kebab-case', 'camel-case' or a function)
// =_getTransformFunction
// - resolves the keyname/filename transform setting to a function
// @param transform => 'kebab-case', 'camel-case', 'snake-case' or a custom function
// @return the matching transform function, or undefined for anything else
function _getTransformFunction(transform) {
  // custom transform functions are used as-is
  if (typeof transform == 'function') {
    return transform;
  }
  // map the supported keywords to their lodash implementations
  switch (transform) {
    case 'kebab-case': return _.kebabCase;
    case 'camel-case': return _.camelCase;
    case 'snake-case': return _.snakeCase;
  }
  // unrecognized setting
  return undefined;
}
// =Log
// ----
// helper functions to log notices/warnings/errors to the CLI
var _logColors = {
'notice': 'white',
'warning': 'yellow',
'error': 'red'
};
// =_log
// - logs a message to the command-line using 'gutil.log'
// and applies to correct color for the log message type
// @param type => the type of message to log (either 'notice' or 'warning' or 'error')
// =_log
// - logs a message via gutil.log, colorized per message type and
//   prefixed with a '[Wires]' tag
// @param type => 'notice', 'warning' or 'error' (keys of _logColors)
function _log(type) {
  // resolve the colorizer for this message type
  var colorize = gutil.colors[_logColors[type]];
  // prefix with the plugin tag, then colorize every segment
  var segments = ['[Wires]']
    .concat(Array.prototype.slice.call(arguments, 1))
    .map(function(segment) {
      return colorize(segment);
    });
  // hand the formatted segments off to gulp-util's logger
  gutil.log.apply(gutil, segments);
}
// =_warn
// - logs a warning message in the console
// @param messages.. => list of messages to log in the warning
function _warn(message) {
  // route through _log so the message gets the 'warning' color and prefix
  _log('warning', message);
}
// =_throw
// - logs an error in the console and exits the current process
// @param code => exit code to use to exit the process
// @param message => error message to log in the console
function _throw(code, message) {
  _log('error', message);
  // terminate immediately — `code` becomes the process's exit status
  process.exit(code);
}
// =Module-Loading Helpers
// -----------------------
// helper functions to load and cache task/options files
// per-type caches for loaded modules: keys are task/plugin names,
// values are the module exports (task functions / option hashes)
var _cache = {
  options: {},
  task: {}
};
// =_load
// - loads a file given a path and basepath (defaults to config.buildPath);
// @param filePath => path to the file to load
// @param base => [optional] the context to use for 'require'
// =_load
// - requires a file, resolving relative paths against a base directory
// @param filePath => path to the file to load
// @param base => [optional] base directory (defaults to config.buildPath)
function _load( filePath, base ) {
  // absolute paths need no resolution
  if (path.isAbsolute(filePath)) {
    return require(filePath);
  }
  // resolve relative paths against the given base, or the build path
  var root = base || wires.config.buildPath;
  return require(path.join(root, filePath));
}
// =_getPath
// - returns the path for files of a given type
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' to get the function or options for
// @param filename => the name of the file defining the task function or plugin options
// =_getPath
// - builds the file path for a task or options module
// @param type => either 'task' or 'options'
// @param name => task/plugin name, transformed into a file name when
//   no explicit filename is given
// @param filename => [optional] explicit file name to use instead
function _getPath(type, name, filename) {
  // map each file type to its configured base directory
  var bases = {
    task: wires.config.tasksPath,
    options: wires.config.optionsPath
  };
  // fall back to the transformed name when no explicit filename is given
  var filePath = path.join(bases[type], filename || _getFilename(name));
  // ensure a '.js' extension
  if (!_.endsWith(filePath, '.js')) {
    filePath += '.js';
  }
  return filePath;
}
// =_exists
// - checks whether a given task/options file exists
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' for which to load config/options
// @param filename => the name of the file exporting the task function or the options
// =_exists
// - checks whether a given task/options file exists (or is already cached)
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' to check for
// @param filename => [optional] explicit file name
function _exists(type, name, filename ) {
  // anything already cached necessarily exists
  if (_cache[type].hasOwnProperty(name)) {
    return true;
  }
  // otherwise look for the module's file, relative to the build path
  var matches = globule.find(_getPath(type, name, filename), {
    cwd: wires.config.buildPath
  });
  return matches.length > 0;
}
// =_get
// returns the value exported in a given task/options file
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' for which to load config/options
// @param filename => the file exporting the wanted value
// =_get
// - returns (and memoizes) the value exported by a task/options file,
//   or undefined when no such file exists
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' to load
// @param filename => [optional] explicit file name
function _get( type, name, filename) {
  var cached = _cache[type];
  // serve from cache when available
  if (cached.hasOwnProperty(name)) {
    return cached[name];
  }
  // bail out silently when the module's file cannot be found
  // Todo: throw warning if options.debug = true
  // OR throw an error (not interesting as a feature to fail silently..)
  if (!_exists(type, name, filename)) {
    return undefined;
  }
  // require the file and memoize its exports
  var res = _load(_getPath(type, name, filename));
  cached[name] = res;
  return res;
}
// =Task Configuration
// -------------------
// cache of normalized task configurations, keyed by task name
var _taskConfigs = {};
// =getTaskConfig
// - loads task configuration hash, normalizes it and populates with defaults
// @param task => name of task for which to retrieve configuration
// @return the normalized config hash (deps/autoWatch/root/dir/files)
wires.getTaskConfig = function(task) {
  // throw warning in debug mode if no options are defined and task file does not exist
  if (wires.config.debug && !wires.hasTask(task)) {
    _warn('getTaskConfig(): could not find the task "' + task + '".');
  }
  // get cached task configurations
  if (_taskConfigs.hasOwnProperty(task)) return _taskConfigs[task];
  // get task options from global configuration
  var conf = wires.config.tasks[task];
  // understand "group" tasks, defined as dependency arrays
  // TODO: Test this
  if (Array.isArray(conf)) {
    // map dependencies to the 'deps' setting
    conf = {
      deps: conf
    };
  }
  // inject option defaults
  // NOTE(review): `_.assign` is shallow — a task that sets only e.g.
  // `dir: { src: 'x' }` replaces the whole `dir` default and silently
  // loses `dir.dest`; confirm whether `_.merge` was intended here
  conf = _.assign({
    deps: [],
    autoWatch: true,
    root: {
      src: wires.config.root.src,
      dest: wires.config.root.dest
    },
    dir: {
      src: './',
      dest: './'
    },
    files: {
      src: '**/*',
      watch: '**/*'
    }
  }, conf || {});
  // split `root` option into 'src' and 'dest' targets
  if (typeof conf.root == 'string') {
    conf.root = {
      src: conf.root,
      dest: conf.root
    };
  }
  // split `dir` option into 'src' and 'dest' targets
  if (typeof conf.dir == 'string') {
    conf.dir = {
      src: conf.dir,
      dest: conf.dir
    };
  }
  // split `files` option into 'src' and 'watch' targets
  // - accept array of globs as value for files
  if (typeof conf.files == 'string' || Array.isArray(conf.files)) {
    conf.files = {
      src: conf.files,
      watch: conf.files
    };
  }
  // cache task configuration and return it
  _taskConfigs[task] = conf;
  return conf;
};
// =Tasks
// ------
// load and register tasks from separate files
// =hasTask
// - returns whether a given task exists or not
// @param name => the name of the task to check for
// @param filename => the name of the file exporting the task function
// =hasTask
// - returns whether a given task exists or not
// @param name => the name of the task to check for
// @param filename => [optional] the name of the file exporting the task function
wires.hasTask = function( name, filename ) {
  // a task exists when it is declared in the global configuration...
  if (wires.config.tasks.hasOwnProperty(name)) {
    return true;
  }
  // ...or when a task file can be found for it
  return _exists('task', name, filename);
};
// =getTask
// - returns a given task's function as defined in task file
// @param name => the name of the task to retreive
// @param filename => the name of the file exporting the task function
wires.getTask = function(name, filename) {
  // delegate to the memoizing module loader (may return undefined)
  return _get('task', name, filename);
};
// =loadTask
// - loads a given task by registering it using `gulp.task`.
// @param name => the name of the task to load
// @param deps => array of task-names to run before
// @param fn => [optional] task function or name of file exporting it
// tracks which tasks already have a file watcher registered, so
// '--watch' mode attaches at most one watcher per task
var _watchingTasks = {};
wires.loadTask = function(name, deps, fn) {
  // get task configuration
  var conf = wires.getTaskConfig(name);
  // allow omitting the 'deps' argument
  if (arguments.length <= 2 && !Array.isArray(deps)) {
    fn = deps;
    deps = conf.deps;
  }
  // load function from task file if no task function is provided
  // or if a filename is provided instead
  if (!fn || typeof fn == 'string') fn = wires.getTask(name, fn);
  // register group tasks without a main function to just run dependencies
  if (!fn) {
    gulp.task(name, deps);
  } else {
    // register task through gulp, and watch for file changes in '--watch' mode
    // - uses a wrapper function to automate watching
    gulp.task(name, deps, function(done) {
      // start watching for file changes in '--watch' mode
      // (first run only, and only when the task opted in via `autoWatch`)
      if (wires.env.watch && !_watchingTasks[name] && conf.autoWatch) {
        gulp.watch( wires.watchGlob(name), [name] );
        _watchingTasks[name] = true;
      }
      // run task function
      return fn(done);
    });
  }
};
// =loadTasks
// - loads a set of tasks by registering it using `gulp.task`
// @param tasks => an array of task names to load, or `true` to load all tasks
wires.loadTasks = function(tasks) {
  // load all tasks found in the task folder by default
  // or when 'true' is passed
  if (!tasks || tasks === true) {
    // get absolute path to tasks directory
    var tasksPath = path.isAbsolute(wires.config.tasksPath) ? wires.config.tasksPath :
      path.join(wires.config.buildPath, wires.config.tasksPath);
    // get all tasks defined in the 'tasks' folder
    // TODO: allow sub-tasks in sub-folders
    // => need to replace _.camelCase and _.kebabCase so '/' is mapped to ':'
    var fileTasks = globule.find('*.js', {
      cwd: tasksPath
    }).map(function(file) {
      // strip the extension and convert the file name to a task key name
      return _getKeyname( path.basename(file, path.extname(file)) );
    });
    // get all tasks defined in the global configuration
    var configTasks = _.keys(wires.config.tasks);
    // get complete list of tasks
    tasks = _.union(configTasks, fileTasks);
  }
  // allow passing an array of task names
  if (Array.isArray(tasks))
  {
    tasks.forEach(function(task) {
      wires.loadTask(task);
    });
  }
};
// =Plugins
// --------
// =hasOptions
// - verifies if options are set for a given plugin
// @param name => the name of the plugin for which to check the options
// @param filename => the name of the file exporting the plugin options
wires.hasOptions = function(name, filename) {
  // options exist when a matching options file is found (or already cached)
  return _exists('options', name, filename);
};
// =options
// - returns options for a given plugin as defined in options' file
// @param name => the name of the plugin for which to retreive the options
// @param filename => the name of the file exporting the plugin options to retreive
// @param overrides => [optional] option overrides to merge into defaults
// =options
// - returns options for a given plugin as defined in options' file
// @param name => the name of the plugin for which to retrieve the options
// @param filename => the name of the file exporting the plugin options
// @param overrides => [optional] option overrides to merge into defaults
// @return a fresh options hash (safe for the caller to mutate)
wires.options = function(name, filename, overrides) {
  // allow omitting the 'filename' argument
  // NOTE(review): when omitted, filename falls back to the raw `name`
  // instead of `undefined`, bypassing the configured filename transform
  // in _getPath — confirm this is intended
  if (typeof filename == 'object') {
    overrides = filename;
    filename = name;
  }
  // load defaults from file, default to an empty object
  var defaults = _get('options', name, filename) || {};
  // FIX: merge onto a fresh object — the previous in-place
  // `_.assign(options, overrides)` mutated the cached module exports,
  // leaking overrides into every later call for the same plugin
  var options = _.assign({}, defaults);
  // override default options from file
  if (typeof overrides == 'object') {
    _.assign(options, overrides);
  }
  return options;
};
// =plugin
// - runs a given plugin with default options and returns the result
// @param name => name of the plugin to run
// @param filename => the name of the file exporting the plugin options to retreive
// @param overrides => [optional] option overrides to merge into defaults
// =plugin
// - runs a given plugin with default options and returns the result
// @param name => name of the plugin to run
// @param filename => the name of the file exporting the plugin options,
//   or `false` to use `overrides` as the full options hash
// @param overrides => [optional] option overrides to merge into defaults
wires.plugin = function(name, filename, overrides) {
  // FIX: declare locally — the original assigned an undeclared `options`,
  // leaking an implicit global (and a ReferenceError under strict mode)
  var options;
  // allow passing 'false' instead of filename to use options passed on the fly
  if (filename === false) {
    options = overrides;
  }
  // or, load plugin options from options file
  else {
    options = wires.options(name, filename, overrides);
  }
  // invoke plugin and return result
  return wires.plugins[name](options);
};
// =Paths
// ------
// get paths from task configuration objects
// cache of resolved directory paths, keyed by '<task>_<target>'
var _paths = {};
// =path
// - returns path to a task's base/dest directory (root + dir joined)
// @param task => name of task for which to return path
// @param target => which path to retrieve (either 'src'/'base'/'watch' or 'dest')
// @return the joined directory path, or undefined for unknown tasks
wires.path = function( task, target ) {
  // throw error if target argument is missing
  if (target === undefined) _throw(9, 'called `wires.path` without the `target` argument.');
  // map 'base' and 'watch' targets to 'src' dirs
  if (target == 'base' || target == 'watch') target = 'src';
  // namespace for task and target
  var ns = task + '_' + target;
  // return cached directories
  if (_paths.hasOwnProperty(ns)) return _paths[ns];
  // return 'undefined' for unexisting tasks
  if (!wires.hasTask(task)) {
    _warn('wires.path(): could not find the task "' + task + '".');
    return undefined;
  }
  // get config options (with defaults for missing options)
  var conf = wires.getTaskConfig(task);
  // resolve directory path
  var dirPath = path.join(conf.root[target], conf.dir[target]);
  // cache and return directory path
  _paths[ns] = dirPath;
  return dirPath;
};
// =base
// - returns the path to the task's src directory
// @param task => name of task for which to return src path
wires.base = function( task ) {
  // 'base' is aliased to the 'src' target by wires.path
  return wires.path(task, 'base');
};
// =dest
// - returns the path to the task's dest directory
// @param task => name of task for which to return dest path
wires.dest = function( task ) {
  return wires.path(task, 'dest');
};
// =Globs
// ------
// =_negateGlob
// - negates a glob or array of globs
// @param glob => glob to negate
// =_negateGlob
// - negates a glob or array of globs (toggles the leading '!')
// @param glob => glob string, or array of glob strings, to negate
function _negateGlob(glob) {
  // allow arrays of globs
  if (Array.isArray(glob)) {
    return glob.map(function(pattern) {
      return _negateGlob(pattern);
    });
  }
  // strip literal './' lead(s) from relative globs
  // FIX: `_.trimStart(glob, './')` treated './' as a *character set*,
  // so it also mangled '../parent' and '/absolute' prefixes — only the
  // exact './' prefix is removed now
  while (glob.indexOf('./') === 0) {
    glob = glob.slice(2);
  }
  // toggle the leading '!'
  return glob.charAt(0) === '!' ? glob.slice(1) : '!' + glob;
}
// cache of computed task globs, keyed by task name ('<task>_not' for negated names)
var _globs = {};
// =_glob(glob, options, _taskBase, _taskNegate)
// - dynamically build a glob, recognizes task names to include or ignore task files
// @param glob = the glob or array of globs to parse
// @param options [optional] = options to customize computed glob
//    - options.target - the task files to target, either 'src'/'main' or 'watch',
//    - options.base - base path to prepend (defaults to task's base path for task names)
// @param _taskBase [internal] = base path inherited from an enclosing task-name expansion
// @param _taskNegate [internal] = whether to negate the glob or not
function _glob(glob, options, _taskBase, _taskNegate) {
  // allow passing target instead of options
  if (typeof options == 'string') {
    options = { target: options };
  }
  // inject default options
  options = _.assign({}, options || {});
  // swap 'main' target to 'src'
  if (options.target == 'main') options.target = 'src';
  // allow array of globs and/or task-names
  if (Array.isArray(glob)) {
    // apply to all items in the array glob
    // - flatten because some task names might be aliased to nested array globs
    var globs = _.flatMap(glob, function(pattern) {
      return _glob(pattern, options, _taskBase, _taskNegate);
    });
    // remove task names that aliased to undefined
    globs = _.without(globs, undefined);
    // return undefined if all task names resolved to undefined
    return globs.length ? globs : undefined;
  }
  // detect task-names in glob
  var isNegated = _.startsWith(glob, '!'),
    pattern = isNegated ? glob.substr(1) : glob,
    isTask = (!isGlob(pattern) && wires.hasTask(pattern));
  // swap task names with their globs
  if (isTask) {
    var task = pattern,
      // different caching ns for negated task-names
      ns = isNegated ? task + '_not' : task;
    // return cached task globs, but only when computed with the same options
    // FIX: `_.isEqual` was called with a single argument, so the
    // comparison was always false and the cache could never hit
    if (_globs.hasOwnProperty(ns) && _.isEqual(_globs[ns].options, options)) {
      return _globs[ns].value;
    }
    // compute task files' glob
    var taskConf = wires.getTaskConfig(task),
      taskGlobs = taskConf.files;
    glob = options.target ? taskGlobs[options.target] :
      _.flatten(_.union( [taskGlobs.src], [taskGlobs.watch] ));
    _taskBase = wires.path(task, 'base');
    if (options.base && options.keepDir) {
      options.base = path.join(options.base, taskConf.dir.src);
    }
    glob = _glob(glob, options, _taskBase, isNegated);
    // cache computed glob
    // FIX: cache under the lookup namespace `ns` — caching under `task`
    // meant negated names were never cached and overwrote the plain entry
    _globs[ns] = {
      options: options,
      value: glob
    };
    return glob;
  }
  // join glob to 'base' option, or include task's base
  var base = options.base || _taskBase;
  if (base) glob = globjoin(base, glob);
  // [internal] negate glob if task-name starts with '!'
  if (_taskNegate) glob = _negateGlob(glob);
  return glob;
}
// =glob(glob, target)
// - builds a glob by replacing task-names with globs corresponding to config
// @param glob = the glob or array of globs to parse
// @param options [optional] = options to customize computed glob
// - options.target - the task files to target, either 'src'/'main' or 'watch',
// - options.base - base path to prepend (defaults to task's base path for task names)
wires.glob = function(glob, options) {
  // delegate to the internal resolver (no inherited task base / negation)
  return _glob(glob, options);
};
// =mainGlob(glob)
// - builds a glob by replacing task-names with globs for their main files
// @param glob = the glob or array of globs to parse
// =mainGlob(glob)
// - builds a glob by replacing task-names with globs for their main files
// @param glob = the glob or array of globs to parse
// @param options [optional] = extra glob options; `target` is forced to 'src'
wires.mainGlob = function(glob, options) {
  // force the 'src' target, keeping any other custom options
  if (options && typeof options == 'object') {
    options.target = 'src';
    return wires.glob(glob, options);
  }
  return wires.glob(glob, 'src');
};
// =watchGlob(glob)
// - builds a glob by replacing task-names with globs for their watch files
// @param glob = the glob or array of globs to parse
// =watchGlob(glob)
// - builds a glob by replacing task-names with globs for their watch files
// @param glob = the glob or array of globs to parse
// @param options [optional] = extra glob options; `target` is forced to 'watch'
wires.watchGlob = function(glob, options) {
  // force the 'watch' target, keeping any other custom options
  if (options && typeof options == 'object') {
    options.target = 'watch';
    return wires.glob(glob, options);
  }
  return wires.glob(glob, 'watch');
};
// =Files
// ------
// NOTE(review): `_files` is never read or written anywhere in this file —
// presumably a leftover cache placeholder for `wires.files()` results
var _files = {};
// =files
// get files corresponding to given glob
// - detects task-names and replaces them with glob corresponding to config
// @param glob = the glob or array of globs to parse
// @param options [optional] = options to customize computed glob
//    - options.target - the task files to target, either 'src'/'main' or 'watch',
//    - options.base - base path to prepend (defaults to task's base path for task names)
wires.files = function(glob, options) {
  // parse glob to detect and replace task-names
  glob = wires.glob(glob, options);
  // return an array of files that correspond to the glob
  return globule.find(glob);
};
// =mainFiles
// get main files corresponding to given glob
// - detects task-names and replaces them with glob for their main files
// @param glob = the glob or array of globs to parse
// =mainFiles
// - returns the files matching a glob, with task-names resolved to
//   their main ('src') file globs
// @param task = the glob, array of globs, or task name to resolve
// @param options [optional] = extra glob options; `target` is forced to 'src'
wires.mainFiles = function( task, options ) {
  // force the 'src' target, keeping any other custom options
  if (options && typeof options == 'object') {
    options.target = 'src';
    return wires.files(task, options);
  }
  return wires.files(task, 'src');
};
// =watchFiles
// get main files corresponding to given glob
// - detects task-names and replaces them with glob for their watch files
// @param glob = the glob or array of globs to parse
// =watchFiles
// - returns the files matching a glob, with task-names resolved to
//   their 'watch' file globs
// @param task = the glob, array of globs, or task name to resolve
// @param options [optional] = extra glob options; `target` is forced to 'watch'
wires.watchFiles = function( task, options ) {
  // force the 'watch' target, keeping any other custom options
  if (options && typeof options == 'object') {
    options.target = 'watch';
    return wires.files(task, options);
  }
  return wires.files(task, 'watch');
};
// =Gulp
// -----
var _gulpIsMonkeyPatched = false;
// =_monkeyPatchGulp
// - wraps gulp's 'task', 'src', 'watch' and 'dest' methods so they
//   understand task names, task-file lookup, and automatic plumber handling
function _monkeyPatchGulp() {
  // only monkey patch once!
  if (_gulpIsMonkeyPatched) return;
  _gulpIsMonkeyPatched = true;
  // store reference to original gulp methods
  var _gulpAPI = {
    task: gulp.task,
    src: gulp.src,
    watch: gulp.watch,
    dest: gulp.dest
  };
  // =gulp.task
  // - falls back to configured deps and the task file when omitted
  gulp.task = function(name, deps, fn) {
    // allow omitting the 'deps' argument
    if (!Array.isArray(deps)) {
      fn = deps;
      deps = wires.getTaskConfig(name).deps || [];
    }
    // load function from task file if no task function is provided
    // or if a filename is provided instead
    if (!fn || typeof fn == 'string') {
      fn = wires.getTask(name, fn);
    }
    // delegate to original `gulp.task` method
    return _gulpAPI.task.call(gulp, name, deps, fn);
  };
  // =gulp.src
  // - resolves task names to their source globs, pipes through plumber
  gulp.src = function(globs, options) {
    // allow omitting the 'options' argument
    options = options || {};
    // use 'wires.src' to replace task names with globs in config
    globs = wires.mainGlob(globs);
    // delegate to original `gulp.src`
    var stream = _gulpAPI.src.call(gulp, globs, options);
    // resolve plumber options: the per-call setting wins over global config
    // FIX: the original `options.plumber || wires.config.plumber || {}`
    // could never honour `options.plumber === false`, despite the comment
    // promising that 'false' disables plumber
    var plumberOpts = (options.plumber !== undefined) ? options.plumber :
      (wires.config.plumber !== undefined ? wires.config.plumber : {});
    // allow setting 'options.plumber' (or config.plumber) to 'false'
    // to disable plumber
    if (plumberOpts !== false) {
      return stream
        .pipe(wires.plumber(plumberOpts || {}));
    }
    return stream;
  };
  // =gulp.watch
  gulp.watch = function(globs, options, handlers) {
    // use 'wires.watch' to replace task names with globs in config
    globs = wires.watchGlob(globs);
    // delegate to original `gulp.watch`
    return _gulpAPI.watch.call(gulp, globs, options, handlers);
  };
  // =gulp.dest
  gulp.dest = function(path, options) {
    // replace task name with destination path
    if (wires.hasTask(path)) path = wires.dest(path);
    // delegate to original `gulp.dest`
    return _gulpAPI.dest.call(gulp, path, options);
  };
}
// require dependencies
// --------------------
// node modules
var path = require('path');
var stripPath = require('strip-path');
var _ = require('lodash');
var expander = require('expander');
var globule = require('globule');
var arrayify = require('array-ify');
var isGlob = require('is-glob');
var globjoin = require('globjoin');
// gulp
var gulp = require('gulp');
var loadPlugins = require('gulp-load-plugins');
var gutil = require('gulp-util');
var gulpif = require('gulp-if');
var debug = require('gulp-debug');
var plumber = require('gulp-plumber');
// Todo: Throw Warnings if options.debug is set to true
// Todo: Implement a 'data' method to share data through task functions
// Todo: If config is given as filepath, use config's base path as default 'build' path
// ¿ Todo: move options to config ?
// ¿ Todo: rename 'files.src' to 'files.main' ?
// ¿ Todo: rename 'dir.src' to 'dir.base' ?
var wires = {};
// =Utilities
// -----------
// Shortcuts to functionality from gulp-util and other utility plugins
// =gutil
wires.util = gutil;
wires.log = gutil.log;
wires.env = gutil.env;
// =gulpif
wires.if = gulpif;
// =unless
// - inverse of gulp-if: runs the `no` stream when `condition` fails
// @param condition => condition forwarded to gulp-if
// @param no => stream to pipe through when the condition is falsy
// @param yes => [optional] stream for when the condition holds (pass-through by default)
wires.unless = function(condition, no, yes) {
  // substitute a no-op for a missing 'yes' branch
  if (!yes) {
    yes = function() {};
  }
  return gulpif(condition, yes, no);
};
// =gulpdebug
wires.debug = function(options) {
// allow passing a string, used as title option
if (typeof options == 'string') {
options = { title: options };
}
return debug(options);
};
// =plumber
wires.plumber = plumber;
// =Singleton Class & Config
// -------------------------
// Exports a function that returns a unique _instance
// of the helper class (singleton/pseudo static class)
var _isSetup = false,
_currConfig,
_getFilename,
_getKeyname,
_defaults = {
tasksPath: './tasks',
optionsPath: './options',
filename: 'kebab-case',
keyname: 'kebab-case',
debug: wires.env.debug,
monkeyPatch: true,
loadTasks: true,
loadPlugins: {
config: path.join(process.cwd(), 'package.json'),
pattern: ['gulp-*', 'gulp.*', '!gulp-wires'],
camelize: false,
debug: '<%= debug %>'
},
plumber: {},
root: {
src: './src',
dest: './dest'
},
tasks: {}
};
// =main
// - injects default options
// - loads configuration and tasks
// - optionally monkey-patches gulp methods
// - returns static `wires` API object
// @param config => path to configuration file or configuration hash
module.exports = function(config) {
// if called first time, set up configuration
if (!_isSetup || (config && _currConfig != config)) {
_isSetup = true;
// load configuration
wires.loadConfig(config);
// load plugins
wires.plugins = loadPlugins(wires.config.loadPlugins);
// monkey patch gulp methods
if (wires.config.monkeyPatch) {
_monkeyPatchGulp();
}
// load tasks
if (wires.config.loadTasks) {
wires.loadTasks();
}
}
// return static helper object
return wires;
};
// =loadConfig
// - loads a config file or hash
// @param config => path to config file or hash of config options
// @param imports => variables/modules that are available in the config's lodash templates
// - keys are variable names, values are values/functions
wires.loadConfig = function(config) {
// default config filepath
if (!config) config = './build/config.js';
// store reference to latest config that was loaded.
// This way, running loadConfig() with the same argument
// twice in a row won't do the job twice
_currConfig = config;
// allow passing a filepath as config
if (typeof config == 'string') {
config = require(path.join(process.cwd(), config));
}
// if resulting config is not an object
if (typeof config !== 'object' || Array.isArray(config)) {
// throw error: 'config must be an object, or a path to a node module that exports an object.'
_throw(9, 'argument `config` must be a path to a config module, or a hash of configuration settings.');
}
// inject default configuration options
config = _.merge({}, _defaults, config || {});
// get default buildPath
if (!config.buildPath)
{
// get config path's dirname
if (typeof _currConfig == 'string') {
config.buildPath = path.dirname(_currConfig);
}
// or default to process's cwd
else {
config.buildPath = process.cwd();
}
}
// make sure buildPath is an absolute path
if (!path.isAbsolute(config.buildPath)) {
config.buildPath = path.join(process.cwd(), config.buildPath);
}
// get filename transform function
_getFilename = _getTransformFunction(config.filename);
if (!_getFilename) {
_throw(9, '`filename` setting must be either "kebab-case", "camel-case", "snake-case" or a function.');
}
// get keyname transform function
_getKeyname = _getTransformFunction(config.keyname);
if (!_getKeyname) {
_throw(9, '`keyname` setting must be either "kebab-case", "camel-case", "snake-case" or a function.');
}
// temporarily delete 'imports' from config for expander to work correctly
var imports = _.clone(config.imports);
delete config.imports;
// expand and store configuration object
wires.config = expander.interface(config, {
// make base modules available in config templates
imports: _.assign({
'_': _,
'path': path,
'env': wires.env
}, imports || {})
})();
// restore imports setting
wires.config.imports = imports;
// chaining
return wires;
};
// =_getTransformFunction
// - helper to get the correct string transformation function for keynames/filenames
// @param transform => the transform setting (either, 'kebab-case', 'camel-case' or a function)
function _getTransformFunction(transform) {
// allow the 'kebab-case' keyword
if (transform == 'kebab-case') {
return _.kebabCase;
}
// allow the 'camel-case' keyword
if (transform == 'camel-case') {
return _.camelCase;
}
if (transform == 'snake-case') {
return _.snakeCase;
}
// allow setting transform as a function
if (typeof transform == 'function') {
return transform;
}
return undefined;
}
// =Log
// ----
// helper functions to log notices/warnings/errors to the CLI
var _logColors = {
'notice': 'white',
'warning': 'yellow',
'error': 'red'
};
// =_log
// - logs a message to the command-line using 'gutil.log'
// and applies to correct color for the log message type
// @param type => the type of message to log (either 'notice' or 'warning' or 'error')
function _log(type) {
var color = _logColors[type],
colorize = gutil.colors[color],
// prefix message with 'Wires'
segments = [colorize('[Wires]')];
// get warning segments and format them
for (var i = 1, ln = arguments.length; i < ln; i++) {
segments.push(colorize(arguments[i]));
}
// add 'Wires:' in front of warning
gutil.log.apply(gutil, segments);
}
// =_warn
// - logs a warning message in the console
// @param messages.. => list of messages to log in the warning
function _warn(message) {
_log('warning', message);
}
// =_throw
// - logs an error in the console and exits the current process
// @param code => exit code to use to exit the process
// @param message => error message to log in the console
function _throw(code, message) {
  _log('error', message);
  // terminate immediately — `code` becomes the process's exit status
  process.exit(code);
}
// =Module-Loading Helpers
// -----------------------
// helper functions to load and cache task/options files
var _cache = {
options: {},
task: {}
};
// =_load
// - loads a file given a path and basepath (defaults to config.buildPath);
// @param filePath => path to the file to load
// @param base => [optional] the context to use for 'require'
function _load( filePath, base ) {
// load absolute paths normally
if (path.isAbsolute(filePath)) return require(filePath);
// load path relatively to context setting
return require( path.join(base || wires.config.buildPath, filePath) );
}
// =_getPath
// - returns the path for files of a given type
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' to get the function or options for
// @param filename => the name of the file defining the task function or plugin options
function _getPath(type, name, filename) {
// get basePath
var basePath;
switch (type) {
case 'task':
basePath = wires.config.tasksPath;
break;
case 'options':
basePath = wires.config.optionsPath;
break;
}
// get complete file path
var filePath = path.join( basePath, filename || _getFilename(name) );
// add file extension
return _.endsWith(filePath, '.js') ? filePath : filePath + '.js';
}
// =_exists
// - checks whether a given task/options file exists
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' for which to load config/options
// @param filename => the name of the file exporting the task function or the options
function _exists(type, name, filename ) {
// cached files/modules exist
if (_cache[type].hasOwnProperty(name)) return true;
// search for module's file
var filePath = _getPath(type, name, filename),
file = globule.find(filePath, {
cwd: wires.config.buildPath
});
// return whether file was found or not
return !!(file.length);
}
// =_get
// returns the value exported in a given task/options file
// @param type => either 'task' or 'options'
// @param name => the name of the 'task' or 'plugin' for which to load config/options
// @param filename => the file exporting the wanted value
function _get( type, name, filename) {
// return cached module
if (_cache[type].hasOwnProperty(name)) return _cache[type][name];
else if (!_exists(type, name, filename)) {
// Todo: throw warning if options.debug = true
// OR throw an error (not interesting as a feature to fail silently..)
return undefined;
}
// load file
var res = _load( _getPath(type, name, filename) );
// cache and return result
_cache[type][name] = res;
return res;
}
// =Task Configuration
// -------------------
// memoized, normalized per-task configuration hashes
var _taskConfigs = {};
// =getTaskConfig
// - loads a task's configuration hash, normalizes it and populates it with defaults
// @param task => name of the task for which to retrieve the configuration
wires.getTaskConfig = function(task) {
  // throw warning in debug mode if no options are defined and task file does not exist
  if (wires.config.debug && !wires.hasTask(task)) {
    _warn('getTaskConfig(): could not find the task "' + task + '".');
  }
  // return the cached, already-normalized configuration when available
  if (_taskConfigs.hasOwnProperty(task)) return _taskConfigs[task];
  // get task options from global configuration
  var conf = wires.config.tasks[task];
  // understand "group" tasks, defined as dependency arrays
  // TODO: Test this
  if (Array.isArray(conf)) {
    // map dependencies to the 'deps' setting
    conf = {
      deps: conf
    };
  }
  // inject option defaults
  // NOTE(review): `_.assign` is shallow — a partial 'root'/'dir'/'files'
  // object in the user config replaces the default object wholesale (e.g.
  // `root: {src: 'x'}` loses the default `root.dest`). Confirm this is intended.
  conf = _.assign({
    deps: [],
    autoWatch: true,
    root: {
      src: wires.config.root.src,
      dest: wires.config.root.dest
    },
    dir: {
      src: './',
      dest: './'
    },
    files: {
      src: '**/*',
      watch: '**/*'
    }
  }, conf || {});
  // expand a string `root` option into 'src' and 'dest' targets
  if (typeof conf.root == 'string') {
    conf.root = {
      src: conf.root,
      dest: conf.root
    };
  }
  // expand a string `dir` option into 'src' and 'dest' targets
  if (typeof conf.dir == 'string') {
    conf.dir = {
      src: conf.dir,
      dest: conf.dir
    };
  }
  // expand a `files` option into 'src' and 'watch' targets
  // - accept array of globs as value for files
  if (typeof conf.files == 'string' || Array.isArray(conf.files)) {
    conf.files = {
      src: conf.files,
      watch: conf.files
    };
  }
  // cache task configuration and return it
  _taskConfigs[task] = conf;
  return conf;
};
// =Tasks
// ------
// load and register tasks from separate files
// =hasTask
// - checks whether a task is known, via global configuration or a task file
// @param name => the name of the task to check for
// @param filename => [optional] the name of the file exporting the task function
wires.hasTask = function( name, filename ) {
  // a task exists when it is configured globally…
  if (wires.config.tasks.hasOwnProperty(name)) {
    return true;
  }
  // …or when a matching task file can be found
  return _exists('task', name, filename);
};
// =getTask
// - returns a given task's function as exported by its task file
//   (returns undefined when neither a cached nor an on-disk module exists)
// @param name => the name of the task to retrieve
// @param filename => [optional] the name of the file exporting the task function
wires.getTask = function(name, filename) {
  return _get('task', name, filename);
};
// =loadTask
// - registers a single task with gulp, resolving its function and dependencies
// @param name => the name of the task to load
// @param deps => [optional] array of task-names to run before this one
//   (defaults to the 'deps' setting of the task's configuration)
// @param fn => [optional] task function, or name of the file exporting it
// tasks that already attached a watcher in '--watch' mode
// (guards against registering the same watcher twice)
var _watchingTasks = {};
wires.loadTask = function(name, deps, fn) {
  // get task configuration
  var conf = wires.getTaskConfig(name);
  // allow omitting the 'deps' argument
  if (arguments.length <= 2 && !Array.isArray(deps)) {
    fn = deps;
    deps = conf.deps;
  }
  // load function from task file if no task function is provided
  // or if a filename is provided instead
  if (!fn || typeof fn == 'string') fn = wires.getTask(name, fn);
  // register group tasks without a main function to just run dependencies
  if (!fn) {
    gulp.task(name, deps);
  } else {
    // register task through gulp, and watch for file changes in '--watch' mode
    // - uses a wrapper function to automate watching
    gulp.task(name, deps, function(done) {
      // attach the watcher on first run only, and only if the task
      // opted in via the 'autoWatch' setting
      if (wires.env.watch && !_watchingTasks[name] && conf.autoWatch) {
        gulp.watch( wires.watchGlob(name), [name] );
        _watchingTasks[name] = true;
      }
      // run task function
      return fn(done);
    });
  }
};
// =loadTasks
// - registers a set of tasks with gulp via `wires.loadTask`
// @param tasks => an array of task names to load, or `true` (or nothing)
//   to load every task found in the tasks folder and global configuration
wires.loadTasks = function(tasks) {
  // load all tasks found in the task folder by default
  // or when 'true' is passed
  if (!tasks || tasks === true) {
    // get absolute path to tasks directory
    var tasksPath = path.isAbsolute(wires.config.tasksPath) ? wires.config.tasksPath :
      path.join(wires.config.buildPath, wires.config.tasksPath);
    // map each '*.js' file in the tasks folder to a task keyname
    // TODO: allow sub-tasks in sub-folders
    // => need to replace _.camelCase and _.kebabCase so '/' is mapped to ':'
    var fileTasks = globule.find('*.js', {
      cwd: tasksPath
    }).map(function(file) {
      return _getKeyname( path.basename(file, path.extname(file)) );
    });
    // get all tasks defined in the global configuration
    var configTasks = _.keys(wires.config.tasks);
    // merge both lists (duplicates removed)
    tasks = _.union(configTasks, fileTasks);
  }
  // allow passing an array of task names
  if (Array.isArray(tasks))
  {
    tasks.forEach(function(task) {
      wires.loadTask(task);
    });
  }
};
// =Plugins
// --------
// =hasOptions
// - verifies whether an options file exists (or is cached) for a given plugin
// @param name => the name of the plugin for which to check the options
// @param filename => [optional] the name of the file exporting the plugin options
wires.hasOptions = function(name, filename) {
  return _exists('options', name, filename);
};
// =options
// - returns options for a given plugin as defined in the options' file
// @param name => the name of the plugin for which to retrieve the options
// @param filename => [optional] the name of the file exporting the plugin options
// @param overrides => [optional] option overrides to merge into the defaults
wires.options = function(name, filename, overrides) {
  // allow omitting the 'filename' argument
  if (typeof filename == 'object') {
    overrides = filename;
    // NOTE(review): falls back to the raw plugin name as filename — confirm
    // this should not rather stay undefined so `_getFilename(name)` applies
    filename = name;
  }
  // load defaults from file, default to an empty object
  var defaults = _get('options', name, filename) || {};
  // FIX: merge into a fresh copy — the previous in-place `_.assign` mutated
  // the cached defaults object, leaking overrides into every later call
  var options = _.assign({}, defaults);
  if (typeof overrides == 'object') {
    _.assign(options, overrides);
  }
  return options;
};
// =plugin
// - runs a given plugin with its default options and returns the result
// @param name => name of the plugin to run
// @param filename => the name of the file exporting the plugin options,
//   or `false` to skip the options file and use `overrides` as-is
// @param overrides => [optional] option overrides to merge into defaults
wires.plugin = function(name, filename, overrides) {
  // FIX: declare locally — `options` was assigned without `var`, creating an
  // accidental global (and a ReferenceError in strict mode)
  var options;
  // allow passing 'false' instead of filename to use options passed on the fly
  if (filename === false) {
    options = overrides;
  }
  // or, load plugin options from options file
  else {
    options = wires.options(name, filename, overrides);
  }
  // invoke plugin and return result
  return wires.plugins[name](options);
};
// =Paths
// ------
// memoized directory paths, keyed by '<task>_<target>'
var _paths = {};
// =path
// - returns the path to a task's base/dest directory
// @param task => name of the task for which to return the path
// @param target => which path to retrieve (either 'src'/'base'/'watch' or 'dest')
wires.path = function( task, target ) {
  // throw error if target argument is missing
  if (target === undefined) _throw(9, 'called `wires.path` without the `target` argument.');
  // map 'base' and 'watch' targets to 'src' dirs
  if (target == 'base' || target == 'watch') target = 'src';
  // cache key combining task and target
  var ns = task + '_' + target;
  // return cached directories
  if (_paths.hasOwnProperty(ns)) return _paths[ns];
  // return 'undefined' for non-existing tasks (warn instead of failing)
  if (!wires.hasTask(task)) {
    _warn('wires.path(): could not find the task "' + task + '".');
    return undefined;
  }
  // get config options (with defaults for missing options)
  var conf = wires.getTaskConfig(task);
  // resolve directory path (root + task sub-directory for this target)
  var dirPath = path.join(conf.root[target], conf.dir[target]);
  // cache and return directory path
  _paths[ns] = dirPath;
  return dirPath;
};
// =base
// - shorthand returning the path to a task's src directory
// @param task => name of the task for which to return the src path
wires.base = function( task ) {
  // 'base' is resolved to the 'src' target by wires.path()
  var srcPath = wires.path(task, 'base');
  return srcPath;
};
// =dest
// - shorthand returning the path to a task's dest directory
// @param task => name of the task for which to return the dest path
wires.dest = function( task ) {
  var destPath = wires.path(task, 'dest');
  return destPath;
};
// =Globs
// ------
// =_negateGlob
// - negates a glob or array of globs
// @param glob => glob to negate
function _negateGlob(glob) {
// allow arrays of globs
if (Array.isArray(glob)) {
return _.map(glob, function(pattern) {
return _negateGlob(pattern);
});
}
// remove lead from relative globs
glob = _.trimStart(glob, './');
// negate glob
return _.startsWith(glob, '!') ? glob.substr(1) : '!' + glob;
}
// memoized computed globs, keyed by task name ('<task>_not' when negated)
var _globs = {};
// =_glob(glob, options, _taskBase, _taskNegate)
// - dynamically builds a glob, recognizing task names to include or ignore task files
// @param glob = the glob or array of globs to parse
// @param options [optional] = options to customize the computed glob
//   - options.target - the task files to target, either 'src'/'main' or 'watch'
//   - options.base - base path to prepend (defaults to task's base path for task names)
// @param _taskBase [internal] = base path inherited from an enclosing task name
// @param _taskNegate [internal] = whether to negate the glob or not
function _glob(glob, options, _taskBase, _taskNegate) {
  // allow passing target instead of options
  if (typeof options == 'string') {
    options = { target: options };
  }
  // work on a copy so the caller's options hash is never mutated
  options = _.assign({}, options || {});
  // swap 'main' target to 'src'
  if (options.target == 'main') options.target = 'src';
  // allow array of globs and/or task-names
  if (Array.isArray(glob)) {
    // apply to all items in the array glob
    // - flatten because some task names might be aliased to nested array globs
    var globs = _.flatMap(glob, function(pattern) {
      return _glob(pattern, options, _taskBase, _taskNegate);
    });
    // remove task names that aliased to undefined
    globs = _.without(globs, undefined);
    // return undefined if all task names resolved to undefined
    return globs.length ? globs : undefined;
  }
  // detect task-names in glob
  var isNegated = _.startsWith(glob, '!'),
    pattern = isNegated ? glob.substr(1) : glob,
    isTask = (!isGlob(pattern) && wires.hasTask(pattern));
  // swap task names with their globs
  if (isTask) {
    var task = pattern,
      // different caching ns for negated task-names
      ns = isNegated ? task + '_not' : task;
    // return cached task globs
    // FIX: compare the cached options against the *current* options —
    // `_.isEqual` was called with a single argument, so the cache never hit
    if (_globs.hasOwnProperty(ns) && _.isEqual(_globs[ns].options, options)) {
      return _globs[ns].value;
    }
    // compute task files' glob
    var taskConf = wires.getTaskConfig(task),
      taskGlobs = taskConf.files;
    glob = options.target ? taskGlobs[options.target] :
      _.flatten(_.union( [taskGlobs.src], [taskGlobs.watch] ));
    _taskBase = wires.path(task, 'base');
    if (options.base && options.keepDir) {
      options.base = path.join(options.base, taskConf.dir.src);
    }
    glob = _glob(glob, options, _taskBase, isNegated);
    // cache computed glob
    // FIX: store under the same namespaced key used for lookup
    // ('<task>_not' for negated names) — it was cached under the bare name
    _globs[ns] = {
      options: options,
      value: glob
    };
    return glob;
  }
  // join glob to 'base' option, or include task's base
  var base = options.base || _taskBase;
  if (base) glob = globjoin(base, glob);
  // [internal] negate glob if task-name starts with '!'
  if (_taskNegate) glob = _negateGlob(glob);
  return glob;
}
// =glob(glob, options)
// - public entry point: builds a glob by replacing task-names with the globs
//   derived from their configuration (thin wrapper around the internal _glob)
// @param glob = the glob or array of globs to parse
// @param options [optional] = options to customize the computed glob
//  - options.target - the task files to target, either 'src'/'main' or 'watch'
//  - options.base - base path to prepend (defaults to task's base path for task names)
wires.glob = function(glob, options) {
  return _glob(glob, options);
};
// =mainGlob(glob)
// - builds a glob by replacing task-names with globs for their main files
// @param glob = the glob or array of globs to parse
// @param options [optional] = extra options forwarded to wires.glob
wires.mainGlob = function(glob, options) {
  // force the 'src' target, whatever the caller passed
  if (!options || typeof options != 'object') {
    options = 'src';
  } else {
    options.target = 'src';
  }
  return wires.glob(glob, options);
};
// =watchGlob(glob)
// - builds a glob by replacing task-names with globs for their watch files
// @param glob = the glob or array of globs to parse
// @param options [optional] = extra options forwarded to wires.glob
wires.watchGlob = function(glob, options) {
  // force the 'watch' target, whatever the caller passed
  if (!options || typeof options != 'object') {
    options = 'watch';
  } else {
    options.target = 'watch';
  }
  return wires.glob(glob, options);
};
// =Files
// ------
// NOTE(review): `_files` is never read or written in this chunk — confirm it
// is used elsewhere before relying on (or removing) it
var _files = {};
// =files
// - returns the files matching a given glob
// - detects task-names and replaces them with the globs from their config
// @param glob = the glob or array of globs to parse
// @param options [optional] = options to customize computed glob
// - options.target - the task files to target, either 'src'/'main' or 'watch',
// - options.base - base path to prepend (defaults to task's base path for task names)
wires.files = function(glob, options) {
  // parse glob to detect and replace task-names
  glob = wires.glob(glob, options);
  // return an array of files that correspond to the glob
  return globule.find(glob);
};
// =mainFiles
// - resolves a glob (or task name) to the list of its main source files
// @param task = the glob, array of globs, or task name to resolve
// @param options [optional] = extra options forwarded to wires.files
wires.mainFiles = function( task, options ) {
  // force the 'src' target, whatever the caller passed
  if (!options || typeof options != 'object') {
    options = 'src';
  } else {
    options.target = 'src';
  }
  return wires.files(task, options);
};
// =watchFiles
// - resolves a glob (or task name) to the list of its watched files
// @param task = the glob, array of globs, or task name to resolve
// @param options [optional] = extra options forwarded to wires.files
wires.watchFiles = function( task, options ) {
  // force the 'watch' target, whatever the caller passed
  if (!options || typeof options != 'object') {
    options = 'watch';
  } else {
    options.target = 'watch';
  }
  return wires.files(task, options);
};
// =Gulp
// -----
var _gulpIsMonkeyPatched = false;
// =_monkeyPatchGulp
// - wraps gulp's task/src/watch/dest methods so task names and globs are
//   resolved through wires before being handed to gulp
function _monkeyPatchGulp() {
  // only monkey patch once!
  if (_gulpIsMonkeyPatched) return;
  _gulpIsMonkeyPatched = true;
  // store reference to original gulp methods
  var _gulpAPI = {
    task: gulp.task,
    src: gulp.src,
    watch: gulp.watch,
    dest: gulp.dest
  };
  // =gulp.task
  gulp.task = function(name, deps, fn) {
    // allow omitting the 'deps' argument
    if (!Array.isArray(deps)) {
      fn = deps;
      deps = wires.getTaskConfig(name).deps || [];
    }
    // load function from task file if no task function is provided
    // or if a filename is provided instead
    if (!fn || typeof fn == 'string') {
      fn = wires.getTask(name, fn);
    }
    // delegate to original `gulp.task` method
    return _gulpAPI.task.call(gulp, name, deps, fn);
  };
  // =gulp.src
  gulp.src = function(globs, options) {
    // allow omitting the 'options' argument
    options = options || {};
    // resolve task names to their main-file globs
    globs = wires.mainGlob(globs);
    // delegate to original `gulp.src`
    var stream = _gulpAPI.src.call(gulp, globs, options);
    // automatically run plumber in debug mode
    if (wires.config.debug) {
      var plumberOpts = options.plumber || wires.config.plumber || {};
      return stream
        .pipe(wires.plumber(plumberOpts));
    }
    return stream;
  };
  // =gulp.watch
  gulp.watch = function(globs, options, handlers) {
    // resolve task names to their watch-file globs
    globs = wires.watchGlob(globs);
    // delegate to original `gulp.watch`
    return _gulpAPI.watch.call(gulp, globs, options, handlers);
  };
  // =gulp.dest
  gulp.dest = function(path, options) {
    // replace task name with destination path
    if (wires.hasTask(path)) path = wires.dest(path);
    // delegate to original `gulp.dest`
    return _gulpAPI.dest.call(gulp, path, options);
  };
// FIX: restored the clean closing brace — the original line was corrupted
// by unrelated text fused onto it ("} | use plumber by default")
}
| index.js | use plumber by default | <ide><path>ndex.js
<ide> var stream = _gulpAPI.src.call(gulp, globs, options);
<ide>
<ide> // automatically run plumber in debug mode
<del> if (wires.config.debug) {
<del> var plumberOpts = options.plumber || wires.config.plumber || {};
<del>
<add> var plumberOpts = options.plumber || wires.config.plumber || {};
<add>
<add> // allow setting 'options.plumber' to 'false' to disable plumber
<add> if (plumberOpts) {
<ide> return stream
<ide> .pipe(wires.plumber(plumberOpts));
<ide> } |
|
Java | apache-2.0 | d7b1f3c272310ce46075ad9e41925c2d4577016f | 0 | Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2020 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.editors.sql;
import org.eclipse.core.filesystem.EFS;
import org.eclipse.core.filesystem.IFileStore;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFileState;
import org.eclipse.core.runtime.*;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.jface.action.*;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.text.*;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.swt.events.*;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.*;
import org.eclipse.ui.actions.CompoundContributionItem;
import org.eclipse.ui.ide.FileStoreEditorInput;
import org.eclipse.ui.texteditor.DefaultRangeIndicator;
import org.eclipse.ui.texteditor.ITextEditorActionConstants;
import org.eclipse.ui.texteditor.rulers.IColumnSupport;
import org.eclipse.ui.texteditor.rulers.RulerColumnDescriptor;
import org.eclipse.ui.texteditor.rulers.RulerColumnRegistry;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.ModelPreferences;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.app.DBPProject;
import org.jkiss.dbeaver.model.data.DBDDataFilter;
import org.jkiss.dbeaver.model.data.DBDDataReceiver;
import org.jkiss.dbeaver.model.exec.*;
import org.jkiss.dbeaver.model.exec.plan.DBCPlan;
import org.jkiss.dbeaver.model.exec.plan.DBCPlanStyle;
import org.jkiss.dbeaver.model.exec.plan.DBCQueryPlanner;
import org.jkiss.dbeaver.model.impl.DefaultServerOutputReader;
import org.jkiss.dbeaver.model.impl.sql.SQLQueryTransformerCount;
import org.jkiss.dbeaver.model.messages.ModelMessages;
import org.jkiss.dbeaver.model.navigator.DBNUtils;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceListener;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.model.qm.QMUtils;
import org.jkiss.dbeaver.model.runtime.AbstractJob;
import org.jkiss.dbeaver.model.runtime.DBRProgressListener;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.model.sql.*;
import org.jkiss.dbeaver.model.sql.data.SQLQueryDataContainer;
import org.jkiss.dbeaver.model.struct.DBSDataContainer;
import org.jkiss.dbeaver.model.struct.DBSInstance;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.registry.DataSourceUtils;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.sql.SQLResultsConsumer;
import org.jkiss.dbeaver.runtime.ui.UIServiceConnections;
import org.jkiss.dbeaver.tools.transfer.IDataTransferProducer;
import org.jkiss.dbeaver.tools.transfer.database.DatabaseTransferProducer;
import org.jkiss.dbeaver.tools.transfer.ui.wizard.DataTransferWizard;
import org.jkiss.dbeaver.ui.*;
import org.jkiss.dbeaver.ui.controls.*;
import org.jkiss.dbeaver.ui.controls.resultset.*;
import org.jkiss.dbeaver.ui.controls.resultset.internal.ResultSetMessages;
import org.jkiss.dbeaver.ui.css.CSSUtils;
import org.jkiss.dbeaver.ui.css.DBStyles;
import org.jkiss.dbeaver.ui.dialogs.ConfirmationDialog;
import org.jkiss.dbeaver.ui.dialogs.EnterNameDialog;
import org.jkiss.dbeaver.ui.editors.DatabaseEditorUtils;
import org.jkiss.dbeaver.ui.editors.EditorUtils;
import org.jkiss.dbeaver.ui.editors.INonPersistentEditorInput;
import org.jkiss.dbeaver.ui.editors.StringEditorInput;
import org.jkiss.dbeaver.ui.editors.sql.execute.SQLQueryJob;
import org.jkiss.dbeaver.ui.editors.sql.handlers.SQLNavigatorContext;
import org.jkiss.dbeaver.ui.editors.sql.internal.SQLEditorMessages;
import org.jkiss.dbeaver.ui.editors.sql.log.SQLLogPanel;
import org.jkiss.dbeaver.ui.editors.sql.plan.ExplainPlanViewer;
import org.jkiss.dbeaver.ui.editors.sql.registry.SQLPresentationDescriptor;
import org.jkiss.dbeaver.ui.editors.sql.registry.SQLPresentationPanelDescriptor;
import org.jkiss.dbeaver.ui.editors.sql.registry.SQLPresentationRegistry;
import org.jkiss.dbeaver.ui.editors.text.ScriptPositionColumn;
import org.jkiss.dbeaver.ui.navigator.INavigatorModelView;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.dbeaver.utils.PrefUtils;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import org.jkiss.utils.ArrayUtils;
import org.jkiss.utils.CommonUtils;
import java.io.*;
import java.net.URI;
import java.util.List;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* SQL Executor
*/
public class SQLEditor extends SQLEditorBase implements
IDataSourceContainerProviderEx,
DBPEventListener,
ISaveablePart2,
DBPDataSourceTask,
DBPDataSourceHandler,
DBPPreferenceListener,
ISmartTransactionManager
{
    // throttling period (ms) for script-execution UI refreshes
    private static final long SCRIPT_UI_UPDATE_PERIOD = 100;
    private static final int MAX_PARALLEL_QUERIES_NO_WARN = 1;
    // child indexes inside the editor/presentation sash
    private static final int SQL_EDITOR_CONTROL_INDEX = 1;
    private static final int EXTRA_CONTROL_INDEX = 0;
    private static final String PANEL_ITEM_PREFIX = "SQLPanelToggle:";
    // comment prefix used to embed a datasource binding in the script text
    private static final String EMBEDDED_BINDING_PREFIX = "-- CONNECTION: ";
    private static final Pattern EMBEDDED_BINDING_PREFIX_PATTERN = Pattern.compile("--\\s*CONNECTION:\\s*(.+)", Pattern.CASE_INSENSITIVE);
    // result-tab icons
    private static Image IMG_DATA_GRID = DBeaverIcons.getImage(UIIcon.SQL_PAGE_DATA_GRID);
    private static Image IMG_DATA_GRID_LOCKED = DBeaverIcons.getImage(UIIcon.SQL_PAGE_DATA_GRID_LOCKED);
    private static Image IMG_EXPLAIN_PLAN = DBeaverIcons.getImage(UIIcon.SQL_PAGE_EXPLAIN_PLAN);
    private static Image IMG_LOG = DBeaverIcons.getImage(UIIcon.SQL_PAGE_LOG);
    private static Image IMG_OUTPUT = DBeaverIcons.getImage(UIIcon.SQL_PAGE_OUTPUT);
    private static Image IMG_OUTPUT_ALERT = DBeaverIcons.getImage(UIIcon.SQL_PAGE_OUTPUT_ALERT);
    // side toolbar contribution ids/groups
    private static final String TOOLBAR_CONTRIBUTION_ID = "toolbar:org.jkiss.dbeaver.ui.editors.sql.toolbar.side";
    private static final String TOOLBAR_GROUP_TOP = "top";
    private static final String TOOLBAR_GROUP_ADDITIONS = IWorkbenchActionConstants.MB_ADDITIONS;
    private static final String TOOLBAR_GROUP_PANELS = "panelToggles";
    // variables usable in the editor title pattern
    public static final String VAR_CONNECTION_NAME = "connectionName";
    public static final String VAR_FILE_NAME = "fileName";
    public static final String VAR_FILE_EXT = "fileExt";
    public static final String VAR_DRIVER_NAME = "driverName";
    public static final String DEFAULT_TITLE_PATTERN = "<${" + VAR_CONNECTION_NAME + "}> ${" + VAR_FILE_NAME + "}";
    // UI controls: editor/results layout, result tabs, log and output panels
    private ResultSetOrientation resultSetOrientation = ResultSetOrientation.HORIZONTAL;
    private CustomSashForm resultsSash;
    private Composite sqlEditorPanel;
    @Nullable
    private CustomSashForm presentationSash;
    private CTabFolder resultTabs;
    private CTabItem activeResultsTab;
    private SQLLogPanel logViewer;
    private SQLEditorOutputConsoleViewer outputViewer;
    // query execution state
    private volatile QueryProcessor curQueryProcessor;
    private final List<QueryProcessor> queryProcessors = new ArrayList<>();
    // datasource / execution-context state
    private DBPDataSourceContainer dataSourceContainer;
    private DBPDataSource curDataSource;
    private volatile DBCExecutionContext executionContext;
    private volatile DBCExecutionContext lastExecutionContext;
    private SQLScriptContext globalScriptContext;
    private volatile boolean syntaxLoaded = false;
    private final FindReplaceTarget findReplaceTarget = new FindReplaceTarget();
    private final List<SQLQuery> runningQueries = new ArrayList<>();
    private QueryResultsContainer curResultsContainer;
    private Image editorImage;
    private VerticalFolder sideToolBar;
    // optional extra presentation (registered via extension point) and its panels
    private SQLPresentationDescriptor extraPresentationDescriptor;
    private SQLEditorPresentation extraPresentation;
    private Map<SQLPresentationPanelDescriptor, SQLEditorPresentationPanel> extraPresentationPanels = new HashMap<>();
    private SQLEditorPresentationPanel extraPresentationCurrentPanel;
    private VerticalFolder presentationSwitchFolder;
    private final List<SQLEditorListener> listeners = new ArrayList<>();
    // hides the results sash when the last result tab is closed
    private DisposeListener resultTabDisposeListener = new DisposeListener() {
        @Override
        public void widgetDisposed(DisposeEvent e) {
            if (resultTabs.getItemCount() == 0) {
                if (resultsSash.getMaximizedControl() == null) {
                    // Hide results
                    toggleResultPanel();
                }
            }
        }
    };
    private VerticalButton switchPresentationSQLButton;
    private VerticalButton switchPresentationExtraButton;
    /**
     * Creates the editor and resolves the optional extra presentation
     * registered for SQL editors (may be null when none is contributed).
     */
    public SQLEditor()
    {
        super();
        this.extraPresentationDescriptor = SQLPresentationRegistry.getInstance().getPresentation(this);
    }
    // Activates the plain text-editor context plus the SQL editor/script contexts
    // so SQL-specific key bindings are in effect.
    @Override
    protected String[] getKeyBindingContexts() {
        return new String[]{TEXT_EDITOR_CONTEXT, SQLEditorContributions.SQL_EDITOR_CONTEXT, SQLEditorContributions.SQL_EDITOR_SCRIPT_CONTEXT};
    }
@Override
public DBPDataSource getDataSource() {
DBPDataSourceContainer container = getDataSourceContainer();
return container == null ? null : container.getDataSource();
}
    /**
     * Returns the execution context used by this editor: the editor's own
     * (separate) context when one is open, otherwise the data source's
     * default context — unless the connection is configured to use a
     * separate connection per editor, in which case null is returned
     * until that context is opened.
     */
    @Override
    public DBCExecutionContext getExecutionContext() {
        if (executionContext != null) {
            return executionContext;
        }
        if (dataSourceContainer != null && !SQLEditorUtils.isOpenSeparateConnection(dataSourceContainer)) {
            return DBUtils.getDefaultContext(getDataSource(), false);
        }
        return null;
    }
@Nullable
public DBPProject getProject()
{
IFile file = EditorUtils.getFileFromInput(getEditorInput());
return file == null ?
DBWorkbench.getPlatform().getWorkspace().getActiveProject() : DBWorkbench.getPlatform().getWorkspace().getProject(file.getProject());
}
    /**
     * Returns the document line numbers covered by the currently running
     * queries (used e.g. to highlight executing statements in the ruler),
     * or null when nothing is running or no document is available.
     */
    @Nullable
    @Override
    public int[] getCurrentLines()
    {
        synchronized (runningQueries) {
            IDocument document = getDocument();
            if (document == null || runningQueries.isEmpty()) {
                return null;
            }
            List<Integer> lines = new ArrayList<>(runningQueries.size() * 2);
            for (SQLQuery statementInfo : runningQueries) {
                try {
                    int firstLine = document.getLineOfOffset(statementInfo.getOffset());
                    int lastLine = document.getLineOfOffset(statementInfo.getOffset() + statementInfo.getLength());
                    for (int k = firstLine; k <= lastLine; k++) {
                        lines.add(k);
                    }
                } catch (BadLocationException e) {
                    // ignore - this may happen if SQL was edited after execution start
                }
            }
            if (lines.isEmpty()) {
                return null;
            }
            // unbox into the int[] expected by the caller
            int[] results = new int[lines.size()];
            for (int i = 0; i < lines.size(); i++) {
                results[i] = lines.get(i);
            }
            return results;
        }
    }
    /** Returns the data source container bound to this editor, if any. */
    @Nullable
    @Override
    public DBPDataSourceContainer getDataSourceContainer()
    {
        return dataSourceContainer;
    }
    /**
     * Rebinds the editor to another data source container: releases the old
     * container and its jobs, registers listeners on the new one, updates the
     * editor input's saved datasource, refreshes navigator/UI state, triggers
     * an async connect check and (optionally) embeds the binding comment.
     * Always returns true.
     */
    @Override
    public boolean setDataSourceContainer(@Nullable DBPDataSourceContainer container)
    {
        if (container == dataSourceContainer) {
            return true;
        }
        // Release ds container
        releaseContainer();
        closeAllJobs();
        dataSourceContainer = container;
        if (dataSourceContainer != null) {
            dataSourceContainer.getPreferenceStore().addPropertyChangeListener(this);
            dataSourceContainer.getRegistry().addDataSourceListener(this);
        }
        IEditorInput input = getEditorInput();
        if (input != null) {
            // persist the new binding in the editor input when it changed
            DBPDataSourceContainer savedContainer = EditorUtils.getInputDataSource(input);
            if (savedContainer != container) {
                EditorUtils.setInputDataSource(input, new SQLNavigatorContext(container, getExecutionContext()));
            }
            IFile file = EditorUtils.getFileFromInput(input);
            if (file != null) {
                DBNUtils.refreshNavigatorResource(file, container);
            } else {
                // FIXME: this is a hack. We can't fire event on resource change so editor's state won't be updated in UI.
                // FIXME: To update main toolbar and other controls we hide and show this editor
                IWorkbenchPage page = getSite().getPage();
                for (IEditorReference er : page.getEditorReferences()) {
                    if (er.getEditor(false) == this) {
                        page.hideEditor(er);
                        page.showEditor(er);
                        break;
                    }
                }
                //page.activate(this);
            }
        }
        // connect asynchronously; report failures but restore focus either way
        checkConnected(false, status -> UIUtils.asyncExec(() -> {
            if (!status.isOK()) {
                DBWorkbench.getPlatformUI().showError("Can't connect to database", "Error connecting to datasource", status);
            }
            setFocus();
        }));
        setPartName(getEditorName());
        fireDataSourceChange();
        if (dataSourceContainer != null) {
            dataSourceContainer.acquire(this);
        }
        if (SQLEditorBase.isWriteEmbeddedBinding()) {
            // Patch connection reference
            UIUtils.syncExec(this::embedDataSourceAssociation);
        }
        return true;
    }
    /**
     * Determines the container to bind this editor to, in priority order:
     * embedded binding comment in the script (when enabled), then the
     * editor input's saved datasource, then the active workbench part's
     * datasource — and applies it via {@link #setDataSourceContainer}.
     */
    private void updateDataSourceContainer() {
        DBPDataSourceContainer inputDataSource = null;
        if (SQLEditorBase.isReadEmbeddedBinding()) {
            // Try to get datasource from contents (always, no matter what )
            inputDataSource = getDataSourceFromContent();
        }
        if (inputDataSource == null) {
            inputDataSource = EditorUtils.getInputDataSource(getEditorInput());
        }
        if (inputDataSource == null) {
            // No datasource. Try to get one from active part
            IWorkbenchPart activePart = getSite().getWorkbenchWindow().getActivePage().getActivePart();
            if (activePart != this && activePart instanceof IDataSourceContainerProvider) {
                inputDataSource = ((IDataSourceContainerProvider) activePart).getDataSourceContainer();
            }
        }
        setDataSourceContainer(inputDataSource);
    }
    /**
     * (Re)creates the editor's execution context when the underlying data
     * source changed: releases the old context, and either opens a separate
     * per-editor connection (async; {@code onSuccess} runs after it opens)
     * or immediately invokes {@code onSuccess} for a shared connection.
     */
    private void updateExecutionContext(Runnable onSuccess) {
        if (dataSourceContainer == null) {
            releaseExecutionContext();
        } else {
            // Get/open context
            final DBPDataSource dataSource = dataSourceContainer.getDataSource();
            if (dataSource == null) {
                releaseExecutionContext();
            } else if (curDataSource != dataSource) {
                // Datasource was changed or instance was changed (PG)
                releaseExecutionContext();
                curDataSource = dataSource;
                DBPDataSourceContainer container = dataSource.getContainer();
                if (SQLEditorUtils.isOpenSeparateConnection(container)) {
                    initSeparateConnection(dataSource, onSuccess);
                } else {
                    if (onSuccess != null) {
                        onSuccess.run();
                    }
                }
            }
        }
    }
    /**
     * Opens a dedicated execution context for this editor. Prefers the
     * instance saved in the editor input's context defaults (when schema
     * restore is enabled), falling back to the data source's default
     * instance; the actual open happens in a background job.
     */
    private void initSeparateConnection(@NotNull DBPDataSource dataSource, Runnable onSuccess) {
        DBSInstance dsInstance = dataSource.getDefaultInstance();
        String[] contextDefaults = isRestoreActiveSchemaFromScript() ?
            EditorUtils.getInputContextDefaults(getEditorInput()) : null;
        if (!ArrayUtils.isEmpty(contextDefaults) && contextDefaults[0] != null) {
            DBSInstance selectedInstance = DBUtils.findObject(dataSource.getAvailableInstances(), contextDefaults[0]);
            if (selectedInstance != null) {
                dsInstance = selectedInstance;
            }
        }
        if (dsInstance != null) {
            final OpenContextJob job = new OpenContextJob(dsInstance, onSuccess);
            job.schedule();
        }
    }
private void releaseExecutionContext() {
if (executionContext != null && executionContext.isConnected()) {
// Close context in separate job (otherwise it can block UI)
new CloseContextJob(executionContext).schedule();
}
executionContext = null;
curDataSource = null;
}
private void releaseContainer() {
releaseExecutionContext();
if (dataSourceContainer != null) {
dataSourceContainer.getPreferenceStore().removePropertyChangeListener(this);
dataSourceContainer.getRegistry().removeDataSourceListener(this);
dataSourceContainer.release(this);
dataSourceContainer = null;
}
}
    /**
     * Parses the first line of the script for an embedded
     * "-- CONNECTION: &lt;spec&gt;" binding comment and resolves it to a
     * container within the current project. Returns null when absent,
     * unresolvable, or on any parsing error (logged at debug level).
     */
    private DBPDataSourceContainer getDataSourceFromContent() {
        DBPProject project = getProject();
        IDocument document = getDocument();
        int totalLines = document.getNumberOfLines();
        if (totalLines == 0) {
            return null;
        }
        try {
            IRegion region = document.getLineInformation(0);
            String line = document.get(region.getOffset(), region.getLength());
            Matcher matcher = EMBEDDED_BINDING_PREFIX_PATTERN.matcher(line);
            if (matcher.matches()) {
                String connSpec = matcher.group(1).trim();
                if (!CommonUtils.isEmpty(connSpec)) {
                    final DBPDataSourceContainer dataSource = DataSourceUtils.getDataSourceBySpec(project, connSpec, null, true, false);
                    if (dataSource != null) {
                        return dataSource;
                    }
                }
            }
        } catch (Throwable e) {
            log.debug("Error extracting datasource info from script's content", e);
        }
        return null;
    }
    /**
     * Writes (or removes) the datasource binding comment on the first line of the
     * script so the association survives outside the workspace metadata.
     * No-op when the embedded binding already matches the current container.
     */
    private void embedDataSourceAssociation() {
        if (getDataSourceFromContent() == dataSourceContainer) {
            return;
        }
        IDocument document = getDocument();
        try {
            int totalLines = document.getNumberOfLines();
            // region != null means the first line already holds a binding comment to replace
            IRegion region = null;
            if (totalLines > 0) {
                region = document.getLineInformation(0);
                String line = document.get(region.getOffset(), region.getLength());
                Matcher matcher = EMBEDDED_BINDING_PREFIX_PATTERN.matcher(line);
                if (!matcher.matches()) {
                    // First line is not a binding comment - we will insert a new one instead
                    region = null;
                }
            }
            if (dataSourceContainer == null) {
                if (region == null) {
                    return;
                }
                // Remove connection association
                document.replace(region.getOffset(), region.getLength(), "");
            } else {
                // Binding format (comment style) is a workbench-level preference
                SQLScriptBindingType bindingType = SQLScriptBindingType.valueOf(DBWorkbench.getPlatform().getPreferenceStore().getString(SQLPreferenceConstants.SCRIPT_BIND_COMMENT_TYPE));
                StringBuilder assocSpecLine = new StringBuilder(EMBEDDED_BINDING_PREFIX);
                bindingType.appendSpec(dataSourceContainer, assocSpecLine);
                if (region != null) {
                    // Replace the existing binding comment in place
                    document.replace(region.getOffset(), region.getLength(), assocSpecLine.toString());
                } else {
                    // Insert a fresh binding comment at the very top of the document
                    document.replace(0, 0, assocSpecLine.toString());
                }
            }
        } catch (Throwable e) {
            log.debug("Error extracting datasource info from script's content", e);
        }
        UIUtils.asyncExec(() -> getTextViewer().refresh());
    }
    /**
     * Registers an editor lifecycle listener. Thread-safe: guarded by the listeners list.
     */
    public void addListener(SQLEditorListener listener) {
        synchronized (listeners) {
            listeners.add(listener);
        }
    }
    /**
     * Unregisters a previously added editor listener. Thread-safe: guarded by the listeners list.
     */
    public void removeListener(SQLEditorListener listener) {
        synchronized (listeners) {
            listeners.remove(listener);
        }
    }
    /**
     * @return true while at least one query job is running in this editor
     */
    @Override
    public boolean isActiveTask() {
        return getTotalQueryRunning() > 0;
    }
    /**
     * @return current value of the smart-commit transaction preference for this editor
     */
    @Override
    public boolean isSmartAutoCommit() {
        return getActivePreferenceStore().getBoolean(ModelPreferences.TRANSACTIONS_SMART_COMMIT);
    }
@Override
public void setSmartAutoCommit(boolean smartAutoCommit) {
getActivePreferenceStore().setValue(ModelPreferences.TRANSACTIONS_SMART_COMMIT, smartAutoCommit);
try {
getActivePreferenceStore().save();
} catch (IOException e) {
log.error("Error saving smart auto-commit option", e);
}
}
    /**
     * Forces the side toolbar to repaint so contributed action states are refreshed.
     */
    public void refreshActions() {
        // Redraw toolbar to refresh action sets
        sideToolBar.redraw();
    }
private class OutputLogWriter extends Writer {
@Override
public void write(@NotNull final char[] cbuf, final int off, final int len) {
UIUtils.syncExec(() -> {
if (!outputViewer.isDisposed()) {
outputViewer.getOutputWriter().write(cbuf, off, len);
outputViewer.scrollToEnd();
if (!outputViewer.isVisible()) {
updateOutputViewerIcon(true);
}
}
});
}
@Override
public void flush() throws IOException {
outputViewer.getOutputWriter().flush();
}
@Override
public void close() throws IOException {
}
}
    /**
     * Background job that opens an isolated execution context on a datasource
     * instance, optionally restores the script's saved catalog/schema defaults,
     * and then publishes the new context to the editor.
     */
    private class OpenContextJob extends AbstractJob {
        private final DBSInstance instance;
        // Invoked after the context was opened successfully (may be null)
        private final Runnable onSuccess;
        // Set when opening fails; checked by updateContext()
        private Throwable error;
        OpenContextJob(DBSInstance instance, Runnable onSuccess) {
            super("Open connection to " + instance.getDataSource().getContainer().getName());
            this.instance = instance;
            this.onSuccess = onSuccess;
            setUser(true);
        }
        @Override
        protected IStatus run(DBRProgressMonitor monitor) {
            monitor.beginTask("Open SQLEditor isolated connection", 1);
            try {
                String title = "SQLEditor <" + getEditorInput().getName() + ">";
                monitor.subTask("Open context " + title);
                DBCExecutionContext newContext = instance.openIsolatedContext(monitor, title, instance.getDefaultContext(monitor, false));
                // Set context defaults
                // contextDefaultNames[0]/[1] are presumably catalog and schema - TODO confirm against EditorUtils
                String[] contextDefaultNames = isRestoreActiveSchemaFromScript() ?
                    EditorUtils.getInputContextDefaults(getEditorInput()) : null;
                if (contextDefaultNames != null && contextDefaultNames.length > 1 &&
                    (!CommonUtils.isEmpty(contextDefaultNames[0]) || !CommonUtils.isEmpty(contextDefaultNames[1])))
                {
                    try {
                        DBExecUtils.setExecutionContextDefaults(monitor, newContext.getDataSource(), newContext, contextDefaultNames[0], null, contextDefaultNames[1]);
                    } catch (DBException e) {
                        // Default restore failure is non-fatal: the context itself stays usable
                        DBWorkbench.getPlatformUI().showError("New connection default", "Error setting default catalog/schema for new connection", e);
                    }
                }
                SQLEditor.this.executionContext = newContext;
                // Needed to update main toolbar
                DBUtils.fireObjectSelect(instance, true);
            } catch (DBException e) {
                error = e;
            } finally {
                monitor.done();
            }
            updateContext();
            return Status.OK_STATUS;
        }
        // Either reports the failure (and rolls the context back) or notifies listeners
        private void updateContext() {
            if (error != null) {
                releaseExecutionContext();
                DBWorkbench.getPlatformUI().showError("Open context", "Can't open editor connection", error);
            } else {
                if (onSuccess != null) {
                    onSuccess.run();
                }
                fireDataSourceChange();
            }
        }
    }
private boolean isRestoreActiveSchemaFromScript() {
return getActivePreferenceStore().getBoolean(SQLPreferenceConstants.AUTO_SAVE_ACTIVE_SCHEMA) &&
getActivePreferenceStore().getBoolean(SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION);
}
    /**
     * Background job that closes an isolated execution context.
     * If a transaction is still active on the context, the connections UI service
     * is asked to end it first (without committing - last argument is false).
     */
    private class CloseContextJob extends AbstractJob {
        private final DBCExecutionContext context;
        CloseContextJob(DBCExecutionContext context) {
            super("Close context " + context.getContextName());
            this.context = context;
            setUser(true);
        }
        @Override
        protected IStatus run(DBRProgressMonitor monitor) {
            monitor.beginTask("Close SQLEditor isolated connection", 1);
            try {
                if (QMUtils.isTransactionActive(context)) {
                    UIServiceConnections serviceConnections = DBWorkbench.getService(UIServiceConnections.class);
                    if (serviceConnections != null) {
                        serviceConnections.closeActiveTransaction(monitor, context, false);
                    }
                }
                monitor.subTask("Close context " + context.getContextName());
                context.close();
            } finally {
                monitor.done();
            }
            return Status.OK_STATUS;
        }
    }
    /**
     * An SQL editor is dirty when any query processor has unsaved results or a
     * running job, when its execution context has an open transaction, or when the
     * extra presentation reports itself dirty. Non-persistent (console) editors
     * are never dirty. Check order matters: the console short-circuit must come
     * after the processor/transaction checks.
     */
    @Override
    public boolean isDirty()
    {
        for (QueryProcessor queryProcessor : queryProcessors) {
            if (queryProcessor.isDirty() || queryProcessor.curJobRunning.get() > 0) {
                return true;
            }
        }
        // An uncommitted transaction also counts as unsaved state
        if (executionContext != null && QMUtils.isTransactionActive(executionContext)) {
            return true;
        }
        if (isNonPersistentEditor()) {
            // Console is never dirty
            return false;
        }
        if (extraPresentation instanceof ISaveablePart && ((ISaveablePart) extraPresentation).isDirty()) {
            return true;
        }
        return super.isDirty();
    }
    /**
     * Adapter lookup. Navigator view adaptation is explicitly suppressed; find/replace
     * targets and anything adaptable hosted by the active results tab are delegated
     * to the tab's viewer before falling back to the superclass.
     */
    @Nullable
    @Override
    public <T> T getAdapter(Class<T> required)
    {
        if (required == INavigatorModelView.class) {
            // SQL editor never exposes a navigator model view
            return null;
        }
        if (resultTabs != null && !resultTabs.isDisposed()) {
            if (required == IFindReplaceTarget.class) {
                return required.cast(findReplaceTarget);
            }
            // Tab data may only be inspected on the UI thread
            CTabItem activeResultsTab = getActiveResultsTab();
            if (activeResultsTab != null && UIUtils.isUIThread()) {
                Object tabControl = activeResultsTab.getData();
                if (tabControl instanceof QueryResultsContainer) {
                    // Unwrap the container and adapt through its viewer
                    tabControl = ((QueryResultsContainer) tabControl).viewer;
                }
                if (tabControl instanceof IAdaptable) {
                    T adapter = ((IAdaptable) tabControl).getAdapter(required);
                    if (adapter != null) {
                        return adapter;
                    }
                }
                if (tabControl instanceof ResultSetViewer && (required == IResultSetController.class || required == ResultSetViewer.class)) {
                    return required.cast(tabControl);
                }
            }
        }
        return super.getAdapter(required);
    }
private boolean checkConnected(boolean forceConnect, DBRProgressListener onFinish)
{
// Connect to datasource
final DBPDataSourceContainer dataSourceContainer = getDataSourceContainer();
boolean doConnect = dataSourceContainer != null &&
(forceConnect || dataSourceContainer.getPreferenceStore().getBoolean(SQLPreferenceConstants.EDITOR_CONNECT_ON_ACTIVATE));
if (doConnect) {
if (!dataSourceContainer.isConnected()) {
UIServiceConnections serviceConnections = DBWorkbench.getService(UIServiceConnections.class);
if (serviceConnections != null) {
serviceConnections.connectDataSource(dataSourceContainer, onFinish);
}
}
}
return dataSourceContainer != null && dataSourceContainer.isConnected();
}
    /**
     * Builds the editor's widget tree: the editor/results sash, the left controls
     * toolbar, the optional extra-presentation sash, the right presentation switch
     * bar and the result tabs. Widget creation order is significant - children are
     * laid out in the order they are created on their sash/panel parents.
     */
    @Override
    public void createPartControl(Composite parent)
    {
        setRangeIndicator(new DefaultRangeIndicator());
        // divides editor area and results/panels area
        resultsSash = UIUtils.createPartDivider(
            this,
            parent,
            resultSetOrientation.getSashOrientation() | SWT.SMOOTH);
        CSSUtils.setCSSClass(resultsSash, DBStyles.COLORED_BY_CONNECTION_TYPE);
        resultsSash.setSashWidth(5);
        UIUtils.setHelp(resultsSash, IHelpContextIds.CTX_SQL_EDITOR);
        Composite editorContainer;
        sqlEditorPanel = UIUtils.createPlaceholder(resultsSash, 3, 0);
        // Create left vertical toolbar
        createControlsBar(sqlEditorPanel);
        // Create editor presentations sash
        Composite pPlaceholder = null;
        if (extraPresentationDescriptor != null) {
            // Extra presentation splits perpendicular to the results sash
            presentationSash = UIUtils.createPartDivider(
                    this,
                sqlEditorPanel,
                ((resultSetOrientation.getSashOrientation() == SWT.VERTICAL) ? SWT.HORIZONTAL : SWT.VERTICAL) | SWT.SMOOTH);
            presentationSash.setSashWidth(5);
            presentationSash.setLayoutData(new GridData(GridData.FILL_BOTH));
            editorContainer = presentationSash;
            pPlaceholder = new Composite(presentationSash, SWT.NONE);
            pPlaceholder.setLayout(new FillLayout());
        } else {
            editorContainer = sqlEditorPanel;
        }
        super.createPartControl(editorContainer);
        getEditorControlWrapper().setLayoutData(new GridData(GridData.FILL_BOTH));
        // Create right vertical toolbar
        createPresentationSwitchBar(sqlEditorPanel);
        if (pPlaceholder != null) {
            switch (extraPresentationDescriptor.getActivationType()) {
                case HIDDEN:
                    // Keep only the SQL editor visible until the presentation is toggled on
                    presentationSash.setMaximizedControl(presentationSash.getChildren()[SQL_EDITOR_CONTROL_INDEX]);
                    break;
                case MAXIMIZED:
                case VISIBLE:
                    extraPresentation.createPresentation(pPlaceholder, this);
                    if (extraPresentationDescriptor.getActivationType() == SQLEditorPresentation.ActivationType.MAXIMIZED) {
                        if (presentationSash.getChildren()[EXTRA_CONTROL_INDEX] != null) {
                            presentationSash.setMaximizedControl(pPlaceholder);
                        }
                    }
                    break;
            }
        }
        getSite().setSelectionProvider(new DynamicSelectionProvider());
        DBPProject project = getProject();
        if (project != null && project.isRegistryLoaded()) {
            createResultTabs();
        } else {
            // Registry not loaded yet (app startup) - defer tab creation
            UIExecutionQueue.queueExec(this::createResultTabs);
        }
        setAction(ITextEditorActionConstants.SHOW_INFORMATION, null);
        //toolTipAction.setEnabled(false);
/*
        resultsSash.setSashBorders(new boolean[]{true, true});
        if (presentationSash != null) {
            presentationSash.setSashBorders(new boolean[]{true, true});
        }
*/
        SQLEditorFeatures.SQL_EDITOR_OPEN.use();
        // Start output reader
        new ServerOutputReader().schedule();
        updateExecutionContext(null);
        // Update controls
        UIExecutionQueue.queueExec(this::onDataSourceChange);
    }
    /**
     * Creates the left-hand vertical toolbar: execute/explain push buttons on top,
     * a stretching spacer, then preference and output/log toggle buttons at the bottom.
     */
    private void createControlsBar(Composite sqlEditorPanel) {
        sideToolBar = new VerticalFolder(sqlEditorPanel, SWT.LEFT);
        ((GridLayout)sideToolBar.getLayout()).marginTop = 3;
        ((GridLayout)sideToolBar.getLayout()).marginBottom = 10;
        ((GridLayout)sideToolBar.getLayout()).verticalSpacing = 3;
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_STATEMENT, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_STATEMENT_NEW, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_SCRIPT, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_SCRIPT_NEW, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXPLAIN_PLAN, false);
        // Spacer pushes the remaining buttons to the bottom of the bar
        UIUtils.createEmptyLabel(sideToolBar, 1, 1).setLayoutData(new GridData(GridData.FILL_VERTICAL));
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.CHECK, new ShowPreferencesAction(), false);
        Label label = new Label(sideToolBar, SWT.NONE);
        label.setImage(DBeaverIcons.getImage(UIIcon.SEPARATOR_H));
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.CHECK, getSite(), SQLEditorCommands.CMD_SQL_SHOW_OUTPUT, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.CHECK, getSite(), SQLEditorCommands.CMD_SQL_SHOW_LOG, false);
/*
        sideToolBar.add(new GroupMarker(TOOLBAR_GROUP_PANELS));
        final IMenuService menuService = getSite().getService(IMenuService.class);
        if (menuService != null) {
            int prevSize = sideToolBar.getSize();
            menuService.populateContributionManager(sideToolBar, TOOLBAR_CONTRIBUTION_ID);
            if (prevSize != sideToolBar.getSize()) {
                // Something was populated
                sideToolBar.insertBefore(TOOLBAR_GROUP_ADDITIONS, new ToolbarSeparatorContribution(false));
            }
        }
*/
        sideToolBar.setLayoutData(new GridData(GridData.FILL_VERTICAL | GridData.VERTICAL_ALIGN_BEGINNING));
    }
    /**
     * Creates the right-hand vertical bar used to switch between the plain SQL
     * presentation and the extra presentation (if one is contributed).
     * Both buttons share a single listener which simply runs the presentation's
     * toggle command - the presentation itself owns the switching logic.
     */
    private void createPresentationSwitchBar(Composite sqlEditorPanel) {
        if (extraPresentationDescriptor == null) {
            // No extra presentation contributed - no switch bar needed
            return;
        }
        presentationSwitchFolder = new VerticalFolder(sqlEditorPanel, SWT.RIGHT);
        presentationSwitchFolder.setLayoutData(new GridData(GridData.FILL_VERTICAL));
        switchPresentationSQLButton = new VerticalButton(presentationSwitchFolder, SWT.RIGHT | SWT.CHECK);
        switchPresentationSQLButton.setText(SQLEditorMessages.editors_sql_description);
        switchPresentationSQLButton.setImage(DBeaverIcons.getImage(UIIcon.SQL_SCRIPT));
        switchPresentationExtraButton = new VerticalButton(presentationSwitchFolder, SWT.RIGHT | SWT.CHECK);
        switchPresentationExtraButton.setData(extraPresentationDescriptor);
        switchPresentationExtraButton.setText(extraPresentationDescriptor.getLabel());
        switchPresentationExtraButton.setImage(DBeaverIcons.getImage(extraPresentationDescriptor.getIcon()));
        // Prefer the toggle command's description (includes key binding); fall back to the descriptor text
        String toolTip = ActionUtils.findCommandDescription(extraPresentationDescriptor.getToggleCommandId(), getSite(), false);
        if (CommonUtils.isEmpty(toolTip)) {
            toolTip = extraPresentationDescriptor.getDescription();
        }
        if (!CommonUtils.isEmpty(toolTip)) {
            switchPresentationExtraButton.setToolTipText(toolTip);
        }
        switchPresentationSQLButton.setChecked(true);
        // We use single switch handler. It must be provided by presentation itself
        // Presentation switch may require some additional action so we can't just switch visible controls
        SelectionListener switchListener = new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (((VerticalButton)e.item).isChecked() || presentationSwitchFolder.getSelection() == e.item) {
                    return;
                }
                String toggleCommandId = extraPresentationDescriptor.getToggleCommandId();
                ActionUtils.runCommand(toggleCommandId, getSite());
            }
        };
        switchPresentationSQLButton.addSelectionListener(switchListener);
        switchPresentationExtraButton.addSelectionListener(switchListener);
        // Stretch
        UIUtils.createEmptyLabel(presentationSwitchFolder, 1, 1).setLayoutData(new GridData(GridData.FILL_VERTICAL));
        VerticalButton.create(presentationSwitchFolder, SWT.RIGHT | SWT.CHECK, getSite(), SQLEditorCommands.CMD_TOGGLE_LAYOUT, false);
    }
/**
* Sets focus in current editor.
* This function is called on drag-n-drop and some other operations
*/
@Override
public boolean validateEditorInputState() {
boolean res = super.validateEditorInputState();
if (res) {
StyledText textWidget = getViewer().getTextWidget();
if (textWidget != null && !textWidget.isDisposed()) {
textWidget.setFocus();
}
}
return res;
}
private void createResultTabs()
{
resultTabs = new CTabFolder(resultsSash, SWT.TOP | SWT.FLAT);
CSSUtils.setCSSClass(resultTabs, DBStyles.COLORED_BY_CONNECTION_TYPE);
new TabFolderReorder(resultTabs);
resultTabs.setLayoutData(new GridData(GridData.FILL_BOTH));
resultTabs.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (extraPresentationCurrentPanel != null) {
extraPresentationCurrentPanel.deactivatePanel();
}
extraPresentationCurrentPanel = null;
Object data = e.item.getData();
if (data instanceof QueryResultsContainer) {
setActiveResultsContainer((QueryResultsContainer) data);
} else if (data instanceof SQLEditorPresentationPanel) {
extraPresentationCurrentPanel = ((SQLEditorPresentationPanel) data);
extraPresentationCurrentPanel.activatePanel();
} else if (data instanceof ExplainPlanViewer) {
SQLQuery planQuery = ((ExplainPlanViewer) data).getQuery();
if (planQuery != null) {
getSelectionProvider().setSelection(new TextSelection(planQuery.getOffset(), 0));
}
}
}
});
this.resultTabs.addListener(SWT.Resize, event -> {
if (!resultsSash.isDisposed()) {
int[] weights = resultsSash.getWeights();
IPreferenceStore prefs = getPreferenceStore();
if (prefs != null) {
prefs.setValue(SQLPreferenceConstants.RESULTS_PANEL_RATIO, weights[0] + "-" + weights[1]);
}
}
});
String resultsPanelRatio = getPreferenceStore().getString(SQLPreferenceConstants.RESULTS_PANEL_RATIO);
if (!CommonUtils.isEmpty(resultsPanelRatio)) {
String[] weights = resultsPanelRatio.split("-");
if (weights.length > 1) {
resultsSash.setWeights(new int[] {
Integer.parseInt(weights[0]),
Integer.parseInt(weights[1]),
});
}
}
getTextViewer().getTextWidget().addTraverseListener(e -> {
if (e.detail == SWT.TRAVERSE_PAGE_NEXT) {
ResultSetViewer viewer = getActiveResultSetViewer();
if (viewer != null && viewer.getActivePresentation().getControl().isVisible()) {
viewer.getActivePresentation().getControl().setFocus();
e.doit = false;
e.detail = SWT.TRAVERSE_NONE;
}
}
});
resultTabs.setSimple(true);
resultTabs.addMouseListener(new MouseAdapter() {
@Override
public void mouseUp(MouseEvent e) {
if (e.button == 2) {
CTabItem item = resultTabs.getItem(new Point(e.x, e.y));
if (item != null && item.getShowClose()) {
item.dispose();
}
}
}
});
resultTabs.addListener(SWT.MouseDoubleClick, event -> {
if (event.button != 1) {
return;
}
CTabItem selectedItem = resultTabs.getItem(new Point(event.getBounds().x, event.getBounds().y));
if (selectedItem != null && selectedItem == resultTabs.getSelection()) {
toggleEditorMaximize();
}
});
// Extra views
//planView = new ExplainPlanViewer(this, resultTabs);
logViewer = new SQLLogPanel(resultTabs, this);
outputViewer = new SQLEditorOutputConsoleViewer(getSite(), resultTabs, SWT.NONE);
// Create results tab
createQueryProcessor(true, true);
{
resultTabs.addMouseListener(new MouseAdapter() {
@Override
public void mouseDown(MouseEvent e) {
activeResultsTab = resultTabs.getItem(new Point(e.x, e.y));
}
});
MenuManager menuMgr = new MenuManager();
Menu menu = menuMgr.createContextMenu(resultTabs);
menuMgr.addMenuListener(manager -> {
manager.add(ActionUtils.makeCommandContribution(getSite(), SQLEditorCommands.CMD_SQL_EDITOR_MAXIMIZE_PANEL));
if (resultTabs.getItemCount() > 1) {
manager.add(new Action("Close multiple results") {
@Override
public void run()
{
closeExtraResultTabs(null, false);
}
});
int pinnedTabsCount = 0;
for (CTabItem item : resultTabs.getItems()) {
if (item.getData() instanceof QueryResultsContainer) {
if (((QueryResultsContainer) item.getData()).isPinned()) {
pinnedTabsCount++;
}
}
}
if (pinnedTabsCount > 1) {
manager.add(new Action("Unpin all tabs") {
@Override
public void run()
{
for (CTabItem item : resultTabs.getItems()) {
if (item.getData() instanceof QueryResultsContainer) {
if (((QueryResultsContainer) item.getData()).isPinned()) {
((QueryResultsContainer) item.getData()).setPinned(false);
}
}
}
}
});
}
}
final CTabItem activeTab = getActiveResultsTab();
if (activeTab != null && activeTab.getData() instanceof QueryResultsContainer) {
{
final QueryResultsContainer resultsContainer = (QueryResultsContainer) activeTab.getData();
if (resultsContainer.getResultSetController().hasData()) {
manager.add(new Separator());
final boolean isPinned = resultsContainer.isPinned();
manager.add(new Action(isPinned ? "Unpin tab" : "Pin tab") {
@Override
public void run() {
resultsContainer.setPinned(!isPinned);
}
});
}
}
manager.add(new Action("Set tab title") {
@Override
public void run()
{
EnterNameDialog dialog = new EnterNameDialog(resultTabs.getShell(), "Tab title", activeTab.getText());
if (dialog.open() == IDialogConstants.OK_ID) {
activeTab.setText(dialog.getResult());
}
}
});
}
if (activeTab != null && activeTab.getShowClose()) {
manager.add(ActionUtils.makeCommandContribution(getSite(), SQLEditorCommands.CMD_SQL_EDITOR_CLOSE_TAB));
}
});
menuMgr.setRemoveAllWhenShown(true);
resultTabs.setMenu(menu);
}
}
    /**
     * Records the given container (and its owning query processor) as the active one.
     */
    private void setActiveResultsContainer(QueryResultsContainer data) {
        curResultsContainer = data;
        curQueryProcessor = curResultsContainer.queryProcessor;
        // The controller is fetched but focus transfer is intentionally disabled below
        ResultSetViewer rsv = curResultsContainer.getResultSetController();
        if (rsv != null) {
            //rsv.getActivePresentation().getControl().setFocus();
        }
    }
/////////////////////////////////////////////////////////////
// Panels
    /**
     * Toggles an extra view (output/log) as a result tab.
     * If a tab already hosts the view it is closed; otherwise a new closable
     * tab is created, selected, and its toolbar toggle button is checked.
     *
     * @param commandId command bound to the side-toolbar toggle button
     * @param view      the control to host; compared by identity with tab data
     */
    private void showExtraView(final String commandId, String name, String toolTip, Image image, Control view) {
        VerticalButton viewItem = getViewToolItem(commandId);
        if (viewItem == null) {
            log.warn("Tool item for command " + commandId + " not found");
            return;
        }
        for (CTabItem item : resultTabs.getItems()) {
            if (item.getData() == view) {
                // Close tab if it is already open
                viewItem.setChecked(false);
                viewItem.redraw();
                item.dispose();
                return;
            }
        }
        if (view == outputViewer.getControl()) {
            // Opening the output view clears the "new output" indicator
            updateOutputViewerIcon(false);
            outputViewer.resetNewOutput();
        }
        // Create new tab
        viewItem.setChecked(true);
        CTabItem item = new CTabItem(resultTabs, SWT.CLOSE);
        item.setControl(view);
        item.setText(name);
        item.setToolTipText(toolTip);
        item.setImage(image);
        item.setData(view);
        // De-select tool item on tab close
        item.addDisposeListener(e -> {
            if (!viewItem.isDisposed()) {
                viewItem.setChecked(false);
                viewItem.redraw();
            }
            resultTabDisposeListener.widgetDisposed(e);
        });
        resultTabs.setSelection(item);
        viewItem.redraw();
    }
private VerticalButton getViewToolItem(String commandId) {
VerticalButton viewItem = null;
for (VerticalButton item : sideToolBar.getItems()) {
if (commandId.equals(item.getCommandId())) {
viewItem = item;
break;
}
}
return viewItem;
}
private CTabItem getActiveResultsTab() {
return activeResultsTab == null || activeResultsTab.isDisposed() ?
(resultTabs == null ? null : resultTabs.getSelection()) : activeResultsTab;
}
public void closeActiveTab() {
CTabItem tabItem = getActiveResultsTab();
if (tabItem != null && tabItem.getShowClose()) {
tabItem.dispose();
activeResultsTab = null;
}
}
public void showOutputPanel() {
if (resultsSash.getMaximizedControl() != null) {
resultsSash.setMaximizedControl(null);
}
showExtraView(SQLEditorCommands.CMD_SQL_SHOW_OUTPUT, SQLEditorMessages.editors_sql_output, SQLEditorMessages.editors_sql_output_tip, IMG_OUTPUT, outputViewer.getControl());
}
public void showExecutionLogPanel() {
if (resultsSash.getMaximizedControl() != null) {
resultsSash.setMaximizedControl(null);
}
showExtraView(SQLEditorCommands.CMD_SQL_SHOW_LOG, SQLEditorMessages.editors_sql_execution_log, SQLEditorMessages.editors_sql_execution_log_tip, IMG_LOG, logViewer);
}
public <T> T getExtraPresentationPanel(Class<T> panelClass) {
for (CTabItem tabItem : resultTabs.getItems()) {
if (tabItem.getData() instanceof SQLEditorPresentationPanel && tabItem.getData().getClass() == panelClass) {
return panelClass.cast(tabItem.getData());
}
}
return null;
}
public boolean showPresentationPanel(SQLEditorPresentationPanel panel) {
for (CTabItem item : resultTabs.getItems()) {
if (item.getData() == panel) {
resultTabs.setSelection(item);
return true;
}
}
return false;
}
public SQLEditorPresentationPanel showPresentationPanel(String panelID) {
for (VerticalButton cItem : sideToolBar.getItems()) {
IAction action = cItem.getAction();
if (action != null) {
if (action instanceof PresentationPanelToggleAction && ((PresentationPanelToggleAction) action).panel.getId().equals(panelID)) {
action.run();
return extraPresentationCurrentPanel;
}
}
}
return null;
}
    /**
     * @return true when either the editor or the results area is maximized
     */
    public boolean hasMaximizedControl() {
        return resultsSash.getMaximizedControl() != null;
    }
    /**
     * @return the extra presentation instance, or null if none was created yet
     */
    public SQLEditorPresentation getExtraPresentation() {
        return extraPresentation;
    }
public SQLEditorPresentation.ActivationType getExtraPresentationState() {
if (extraPresentation == null) {
return SQLEditorPresentation.ActivationType.HIDDEN;
}
Control maximizedControl = presentationSash.getMaximizedControl();
if (maximizedControl == getExtraPresentationControl()) {
return SQLEditorPresentation.ActivationType.MAXIMIZED;
} else if (maximizedControl == getEditorControlWrapper()) {
return SQLEditorPresentation.ActivationType.HIDDEN;
} else {
return SQLEditorPresentation.ActivationType.VISIBLE;
}
}
    /**
     * Shows or hides the extra presentation. The presentation instance is created
     * lazily on first show. Also synchronizes the presentation switch bar: panel
     * toggle buttons are removed when the presentation is hidden and (re)created
     * when it is shown, and the SQL/extra check states are updated.
     *
     * @param show     true to show the presentation, false to hide it
     * @param maximize true to maximize the presentation side of the sash
     */
    public void showExtraPresentation(boolean show, boolean maximize) {
        if (extraPresentationDescriptor == null) {
            return;
        }
        // Suppress flicker while the sash children are rearranged
        resultsSash.setRedraw(false);
        try {
            if (!show) {
                //boolean epHasFocus = UIUtils.hasFocus(getExtraPresentationControl());
                presentationSash.setMaximizedControl(presentationSash.getChildren()[SQL_EDITOR_CONTROL_INDEX]);
                //if (epHasFocus) {
                    getEditorControlWrapper().setFocus();
                //}
            } else {
                if (extraPresentation == null) {
                    // Lazy activation
                    try {
                        extraPresentation = extraPresentationDescriptor.createPresentation();
                        extraPresentation.createPresentation((Composite) getExtraPresentationControl(), this);
                    } catch (DBException e) {
                        log.error("Error creating presentation", e);
                    }
                }
                if (maximize) {
                    presentationSash.setMaximizedControl(getExtraPresentationControl());
                    getExtraPresentationControl().setFocus();
                } else {
                    presentationSash.setMaximizedControl(null);
                }
            }
            // Show presentation panels
            boolean sideBarChanged = false;
            if (getExtraPresentationState() == SQLEditorPresentation.ActivationType.HIDDEN) {
                // Remove all presentation panel toggles
                for (SQLPresentationPanelDescriptor panelDescriptor : extraPresentationDescriptor.getPanels()) {
                    for (Control vb : presentationSwitchFolder.getChildren()) {
                        if (vb instanceof Label || vb.getData() instanceof SQLPresentationPanelDescriptor) {
                            vb.dispose();
                            sideBarChanged = true;
                        }
                    }
                }
                // Close all panels
                for (CTabItem tabItem : resultTabs.getItems()) {
                    if (tabItem.getData() instanceof SQLEditorPresentationPanel) {
                        tabItem.dispose();
                    }
                }
                extraPresentationCurrentPanel = null;
            } else {
                // Check and add presentation panel toggles
                UIUtils.createEmptyLabel(presentationSwitchFolder, 1, 1).setLayoutData(new GridData(GridData.FILL_VERTICAL));
                for (SQLPresentationPanelDescriptor panelDescriptor : extraPresentationDescriptor.getPanels()) {
                    sideBarChanged = true;
                    PresentationPanelToggleAction toggleAction = new PresentationPanelToggleAction(panelDescriptor);
                    VerticalButton panelButton = new VerticalButton(presentationSwitchFolder, SWT.RIGHT);
                    panelButton.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_END));
                    panelButton.setAction(toggleAction, true);
                    panelButton.setData(panelDescriptor);
                    if (panelDescriptor.isAutoActivate()) {
                        //panelButton.setChecked(true);
                        toggleAction.run();
                    }
                }
            }
            // Sync the check state of the two presentation switch buttons
            boolean isExtra = getExtraPresentationState() == SQLEditorPresentation.ActivationType.MAXIMIZED;
            switchPresentationSQLButton.setChecked(!isExtra);
            switchPresentationExtraButton.setChecked(isExtra);
            presentationSwitchFolder.redraw();
            if (sideBarChanged) {
                sideToolBar.getParent().layout(true, true);
            }
        } finally {
            resultsSash.setRedraw(true);
        }
    }
    /**
     * @return the sash child hosting the extra presentation (placeholder composite)
     */
    private Control getExtraPresentationControl() {
        return presentationSash.getChildren()[EXTRA_CONTROL_INDEX];
    }
public void toggleResultPanel() {
if (resultsSash.getMaximizedControl() == null) {
resultsSash.setMaximizedControl(sqlEditorPanel);
switchFocus(false);
} else {
// Show both editor and results
// Check for existing query processors (maybe all result tabs were closed)
if (resultTabs.getItemCount() == 0) {
createQueryProcessor(true, true);
}
resultsSash.setMaximizedControl(null);
switchFocus(true);
}
}
public void toggleEditorMaximize()
{
if (resultsSash.getMaximizedControl() == null) {
resultsSash.setMaximizedControl(resultTabs);
switchFocus(true);
} else {
resultsSash.setMaximizedControl(null);
switchFocus(false);
}
}
private void switchFocus(boolean results) {
if (results) {
ResultSetViewer activeRS = getActiveResultSetViewer();
if (activeRS != null && activeRS.getActivePresentation() != null) {
activeRS.getActivePresentation().getControl().setFocus();
} else {
CTabItem activeTab = resultTabs.getSelection();
if (activeTab != null && activeTab.getControl() != null) {
activeTab.getControl().setFocus();
}
}
} else {
getEditorControlWrapper().setFocus();
}
}
public void toggleActivePanel() {
if (resultsSash.getMaximizedControl() == null) {
if (UIUtils.hasFocus(resultTabs)) {
switchFocus(false);
} else {
switchFocus(true);
}
}
}
private void updateResultSetOrientation() {
try {
resultSetOrientation = ResultSetOrientation.valueOf(DBWorkbench.getPlatform().getPreferenceStore().getString(SQLPreferenceConstants.RESULT_SET_ORIENTATION));
} catch (IllegalArgumentException e) {
resultSetOrientation = ResultSetOrientation.HORIZONTAL;
}
if (resultsSash != null) {
resultsSash.setOrientation(resultSetOrientation.getSashOrientation());
}
}
    /**
     * Check-box action that shows/hides a single presentation panel as a result tab.
     * The panel instance is created lazily on first activation and cached in
     * {@code extraPresentationPanels}; closing the tab disposes the panel control
     * and unchecks the action.
     */
    private class PresentationPanelToggleAction extends Action {
        private SQLPresentationPanelDescriptor panel;
        // Tab hosting the panel, created on first activation
        private CTabItem tabItem;
        public PresentationPanelToggleAction(SQLPresentationPanelDescriptor panel) {
            super(panel.getLabel(), Action.AS_CHECK_BOX);
            setId(PANEL_ITEM_PREFIX + panel.getId());
            if (panel.getIcon() != null) {
                setImageDescriptor(DBeaverIcons.getImageDescriptor(panel.getIcon()));
            }
            if (panel.getDescription() != null) {
                setToolTipText(panel.getDescription());
            }
            this.panel = panel;
        }
        @Override
        public void run() {
            setChecked(!isChecked());
            SQLEditorPresentationPanel panelInstance = extraPresentationPanels.get(panel);
            if (panelInstance != null && !isChecked()) {
                // Hide panel
                for (CTabItem tabItem : resultTabs.getItems()) {
                    if (tabItem.getData() == panelInstance) {
                        tabItem.dispose();
                        return;
                    }
                }
            }
            if (panelInstance == null) {
                // First activation - instantiate the panel and host it in a new tab
                Control panelControl;
                try {
                    panelInstance = panel.createPanel();
                    panelControl = panelInstance.createPanel(resultTabs, SQLEditor.this, extraPresentation);
                } catch (DBException e) {
                    DBWorkbench.getPlatformUI().showError("Panel opening error", "Can't create panel " + panel.getLabel(), e);
                    return;
                }
                extraPresentationPanels.put(panel, panelInstance);
                tabItem = new CTabItem(resultTabs, SWT.CLOSE);
                tabItem.setControl(panelControl);
                tabItem.setText(panel.getLabel());
                tabItem.setToolTipText(panel.getDescription());
                tabItem.setImage(DBeaverIcons.getImage(panel.getIcon()));
                tabItem.setData(panelInstance);
                // De-select tool item on tab close
                tabItem.addDisposeListener(e -> {
                    PresentationPanelToggleAction.this.setChecked(false);
                    panelControl.dispose();
                    extraPresentationPanels.remove(panel);
                    extraPresentationCurrentPanel = null;
                    resultTabDisposeListener.widgetDisposed(e);
                });
                extraPresentationCurrentPanel = panelInstance;
                resultTabs.setSelection(tabItem);
            } else {
                // Panel already exists - just bring its tab to the front
                for (CTabItem tabItem : resultTabs.getItems()) {
                    if (tabItem.getData() == panelInstance) {
                        resultTabs.setSelection(tabItem);
                        break;
                    }
                }
            }
        }
    }
/////////////////////////////////////////////////////////////
// Initialization
    /**
     * Part initialization: after the standard editor init, applies the configured
     * results orientation and creates the editor-wide script context used for
     * parameter resolution and output logging.
     */
    @Override
    public void init(IEditorSite site, IEditorInput editorInput)
        throws PartInitException
    {
        super.init(site, editorInput);
        updateResultSetOrientation();
        this.globalScriptContext = new SQLScriptContext(
            null,
            this,
            EditorUtils.getLocalFileFromInput(getEditorInput()),
            new OutputLogWriter(),
            new SQLEditorParametersProvider(getSite()));
    }
    /**
     * Sets the editor input. Missing workspace files are created empty first.
     * If loading the input fails (e.g. OutOfMemory on a huge file) the method
     * recurses once with a synthetic string input showing the stack trace.
     * Datasource binding and syntax rules are then (re)initialized, possibly
     * deferred through the execution queue during application startup.
     */
    @Override
    protected void doSetInput(IEditorInput editorInput)
    {
        // Check for file existence
        try {
            if (editorInput instanceof IFileEditorInput) {
                final IFile file = ((IFileEditorInput) editorInput).getFile();
                if (!file.exists()) {
                    // Create an empty file so the editor can open it
                    file.create(new ByteArrayInputStream(new byte[]{}), true, new NullProgressMonitor());
                }
            }
        } catch (Exception e) {
            log.error("Error checking SQL file", e);
        }
        try {
            super.doSetInput(editorInput);
        } catch (Throwable e) {
            // Something bad may happen. E.g. OutOfMemory error in case of too big input file.
            StringWriter out = new StringWriter();
            e.printStackTrace(new PrintWriter(out, true));
            editorInput = new StringEditorInput("Error", CommonUtils.truncateString(out.toString(), 10000), true, GeneralUtils.UTF8_ENCODING);
            // Recurse once with the synthetic error input (which cannot fail the same way)
            doSetInput(editorInput);
            log.error("Error loading input SQL file", e);
        }
        syntaxLoaded = false;
        Runnable inputinitializer = () -> {
            DBPDataSourceContainer oldDataSource = SQLEditor.this.getDataSourceContainer();
            DBPDataSourceContainer newDataSource = EditorUtils.getInputDataSource(SQLEditor.this.getEditorInput());
            if (oldDataSource != newDataSource) {
                // Datasource changed with the input - rebind and refresh
                SQLEditor.this.dataSourceContainer = null;
                SQLEditor.this.updateDataSourceContainer();
            } else {
                SQLEditor.this.reloadSyntaxRules();
            }
        };
        if (isNonPersistentEditor()) {
            inputinitializer.run();
        } else {
            // Run in queue - for app startup
            UIExecutionQueue.queueExec(inputinitializer);
        }
        setPartName(getEditorName());
        if (isNonPersistentEditor()) {
            setTitleImage(DBeaverIcons.getImage(UIIcon.SQL_CONSOLE));
        }
        editorImage = getTitleImage();
    }
    /**
     * Builds the multi-line tab tooltip: script path, connection name, driver and
     * URL. The script path is derived from whichever editor-input flavor is in
     * use; falls back to the superclass tooltip when no datasource is bound.
     */
    @Override
    public String getTitleToolTip() {
        DBPDataSourceContainer dataSourceContainer = getDataSourceContainer();
        if (dataSourceContainer == null) {
            return super.getTitleToolTip();
        }
        final IEditorInput editorInput = getEditorInput();
        String scriptPath;
        if (editorInput instanceof IFileEditorInput) {
            scriptPath = ((IFileEditorInput) editorInput).getFile().getFullPath().toString();
        } else if (editorInput instanceof IPathEditorInput) {
            scriptPath = ((IPathEditorInput) editorInput).getPath().toString();
        } else if (editorInput instanceof IURIEditorInput) {
            final URI uri = ((IURIEditorInput) editorInput).getURI();
            if ("file".equals(uri.getScheme())) {
                // Local file URI - show the filesystem path instead of the raw URI
                scriptPath = new File(uri).getAbsolutePath();
            } else {
                scriptPath = uri.toString();
            }
        } else if (editorInput instanceof INonPersistentEditorInput) {
            scriptPath = "SQL Console";
        } else {
            scriptPath = editorInput.getName();
            if (CommonUtils.isEmpty(scriptPath)) {
                scriptPath = "<not a file>";
            }
        }
        return
            "Script: " + scriptPath +
                " \nConnection: " + dataSourceContainer.getName() +
                " \nType: " + (dataSourceContainer.getDriver().getFullName()) +
                " \nURL: " + dataSourceContainer.getConnectionConfiguration().getUrl();
    }
/**
 * Computes the editor part name from the user-configurable title pattern,
 * substituting connection/file/driver variables.
 */
private String getEditorName() {
    final IFile file = EditorUtils.getFileFromInput(getEditorInput());
    final String scriptName;
    if (file != null) {
        scriptName = file.getFullPath().removeFileExtension().lastSegment();
    } else {
        // No workspace file: try the local filesystem file, then the raw input name
        final File localFile = EditorUtils.getLocalFileFromInput(getEditorInput());
        scriptName = localFile != null ? localFile.getName() : getEditorInput().getName();
    }

    final DBPDataSourceContainer container = getDataSourceContainer();
    final String pattern = getActivePreferenceStore().getString(SQLPreferenceConstants.SCRIPT_TITLE_PATTERN);
    final Map<String, Object> vars = new HashMap<>();
    vars.put(VAR_CONNECTION_NAME, container == null ? "none" : container.getName());
    vars.put(VAR_FILE_NAME, scriptName);
    vars.put(VAR_FILE_EXT, file == null ? "" : file.getFullPath().getFileExtension());
    vars.put(VAR_DRIVER_NAME, container == null ? "?" : container.getDriver().getFullName());
    return GeneralUtils.replaceVariables(pattern, new GeneralUtils.MapResolver(vars));
}
// No editor-specific focus handling; delegates to the base text editor.
@Override
public void setFocus()
{
    super.setFocus();
}
/**
 * Loads a previously saved query plan into a plan viewer tab.
 * getPlanView(null, planner) reports an error and returns null when the
 * datasource does not support plan extraction.
 */
public void loadQueryPlan() {
    DBCQueryPlanner planner = GeneralUtils.adapt(getDataSource(), DBCQueryPlanner.class);
    ExplainPlanViewer planView = getPlanView(null, planner);
    if (planView != null) {
        // NOTE(review): loadQueryPlan receives the viewer itself as the visualizer argument - looks intentional, confirm against ExplainPlanViewer API
        if (!planView.loadQueryPlan(planner, planView)) {
            closeActiveTab();
        }
    }
}
/**
 * Explains the execution plan for the statement under the cursor (or the
 * current selection). Reports an error status when there is no query or the
 * active element is a control command rather than a real SQL query.
 */
public void explainQueryPlan() {
    // Let registered listeners know a plan explain is about to run
    synchronized (listeners) {
        for (SQLEditorListener listener : listeners) {
            listener.beforeQueryPlanExplain();
        }
    }
    final SQLScriptElement element = extractActiveQuery();
    if (element == null) {
        setStatus(SQLEditorMessages.editors_sql_status_empty_query_string, DBPMessageType.ERROR);
    } else if (element instanceof SQLQuery) {
        explainQueryPlan((SQLQuery) element);
    } else {
        setStatus("Can't explain plan for command", DBPMessageType.ERROR);
    }
}
/**
 * Explains the execution plan for the given query. For QUERY-style planners
 * the plan is obtained by running a generated SQL statement; otherwise a
 * dedicated plan viewer tab is (re)used.
 *
 * @param sqlQuery the query to explain; must not be null
 */
private void explainQueryPlan(SQLQuery sqlQuery) {
    DBCQueryPlanner planner = GeneralUtils.adapt(getDataSource(), DBCQueryPlanner.class);
    if (planner == null) {
        // FIX: planner.getPlanStyle() was called before any null check; adapt()
        // returns null for datasources without plan support, causing an NPE.
        // Mirror the error reporting already done in getPlanView().
        DBWorkbench.getPlatformUI().showError("Execution plan", "Execution plan explain isn't supported by current datasource");
        return;
    }
    DBCPlanStyle planStyle = planner.getPlanStyle();
    if (planStyle == DBCPlanStyle.QUERY) {
        explainPlanFromQuery(planner, sqlQuery);
        return;
    }
    ExplainPlanViewer planView = getPlanView(sqlQuery, planner);
    if (planView != null) {
        planView.explainQueryPlan(sqlQuery, planner);
    }
}
/**
 * Finds or creates the plan viewer tab for the given query.
 * Reuses an existing tab whose viewer holds an equal query; otherwise opens a
 * new tab numbered one past the highest existing plan number.
 *
 * @param sqlQuery query to visualize, or null to open an empty plan viewer
 * @param planner  planner adapter; null means plan explain is unsupported
 * @return the viewer, or null when unsupported or parameter binding was cancelled
 */
private ExplainPlanViewer getPlanView(SQLQuery sqlQuery, DBCQueryPlanner planner) {

    // 1. Determine whether planner supports plan extraction

    if (planner == null) {
        DBWorkbench.getPlatformUI().showError("Execution plan", "Execution plan explain isn't supported by current datasource");
        return null;
    }

    // Transform query parameters
    if (sqlQuery != null) {
        if (!transformQueryWithParameters(sqlQuery)) {
            // User cancelled parameter binding
            return null;
        }
    }

    ExplainPlanViewer planView = null;

    // Try to reuse an existing tab showing the same query
    if (sqlQuery != null) {
        for (CTabItem item : resultTabs.getItems()) {
            if (item.getData() instanceof ExplainPlanViewer) {
                ExplainPlanViewer pv = (ExplainPlanViewer) item.getData();
                if (pv.getQuery() != null && pv.getQuery().equals(sqlQuery)) {
                    resultTabs.setSelection(item);
                    planView = pv;
                    break;
                }
            }
        }
    }

    if (planView == null) {
        // Pick the next free plan number across all open plan tabs
        int maxPlanNumber = 0;
        for (CTabItem item : resultTabs.getItems()) {
            if (item.getData() instanceof ExplainPlanViewer) {
                maxPlanNumber = Math.max(maxPlanNumber, ((ExplainPlanViewer) item.getData()).getPlanNumber());
            }
        }
        maxPlanNumber++;

        planView = new ExplainPlanViewer(this, this, resultTabs, maxPlanNumber);
        final CTabItem item = new CTabItem(resultTabs, SWT.CLOSE);
        item.setControl(planView.getControl());
        item.setText(SQLEditorMessages.editors_sql_error_execution_plan_title + " - " + maxPlanNumber);
        if (sqlQuery != null) {
            item.setToolTipText(sqlQuery.getText());
        }
        item.setImage(IMG_EXPLAIN_PLAN);
        item.setData(planView);
        item.addDisposeListener(resultTabDisposeListener);
        UIUtils.disposeControlOnItemDispose(item);
        resultTabs.setSelection(item);
    }
    return planView;
}
/**
 * QUERY-style plan explain: asks the planner for the SQL text that produces
 * the plan (in a background task, max 5s), then executes that text as a
 * regular query in a new result tab.
 */
private void explainPlanFromQuery(final DBCQueryPlanner planner, final SQLQuery sqlQuery) {
    // One-element array so the lambda can write the result out
    final String[] planQueryString = new String[1];
    DBRRunnableWithProgress queryObtainTask = monitor -> {
        try (DBCSession session = getExecutionContext().openSession(monitor, DBCExecutionPurpose.UTIL, "Prepare plan query")) {
            DBCPlan plan = planner.planQueryExecution(session, sqlQuery.getText());
            planQueryString[0] = plan.getPlanQueryString();
        } catch (Exception e) {
            log.error(e);
        }
    };
    if (RuntimeUtils.runTask(queryObtainTask, "Retrieve plan query", 5000) && !CommonUtils.isEmpty(planQueryString[0])) {
        SQLQuery planQuery = new SQLQuery(getDataSource(), planQueryString[0]);
        processQueries(Collections.singletonList(planQuery), false, true, false, true, null);
    }
}
/**
 * Convenience overload: execute the current query or script with no query
 * transformer and no extra listener.
 */
public void processSQL(boolean newTab, boolean script) {
    processSQL(newTab, script, null, null);
}
/**
 * Executes either the whole script / selection (script == true) or the single
 * statement under the cursor, optionally transforming each query first.
 *
 * @param newTab        open each result in a new tab
 * @param script        execute all statements rather than the active one
 * @param transformer   optional per-query rewrite (e.g. row-count wrapping)
 * @param queryListener optional listener notified of query lifecycle events
 * @return true if execution was started
 */
public boolean processSQL(boolean newTab, boolean script, SQLQueryTransformer transformer, @Nullable SQLQueryListener queryListener)
{
    IDocument document = getDocument();
    if (document == null) {
        setStatus(SQLEditorMessages.editors_sql_status_cant_obtain_document, DBPMessageType.ERROR);
        return false;
    }

    // Notify listeners
    synchronized (listeners) {
        for (SQLEditorListener listener : listeners) {
            listener.beforeQueryExecute(script, newTab);
        }
    }

    List<SQLScriptElement> elements;
    if (script) {
        // Execute all SQL statements consequently
        ITextSelection selection = (ITextSelection) getSelectionProvider().getSelection();
        if (selection.getLength() > 1) {
            // Non-trivial selection: run only the selected statements
            elements = extractScriptQueries(selection.getOffset(), selection.getLength(), true, false, true);
        } else {
            elements = extractScriptQueries(0, document.getLength(), true, false, true);
        }
    } else {
        // Execute statement under cursor or selected text (if selection present)
        SQLScriptElement sqlQuery = extractActiveQuery();
        if (sqlQuery == null) {
            ResultSetViewer activeViewer = getActiveResultSetViewer();
            if (activeViewer != null) {
                activeViewer.setStatus(SQLEditorMessages.editors_sql_status_empty_query_string, DBPMessageType.ERROR);
            }
            return false;
        } else {
            elements = Collections.singletonList(sqlQuery);
        }
    }
    try {
        if (transformer != null) {
            DBPDataSource dataSource = getDataSource();
            if (dataSource != null) {
                // Rewrite each real query; control commands pass through untouched
                List<SQLScriptElement> xQueries = new ArrayList<>(elements.size());
                for (SQLScriptElement element : elements) {
                    if (element instanceof SQLQuery) {
                        SQLQuery query = transformer.transformQuery(dataSource, getSyntaxManager(), (SQLQuery) element);
                        if (!CommonUtils.isEmpty(query.getParameters())) {
                            // Re-parse parameters since offsets may have shifted
                            query.setParameters(parseQueryParameters(query));
                        }
                        xQueries.add(query);
                    } else {
                        xQueries.add(element);
                    }
                }
                elements = xQueries;
            }
        }
    }
    catch (DBException e) {
        DBWorkbench.getPlatformUI().showError("Bad query", "Can't execute query", e);
        return false;
    }
    if (!CommonUtils.isEmpty(elements)) {
        return processQueries(elements, script, newTab, false, true, queryListener);
    } else {
        return false;
    }
}
/**
 * Opens the data transfer wizard for the selected queries (or the statement
 * under the cursor when there is no selection). Shows an error when no query
 * could be determined.
 */
public void exportDataFromQuery()
{
    List<SQLScriptElement> elements;
    ITextSelection selection = (ITextSelection) getSelectionProvider().getSelection();
    if (selection.getLength() > 1) {
        elements = extractScriptQueries(selection.getOffset(), selection.getLength(), true, false, true);
    } else {
        elements = new ArrayList<>();
        // FIX: extractActiveQuery() may return null (empty editor / caret outside
        // any statement). Adding null made the list non-empty and pushed a null
        // element into processQueries -> NPE instead of the intended error dialog.
        SQLScriptElement activeQuery = extractActiveQuery();
        if (activeQuery != null) {
            elements.add(activeQuery);
        }
    }

    if (!elements.isEmpty()) {
        processQueries(elements, false, false, true, true, null);
    } else {
        DBWorkbench.getPlatformUI().showError(
            "Extract data",
            "Choose one or more queries to export from");
    }
}
/**
 * Central execution entry point. Optionally (re)establishes the session first
 * (re-entering itself with checkSession=false from the connect callback),
 * confirms dangerous statements and mass parallel execution, manages result
 * tabs (closing extras, honoring pinned tabs) and finally hands the queries
 * to a QueryProcessor - or to the data transfer wizard when export is set.
 *
 * @return true if execution was started (or scheduled after connect)
 */
private boolean processQueries(@NotNull final List<SQLScriptElement> queries, final boolean forceScript, final boolean newTab, final boolean export, final boolean checkSession, @Nullable final SQLQueryListener queryListener)
{
    if (queries.isEmpty()) {
        // Nothing to process
        return false;
    }
    final DBPDataSourceContainer container = getDataSourceContainer();
    if (checkSession) {
        try {
            // When not yet connected: connect asynchronously, then re-enter with checkSession=false
            DBRProgressListener connectListener = status -> {
                if (!status.isOK() || container == null || !container.isConnected()) {
                    DBWorkbench.getPlatformUI().showError(
                        SQLEditorMessages.editors_sql_error_cant_obtain_session,
                        null,
                        status);
                    return;
                }
                updateExecutionContext(() -> UIUtils.syncExec(() ->
                    processQueries(queries, forceScript, newTab, export, false, queryListener)));
            };
            if (!checkSession(connectListener)) {
                return false;
            }
        } catch (DBException ex) {
            ResultSetViewer viewer = getActiveResultSetViewer();
            if (viewer != null) {
                viewer.setStatus(ex.getMessage(), DBPMessageType.ERROR);
            }
            DBWorkbench.getPlatformUI().showError(
                SQLEditorMessages.editors_sql_error_cant_obtain_session,
                ex.getMessage());
            return false;
        }
    }
    if (dataSourceContainer == null) {
        return false;
    }
    if (!dataSourceContainer.hasModifyPermission(DBPDataSourcePermission.PERMISSION_EXECUTE_SCRIPTS)) {
        DBWorkbench.getPlatformUI().showError(
            SQLEditorMessages.editors_sql_error_cant_execute_query_title,
            "Query execution was restricted by connection configuration");
        return false;
    }

    SQLScriptContext scriptContext = createScriptContext();

    final boolean isSingleQuery = !forceScript && (queries.size() == 1);
    if (isSingleQuery && queries.get(0) instanceof SQLQuery) {
        // Single DELETE/UPDATE without WHERE -> ask for confirmation
        SQLQuery query = (SQLQuery) queries.get(0);
        if (query.isDeleteUpdateDangerous()) {
            String targetName = "multiple tables";
            if (query.getSingleSource() != null) {
                targetName = query.getSingleSource().getEntityName();
            }
            if (ConfirmationDialog.showConfirmDialogEx(
                ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
                getSite().getShell(),
                SQLPreferenceConstants.CONFIRM_DANGER_SQL,
                ConfirmationDialog.CONFIRM,
                ConfirmationDialog.WARNING,
                query.getType().name(),
                targetName) != IDialogConstants.OK_ID)
            {
                return false;
            }
        }
    } else if (newTab && queries.size() > MAX_PARALLEL_QUERIES_NO_WARN) {
        // Many queries each in its own tab -> confirm mass parallel execution
        if (ConfirmationDialog.showConfirmDialogEx(
            ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
            getSite().getShell(),
            SQLPreferenceConstants.CONFIRM_MASS_PARALLEL_SQL,
            ConfirmationDialog.CONFIRM,
            ConfirmationDialog.WARNING,
            queries.size()) != IDialogConstants.OK_ID)
        {
            return false;
        }
    }

    // Un-maximize the editor so results become visible
    if (resultsSash.getMaximizedControl() != null) {
        resultsSash.setMaximizedControl(null);
    }

    // Save editor
    if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.AUTO_SAVE_ON_EXECUTE) && isDirty()) {
        doSave(new NullProgressMonitor());
    }

    boolean extraTabsClosed = false;
    if (!export) {
        if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.CLEAR_OUTPUT_BEFORE_EXECUTE)) {
            outputViewer.clearOutput();
        }

        if (!newTab || !isSingleQuery) {
            // We don't need new tab or we are executing a script - so close all extra tabs
            if (!closeExtraResultTabs(null, true)) {
                return false;
            }
            extraTabsClosed = true;
        }
    }

    if (queryProcessors.isEmpty()) {
        // If all tabs were closed
        createQueryProcessor(true, true);
    }

    if (newTab) {
        // Execute each query in a new tab
        for (int i = 0; i < queries.size(); i++) {
            SQLScriptElement query = queries.get(i);
            // First query of a multi-query batch reuses the current processor
            QueryProcessor queryProcessor = (i == 0 && !isSingleQuery ? curQueryProcessor : createQueryProcessor(queries.size() == 1, false));
            queryProcessor.processQueries(
                scriptContext,
                Collections.singletonList(query),
                false,
                true,
                export,
                getActivePreferenceStore().getBoolean(SQLPreferenceConstants.RESULT_SET_CLOSE_ON_ERROR), queryListener);
        }
    } else {
        if (!export) {
            // Use current tab.
            // If current tab was pinned then use first tab
            QueryResultsContainer firstResults = curQueryProcessor.getFirstResults();
            CTabItem tabItem = firstResults.getTabItem();
            if (firstResults.isPinned()) {
                curQueryProcessor = queryProcessors.get(0);
                firstResults = curQueryProcessor.getFirstResults();
                if (firstResults.isPinned()) {
                    // The very first tab is also pinned
                    // Well, let's create a new tab
                    curQueryProcessor = createQueryProcessor(true, true);
                    // Make new tab the default
                    firstResults = curQueryProcessor.getFirstResults();
                    if (firstResults.isPinned()) {
                        tabItem.setShowClose(false);
                    }
                }
            }
            if (!extraTabsClosed) {
                if (!closeExtraResultTabs(curQueryProcessor, true)) {
                    return false;
                }
            }
            if (tabItem != null) {
                // Do not switch tab if Output tab is active
                CTabItem selectedTab = resultTabs.getSelection();
                if (selectedTab == null || selectedTab.getData() != outputViewer.getControl()) {
                    resultTabs.setSelection(tabItem);
                }
            }
        }
        return curQueryProcessor.processQueries(scriptContext, queries, forceScript, false, export, false, queryListener);
    }
    return true;
}
/**
 * Creates a per-execution script context, parented to the editor-wide
 * global context and bound to the editor's backing file (may be null).
 */
@NotNull
private SQLScriptContext createScriptContext() {
    return new SQLScriptContext(
        globalScriptContext,
        SQLEditor.this,
        EditorUtils.getLocalFileFromInput(getEditorInput()),
        new OutputLogWriter(),
        new SQLEditorParametersProvider(getSite()));
}
// Shows a status message in the active result set viewer, if any.
private void setStatus(String status, DBPMessageType messageType)
{
    final ResultSetViewer viewer = getActiveResultSetViewer();
    if (viewer == null) {
        return;
    }
    viewer.setStatus(status, messageType);
}
/**
 * Closes extra result/plan tabs (never the first tab, never non-closable
 * tabs), optionally asking the user first.
 *
 * @param queryProcessor restrict closing to this processor's tabs; null = all
 * @param confirmClose   ask for confirmation before closing
 * @return false if the user cancelled
 */
private boolean closeExtraResultTabs(@Nullable QueryProcessor queryProcessor, boolean confirmClose)
{
    // Close all tabs except first one
    List<CTabItem> tabsToClose = new ArrayList<>();
    // Iterate backwards, skipping index 0 (the primary tab is kept)
    for (int i = resultTabs.getItemCount() - 1; i > 0; i--) {
        CTabItem item = resultTabs.getItem(i);
        if (item.getData() instanceof QueryResultsContainer && item.getShowClose()) {
            QueryResultsContainer resultsProvider = (QueryResultsContainer)item.getData();
            if (queryProcessor != null && queryProcessor != resultsProvider.queryProcessor) {
                continue;
            }
            if (queryProcessor != null && queryProcessor.resultContainers.size() < 2) {
                // Do not remove first tab for this processor
                continue;
            }
            tabsToClose.add(item);
        } else if (item.getData() instanceof ExplainPlanViewer) {
            tabsToClose.add(item);
        }
    }

    // NOTE(review): a single extra tab is deliberately left alone (> 1) - confirm this threshold is intended
    if (tabsToClose.size() > 1) {
        int confirmResult = IDialogConstants.YES_ID;
        if (confirmClose) {
            confirmResult = ConfirmationDialog.showConfirmDialog(
                ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
                getSite().getShell(),
                SQLPreferenceConstants.CONFIRM_RESULT_TABS_CLOSE,
                ConfirmationDialog.QUESTION_WITH_CANCEL,
                tabsToClose.size() + 4);
            if (confirmResult == IDialogConstants.CANCEL_ID) {
                return false;
            }
        }
        if (confirmResult == IDialogConstants.YES_ID) {
            for (CTabItem item : tabsToClose) {
                item.dispose();
            }
        }
    }
    return true;
}
/**
 * Binds parameter values into the query via a fresh script context.
 * @return false if the user cancelled parameter input
 */
public boolean transformQueryWithParameters(SQLQuery query) {
    return createScriptContext().fillQueryParameters(query, false);
}
/**
 * Verifies there is a usable session for execution, connecting on demand
 * when the preference allows it and opening a separate execution context
 * when the datasource is configured for per-editor connections.
 *
 * @param onFinish invoked when an async connect (or context init) completes
 * @return true if a session is already usable
 * @throws DBException when there is no connection and auto-connect is disabled
 */
private boolean checkSession(DBRProgressListener onFinish)
    throws DBException
{
    DBPDataSourceContainer ds = getDataSourceContainer();
    if (ds == null) {
        throw new DBException("No active connection");
    }
    if (!ds.isConnected()) {
        boolean doConnect = ds.getPreferenceStore().getBoolean(SQLPreferenceConstants.EDITOR_CONNECT_ON_EXECUTE);
        if (doConnect) {
            return checkConnected(true, onFinish);
        } else {
            throw new DBException("Disconnected from database");
        }
    }
    DBPDataSource dataSource = ds.getDataSource();
    // Editor-private connection requested but not yet opened
    if (dataSource != null && SQLEditorUtils.isOpenSeparateConnection(ds) && executionContext == null) {
        initSeparateConnection(dataSource, () -> onFinish.onTaskFinished(Status.OK_STATUS));
        return executionContext != null;
    }
    return true;
}
/**
 * Handles datasource change action in UI.
 * Refreshes the execution context, then updates the UI on the SWT thread.
 */
private void fireDataSourceChange()
{
    updateExecutionContext(null);
    UIUtils.syncExec(this::onDataSourceChange);
}
/**
 * UI-side reaction to a datasource/context change: refreshes colors, syntax
 * rules, status messages and command enablement. Skips most work when the
 * execution context did not actually change and syntax is already loaded.
 */
private void onDataSourceChange()
{
    if (resultsSash == null || resultsSash.isDisposed()) {
        // UI not (or no longer) built - only syntax matters
        reloadSyntaxRules();
        return;
    }
    DatabaseEditorUtils.setPartBackground(this, resultTabs);

    if (getSourceViewerConfiguration() instanceof SQLEditorSourceViewerConfiguration) {
        ((SQLEditorSourceViewerConfiguration) getSourceViewerConfiguration()).onDataSourceChange();
    }

    DBCExecutionContext executionContext = getExecutionContext();
    if (executionContext != null) {
        // Remember the datasource in the editor input for restart persistence
        EditorUtils.setInputDataSource(getEditorInput(), new SQLNavigatorContext(executionContext));
    }
    if (syntaxLoaded && lastExecutionContext == executionContext) {
        return;
    }
    if (curResultsContainer != null) {
        ResultSetViewer rsv = curResultsContainer.getResultSetController();
        if (rsv != null) {
            if (executionContext == null) {
                rsv.setStatus(ModelMessages.error_not_connected_to_database);
            } else {
                // NOTE(review): message ends with a lone quote; the opening quote presumably lives in the NLS string - verify
                rsv.setStatus(SQLEditorMessages.editors_sql_staus_connected_to + executionContext.getDataSource().getContainer().getName() + "'"); //$NON-NLS-2$
            }
        }
    }

    if (lastExecutionContext == null || executionContext == null || lastExecutionContext.getDataSource() != executionContext.getDataSource()) {
        // Update command states
        SQLEditorPropertyTester.firePropertyChange(SQLEditorPropertyTester.PROP_CAN_EXECUTE);
        SQLEditorPropertyTester.firePropertyChange(SQLEditorPropertyTester.PROP_CAN_EXPLAIN);

        reloadSyntaxRules();
    }

    if (getDataSourceContainer() == null) {
        // No connection: hide the results area entirely
        resultsSash.setMaximizedControl(sqlEditorPanel);
    } else {
        resultsSash.setMaximizedControl(null);
    }

    refreshActions();

    lastExecutionContext = executionContext;
    syntaxLoaded = true;
}
// Nothing to prepare before the datasource connects.
@Override
public void beforeConnect()
{
}
// Stop all running query jobs before the datasource disconnects.
@Override
public void beforeDisconnect()
{
    closeAllJobs();
}
/**
 * Disposes the editor: tears down the extra presentation, releases the
 * datasource container, stops jobs, clears viewer references and - unless the
 * workbench is shutting down - removes the backing file if it stayed empty.
 */
@Override
public void dispose()
{
    if (extraPresentation != null) {
        extraPresentation.dispose();
        extraPresentation = null;
    }
    // Release ds container
    releaseContainer();
    closeAllJobs();

    // Capture the file before super.dispose() invalidates the input
    final IEditorInput editorInput = getEditorInput();
    IFile sqlFile = EditorUtils.getFileFromInput(editorInput);

    logViewer = null;
    outputViewer = null;

    queryProcessors.clear();
    curResultsContainer = null;
    curQueryProcessor = null;

    super.dispose();

    if (sqlFile != null && !PlatformUI.getWorkbench().isClosing()) {
        deleteFileIfEmpty(sqlFile);
    }
}
/**
 * Deletes the script file on editor close when it is empty and the
 * "delete empty scripts" preference allows it. With DELETE_NEW the file is
 * kept if any historical revision ever contained content.
 */
private void deleteFileIfEmpty(IFile sqlFile) {
    if (sqlFile == null || !sqlFile.exists()) {
        return;
    }
    SQLPreferenceConstants.EmptyScriptCloseBehavior closeBehavior =
        SQLPreferenceConstants.EmptyScriptCloseBehavior.getByName(
            getActivePreferenceStore().getString(SQLPreferenceConstants.SCRIPT_DELETE_EMPTY));
    if (closeBehavior == SQLPreferenceConstants.EmptyScriptCloseBehavior.NOTHING) {
        return;
    }
    File localFile = sqlFile.getLocation().toFile();
    if (!localFile.exists() || localFile.length() != 0) {
        // File has content (or already vanished) - keep it
        return;
    }
    try {
        IProgressMonitor monitor = new NullProgressMonitor();
        if (closeBehavior == SQLPreferenceConstants.EmptyScriptCloseBehavior.DELETE_NEW) {
            // Only delete scripts that never had content in any local-history revision
            IFileState[] history = sqlFile.getHistory(monitor);
            if (!ArrayUtils.isEmpty(history)) {
                for (IFileState revision : history) {
                    try (InputStream contents = revision.getContents()) {
                        if (contents.read() != -1) {
                            // A past revision had content - keep the file
                            return;
                        }
                    }
                }
            }
        }
        log.debug("Delete empty SQL script '" + sqlFile.getFullPath().toOSString() + "'");
        sqlFile.delete(true, monitor);
    } catch (Exception e) {
        log.error("Can't delete empty script file", e); //$NON-NLS-1$
    }
}
// Cancels and closes the query job of every processor.
private void closeAllJobs()
{
    queryProcessors.forEach(QueryProcessor::closeJob);
}
// Sums the number of currently running query jobs across all processors.
private int getTotalQueryRunning() {
    return queryProcessors.stream()
        .mapToInt(processor -> processor.curJobRunning.get())
        .sum();
}
/**
 * Reacts to model events for this editor's datasource: detaches the editor
 * when the container is removed, refreshes the title on object updates, and
 * always re-syncs the execution context on the UI thread.
 */
@Override
public void handleDataSourceEvent(final DBPEvent event)
{
    // Event may target the container itself or any object of the same datasource
    final boolean dsEvent = event.getObject() == getDataSourceContainer();
    final boolean objectEvent = event.getObject().getDataSource() == getDataSource();
    if (dsEvent || objectEvent) {
        UIUtils.asyncExec(
            () -> {
                switch (event.getAction()) {
                    case OBJECT_REMOVE:
                        if (dsEvent) {
                            setDataSourceContainer(null);
                        }
                        break;
                    case OBJECT_UPDATE:
                    case OBJECT_SELECT:
                        if (objectEvent) {
                            setPartName(getEditorName());
                            // Active schema was changed? Update title and tooltip
                            firePropertyChange(IWorkbenchPartConstants.PROP_TITLE);
                        }
                        break;
                    default:
                        break;
                }
                updateExecutionContext(null);
                onDataSourceChange();
            }
        );
    }
}
/**
 * Saves the editor. Outside of auto-save, pending result-set data changes are
 * flushed first via a background SaveJob (cancelling the monitor on failure),
 * then the extra presentation and the text content are saved.
 */
@Override
public void doSave(IProgressMonitor monitor) {
    if (!EditorUtils.isInAutoSaveJob()) {
        monitor.beginTask("Save data changes...", 1);
        try {
            monitor.subTask("Save '" + getPartName() + "' changes...");
            SaveJob saveJob = new SaveJob();
            saveJob.schedule();

            // Wait until job finished
            UIUtils.waitJobCompletion(saveJob);
            if (!saveJob.success) {
                // Data save failed: abort the whole save
                monitor.setCanceled(true);
                return;
            }
        } finally {
            monitor.done();
        }
    }

    if (extraPresentation instanceof ISaveablePart) {
        ((ISaveablePart) extraPresentation).doSave(monitor);
    }
    super.doSave(monitor);

    updateDataSourceContainer();
}
// "Save As" (to an external file) is always available for SQL editors.
@Override
public boolean isSaveAsAllowed()
{
    return true;
}
// "Save As" stores the script in an external (non-workspace) file.
@Override
public void doSaveAs()
{
    saveToExternalFile();
}
/**
 * Close-time prompt chain: confirm closing while queries run, delegate to any
 * dirty result-set viewer, offer to end an open transaction, then decide on
 * the script-save prompt itself (auto-save preference forces YES).
 *
 * @return one of the ISaveablePart2 codes (CANCEL / NO / YES / DEFAULT)
 */
@Override
public int promptToSaveOnClose()
{
    int jobsRunning = getTotalQueryRunning();
    if (jobsRunning > 0) {
        log.warn("There are " + jobsRunning + " SQL job(s) still running in the editor");

        if (ConfirmationDialog.showConfirmDialog(
            ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
            null,
            SQLPreferenceConstants.CONFIRM_RUNNING_QUERY_CLOSE,
            ConfirmationDialog.QUESTION,
            jobsRunning) != IDialogConstants.YES_ID)
        {
            return ISaveablePart2.CANCEL;
        }
    }

    // Any dirty result grid gets its own save prompt
    for (QueryProcessor queryProcessor : queryProcessors) {
        for (QueryResultsContainer resultsProvider : queryProcessor.getResultContainers()) {
            ResultSetViewer rsv = resultsProvider.getResultSetController();
            if (rsv != null && rsv.isDirty()) {
                return rsv.promptToSaveOnClose();
            }
        }
    }

    // End transaction
    if (executionContext != null) {
        UIServiceConnections serviceConnections = DBWorkbench.getService(UIServiceConnections.class);
        if (serviceConnections != null && !serviceConnections.checkAndCloseActiveTransaction(new DBCExecutionContext[] {executionContext})) {
            return ISaveablePart2.CANCEL;
        }
    }

    // That's fine
    if (isNonPersistentEditor()) {
        return ISaveablePart2.NO;
    }

    // Cancel running jobs (if any) and close results tabs
    for (QueryProcessor queryProcessor : queryProcessors) {
        queryProcessor.cancelJob();
        // FIXME: it is a hack (to avoid asking "Save script?" because editor is marked as dirty while queries are running)
        // FIXME: make it better
        queryProcessor.curJobRunning.set(0);
    }
    updateDirtyFlag();

    if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.AUTO_SAVE_ON_CLOSE)) {
        return ISaveablePart2.YES;
    }
    return ISaveablePart2.DEFAULT;
}
/**
 * After "Save As" to an external file: rebinds the editor input to the new
 * file store, carrying over the current datasource association.
 */
protected void afterSaveToFile(File saveFile) {
    try {
        IFileStore fileStore = EFS.getStore(saveFile.toURI());
        IEditorInput input = new FileStoreEditorInput(fileStore);

        EditorUtils.setInputDataSource(input, new SQLNavigatorContext(getDataSourceContainer(), getExecutionContext()));
        setInput(input);
    } catch (CoreException e) {
        DBWorkbench.getPlatformUI().showError("File save", "Can't open SQL editor from external file", e);
    }
}
// Returns the result set viewer of the current results tab, or null when none is active.
@Nullable
private ResultSetViewer getActiveResultSetViewer()
{
    return curResultsContainer == null ? null : curResultsContainer.getResultSetController();
}
// Toggles the script-position ruler column in the editor margin.
private void showScriptPositionRuler(boolean show)
{
    final IColumnSupport columnSupport = getAdapter(IColumnSupport.class);
    if (columnSupport == null) {
        return;
    }
    final RulerColumnDescriptor descriptor =
        RulerColumnRegistry.getDefault().getColumnDescriptor(ScriptPositionColumn.ID);
    columnSupport.setColumnVisible(descriptor, show);
}
/**
 * Highlights the given query in the text editor (on the UI thread):
 * either selects it and shows its text in the status, or just scrolls it
 * into view.
 */
private void showStatementInEditor(final SQLQuery query, final boolean select)
{
    UIUtils.runUIJob("Select SQL query in editor", monitor -> {
        if (isDisposed()) {
            return;
        }
        if (select) {
            selectAndReveal(query.getOffset(), query.getLength());
            setStatus(query.getText(), DBPMessageType.INFORMATION);
        } else {
            getSourceViewer().revealRange(query.getOffset(), query.getLength());
        }
    });
}
// Reloads syntax highlighting and also refreshes the output console styles.
@Override
public void reloadSyntaxRules() {
    super.reloadSyntaxRules();
    if (outputViewer != null) {
        outputViewer.refreshStyles();
    }
}
/**
 * Creates a new query processor, makes it (and its first results container)
 * current, and optionally selects its tab.
 */
private QueryProcessor createQueryProcessor(boolean setSelection, boolean makeDefault)
{
    final QueryProcessor processor = new QueryProcessor(makeDefault);
    curQueryProcessor = processor;
    curResultsContainer = processor.getFirstResults();
    if (setSelection) {
        final CTabItem tab = curResultsContainer.getTabItem();
        if (tab != null) {
            resultTabs.setSelection(tab);
        }
    }
    return processor;
}
/**
 * Reacts to preference changes: syntax-affecting keys reload the rules,
 * orientation updates the sash layout, and toggling "separate connection"
 * rebuilds the execution context for the current datasource.
 */
@Override
public void preferenceChange(PreferenceChangeEvent event) {
    switch (event.getProperty()) {
        case ModelPreferences.SCRIPT_STATEMENT_DELIMITER:
        case ModelPreferences.SCRIPT_IGNORE_NATIVE_DELIMITER:
        case ModelPreferences.SCRIPT_STATEMENT_DELIMITER_BLANK:
        case ModelPreferences.SQL_PARAMETERS_ENABLED:
        case ModelPreferences.SQL_ANONYMOUS_PARAMETERS_MARK:
        case ModelPreferences.SQL_ANONYMOUS_PARAMETERS_ENABLED:
        case ModelPreferences.SQL_VARIABLES_ENABLED:
        case ModelPreferences.SQL_NAMED_PARAMETERS_PREFIX:
            reloadSyntaxRules();
            break;
        case SQLPreferenceConstants.RESULT_SET_ORIENTATION:
            updateResultSetOrientation();
            break;
        case SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION: {
            // Save current datasource (we want to keep it here)
            DBPDataSource dataSource = curDataSource;
            releaseExecutionContext();
            // Restore cur data source (as it is reset in releaseExecutionContext)
            curDataSource = dataSource;
            if (dataSource != null && SQLEditorUtils.isOpenSeparateConnection(dataSource.getContainer())) {
                initSeparateConnection(dataSource, null);
            }
            break;
        }
    }
}
/**
 * Layout options for the editor/results sash. Note the SWT orientation is the
 * sash split direction, which is perpendicular to the visual arrangement
 * (HORIZONTAL layout = SWT.VERTICAL split). DETACHED is declared but marked
 * unsupported.
 */
public enum ResultSetOrientation {
    HORIZONTAL(SWT.VERTICAL, SQLEditorMessages.sql_editor_result_set_orientation_horizontal, SQLEditorMessages.sql_editor_result_set_orientation_horizontal_tip, true),
    VERTICAL(SWT.HORIZONTAL, SQLEditorMessages.sql_editor_result_set_orientation_vertical, SQLEditorMessages.sql_editor_result_set_orientation_vertical_tip, true),
    DETACHED(SWT.VERTICAL, SQLEditorMessages.sql_editor_result_set_orientation_detached, SQLEditorMessages.sql_editor_result_set_orientation_detached_tip, false);

    // SWT sash split direction
    private final int sashOrientation;
    // UI label
    private final String label;
    // UI tooltip/description
    private final String description;
    // Whether this orientation can currently be selected
    private final boolean supported;

    ResultSetOrientation(int sashOrientation, String label, String description, boolean supported) {
        this.sashOrientation = sashOrientation;
        this.label = label;
        this.description = description;
        this.supported = supported;
    }

    public int getSashOrientation() {
        return sashOrientation;
    }

    public String getLabel() {
        return label;
    }

    public String getDescription() {
        return description;
    }

    public boolean isSupported() {
        return supported;
    }
}
/**
 * Dynamic menu contribution listing all result-set orientations as radio
 * actions. The currently selected orientation (from the global preference
 * store) is checked; unsupported orientations are disabled. Contributes
 * nothing unless the active editor is a SQL editor.
 */
public static class ResultSetOrientationMenuContributor extends CompoundContributionItem
{
    @Override
    protected IContributionItem[] getContributionItems() {
        IEditorPart activeEditor = UIUtils.getActiveWorkbenchWindow().getActivePage().getActiveEditor();
        if (!(activeEditor instanceof SQLEditorBase)) {
            return new IContributionItem[0];
        }
        final DBPPreferenceStore preferenceStore = DBWorkbench.getPlatform().getPreferenceStore();
        String curPresentation = preferenceStore.getString(SQLPreferenceConstants.RESULT_SET_ORIENTATION);
        ResultSetOrientation[] orientations = ResultSetOrientation.values();
        List<IContributionItem> items = new ArrayList<>(orientations.length);
        for (final ResultSetOrientation orientation : orientations) {
            Action action = new Action(orientation.getLabel(), Action.AS_RADIO_BUTTON) {
                @Override
                public void run() {
                    // Persist the choice; editors react via preferenceChange()
                    preferenceStore.setValue(SQLPreferenceConstants.RESULT_SET_ORIENTATION, orientation.name());
                    PrefUtils.savePreferenceStore(preferenceStore);
                }
            };
            action.setDescription(orientation.getDescription());
            if (!orientation.isSupported()) {
                action.setEnabled(false);
            }
            if (orientation.name().equals(curPresentation)) {
                action.setChecked(true);
            }
            items.add(new ActionContributionItem(action));
        }
        return items.toArray(new IContributionItem[0]);
    }
}
/**
 * Owns one query execution job and its set of result containers (tabs).
 * Receives result sets from the job via the SQLResultsConsumer interface.
 */
public class QueryProcessor implements SQLResultsConsumer {

    // Currently executing (or last) query job; volatile - read from job threads
    private volatile SQLQueryJob curJob;
    // Number of queries currently running in this processor
    private AtomicInteger curJobRunning = new AtomicInteger(0);
    // Result containers (one per result set / tab) owned by this processor
    private final List<QueryResultsContainer> resultContainers = new ArrayList<>();
    // Override receiver used while exporting/fetching; null = use tab receivers
    private volatile DBDDataReceiver curDataReceiver = null;
/**
 * Registers this processor with the editor (at the head of the list when it
 * should become the default) and creates its first results container.
 */
QueryProcessor(boolean makeDefault) {
    // Create first (default) results provider
    if (makeDefault) {
        queryProcessors.add(0, this);
    } else {
        queryProcessors.add(this);
    }
    createResultsProvider(0, makeDefault);
}
// Creates and registers a results container for the given result-set index.
private QueryResultsContainer createResultsProvider(int resultSetNumber, boolean makeDefault) {
    QueryResultsContainer resultsProvider = new QueryResultsContainer(this, resultSetNumber, makeDefault);
    resultContainers.add(resultsProvider);
    return resultsProvider;
}
// Creates a results container bound to an explicit data container (appended last).
private QueryResultsContainer createResultsProvider(DBSDataContainer dataContainer) {
    QueryResultsContainer resultsProvider = new QueryResultsContainer(this, resultContainers.size(), dataContainer);
    resultContainers.add(resultsProvider);
    return resultsProvider;
}
// The primary (first) results container; every processor always has one.
@NotNull
QueryResultsContainer getFirstResults()
{
    return resultContainers.get(0);
}
// Finds the container bound to exactly this query instance (identity match), or null.
@Nullable
QueryResultsContainer getResults(SQLQuery query) {
    return resultContainers.stream()
        .filter(provider -> provider.query == query)
        .findFirst()
        .orElse(null);
}
// Live (unwrapped) list of this processor's result containers.
List<QueryResultsContainer> getResultContainers() {
    return resultContainers;
}
/**
 * Cancels the current job (if running), detaches it, and closes its
 * resources in a bounded background task (2s) when it is still open.
 */
private void closeJob()
{
    // Snapshot the volatile field once
    final SQLQueryJob job = curJob;
    if (job != null) {
        if (job.getState() == Job.RUNNING) {
            job.cancel();
        }
        curJob = null;
        if (job.isJobOpen()) {
            RuntimeUtils.runTask(monitor -> {
                job.closeJob();
            }, "Close SQL job", 2000, true);
        }
    }
}
/**
 * Cancels all viewer-level jobs of every results container, then cancels the
 * running query job itself (without closing/detaching it - see closeJob).
 */
public void cancelJob() {
    for (QueryResultsContainer rc : resultContainers) {
        rc.viewer.cancelJobs();
    }
    final SQLQueryJob job = curJob;
    if (job != null) {
        if (job.getState() == Job.RUNNING) {
            job.cancel();
        }
    }
}
/**
 * Runs the given queries in this processor. For export, builds data transfer
 * producers and opens the wizard instead of executing. A single query resets
 * the primary viewer and runs synchronously via its refresh; multi-query
 * scripts are scheduled as a background job.
 *
 * @return true if execution (or the export wizard) was started
 */
boolean processQueries(SQLScriptContext scriptContext, final List<SQLScriptElement> queries, boolean forceScript, final boolean fetchResults, boolean export, boolean closeTabOnError, SQLQueryListener queryListener)
{
    if (queries.isEmpty()) {
        // Nothing to process
        return false;
    }
    if (curJobRunning.get() > 0) {
        // Refuse to start while a previous job is still active
        DBWorkbench.getPlatformUI().showError(
            SQLEditorMessages.editors_sql_error_cant_execute_query_title,
            SQLEditorMessages.editors_sql_error_cant_execute_query_message);
        return false;
    }
    final DBCExecutionContext executionContext = getExecutionContext();
    if (executionContext == null) {
        DBWorkbench.getPlatformUI().showError(
            SQLEditorMessages.editors_sql_error_cant_execute_query_title,
            ModelMessages.error_not_connected_to_database);
        return false;
    }
    final boolean isSingleQuery = !forceScript && (queries.size() == 1);

    // Prepare execution job
    {
        showScriptPositionRuler(true);
        QueryResultsContainer resultsContainer = getFirstResults();

        SQLEditorQueryListener listener = new SQLEditorQueryListener(this, closeTabOnError);
        if (queryListener != null) {
            listener.setExtListener(queryListener);
        }

        if (export) {
            // Export mode: build one producer per query, run control commands inline
            List<IDataTransferProducer> producers = new ArrayList<>();
            for (int i = 0; i < queries.size(); i++) {
                SQLScriptElement element = queries.get(i);
                if (element instanceof SQLControlCommand) {
                    try {
                        scriptContext.executeControlCommand((SQLControlCommand) element);
                    } catch (DBException e) {
                        DBWorkbench.getPlatformUI().showError("Command error", "Error processing control command", e);
                    }
                } else {
                    SQLQuery query = (SQLQuery) element;
                    scriptContext.fillQueryParameters(query, false);

                    SQLQueryDataContainer dataContainer = new SQLQueryDataContainer(SQLEditor.this, query, scriptContext, log);
                    producers.add(new DatabaseTransferProducer(dataContainer, null));
                }
            }

            DataTransferWizard.openWizard(
                getSite().getWorkbenchWindow(),
                producers,
                null,
                new StructuredSelection(this));
        } else {
            final SQLQueryJob job = new SQLQueryJob(
                getSite(),
                isSingleQuery ? SQLEditorMessages.editors_sql_job_execute_query : SQLEditorMessages.editors_sql_job_execute_script,
                executionContext,
                resultsContainer,
                queries,
                scriptContext,
                this,
                listener);

            if (isSingleQuery) {
                resultsContainer.query = queries.get(0);

                closeJob();
                curJob = job;
                ResultSetViewer rsv = resultsContainer.getResultSetController();
                if (rsv != null) {
                    // Fresh run: clear filter/history and trigger data refresh
                    rsv.resetDataFilter(false);
                    rsv.resetHistory();
                    rsv.refresh();
                }
            } else {
                if (fetchResults) {
                    job.setFetchResultSets(true);
                }
                job.schedule();
                curJob = job;
            }
        }
    }
    return true;
}
/**
 * Tells whether any result set opened by this query processor holds unsaved changes.
 *
 * @return true if at least one result set viewer reports itself dirty
 */
public boolean isDirty() {
    return resultContainers.stream()
        .map(QueryResultsContainer::getResultSetController)
        .anyMatch(rsv -> rsv != null && rsv.isDirty());
}
/**
 * Unregisters a results tab from this processor. When the last tab is gone the
 * processor itself is removed from the editor, and - if it was the active one -
 * the active processor/container pointers are moved to the first remaining
 * processor (or cleared when none is left).
 */
void removeResults(QueryResultsContainer resultsContainer) {
    resultContainers.remove(resultsContainer);
    if (!resultContainers.isEmpty()) {
        return;
    }
    queryProcessors.remove(this);
    if (curQueryProcessor != this) {
        return;
    }
    if (queryProcessors.isEmpty()) {
        curQueryProcessor = null;
        curResultsContainer = null;
    } else {
        curQueryProcessor = queryProcessors.get(0);
        curResultsContainer = curQueryProcessor.getFirstResults();
    }
}
@Nullable
@Override
public DBDDataReceiver getDataReceiver(final SQLQuery statement, final int resultSetNumber) {
// A custom receiver (set e.g. while exporting data) overrides the tab's own receiver
if (curDataReceiver != null) {
return curDataReceiver;
}
// Statistics pseudo-results are tagged via the query's data payload
final boolean isStatsResult = (statement != null && statement.getData() == SQLQueryJob.STATS_RESULTS);
// if (isStatsResult) {
// // Maybe it was already open
// for (QueryResultsProvider provider : resultContainers) {
// if (provider.query != null && provider.query.getData() == SQLQueryJob.STATS_RESULTS) {
// resultSetNumber = provider.resultSetNumber;
// break;
// }
// }
// }
if (resultSetNumber >= resultContainers.size() && !isDisposed()) {
// Open new results processor in UI thread
UIUtils.syncExec(() -> createResultsProvider(resultSetNumber, false));
}
// Re-check: syncExec may have failed to create the tab (editor disposed meanwhile)
if (resultSetNumber >= resultContainers.size()) {
// Editor seems to be disposed - no data receiver
return null;
}
final QueryResultsContainer resultsProvider = resultContainers.get(resultSetNumber);
if (statement != null && !resultTabs.isDisposed()) {
resultsProvider.query = statement;
resultsProvider.lastGoodQuery = statement;
String tabName = null;
String toolTip = CommonUtils.truncateString(statement.getText(), 1000);
// Special statements (not real statements) have their name in data
if (isStatsResult) {
tabName = "Statistics";
int queryIndex = queryProcessors.indexOf(QueryProcessor.this);
if (queryIndex > 0) {
tabName += " - " + (queryIndex + 1);
}
}
// Must be effectively final for the lambda below
String finalTabName = tabName;
// Tab rename has to happen on the UI thread
UIUtils.asyncExec(() -> resultsProvider.updateResultsName(finalTabName, toolTip));
}
ResultSetViewer rsv = resultsProvider.getResultSetController();
return rsv == null ? null : rsv.getDataReceiver();
}
}
/**
 * A single results tab of the SQL editor: wraps one {@link ResultSetViewer} and
 * adapts it to {@link DBSDataContainer} so the viewer can (re)read data by
 * re-running the owning query job. When used for associations navigation,
 * {@code dataContainer} is non-null and all data access is delegated to it.
 */
public class QueryResultsContainer implements DBSDataContainer, IResultSetContainer, IResultSetListener, SQLQueryContainer, ISmartTransactionManager {
private final QueryProcessor queryProcessor;
private final ResultSetViewer viewer;
// 0-based index of this result set within a multi-result query
private final int resultSetNumber;
// Query currently bound to this tab (may also be a control command)
private SQLScriptElement query = null;
// Last query that produced results successfully; used for naming
private SQLScriptElement lastGoodQuery = null;
// Data container and filter are non-null only in case of associations navigation
private DBSDataContainer dataContainer;
private QueryResultsContainer(QueryProcessor queryProcessor, int resultSetNumber, boolean makeDefault)
{
this.queryProcessor = queryProcessor;
this.resultSetNumber = resultSetNumber;
// NOTE(review): detachedViewer is hardcoded false, so the SQLResultsView
// branch below is currently dead code kept for a future detached-results mode
boolean detachedViewer = false;
SQLResultsView sqlView = null;
if (detachedViewer) {
try {
sqlView = (SQLResultsView) getSite().getPage().showView(SQLResultsView.VIEW_ID, null, IWorkbenchPage.VIEW_CREATE);
} catch (Throwable e) {
DBWorkbench.getPlatformUI().showError("Detached results", "Can't open results view", e);
}
}
if (sqlView != null) {
// Detached results viewer
sqlView.setContainer(this);
this.viewer = sqlView.getViewer();
} else {
// Embedded results viewer
this.viewer = new ResultSetViewer(resultTabs, getSite(), this);
this.viewer.addListener(this);
int tabCount = resultTabs.getItemCount();
int tabIndex = 0;
// Unless this becomes the default tab, insert it right after the last existing results tab
if (!makeDefault) {
for (int i = tabCount; i > 0; i--) {
if (resultTabs.getItem(i - 1).getData() instanceof QueryResultsContainer) {
tabIndex = i;
break;
}
}
}
CTabItem tabItem = new CTabItem(resultTabs, SWT.NONE, tabIndex);
int queryIndex = queryProcessors.indexOf(queryProcessor);
String tabName = getResultsTabName(resultSetNumber, queryIndex, null);
tabItem.setText(tabName);
tabItem.setImage(IMG_DATA_GRID);
tabItem.setData(this);
tabItem.setShowClose(true);
CSSUtils.setCSSClass(tabItem, DBStyles.COLORED_BY_CONNECTION_TYPE);
tabItem.setControl(viewer.getControl());
tabItem.addDisposeListener(resultTabDisposeListener);
UIUtils.disposeControlOnItemDispose(tabItem);
}
// Unregister this container when its viewer control goes away
viewer.getControl().addDisposeListener(e -> {
QueryResultsContainer.this.queryProcessor.removeResults(QueryResultsContainer.this);
if (QueryResultsContainer.this == curResultsContainer) {
curResultsContainer = null;
}
});
}
// Constructor for associations navigation: delegates data access to the given container
QueryResultsContainer(QueryProcessor queryProcessor, int resultSetNumber, DBSDataContainer dataContainer) {
this(queryProcessor, resultSetNumber, false);
this.dataContainer = dataContainer;
updateResultsName(getResultsTabName(resultSetNumber, 0, dataContainer.getName()), null);
}
private CTabItem getTabItem() {
return getTabItem(this);
}
// Linear scan of the tab folder; tab items carry their container in getData()
private CTabItem getTabItem(QueryResultsContainer resultsContainer) {
for (CTabItem item : resultTabs.getItems()) {
if (item.getData() == resultsContainer) {
return item;
}
}
return null;
}
// Updates tab caption/tooltip; an empty name leaves the caption untouched
void updateResultsName(String resultSetName, String toolTip) {
CTabItem tabItem = getTabItem();
if (tabItem != null && !tabItem.isDisposed()) {
if (!CommonUtils.isEmpty(resultSetName)) {
tabItem.setText(resultSetName);
}
tabItem.setToolTipText(toolTip);
}
}
// A "pinned" tab is one whose close button is hidden
boolean isPinned() {
CTabItem tabItem = getTabItem();
return tabItem != null && !tabItem.isDisposed() && !tabItem.getShowClose();
}
void setPinned(boolean pinned) {
CTabItem tabItem = getTabItem();
if (tabItem != null) {
tabItem.setShowClose(!pinned);
tabItem.setImage(pinned ? IMG_DATA_GRID_LOCKED : IMG_DATA_GRID);
}
}
@NotNull
@Override
public DBPProject getProject() {
return SQLEditor.this.getProject();
}
@Override
public DBCExecutionContext getExecutionContext() {
return SQLEditor.this.getExecutionContext();
}
@Nullable
@Override
public ResultSetViewer getResultSetController()
{
return viewer;
}
@Nullable
@Override
public DBSDataContainer getDataContainer()
{
return this;
}
// Ready when no job is attached or the attached job has no running queries
@Override
public boolean isReadyToRun()
{
return queryProcessor.curJob == null || queryProcessor.curJobRunning.get() <= 0;
}
// Opens a new tab for an association navigation target and refreshes it with the filter
@Override
public void openNewContainer(DBRProgressMonitor monitor, @NotNull DBSDataContainer dataContainer, @NotNull DBDDataFilter newFilter) {
UIUtils.syncExec(() -> {
QueryResultsContainer resultsProvider = queryProcessor.createResultsProvider(dataContainer);
CTabItem tabItem = getTabItem(resultsProvider);
if (tabItem != null) {
tabItem.getParent().setSelection(tabItem);
}
setActiveResultsContainer(resultsProvider);
resultsProvider.viewer.refreshWithFilter(newFilter);
});
}
@Override
public IResultSetDecorator createResultSetDecorator() {
return new QueryResultsDecorator() {
// Empty-grid hint shows the current key bindings for execute query/script
@Override
public String getEmptyDataDescription() {
String execQuery = ActionUtils.findCommandDescription(SQLEditorCommands.CMD_EXECUTE_STATEMENT, getSite(), true);
String execScript = ActionUtils.findCommandDescription(SQLEditorCommands.CMD_EXECUTE_SCRIPT, getSite(), true);
return NLS.bind(ResultSetMessages.sql_editor_resultset_filter_panel_control_execute_to_see_reslut, execQuery, execScript);
}
};
}
@Override
public int getSupportedFeatures()
{
if (dataContainer != null) {
return dataContainer.getSupportedFeatures();
}
int features = DATA_SELECT;
features |= DATA_COUNT;
// Filtering is disabled when the same query feeds multiple result tabs
if (getQueryResultCounts() <= 1) {
features |= DATA_FILTER;
}
return features;
}
/**
 * Reads data by re-running the current query job for this tab's result set.
 * Delegates to {@code dataContainer} in navigation mode.
 *
 * @throws DBCException if no job is active or execution fails
 */
@NotNull
@Override
public DBCStatistics readData(@NotNull DBCExecutionSource source, @NotNull DBCSession session, @NotNull DBDDataReceiver dataReceiver, DBDDataFilter dataFilter, long firstRow, long maxRows, long flags, int fetchSize) throws DBCException
{
if (dataContainer != null) {
return dataContainer.readData(source, session, dataReceiver, dataFilter, firstRow, maxRows, flags, fetchSize);
}
final SQLQueryJob job = queryProcessor.curJob;
if (job == null) {
throw new DBCException("No active query - can't read data");
}
// Query-level LIMIT/OFFSET overrides the caller-supplied window
if (this.query instanceof SQLQuery) {
SQLQuery query = (SQLQuery) this.query;
if (query.getResultsMaxRows() >= 0) {
firstRow = query.getResultsOffset();
maxRows = query.getResultsMaxRows();
}
}
try {
if (dataReceiver != viewer.getDataReceiver()) {
// Some custom receiver. Probably data export
queryProcessor.curDataReceiver = dataReceiver;
} else {
queryProcessor.curDataReceiver = null;
}
// Count number of results for this query. If > 1 then we will refresh them all at once
int resultCounts = getQueryResultCounts();
if (resultCounts <= 1 && resultSetNumber > 0) {
job.setFetchResultSetNumber(resultSetNumber);
} else {
job.setFetchResultSetNumber(-1);
}
job.setResultSetLimit(firstRow, maxRows);
job.setDataFilter(dataFilter);
job.setFetchSize(fetchSize);
job.setFetchFlags(flags);
job.extractData(session, this.query, resultCounts > 1 ? 0 : resultSetNumber);
lastGoodQuery = job.getLastGoodQuery();
return job.getStatistics();
} finally {
// Nullify custom data receiver
queryProcessor.curDataReceiver = null;
}
}
// Number of result tabs fed by this tab's query (multi-result statements)
private int getQueryResultCounts() {
int resultCounts = 0;
for (QueryResultsContainer qrc : queryProcessor.resultContainers) {
if (qrc.query == query) {
resultCounts++;
}
}
return resultCounts;
}
/**
 * Counts rows by wrapping the current query into a SELECT COUNT(*) transform
 * and executing it on the given session.
 *
 * @throws DBCException for control commands, missing data source, or execution errors
 */
@Override
public long countData(@NotNull DBCExecutionSource source, @NotNull DBCSession session, DBDDataFilter dataFilter, long flags)
throws DBCException
{
if (dataContainer != null) {
return dataContainer.countData(source, session, dataFilter, DBSDataContainer.FLAG_NONE);
}
DBPDataSource dataSource = getDataSource();
if (dataSource == null) {
throw new DBCException("Query transform is not supported by datasource");
}
if (!(query instanceof SQLQuery)) {
throw new DBCException("Can't count rows for control command");
}
try {
SQLQuery countQuery = new SQLQueryTransformerCount().transformQuery(dataSource, getSyntaxManager(), (SQLQuery) query);
if (!CommonUtils.isEmpty(countQuery.getParameters())) {
countQuery.setParameters(parseQueryParameters(countQuery));
}
try (DBCStatement dbStatement = DBUtils.makeStatement(source, session, DBCStatementType.QUERY, countQuery, 0, 0)) {
if (dbStatement.executeStatement()) {
try (DBCResultSet rs = dbStatement.openResultSet()) {
if (rs.nextRow()) {
List<DBCAttributeMetaData> resultAttrs = rs.getMeta().getAttributes();
Object countValue = null;
if (resultAttrs.size() == 1) {
countValue = rs.getAttributeValue(0);
} else {
// In some databases (Influx?) SELECT count(*) produces multiple columns. Try to find first one with 'count' in its name.
for (int i = 0; i < resultAttrs.size(); i++) {
DBCAttributeMetaData ma = resultAttrs.get(i);
if (ma.getName().toLowerCase(Locale.ENGLISH).contains("count")) {
countValue = rs.getAttributeValue(i);
break;
}
}
}
if (countValue instanceof Number) {
return ((Number) countValue).longValue();
} else {
throw new DBCException("Unexpected row count value: " + countValue);
}
} else {
throw new DBCException("Row count result is empty");
}
}
} else {
throw new DBCException("Row count query didn't return any value");
}
}
} catch (DBException e) {
throw new DBCException("Error executing row count", e);
}
}
@Nullable
@Override
public String getDescription()
{
if (dataContainer != null) {
return dataContainer.getDescription();
} else {
return SQLEditorMessages.editors_sql_description;
}
}
@Nullable
@Override
public DBSObject getParentObject()
{
return getDataSource();
}
@Nullable
@Override
public DBPDataSource getDataSource()
{
return SQLEditor.this.getDataSource();
}
@Override
public boolean isPersisted() {
return dataContainer == null || dataContainer.isPersisted();
}
// Name falls back: delegate container -> last good query text -> current query text -> "SQL"
@NotNull
@Override
public String getName()
{
if (dataContainer != null) {
return dataContainer.getName();
}
String name = lastGoodQuery != null ?
lastGoodQuery.getOriginalText() :
(query == null ? null : query.getOriginalText());
if (name == null) {
name = "SQL";
}
return name;
}
@Nullable
@Override
public DBPDataSourceContainer getDataSourceContainer() {
return SQLEditor.this.getDataSourceContainer();
}
@Override
public String toString() {
if (dataContainer != null) {
return dataContainer.toString();
}
return query == null ?
"SQL Query / " + SQLEditor.this.getEditorInput().getName() :
query.getOriginalText();
}
@Override
public void handleResultSetLoad() {
}
// Any result set change may flip the editor's dirty state
@Override
public void handleResultSetChange() {
updateDirtyFlag();
}
@Override
public void handleResultSetSelectionChange(SelectionChangedEvent event) {
}
@Override
public SQLScriptElement getQuery() {
return query;
}
@Override
public Map<String, Object> getQueryParameters() {
return globalScriptContext.getAllParameters();
}
@Override
public boolean isSmartAutoCommit() {
return SQLEditor.this.isSmartAutoCommit();
}
@Override
public void setSmartAutoCommit(boolean smartAutoCommit) {
SQLEditor.this.setSmartAutoCommit(smartAutoCommit);
}
}
/**
 * Builds the caption for a results tab. The base name is the supplied name or,
 * when empty, the localized default grid title. A 1-based suffix is appended
 * for secondary result sets, or (for first result sets only) for secondary queries.
 */
private String getResultsTabName(int resultSetNumber, int queryIndex, String name) {
    StringBuilder tabName = new StringBuilder(
        CommonUtils.isEmpty(name) ? SQLEditorMessages.editors_sql_data_grid : name);
    if (resultSetNumber > 0) {
        tabName.append(" - ").append(resultSetNumber + 1);
    } else if (queryIndex > 0) {
        tabName.append(" - ").append(queryIndex + 1);
    }
    return tabName.toString();
}
/**
 * Listens to query/script execution lifecycle events and keeps the editor UI
 * (title image, dirty flag, result tabs, cursor position, output) in sync.
 * Every callback forwards to an optional external listener in its finally block,
 * so the forwarding happens even if the internal handling throws.
 */
private class SQLEditorQueryListener implements SQLQueryListener {
private final QueryProcessor queryProcessor;
// True while a multi-statement script is being executed
private boolean scriptMode;
// Timestamp of the last editor-scroll update; throttles UI refresh during scripts
private long lastUIUpdateTime;
// Selection captured at job start so it can be restored after execution
private final ITextSelection originalSelection = (ITextSelection) getSelectionProvider().getSelection();
private int topOffset, visibleLength;
private boolean closeTabOnError;
private SQLQueryListener extListener;
private SQLEditorQueryListener(QueryProcessor queryProcessor, boolean closeTabOnError) {
this.queryProcessor = queryProcessor;
this.closeTabOnError = closeTabOnError;
}
public SQLQueryListener getExtListener() {
return extListener;
}
public void setExtListener(SQLQueryListener extListener) {
this.extListener = extListener;
}
@Override
public void onStartScript() {
try {
lastUIUpdateTime = -1;
scriptMode = true;
UIUtils.asyncExec(() -> {
if (isDisposed()) {
return;
}
// Optionally hide the results pane while the script runs
if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.MAXIMIZE_EDITOR_ON_SCRIPT_EXECUTE)) {
resultsSash.setMaximizedControl(sqlEditorPanel);
}
});
} finally {
if (extListener != null) extListener.onStartScript();
}
}
@Override
public void onStartQuery(DBCSession session, final SQLQuery query) {
try {
if (isSmartAutoCommit()) {
DBExecUtils.checkSmartAutoCommit(session, query.getText());
}
// Only the first concurrently running query switches the title image
boolean isInExecute = getTotalQueryRunning() > 0;
if (!isInExecute) {
UIUtils.asyncExec(() -> {
setTitleImage(DBeaverIcons.getImage(UIIcon.SQL_SCRIPT_EXECUTE));
updateDirtyFlag();
});
}
queryProcessor.curJobRunning.incrementAndGet();
synchronized (runningQueries) {
runningQueries.add(query);
}
// Throttle scroll/highlight updates to SCRIPT_UI_UPDATE_PERIOD
if (lastUIUpdateTime < 0 || System.currentTimeMillis() - lastUIUpdateTime > SCRIPT_UI_UPDATE_PERIOD) {
UIUtils.asyncExec(() -> {
TextViewer textViewer = getTextViewer();
if (textViewer != null) {
topOffset = textViewer.getTopIndexStartOffset();
visibleLength = textViewer.getBottomIndexEndOffset() - topOffset;
}
});
if (scriptMode) {
showStatementInEditor(query, false);
}
lastUIUpdateTime = System.currentTimeMillis();
}
} finally {
if (extListener != null) extListener.onStartQuery(session, query);
}
}
@Override
public void onEndQuery(final DBCSession session, final SQLQueryResult result, DBCStatistics statistics) {
try {
synchronized (runningQueries) {
runningQueries.remove(result.getStatement());
}
queryProcessor.curJobRunning.decrementAndGet();
// Restore the normal title image once no query is running anywhere
if (getTotalQueryRunning() <= 0) {
UIUtils.asyncExec(() -> {
setTitleImage(editorImage);
updateDirtyFlag();
});
}
if (isDisposed()) {
return;
}
UIUtils.runUIJob("Process SQL query result", monitor -> {
// Finish query
processQueryResult(monitor, result, statistics);
// Update dirty flag
updateDirtyFlag();
refreshActions();
});
} finally {
if (extListener != null) extListener.onEndQuery(session, result, statistics);
}
}
// Runs in the UI thread; updates status, selection, tab names, and notifications
private void processQueryResult(DBRProgressMonitor monitor, SQLQueryResult result, DBCStatistics statistics) {
dumpQueryServerOutput(result);
if (!scriptMode) {
runPostExecuteActions(result);
}
SQLQuery query = result.getStatement();
Throwable error = result.getError();
if (error != null) {
setStatus(GeneralUtils.getFirstMessage(error), DBPMessageType.ERROR);
// Try to position the cursor at the error; fall back to selecting the whole query
if (!scrollCursorToError(monitor, query, error)) {
int errorQueryOffset = query.getOffset();
int errorQueryLength = query.getLength();
if (errorQueryOffset >= 0 && errorQueryLength > 0) {
if (scriptMode) {
getSelectionProvider().setSelection(new TextSelection(errorQueryOffset, errorQueryLength));
} else {
getSelectionProvider().setSelection(originalSelection);
}
}
}
} else if (!scriptMode && getActivePreferenceStore().getBoolean(SQLPreferenceConstants.RESET_CURSOR_ON_EXECUTE)) {
getSelectionProvider().setSelection(originalSelection);
}
// Get results window (it is possible that it was closed till that moment
{
for (QueryResultsContainer cr : queryProcessor.resultContainers) {
cr.viewer.updateFiltersText(false);
}
// Set tab names by query results names
if (scriptMode || queryProcessor.getResultContainers().size() > 0) {
int queryIndex = queryProcessors.indexOf(queryProcessor);
int resultsIndex = 0;
for (QueryResultsContainer results : queryProcessor.resultContainers) {
if (results.query != query) {
continue;
}
if (resultsIndex < result.getExecuteResults().size()) {
SQLQueryResult.ExecuteResult executeResult = result.getExecuteResults(resultsIndex, true);
String resultSetName = getResultsTabName(results.resultSetNumber, queryIndex, executeResult.getResultSetName());
results.updateResultsName(resultSetName, null);
ResultSetViewer resultSetViewer = results.getResultSetController();
if (resultSetViewer != null) {
resultSetViewer.getModel().setStatistics(statistics);
}
}
resultsIndex++;
}
}
}
// Close tab on error
if (closeTabOnError && error != null) {
CTabItem tabItem = queryProcessor.getFirstResults().getTabItem();
if (tabItem != null && tabItem.getShowClose()) {
tabItem.dispose();
}
}
// Beep
if (dataSourceContainer != null && !scriptMode && getActivePreferenceStore().getBoolean(SQLPreferenceConstants.BEEP_ON_QUERY_END)) {
Display.getCurrent().beep();
}
// Notify agent
if (result.getQueryTime() > DBWorkbench.getPlatformUI().getLongOperationTimeout() * 1000) {
DBWorkbench.getPlatformUI().notifyAgent(
"Query completed [" + getEditorInput().getName() + "]" + GeneralUtils.getDefaultLineSeparator() +
CommonUtils.truncateString(query.getText(), 200), !result.hasError() ? IStatus.INFO : IStatus.ERROR);
}
}
@Override
public void onEndScript(final DBCStatistics statistics, final boolean hasErrors) {
try {
if (isDisposed()) {
return;
}
runPostExecuteActions(null);
UIUtils.asyncExec(() -> {
if (isDisposed()) {
// Editor closed
return;
}
// Restore the editor/results split hidden by onStartScript
resultsSash.setMaximizedControl(null);
if (!hasErrors) {
getSelectionProvider().setSelection(originalSelection);
}
QueryResultsContainer results = queryProcessor.getFirstResults();
ResultSetViewer viewer = results.getResultSetController();
if (viewer != null) {
viewer.getModel().setStatistics(statistics);
viewer.updateStatusMessage();
}
});
} finally {
if (extListener != null) extListener.onEndScript(statistics, hasErrors);
}
}
}
// Fires PROP_DIRTY so the workbench re-queries isDirty() and refreshes the editor's dirty marker.
public void updateDirtyFlag() {
firePropertyChange(IWorkbenchPartConstants.PROP_DIRTY);
}
/**
 * Routes find/replace to whichever component currently has focus:
 * a StyledText hosted in the active results tab, the active result set viewer,
 * or the SQL text editor itself. Remembers the last focus owner so the target
 * stays stable while focus is momentarily elsewhere (e.g. in the find dialog).
 */
private class FindReplaceTarget extends DynamicFindReplaceTarget {
private boolean lastFocusInEditor = true;
@Override
public IFindReplaceTarget getTarget() {
ResultSetViewer rsv = getActiveResultSetViewer();
TextViewer textViewer = getTextViewer();
boolean focusInEditor = textViewer != null && textViewer.getTextWidget().isFocusControl();
CTabItem activeResultsTab = getActiveResultsTab();
// A results tab may host a plain StyledText (e.g. a text presentation); prefer it when the editor lacks focus
if (activeResultsTab != null && activeResultsTab.getData() instanceof StyledText) {
StyledText styledText = (StyledText) activeResultsTab.getData();
if (!focusInEditor) {
return new StyledTextFindReplaceTarget(styledText);
}
}
if (!focusInEditor) {
if (rsv != null && rsv.getActivePresentation().getControl().isFocusControl()) {
// NOTE(review): assignment is redundant (focusInEditor is already false here);
// the branch only exists to skip the lastFocusInEditor fallback
focusInEditor = false;
} else {
// Neither editor nor result set has focus - keep the previous owner
focusInEditor = lastFocusInEditor;
}
}
lastFocusInEditor = focusInEditor;
if (!focusInEditor && rsv != null) {
IFindReplaceTarget nested = rsv.getAdapter(IFindReplaceTarget.class);
if (nested != null) {
return nested;
}
} else if (textViewer != null) {
return textViewer.getFindReplaceTarget();
}
return null;
}
}
/**
 * Selection provider that follows keyboard focus: delegates to the active
 * result set viewer when it has focus, otherwise to the SQL text viewer.
 * Mirrors the focus-tracking logic of {@code FindReplaceTarget}.
 */
private class DynamicSelectionProvider extends CompositeSelectionProvider {
private boolean lastFocusInEditor = true;
@Override
public ISelectionProvider getProvider() {
ResultSetViewer rsv = getActiveResultSetViewer();
TextViewer textViewer = getTextViewer();
boolean focusInEditor = textViewer != null && textViewer.getTextWidget().isFocusControl();
if (!focusInEditor) {
if (rsv != null && rsv.getActivePresentation().getControl().isFocusControl()) {
// NOTE(review): redundant assignment (already false); branch only bypasses the fallback below
focusInEditor = false;
} else {
// Nobody has focus right now - stick with the last known owner
focusInEditor = lastFocusInEditor;
}
}
lastFocusInEditor = focusInEditor;
if (!focusInEditor && rsv != null) {
return rsv;
} else if (textViewer != null) {
return textViewer.getSelectionProvider();
} else {
return null;
}
}
}
/**
 * Queues a server-output read request for the given query result.
 * Uses the data source's own output reader when available; otherwise falls back
 * to the default reader (only when there is an actual result to read for).
 * The request is picked up asynchronously by the ServerOutputReader job.
 */
private void dumpQueryServerOutput(@Nullable SQLQueryResult result) {
    final DBCExecutionContext executionContext = getExecutionContext();
    if (executionContext == null) {
        return;
    }
    // Dump server output
    DBCServerOutputReader outputReader =
        DBUtils.getAdapter(DBCServerOutputReader.class, executionContext.getDataSource());
    if (outputReader == null && result != null) {
        outputReader = new DefaultServerOutputReader();
    }
    if (outputReader == null || !outputReader.isServerOutputEnabled()) {
        return;
    }
    synchronized (serverOutputs) {
        serverOutputs.add(new ServerOutputInfo(outputReader, executionContext, result));
    }
}
/**
 * After execution, optionally refreshes the connection's default objects
 * (catalog/schema) in a background job. Runs when the script finished
 * (result == null), or when a single query succeeded and the
 * REFRESH_DEFAULTS_AFTER_EXECUTE preference is enabled.
 */
private void runPostExecuteActions(@Nullable SQLQueryResult result) {
    final DBCExecutionContext executionContext = getExecutionContext();
    if (executionContext == null) {
        return;
    }
    // Parenthesized for clarity; '&&' binds tighter than '||'
    boolean refreshDefaults = result == null
        || (!result.hasError()
            && getActivePreferenceStore().getBoolean(SQLPreferenceConstants.REFRESH_DEFAULTS_AFTER_EXECUTE));
    if (!refreshDefaults) {
        return;
    }
    final DBCExecutionContextDefaults contextDefaults = executionContext.getContextDefaults();
    if (contextDefaults == null) {
        return;
    }
    // Refresh active object off the UI thread
    new AbstractJob("Refresh default object") {
        @Override
        protected IStatus run(DBRProgressMonitor monitor) {
            DBUtils.refreshContextDefaultsAndReflect(monitor, contextDefaults);
            return Status.OK_STATUS;
        }
    }.schedule();
}
/**
 * Switches the output tab's icon between the normal and "alert" (new output)
 * images. The icon is only changed when the output tab exists and is not the
 * currently selected tab (a selected tab is assumed to have been seen).
 */
private void updateOutputViewerIcon(boolean alert) {
    final Image image = alert ? IMG_OUTPUT_ALERT : IMG_OUTPUT;
    final CTabItem outputItem = UIUtils.getTabItem(resultTabs, outputViewer.getControl());
    if (outputItem == null || outputItem == resultTabs.getSelection()) {
        // TODO: make icon update. Can't call setImage because this will break contract f VerticalButton
        /*
        VerticalButton viewItem = getViewToolItem(SQLEditorCommands.CMD_SQL_SHOW_OUTPUT);
        if (viewItem != null) {
            viewItem.setImage(image);
        }
        */
        return;
    }
    outputItem.setImage(image);
}
/**
 * Background job that saves pending changes of every dirty result set viewer
 * across all query processors. {@code success} reports the outcome to whoever
 * scheduled the job (null = still running).
 */
private class SaveJob extends AbstractJob {
// Outcome flag read by the scheduler; transient because jobs must not serialize it
private transient Boolean success = null;
SaveJob() {
super("Save '" + getPartName() + "' data changes...");
setUser(true);
}
@Override
protected IStatus run(DBRProgressMonitor monitor) {
try {
for (QueryProcessor queryProcessor : queryProcessors) {
for (QueryResultsContainer resultsProvider : queryProcessor.getResultContainers()) {
ResultSetViewer rsv = resultsProvider.getResultSetController();
if (rsv != null && rsv.isDirty()) {
rsv.doSave(monitor);
}
}
}
success = true;
return Status.OK_STATUS;
} catch (Throwable e) {
success = false;
log.error(e);
return GeneralUtils.makeExceptionStatus(e);
} finally {
// NOTE(review): defensive only - try/catch above always sets success,
// so this branch appears unreachable; kept to guarantee a non-null outcome
if (success == null) {
success = true;
}
}
}
}
/**
 * Immutable request for reading server output: which reader to use, on which
 * execution context, and for which query result (null for script-level output).
 * Queued in {@code serverOutputs} and consumed by the ServerOutputReader job.
 */
private static class ServerOutputInfo {
private final DBCServerOutputReader outputReader;
private final DBCExecutionContext executionContext;
private final SQLQueryResult result;
ServerOutputInfo(DBCServerOutputReader outputReader, DBCExecutionContext executionContext, SQLQueryResult result) {
this.outputReader = outputReader;
this.executionContext = executionContext;
this.result = result;
}
}
private final List<ServerOutputInfo> serverOutputs = new ArrayList<>();
/**
 * System job that periodically (every 200 ms) drains the {@code serverOutputs}
 * queue and polls currently running statements for asynchronous server output,
 * appending everything to the output viewer.
 */
private class ServerOutputReader extends AbstractJob {
ServerOutputReader() {
super("Dump server output");
setSystem(true);
}
@Override
protected IStatus run(DBRProgressMonitor monitor) {
// Reschedules itself while the editor UI is alive and the platform is not shutting down
if (!DBWorkbench.getPlatform().isShuttingDown() && resultsSash != null && !resultsSash.isDisposed()) {
dumpOutput(monitor);
schedule(200);
}
return Status.OK_STATUS;
}
private void dumpOutput(DBRProgressMonitor monitor) {
if (outputViewer == null) {
return;
}
// Snapshot and clear the queue under the lock; process outside it
List<ServerOutputInfo> outputs;
synchronized (serverOutputs) {
outputs = new ArrayList<>(serverOutputs);
serverOutputs.clear();
}
PrintWriter outputWriter = outputViewer.getOutputWriter();
if (!outputs.isEmpty()) {
for (ServerOutputInfo info : outputs) {
try {
info.outputReader.readServerOutput(monitor, info.executionContext, info.result, null, outputWriter);
} catch (Exception e) {
// Output reading failures must not kill the polling job
log.error(e);
}
}
}
{
// Check running queries for async output
DBCServerOutputReader outputReader = null;
final DBCExecutionContext executionContext = getExecutionContext();
if (executionContext != null) {
final DBPDataSource dataSource = executionContext.getDataSource();
// Dump server output
outputReader = DBUtils.getAdapter(DBCServerOutputReader.class, dataSource);
}
if (outputReader != null && outputReader.isAsyncOutputReadSupported()) {
for (QueryProcessor qp : queryProcessors) {
SQLQueryJob queryJob = qp.curJob;
if (queryJob != null) {
DBCStatement statement = queryJob.getCurrentStatement();
if (statement != null) {
try {
outputReader.readServerOutput(monitor, executionContext, null, statement, outputWriter);
} catch (DBCException e) {
log.error(e);
}
}
}
}
}
}
outputWriter.flush();
// Scroll/icon updates belong to the UI thread
UIUtils.asyncExec(() -> {
if (outputViewer!=null) {
if (outputViewer.getControl()!=null) {
if (!outputViewer.isDisposed() && outputViewer.isHasNewOutput()) {
outputViewer.scrollToEnd();
updateOutputViewerIcon(true);
outputViewer.resetNewOutput();
}
}
}
});
}
}
}
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2020 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.editors.sql;
import org.eclipse.core.filesystem.EFS;
import org.eclipse.core.filesystem.IFileStore;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFileState;
import org.eclipse.core.runtime.*;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.jface.action.*;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.text.*;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.swt.events.*;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.*;
import org.eclipse.ui.actions.CompoundContributionItem;
import org.eclipse.ui.ide.FileStoreEditorInput;
import org.eclipse.ui.texteditor.DefaultRangeIndicator;
import org.eclipse.ui.texteditor.ITextEditorActionConstants;
import org.eclipse.ui.texteditor.rulers.IColumnSupport;
import org.eclipse.ui.texteditor.rulers.RulerColumnDescriptor;
import org.eclipse.ui.texteditor.rulers.RulerColumnRegistry;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.ModelPreferences;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.app.DBPProject;
import org.jkiss.dbeaver.model.data.DBDDataFilter;
import org.jkiss.dbeaver.model.data.DBDDataReceiver;
import org.jkiss.dbeaver.model.exec.*;
import org.jkiss.dbeaver.model.exec.plan.DBCPlan;
import org.jkiss.dbeaver.model.exec.plan.DBCPlanStyle;
import org.jkiss.dbeaver.model.exec.plan.DBCQueryPlanner;
import org.jkiss.dbeaver.model.impl.DefaultServerOutputReader;
import org.jkiss.dbeaver.model.impl.sql.SQLQueryTransformerCount;
import org.jkiss.dbeaver.model.messages.ModelMessages;
import org.jkiss.dbeaver.model.navigator.DBNUtils;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceListener;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.model.qm.QMUtils;
import org.jkiss.dbeaver.model.runtime.AbstractJob;
import org.jkiss.dbeaver.model.runtime.DBRProgressListener;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.model.sql.*;
import org.jkiss.dbeaver.model.sql.data.SQLQueryDataContainer;
import org.jkiss.dbeaver.model.struct.DBSDataContainer;
import org.jkiss.dbeaver.model.struct.DBSInstance;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.registry.DataSourceUtils;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.sql.SQLResultsConsumer;
import org.jkiss.dbeaver.runtime.ui.UIServiceConnections;
import org.jkiss.dbeaver.tools.transfer.IDataTransferProducer;
import org.jkiss.dbeaver.tools.transfer.database.DatabaseTransferProducer;
import org.jkiss.dbeaver.tools.transfer.ui.wizard.DataTransferWizard;
import org.jkiss.dbeaver.ui.*;
import org.jkiss.dbeaver.ui.controls.*;
import org.jkiss.dbeaver.ui.controls.resultset.*;
import org.jkiss.dbeaver.ui.controls.resultset.internal.ResultSetMessages;
import org.jkiss.dbeaver.ui.css.CSSUtils;
import org.jkiss.dbeaver.ui.css.DBStyles;
import org.jkiss.dbeaver.ui.dialogs.ConfirmationDialog;
import org.jkiss.dbeaver.ui.dialogs.EnterNameDialog;
import org.jkiss.dbeaver.ui.editors.DatabaseEditorUtils;
import org.jkiss.dbeaver.ui.editors.EditorUtils;
import org.jkiss.dbeaver.ui.editors.INonPersistentEditorInput;
import org.jkiss.dbeaver.ui.editors.StringEditorInput;
import org.jkiss.dbeaver.ui.editors.sql.execute.SQLQueryJob;
import org.jkiss.dbeaver.ui.editors.sql.handlers.SQLNavigatorContext;
import org.jkiss.dbeaver.ui.editors.sql.internal.SQLEditorMessages;
import org.jkiss.dbeaver.ui.editors.sql.log.SQLLogPanel;
import org.jkiss.dbeaver.ui.editors.sql.plan.ExplainPlanViewer;
import org.jkiss.dbeaver.ui.editors.sql.registry.SQLPresentationDescriptor;
import org.jkiss.dbeaver.ui.editors.sql.registry.SQLPresentationPanelDescriptor;
import org.jkiss.dbeaver.ui.editors.sql.registry.SQLPresentationRegistry;
import org.jkiss.dbeaver.ui.editors.text.ScriptPositionColumn;
import org.jkiss.dbeaver.ui.navigator.INavigatorModelView;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.dbeaver.utils.PrefUtils;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import org.jkiss.utils.ArrayUtils;
import org.jkiss.utils.CommonUtils;
import java.io.*;
import java.net.URI;
import java.util.List;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* SQL Executor
*/
public class SQLEditor extends SQLEditorBase implements
IDataSourceContainerProviderEx,
DBPEventListener,
ISaveablePart2,
DBPDataSourceTask,
DBPDataSourceHandler,
DBPPreferenceListener,
ISmartTransactionManager
{
private static final long SCRIPT_UI_UPDATE_PERIOD = 100;
private static final int MAX_PARALLEL_QUERIES_NO_WARN = 1;
private static final int SQL_EDITOR_CONTROL_INDEX = 1;
private static final int EXTRA_CONTROL_INDEX = 0;
private static final String PANEL_ITEM_PREFIX = "SQLPanelToggle:";
private static final String EMBEDDED_BINDING_PREFIX = "-- CONNECTION: ";
private static final Pattern EMBEDDED_BINDING_PREFIX_PATTERN = Pattern.compile("--\\s*CONNECTION:\\s*(.+)", Pattern.CASE_INSENSITIVE);
private static Image IMG_DATA_GRID = DBeaverIcons.getImage(UIIcon.SQL_PAGE_DATA_GRID);
private static Image IMG_DATA_GRID_LOCKED = DBeaverIcons.getImage(UIIcon.SQL_PAGE_DATA_GRID_LOCKED);
private static Image IMG_EXPLAIN_PLAN = DBeaverIcons.getImage(UIIcon.SQL_PAGE_EXPLAIN_PLAN);
private static Image IMG_LOG = DBeaverIcons.getImage(UIIcon.SQL_PAGE_LOG);
private static Image IMG_OUTPUT = DBeaverIcons.getImage(UIIcon.SQL_PAGE_OUTPUT);
private static Image IMG_OUTPUT_ALERT = DBeaverIcons.getImage(UIIcon.SQL_PAGE_OUTPUT_ALERT);
private static final String TOOLBAR_CONTRIBUTION_ID = "toolbar:org.jkiss.dbeaver.ui.editors.sql.toolbar.side";
private static final String TOOLBAR_GROUP_TOP = "top";
private static final String TOOLBAR_GROUP_ADDITIONS = IWorkbenchActionConstants.MB_ADDITIONS;
private static final String TOOLBAR_GROUP_PANELS = "panelToggles";
public static final String VAR_CONNECTION_NAME = "connectionName";
public static final String VAR_FILE_NAME = "fileName";
public static final String VAR_FILE_EXT = "fileExt";
public static final String VAR_DRIVER_NAME = "driverName";
public static final String DEFAULT_TITLE_PATTERN = "<${" + VAR_CONNECTION_NAME + "}> ${" + VAR_FILE_NAME + "}";
private ResultSetOrientation resultSetOrientation = ResultSetOrientation.HORIZONTAL;
private CustomSashForm resultsSash;
private Composite sqlEditorPanel;
@Nullable
private CustomSashForm presentationSash;
private CTabFolder resultTabs;
private CTabItem activeResultsTab;
private SQLLogPanel logViewer;
private SQLEditorOutputConsoleViewer outputViewer;
private volatile QueryProcessor curQueryProcessor;
private final List<QueryProcessor> queryProcessors = new ArrayList<>();
private DBPDataSourceContainer dataSourceContainer;
private DBPDataSource curDataSource;
private volatile DBCExecutionContext executionContext;
private volatile DBCExecutionContext lastExecutionContext;
private SQLScriptContext globalScriptContext;
private volatile boolean syntaxLoaded = false;
private final FindReplaceTarget findReplaceTarget = new FindReplaceTarget();
private final List<SQLQuery> runningQueries = new ArrayList<>();
private QueryResultsContainer curResultsContainer;
private Image editorImage;
private VerticalFolder sideToolBar;
private SQLPresentationDescriptor extraPresentationDescriptor;
private SQLEditorPresentation extraPresentation;
private Map<SQLPresentationPanelDescriptor, SQLEditorPresentationPanel> extraPresentationPanels = new HashMap<>();
private SQLEditorPresentationPanel extraPresentationCurrentPanel;
private VerticalFolder presentationSwitchFolder;
private final List<SQLEditorListener> listeners = new ArrayList<>();
    // Shared dispose listener for result tabs: when the last tab is closed,
    // collapse the results pane so the SQL editor reclaims the full area.
    private DisposeListener resultTabDisposeListener = new DisposeListener() {
        @Override
        public void widgetDisposed(DisposeEvent e) {
            if (resultTabs.getItemCount() == 0) {
                if (resultsSash.getMaximizedControl() == null) {
                    // Hide results
                    toggleResultPanel();
                }
            }
        }
    };
private VerticalButton switchPresentationSQLButton;
private VerticalButton switchPresentationExtraButton;
public SQLEditor()
{
super();
this.extraPresentationDescriptor = SQLPresentationRegistry.getInstance().getPresentation(this);
}
@Override
protected String[] getKeyBindingContexts() {
return new String[]{TEXT_EDITOR_CONTEXT, SQLEditorContributions.SQL_EDITOR_CONTEXT, SQLEditorContributions.SQL_EDITOR_SCRIPT_CONTEXT};
}
@Override
public DBPDataSource getDataSource() {
DBPDataSourceContainer container = getDataSourceContainer();
return container == null ? null : container.getDataSource();
}
@Override
public DBCExecutionContext getExecutionContext() {
if (executionContext != null) {
return executionContext;
}
if (dataSourceContainer != null && !SQLEditorUtils.isOpenSeparateConnection(dataSourceContainer)) {
return DBUtils.getDefaultContext(getDataSource(), false);
}
return null;
}
@Nullable
public DBPProject getProject()
{
IFile file = EditorUtils.getFileFromInput(getEditorInput());
return file == null ?
DBWorkbench.getPlatform().getWorkspace().getActiveProject() : DBWorkbench.getPlatform().getWorkspace().getProject(file.getProject());
}
@Nullable
@Override
public int[] getCurrentLines()
{
synchronized (runningQueries) {
IDocument document = getDocument();
if (document == null || runningQueries.isEmpty()) {
return null;
}
List<Integer> lines = new ArrayList<>(runningQueries.size() * 2);
for (SQLQuery statementInfo : runningQueries) {
try {
int firstLine = document.getLineOfOffset(statementInfo.getOffset());
int lastLine = document.getLineOfOffset(statementInfo.getOffset() + statementInfo.getLength());
for (int k = firstLine; k <= lastLine; k++) {
lines.add(k);
}
} catch (BadLocationException e) {
// ignore - this may happen is SQL was edited after execution start
}
}
if (lines.isEmpty()) {
return null;
}
int[] results = new int[lines.size()];
for (int i = 0; i < lines.size(); i++) {
results[i] = lines.get(i);
}
return results;
}
}
    /**
     * Returns the data source container this editor is currently bound to,
     * or {@code null} if the editor is not associated with any connection.
     */
    @Nullable
    @Override
    public DBPDataSourceContainer getDataSourceContainer()
    {
        return dataSourceContainer;
    }
    /**
     * Rebinds the editor to another data source container. Releases the old
     * container and its context, registers listeners on the new one, updates
     * the editor input association, triggers an (optional) connect, and
     * refreshes UI state. The statement order here is significant.
     *
     * @param container new container, or null to unbind
     * @return always true (the change is never vetoed)
     */
    @Override
    public boolean setDataSourceContainer(@Nullable DBPDataSourceContainer container)
    {
        if (container == dataSourceContainer) {
            return true;
        }
        // Release ds container
        releaseContainer();
        closeAllJobs();
        dataSourceContainer = container;
        if (dataSourceContainer != null) {
            dataSourceContainer.getPreferenceStore().addPropertyChangeListener(this);
            dataSourceContainer.getRegistry().addDataSourceListener(this);
        }
        IEditorInput input = getEditorInput();
        if (input != null) {
            // Persist the association on the editor input so it survives reopen
            DBPDataSourceContainer savedContainer = EditorUtils.getInputDataSource(input);
            if (savedContainer != container) {
                EditorUtils.setInputDataSource(input, new SQLNavigatorContext(container, getExecutionContext()));
            }
            IFile file = EditorUtils.getFileFromInput(input);
            if (file != null) {
                DBNUtils.refreshNavigatorResource(file, container);
            } else {
                // FIXME: this is a hack. We can't fire event on resource change so editor's state won't be updated in UI.
                // FIXME: To update main toolbar and other controls we hade and show this editor
                IWorkbenchPage page = getSite().getPage();
                for (IEditorReference er : page.getEditorReferences()) {
                    if (er.getEditor(false) == this) {
                        page.hideEditor(er);
                        page.showEditor(er);
                        break;
                    }
                }
                //page.activate(this);
            }
        }
        // Connect asynchronously (if auto-connect is enabled); report failures via UI
        checkConnected(false, status -> UIUtils.asyncExec(() -> {
            if (!status.isOK()) {
                DBWorkbench.getPlatformUI().showError("Can't connect to database", "Error connecting to datasource", status);
            }
            setFocus();
        }));
        setPartName(getEditorName());
        fireDataSourceChange();
        if (dataSourceContainer != null) {
            dataSourceContainer.acquire(this);
        }
        if (SQLEditorBase.isWriteEmbeddedBinding()) {
            // Patch connection reference
            UIUtils.syncExec(this::embedDataSourceAssociation);
        }
        return true;
    }
    /**
     * Resolves the container to bind to, in priority order: the embedded
     * "-- CONNECTION:" comment in the script (if that feature is enabled),
     * then the association stored on the editor input, then the container of
     * the currently active workbench part; finally applies the result.
     */
    private void updateDataSourceContainer() {
        DBPDataSourceContainer inputDataSource = null;
        if (SQLEditorBase.isReadEmbeddedBinding()) {
            // Try to get datasource from contents (always, no matter what )
            inputDataSource = getDataSourceFromContent();
        }
        if (inputDataSource == null) {
            inputDataSource = EditorUtils.getInputDataSource(getEditorInput());
        }
        if (inputDataSource == null) {
            // No datasource. Try to get one from active part
            IWorkbenchPart activePart = getSite().getWorkbenchWindow().getActivePage().getActivePart();
            if (activePart != this && activePart instanceof IDataSourceContainerProvider) {
                inputDataSource = ((IDataSourceContainerProvider) activePart).getDataSourceContainer();
            }
        }
        setDataSourceContainer(inputDataSource);
    }
    /**
     * (Re)creates the execution context after a data source change. When the
     * editor is configured for a separate connection, opening happens
     * asynchronously and {@code onSuccess} runs only after the context is
     * ready; otherwise it runs immediately.
     *
     * @param onSuccess callback invoked once a usable context is in place; may be null
     */
    private void updateExecutionContext(Runnable onSuccess) {
        if (dataSourceContainer == null) {
            releaseExecutionContext();
        } else {
            // Get/open context
            final DBPDataSource dataSource = dataSourceContainer.getDataSource();
            if (dataSource == null) {
                releaseExecutionContext();
            } else if (curDataSource != dataSource) {
                // Datasource was changed or instance was changed (PG)
                releaseExecutionContext();
                curDataSource = dataSource;
                DBPDataSourceContainer container = dataSource.getContainer();
                if (SQLEditorUtils.isOpenSeparateConnection(container)) {
                    initSeparateConnection(dataSource, onSuccess);
                } else {
                    if (onSuccess != null) {
                        onSuccess.run();
                    }
                }
            }
            // NOTE(review): when curDataSource == dataSource nothing happens and
            // onSuccess is NOT invoked - presumably intentional (no change)
        }
    }
    /**
     * Schedules a background job that opens an isolated connection for this
     * editor. If the script previously saved context defaults (instance name),
     * tries to reopen on that same instance instead of the default one.
     */
    private void initSeparateConnection(@NotNull DBPDataSource dataSource, Runnable onSuccess) {
        DBSInstance dsInstance = dataSource.getDefaultInstance();
        // contextDefaults[0] is the saved instance name (see OpenContextJob for catalog/schema)
        String[] contextDefaults = isRestoreActiveSchemaFromScript() ?
            EditorUtils.getInputContextDefaults(getEditorInput()) : null;
        if (!ArrayUtils.isEmpty(contextDefaults) && contextDefaults[0] != null) {
            DBSInstance selectedInstance = DBUtils.findObject(dataSource.getAvailableInstances(), contextDefaults[0]);
            if (selectedInstance != null) {
                dsInstance = selectedInstance;
            }
        }
        if (dsInstance != null) {
            final OpenContextJob job = new OpenContextJob(dsInstance, onSuccess);
            job.schedule();
        }
    }
    /**
     * Drops the current execution context. A still-connected context is closed
     * in a background job to avoid blocking the UI thread.
     */
    private void releaseExecutionContext() {
        if (executionContext != null && executionContext.isConnected()) {
            // Close context in separate job (otherwise it can block UI)
            new CloseContextJob(executionContext).schedule();
        }
        executionContext = null;
        curDataSource = null;
    }
    /**
     * Fully detaches the editor from its container: closes the execution
     * context, unregisters preference/data-source listeners and releases
     * the usage reference acquired in setDataSourceContainer().
     */
    private void releaseContainer() {
        releaseExecutionContext();
        if (dataSourceContainer != null) {
            dataSourceContainer.getPreferenceStore().removePropertyChangeListener(this);
            dataSourceContainer.getRegistry().removeDataSourceListener(this);
            dataSourceContainer.release(this);
            dataSourceContainer = null;
        }
    }
    /**
     * Reads the embedded connection association from the script's first line
     * (a "-- CONNECTION: &lt;spec&gt;" comment) and resolves it to a container
     * in the current project.
     *
     * @return the resolved container, or null when no binding comment exists
     *         or it cannot be resolved
     */
    private DBPDataSourceContainer getDataSourceFromContent() {
        DBPProject project = getProject();
        IDocument document = getDocument();
        int totalLines = document.getNumberOfLines();
        if (totalLines == 0) {
            return null;
        }
        try {
            // Only the very first line of the script is considered
            IRegion region = document.getLineInformation(0);
            String line = document.get(region.getOffset(), region.getLength());
            Matcher matcher = EMBEDDED_BINDING_PREFIX_PATTERN.matcher(line);
            if (matcher.matches()) {
                String connSpec = matcher.group(1).trim();
                if (!CommonUtils.isEmpty(connSpec)) {
                    final DBPDataSourceContainer dataSource = DataSourceUtils.getDataSourceBySpec(project, connSpec, null, true, false);
                    if (dataSource != null) {
                        return dataSource;
                    }
                }
            }
        } catch (Throwable e) {
            log.debug("Error extracting datasource info from script's content", e);
        }
        return null;
    }
    /**
     * Writes (or removes) the "-- CONNECTION:" binding comment on the first
     * line of the script so it matches the editor's current container.
     * No-op when the embedded binding already points to the current container.
     */
    private void embedDataSourceAssociation() {
        if (getDataSourceFromContent() == dataSourceContainer) {
            return;
        }
        IDocument document = getDocument();
        try {
            int totalLines = document.getNumberOfLines();
            // region is non-null only when line 0 already holds a binding comment
            IRegion region = null;
            if (totalLines > 0) {
                region = document.getLineInformation(0);
                String line = document.get(region.getOffset(), region.getLength());
                Matcher matcher = EMBEDDED_BINDING_PREFIX_PATTERN.matcher(line);
                if (!matcher.matches()) {
                    // First line is not a binding comment - insert a new one instead of replacing
                    region = null;
                }
            }
            if (dataSourceContainer == null) {
                if (region == null) {
                    return;
                }
                // Remove connection association
                document.replace(region.getOffset(), region.getLength(), "");
            } else {
                SQLScriptBindingType bindingType = SQLScriptBindingType.valueOf(DBWorkbench.getPlatform().getPreferenceStore().getString(SQLPreferenceConstants.SCRIPT_BIND_COMMENT_TYPE));
                StringBuilder assocSpecLine = new StringBuilder(EMBEDDED_BINDING_PREFIX);
                bindingType.appendSpec(dataSourceContainer, assocSpecLine);
                if (region != null) {
                    // Replace the existing connection association
                    document.replace(region.getOffset(), region.getLength(), assocSpecLine.toString());
                } else {
                    document.replace(0, 0, assocSpecLine.toString());
                }
            }
        } catch (Throwable e) {
            log.debug("Error extracting datasource info from script's content", e);
        }
        UIUtils.asyncExec(() -> getTextViewer().refresh());
    }
    /**
     * Registers an editor lifecycle listener. Thread-safe.
     */
    public void addListener(SQLEditorListener listener) {
        synchronized (listeners) {
            listeners.add(listener);
        }
    }
    /**
     * Unregisters a previously added editor lifecycle listener. Thread-safe.
     */
    public void removeListener(SQLEditorListener listener) {
        synchronized (listeners) {
            listeners.remove(listener);
        }
    }
    /**
     * An editor counts as an active task while at least one query is running.
     */
    @Override
    public boolean isActiveTask() {
        return getTotalQueryRunning() > 0;
    }
    /**
     * Whether smart transaction commit mode is enabled in the active preference store.
     */
    @Override
    public boolean isSmartAutoCommit() {
        return getActivePreferenceStore().getBoolean(ModelPreferences.TRANSACTIONS_SMART_COMMIT);
    }
    /**
     * Persists the smart transaction commit preference; save failures are
     * logged but not propagated.
     */
    @Override
    public void setSmartAutoCommit(boolean smartAutoCommit) {
        getActivePreferenceStore().setValue(ModelPreferences.TRANSACTIONS_SMART_COMMIT, smartAutoCommit);
        try {
            getActivePreferenceStore().save();
        } catch (IOException e) {
            log.error("Error saving smart auto-commit option", e);
        }
    }
    /**
     * Forces the side toolbar to repaint so command enablement/icons reflect
     * the current editor state.
     */
    public void refreshActions() {
        // Redraw toolbar to refresh action sets
        sideToolBar.redraw();
    }
    /**
     * Writer that forwards server/script output to the output console viewer.
     * Writes are marshalled to the UI thread; when the output panel is not
     * visible, the toolbar icon is switched to its "alert" state to signal
     * new output.
     */
    private class OutputLogWriter extends Writer {
        @Override
        public void write(@NotNull final char[] cbuf, final int off, final int len) {
            UIUtils.syncExec(() -> {
                if (!outputViewer.isDisposed()) {
                    outputViewer.getOutputWriter().write(cbuf, off, len);
                    outputViewer.scrollToEnd();
                    if (!outputViewer.isVisible()) {
                        updateOutputViewerIcon(true);
                    }
                }
            });
        }
        @Override
        public void flush() throws IOException {
            outputViewer.getOutputWriter().flush();
        }
        @Override
        public void close() throws IOException {
            // Nothing to release - the underlying viewer owns its resources
        }
    }
    /**
     * Background job that opens an isolated execution context on the given
     * instance for this editor, optionally restoring the catalog/schema
     * defaults previously saved with the script, then fires UI updates.
     */
    private class OpenContextJob extends AbstractJob {
        private final DBSInstance instance;
        // Invoked only when the context opened successfully; may be null
        private final Runnable onSuccess;
        private Throwable error;
        OpenContextJob(DBSInstance instance, Runnable onSuccess) {
            super("Open connection to " + instance.getDataSource().getContainer().getName());
            this.instance = instance;
            this.onSuccess = onSuccess;
            setUser(true);
        }
        @Override
        protected IStatus run(DBRProgressMonitor monitor) {
            monitor.beginTask("Open SQLEditor isolated connection", 1);
            try {
                String title = "SQLEditor <" + getEditorInput().getName() + ">";
                monitor.subTask("Open context " + title);
                DBCExecutionContext newContext = instance.openIsolatedContext(monitor, title, instance.getDefaultContext(monitor, false));
                // Set context defaults
                // contextDefaultNames: [0] = catalog, [1] = schema saved with the editor input
                String[] contextDefaultNames = isRestoreActiveSchemaFromScript() ?
                    EditorUtils.getInputContextDefaults(getEditorInput()) : null;
                if (contextDefaultNames != null && contextDefaultNames.length > 1 &&
                    (!CommonUtils.isEmpty(contextDefaultNames[0]) || !CommonUtils.isEmpty(contextDefaultNames[1])))
                {
                    try {
                        DBExecUtils.setExecutionContextDefaults(monitor, newContext.getDataSource(), newContext, contextDefaultNames[0], null, contextDefaultNames[1]);
                    } catch (DBException e) {
                        // Restoring defaults is best-effort: report but keep the new context
                        DBWorkbench.getPlatformUI().showError("New connection default", "Error setting default catalog/schema for new connection", e);
                    }
                }
                SQLEditor.this.executionContext = newContext;
                // Needed to update main toolbar
                DBUtils.fireObjectSelect(instance, true);
            } catch (DBException e) {
                error = e;
            } finally {
                monitor.done();
            }
            updateContext();
            return Status.OK_STATUS;
        }
        // Reports failure (and rolls back the context) or runs the success
        // callback and notifies the UI of the data source change.
        private void updateContext() {
            if (error != null) {
                releaseExecutionContext();
                DBWorkbench.getPlatformUI().showError("Open context", "Can't open editor connection", error);
            } else {
                if (onSuccess != null) {
                    onSuccess.run();
                }
                fireDataSourceChange();
            }
        }
    }
private boolean isRestoreActiveSchemaFromScript() {
return getActivePreferenceStore().getBoolean(SQLPreferenceConstants.AUTO_SAVE_ACTIVE_SCHEMA) &&
getActivePreferenceStore().getBoolean(SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION);
}
    /**
     * Background job that closes an isolated execution context, first ending
     * any active transaction via the connections UI service (which may prompt
     * the user to commit or roll back).
     */
    private class CloseContextJob extends AbstractJob {
        private final DBCExecutionContext context;
        CloseContextJob(DBCExecutionContext context) {
            super("Close context " + context.getContextName());
            this.context = context;
            setUser(true);
        }
        @Override
        protected IStatus run(DBRProgressMonitor monitor) {
            monitor.beginTask("Close SQLEditor isolated connection", 1);
            try {
                if (QMUtils.isTransactionActive(context)) {
                    UIServiceConnections serviceConnections = DBWorkbench.getService(UIServiceConnections.class);
                    if (serviceConnections != null) {
                        serviceConnections.closeActiveTransaction(monitor, context, false);
                    }
                }
                monitor.subTask("Close context " + context.getContextName());
                context.close();
            } finally {
                monitor.done();
            }
            return Status.OK_STATUS;
        }
    }
    /**
     * The editor is dirty when any result tab has unsaved data or a running
     * job, when a transaction is open on its context, or when the underlying
     * document / extra presentation is dirty. Non-persistent (console)
     * editors are never dirty.
     */
    @Override
    public boolean isDirty()
    {
        for (QueryProcessor queryProcessor : queryProcessors) {
            if (queryProcessor.isDirty() || queryProcessor.curJobRunning.get() > 0) {
                return true;
            }
        }
        if (executionContext != null && QMUtils.isTransactionActive(executionContext)) {
            return true;
        }
        if (isNonPersistentEditor()) {
            // Console is never dirty
            return false;
        }
        if (extraPresentation instanceof ISaveablePart && ((ISaveablePart) extraPresentation).isDirty()) {
            return true;
        }
        return super.isDirty();
    }
    /**
     * Adapter lookup. Find/replace requests get the composite target that
     * spans editor and results; other requests are first delegated to the
     * active results tab content (results container / presentation panel)
     * before falling back to the base editor.
     */
    @Nullable
    @Override
    public <T> T getAdapter(Class<T> required)
    {
        if (required == INavigatorModelView.class) {
            // Explicitly not adaptable to the navigator view
            return null;
        }
        if (resultTabs != null && !resultTabs.isDisposed()) {
            if (required == IFindReplaceTarget.class) {
                return required.cast(findReplaceTarget);
            }
            CTabItem activeResultsTab = getActiveResultsTab();
            if (activeResultsTab != null && UIUtils.isUIThread()) {
                Object tabControl = activeResultsTab.getData();
                if (tabControl instanceof QueryResultsContainer) {
                    tabControl = ((QueryResultsContainer) tabControl).viewer;
                }
                if (tabControl instanceof IAdaptable) {
                    T adapter = ((IAdaptable) tabControl).getAdapter(required);
                    if (adapter != null) {
                        return adapter;
                    }
                }
                if (tabControl instanceof ResultSetViewer && (required == IResultSetController.class || required == ResultSetViewer.class)) {
                    return required.cast(tabControl);
                }
            }
        }
        return super.getAdapter(required);
    }
    /**
     * Triggers an asynchronous connect for the bound container when forced or
     * when the "connect on activate" preference is set.
     *
     * @param forceConnect connect regardless of the preference
     * @param onFinish invoked by the connection service when the attempt
     *                 completes; NOTE(review): not called when no connect is
     *                 initiated (already connected / no container) - confirm
     *                 callers handle that
     * @return the container's connection state at call time (a connect started
     *         here is still in flight, so this is usually false then)
     */
    private boolean checkConnected(boolean forceConnect, DBRProgressListener onFinish)
    {
        // Connect to datasource
        final DBPDataSourceContainer dataSourceContainer = getDataSourceContainer();
        boolean doConnect = dataSourceContainer != null &&
            (forceConnect || dataSourceContainer.getPreferenceStore().getBoolean(SQLPreferenceConstants.EDITOR_CONNECT_ON_ACTIVATE));
        if (doConnect) {
            if (!dataSourceContainer.isConnected()) {
                UIServiceConnections serviceConnections = DBWorkbench.getService(UIServiceConnections.class);
                if (serviceConnections != null) {
                    serviceConnections.connectDataSource(dataSourceContainer, onFinish);
                }
            }
        }
        return dataSourceContainer != null && dataSourceContainer.isConnected();
    }
    /**
     * Builds the editor UI: a sash dividing the SQL area from results/panels,
     * the left controls toolbar, the optional extra-presentation sash and
     * switch bar, and the result tabs. Creation order matters - the text
     * editor must exist before presentations and result tabs are attached.
     */
    @Override
    public void createPartControl(Composite parent)
    {
        setRangeIndicator(new DefaultRangeIndicator());
        // divides editor area and results/panels area
        resultsSash = UIUtils.createPartDivider(
                this,
                parent,
                resultSetOrientation.getSashOrientation() | SWT.SMOOTH);
        CSSUtils.setCSSClass(resultsSash, DBStyles.COLORED_BY_CONNECTION_TYPE);
        resultsSash.setSashWidth(5);
        UIUtils.setHelp(resultsSash, IHelpContextIds.CTX_SQL_EDITOR);
        Composite editorContainer;
        sqlEditorPanel = UIUtils.createPlaceholder(resultsSash, 3, 0);
        // Create left vertical toolbar
        createControlsBar(sqlEditorPanel);
        // Create editor presentations sash
        Composite pPlaceholder = null;
        if (extraPresentationDescriptor != null) {
            // Extra presentation splits perpendicular to the results sash
            presentationSash = UIUtils.createPartDivider(
                    this,
                sqlEditorPanel,
                ((resultSetOrientation.getSashOrientation() == SWT.VERTICAL) ? SWT.HORIZONTAL : SWT.VERTICAL) | SWT.SMOOTH);
            presentationSash.setSashWidth(5);
            presentationSash.setLayoutData(new GridData(GridData.FILL_BOTH));
            editorContainer = presentationSash;
            pPlaceholder = new Composite(presentationSash, SWT.NONE);
            pPlaceholder.setLayout(new FillLayout());
        } else {
            editorContainer = sqlEditorPanel;
        }
        super.createPartControl(editorContainer);
        getEditorControlWrapper().setLayoutData(new GridData(GridData.FILL_BOTH));
        // Create right vertical toolbar
        createPresentationSwitchBar(sqlEditorPanel);
        if (pPlaceholder != null) {
            // Apply the presentation's initial visibility
            switch (extraPresentationDescriptor.getActivationType()) {
                case HIDDEN:
                    presentationSash.setMaximizedControl(presentationSash.getChildren()[SQL_EDITOR_CONTROL_INDEX]);
                    break;
                case MAXIMIZED:
                case VISIBLE:
                    extraPresentation.createPresentation(pPlaceholder, this);
                    if (extraPresentationDescriptor.getActivationType() == SQLEditorPresentation.ActivationType.MAXIMIZED) {
                        if (presentationSash.getChildren()[EXTRA_CONTROL_INDEX] != null) {
                            presentationSash.setMaximizedControl(pPlaceholder);
                        }
                    }
                    break;
            }
        }
        getSite().setSelectionProvider(new DynamicSelectionProvider());
        DBPProject project = getProject();
        if (project != null && project.isRegistryLoaded()) {
            createResultTabs();
        } else {
            // Defer tab creation until the project registry is loaded
            UIExecutionQueue.queueExec(this::createResultTabs);
        }
        setAction(ITextEditorActionConstants.SHOW_INFORMATION, null);
        //toolTipAction.setEnabled(false);
/*
        resultsSash.setSashBorders(new boolean[]{true, true});
        if (presentationSash != null) {
            presentationSash.setSashBorders(new boolean[]{true, true});
        }
*/
        SQLEditorFeatures.SQL_EDITOR_OPEN.use();
        // Start output reader
        new ServerOutputReader().schedule();
        updateExecutionContext(null);
        // Update controls
        UIExecutionQueue.queueExec(this::onDataSourceChange);
    }
    /**
     * Creates the left vertical toolbar with execute/explain commands,
     * the preferences toggle and the output/log panel toggles.
     */
    private void createControlsBar(Composite sqlEditorPanel) {
        sideToolBar = new VerticalFolder(sqlEditorPanel, SWT.LEFT);
        ((GridLayout)sideToolBar.getLayout()).marginTop = 3;
        ((GridLayout)sideToolBar.getLayout()).marginBottom = 10;
        ((GridLayout)sideToolBar.getLayout()).verticalSpacing = 3;
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_STATEMENT, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_STATEMENT_NEW, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_SCRIPT, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXECUTE_SCRIPT_NEW, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.PUSH, getSite(), SQLEditorCommands.CMD_EXPLAIN_PLAN, false);
        // Filler pushes the remaining buttons to the bottom
        UIUtils.createEmptyLabel(sideToolBar, 1, 1).setLayoutData(new GridData(GridData.FILL_VERTICAL));
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.CHECK, new ShowPreferencesAction(), false);
        Label label = new Label(sideToolBar, SWT.NONE);
        label.setImage(DBeaverIcons.getImage(UIIcon.SEPARATOR_H));
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.CHECK, getSite(), SQLEditorCommands.CMD_SQL_SHOW_OUTPUT, false);
        VerticalButton.create(sideToolBar, SWT.LEFT | SWT.CHECK, getSite(), SQLEditorCommands.CMD_SQL_SHOW_LOG, false);
/*
        sideToolBar.add(new GroupMarker(TOOLBAR_GROUP_PANELS));
        final IMenuService menuService = getSite().getService(IMenuService.class);
        if (menuService != null) {
            int prevSize = sideToolBar.getSize();
            menuService.populateContributionManager(sideToolBar, TOOLBAR_CONTRIBUTION_ID);
            if (prevSize != sideToolBar.getSize()) {
                // Something was populated
                sideToolBar.insertBefore(TOOLBAR_GROUP_ADDITIONS, new ToolbarSeparatorContribution(false));
            }
        }
*/
        sideToolBar.setLayoutData(new GridData(GridData.FILL_VERTICAL | GridData.VERTICAL_ALIGN_BEGINNING));
    }
    /**
     * Creates the right-hand vertical bar used to switch between the SQL text
     * presentation and the registered extra presentation. Does nothing when no
     * extra presentation is available.
     */
    private void createPresentationSwitchBar(Composite sqlEditorPanel) {
        if (extraPresentationDescriptor == null) {
            return;
        }
        presentationSwitchFolder = new VerticalFolder(sqlEditorPanel, SWT.RIGHT);
        presentationSwitchFolder.setLayoutData(new GridData(GridData.FILL_VERTICAL));
        switchPresentationSQLButton = new VerticalButton(presentationSwitchFolder, SWT.RIGHT | SWT.CHECK);
        switchPresentationSQLButton.setText(SQLEditorMessages.editors_sql_description);
        switchPresentationSQLButton.setImage(DBeaverIcons.getImage(UIIcon.SQL_SCRIPT));
        switchPresentationExtraButton = new VerticalButton(presentationSwitchFolder, SWT.RIGHT | SWT.CHECK);
        switchPresentationExtraButton.setData(extraPresentationDescriptor);
        switchPresentationExtraButton.setText(extraPresentationDescriptor.getLabel());
        switchPresentationExtraButton.setImage(DBeaverIcons.getImage(extraPresentationDescriptor.getIcon()));
        // Prefer the keybinding-aware command description as the tooltip
        String toolTip = ActionUtils.findCommandDescription(extraPresentationDescriptor.getToggleCommandId(), getSite(), false);
        if (CommonUtils.isEmpty(toolTip)) {
            toolTip = extraPresentationDescriptor.getDescription();
        }
        if (!CommonUtils.isEmpty(toolTip)) {
            switchPresentationExtraButton.setToolTipText(toolTip);
        }
        switchPresentationSQLButton.setChecked(true);
        // We use single switch handler. It must be provided by presentation itself
        // Presentation switch may require some additional action so we can't just switch visible controls
        SelectionListener switchListener = new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (((VerticalButton)e.item).isChecked() || presentationSwitchFolder.getSelection() == e.item) {
                    return;
                }
                String toggleCommandId = extraPresentationDescriptor.getToggleCommandId();
                ActionUtils.runCommand(toggleCommandId, getSite());
            }
        };
        switchPresentationSQLButton.addSelectionListener(switchListener);
        switchPresentationExtraButton.addSelectionListener(switchListener);
        // Stretch
        UIUtils.createEmptyLabel(presentationSwitchFolder, 1, 1).setLayoutData(new GridData(GridData.FILL_VERTICAL));
        VerticalButton.create(presentationSwitchFolder, SWT.RIGHT | SWT.CHECK, getSite(), SQLEditorCommands.CMD_TOGGLE_LAYOUT, false);
    }
    /**
     * Validates the editor input state and, on success, restores keyboard
     * focus to the text widget. Called on drag-n-drop and some other
     * operations that may steal focus.
     */
    @Override
    public boolean validateEditorInputState() {
        boolean res = super.validateEditorInputState();
        if (res) {
            StyledText textWidget = getViewer().getTextWidget();
            if (textWidget != null && !textWidget.isDisposed()) {
                textWidget.setFocus();
            }
        }
        return res;
    }
private void createResultTabs()
{
resultTabs = new CTabFolder(resultsSash, SWT.TOP | SWT.FLAT);
CSSUtils.setCSSClass(resultTabs, DBStyles.COLORED_BY_CONNECTION_TYPE);
new TabFolderReorder(resultTabs);
resultTabs.setLayoutData(new GridData(GridData.FILL_BOTH));
resultTabs.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (extraPresentationCurrentPanel != null) {
extraPresentationCurrentPanel.deactivatePanel();
}
extraPresentationCurrentPanel = null;
Object data = e.item.getData();
if (data instanceof QueryResultsContainer) {
setActiveResultsContainer((QueryResultsContainer) data);
} else if (data instanceof SQLEditorPresentationPanel) {
extraPresentationCurrentPanel = ((SQLEditorPresentationPanel) data);
extraPresentationCurrentPanel.activatePanel();
} else if (data instanceof ExplainPlanViewer) {
SQLQuery planQuery = ((ExplainPlanViewer) data).getQuery();
if (planQuery != null) {
getSelectionProvider().setSelection(new TextSelection(planQuery.getOffset(), 0));
}
}
}
});
this.resultTabs.addListener(SWT.Resize, event -> {
if (!resultsSash.isDisposed()) {
int[] weights = resultsSash.getWeights();
IPreferenceStore prefs = getPreferenceStore();
if (prefs != null) {
prefs.setValue(SQLPreferenceConstants.RESULTS_PANEL_RATIO, weights[0] + "-" + weights[1]);
}
}
});
String resultsPanelRatio = getPreferenceStore().getString(SQLPreferenceConstants.RESULTS_PANEL_RATIO);
if (!CommonUtils.isEmpty(resultsPanelRatio)) {
String[] weights = resultsPanelRatio.split("-");
if (weights.length > 1) {
resultsSash.setWeights(new int[] {
Integer.parseInt(weights[0]),
Integer.parseInt(weights[1]),
});
}
}
getTextViewer().getTextWidget().addTraverseListener(e -> {
if (e.detail == SWT.TRAVERSE_PAGE_NEXT) {
ResultSetViewer viewer = getActiveResultSetViewer();
if (viewer != null && viewer.getActivePresentation().getControl().isVisible()) {
viewer.getActivePresentation().getControl().setFocus();
e.doit = false;
e.detail = SWT.TRAVERSE_NONE;
}
}
});
resultTabs.setSimple(true);
resultTabs.addMouseListener(new MouseAdapter() {
@Override
public void mouseUp(MouseEvent e) {
if (e.button == 2) {
CTabItem item = resultTabs.getItem(new Point(e.x, e.y));
if (item != null && item.getShowClose()) {
item.dispose();
}
}
}
});
resultTabs.addListener(SWT.MouseDoubleClick, event -> {
if (event.button != 1) {
return;
}
CTabItem selectedItem = resultTabs.getItem(new Point(event.getBounds().x, event.getBounds().y));
if (selectedItem != null && selectedItem == resultTabs.getSelection()) {
toggleEditorMaximize();
}
});
// Extra views
//planView = new ExplainPlanViewer(this, resultTabs);
logViewer = new SQLLogPanel(resultTabs, this);
outputViewer = new SQLEditorOutputConsoleViewer(getSite(), resultTabs, SWT.NONE);
// Create results tab
createQueryProcessor(true, true);
{
resultTabs.addMouseListener(new MouseAdapter() {
@Override
public void mouseDown(MouseEvent e) {
activeResultsTab = resultTabs.getItem(new Point(e.x, e.y));
}
});
MenuManager menuMgr = new MenuManager();
Menu menu = menuMgr.createContextMenu(resultTabs);
menuMgr.addMenuListener(manager -> {
manager.add(ActionUtils.makeCommandContribution(getSite(), SQLEditorCommands.CMD_SQL_EDITOR_MAXIMIZE_PANEL));
if (resultTabs.getItemCount() > 1) {
manager.add(new Action("Close multiple results") {
@Override
public void run()
{
closeExtraResultTabs(null, false);
}
});
int pinnedTabsCount = 0;
for (CTabItem item : resultTabs.getItems()) {
if (item.getData() instanceof QueryResultsContainer) {
if (((QueryResultsContainer) item.getData()).isPinned()) {
pinnedTabsCount++;
}
}
}
if (pinnedTabsCount > 1) {
manager.add(new Action("Unpin all tabs") {
@Override
public void run()
{
for (CTabItem item : resultTabs.getItems()) {
if (item.getData() instanceof QueryResultsContainer) {
if (((QueryResultsContainer) item.getData()).isPinned()) {
((QueryResultsContainer) item.getData()).setPinned(false);
}
}
}
}
});
}
}
final CTabItem activeTab = getActiveResultsTab();
if (activeTab != null && activeTab.getData() instanceof QueryResultsContainer) {
{
final QueryResultsContainer resultsContainer = (QueryResultsContainer) activeTab.getData();
if (resultsContainer.getResultSetController().hasData()) {
manager.add(new Separator());
final boolean isPinned = resultsContainer.isPinned();
manager.add(new Action(isPinned ? "Unpin tab" : "Pin tab") {
@Override
public void run() {
resultsContainer.setPinned(!isPinned);
}
});
}
}
manager.add(new Action("Set tab title") {
@Override
public void run()
{
EnterNameDialog dialog = new EnterNameDialog(resultTabs.getShell(), "Tab title", activeTab.getText());
if (dialog.open() == IDialogConstants.OK_ID) {
activeTab.setText(dialog.getResult());
}
}
});
}
if (activeTab != null && activeTab.getShowClose()) {
manager.add(ActionUtils.makeCommandContribution(getSite(), SQLEditorCommands.CMD_SQL_EDITOR_CLOSE_TAB));
}
});
menuMgr.setRemoveAllWhenShown(true);
resultTabs.setMenu(menu);
}
}
    /**
     * Remembers the given container (and its owning query processor) as the
     * current target for subsequent executions.
     */
    private void setActiveResultsContainer(QueryResultsContainer data) {
        curResultsContainer = data;
        curQueryProcessor = curResultsContainer.queryProcessor;
        ResultSetViewer rsv = curResultsContainer.getResultSetController();
        if (rsv != null) {
            //rsv.getActivePresentation().getControl().setFocus();
        }
    }
/////////////////////////////////////////////////////////////
// Panels
    /**
     * Toggles an extra view (output console, execution log, ...) as a tab in
     * the results folder: closes the tab if it is already open, otherwise
     * creates and selects it, keeping the corresponding toolbar toggle in sync.
     *
     * @param commandId toolbar command whose button reflects the tab state
     * @param view the control to host in the tab
     */
    private void showExtraView(final String commandId, String name, String toolTip, Image image, Control view) {
        VerticalButton viewItem = getViewToolItem(commandId);
        if (viewItem == null) {
            log.warn("Tool item for command " + commandId + " not found");
            return;
        }
        for (CTabItem item : resultTabs.getItems()) {
            if (item.getData() == view) {
                // Close tab if it is already open
                viewItem.setChecked(false);
                viewItem.redraw();
                item.dispose();
                return;
            }
        }
        if (view == outputViewer.getControl()) {
            // Showing the output clears the "new output" alert state
            updateOutputViewerIcon(false);
            outputViewer.resetNewOutput();
        }
        // Create new tab
        viewItem.setChecked(true);
        CTabItem item = new CTabItem(resultTabs, SWT.CLOSE);
        item.setControl(view);
        item.setText(name);
        item.setToolTipText(toolTip);
        item.setImage(image);
        item.setData(view);
        // De-select tool item on tab close
        item.addDisposeListener(e -> {
            if (!viewItem.isDisposed()) {
                viewItem.setChecked(false);
                viewItem.redraw();
            }
            resultTabDisposeListener.widgetDisposed(e);
        });
        resultTabs.setSelection(item);
        viewItem.redraw();
    }
private VerticalButton getViewToolItem(String commandId) {
VerticalButton viewItem = null;
for (VerticalButton item : sideToolBar.getItems()) {
if (commandId.equals(item.getCommandId())) {
viewItem = item;
break;
}
}
return viewItem;
}
private CTabItem getActiveResultsTab() {
return activeResultsTab == null || activeResultsTab.isDisposed() ?
(resultTabs == null ? null : resultTabs.getSelection()) : activeResultsTab;
}
public void closeActiveTab() {
CTabItem tabItem = getActiveResultsTab();
if (tabItem != null && tabItem.getShowClose()) {
tabItem.dispose();
activeResultsTab = null;
}
}
    /**
     * Shows (or toggles off) the server output console tab, un-maximizing the
     * editor first if the results area is hidden.
     */
    public void showOutputPanel() {
        if (resultsSash.getMaximizedControl() != null) {
            resultsSash.setMaximizedControl(null);
        }
        showExtraView(SQLEditorCommands.CMD_SQL_SHOW_OUTPUT, SQLEditorMessages.editors_sql_output, SQLEditorMessages.editors_sql_output_tip, IMG_OUTPUT, outputViewer.getControl());
    }
/**
 * Toggles the execution log panel. Restores the editor/results split first if
 * one side is currently maximized, so the panel's tab is actually visible.
 */
public void showExecutionLogPanel() {
    if (resultsSash.getMaximizedControl() != null) {
        // Un-maximize so the results area (and its tabs) become visible
        resultsSash.setMaximizedControl(null);
    }
    showExtraView(SQLEditorCommands.CMD_SQL_SHOW_LOG, SQLEditorMessages.editors_sql_execution_log, SQLEditorMessages.editors_sql_execution_log_tip, IMG_LOG, logViewer);
}
/**
 * Looks up an open presentation panel of the exact given class among the
 * result tabs.
 *
 * @param panelClass exact runtime class of the panel to find
 * @return the open panel instance, or {@code null} if no tab hosts one
 */
public <T> T getExtraPresentationPanel(Class<T> panelClass) {
    for (CTabItem tabItem : resultTabs.getItems()) {
        Object data = tabItem.getData();
        if (data instanceof SQLEditorPresentationPanel && data.getClass() == panelClass) {
            return panelClass.cast(data);
        }
    }
    return null;
}
/**
 * Brings the tab hosting the given presentation panel to front.
 *
 * @param panel panel instance to reveal
 * @return {@code true} if a tab hosting the panel was found and selected
 */
public boolean showPresentationPanel(SQLEditorPresentationPanel panel) {
    for (CTabItem tab : resultTabs.getItems()) {
        if (tab.getData() != panel) {
            continue;
        }
        resultTabs.setSelection(tab);
        return true;
    }
    return false;
}
/**
 * Activates a presentation panel by its descriptor id: finds the side-toolbar
 * toggle action bound to that panel and runs it (which opens/selects the tab).
 *
 * @param panelID id of the panel descriptor
 * @return the panel that became current, or {@code null} if no matching toggle exists
 */
public SQLEditorPresentationPanel showPresentationPanel(String panelID) {
    for (VerticalButton button : sideToolBar.getItems()) {
        IAction action = button.getAction();
        if (action instanceof PresentationPanelToggleAction
            && ((PresentationPanelToggleAction) action).panel.getId().equals(panelID)) {
            action.run();
            return extraPresentationCurrentPanel;
        }
    }
    return null;
}
/**
 * @return {@code true} when either side of the editor/results sash is maximized
 */
public boolean hasMaximizedControl() {
    Control maximized = resultsSash.getMaximizedControl();
    return maximized != null;
}
/**
 * @return the lazily-created extra presentation, or {@code null} if it has
 *         not been activated yet (see showExtraPresentation)
 */
public SQLEditorPresentation getExtraPresentation() {
    return extraPresentation;
}
/**
 * Derives the extra presentation's activation state from the presentation
 * sash: HIDDEN when the presentation was never created or the SQL editor is
 * maximized, MAXIMIZED when the presentation control itself is maximized,
 * VISIBLE when both sides share the sash.
 */
public SQLEditorPresentation.ActivationType getExtraPresentationState() {
    if (extraPresentation == null) {
        // Never created -> nothing to show
        return SQLEditorPresentation.ActivationType.HIDDEN;
    }
    Control maximized = presentationSash.getMaximizedControl();
    if (maximized == getExtraPresentationControl()) {
        return SQLEditorPresentation.ActivationType.MAXIMIZED;
    }
    if (maximized == getEditorControlWrapper()) {
        // SQL editor occupies the whole sash -> presentation hidden
        return SQLEditorPresentation.ActivationType.HIDDEN;
    }
    return SQLEditorPresentation.ActivationType.VISIBLE;
}
/**
 * Shows or hides the extra presentation (the alternate editor view contributed
 * via extension point), lazily instantiating it on first show, and rebuilds the
 * presentation-panel toggle buttons in the side bar accordingly.
 *
 * @param show     {@code true} to show the presentation, {@code false} to hide it
 * @param maximize when showing, {@code true} maximizes the presentation side of the sash
 */
public void showExtraPresentation(boolean show, boolean maximize) {
    if (extraPresentationDescriptor == null) {
        // No presentation contributed for this editor - nothing to do
        return;
    }
    // Suppress redraws while we rearrange the sash and side bar
    resultsSash.setRedraw(false);
    try {
        if (!show) {
            //boolean epHasFocus = UIUtils.hasFocus(getExtraPresentationControl());
            // Hide: maximize the plain SQL editor side and return focus to it
            presentationSash.setMaximizedControl(presentationSash.getChildren()[SQL_EDITOR_CONTROL_INDEX]);
            //if (epHasFocus) {
            getEditorControlWrapper().setFocus();
            //}
        } else {
            if (extraPresentation == null) {
                // Lazy activation
                try {
                    extraPresentation = extraPresentationDescriptor.createPresentation();
                    extraPresentation.createPresentation((Composite) getExtraPresentationControl(), this);
                } catch (DBException e) {
                    log.error("Error creating presentation", e);
                }
            }
            if (maximize) {
                presentationSash.setMaximizedControl(getExtraPresentationControl());
                getExtraPresentationControl().setFocus();
            } else {
                presentationSash.setMaximizedControl(null);
            }
        }
        // Show presentation panels
        boolean sideBarChanged = false;
        if (getExtraPresentationState() == SQLEditorPresentation.ActivationType.HIDDEN) {
            // Remove all presentation panel toggles
            // NOTE(review): the outer per-descriptor loop is redundant here - the inner
            // loop already disposes every matching child on the first pass; confirm intent.
            for (SQLPresentationPanelDescriptor panelDescriptor : extraPresentationDescriptor.getPanels()) {
                for (Control vb : presentationSwitchFolder.getChildren()) {
                    if (vb instanceof Label || vb.getData() instanceof SQLPresentationPanelDescriptor) {
                        vb.dispose();
                        sideBarChanged = true;
                    }
                }
            }
            // Close all panels
            for (CTabItem tabItem : resultTabs.getItems()) {
                if (tabItem.getData() instanceof SQLEditorPresentationPanel) {
                    tabItem.dispose();
                }
            }
            extraPresentationCurrentPanel = null;
        } else {
            // Check and add presentation panel toggles
            // Spacer label pushes the panel buttons to the bottom of the folder
            UIUtils.createEmptyLabel(presentationSwitchFolder, 1, 1).setLayoutData(new GridData(GridData.FILL_VERTICAL));
            for (SQLPresentationPanelDescriptor panelDescriptor : extraPresentationDescriptor.getPanels()) {
                sideBarChanged = true;
                PresentationPanelToggleAction toggleAction = new PresentationPanelToggleAction(panelDescriptor);
                VerticalButton panelButton = new VerticalButton(presentationSwitchFolder, SWT.RIGHT);
                panelButton.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_END));
                panelButton.setAction(toggleAction, true);
                panelButton.setData(panelDescriptor);
                if (panelDescriptor.isAutoActivate()) {
                    //panelButton.setChecked(true);
                    // Auto-activated panels are opened immediately
                    toggleAction.run();
                }
            }
        }
        // Sync the SQL/extra switch buttons with the resulting state
        boolean isExtra = getExtraPresentationState() == SQLEditorPresentation.ActivationType.MAXIMIZED;
        switchPresentationSQLButton.setChecked(!isExtra);
        switchPresentationExtraButton.setChecked(isExtra);
        presentationSwitchFolder.redraw();
        if (sideBarChanged) {
            sideToolBar.getParent().layout(true, true);
        }
    } finally {
        resultsSash.setRedraw(true);
    }
}
/**
 * @return the sash child reserved for the extra presentation
 */
private Control getExtraPresentationControl() {
    Control[] children = presentationSash.getChildren();
    return children[EXTRA_CONTROL_INDEX];
}
/**
 * Toggles the results panel: hides it (maximizing the SQL editor) when both
 * sides are visible, otherwise restores the split - creating a query processor
 * first if every result tab was closed - and moves focus to the shown side.
 */
public void toggleResultPanel() {
    boolean resultsVisible = resultsSash.getMaximizedControl() == null;
    if (resultsVisible) {
        // Hide results: editor takes the whole sash
        resultsSash.setMaximizedControl(sqlEditorPanel);
        switchFocus(false);
        return;
    }
    // Show both editor and results
    // Check for existing query processors (maybe all result tabs were closed)
    if (resultTabs.getItemCount() == 0) {
        createQueryProcessor(true, true);
    }
    resultsSash.setMaximizedControl(null);
    switchFocus(true);
}
/**
 * Toggles maximization of the results area: maximizes the result tabs when the
 * sash is split, restores the split when something is maximized; focus follows
 * the results on maximize and returns to the editor on restore.
 */
public void toggleEditorMaximize()
{
    boolean split = resultsSash.getMaximizedControl() == null;
    if (split) {
        resultsSash.setMaximizedControl(resultTabs);
        switchFocus(true);
    } else {
        resultsSash.setMaximizedControl(null);
        switchFocus(false);
    }
}
/**
 * Moves keyboard focus either to the results side or back to the SQL editor.
 * On the results side the active result-set presentation is preferred; when
 * none exists, the selected tab's control receives focus instead.
 *
 * @param results {@code true} to focus the results area, {@code false} for the editor
 */
private void switchFocus(boolean results) {
    if (!results) {
        getEditorControlWrapper().setFocus();
        return;
    }
    ResultSetViewer viewer = getActiveResultSetViewer();
    if (viewer != null && viewer.getActivePresentation() != null) {
        viewer.getActivePresentation().getControl().setFocus();
        return;
    }
    // No result-set viewer - fall back to whatever control the selected tab hosts
    CTabItem selected = resultTabs.getSelection();
    if (selected != null && selected.getControl() != null) {
        selected.getControl().setFocus();
    }
}
/**
 * Switches focus between the editor and the results area; does nothing while
 * either side is maximized (there is only one visible panel then).
 */
public void toggleActivePanel() {
    if (resultsSash.getMaximizedControl() != null) {
        return;
    }
    // Focus whichever side does not currently own the focus
    switchFocus(!UIUtils.hasFocus(resultTabs));
}
/**
 * Re-reads the result-set orientation preference and applies it to the sash.
 * Falls back to HORIZONTAL when the stored preference value is unrecognized.
 */
private void updateResultSetOrientation() {
    String prefValue = DBWorkbench.getPlatform().getPreferenceStore().getString(SQLPreferenceConstants.RESULT_SET_ORIENTATION);
    try {
        resultSetOrientation = ResultSetOrientation.valueOf(prefValue);
    } catch (IllegalArgumentException e) {
        // Unknown/corrupted preference value - use the default layout
        resultSetOrientation = ResultSetOrientation.HORIZONTAL;
    }
    if (resultsSash != null) {
        resultsSash.setOrientation(resultSetOrientation.getSashOrientation());
    }
}
/**
 * Checkbox-style action that opens/closes a presentation panel contributed by
 * the extra presentation. Running it while the panel is open disposes its tab;
 * otherwise the panel is lazily created (first run) or its existing tab is
 * brought to front.
 */
private class PresentationPanelToggleAction extends Action {
    // Descriptor of the panel this action toggles
    private SQLPresentationPanelDescriptor panel;
    // Tab created for the panel on first activation
    private CTabItem tabItem;
    public PresentationPanelToggleAction(SQLPresentationPanelDescriptor panel) {
        super(panel.getLabel(), Action.AS_CHECK_BOX);
        setId(PANEL_ITEM_PREFIX + panel.getId());
        if (panel.getIcon() != null) {
            setImageDescriptor(DBeaverIcons.getImageDescriptor(panel.getIcon()));
        }
        if (panel.getDescription() != null) {
            setToolTipText(panel.getDescription());
        }
        this.panel = panel;
    }
    @Override
    public void run() {
        // Flip the checked state manually (AS_CHECK_BOX toggling via API)
        setChecked(!isChecked());
        SQLEditorPresentationPanel panelInstance = extraPresentationPanels.get(panel);
        if (panelInstance != null && !isChecked()) {
            // Hide panel
            // NOTE(review): this local 'tabItem' shadows the field of the same name
            for (CTabItem tabItem : resultTabs.getItems()) {
                if (tabItem.getData() == panelInstance) {
                    tabItem.dispose();
                    return;
                }
            }
        }
        if (panelInstance == null) {
            // First activation: create the panel and host it in a new tab
            Control panelControl;
            try {
                panelInstance = panel.createPanel();
                panelControl = panelInstance.createPanel(resultTabs, SQLEditor.this, extraPresentation);
            } catch (DBException e) {
                DBWorkbench.getPlatformUI().showError("Panel opening error", "Can't create panel " + panel.getLabel(), e);
                return;
            }
            extraPresentationPanels.put(panel, panelInstance);
            tabItem = new CTabItem(resultTabs, SWT.CLOSE);
            tabItem.setControl(panelControl);
            tabItem.setText(panel.getLabel());
            tabItem.setToolTipText(panel.getDescription());
            tabItem.setImage(DBeaverIcons.getImage(panel.getIcon()));
            tabItem.setData(panelInstance);
            // De-select tool item on tab close
            tabItem.addDisposeListener(e -> {
                PresentationPanelToggleAction.this.setChecked(false);
                panelControl.dispose();
                // Drop cached instance so the next toggle re-creates the panel
                extraPresentationPanels.remove(panel);
                extraPresentationCurrentPanel = null;
                resultTabDisposeListener.widgetDisposed(e);
            });
            extraPresentationCurrentPanel = panelInstance;
            resultTabs.setSelection(tabItem);
        } else {
            // Panel already exists - just reveal its tab
            // NOTE(review): this local 'tabItem' also shadows the field
            for (CTabItem tabItem : resultTabs.getItems()) {
                if (tabItem.getData() == panelInstance) {
                    resultTabs.setSelection(tabItem);
                    break;
                }
            }
        }
    }
}
/////////////////////////////////////////////////////////////
// Initialization
/**
 * Standard editor initialization: delegates to the base text editor, applies
 * the persisted result-set orientation, and builds the global script context
 * (root of the per-execution script contexts) bound to this editor's input file.
 */
@Override
public void init(IEditorSite site, IEditorInput editorInput)
    throws PartInitException
{
    super.init(site, editorInput);
    updateResultSetOrientation();
    // Root script context; createScriptContext() derives child contexts from it
    this.globalScriptContext = new SQLScriptContext(
        null,
        this,
        EditorUtils.getLocalFileFromInput(getEditorInput()),
        new OutputLogWriter(),
        new SQLEditorParametersProvider(getSite()));
}
/**
 * Sets the editor input, recreating a missing workspace file on the fly and
 * falling back to an in-memory error document when the input cannot be loaded
 * (e.g. OutOfMemory on a huge file). Data-source binding / syntax rules are
 * (re)initialized afterwards, deferred to the UI execution queue for
 * persistent editors so application startup is not blocked.
 */
@Override
protected void doSetInput(IEditorInput editorInput)
{
    // Check for file existence
    try {
        if (editorInput instanceof IFileEditorInput) {
            final IFile file = ((IFileEditorInput) editorInput).getFile();
            if (!file.exists()) {
                // Recreate the file so the text editor does not fail on open
                file.create(new ByteArrayInputStream(new byte[]{}), true, new NullProgressMonitor());
            }
        }
    } catch (Exception e) {
        log.error("Error checking SQL file", e);
    }
    try {
        super.doSetInput(editorInput);
    } catch (Throwable e) {
        // Something bad may happen. E.g. OutOfMemory error in case of too big input file.
        StringWriter out = new StringWriter();
        e.printStackTrace(new PrintWriter(out, true));
        // Replace the input with a read-only error document (recursive call is safe:
        // a StringEditorInput is loaded in memory and will not fail again)
        editorInput = new StringEditorInput("Error", CommonUtils.truncateString(out.toString(), 10000), true, GeneralUtils.UTF8_ENCODING);
        doSetInput(editorInput);
        log.error("Error loading input SQL file", e);
    }
    syntaxLoaded = false;
    // Rebind data source or just reload syntax rules, depending on whether the
    // input's data source differs from the current one
    Runnable inputinitializer = () -> {
        DBPDataSourceContainer oldDataSource = SQLEditor.this.getDataSourceContainer();
        DBPDataSourceContainer newDataSource = EditorUtils.getInputDataSource(SQLEditor.this.getEditorInput());
        if (oldDataSource != newDataSource) {
            SQLEditor.this.dataSourceContainer = null;
            SQLEditor.this.updateDataSourceContainer();
        } else {
            SQLEditor.this.reloadSyntaxRules();
        }
    };
    if (isNonPersistentEditor()) {
        inputinitializer.run();
    } else {
        // Run in queue - for app startup
        UIExecutionQueue.queueExec(inputinitializer);
    }
    setPartName(getEditorName());
    if (isNonPersistentEditor()) {
        setTitleImage(DBeaverIcons.getImage(UIIcon.SQL_CONSOLE));
    }
    editorImage = getTitleImage();
}
/**
 * Builds the editor tab tooltip: script location plus connection name, driver
 * and JDBC URL. Falls back to the default tooltip when no data source is bound.
 * The script path is resolved from the most specific input type available.
 */
@Override
public String getTitleToolTip() {
    DBPDataSourceContainer dataSourceContainer = getDataSourceContainer();
    if (dataSourceContainer == null) {
        return super.getTitleToolTip();
    }
    final IEditorInput editorInput = getEditorInput();
    String scriptPath;
    if (editorInput instanceof IFileEditorInput) {
        // Workspace file - use its full workspace path
        scriptPath = ((IFileEditorInput) editorInput).getFile().getFullPath().toString();
    } else if (editorInput instanceof IPathEditorInput) {
        scriptPath = ((IPathEditorInput) editorInput).getPath().toString();
    } else if (editorInput instanceof IURIEditorInput) {
        final URI uri = ((IURIEditorInput) editorInput).getURI();
        if ("file".equals(uri.getScheme())) {
            // Local file URI - show an OS path instead of the raw URI
            scriptPath = new File(uri).getAbsolutePath();
        } else {
            scriptPath = uri.toString();
        }
    } else if (editorInput instanceof INonPersistentEditorInput) {
        scriptPath = "SQL Console";
    } else {
        scriptPath = editorInput.getName();
        if (CommonUtils.isEmpty(scriptPath)) {
            scriptPath = "<not a file>";
        }
    }
    return
        "Script: " + scriptPath +
            " \nConnection: " + dataSourceContainer.getName() +
            " \nType: " + (dataSourceContainer.getDriver().getFullName()) +
            " \nURL: " + dataSourceContainer.getConnectionConfiguration().getUrl();
}
/**
 * Computes the editor tab title from the user-configured title pattern,
 * substituting connection/file/driver variables. The script name is taken from
 * the workspace file (extension stripped), a local file, or the raw input name.
 */
private String getEditorName() {
    final IFile file = EditorUtils.getFileFromInput(getEditorInput());
    String scriptName;
    if (file != null) {
        scriptName = file.getFullPath().removeFileExtension().lastSegment();
    } else {
        File localFile = EditorUtils.getLocalFileFromInput(getEditorInput());
        if (localFile != null) {
            scriptName = localFile.getName();
        } else {
            scriptName = getEditorInput().getName();
        }
    }
    DBPDataSourceContainer dataSourceContainer = getDataSourceContainer();
    DBPPreferenceStore preferenceStore = getActivePreferenceStore();
    String pattern = preferenceStore.getString(SQLPreferenceConstants.SCRIPT_TITLE_PATTERN);
    Map<String, Object> vars = new HashMap<>();
    vars.put(VAR_CONNECTION_NAME, dataSourceContainer == null ? "none" : dataSourceContainer.getName());
    vars.put(VAR_FILE_NAME, scriptName);
    // NOTE(review): getFileExtension() may return null for extension-less files - confirm
    // the pattern resolver tolerates a null variable value
    vars.put(VAR_FILE_EXT,
        file == null ? "" : file.getFullPath().getFileExtension());
    vars.put(VAR_DRIVER_NAME, dataSourceContainer == null ? "?" : dataSourceContainer.getDriver().getFullName());
    return GeneralUtils.replaceVariables(pattern, new GeneralUtils.MapResolver(vars));
}
/**
 * Default focus behavior - the base text editor focuses the source viewer.
 */
@Override
public void setFocus()
{
    super.setFocus();
}
/**
 * Loads a previously saved execution plan into a plan viewer tab. If the plan
 * cannot be loaded, the freshly opened tab is closed again. Unsupported
 * data sources are reported by getPlanView (which then returns {@code null}).
 */
public void loadQueryPlan() {
    DBCQueryPlanner planner = GeneralUtils.adapt(getDataSource(), DBCQueryPlanner.class);
    ExplainPlanViewer planView = getPlanView(null, planner);
    if (planView == null) {
        return;
    }
    boolean loaded = planView.loadQueryPlan(planner, planView);
    if (!loaded) {
        // Loading failed/cancelled - remove the just-opened plan tab
        closeActiveTab();
    }
}
/**
 * Explains the plan of the query under the cursor (or selection). Notifies
 * registered listeners first; reports an error status when there is no query
 * or the active script element is a command rather than a SQL query.
 */
public void explainQueryPlan() {
    // Notify listeners
    synchronized (listeners) {
        for (SQLEditorListener listener : listeners) {
            listener.beforeQueryPlanExplain();
        }
    }
    final SQLScriptElement scriptElement = extractActiveQuery();
    if (scriptElement == null) {
        setStatus(SQLEditorMessages.editors_sql_status_empty_query_string, DBPMessageType.ERROR);
        return;
    }
    if (scriptElement instanceof SQLQuery) {
        explainQueryPlan((SQLQuery) scriptElement);
    } else {
        // Control commands (@set etc.) have no execution plan
        setStatus("Can't explain plan for command", DBPMessageType.ERROR);
    }
}
/**
 * Explains the plan of the given query. QUERY-style planners are handled by
 * running the planner-generated SQL as a regular query; otherwise the plan is
 * rendered in an {@link ExplainPlanViewer} tab.
 *
 * @param sqlQuery query to explain
 */
private void explainQueryPlan(SQLQuery sqlQuery) {
    DBCQueryPlanner planner = GeneralUtils.adapt(getDataSource(), DBCQueryPlanner.class);
    // FIX: the planner must be null-checked BEFORE dereferencing it. Previously
    // planner.getPlanStyle() threw an NPE for data sources without plan support
    // (getPlanView's own null check was reached too late, and the QUERY-style
    // branch never reached it at all).
    if (planner == null) {
        DBWorkbench.getPlatformUI().showError("Execution plan", "Execution plan explain isn't supported by current datasource");
        return;
    }
    DBCPlanStyle planStyle = planner.getPlanStyle();
    if (planStyle == DBCPlanStyle.QUERY) {
        explainPlanFromQuery(planner, sqlQuery);
        return;
    }
    ExplainPlanViewer planView = getPlanView(sqlQuery, planner);
    if (planView != null) {
        planView.explainQueryPlan(sqlQuery, planner);
    }
}
/**
 * Finds or creates an {@link ExplainPlanViewer} tab for the given query.
 * Reuses an existing plan tab showing the same query; otherwise creates a new
 * one numbered after the highest existing plan tab. Query parameters are
 * resolved first; returns {@code null} when the planner is missing or the user
 * cancels parameter binding.
 *
 * @param sqlQuery query to plan, or {@code null} when loading a saved plan
 * @param planner  plan provider adapted from the data source; may be {@code null}
 */
private ExplainPlanViewer getPlanView(SQLQuery sqlQuery, DBCQueryPlanner planner) {
    // 1. Determine whether planner supports plan extraction
    if (planner == null) {
        DBWorkbench.getPlatformUI().showError("Execution plan", "Execution plan explain isn't supported by current datasource");
        return null;
    }
    // Transform query parameters
    if (sqlQuery != null) {
        if (!transformQueryWithParameters(sqlQuery)) {
            // User cancelled parameter binding
            return null;
        }
    }
    ExplainPlanViewer planView = null;
    if (sqlQuery != null) {
        // Reuse an existing plan tab that already shows this exact query
        for (CTabItem item : resultTabs.getItems()) {
            if (item.getData() instanceof ExplainPlanViewer) {
                ExplainPlanViewer pv = (ExplainPlanViewer) item.getData();
                if (pv.getQuery() != null && pv.getQuery().equals(sqlQuery)) {
                    resultTabs.setSelection(item);
                    planView = pv;
                    break;
                }
            }
        }
    }
    if (planView == null) {
        // Number the new tab one past the highest existing plan tab
        int maxPlanNumber = 0;
        for (CTabItem item : resultTabs.getItems()) {
            if (item.getData() instanceof ExplainPlanViewer) {
                maxPlanNumber = Math.max(maxPlanNumber, ((ExplainPlanViewer) item.getData()).getPlanNumber());
            }
        }
        maxPlanNumber++;
        planView = new ExplainPlanViewer(this, this, resultTabs, maxPlanNumber);
        final CTabItem item = new CTabItem(resultTabs, SWT.CLOSE);
        item.setControl(planView.getControl());
        item.setText(SQLEditorMessages.editors_sql_error_execution_plan_title + " - " + maxPlanNumber);
        if (sqlQuery != null) {
            item.setToolTipText(sqlQuery.getText());
        }
        item.setImage(IMG_EXPLAIN_PLAN);
        item.setData(planView);
        item.addDisposeListener(resultTabDisposeListener);
        UIUtils.disposeControlOnItemDispose(item);
        resultTabs.setSelection(item);
    }
    return planView;
}
/**
 * Handles QUERY-style planners: asks the planner to produce a plan SQL
 * statement for {@code sqlQuery} (on a utility session, bounded by a 5s task
 * timeout) and executes that statement as a regular query in the editor.
 */
private void explainPlanFromQuery(final DBCQueryPlanner planner, final SQLQuery sqlQuery) {
    // Single-element array: the lambda below needs an effectively-final holder
    final String[] planQueryString = new String[1];
    DBRRunnableWithProgress queryObtainTask = monitor -> {
        try (DBCSession session = getExecutionContext().openSession(monitor, DBCExecutionPurpose.UTIL, "Prepare plan query")) {
            DBCPlan plan = planner.planQueryExecution(session, sqlQuery.getText());
            planQueryString[0] = plan.getPlanQueryString();
        } catch (Exception e) {
            log.error(e);
        }
    };
    if (RuntimeUtils.runTask(queryObtainTask, "Retrieve plan query", 5000) && !CommonUtils.isEmpty(planQueryString[0])) {
        SQLQuery planQuery = new SQLQuery(getDataSource(), planQueryString[0]);
        // Run the plan statement like a normal query (new tab, no script mode)
        processQueries(Collections.singletonList(planQuery), false, true, false, true, null);
    }
}
/**
 * Convenience overload: executes the current query/script without a query
 * transformer or listener.
 *
 * @param newTab open results in a new tab
 * @param script execute the whole script instead of the current statement
 */
public void processSQL(boolean newTab, boolean script) {
    processSQL(newTab, script, null, null);
}
/**
 * Extracts and executes SQL from the editor document.
 * In script mode the whole selection (or the entire document when the
 * selection is trivial) is parsed into statements; otherwise only the
 * statement under the cursor / current selection is used. An optional
 * transformer rewrites each query (re-parsing its parameters) before execution.
 *
 * @param newTab        open results in a new tab
 * @param script        execute as a script (multiple statements)
 * @param transformer   optional per-query rewriter (e.g. row-count wrapper)
 * @param queryListener optional listener notified about query lifecycle
 * @return {@code true} if execution was started
 */
public boolean processSQL(boolean newTab, boolean script, SQLQueryTransformer transformer, @Nullable SQLQueryListener queryListener)
{
    IDocument document = getDocument();
    if (document == null) {
        setStatus(SQLEditorMessages.editors_sql_status_cant_obtain_document, DBPMessageType.ERROR);
        return false;
    }
    // Notify listeners
    synchronized (listeners) {
        for (SQLEditorListener listener : listeners) {
            listener.beforeQueryExecute(script, newTab);
        }
    }
    List<SQLScriptElement> elements;
    if (script) {
        // Execute all SQL statements consequently
        ITextSelection selection = (ITextSelection) getSelectionProvider().getSelection();
        if (selection.getLength() > 1) {
            // Non-trivial selection - run only the selected fragment
            elements = extractScriptQueries(selection.getOffset(), selection.getLength(), true, false, true);
        } else {
            elements = extractScriptQueries(0, document.getLength(), true, false, true);
        }
    } else {
        // Execute statement under cursor or selected text (if selection present)
        SQLScriptElement sqlQuery = extractActiveQuery();
        if (sqlQuery == null) {
            ResultSetViewer activeViewer = getActiveResultSetViewer();
            if (activeViewer != null) {
                activeViewer.setStatus(SQLEditorMessages.editors_sql_status_empty_query_string, DBPMessageType.ERROR);
            }
            return false;
        } else {
            elements = Collections.singletonList(sqlQuery);
        }
    }
    try {
        if (transformer != null) {
            DBPDataSource dataSource = getDataSource();
            if (dataSource != null) {
                // Rewrite each SQL query through the transformer; non-query
                // elements (commands) pass through unchanged
                List<SQLScriptElement> xQueries = new ArrayList<>(elements.size());
                for (SQLScriptElement element : elements) {
                    if (element instanceof SQLQuery) {
                        SQLQuery query = transformer.transformQuery(dataSource, getSyntaxManager(), (SQLQuery) element);
                        if (!CommonUtils.isEmpty(query.getParameters())) {
                            // Transformation may shift offsets - re-parse parameters
                            query.setParameters(parseQueryParameters(query));
                        }
                        xQueries.add(query);
                    } else {
                        xQueries.add(element);
                    }
                }
                elements = xQueries;
            }
        }
    }
    catch (DBException e) {
        DBWorkbench.getPlatformUI().showError("Bad query", "Can't execute query", e);
        return false;
    }
    if (!CommonUtils.isEmpty(elements)) {
        return processQueries(elements, script, newTab, false, true, queryListener);
    } else {
        return false;
    }
}
/**
 * Opens the data-export flow for the selected queries (or the query under the
 * cursor when the selection is trivial). Shows an error when no query can be
 * determined.
 */
public void exportDataFromQuery()
{
    List<SQLScriptElement> elements;
    ITextSelection selection = (ITextSelection) getSelectionProvider().getSelection();
    if (selection.getLength() > 1) {
        elements = extractScriptQueries(selection.getOffset(), selection.getLength(), true, false, true);
    } else {
        elements = new ArrayList<>();
        // FIX: extractActiveQuery() may return null (empty document / cursor on
        // whitespace). Previously null was added unconditionally, making the list
        // non-empty and passing a null element into processQueries (NPE).
        SQLScriptElement activeQuery = extractActiveQuery();
        if (activeQuery != null) {
            elements.add(activeQuery);
        }
    }
    if (!elements.isEmpty()) {
        processQueries(elements, false, false, true, true, null);
    } else {
        DBWorkbench.getPlatformUI().showError(
            "Extract data",
            "Choose one or more queries to export from");
    }
}
/**
 * Core query-execution dispatcher. Validates the session (reconnecting
 * asynchronously if needed and re-entering itself from the connect callback),
 * enforces permissions, asks for confirmation on dangerous DML and on mass
 * parallel execution, prepares the UI (un-maximize, auto-save, close extra
 * tabs) and finally hands the queries to one or more query processors.
 *
 * @param queries       parsed script elements to execute (non-empty)
 * @param forceScript   treat the list as a script even if it has one element
 * @param newTab        run each query in its own new tab
 * @param export        export mode (data transfer) - skips tab management
 * @param checkSession  validate/establish the connection first
 * @param queryListener optional lifecycle listener
 * @return {@code true} if execution was started (or scheduled after connect)
 */
private boolean processQueries(@NotNull final List<SQLScriptElement> queries, final boolean forceScript, final boolean newTab, final boolean export, final boolean checkSession, @Nullable final SQLQueryListener queryListener)
{
    if (queries.isEmpty()) {
        // Nothing to process
        return false;
    }
    final DBPDataSourceContainer container = getDataSourceContainer();
    if (checkSession) {
        try {
            // On successful (re)connect, re-enter this method with checkSession=false
            DBRProgressListener connectListener = status -> {
                if (!status.isOK() || container == null || !container.isConnected()) {
                    DBWorkbench.getPlatformUI().showError(
                        SQLEditorMessages.editors_sql_error_cant_obtain_session,
                        null,
                        status);
                    return;
                }
                updateExecutionContext(() -> UIUtils.syncExec(() ->
                    processQueries(queries, forceScript, newTab, export, false, queryListener)));
            };
            if (!checkSession(connectListener)) {
                // Connection is being established asynchronously; callback will resume
                return false;
            }
        } catch (DBException ex) {
            ResultSetViewer viewer = getActiveResultSetViewer();
            if (viewer != null) {
                viewer.setStatus(ex.getMessage(), DBPMessageType.ERROR);
            }
            DBWorkbench.getPlatformUI().showError(
                SQLEditorMessages.editors_sql_error_cant_obtain_session,
                ex.getMessage());
            return false;
        }
    }
    if (dataSourceContainer == null) {
        return false;
    }
    if (!dataSourceContainer.hasModifyPermission(DBPDataSourcePermission.PERMISSION_EXECUTE_SCRIPTS)) {
        DBWorkbench.getPlatformUI().showError(
            SQLEditorMessages.editors_sql_error_cant_execute_query_title,
            "Query execution was restricted by connection configuration");
        return false;
    }
    SQLScriptContext scriptContext = createScriptContext();
    final boolean isSingleQuery = !forceScript && (queries.size() == 1);
    if (isSingleQuery && queries.get(0) instanceof SQLQuery) {
        // Confirm potentially destructive single statements (DELETE/UPDATE without WHERE)
        SQLQuery query = (SQLQuery) queries.get(0);
        if (query.isDeleteUpdateDangerous()) {
            String targetName = "multiple tables";
            if (query.getSingleSource() != null) {
                targetName = query.getSingleSource().getEntityName();
            }
            if (ConfirmationDialog.showConfirmDialogEx(
                ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
                getSite().getShell(),
                SQLPreferenceConstants.CONFIRM_DANGER_SQL,
                ConfirmationDialog.CONFIRM,
                ConfirmationDialog.WARNING,
                query.getType().name(),
                targetName) != IDialogConstants.OK_ID)
            {
                return false;
            }
        }
    } else if (newTab && queries.size() > MAX_PARALLEL_QUERIES_NO_WARN) {
        // Confirm spawning many parallel query tabs at once
        if (ConfirmationDialog.showConfirmDialogEx(
            ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
            getSite().getShell(),
            SQLPreferenceConstants.CONFIRM_MASS_PARALLEL_SQL,
            ConfirmationDialog.CONFIRM,
            ConfirmationDialog.WARNING,
            queries.size()) != IDialogConstants.OK_ID)
        {
            return false;
        }
    }
    if (resultsSash.getMaximizedControl() != null) {
        // Make the results area visible before execution
        resultsSash.setMaximizedControl(null);
    }
    // Save editor
    if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.AUTO_SAVE_ON_EXECUTE) && isDirty()) {
        doSave(new NullProgressMonitor());
    }
    boolean extraTabsClosed = false;
    if (!export) {
        if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.CLEAR_OUTPUT_BEFORE_EXECUTE)) {
            outputViewer.clearOutput();
        }
        if (!newTab || !isSingleQuery) {
            // We don't need new tab or we are executing a script - so close all extra tabs
            if (!closeExtraResultTabs(null, true)) {
                // User cancelled tab closing - abort execution
                return false;
            }
            extraTabsClosed = true;
        }
    }
    if (queryProcessors.isEmpty()) {
        // If all tabs were closed
        createQueryProcessor(true, true);
    }
    if (newTab) {
        // Execute each query in a new tab
        for (int i = 0; i < queries.size(); i++) {
            SQLScriptElement query = queries.get(i);
            // First query of a multi-query batch reuses the current processor
            QueryProcessor queryProcessor = (i == 0 && !isSingleQuery ? curQueryProcessor : createQueryProcessor(queries.size() == 1, false));
            queryProcessor.processQueries(
                scriptContext,
                Collections.singletonList(query),
                false,
                true,
                export,
                getActivePreferenceStore().getBoolean(SQLPreferenceConstants.RESULT_SET_CLOSE_ON_ERROR), queryListener);
        }
    } else {
        if (!export) {
            // Use current tab.
            // If current tab was pinned then use first tab
            QueryResultsContainer firstResults = curQueryProcessor.getFirstResults();
            CTabItem tabItem = firstResults.getTabItem();
            if (firstResults.isPinned()) {
                curQueryProcessor = queryProcessors.get(0);
                firstResults = curQueryProcessor.getFirstResults();
                if (firstResults.isPinned()) {
                    // The very first tab is also pinned
                    // Well, let's create a new tab
                    curQueryProcessor = createQueryProcessor(true, true);
                    // Make new tab the default
                    firstResults = curQueryProcessor.getFirstResults();
                    if (firstResults.isPinned()) {
                        tabItem.setShowClose(false);
                    }
                }
            }
            if (!extraTabsClosed) {
                if (!closeExtraResultTabs(curQueryProcessor, true)) {
                    return false;
                }
            }
            if (tabItem != null) {
                // Do not switch tab if Output tab is active
                CTabItem selectedTab = resultTabs.getSelection();
                if (selectedTab == null || selectedTab.getData() != outputViewer.getControl()) {
                    resultTabs.setSelection(tabItem);
                }
            }
        }
        return curQueryProcessor.processQueries(scriptContext, queries, forceScript, false, export, false, queryListener);
    }
    return true;
}
/**
 * Creates a per-execution script context chained to the editor's global one,
 * bound to the current input file and this editor's output log.
 */
@NotNull
private SQLScriptContext createScriptContext() {
    File scriptFile = EditorUtils.getLocalFileFromInput(getEditorInput());
    return new SQLScriptContext(
        globalScriptContext,
        SQLEditor.this,
        scriptFile,
        new OutputLogWriter(),
        new SQLEditorParametersProvider(getSite()));
}
/**
 * Shows a status message in the active result-set viewer; silently does
 * nothing when no viewer is available.
 */
private void setStatus(String status, DBPMessageType messageType)
{
    ResultSetViewer viewer = getActiveResultSetViewer();
    if (viewer == null) {
        return;
    }
    viewer.setStatus(status, messageType);
}
/**
 * Closes extra (closeable, non-first) result tabs and all explain-plan tabs,
 * optionally restricted to one query processor and optionally confirmed by the
 * user.
 *
 * @param queryProcessor when non-null, only this processor's tabs are considered
 * @param confirmClose   ask the user before closing
 * @return {@code false} only when the user cancelled; {@code true} otherwise
 */
private boolean closeExtraResultTabs(@Nullable QueryProcessor queryProcessor, boolean confirmClose)
{
    // Close all tabs except first one
    List<CTabItem> tabsToClose = new ArrayList<>();
    // Iterate backwards so index 0 (the primary tab) is never considered
    for (int i = resultTabs.getItemCount() - 1; i > 0; i--) {
        CTabItem item = resultTabs.getItem(i);
        if (item.getData() instanceof QueryResultsContainer && item.getShowClose()) {
            QueryResultsContainer resultsProvider = (QueryResultsContainer)item.getData();
            if (queryProcessor != null && queryProcessor != resultsProvider.queryProcessor) {
                continue;
            }
            if (queryProcessor != null && queryProcessor.resultContainers.size() < 2) {
                // Do not remove first tab for this processor
                continue;
            }
            tabsToClose.add(item);
        } else if (item.getData() instanceof ExplainPlanViewer) {
            tabsToClose.add(item);
        }
    }
    // NOTE(review): with exactly one candidate tab nothing is closed ("> 1");
    // confirm whether a single extra tab is intentionally kept open
    if (tabsToClose.size() > 1) {
        int confirmResult = IDialogConstants.YES_ID;
        if (confirmClose) {
            // NOTE(review): the "+ 4" offset fed to the dialog looks like a
            // message-formatting quirk - verify against the bundle message
            confirmResult = ConfirmationDialog.showConfirmDialog(
                ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
                getSite().getShell(),
                SQLPreferenceConstants.CONFIRM_RESULT_TABS_CLOSE,
                ConfirmationDialog.QUESTION_WITH_CANCEL,
                tabsToClose.size() + 4);
            if (confirmResult == IDialogConstants.CANCEL_ID) {
                return false;
            }
        }
        if (confirmResult == IDialogConstants.YES_ID) {
            for (CTabItem item : tabsToClose) {
                item.dispose();
            }
        }
    }
    return true;
}
/**
 * Resolves/binds the query's parameters in a fresh script context.
 *
 * @return {@code false} if the user cancelled parameter binding
 */
public boolean transformQueryWithParameters(SQLQuery query) {
    return createScriptContext().fillQueryParameters(query, false);
}
/**
 * Verifies that an active database session is available.
 * If the data source is disconnected and the auto-connect preference is on,
 * triggers an asynchronous connect and returns its immediate result; when a
 * separate per-editor connection is required, initializes it.
 *
 * @param onFinish invoked when an asynchronous connect completes
 * @return {@code true} when the session is usable right now
 * @throws DBException when no data source is selected or it is disconnected
 *                     and auto-connect is disabled
 */
private boolean checkSession(DBRProgressListener onFinish)
    throws DBException
{
    DBPDataSourceContainer ds = getDataSourceContainer();
    if (ds == null) {
        throw new DBException("No active connection");
    }
    if (!ds.isConnected()) {
        boolean doConnect = ds.getPreferenceStore().getBoolean(SQLPreferenceConstants.EDITOR_CONNECT_ON_EXECUTE);
        if (doConnect) {
            // May connect asynchronously; onFinish fires when done
            return checkConnected(true, onFinish);
        } else {
            throw new DBException("Disconnected from database");
        }
    }
    DBPDataSource dataSource = ds.getDataSource();
    if (dataSource != null && SQLEditorUtils.isOpenSeparateConnection(ds) && executionContext == null) {
        // Editor is configured to use its own connection - open it now
        initSeparateConnection(dataSource, () -> onFinish.onTaskFinished(Status.OK_STATUS));
        return executionContext != null;
    }
    return true;
}
/**
 * Handles datasource change action in UI: refreshes the execution context and
 * then applies the change to the UI on the display thread.
 */
private void fireDataSourceChange()
{
    updateExecutionContext(null);
    // UI state must be touched on the SWT thread
    UIUtils.syncExec(this::onDataSourceChange);
}
/**
 * Applies a data-source change to the editor UI: updates part background,
 * viewer configuration, input binding, result-set status, command enablement
 * and syntax rules; collapses the results area when no data source is bound.
 * No-ops (beyond reloading syntax) when the UI is already disposed, and skips
 * work when neither syntax nor execution context actually changed.
 */
private void onDataSourceChange()
{
    if (resultsSash == null || resultsSash.isDisposed()) {
        // UI already gone - only keep the syntax rules in sync
        reloadSyntaxRules();
        return;
    }
    DatabaseEditorUtils.setPartBackground(this, resultTabs);
    if (getSourceViewerConfiguration() instanceof SQLEditorSourceViewerConfiguration) {
        ((SQLEditorSourceViewerConfiguration) getSourceViewerConfiguration()).onDataSourceChange();
    }
    DBCExecutionContext executionContext = getExecutionContext();
    if (executionContext != null) {
        // Persist the new binding in the editor input
        EditorUtils.setInputDataSource(getEditorInput(), new SQLNavigatorContext(executionContext));
    }
    if (syntaxLoaded && lastExecutionContext == executionContext) {
        // Nothing changed - avoid redundant UI churn
        return;
    }
    if (curResultsContainer != null) {
        ResultSetViewer rsv = curResultsContainer.getResultSetController();
        if (rsv != null) {
            if (executionContext == null) {
                rsv.setStatus(ModelMessages.error_not_connected_to_database);
            } else {
                // NOTE(review): only a closing quote is appended here - verify the
                // bundle message 'editors_sql_staus_connected_to' supplies the opening one
                rsv.setStatus(SQLEditorMessages.editors_sql_staus_connected_to + executionContext.getDataSource().getContainer().getName() + "'"); //$NON-NLS-2$
            }
        }
    }
    if (lastExecutionContext == null || executionContext == null || lastExecutionContext.getDataSource() != executionContext.getDataSource()) {
        // Update command states
        SQLEditorPropertyTester.firePropertyChange(SQLEditorPropertyTester.PROP_CAN_EXECUTE);
        SQLEditorPropertyTester.firePropertyChange(SQLEditorPropertyTester.PROP_CAN_EXPLAIN);
        reloadSyntaxRules();
    }
    if (getDataSourceContainer() == null) {
        // No connection - hide the results area entirely
        resultsSash.setMaximizedControl(sqlEditorPanel);
    } else {
        resultsSash.setMaximizedControl(null);
    }
    refreshActions();
    lastExecutionContext = executionContext;
    syntaxLoaded = true;
}
/**
 * Connection lifecycle hook; this editor needs no preparation before connect.
 */
@Override
public void beforeConnect()
{
}
/**
 * Connection lifecycle hook: stops all running query jobs before the
 * data source disconnects.
 */
@Override
public void beforeDisconnect()
{
    closeAllJobs();
}
/**
 * Disposes the editor: tears down the extra presentation, releases the
 * data-source container, cancels jobs, clears viewer/processor references and
 * - unless the whole workbench is closing - deletes the backing script file
 * if it is empty (per user preference).
 */
@Override
public void dispose()
{
    if (extraPresentation != null) {
        extraPresentation.dispose();
        extraPresentation = null;
    }
    // Release ds container
    releaseContainer();
    closeAllJobs();
    // Capture the file before super.dispose() invalidates the input
    final IEditorInput editorInput = getEditorInput();
    IFile sqlFile = EditorUtils.getFileFromInput(editorInput);
    logViewer = null;
    outputViewer = null;
    queryProcessors.clear();
    curResultsContainer = null;
    curQueryProcessor = null;
    super.dispose();
    if (sqlFile != null && !PlatformUI.getWorkbench().isClosing()) {
        deleteFileIfEmpty(sqlFile);
    }
}
/**
 * Deletes the script file on editor close when it is empty, honoring the
 * "empty script close behavior" preference. In DELETE_NEW mode the file is
 * kept if any history revision ever contained content (i.e. only scripts that
 * were never written to are deleted).
 */
private void deleteFileIfEmpty(IFile sqlFile) {
    if (sqlFile == null || !sqlFile.exists()) {
        return;
    }
    SQLPreferenceConstants.EmptyScriptCloseBehavior emptyScriptCloseBehavior = SQLPreferenceConstants.EmptyScriptCloseBehavior.getByName(
        getActivePreferenceStore().getString(SQLPreferenceConstants.SCRIPT_DELETE_EMPTY));
    if (emptyScriptCloseBehavior == SQLPreferenceConstants.EmptyScriptCloseBehavior.NOTHING) {
        return;
    }
    File osFile = sqlFile.getLocation().toFile();
    if (!osFile.exists() || osFile.length() != 0) {
        // Not empty
        return;
    }
    try {
        IProgressMonitor monitor = new NullProgressMonitor();
        if (emptyScriptCloseBehavior == SQLPreferenceConstants.EmptyScriptCloseBehavior.DELETE_NEW) {
            IFileState[] fileHistory = sqlFile.getHistory(monitor);
            if (!ArrayUtils.isEmpty(fileHistory)) {
                for (IFileState historyItem : fileHistory) {
                    try (InputStream contents = historyItem.getContents()) {
                        // Reading a single byte is enough to detect non-empty content
                        int cValue = contents.read();
                        if (cValue != -1) {
                            // At least once there was some content saved
                            return;
                        }
                    }
                }
            }
        }
        // This file is empty and never (at least during this session) had any contents.
        // Drop it.
        log.debug("Delete empty SQL script '" + sqlFile.getFullPath().toOSString() + "'");
        sqlFile.delete(true, monitor);
    } catch (Exception e) {
        log.error("Can't delete empty script file", e); //$NON-NLS-1$
    }
}
/**
 * Closes the background job of every query processor owned by this editor.
 */
private void closeAllJobs()
{
    queryProcessors.forEach(QueryProcessor::closeJob);
}
/**
 * @return the total number of queries currently running across all processors
 */
private int getTotalQueryRunning() {
    int total = 0;
    for (QueryProcessor processor : queryProcessors) {
        total += processor.curJobRunning.get();
    }
    return total;
}
/**
 * Reacts to model events that concern this editor's data-source container or
 * data source: clears the container on removal, refreshes the part name and
 * title on update/selection (e.g. active schema changed), and in all cases
 * re-syncs the execution context and UI - asynchronously on the UI thread.
 */
@Override
public void handleDataSourceEvent(final DBPEvent event)
{
    // NOTE(review): event.getObject() is dereferenced below without a null check -
    // confirm events always carry a non-null object
    final boolean dsEvent = event.getObject() == getDataSourceContainer();
    final boolean objectEvent = event.getObject().getDataSource() == getDataSource();
    if (dsEvent || objectEvent) {
        UIUtils.asyncExec(
            () -> {
                switch (event.getAction()) {
                    case OBJECT_REMOVE:
                        if (dsEvent) {
                            // Our connection was deleted - detach from it
                            setDataSourceContainer(null);
                        }
                        break;
                    case OBJECT_UPDATE:
                    case OBJECT_SELECT:
                        if (objectEvent) {
                            setPartName(getEditorName());
                            // Active schema was changed? Update title and tooltip
                            firePropertyChange(IWorkbenchPartConstants.PROP_TITLE);
                        }
                        break;
                    default:
                        break;
                }
                updateExecutionContext(null);
                onDataSourceChange();
            }
        );
    }
}
/**
 * Saves the editor. Outside the auto-save job, pending result-set data changes
 * are flushed first via a background SaveJob (aborting the save when that job
 * fails); then the extra presentation (if saveable) and the text document are
 * saved, and the data-source binding is refreshed.
 */
@Override
public void doSave(IProgressMonitor monitor) {
    if (!EditorUtils.isInAutoSaveJob()) {
        monitor.beginTask("Save data changes...", 1);
        try {
            monitor.subTask("Save '" + getPartName() + "' changes...");
            SaveJob saveJob = new SaveJob();
            saveJob.schedule();
            // Wait until job finished
            UIUtils.waitJobCompletion(saveJob);
            if (!saveJob.success) {
                // Data save failed/was cancelled - do not save the script either
                monitor.setCanceled(true);
                return;
            }
        } finally {
            monitor.done();
        }
    }
    if (extraPresentation instanceof ISaveablePart) {
        ((ISaveablePart) extraPresentation).doSave(monitor);
    }
    super.doSave(monitor);
    updateDataSourceContainer();
}
/**
 * SQL scripts can always be saved to an external file.
 */
@Override
public boolean isSaveAsAllowed()
{
    return true;
}
/**
 * "Save As" delegates to the external-file save flow (file dialog + rebind).
 */
@Override
public void doSaveAs()
{
    saveToExternalFile();
}
/**
 * Decides what happens to unsaved state when the editor is closed:
 * confirms closing while queries are still running, delegates to dirty
 * result-set viewers, lets the connections service veto on an open
 * transaction, then cancels remaining jobs and answers per the auto-save
 * preference (YES) or the default prompt.
 *
 * @return one of the {@link ISaveablePart2} constants
 */
@Override
public int promptToSaveOnClose()
{
    int jobsRunning = getTotalQueryRunning();
    if (jobsRunning > 0) {
        log.warn("There are " + jobsRunning + " SQL job(s) still running in the editor");
        if (ConfirmationDialog.showConfirmDialog(
            ResourceBundle.getBundle(SQLEditorMessages.BUNDLE_NAME),
            null,
            SQLPreferenceConstants.CONFIRM_RUNNING_QUERY_CLOSE,
            ConfirmationDialog.QUESTION,
            jobsRunning) != IDialogConstants.YES_ID)
        {
            return ISaveablePart2.CANCEL;
        }
    }
    // Let the first dirty result-set viewer drive the prompt
    for (QueryProcessor queryProcessor : queryProcessors) {
        for (QueryResultsContainer resultsProvider : queryProcessor.getResultContainers()) {
            ResultSetViewer rsv = resultsProvider.getResultSetController();
            if (rsv != null && rsv.isDirty()) {
                return rsv.promptToSaveOnClose();
            }
        }
    }
    // End transaction
    if (executionContext != null) {
        UIServiceConnections serviceConnections = DBWorkbench.getService(UIServiceConnections.class);
        if (serviceConnections != null && !serviceConnections.checkAndCloseActiveTransaction(new DBCExecutionContext[] {executionContext})) {
            return ISaveablePart2.CANCEL;
        }
    }
    // That's fine
    if (isNonPersistentEditor()) {
        // SQL console content is intentionally discarded
        return ISaveablePart2.NO;
    }
    // Cancel running jobs (if any) and close results tabs
    for (QueryProcessor queryProcessor : queryProcessors) {
        queryProcessor.cancelJob();
        // FIXME: it is a hack (to avoid asking "Save script?" because editor is marked as dirty while queries are running)
        // FIXME: make it better
        queryProcessor.curJobRunning.set(0);
    }
    updateDirtyFlag();
    if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.AUTO_SAVE_ON_CLOSE)) {
        return ISaveablePart2.YES;
    }
    return ISaveablePart2.DEFAULT;
}
/**
 * Re-binds this editor to the external file it was just saved to, preserving
 * the current datasource / execution context association on the new input.
 */
protected void afterSaveToFile(File saveFile) {
    try {
        final IFileStore store = EFS.getStore(saveFile.toURI());
        final IEditorInput externalInput = new FileStoreEditorInput(store);
        final SQLNavigatorContext navContext =
            new SQLNavigatorContext(getDataSourceContainer(), getExecutionContext());
        EditorUtils.setInputDataSource(externalInput, navContext);
        setInput(externalInput);
    } catch (CoreException e) {
        DBWorkbench.getPlatformUI().showError("File save", "Can't open SQL editor from external file", e);
    }
}
@Nullable
private ResultSetViewer getActiveResultSetViewer()
{
    // Viewer of the currently selected results container, or null when none is active
    final QueryResultsContainer activeContainer = curResultsContainer;
    return activeContainer == null ? null : activeContainer.getResultSetController();
}
/** Toggles visibility of the "script position" vertical ruler column. */
private void showScriptPositionRuler(boolean show)
{
    final IColumnSupport rulerSupport = getAdapter(IColumnSupport.class);
    if (rulerSupport == null) {
        // Editor does not support ruler columns - nothing to toggle
        return;
    }
    final RulerColumnDescriptor positionDescriptor =
        RulerColumnRegistry.getDefault().getColumnDescriptor(ScriptPositionColumn.ID);
    rulerSupport.setColumnVisible(positionDescriptor, show);
}
/**
 * Reveals the given query in the editor as a UI job; when {@code select} is set,
 * the statement is also selected and shown in the status line.
 */
private void showStatementInEditor(final SQLQuery query, final boolean select)
{
    UIUtils.runUIJob("Select SQL query in editor", monitor -> {
        if (isDisposed()) {
            return;
        }
        if (!select) {
            // Only scroll the statement into view, leave the selection untouched
            getSourceViewer().revealRange(query.getOffset(), query.getLength());
        } else {
            selectAndReveal(query.getOffset(), query.getLength());
            setStatus(query.getText(), DBPMessageType.INFORMATION);
        }
    });
}
@Override
public void reloadSyntaxRules() {
    super.reloadSyntaxRules();
    // Output viewer uses editor colors/fonts - refresh it together with syntax rules
    if (outputViewer != null) {
        outputViewer.refreshStyles();
    }
}
/**
 * Creates a new query processor (with its first results container) and makes it
 * the current one; optionally selects its results tab.
 */
private QueryProcessor createQueryProcessor(boolean setSelection, boolean makeDefault)
{
    final QueryProcessor processor = new QueryProcessor(makeDefault);
    curQueryProcessor = processor;
    curResultsContainer = processor.getFirstResults();
    if (setSelection) {
        final CTabItem resultsTab = curResultsContainer.getTabItem();
        if (resultsTab != null) {
            // Bring the freshly created results tab to front
            resultTabs.setSelection(resultsTab);
        }
    }
    return processor;
}
@Override
public void preferenceChange(PreferenceChangeEvent event) {
    switch (event.getProperty()) {
        // Any preference affecting SQL parsing/highlighting forces a syntax reload
        case ModelPreferences.SCRIPT_STATEMENT_DELIMITER:
        case ModelPreferences.SCRIPT_IGNORE_NATIVE_DELIMITER:
        case ModelPreferences.SCRIPT_STATEMENT_DELIMITER_BLANK:
        case ModelPreferences.SQL_PARAMETERS_ENABLED:
        case ModelPreferences.SQL_ANONYMOUS_PARAMETERS_MARK:
        case ModelPreferences.SQL_ANONYMOUS_PARAMETERS_ENABLED:
        case ModelPreferences.SQL_VARIABLES_ENABLED:
        case ModelPreferences.SQL_NAMED_PARAMETERS_PREFIX:
            reloadSyntaxRules();
            break;
        case SQLPreferenceConstants.RESULT_SET_ORIENTATION:
            updateResultSetOrientation();
            break;
        case SQLPreferenceConstants.EDITOR_SEPARATE_CONNECTION: {
            // Save current datasource (we want to keep it here)
            DBPDataSource dataSource = curDataSource;
            releaseExecutionContext();
            // Restore cur data source (as it is reset in releaseExecutionContext)
            curDataSource = dataSource;
            // Re-open a dedicated connection if the editor is configured for one
            if (dataSource != null && SQLEditorUtils.isOpenSeparateConnection(dataSource.getContainer())) {
                initSeparateConnection(dataSource, null);
            }
            break;
        }
    }
}
/**
 * Layout of the results panel relative to the SQL editor panel.
 * Note: the SWT sash orientation is perpendicular to the visual split
 * (HORIZONTAL layout = SWT.VERTICAL sash, and vice versa).
 */
public enum ResultSetOrientation {
    HORIZONTAL(SWT.VERTICAL, SQLEditorMessages.sql_editor_result_set_orientation_horizontal, SQLEditorMessages.sql_editor_result_set_orientation_horizontal_tip, true),
    VERTICAL(SWT.HORIZONTAL, SQLEditorMessages.sql_editor_result_set_orientation_vertical, SQLEditorMessages.sql_editor_result_set_orientation_vertical_tip, true),
    DETACHED(SWT.VERTICAL, SQLEditorMessages.sql_editor_result_set_orientation_detached, SQLEditorMessages.sql_editor_result_set_orientation_detached_tip, false);

    // SWT sash style constant used to split editor and results
    private final int sashOrientation;
    // Human-readable label shown in the orientation menu
    private final String label;
    // Longer description used as action tooltip
    private final String description;
    // Whether this orientation is currently implemented/selectable
    private final boolean supported;

    ResultSetOrientation(int sashOrientation, String label, String description, boolean supported) {
        this.sashOrientation = sashOrientation;
        this.label = label;
        this.description = description;
        this.supported = supported;
    }

    public int getSashOrientation() {
        return sashOrientation;
    }

    public String getLabel() {
        return label;
    }

    public String getDescription() {
        return description;
    }

    public boolean isSupported() {
        return supported;
    }
}
/**
 * Contributes one radio-button action per {@link ResultSetOrientation} to the
 * editor menu; selecting one persists the preference, which editors pick up
 * through their preference listeners.
 */
public static class ResultSetOrientationMenuContributor extends CompoundContributionItem
{
    @Override
    protected IContributionItem[] getContributionItems() {
        final IEditorPart editor = UIUtils.getActiveWorkbenchWindow().getActivePage().getActiveEditor();
        if (!(editor instanceof SQLEditorBase)) {
            // Only meaningful for SQL editors
            return new IContributionItem[0];
        }
        final DBPPreferenceStore prefs = DBWorkbench.getPlatform().getPreferenceStore();
        final String activeOrientation = prefs.getString(SQLPreferenceConstants.RESULT_SET_ORIENTATION);
        final ResultSetOrientation[] allOrientations = ResultSetOrientation.values();
        final List<IContributionItem> menuItems = new ArrayList<>(allOrientations.length);
        for (final ResultSetOrientation orientation : allOrientations) {
            final Action switchAction = new Action(orientation.getLabel(), Action.AS_RADIO_BUTTON) {
                @Override
                public void run() {
                    prefs.setValue(SQLPreferenceConstants.RESULT_SET_ORIENTATION, orientation.name());
                    PrefUtils.savePreferenceStore(prefs);
                }
            };
            switchAction.setDescription(orientation.getDescription());
            if (!orientation.isSupported()) {
                switchAction.setEnabled(false);
            }
            if (orientation.name().equals(activeOrientation)) {
                switchAction.setChecked(true);
            }
            menuItems.add(new ActionContributionItem(switchAction));
        }
        return menuItems.toArray(new IContributionItem[0]);
    }
}
/**
 * Executes SQL queries/scripts for this editor and owns the results containers
 * (result tabs) they produce. An editor may have several processors - e.g. one
 * per "execute in new tab" invocation; they are tracked in {@code queryProcessors}.
 */
public class QueryProcessor implements SQLResultsConsumer {

    // Currently executing (or last prepared) query job; null when idle
    private volatile SQLQueryJob curJob;
    // Number of queries currently running within this processor
    private AtomicInteger curJobRunning = new AtomicInteger(0);
    // Results containers (one per result set tab) owned by this processor
    private final List<QueryResultsContainer> resultContainers = new ArrayList<>();
    // Custom data receiver (e.g. for data export); when null the results viewer receives data
    private volatile DBDDataReceiver curDataReceiver = null;

    QueryProcessor(boolean makeDefault) {
        // Create first (default) results provider
        if (makeDefault) {
            queryProcessors.add(0, this);
        } else {
            queryProcessors.add(this);
        }
        createResultsProvider(0, makeDefault);
    }

    // Creates a results container for the given result set index (multi-resultset queries)
    private QueryResultsContainer createResultsProvider(int resultSetNumber, boolean makeDefault) {
        QueryResultsContainer resultsProvider = new QueryResultsContainer(this, resultSetNumber, makeDefault);
        resultContainers.add(resultsProvider);
        return resultsProvider;
    }

    // Creates a results container bound to an explicit data container (association navigation)
    private QueryResultsContainer createResultsProvider(DBSDataContainer dataContainer) {
        QueryResultsContainer resultsProvider = new QueryResultsContainer(this, resultContainers.size(), dataContainer);
        resultContainers.add(resultsProvider);
        return resultsProvider;
    }

    @NotNull
    QueryResultsContainer getFirstResults()
    {
        // Always non-empty: the constructor creates the first container
        return resultContainers.get(0);
    }

    @Nullable
    QueryResultsContainer getResults(SQLQuery query) {
        // Find the container currently associated with the given query (identity match)
        for (QueryResultsContainer provider : resultContainers) {
            if (provider.query == query) {
                return provider;
            }
        }
        return null;
    }

    List<QueryResultsContainer> getResultContainers() {
        return resultContainers;
    }

    /** Cancels (if running) and closes the current job, releasing its resources off the UI thread. */
    private void closeJob()
    {
        final SQLQueryJob job = curJob;
        if (job != null) {
            if (job.getState() == Job.RUNNING) {
                job.cancel();
            }
            curJob = null;
            if (job.isJobOpen()) {
                // Close database resources with a timeout so UI is not blocked indefinitely
                RuntimeUtils.runTask(monitor -> {
                    job.closeJob();
                }, "Close SQL job", 2000, true);
            }
        }
    }

    /** Cancels viewer-level jobs of all containers plus the running query job, if any. */
    public void cancelJob() {
        for (QueryResultsContainer rc : resultContainers) {
            rc.viewer.cancelJobs();
        }
        final SQLQueryJob job = curJob;
        if (job != null) {
            if (job.getState() == Job.RUNNING) {
                job.cancel();
            }
        }
    }

    /**
     * Runs the given script elements.
     *
     * @param scriptContext   shared script context (parameters, control-command state)
     * @param queries         elements to execute (queries and control commands)
     * @param forceScript     treat a single query as a script (no single-query shortcuts)
     * @param fetchResults    fetch result sets in script mode
     * @param export          route results into the data transfer wizard instead of tabs
     * @param closeTabOnError close the results tab when execution fails
     * @param queryListener   optional external listener chained after the editor's own
     * @return true if execution was started (or the export wizard opened)
     */
    boolean processQueries(SQLScriptContext scriptContext, final List<SQLScriptElement> queries, boolean forceScript, final boolean fetchResults, boolean export, boolean closeTabOnError, SQLQueryListener queryListener)
    {
        if (queries.isEmpty()) {
            // Nothing to process
            return false;
        }
        if (curJobRunning.get() > 0) {
            // Only one batch at a time per processor
            DBWorkbench.getPlatformUI().showError(
                SQLEditorMessages.editors_sql_error_cant_execute_query_title,
                SQLEditorMessages.editors_sql_error_cant_execute_query_message);
            return false;
        }
        final DBCExecutionContext executionContext = getExecutionContext();
        if (executionContext == null) {
            DBWorkbench.getPlatformUI().showError(
                SQLEditorMessages.editors_sql_error_cant_execute_query_title,
                ModelMessages.error_not_connected_to_database);
            return false;
        }
        final boolean isSingleQuery = !forceScript && (queries.size() == 1);

        // Prepare execution job
        {
            showScriptPositionRuler(true);
            QueryResultsContainer resultsContainer = getFirstResults();

            SQLEditorQueryListener listener = new SQLEditorQueryListener(this, closeTabOnError);
            if (queryListener != null) {
                listener.setExtListener(queryListener);
            }

            if (export) {
                // Export mode: control commands are executed inline, queries become
                // data-transfer producers handed over to the wizard.
                List<IDataTransferProducer> producers = new ArrayList<>();
                for (int i = 0; i < queries.size(); i++) {
                    SQLScriptElement element = queries.get(i);
                    if (element instanceof SQLControlCommand) {
                        try {
                            scriptContext.executeControlCommand((SQLControlCommand) element);
                        } catch (DBException e) {
                            DBWorkbench.getPlatformUI().showError("Command error", "Error processing control command", e);
                        }
                    } else {
                        SQLQuery query = (SQLQuery) element;
                        scriptContext.fillQueryParameters(query, false);

                        SQLQueryDataContainer dataContainer = new SQLQueryDataContainer(SQLEditor.this, query, scriptContext, log);
                        producers.add(new DatabaseTransferProducer(dataContainer, null));
                    }
                }

                DataTransferWizard.openWizard(
                    getSite().getWorkbenchWindow(),
                    producers,
                    null,
                    new StructuredSelection(this));
            } else {
                final SQLQueryJob job = new SQLQueryJob(
                    getSite(),
                    isSingleQuery ? SQLEditorMessages.editors_sql_job_execute_query : SQLEditorMessages.editors_sql_job_execute_script,
                    executionContext,
                    resultsContainer,
                    queries,
                    scriptContext,
                    this,
                    listener);

                if (isSingleQuery) {
                    // Single query: don't schedule directly; resetting/refreshing the
                    // viewer triggers data read through the container's readData()
                    resultsContainer.query = queries.get(0);

                    closeJob();
                    curJob = job;
                    ResultSetViewer rsv = resultsContainer.getResultSetController();
                    if (rsv != null) {
                        rsv.resetDataFilter(false);
                        rsv.resetHistory();
                        rsv.refresh();
                    }
                } else {
                    // Script mode: run the whole batch as a background job
                    if (fetchResults) {
                        job.setFetchResultSets(true);
                    }
                    job.schedule();
                    curJob = job;
                }
            }
        }
        return true;
    }

    /** True when any owned results viewer has unsaved data edits. */
    public boolean isDirty() {
        for (QueryResultsContainer resultsProvider : resultContainers) {
            ResultSetViewer rsv = resultsProvider.getResultSetController();
            if (rsv != null && rsv.isDirty()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Removes a results container; when the last one goes away the processor
     * unregisters itself and the editor's current processor/container are fixed up.
     */
    void removeResults(QueryResultsContainer resultsContainer) {
        resultContainers.remove(resultsContainer);
        if (resultContainers.isEmpty()) {
            queryProcessors.remove(this);
            if (curQueryProcessor == this) {
                if (queryProcessors.isEmpty()) {
                    curQueryProcessor = null;
                    curResultsContainer = null;
                } else {
                    curQueryProcessor = queryProcessors.get(0);
                    curResultsContainer = curQueryProcessor.getFirstResults();
                }
            }
        }
    }

    /**
     * Supplies the data receiver for a given statement/result-set index.
     * A custom receiver (set during export) wins; otherwise the matching results
     * container's viewer receiver is returned, creating a new tab on the fly for
     * additional result sets.
     */
    @Nullable
    @Override
    public DBDDataReceiver getDataReceiver(final SQLQuery statement, final int resultSetNumber) {
        if (curDataReceiver != null) {
            return curDataReceiver;
        }
        final boolean isStatsResult = (statement != null && statement.getData() == SQLQueryJob.STATS_RESULTS);
//            if (isStatsResult) {
//                // Maybe it was already open
//                for (QueryResultsProvider provider : resultContainers) {
//                    if (provider.query != null && provider.query.getData() == SQLQueryJob.STATS_RESULTS) {
//                        resultSetNumber = provider.resultSetNumber;
//                        break;
//                    }
//                }
//            }
        if (resultSetNumber >= resultContainers.size() && !isDisposed()) {
            // Open new results processor in UI thread
            UIUtils.syncExec(() -> createResultsProvider(resultSetNumber, false));
        }
        if (resultSetNumber >= resultContainers.size()) {
            // Editor seems to be disposed - no data receiver
            return null;
        }
        final QueryResultsContainer resultsProvider = resultContainers.get(resultSetNumber);

        if (statement != null && !resultTabs.isDisposed()) {
            resultsProvider.query = statement;
            resultsProvider.lastGoodQuery = statement;
            String tabName = null;
            String toolTip = CommonUtils.truncateString(statement.getText(), 1000);
            // Special statements (not real statements) have their name in data
            if (isStatsResult) {
                tabName = "Statistics";
                int queryIndex = queryProcessors.indexOf(QueryProcessor.this);
                if (queryIndex > 0) {
                    tabName += " - " + (queryIndex + 1);
                }
            }
            String finalTabName = tabName;
            UIUtils.asyncExec(() -> resultsProvider.updateResultsName(finalTabName, toolTip));
        }
        ResultSetViewer rsv = resultsProvider.getResultSetController();
        return rsv == null ? null : rsv.getDataReceiver();
    }
}
/**
 * A single results tab: hosts a {@link ResultSetViewer} and adapts it to the
 * data-container API so the viewer can (re)read and count data through the
 * owning {@link QueryProcessor}'s query job. May alternatively wrap an explicit
 * {@link DBSDataContainer} when opened via association navigation.
 */
public class QueryResultsContainer implements DBSDataContainer, IResultSetContainer, IResultSetListener, SQLQueryContainer, ISmartTransactionManager {

    private final QueryProcessor queryProcessor;
    private final ResultSetViewer viewer;
    // Index of the result set within a multi-resultset query
    private final int resultSetNumber;
    // Query currently associated with this container (may fail to execute)
    private SQLScriptElement query = null;
    // Last query that executed successfully (used for naming)
    private SQLScriptElement lastGoodQuery = null;
    // Data container and filter are non-null only in case of associations navigation
    private DBSDataContainer dataContainer;

    private QueryResultsContainer(QueryProcessor queryProcessor, int resultSetNumber, boolean makeDefault)
    {
        this.queryProcessor = queryProcessor;
        this.resultSetNumber = resultSetNumber;

        // NOTE: detached viewer path is currently disabled (flag is hard-coded false)
        boolean detachedViewer = false;
        SQLResultsView sqlView = null;
        if (detachedViewer) {
            try {
                sqlView = (SQLResultsView) getSite().getPage().showView(SQLResultsView.VIEW_ID, null, IWorkbenchPage.VIEW_CREATE);
            } catch (Throwable e) {
                DBWorkbench.getPlatformUI().showError("Detached results", "Can't open results view", e);
            }
        }

        if (sqlView != null) {
            // Detached results viewer
            sqlView.setContainer(this);
            this.viewer = sqlView.getViewer();
        } else {
            // Embedded results viewer
            this.viewer = new ResultSetViewer(resultTabs, getSite(), this);
            this.viewer.addListener(this);

            // Place the new tab after the last existing results tab (unless default)
            int tabCount = resultTabs.getItemCount();
            int tabIndex = 0;
            if (!makeDefault) {
                for (int i = tabCount; i > 0; i--) {
                    if (resultTabs.getItem(i - 1).getData() instanceof QueryResultsContainer) {
                        tabIndex = i;
                        break;
                    }
                }
            }
            CTabItem tabItem = new CTabItem(resultTabs, SWT.NONE, tabIndex);
            int queryIndex = queryProcessors.indexOf(queryProcessor);
            String tabName = getResultsTabName(resultSetNumber, queryIndex, null);
            tabItem.setText(tabName);
            tabItem.setImage(IMG_DATA_GRID);
            tabItem.setData(this);
            tabItem.setShowClose(true);
            CSSUtils.setCSSClass(tabItem, DBStyles.COLORED_BY_CONNECTION_TYPE);

            tabItem.setControl(viewer.getControl());
            tabItem.addDisposeListener(resultTabDisposeListener);
            UIUtils.disposeControlOnItemDispose(tabItem);
        }

        // Unregister from the processor when the viewer control goes away
        viewer.getControl().addDisposeListener(e -> {
            QueryResultsContainer.this.queryProcessor.removeResults(QueryResultsContainer.this);
            if (QueryResultsContainer.this == curResultsContainer) {
                curResultsContainer = null;
            }
        });
    }

    QueryResultsContainer(QueryProcessor queryProcessor, int resultSetNumber, DBSDataContainer dataContainer) {
        this(queryProcessor, resultSetNumber, false);
        this.dataContainer = dataContainer;
        updateResultsName(getResultsTabName(resultSetNumber, 0, dataContainer.getName()), null);
    }

    private CTabItem getTabItem() {
        return getTabItem(this);
    }

    // Finds the tab item whose data is the given container; null if already closed
    private CTabItem getTabItem(QueryResultsContainer resultsContainer) {
        for (CTabItem item : resultTabs.getItems()) {
            if (item.getData() == resultsContainer) {
                return item;
            }
        }
        return null;
    }

    // Updates the tab label (kept unchanged when the new name is empty) and tooltip
    void updateResultsName(String resultSetName, String toolTip) {
        CTabItem tabItem = getTabItem();
        if (tabItem != null && !tabItem.isDisposed()) {
            if (!CommonUtils.isEmpty(resultSetName)) {
                tabItem.setText(resultSetName);
            }
            tabItem.setToolTipText(toolTip);
        }
    }

    // Pinned tabs hide the close button
    boolean isPinned() {
        CTabItem tabItem = getTabItem();
        return tabItem != null && !tabItem.isDisposed() && !tabItem.getShowClose();
    }

    void setPinned(boolean pinned) {
        CTabItem tabItem = getTabItem();
        if (tabItem != null) {
            tabItem.setShowClose(!pinned);
            tabItem.setImage(pinned ? IMG_DATA_GRID_LOCKED : IMG_DATA_GRID);
        }
    }

    @NotNull
    @Override
    public DBPProject getProject() {
        return SQLEditor.this.getProject();
    }

    @Override
    public DBCExecutionContext getExecutionContext() {
        return SQLEditor.this.getExecutionContext();
    }

    @Nullable
    @Override
    public ResultSetViewer getResultSetController()
    {
        return viewer;
    }

    @Nullable
    @Override
    public DBSDataContainer getDataContainer()
    {
        return this;
    }

    @Override
    public boolean isReadyToRun()
    {
        // Ready when no job exists yet or no query of it is currently running
        return queryProcessor.curJob == null || queryProcessor.curJobRunning.get() <= 0;
    }

    @Override
    public void openNewContainer(DBRProgressMonitor monitor, @NotNull DBSDataContainer dataContainer, @NotNull DBDDataFilter newFilter) {
        // Open association-navigation results in a new tab and select it
        UIUtils.syncExec(() -> {
            QueryResultsContainer resultsProvider = queryProcessor.createResultsProvider(dataContainer);
            CTabItem tabItem = getTabItem(resultsProvider);
            if (tabItem != null) {
                tabItem.getParent().setSelection(tabItem);
            }
            setActiveResultsContainer(resultsProvider);
            resultsProvider.viewer.refreshWithFilter(newFilter);
        });
    }

    @Override
    public IResultSetDecorator createResultSetDecorator() {
        return new QueryResultsDecorator() {
            @Override
            public String getEmptyDataDescription() {
                // Hint message shows the actual key bindings for execute commands
                String execQuery = ActionUtils.findCommandDescription(SQLEditorCommands.CMD_EXECUTE_STATEMENT, getSite(), true);
                String execScript = ActionUtils.findCommandDescription(SQLEditorCommands.CMD_EXECUTE_SCRIPT, getSite(), true);
                return NLS.bind(ResultSetMessages.sql_editor_resultset_filter_panel_control_execute_to_see_reslut, execQuery, execScript);
            }
        };
    }

    @Override
    public int getSupportedFeatures()
    {
        if (dataContainer != null) {
            return dataContainer.getSupportedFeatures();
        }
        int features = DATA_SELECT;
        features |= DATA_COUNT;

        // Filtering is only offered when this query produced a single result set
        if (getQueryResultCounts() <= 1) {
            features |= DATA_FILTER;
        }
        return features;
    }

    /**
     * Reads data by re-running the associated query through the processor's job
     * (or delegates to the wrapped data container). The viewer calls this on
     * refresh; a custom receiver is installed temporarily for export reads.
     */
    @NotNull
    @Override
    public DBCStatistics readData(@NotNull DBCExecutionSource source, @NotNull DBCSession session, @NotNull DBDDataReceiver dataReceiver, DBDDataFilter dataFilter, long firstRow, long maxRows, long flags, int fetchSize) throws DBCException
    {
        if (dataContainer != null) {
            return dataContainer.readData(source, session, dataReceiver, dataFilter, firstRow, maxRows, flags, fetchSize);
        }
        final SQLQueryJob job = queryProcessor.curJob;
        if (job == null) {
            throw new DBCException("No active query - can't read data");
        }
        if (this.query instanceof SQLQuery) {
            SQLQuery query = (SQLQuery) this.query;
            // Query-level LIMIT/OFFSET overrides the viewer's paging window
            if (query.getResultsMaxRows() >= 0) {
                firstRow = query.getResultsOffset();
                maxRows = query.getResultsMaxRows();
            }
        }
        try {
            if (dataReceiver != viewer.getDataReceiver()) {
                // Some custom receiver. Probably data export
                queryProcessor.curDataReceiver = dataReceiver;
            } else {
                queryProcessor.curDataReceiver = null;
            }
            // Count number of results for this query. If > 1 then we will refresh them all at once
            int resultCounts = getQueryResultCounts();
            if (resultCounts <= 1 && resultSetNumber > 0) {
                job.setFetchResultSetNumber(resultSetNumber);
            } else {
                job.setFetchResultSetNumber(-1);
            }
            job.setResultSetLimit(firstRow, maxRows);
            job.setDataFilter(dataFilter);
            job.setFetchSize(fetchSize);
            job.setFetchFlags(flags);

            job.extractData(session, this.query, resultCounts > 1 ? 0 : resultSetNumber);

            lastGoodQuery = job.getLastGoodQuery();

            return job.getStatistics();
        } finally {
            // Nullify custom data receiver
            queryProcessor.curDataReceiver = null;
        }
    }

    // Number of containers in this processor bound to the same query (identity match)
    private int getQueryResultCounts() {
        int resultCounts = 0;
        for (QueryResultsContainer qrc : queryProcessor.resultContainers) {
            if (qrc.query == query) {
                resultCounts++;
            }
        }
        return resultCounts;
    }

    /**
     * Counts rows by transforming the associated query into a SELECT COUNT(*).
     * NOTE: the {@code flags} parameter is not forwarded - FLAG_NONE is passed
     * to the wrapped container; presumably intentional, but worth confirming.
     */
    @Override
    public long countData(@NotNull DBCExecutionSource source, @NotNull DBCSession session, DBDDataFilter dataFilter, long flags)
        throws DBCException
    {
        if (dataContainer != null) {
            return dataContainer.countData(source, session, dataFilter, DBSDataContainer.FLAG_NONE);
        }
        DBPDataSource dataSource = getDataSource();
        if (dataSource == null) {
            throw new DBCException("Query transform is not supported by datasource");
        }
        if (!(query instanceof SQLQuery)) {
            throw new DBCException("Can't count rows for control command");
        }
        try {
            SQLQuery countQuery = new SQLQueryTransformerCount().transformQuery(dataSource, getSyntaxManager(), (SQLQuery) query);
            if (!CommonUtils.isEmpty(countQuery.getParameters())) {
                countQuery.setParameters(parseQueryParameters(countQuery));
            }

            try (DBCStatement dbStatement = DBUtils.makeStatement(source, session, DBCStatementType.QUERY, countQuery, 0, 0)) {
                if (dbStatement.executeStatement()) {
                    try (DBCResultSet rs = dbStatement.openResultSet()) {
                        if (rs.nextRow()) {
                            List<DBCAttributeMetaData> resultAttrs = rs.getMeta().getAttributes();
                            Object countValue = null;
                            if (resultAttrs.size() == 1) {
                                countValue = rs.getAttributeValue(0);
                            } else {
                                // In some databases (Influx?) SELECT count(*) produces multiple columns. Try to find first one with 'count' in its name.
                                for (int i = 0; i < resultAttrs.size(); i++) {
                                    DBCAttributeMetaData ma = resultAttrs.get(i);
                                    if (ma.getName().toLowerCase(Locale.ENGLISH).contains("count")) {
                                        countValue = rs.getAttributeValue(i);
                                        break;
                                    }
                                }
                            }
                            if (countValue instanceof Number) {
                                return ((Number) countValue).longValue();
                            } else {
                                throw new DBCException("Unexpected row count value: " + countValue);
                            }
                        } else {
                            throw new DBCException("Row count result is empty");
                        }
                    }
                } else {
                    throw new DBCException("Row count query didn't return any value");
                }
            }
        } catch (DBException e) {
            throw new DBCException("Error executing row count", e);
        }
    }

    @Nullable
    @Override
    public String getDescription()
    {
        if (dataContainer != null) {
            return dataContainer.getDescription();
        } else {
            return SQLEditorMessages.editors_sql_description;
        }
    }

    @Nullable
    @Override
    public DBSObject getParentObject()
    {
        return getDataSource();
    }

    @Nullable
    @Override
    public DBPDataSource getDataSource()
    {
        return SQLEditor.this.getDataSource();
    }

    @Override
    public boolean isPersisted() {
        return dataContainer == null || dataContainer.isPersisted();
    }

    @NotNull
    @Override
    public String getName()
    {
        // Prefer the wrapped container name, then the last successful query text
        if (dataContainer != null) {
            return dataContainer.getName();
        }
        String name = lastGoodQuery != null ?
                lastGoodQuery.getOriginalText() :
                (query == null ? null : query.getOriginalText());
        if (name == null) {
            name = "SQL";
        }
        return name;
    }

    @Nullable
    @Override
    public DBPDataSourceContainer getDataSourceContainer() {
        return SQLEditor.this.getDataSourceContainer();
    }

    @Override
    public String toString() {
        if (dataContainer != null) {
            return dataContainer.toString();
        }
        return query == null ?
            "SQL Query / " + SQLEditor.this.getEditorInput().getName() :
            query.getOriginalText();
    }

    @Override
    public void handleResultSetLoad() {

    }

    @Override
    public void handleResultSetChange() {
        // Data edits in the grid make the editor dirty
        updateDirtyFlag();
    }

    @Override
    public void handleResultSetSelectionChange(SelectionChangedEvent event) {

    }

    @Override
    public SQLScriptElement getQuery() {
        return query;
    }

    @Override
    public Map<String, Object> getQueryParameters() {
        return globalScriptContext.getAllParameters();
    }

    @Override
    public boolean isSmartAutoCommit() {
        return SQLEditor.this.isSmartAutoCommit();
    }

    @Override
    public void setSmartAutoCommit(boolean smartAutoCommit) {
        SQLEditor.this.setSmartAutoCommit(smartAutoCommit);
    }
}
/**
 * Builds a results tab label: the explicit name (or a generic "data grid" label
 * when empty), suffixed with a 1-based result set or query index to
 * disambiguate multiple tabs.
 */
private String getResultsTabName(int resultSetNumber, int queryIndex, String name) {
    final StringBuilder label = new StringBuilder(
        CommonUtils.isEmpty(name) ? SQLEditorMessages.editors_sql_data_grid : name);
    if (resultSetNumber > 0) {
        label.append(" - ").append(resultSetNumber + 1);
    } else if (queryIndex > 0) {
        label.append(" - ").append(queryIndex + 1);
    }
    return label.toString();
}
/**
 * Listens to query/script execution lifecycle events and updates the editor UI:
 * running-state icon, statement highlighting, result tab names, error markers
 * and final statistics. Most UI work is posted asynchronously to the UI thread.
 */
private class SQLEditorQueryListener implements SQLQueryListener {
    private final QueryProcessor queryProcessor;
    // True while a multi-statement script is being executed
    private boolean scriptMode;
    // Timestamp of the last editor-UI refresh; throttles updates during scripts
    private long lastUIUpdateTime;
    // Selection at execution start; restored after errors / on completion
    private final ITextSelection originalSelection = (ITextSelection) getSelectionProvider().getSelection();
    private int topOffset, visibleLength;
    // Close the results tab when the query fails (used by "open in new tab")
    private boolean closeTabOnError;
    // Optional external listener chained after this one
    private SQLQueryListener extListener;

    private SQLEditorQueryListener(QueryProcessor queryProcessor, boolean closeTabOnError) {
        this.queryProcessor = queryProcessor;
        this.closeTabOnError = closeTabOnError;
    }

    public SQLQueryListener getExtListener() {
        return extListener;
    }

    public void setExtListener(SQLQueryListener extListener) {
        this.extListener = extListener;
    }

    @Override
    public void onStartScript() {
        try {
            lastUIUpdateTime = -1;
            scriptMode = true;
            UIUtils.asyncExec(() -> {
                if (isDisposed()) {
                    return;
                }
                // Optionally maximize the editor panel while the script runs
                if (getActivePreferenceStore().getBoolean(SQLPreferenceConstants.MAXIMIZE_EDITOR_ON_SCRIPT_EXECUTE)) {
                    resultsSash.setMaximizedControl(sqlEditorPanel);
                }
            });
        } finally {
            if (extListener != null) extListener.onStartScript();
        }
    }

    @Override
    public void onStartQuery(DBCSession session, final SQLQuery query) {
        try {
            if (isSmartAutoCommit()) {
                DBExecUtils.checkSmartAutoCommit(session, query.getText());
            }
            // First running query switches the editor title icon to "executing"
            boolean isInExecute = getTotalQueryRunning() > 0;
            if (!isInExecute) {
                UIUtils.asyncExec(() -> {
                    setTitleImage(DBeaverIcons.getImage(UIIcon.SQL_SCRIPT_EXECUTE));
                    updateDirtyFlag();
                });
            }
            queryProcessor.curJobRunning.incrementAndGet();
            synchronized (runningQueries) {
                runningQueries.add(query);
            }
            // Throttled UI update: remember viewport and highlight the current statement
            if (lastUIUpdateTime < 0 || System.currentTimeMillis() - lastUIUpdateTime > SCRIPT_UI_UPDATE_PERIOD) {
                UIUtils.asyncExec(() -> {
                    TextViewer textViewer = getTextViewer();
                    if (textViewer != null) {
                        topOffset = textViewer.getTopIndexStartOffset();
                        visibleLength = textViewer.getBottomIndexEndOffset() - topOffset;
                    }
                });
                if (scriptMode) {
                    showStatementInEditor(query, false);
                }
                lastUIUpdateTime = System.currentTimeMillis();
            }
        } finally {
            if (extListener != null) extListener.onStartQuery(session, query);
        }
    }

    @Override
    public void onEndQuery(final DBCSession session, final SQLQueryResult result, DBCStatistics statistics) {
        try {
            synchronized (runningQueries) {
                runningQueries.remove(result.getStatement());
            }
            queryProcessor.curJobRunning.decrementAndGet();
            // Last query finished - restore the normal editor icon
            if (getTotalQueryRunning() <= 0) {
                UIUtils.asyncExec(() -> {
                    setTitleImage(editorImage);
                    updateDirtyFlag();
                });
            }

            if (isDisposed()) {
                return;
            }
            UIUtils.runUIJob("Process SQL query result", monitor -> {
                // Finish query
                processQueryResult(monitor, result, statistics);
                // Update dirty flag
                updateDirtyFlag();
                refreshActions();
            });
        } finally {
            if (extListener != null) extListener.onEndQuery(session, result, statistics);
        }
    }

    // UI-thread post-processing of a single finished query: error markers,
    // selection handling, tab renaming, statistics, beep and agent notification
    private void processQueryResult(DBRProgressMonitor monitor, SQLQueryResult result, DBCStatistics statistics) {
        dumpQueryServerOutput(result);
        if (!scriptMode) {
            runPostExecuteActions(result);
        }
        SQLQuery query = result.getStatement();
        Throwable error = result.getError();
        if (error != null) {
            setStatus(GeneralUtils.getFirstMessage(error), DBPMessageType.ERROR);
            // Try to position the cursor at the failing token; otherwise select the query
            if (!scrollCursorToError(monitor, query, error)) {
                int errorQueryOffset = query.getOffset();
                int errorQueryLength = query.getLength();
                if (errorQueryOffset >= 0 && errorQueryLength > 0) {
                    if (scriptMode) {
                        getSelectionProvider().setSelection(new TextSelection(errorQueryOffset, errorQueryLength));
                    } else {
                        getSelectionProvider().setSelection(originalSelection);
                    }
                }
            }
        } else if (!scriptMode && getActivePreferenceStore().getBoolean(SQLPreferenceConstants.RESET_CURSOR_ON_EXECUTE)) {
            getSelectionProvider().setSelection(originalSelection);
        }
        // Get results window (it is possible that it was closed till that moment
        {
            for (QueryResultsContainer cr : queryProcessor.resultContainers) {
                cr.viewer.updateFiltersText(false);
            }
            // Set tab names by query results names
            if (scriptMode || queryProcessor.getResultContainers().size() > 0) {

                int queryIndex = queryProcessors.indexOf(queryProcessor);
                int resultsIndex = 0;
                for (QueryResultsContainer results : queryProcessor.resultContainers) {
                    if (results.query != query) {
                        continue;
                    }
                    if (resultsIndex < result.getExecuteResults().size()) {
                        SQLQueryResult.ExecuteResult executeResult = result.getExecuteResults(resultsIndex, true);
                        String resultSetName = getResultsTabName(results.resultSetNumber, queryIndex, executeResult.getResultSetName());
                        results.updateResultsName(resultSetName, null);
                        ResultSetViewer resultSetViewer = results.getResultSetController();
                        if (resultSetViewer != null) {
                            resultSetViewer.getModel().setStatistics(statistics);
                        }
                    }
                    resultsIndex++;
                }
            }
        }
        // Close tab on error
        if (closeTabOnError && error != null) {
            CTabItem tabItem = queryProcessor.getFirstResults().getTabItem();
            if (tabItem != null && tabItem.getShowClose()) {
                tabItem.dispose();
            }
        }
        // Beep
        if (dataSourceContainer != null && !scriptMode && getActivePreferenceStore().getBoolean(SQLPreferenceConstants.BEEP_ON_QUERY_END)) {
            Display.getCurrent().beep();
        }
        // Notify agent
        if (result.getQueryTime() > DBWorkbench.getPlatformUI().getLongOperationTimeout() * 1000) {
            DBWorkbench.getPlatformUI().notifyAgent(
                    "Query completed [" + getEditorInput().getName() + "]" + GeneralUtils.getDefaultLineSeparator() +
                            CommonUtils.truncateString(query.getText(), 200), !result.hasError() ? IStatus.INFO : IStatus.ERROR);
        }
    }

    @Override
    public void onEndScript(final DBCStatistics statistics, final boolean hasErrors) {
        try {
            if (isDisposed()) {
                return;
            }
            runPostExecuteActions(null);
            UIUtils.asyncExec(() -> {
                if (isDisposed()) {
                    // Editor closed
                    return;
                }
                // Un-maximize the editor panel and restore selection on success
                resultsSash.setMaximizedControl(null);
                if (!hasErrors) {
                    getSelectionProvider().setSelection(originalSelection);
                }
                QueryResultsContainer results = queryProcessor.getFirstResults();
                ResultSetViewer viewer = results.getResultSetController();
                if (viewer != null) {
                    viewer.getModel().setStatistics(statistics);
                    viewer.updateStatusMessage();
                }
            });
        } finally {
            if (extListener != null) extListener.onEndScript(statistics, hasErrors);
        }
    }
}
/** Notifies the workbench that this part's dirty state may have changed. */
public void updateDirtyFlag() {
    firePropertyChange(IWorkbenchPartConstants.PROP_DIRTY);
}
/**
 * Routes find/replace to whichever component effectively has focus: the output
 * viewer tab, the active result-set viewer, or the SQL text editor. When no
 * tracked widget has focus the last known owner is reused, so find/replace
 * keeps working while focus sits in a toolbar or dialog.
 */
private class FindReplaceTarget extends DynamicFindReplaceTarget {
    // Last known focus owner (true = SQL text editor)
    private boolean lastFocusInEditor = true;
    @Override
    public IFindReplaceTarget getTarget() {
        // Output tab selected? Search its styled text
        CTabItem activeResultsTab = getActiveResultsTab();
        if (activeResultsTab != null && outputViewer != null && activeResultsTab.getData() == outputViewer) {
            return new StyledTextFindReplaceTarget(outputViewer.getText());
        }
        ResultSetViewer rsv = getActiveResultSetViewer();
        TextViewer textViewer = getTextViewer();
        boolean focusInEditor = textViewer != null && textViewer.getTextWidget().isFocusControl();
        if (!focusInEditor) {
            if (rsv != null && rsv.getActivePresentation().getControl().isFocusControl()) {
                // Results grid has focus - route there (keep focusInEditor false)
                focusInEditor = false;
            } else {
                // Nobody has focus - fall back to the last known owner
                focusInEditor = lastFocusInEditor;
            }
        }
        lastFocusInEditor = focusInEditor;
        if (!focusInEditor && rsv != null) {
            IFindReplaceTarget nested = rsv.getAdapter(IFindReplaceTarget.class);
            if (nested != null) {
                return nested;
            }
        } else if (textViewer != null) {
            return textViewer.getFindReplaceTarget();
        }
        return null;
    }
}
/**
 * Routes workbench selection to either the active result-set viewer or the SQL
 * text editor, depending on which one effectively has focus. When neither owns
 * the focus (e.g. it is in a toolbar), the last known owner keeps providing the
 * selection so actions stay enabled.
 */
private class DynamicSelectionProvider extends CompositeSelectionProvider {
    // Last known focus owner (true = SQL text editor)
    private boolean lastFocusInEditor = true;
    @Override
    public ISelectionProvider getProvider() {
        final ResultSetViewer resultsViewer = getActiveResultSetViewer();
        final TextViewer editorViewer = getTextViewer();
        boolean focusInEditor = editorViewer != null && editorViewer.getTextWidget().isFocusControl();
        if (!focusInEditor) {
            final boolean resultsHaveFocus =
                resultsViewer != null && resultsViewer.getActivePresentation().getControl().isFocusControl();
            if (!resultsHaveFocus) {
                // Neither widget owns the focus - keep routing to the last known owner
                focusInEditor = lastFocusInEditor;
            }
        }
        lastFocusInEditor = focusInEditor;
        if (!focusInEditor && resultsViewer != null) {
            return resultsViewer;
        }
        return editorViewer != null ? editorViewer.getSelectionProvider() : null;
    }
}
/**
 * Queues server-side output (e.g. DBMS_OUTPUT) produced during query execution
 * for later display in the output viewer. Falls back to a default reader that
 * extracts output from the query result when the datasource has no native one.
 */
private void dumpQueryServerOutput(@Nullable SQLQueryResult result) {
    final DBCExecutionContext context = getExecutionContext();
    if (context == null) {
        return;
    }
    DBCServerOutputReader reader = DBUtils.getAdapter(DBCServerOutputReader.class, context.getDataSource());
    if (reader == null && result != null) {
        // No datasource-specific reader - use the generic result-based one
        reader = new DefaultServerOutputReader();
    }
    if (reader != null && reader.isServerOutputEnabled()) {
        synchronized (serverOutputs) {
            serverOutputs.add(new ServerOutputInfo(reader, context, result));
        }
    }
}
/**
 * Runs follow-up actions after query/script execution. Currently: refreshes the
 * execution context's default catalog/schema (the executed SQL may have changed
 * them) when the query succeeded, or on script end (null result).
 */
private void runPostExecuteActions(@Nullable SQLQueryResult result) {
    final DBCExecutionContext executionContext = getExecutionContext();
    if (executionContext != null) {
        // Refresh active object
        // NOTE: operator precedence makes this (result == null) || (!hasError && pref)
        if (result == null || !result.hasError() && getActivePreferenceStore().getBoolean(SQLPreferenceConstants.REFRESH_DEFAULTS_AFTER_EXECUTE)) {
            final DBCExecutionContextDefaults contextDefaults = executionContext.getContextDefaults();
            if (contextDefaults != null) {
                // Run asynchronously - default-object refresh may hit the database
                new AbstractJob("Refresh default object") {
                    @Override
                    protected IStatus run(DBRProgressMonitor monitor) {
                        DBUtils.refreshContextDefaultsAndReflect(monitor, contextDefaults);
                        return Status.OK_STATUS;
                    }
                }.schedule();
            }
        }
    }
}
private void updateOutputViewerIcon(boolean alert) {
Image image = alert ? IMG_OUTPUT_ALERT : IMG_OUTPUT;
CTabItem outputItem = UIUtils.getTabItem(resultTabs, outputViewer.getControl());
if (outputItem != null && outputItem != resultTabs.getSelection()) {
outputItem.setImage(image);
} else {
// TODO: make icon update. Can't call setImage because this will break contract f VerticalButton
/*
VerticalButton viewItem = getViewToolItem(SQLEditorCommands.CMD_SQL_SHOW_OUTPUT);
if (viewItem != null) {
viewItem.setImage(image);
}
*/
}
}
private class SaveJob extends AbstractJob {
private transient Boolean success = null;
SaveJob() {
super("Save '" + getPartName() + "' data changes...");
setUser(true);
}
@Override
protected IStatus run(DBRProgressMonitor monitor) {
try {
for (QueryProcessor queryProcessor : queryProcessors) {
for (QueryResultsContainer resultsProvider : queryProcessor.getResultContainers()) {
ResultSetViewer rsv = resultsProvider.getResultSetController();
if (rsv != null && rsv.isDirty()) {
rsv.doSave(monitor);
}
}
}
success = true;
return Status.OK_STATUS;
} catch (Throwable e) {
success = false;
log.error(e);
return GeneralUtils.makeExceptionStatus(e);
} finally {
if (success == null) {
success = true;
}
}
}
}
private static class ServerOutputInfo {
private final DBCServerOutputReader outputReader;
private final DBCExecutionContext executionContext;
private final SQLQueryResult result;
ServerOutputInfo(DBCServerOutputReader outputReader, DBCExecutionContext executionContext, SQLQueryResult result) {
this.outputReader = outputReader;
this.executionContext = executionContext;
this.result = result;
}
}
private final List<ServerOutputInfo> serverOutputs = new ArrayList<>();
private class ServerOutputReader extends AbstractJob {
ServerOutputReader() {
super("Dump server output");
setSystem(true);
}
@Override
protected IStatus run(DBRProgressMonitor monitor) {
if (!DBWorkbench.getPlatform().isShuttingDown() && resultsSash != null && !resultsSash.isDisposed()) {
dumpOutput(monitor);
schedule(200);
}
return Status.OK_STATUS;
}
private void dumpOutput(DBRProgressMonitor monitor) {
if (outputViewer == null) {
return;
}
List<ServerOutputInfo> outputs;
synchronized (serverOutputs) {
outputs = new ArrayList<>(serverOutputs);
serverOutputs.clear();
}
PrintWriter outputWriter = outputViewer.getOutputWriter();
if (!outputs.isEmpty()) {
for (ServerOutputInfo info : outputs) {
try {
info.outputReader.readServerOutput(monitor, info.executionContext, info.result, null, outputWriter);
} catch (Exception e) {
log.error(e);
}
}
}
{
// Check running queries for async output
DBCServerOutputReader outputReader = null;
final DBCExecutionContext executionContext = getExecutionContext();
if (executionContext != null) {
final DBPDataSource dataSource = executionContext.getDataSource();
// Dump server output
outputReader = DBUtils.getAdapter(DBCServerOutputReader.class, dataSource);
}
if (outputReader != null && outputReader.isAsyncOutputReadSupported()) {
for (QueryProcessor qp : queryProcessors) {
SQLQueryJob queryJob = qp.curJob;
if (queryJob != null) {
DBCStatement statement = queryJob.getCurrentStatement();
if (statement != null) {
try {
outputReader.readServerOutput(monitor, executionContext, null, statement, outputWriter);
} catch (DBCException e) {
log.error(e);
}
}
}
}
}
}
outputWriter.flush();
UIUtils.asyncExec(() -> {
if (outputViewer!=null) {
if (outputViewer.getControl()!=null) {
if (!outputViewer.isDisposed() && outputViewer.isHasNewOutput()) {
outputViewer.scrollToEnd();
updateOutputViewerIcon(true);
outputViewer.resetNewOutput();
}
}
}
});
}
}
}
| #8489 Search in output console
Former-commit-id: 4067ca2e26257fb082f9f15e7d3ac80f5e8cd413 | plugins/org.jkiss.dbeaver.ui.editors.sql/src/org/jkiss/dbeaver/ui/editors/sql/SQLEditor.java | #8489 Search in output console | <ide><path>lugins/org.jkiss.dbeaver.ui.editors.sql/src/org/jkiss/dbeaver/ui/editors/sql/SQLEditor.java
<ide> private boolean lastFocusInEditor = true;
<ide> @Override
<ide> public IFindReplaceTarget getTarget() {
<del> CTabItem activeResultsTab = getActiveResultsTab();
<del> if (activeResultsTab != null && outputViewer != null && activeResultsTab.getData() == outputViewer) {
<del> return new StyledTextFindReplaceTarget(outputViewer.getText());
<del> }
<ide> ResultSetViewer rsv = getActiveResultSetViewer();
<ide> TextViewer textViewer = getTextViewer();
<ide> boolean focusInEditor = textViewer != null && textViewer.getTextWidget().isFocusControl();
<add>
<add> CTabItem activeResultsTab = getActiveResultsTab();
<add> if (activeResultsTab != null && activeResultsTab.getData() instanceof StyledText) {
<add> StyledText styledText = (StyledText) activeResultsTab.getData();
<add> if (!focusInEditor) {
<add> return new StyledTextFindReplaceTarget(styledText);
<add> }
<add> }
<add>
<ide> if (!focusInEditor) {
<ide> if (rsv != null && rsv.getActivePresentation().getControl().isFocusControl()) {
<ide> focusInEditor = false; |
|
JavaScript | mit | b7e8725e70235412e7c33830a2bb3f01f12042e0 | 0 | pdanpdan/quasar,quasarframework/quasar,quasarframework/quasar,pdanpdan/quasar,fsgiudice/quasar,rstoenescu/quasar-framework,pdanpdan/quasar,pdanpdan/quasar,rstoenescu/quasar-framework,quasarframework/quasar,rstoenescu/quasar-framework,fsgiudice/quasar,quasarframework/quasar,fsgiudice/quasar | import History from '../plugins/history'
export default {
data () {
return {
inFullscreen: false
}
},
watch: {
$route () {
this.exitFullscreen()
}
},
methods: {
toggleFullscreen () {
if (this.inFullscreen) {
this.exitFullscreen()
}
else {
this.setFullscreen()
}
},
setFullscreen () {
if (this.inFullscreen) {
return
}
this.inFullscreen = true
this.container = this.$el.parentNode
this.container.replaceChild(this.fullscreenFillerNode, this.$el)
document.body.appendChild(this.$el)
document.body.classList.add('with-mixin-fullscreen')
this.__historyFullscreen = {
handler: this.exitFullscreen
}
History.add(this.__historyFullscreen)
},
exitFullscreen () {
if (!this.inFullscreen) {
return
}
if (this.__historyFullscreen) {
History.remove(this.__historyFullscreen)
this.__historyFullscreen = null
}
this.container.replaceChild(this.$el, this.fullscreenFillerNode)
document.body.classList.remove('with-mixin-fullscreen')
this.inFullscreen = false
}
},
created () {
this.fullscreenFillerNode = document.createElement('span')
},
beforeDestroy () {
this.exitFullscreen()
}
}
| src/mixins/fullscreen.js | import History from '../plugins/history'
export default {
data () {
return {
inFullscreen: false
}
},
watch: {
$route () {
this.__exitFullscreen()
}
},
methods: {
toggleFullscreen () {
if (this.inFullscreen) {
this.exitFullscreen()
}
else {
this.setFullscreen()
}
},
setFullscreen () {
if (this.inFullscreen) {
return
}
this.inFullscreen = true
this.container = this.$el.parentNode
this.container.replaceChild(this.fullscreenFillerNode, this.$el)
document.body.appendChild(this.$el)
document.body.classList.add('with-mixin-fullscreen')
this.__historyFullscreen = {
handler: this.exitFullscreen
}
History.add(this.__historyFullscreen)
},
exitFullscreen () {
if (!this.inFullscreen) {
return
}
if (this.__historyFullscreen) {
History.remove(this.__historyFullscreen)
this.__historyFullscreen = null
}
this.container.replaceChild(this.$el, this.fullscreenFillerNode)
document.body.classList.remove('with-mixin-fullscreen')
this.inFullscreen = false
}
},
created () {
this.fullscreenFillerNode = document.createElement('span')
},
beforeDestroy () {
this.exitFullscreen()
}
}
| [Fix] mixins/fullscreen.js: __exitFullscreen in $route watcher (#1342)
mixins/fullscreen.js: Fix name __exitFullscreen in $route watcher | src/mixins/fullscreen.js | [Fix] mixins/fullscreen.js: __exitFullscreen in $route watcher (#1342) | <ide><path>rc/mixins/fullscreen.js
<ide> },
<ide> watch: {
<ide> $route () {
<del> this.__exitFullscreen()
<add> this.exitFullscreen()
<ide> }
<ide> },
<ide> methods: { |
|
Java | lgpl-2.1 | 4cf16a59a9d7d8204823160956b95a2a48992e6e | 0 | drhee/toxoMine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,JoeCarlson/intermine,joshkh/intermine,JoeCarlson/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,JoeCarlson/intermine,JoeCarlson/intermine,tomck/intermine,justincc/intermine,joshkh/intermine,tomck/intermine,justincc/intermine,kimrutherford/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,kimrutherford/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,drhee/toxoMine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,justincc/intermine,joshkh/intermine,joshkh/intermine,zebrafishmine/intermine,zebrafishmine/intermine,elsiklab/intermine,tomck/intermine,kimrutherford/intermine,joshkh/intermine,zebrafishmine/intermine,drhee/toxoMine,kimrutherford/intermine,justincc/intermine,zebrafishmine/intermine,zebrafishmine/intermine,joshkh/intermine,JoeCarlson/intermine,tomck/intermine,tomck/intermine,drhee/toxoMine,elsiklab/intermine,zebrafishmine/intermine,justincc/intermine,kimrutherford/intermine,joshkh/intermine,elsiklab/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,elsiklab/intermine,elsiklab/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,elsiklab/intermine,kimrutherford/intermine,justincc/intermine,tomck/intermine,kimrutherford/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,kimrutherford/intermine,JoeCarlson/intermine,zebrafishmine/intermine,zebrafishmine/intermine,joshkh/intermine,kimrutherford/intermine,zebrafishmine/intermine,justincc/intermine,justincc/intermine | package org.intermine.web;
/*
* Copyright (C) 2002-2005 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
/**
* Container for ServletContext and Session attribute names used by the webapp
*
* @author Kim Rutherford
*/
public interface Constants
{
/**
* ServletContext attribute used to store web.properties
*/
public static final String WEB_PROPERTIES = "WEB_PROPERTIES";
/**
* ServletContext attribute used to store the example queries
*/
public static final String EXAMPLE_QUERIES = "EXAMPLE_QUERIES";
/**
* ServletContext attribute used to store global template queries
*/
public static final String GLOBAL_TEMPLATE_QUERIES = "GLOBAL_TEMPLATE_QUERIES";
/**
* ServletContext attribute maps category name to List of TemplateQuerys
*/
public static final String CATEGORY_TEMPLATES = "CATEGORY_TEMPLATES";
/**
* ServletContext attribute maps a class name to a Map of category names to List of
* TemplateQuerys.
*/
public static final String CLASS_CATEGORY_TEMPLATES = "CLASS_CATEGORY_TEMPLATES";
/**
* ServletContext attribute maps a class name to a Map of template names to simple expressions -
* the expression describes a field that should be set when a template is linked to from the
* object details page. eg. Gene.identifier
*/
public static final String CLASS_TEMPLATE_EXPRS = "CLASS_TEMPLATE_EXPRS";
/**
* ServletContext attribute maps category name to List of class names.
*/
public static final String CATEGORY_CLASSES = "CATEGORY_CLASSES";
/**
* ServletContext attribute, List of category names.
*/
public static final String CATEGORIES = "CATEGORIES";
/**
* ServletContext attribute, provides an interface for actions and
* controllers to query some model meta-data like class counts and
* field enumerations.
*/
public static final String OBJECT_STORE_SUMMARY = "OBJECT_STORE_SUMMARY";
/**
* ServletContext attribute used to store the Map of class names to Displayer objects and
* className+"."+fieldName to Displayer objects.
*/
public static final String DISPLAYERS = "DISPLAYERS";
/**
* ServletContext attribute used to store the WebConfig object for the Model.
*/
public static final String WEBCONFIG = "WEBCONFIG";
/**
* ServletContext attribute used to store the ObjectStore
*/
public static final String OBJECTSTORE = "OBJECTSTORE";
/**
* ServletContext attribute used to store the ProfileManager
*/
public static final String PROFILE_MANAGER = "PROFILE_MANAGER";
/**
* Session attribute used to store the user's Profile
*/
public static final String PROFILE = "PROFILE";
/**
* Session attribute used to store the current query
*/
public static final String QUERY = "QUERY";
/**
* Session attribute used to store the copy of the query that the user is
* building a template with.
*/
public static final String TEMPLATE_PATHQUERY = "TEMPLATE_PATHQUERY";
/**
* Session attribute used to store the original of the template being edited
* in the query builder.
*/
public static final String EDITING_TEMPLATE = "EDITING_TEMPLATE";
/**
* Session attribute used to store the results of running the current query
*/
public static final String QUERY_RESULTS = "QUERY_RESULTS";
/**
* Session attribute used to store the active results table (which may be QUERY_RESULTS)
*/
public static final String RESULTS_TABLE = "RESULTS_TABLE";
/**
* Session attribute storing a bean exposing the user's trail through the object details
* pages.
*/
public static final String OBJECT_DETAILS_TRAIL = "OBJECT_DETAILS_TRAIL";
/**
* Session attribute equals Boolean.TRUE when logged in user is superuser.
*/
public static final String IS_SUPERUSER = "IS_SUPERUSER";
/**
* Session attribute containing Map containing 'collapsed' state of objectDetails.jsp
* UI elements.
*/
public static final String COLLAPSED = "COLLAPSED";
/**
* Servlet attribute used to store username of superuser (this attribute
* will disappear when we implement a more fine-grained user privileges
* system).
*/
public static final String SUPERUSER_ACCOUNT = "SUPERUSER_ACCOUNT";
/**
* Session attribute that temporarily holds a Vector of messages that will be displayed by the
* errorMessages.jsp on the next page viewed by the user and then removed (allows message
* after redirect).
*/
public static final String MESSAGES = "MESSAGES";
/**
* Session attribute that temporarily holds a Vector of errors that will be displayed by the
* errorMessages.jsp on the next page viewed by the user and then removed (allows errors
* after redirect).
*/
public static final String ERRORS = "ERRORS";
/**
* The name of the property that is set to TRUE in the PortalQueryAction Action to indicate
* to the ObjectDetailsController that we have come from a portal page.
*/
public static final String PORTAL_QUERY_FLAG = "PORTAL_QUERY_FLAG";
/**
* The name of the property to look up to find the maximum size of an inline table.
*/
public static final String INLINE_TABLE_SIZE = "inline.table.size";
/**
* Period of time to wait for client to poll a running query before cancelling the query.
*/
public static final int QUERY_TIMEOUT_SECONDS = 20;
/**
* Refresh period specified on query poll page.
*/
public static final int POLL_REFRESH_SECONDS = 2;
}
| intermine/src/java/org/intermine/web/Constants.java | package org.intermine.web;
/*
* Copyright (C) 2002-2005 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
/**
* Container for ServletContext and Session attribute names used by the webapp
*
* @author Kim Rutherford
*/
public interface Constants
{
/**
* ServletContext attribute used to store web.properties
*/
public static final String WEB_PROPERTIES = "WEB_PROPERTIES";
/**
* ServletContext attribute used to store the example queries
*/
public static final String EXAMPLE_QUERIES = "EXAMPLE_QUERIES";
/**
* ServletContext attribute used to store global template queries
*/
public static final String GLOBAL_TEMPLATE_QUERIES = "GLOBAL_TEMPLATE_QUERIES";
/**
* ServletContext attribute maps category name to List of TemplateQuerys
*/
public static final String CATEGORY_TEMPLATES = "CATEGORY_TEMPLATES";
/**
* ServletContext attribute maps a class name to a Map of category names to List of
* TemplateQuerys.
*/
public static final String CLASS_CATEGORY_TEMPLATES = "CLASS_CATEGORY_TEMPLATES";
/**
* ServletContext attribute maps a class name to a Map of template names to simple expressions -
* the expression describes a field that should be set when a template is linked to from the
* object details page. eg. Gene.identifier
*/
public static final String CLASS_TEMPLATE_EXPRS = "CLASS_TEMPLATE_EXPRS";
/**
* ServletContext attribute maps category name to List of class names.
*/
public static final String CATEGORY_CLASSES = "CATEGORY_CLASSES";
/**
* ServletContext attribute, List of category names.
*/
public static final String CATEGORIES = "CATEGORIES";
/**
* ServletContext attribute, provides an interface for actions and
* controllers to query some model meta-data like class counts and
* field enumerations.
*/
public static final String OBJECT_STORE_SUMMARY = "OBJECT_STORE_SUMMARY";
/**
* ServletContext attribute used to store the Map of class names to Displayer objects and
* className+"."+fieldName to Displayer objects.
*/
public static final String DISPLAYERS = "DISPLAYERS";
/**
* ServletContext attribute used to store the WebConfig object for the Model.
*/
public static final String WEBCONFIG = "WEBCONFIG";
/**
* ServletContext attribute used to store the ObjectStore
*/
public static final String OBJECTSTORE = "OBJECTSTORE";
/**
* ServletContext attribute used to store the ProfileManager
*/
public static final String PROFILE_MANAGER = "PROFILE_MANAGER";
/**
* Session attribute used to store the user's Profile
*/
public static final String PROFILE = "PROFILE";
/**
* Session attribute used to store the current query
*/
public static final String QUERY = "QUERY";
/**
* Session attribute used to store the copy of the query that the user is
* building a template with.
*/
public static final String TEMPLATE_PATHQUERY = "TEMPLATE_PATHQUERY";
/**
* Session attribute used to store the original of the template being edited
* in the query builder.
*/
public static final String EDITING_TEMPLATE = "EDITING_TEMPLATE";
/**
* Session attribute used to store the results of running the current query
*/
public static final String QUERY_RESULTS = "QUERY_RESULTS";
/**
* Session attribute used to store the active results table (which may be QUERY_RESULTS)
*/
public static final String RESULTS_TABLE = "RESULTS_TABLE";
/**
* Session attribute storing a bean exposing the user's trail through the object details
* pages.
*/
public static final String OBJECT_DETAILS_TRAIL = "OBJECT_DETAILS_TRAIL";
/**
* Session attribute equals Boolean.TRUE when logged in user is superuser.
*/
public static final String IS_SUPERUSER = "IS_SUPERUSER";
/**
* Session attribute containing Map containing 'collapsed' state of objectDetails.jsp
* UI elements.
*/
public static final String COLLAPSED = "COLLAPSED";
/**
* Servlet attribute used to store username of superuser (this attribute
* will disappear when we implement a more fine-grained user privileges
* system).
*/
public static final String SUPERUSER_ACCOUNT = "SUPERUSER_ACCOUNT";
/**
* Session attribute that temporarily holds a Vector of messages that will be displayed by the
* errorMessages.jsp on the next page viewed by the user and then removed (allows message
* after redirect).
*/
public static final String MESSAGES = "MESSAGES";
/**
* Session attribute that temporarily holds a Vector of errors that will be displayed by the
* errorMessages.jsp on the next page viewed by the user and then removed (allows errors
* after redirect).
*/
public static final String ERRORS = "ERRORS";
/**
* The name of the property that is set to TRUE in the PortalQuery Action to indicate to the
* ObjectDetailsController that we have come from a portal page.
*/
public static final String PORTAL_QUERY_FLAG = "PORTAL_QUERY_FLAG";
/**
* The name of the property to look up to find the maximum size of an inline table.
*/
public static final String INLINE_TABLE_SIZE = "inline.table.size";
}
| Constants to do with polling a query.
| intermine/src/java/org/intermine/web/Constants.java | Constants to do with polling a query. | <ide><path>ntermine/src/java/org/intermine/web/Constants.java
<ide> public static final String ERRORS = "ERRORS";
<ide>
<ide> /**
<del> * The name of the property that is set to TRUE in the PortalQuery Action to indicate to the
<del> * ObjectDetailsController that we have come from a portal page.
<add> * The name of the property that is set to TRUE in the PortalQueryAction Action to indicate
<add> * to the ObjectDetailsController that we have come from a portal page.
<ide> */
<ide> public static final String PORTAL_QUERY_FLAG = "PORTAL_QUERY_FLAG";
<ide>
<ide> * The name of the property to look up to find the maximum size of an inline table.
<ide> */
<ide> public static final String INLINE_TABLE_SIZE = "inline.table.size";
<add>
<add> /**
<add> * Period of time to wait for client to poll a running query before cancelling the query.
<add> */
<add> public static final int QUERY_TIMEOUT_SECONDS = 20;
<add>
<add> /**
<add> * Refresh period specified on query poll page.
<add> */
<add> public static final int POLL_REFRESH_SECONDS = 2;
<ide> } |
|
Java | apache-2.0 | 56c8534f6f8c582e9e10dae569831a022d749715 | 0 | HanSolo/Enzo | /*
* Copyright (c) 2013 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.enzo.heatcontrol.skin;
import eu.hansolo.enzo.common.Fonts;
import eu.hansolo.enzo.common.ValueEvent;
import eu.hansolo.enzo.heatcontrol.GradientLookup;
import eu.hansolo.enzo.heatcontrol.HeatControl;
import javafx.animation.FadeTransition;
import javafx.animation.ParallelTransition;
import javafx.animation.PauseTransition;
import javafx.animation.SequentialTransition;
import javafx.event.EventHandler;
import javafx.event.EventType;
import javafx.geometry.Point2D;
import javafx.geometry.VPos;
import javafx.scene.CacheHint;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.Skin;
import javafx.scene.control.SkinBase;
import javafx.scene.input.MouseEvent;
import javafx.scene.input.TouchEvent;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.scene.paint.Stop;
import javafx.scene.shape.Circle;
import javafx.scene.shape.StrokeLineCap;
import javafx.scene.text.Text;
import javafx.scene.transform.Rotate;
import javafx.util.Duration;
import java.util.Locale;
/**
* User: hansolo
* Date: 08.11.13
* Time: 16:35
*/
public class HeatControlSkin extends SkinBase<HeatControl> implements Skin<HeatControl> {
private static final double PREFERRED_WIDTH = 200;
private static final double PREFERRED_HEIGHT = 200;
private static final double MINIMUM_WIDTH = 50;
private static final double MINIMUM_HEIGHT = 50;
private static final double MAXIMUM_WIDTH = 1024;
private static final double MAXIMUM_HEIGHT = 1024;
private double size;
private double centerX;
private double centerY;
private Pane pane;
private Circle background;
private Canvas ticksCanvas;
private GraphicsContext ticks;
private Region targetIndicator;
private Rotate targetIndicatorRotate;
private boolean targetExceeded;
private Region valueIndicator;
private Rotate valueIndicatorRotate;
private Text infoText;
private Text value;
private String newTarget;
private GradientLookup gradientLookup;
private double angleStep;
private double interactiveAngle;
private EventHandler<MouseEvent> mouseEventHandler;
private EventHandler<TouchEvent> touchEventHandler;
// ******************** Constructors **************************************
public HeatControlSkin(HeatControl heatControl) {
super(heatControl);
newTarget = "";
gradientLookup = new GradientLookup(new Stop(0.10, Color.web("#3221c9")),
new Stop(0.20, Color.web("#216ec9")),
new Stop(0.30, Color.web("#21bac9")),
new Stop(0.40, Color.web("#30cb22")),
new Stop(0.50, Color.web("#b7df25")),
new Stop(0.60, Color.web("#f1ec28")),
new Stop(0.70, Color.web("#f1c428")),
new Stop(0.80, Color.web("#f19c28")),
new Stop(0.90, Color.web("#f16f28")),
new Stop(1.00, Color.web("#ec272f")));
angleStep = heatControl.getAngleRange() / (heatControl.getMaxValue() - heatControl.getMinValue());
mouseEventHandler = mouseEvent -> handleMouseEvent(mouseEvent);
touchEventHandler = touchEvent -> handleTouchEvent(touchEvent);
init();
initGraphics();
registerListeners();
}
// ******************** Initialization ************************************
private void init() {
if (Double.compare(getSkinnable().getPrefWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getPrefHeight(),
0.0) <= 0 ||
Double.compare(getSkinnable().getWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getHeight(),
0.0) <= 0) {
if (getSkinnable().getPrefWidth() > 0 && getSkinnable().getPrefHeight() > 0) {
getSkinnable().setPrefSize(getSkinnable().getPrefWidth(), getSkinnable().getPrefHeight());
} else {
getSkinnable().setPrefSize(PREFERRED_WIDTH, PREFERRED_HEIGHT);
}
}
if (Double.compare(getSkinnable().getMinWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getMinHeight(),
0.0) <= 0) {
getSkinnable().setMinSize(MINIMUM_WIDTH, MINIMUM_HEIGHT);
}
if (Double.compare(getSkinnable().getMaxWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getMaxHeight(),
0.0) <= 0) {
getSkinnable().setMaxSize(MAXIMUM_WIDTH, MAXIMUM_HEIGHT);
}
}
private void initGraphics() {
background = new Circle(0.5 * PREFERRED_WIDTH, 0.5 * PREFERRED_HEIGHT, 0.5 * PREFERRED_WIDTH);
background.setFill(gradientLookup.getColorAt(getSkinnable().getValue() / (getSkinnable().getMaxValue() - getSkinnable().getMinValue())));
ticksCanvas = new Canvas(PREFERRED_WIDTH, PREFERRED_HEIGHT);
ticks = ticksCanvas.getGraphicsContext2D();
targetIndicator = new Region();
targetIndicator.getStyleClass().setAll("target-indicator");
targetIndicatorRotate = new Rotate(180 - getSkinnable().getStartAngle() - getSkinnable().getMinValue() * angleStep);
targetIndicator.getTransforms().setAll(targetIndicatorRotate);
targetExceeded = false;
valueIndicator = new Region();
valueIndicator.getStyleClass().setAll("value-indicator");
valueIndicatorRotate = new Rotate(180 - getSkinnable().getStartAngle());
valueIndicatorRotate.setAngle(valueIndicatorRotate.getAngle() + (getSkinnable().getValue() - getSkinnable().getOldValue() - getSkinnable().getMinValue()) * angleStep);
valueIndicator.getTransforms().setAll(valueIndicatorRotate);
infoText = new Text(getSkinnable().getInfoText().toUpperCase());
infoText.setTextOrigin(VPos.CENTER);
infoText.setFont(Fonts.opensansSemiBold(0.06 * PREFERRED_HEIGHT));
infoText.getStyleClass().setAll("info-text");
value = new Text(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", getSkinnable().getValue()));
value.setMouseTransparent(true);
value.setTextOrigin(VPos.CENTER);
value.setFont(Fonts.opensansBold(0.32 * PREFERRED_HEIGHT));
value.getStyleClass().setAll("value");
// Add all nodes
pane = new Pane();
pane.getChildren().setAll(background,
ticksCanvas,
valueIndicator,
targetIndicator,
infoText,
value);
getChildren().setAll(pane);
}
private void registerListeners() {
getSkinnable().widthProperty().addListener(observable -> handleControlPropertyChanged("RESIZE"));
getSkinnable().heightProperty().addListener(observable -> handleControlPropertyChanged("RESIZE"));
getSkinnable().infoTextProperty().addListener(observable -> handleControlPropertyChanged("INFO_TEXT"));
getSkinnable().valueProperty().addListener(observable -> handleControlPropertyChanged("VALUE"));
getSkinnable().minValueProperty().addListener(observable -> handleControlPropertyChanged("RECALC"));
getSkinnable().maxValueProperty().addListener(observable -> handleControlPropertyChanged("RECALC"));
getSkinnable().minMeasuredValueProperty().addListener(observable -> handleControlPropertyChanged("MIN_MEASURED_VALUE"));
getSkinnable().maxMeasuredValueProperty().addListener(observable -> handleControlPropertyChanged("MAX_MEASURED_VALUE"));
getSkinnable().thresholdProperty().addListener(observable -> handleControlPropertyChanged("TARGET"));
getSkinnable().angleRangeProperty().addListener(observable -> handleControlPropertyChanged("ANGLE_RANGE"));
valueIndicatorRotate.angleProperty().addListener(observable -> handleControlPropertyChanged("ANGLE"));
targetIndicator.setOnMousePressed(mouseEventHandler);
targetIndicator.setOnMouseDragged(mouseEventHandler);
targetIndicator.setOnMouseReleased(mouseEventHandler);
targetIndicator.setOnTouchPressed(touchEventHandler);
targetIndicator.setOnTouchMoved(touchEventHandler);
targetIndicator.setOnTouchReleased(touchEventHandler);
}
// ******************** Methods *******************************************
protected void handleControlPropertyChanged(final String PROPERTY) {
if ("RESIZE".equals(PROPERTY)) {
resize();
} else if ("INFO_TEXT".equals(PROPERTY)) {
infoText.setText(getSkinnable().getInfoText().toUpperCase());
resize();
} else if ("VALUE".equals(PROPERTY)) {
rotateNeedle();
adjustBackgroundColor();
} else if ("RECALC".equals(PROPERTY)) {
if (getSkinnable().getMinValue() < 0) {
angleStep = getSkinnable().getAngleRange() / (getSkinnable().getMaxValue() - getSkinnable().getMinValue());
valueIndicatorRotate.setAngle(180 - getSkinnable().getStartAngle() - (getSkinnable().getMinValue()) * angleStep);
} else {
angleStep = getSkinnable().getAngleRange() / (getSkinnable().getMaxValue() + getSkinnable().getMinValue());
valueIndicatorRotate.setAngle(180 - getSkinnable().getStartAngle() * angleStep);
}
resize();
} else if ("ANGLE".equals(PROPERTY)) {
double currentValue = (valueIndicatorRotate.getAngle() + getSkinnable().getStartAngle() - 180) / angleStep + getSkinnable().getMinValue();
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", currentValue));
value.setTranslateX((size - value.getLayoutBounds().getWidth()) * 0.5);
// Check targetIndicator
if (targetExceeded) {
if (currentValue < getSkinnable().getTarget()) {
getSkinnable().fireEvent(new ValueEvent(this, null, ValueEvent.VALUE_UNDERRUN));
targetExceeded = false;
}
} else {
if (currentValue > getSkinnable().getTarget()) {
getSkinnable().fireEvent(new ValueEvent(this, null, ValueEvent.VALUE_EXCEEDED));
targetExceeded = true;
}
}
} else if ("TARGET".equals(PROPERTY)) {
targetIndicatorRotate.setAngle(getSkinnable().getTarget() * angleStep - 180 - getSkinnable().getStartAngle());
}
}
@Override protected double computeMinWidth(final double HEIGHT, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMinWidth(Math.max(MINIMUM_HEIGHT, HEIGHT - TOP_INSET - BOTTOM_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computeMinHeight(final double WIDTH, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMinHeight(Math.max(MINIMUM_WIDTH, WIDTH - LEFT_INSET - RIGHT_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computeMaxWidth(final double HEIGHT, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMaxWidth(Math.min(MAXIMUM_HEIGHT, HEIGHT - TOP_INSET - BOTTOM_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computeMaxHeight(final double WIDTH, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMaxHeight(Math.min(MAXIMUM_WIDTH, WIDTH - LEFT_INSET - RIGHT_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computePrefWidth(final double HEIGHT, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
double prefHeight = PREFERRED_HEIGHT;
if (HEIGHT != -1) {
prefHeight = Math.max(0, HEIGHT - TOP_INSET - BOTTOM_INSET);
}
return super.computePrefWidth(prefHeight, TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computePrefHeight(final double WIDTH, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
double prefWidth = PREFERRED_WIDTH;
if (WIDTH != -1) {
prefWidth = Math.max(0, WIDTH - LEFT_INSET - RIGHT_INSET);
}
return super.computePrefHeight(prefWidth, TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
// ******************** Private Methods ***********************************
private void handleMouseEvent(final MouseEvent MOUSE_EVENT) {
final Object SRC = MOUSE_EVENT.getSource();
final EventType TYPE = MOUSE_EVENT.getEventType();
if (SRC.equals(targetIndicator)) {
if (MouseEvent.MOUSE_PRESSED == TYPE) {
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", getSkinnable().getTarget()));
resizeText();
} else if (MouseEvent.MOUSE_DRAGGED == TYPE) {
touchRotate(MOUSE_EVENT.getSceneX() - getSkinnable().getLayoutX(), MOUSE_EVENT.getSceneY() - getSkinnable().getLayoutY(), targetIndicatorRotate);
} else if (MouseEvent.MOUSE_RELEASED == TYPE) {
getSkinnable().setTarget(Double.parseDouble(newTarget));
fadeBack();
}
}
}
private void handleTouchEvent(final TouchEvent TOUCH_EVENT) {
final Object SRC = TOUCH_EVENT.getSource();
final EventType TYPE = TOUCH_EVENT.getEventType();
if (SRC.equals(targetIndicator)) {
if (TouchEvent.TOUCH_PRESSED == TYPE) {
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", getSkinnable().getTarget()));
resizeText();
} else if (TouchEvent.TOUCH_MOVED == TYPE) {
touchRotate(TOUCH_EVENT.getTouchPoint().getSceneX() - getSkinnable().getLayoutX(), TOUCH_EVENT.getTouchPoint().getSceneY() - getSkinnable().getLayoutY(),
targetIndicatorRotate);
} else if (TouchEvent.TOUCH_RELEASED == TYPE) {
getSkinnable().setTarget(Double.parseDouble(value.getText()));
fadeBack();
}
}
}
private double getTheta(double x, double y) {
double deltaX = x - centerX;
double deltaY = y - centerY;
double radius = Math.sqrt((deltaX * deltaX) + (deltaY * deltaY));
double nx = deltaX / radius;
double ny = deltaY / radius;
double theta = Math.atan2(ny, nx);
return Double.compare(theta, 0.0) >= 0 ? Math.toDegrees(theta) : Math.toDegrees((theta)) + 360.0;
}
private void touchRotate(final double X, final double Y, final Rotate ROTATE) {
double theta = getTheta(X, Y);
interactiveAngle = (theta + 90) % 360;
double newValue = Double.compare(interactiveAngle, 180) <= 0 ?
(interactiveAngle + 180.0 + getSkinnable().getStartAngle() - 360) / angleStep + getSkinnable().getMinValue():
(interactiveAngle - 180.0 + getSkinnable().getStartAngle() - 360) / angleStep + getSkinnable().getMinValue();
if (Double.compare(newValue, getSkinnable().getMinValue()) >= 0 && Double.compare(newValue, getSkinnable().getMaxValue()) <= 0) {
ROTATE.setAngle(interactiveAngle);
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", newValue));
newTarget = value.getText();
resizeText();
}
}
private void fadeBack() {
FadeTransition fadeInfoTextOut = new FadeTransition(Duration.millis(425), infoText);
fadeInfoTextOut.setFromValue(1.0);
fadeInfoTextOut.setToValue(0.0);
FadeTransition fadeValueOut = new FadeTransition(Duration.millis(425), value);
fadeValueOut.setFromValue(1.0);
fadeValueOut.setToValue(0.0);
PauseTransition pause = new PauseTransition(Duration.millis(50));
FadeTransition fadeInfoTextIn = new FadeTransition(Duration.millis(425), infoText);
fadeInfoTextIn.setFromValue(0.0);
fadeInfoTextIn.setToValue(1.0);
FadeTransition fadeValueIn = new FadeTransition(Duration.millis(425), value);
fadeValueIn.setFromValue(0.0);
fadeValueIn.setToValue(1.0);
ParallelTransition parallelIn = new ParallelTransition(fadeInfoTextIn, fadeValueIn);
ParallelTransition parallelOut = new ParallelTransition(fadeInfoTextOut, fadeValueOut);
parallelOut.setOnFinished(event -> {
double currentValue = (valueIndicatorRotate.getAngle() + getSkinnable().getStartAngle() - 180) / angleStep + getSkinnable().getMinValue();
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", currentValue));
value.setTranslateX((size - value.getLayoutBounds().getWidth()) * 0.5);
if (getSkinnable().getTarget() < getSkinnable().getValue()) {
getSkinnable().setInfoText("COOLING");
} else if (getSkinnable().getTarget() > getSkinnable().getValue()) {
getSkinnable().setInfoText("HEATING");
}
resizeText();
drawTickMarks(ticks);
});
SequentialTransition sequence = new SequentialTransition(parallelOut, pause, parallelIn);
sequence.play();
}
private void rotateNeedle() {
double range = (getSkinnable().getMaxValue() - getSkinnable().getMinValue());
double angleRange = getSkinnable().getAngleRange();
angleStep = angleRange / range;
double targetAngle = valueIndicatorRotate.getAngle() + (getSkinnable().getValue() - getSkinnable().getOldValue()) * angleStep;
valueIndicatorRotate.setAngle(targetAngle);
drawTickMarks(ticks);
}
private void adjustBackgroundColor() {
background.setFill(gradientLookup.getColorAt(getSkinnable().getValue() / (getSkinnable().getMaxValue() - getSkinnable().getMinValue())));
}
private void drawTickMarks(final GraphicsContext CTX) {
CTX.clearRect(0, 0, size, size);
double sinValue;
double cosValue;
double startAngle = getSkinnable().getStartAngle();
Point2D center = new Point2D(size * 0.5, size * 0.5);
double stdLineWidth = size * 0.003;
double rangeLineWidth = size * 0.007;
for (double angle = 0, counter = getSkinnable().getMinValue() ; Double.compare(counter, getSkinnable().getMaxValue()) <= 0 ; angle -= angleStep / 3, counter+= 0.33333) {
sinValue = Math.sin(Math.toRadians(angle + startAngle));
cosValue = Math.cos(Math.toRadians(angle + startAngle));
Point2D innerPoint = new Point2D(center.getX() + size * 0.368 * sinValue, center.getY() + size * 0.368 * cosValue);
Point2D outerPoint = new Point2D(center.getX() + size * 0.457 * sinValue, center.getY() + size * 0.457 * cosValue);
CTX.setStroke(getSkinnable().getTickMarkFill());
if (counter > getSkinnable().getValue() && counter < getSkinnable().getTarget() ||
counter > getSkinnable().getTarget() && counter < getSkinnable().getValue()) {
CTX.setLineWidth(rangeLineWidth);
} else {
CTX.setLineWidth(stdLineWidth);
}
CTX.setLineCap(StrokeLineCap.ROUND);
CTX.strokeLine(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
}
}
private void resizeText() {
infoText.setFont(Fonts.opensansLight(size * 0.07));
infoText.setTranslateX((size - infoText.getLayoutBounds().getWidth()) * 0.5);
infoText.setTranslateY(size * 0.34);
value.setFont(Fonts.opensansBold(size * 0.32));
value.setTranslateX((size - value.getLayoutBounds().getWidth()) * 0.5);
value.setTranslateY(size * 0.5);
}
private void resize() {
size = getSkinnable().getWidth() < getSkinnable().getHeight() ? getSkinnable().getWidth() : getSkinnable().getHeight();
centerX = size * 0.5;
centerY = size * 0.5;
background.setCenterX(centerX);
background.setCenterY(centerY);
background.setRadius(size * 0.5);
ticksCanvas.setWidth(size);
ticksCanvas.setHeight(size);
ticks.clearRect(0, 0, size, size);
drawTickMarks(ticks);
ticksCanvas.setCache(true);
ticksCanvas.setCacheHint(CacheHint.QUALITY);
valueIndicator.setPrefSize(size * 0.025, size * 0.096);
valueIndicator.relocate((size - valueIndicator.getPrefWidth()) * 0.5, size * 0.039);
valueIndicatorRotate.setPivotX(valueIndicator.getPrefWidth() * 0.5);
valueIndicatorRotate.setPivotY(size * 0.461);
targetIndicator.setPrefSize(0.025 * size, 0.13 * size);
targetIndicator.relocate((size - targetIndicator.getPrefWidth()) * 0.5, size * 0.039);
targetIndicatorRotate.setPivotX(targetIndicator.getPrefWidth() * 0.5);
targetIndicatorRotate.setPivotY(size * 0.461);
targetIndicatorRotate.setAngle(getSkinnable().getTarget() * angleStep - 180 - getSkinnable().getStartAngle() - getSkinnable().getMinValue() * angleStep);
infoText.setText(getSkinnable().getInfoText().toUpperCase());
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", (valueIndicatorRotate.getAngle() + getSkinnable().getStartAngle() - 180) / angleStep));
resizeText();
}
}
| src/main/java/eu.hansolo.enzo/heatcontrol/skin/HeatControlSkin.java | /*
* Copyright (c) 2013 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.enzo.heatcontrol.skin;
import eu.hansolo.enzo.common.Fonts;
import eu.hansolo.enzo.common.ValueEvent;
import eu.hansolo.enzo.heatcontrol.GradientLookup;
import eu.hansolo.enzo.heatcontrol.HeatControl;
import javafx.animation.FadeTransition;
import javafx.animation.ParallelTransition;
import javafx.animation.PauseTransition;
import javafx.animation.SequentialTransition;
import javafx.event.EventHandler;
import javafx.event.EventType;
import javafx.geometry.Point2D;
import javafx.geometry.VPos;
import javafx.scene.CacheHint;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.Skin;
import javafx.scene.control.SkinBase;
import javafx.scene.input.MouseEvent;
import javafx.scene.input.TouchEvent;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.scene.paint.Stop;
import javafx.scene.shape.Circle;
import javafx.scene.shape.StrokeLineCap;
import javafx.scene.text.Text;
import javafx.scene.transform.Rotate;
import javafx.util.Duration;
import java.util.Locale;
/**
* User: hansolo
* Date: 08.11.13
* Time: 16:35
*/
public class HeatControlSkin extends SkinBase<HeatControl> implements Skin<HeatControl> {
private static final double PREFERRED_WIDTH = 200;
private static final double PREFERRED_HEIGHT = 200;
private static final double MINIMUM_WIDTH = 50;
private static final double MINIMUM_HEIGHT = 50;
private static final double MAXIMUM_WIDTH = 1024;
private static final double MAXIMUM_HEIGHT = 1024;
private static boolean interactive;
private double size;
private double centerX;
private double centerY;
private Pane pane;
private Circle background;
private Canvas ticksCanvas;
private GraphicsContext ticks;
private Region targetIndicator;
private Rotate targetIndicatorRotate;
private boolean targetExceeded;
private Region valueIndicator;
private Rotate valueIndicatorRotate;
private Text infoText;
private Text value;
private String newTarget;
private GradientLookup gradientLookup;
private double angleStep;
private double interactiveAngle;
private EventHandler<MouseEvent> mouseEventHandler;
private EventHandler<TouchEvent> touchEventHandler;
// ******************** Constructors **************************************
public HeatControlSkin(HeatControl heatControl) {
super(heatControl);
interactive = false;
newTarget = "";
gradientLookup = new GradientLookup(new Stop(0.10, Color.web("#3221c9")),
new Stop(0.20, Color.web("#216ec9")),
new Stop(0.30, Color.web("#21bac9")),
new Stop(0.40, Color.web("#30cb22")),
new Stop(0.50, Color.web("#b7df25")),
new Stop(0.60, Color.web("#f1ec28")),
new Stop(0.70, Color.web("#f1c428")),
new Stop(0.80, Color.web("#f19c28")),
new Stop(0.90, Color.web("#f16f28")),
new Stop(1.00, Color.web("#ec272f")));
angleStep = heatControl.getAngleRange() / (heatControl.getMaxValue() - heatControl.getMinValue());
mouseEventHandler = mouseEvent -> handleMouseEvent(mouseEvent);
touchEventHandler = touchEvent -> handleTouchEvent(touchEvent);
init();
initGraphics();
registerListeners();
}
// ******************** Initialization ************************************
private void init() {
if (Double.compare(getSkinnable().getPrefWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getPrefHeight(),
0.0) <= 0 ||
Double.compare(getSkinnable().getWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getHeight(),
0.0) <= 0) {
if (getSkinnable().getPrefWidth() > 0 && getSkinnable().getPrefHeight() > 0) {
getSkinnable().setPrefSize(getSkinnable().getPrefWidth(), getSkinnable().getPrefHeight());
} else {
getSkinnable().setPrefSize(PREFERRED_WIDTH, PREFERRED_HEIGHT);
}
}
if (Double.compare(getSkinnable().getMinWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getMinHeight(),
0.0) <= 0) {
getSkinnable().setMinSize(MINIMUM_WIDTH, MINIMUM_HEIGHT);
}
if (Double.compare(getSkinnable().getMaxWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getMaxHeight(),
0.0) <= 0) {
getSkinnable().setMaxSize(MAXIMUM_WIDTH, MAXIMUM_HEIGHT);
}
}
private void initGraphics() {
background = new Circle(0.5 * PREFERRED_WIDTH, 0.5 * PREFERRED_HEIGHT, 0.5 * PREFERRED_WIDTH);
background.setFill(gradientLookup.getColorAt(getSkinnable().getValue() / (getSkinnable().getMaxValue() - getSkinnable().getMinValue())));
ticksCanvas = new Canvas(PREFERRED_WIDTH, PREFERRED_HEIGHT);
ticks = ticksCanvas.getGraphicsContext2D();
targetIndicator = new Region();
targetIndicator.getStyleClass().setAll("target-indicator");
targetIndicatorRotate = new Rotate(180 - getSkinnable().getStartAngle() - getSkinnable().getMinValue() * angleStep);
targetIndicator.getTransforms().setAll(targetIndicatorRotate);
targetExceeded = false;
valueIndicator = new Region();
valueIndicator.getStyleClass().setAll("value-indicator");
valueIndicatorRotate = new Rotate(180 - getSkinnable().getStartAngle());
valueIndicatorRotate.setAngle(valueIndicatorRotate.getAngle() + (getSkinnable().getValue() - getSkinnable().getOldValue() - getSkinnable().getMinValue()) * angleStep);
valueIndicator.getTransforms().setAll(valueIndicatorRotate);
infoText = new Text(getSkinnable().getInfoText().toUpperCase());
infoText.setTextOrigin(VPos.CENTER);
infoText.setFont(Fonts.opensansSemiBold(0.06 * PREFERRED_HEIGHT));
infoText.getStyleClass().setAll("info-text");
value = new Text(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", getSkinnable().getValue()));
value.setMouseTransparent(true);
value.setTextOrigin(VPos.CENTER);
value.setFont(Fonts.opensansBold(0.32 * PREFERRED_HEIGHT));
value.getStyleClass().setAll("value");
// Add all nodes
pane = new Pane();
pane.getChildren().setAll(background,
ticksCanvas,
valueIndicator,
targetIndicator,
infoText,
value);
getChildren().setAll(pane);
}
private void registerListeners() {
getSkinnable().widthProperty().addListener(observable -> handleControlPropertyChanged("RESIZE"));
getSkinnable().heightProperty().addListener(observable -> handleControlPropertyChanged("RESIZE"));
getSkinnable().infoTextProperty().addListener(observable -> handleControlPropertyChanged("INFO_TEXT"));
getSkinnable().valueProperty().addListener(observable -> handleControlPropertyChanged("VALUE"));
getSkinnable().minValueProperty().addListener(observable -> handleControlPropertyChanged("RECALC"));
getSkinnable().maxValueProperty().addListener(observable -> handleControlPropertyChanged("RECALC"));
getSkinnable().minMeasuredValueProperty().addListener(observable -> handleControlPropertyChanged("MIN_MEASURED_VALUE"));
getSkinnable().maxMeasuredValueProperty().addListener(observable -> handleControlPropertyChanged("MAX_MEASURED_VALUE"));
getSkinnable().thresholdProperty().addListener(observable -> handleControlPropertyChanged("TARGET"));
getSkinnable().angleRangeProperty().addListener(observable -> handleControlPropertyChanged("ANGLE_RANGE"));
valueIndicatorRotate.angleProperty().addListener(observable -> handleControlPropertyChanged("ANGLE"));
targetIndicator.setOnMousePressed(mouseEventHandler);
targetIndicator.setOnMouseDragged(mouseEventHandler);
targetIndicator.setOnMouseReleased(mouseEventHandler);
targetIndicator.setOnTouchPressed(touchEventHandler);
targetIndicator.setOnTouchMoved(touchEventHandler);
targetIndicator.setOnTouchReleased(touchEventHandler);
}
// ******************** Methods *******************************************
protected void handleControlPropertyChanged(final String PROPERTY) {
if ("RESIZE".equals(PROPERTY)) {
resize();
} else if ("INFO_TEXT".equals(PROPERTY)) {
infoText.setText(getSkinnable().getInfoText().toUpperCase());
resize();
} else if ("VALUE".equals(PROPERTY)) {
rotateNeedle();
adjustBackgroundColor();
} else if ("RECALC".equals(PROPERTY)) {
if (getSkinnable().getMinValue() < 0) {
angleStep = getSkinnable().getAngleRange() / (getSkinnable().getMaxValue() - getSkinnable().getMinValue());
valueIndicatorRotate.setAngle(180 - getSkinnable().getStartAngle() - (getSkinnable().getMinValue()) * angleStep);
} else {
angleStep = getSkinnable().getAngleRange() / (getSkinnable().getMaxValue() + getSkinnable().getMinValue());
valueIndicatorRotate.setAngle(180 - getSkinnable().getStartAngle() * angleStep);
}
resize();
} else if ("ANGLE".equals(PROPERTY)) {
double currentValue = (valueIndicatorRotate.getAngle() + getSkinnable().getStartAngle() - 180) / angleStep + getSkinnable().getMinValue();
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", currentValue));
value.setTranslateX((size - value.getLayoutBounds().getWidth()) * 0.5);
// Check targetIndicator
if (targetExceeded) {
if (currentValue < getSkinnable().getTarget()) {
getSkinnable().fireEvent(new ValueEvent(this, null, ValueEvent.VALUE_UNDERRUN));
targetExceeded = false;
}
} else {
if (currentValue > getSkinnable().getTarget()) {
getSkinnable().fireEvent(new ValueEvent(this, null, ValueEvent.VALUE_EXCEEDED));
targetExceeded = true;
}
}
} else if ("TARGET".equals(PROPERTY)) {
targetIndicatorRotate.setAngle(getSkinnable().getTarget() * angleStep - 180 - getSkinnable().getStartAngle());
}
}
@Override protected double computeMinWidth(final double HEIGHT, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMinWidth(Math.max(MINIMUM_HEIGHT, HEIGHT - TOP_INSET - BOTTOM_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computeMinHeight(final double WIDTH, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMinHeight(Math.max(MINIMUM_WIDTH, WIDTH - LEFT_INSET - RIGHT_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computeMaxWidth(final double HEIGHT, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMaxWidth(Math.min(MAXIMUM_HEIGHT, HEIGHT - TOP_INSET - BOTTOM_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computeMaxHeight(final double WIDTH, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
return super.computeMaxHeight(Math.min(MAXIMUM_WIDTH, WIDTH - LEFT_INSET - RIGHT_INSET), TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computePrefWidth(final double HEIGHT, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
double prefHeight = PREFERRED_HEIGHT;
if (HEIGHT != -1) {
prefHeight = Math.max(0, HEIGHT - TOP_INSET - BOTTOM_INSET);
}
return super.computePrefWidth(prefHeight, TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
@Override protected double computePrefHeight(final double WIDTH, double TOP_INSET, double RIGHT_INSET, double BOTTOM_INSET, double LEFT_INSET) {
double prefWidth = PREFERRED_WIDTH;
if (WIDTH != -1) {
prefWidth = Math.max(0, WIDTH - LEFT_INSET - RIGHT_INSET);
}
return super.computePrefHeight(prefWidth, TOP_INSET, RIGHT_INSET, BOTTOM_INSET, LEFT_INSET);
}
// ******************** Private Methods ***********************************
private void handleMouseEvent(final MouseEvent MOUSE_EVENT) {
final Object SRC = MOUSE_EVENT.getSource();
final EventType TYPE = MOUSE_EVENT.getEventType();
if (SRC.equals(targetIndicator)) {
if (MouseEvent.MOUSE_PRESSED == TYPE) {
interactive = true;
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", getSkinnable().getTarget()));
resizeText();
} else if (MouseEvent.MOUSE_DRAGGED == TYPE) {
touchRotate(MOUSE_EVENT.getSceneX() - getSkinnable().getLayoutX(), MOUSE_EVENT.getSceneY() - getSkinnable().getLayoutY(), targetIndicatorRotate);
} else if (MouseEvent.MOUSE_RELEASED == TYPE) {
getSkinnable().setTarget(Double.parseDouble(newTarget));
fadeBack();
}
}
}
private void handleTouchEvent(final TouchEvent TOUCH_EVENT) {
final Object SRC = TOUCH_EVENT.getSource();
final EventType TYPE = TOUCH_EVENT.getEventType();
if (SRC.equals(targetIndicator)) {
if (TouchEvent.TOUCH_PRESSED == TYPE) {
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", getSkinnable().getTarget()));
resizeText();
} else if (TouchEvent.TOUCH_MOVED == TYPE) {
touchRotate(TOUCH_EVENT.getTouchPoint().getSceneX() - getSkinnable().getLayoutX(), TOUCH_EVENT.getTouchPoint().getSceneY() - getSkinnable().getLayoutY(),
targetIndicatorRotate);
} else if (TouchEvent.TOUCH_RELEASED == TYPE) {
getSkinnable().setTarget(Double.parseDouble(value.getText()));
fadeBack();
}
}
}
private double getTheta(double x, double y) {
double deltaX = x - centerX;
double deltaY = y - centerY;
double radius = Math.sqrt((deltaX * deltaX) + (deltaY * deltaY));
double nx = deltaX / radius;
double ny = deltaY / radius;
double theta = Math.atan2(ny, nx);
return Double.compare(theta, 0.0) >= 0 ? Math.toDegrees(theta) : Math.toDegrees((theta)) + 360.0;
}
private void touchRotate(final double X, final double Y, final Rotate ROTATE) {
double theta = getTheta(X, Y);
interactiveAngle = (theta + 90) % 360;
double newValue = Double.compare(interactiveAngle, 180) <= 0 ?
(interactiveAngle + 180.0 + getSkinnable().getStartAngle() - 360) / angleStep + getSkinnable().getMinValue():
(interactiveAngle - 180.0 + getSkinnable().getStartAngle() - 360) / angleStep + getSkinnable().getMinValue();
if (Double.compare(newValue, getSkinnable().getMinValue()) >= 0 && Double.compare(newValue, getSkinnable().getMaxValue()) <= 0) {
ROTATE.setAngle(interactiveAngle);
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", newValue));
newTarget = value.getText();
resizeText();
}
}
private void fadeBack() {
FadeTransition fadeInfoTextOut = new FadeTransition(Duration.millis(425), infoText);
fadeInfoTextOut.setFromValue(1.0);
fadeInfoTextOut.setToValue(0.0);
FadeTransition fadeValueOut = new FadeTransition(Duration.millis(425), value);
fadeValueOut.setFromValue(1.0);
fadeValueOut.setToValue(0.0);
PauseTransition pause = new PauseTransition(Duration.millis(50));
FadeTransition fadeInfoTextIn = new FadeTransition(Duration.millis(425), infoText);
fadeInfoTextIn.setFromValue(0.0);
fadeInfoTextIn.setToValue(1.0);
FadeTransition fadeValueIn = new FadeTransition(Duration.millis(425), value);
fadeValueIn.setFromValue(0.0);
fadeValueIn.setToValue(1.0);
ParallelTransition parallelIn = new ParallelTransition(fadeInfoTextIn, fadeValueIn);
ParallelTransition parallelOut = new ParallelTransition(fadeInfoTextOut, fadeValueOut);
parallelOut.setOnFinished(event -> {
double currentValue = (valueIndicatorRotate.getAngle() + getSkinnable().getStartAngle() - 180) / angleStep + getSkinnable().getMinValue();
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", currentValue));
value.setTranslateX((size - value.getLayoutBounds().getWidth()) * 0.5);
if (getSkinnable().getTarget() < getSkinnable().getValue()) {
getSkinnable().setInfoText("COOLING");
} else if (getSkinnable().getTarget() > getSkinnable().getValue()) {
getSkinnable().setInfoText("HEATING");
}
resizeText();
drawTickMarks(ticks);
interactive = false;
});
SequentialTransition sequence = new SequentialTransition(parallelOut, pause, parallelIn);
sequence.play();
}
private void rotateNeedle() {
double range = (getSkinnable().getMaxValue() - getSkinnable().getMinValue());
double angleRange = getSkinnable().getAngleRange();
angleStep = angleRange / range;
double targetAngle = valueIndicatorRotate.getAngle() + (getSkinnable().getValue() - getSkinnable().getOldValue()) * angleStep;
valueIndicatorRotate.setAngle(targetAngle);
drawTickMarks(ticks);
}
private void adjustBackgroundColor() {
background.setFill(gradientLookup.getColorAt(getSkinnable().getValue() / (getSkinnable().getMaxValue() - getSkinnable().getMinValue())));
}
private void drawTickMarks(final GraphicsContext CTX) {
CTX.clearRect(0, 0, size, size);
double sinValue;
double cosValue;
double startAngle = getSkinnable().getStartAngle();
Point2D center = new Point2D(size * 0.5, size * 0.5);
double stdLineWidth = size * 0.003;
double rangeLineWidth = size * 0.007;
for (double angle = 0, counter = getSkinnable().getMinValue() ; Double.compare(counter, getSkinnable().getMaxValue()) <= 0 ; angle -= angleStep / 3, counter+= 0.33333) {
sinValue = Math.sin(Math.toRadians(angle + startAngle));
cosValue = Math.cos(Math.toRadians(angle + startAngle));
Point2D innerPoint = new Point2D(center.getX() + size * 0.368 * sinValue, center.getY() + size * 0.368 * cosValue);
Point2D outerPoint = new Point2D(center.getX() + size * 0.457 * sinValue, center.getY() + size * 0.457 * cosValue);
CTX.setStroke(getSkinnable().getTickMarkFill());
if (counter > getSkinnable().getValue() && counter < getSkinnable().getTarget() ||
counter > getSkinnable().getTarget() && counter < getSkinnable().getValue()) {
CTX.setLineWidth(rangeLineWidth);
} else {
CTX.setLineWidth(stdLineWidth);
}
CTX.setLineCap(StrokeLineCap.ROUND);
CTX.strokeLine(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
}
}
private void resizeText() {
infoText.setFont(Fonts.opensansLight(size * 0.07));
infoText.setTranslateX((size - infoText.getLayoutBounds().getWidth()) * 0.5);
infoText.setTranslateY(size * 0.34);
value.setFont(Fonts.opensansBold(size * 0.32));
value.setTranslateX((size - value.getLayoutBounds().getWidth()) * 0.5);
value.setTranslateY(size * 0.5);
}
private void resize() {
size = getSkinnable().getWidth() < getSkinnable().getHeight() ? getSkinnable().getWidth() : getSkinnable().getHeight();
centerX = size * 0.5;
centerY = size * 0.5;
background.setCenterX(centerX);
background.setCenterY(centerY);
background.setRadius(size * 0.5);
ticksCanvas.setWidth(size);
ticksCanvas.setHeight(size);
ticks.clearRect(0, 0, size, size);
drawTickMarks(ticks);
ticksCanvas.setCache(true);
ticksCanvas.setCacheHint(CacheHint.QUALITY);
valueIndicator.setPrefSize(size * 0.025, size * 0.096);
valueIndicator.relocate((size - valueIndicator.getPrefWidth()) * 0.5, size * 0.039);
valueIndicatorRotate.setPivotX(valueIndicator.getPrefWidth() * 0.5);
valueIndicatorRotate.setPivotY(size * 0.461);
targetIndicator.setPrefSize(0.025 * size, 0.13 * size);
targetIndicator.relocate((size - targetIndicator.getPrefWidth()) * 0.5, size * 0.039);
targetIndicatorRotate.setPivotX(targetIndicator.getPrefWidth() * 0.5);
targetIndicatorRotate.setPivotY(size * 0.461);
targetIndicatorRotate.setAngle(getSkinnable().getTarget() * angleStep - 180 - getSkinnable().getStartAngle() - getSkinnable().getMinValue() * angleStep);
infoText.setText(getSkinnable().getInfoText().toUpperCase());
value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", (valueIndicatorRotate.getAngle() + getSkinnable().getStartAngle() - 180) / angleStep));
resizeText();
}
}
| cosmetics
| src/main/java/eu.hansolo.enzo/heatcontrol/skin/HeatControlSkin.java | cosmetics | <ide><path>rc/main/java/eu.hansolo.enzo/heatcontrol/skin/HeatControlSkin.java
<ide> private static final double MINIMUM_WIDTH = 50;
<ide> private static final double MINIMUM_HEIGHT = 50;
<ide> private static final double MAXIMUM_WIDTH = 1024;
<del> private static final double MAXIMUM_HEIGHT = 1024;
<del> private static boolean interactive;
<add> private static final double MAXIMUM_HEIGHT = 1024;
<ide> private double size;
<ide> private double centerX;
<ide> private double centerY;
<ide> // ******************** Constructors **************************************
<ide> public HeatControlSkin(HeatControl heatControl) {
<ide> super(heatControl);
<del> interactive = false;
<ide> newTarget = "";
<ide> gradientLookup = new GradientLookup(new Stop(0.10, Color.web("#3221c9")),
<ide> new Stop(0.20, Color.web("#216ec9")),
<ide> final Object SRC = MOUSE_EVENT.getSource();
<ide> final EventType TYPE = MOUSE_EVENT.getEventType();
<ide> if (SRC.equals(targetIndicator)) {
<del> if (MouseEvent.MOUSE_PRESSED == TYPE) {
<del> interactive = true;
<add> if (MouseEvent.MOUSE_PRESSED == TYPE) {
<ide> value.setText(String.format(Locale.US, "%." + getSkinnable().getDecimals() + "f", getSkinnable().getTarget()));
<ide> resizeText();
<ide> } else if (MouseEvent.MOUSE_DRAGGED == TYPE) {
<ide> }
<ide>
<ide> resizeText();
<del> drawTickMarks(ticks);
<del> interactive = false;
<add> drawTickMarks(ticks);
<ide> });
<ide>
<ide> SequentialTransition sequence = new SequentialTransition(parallelOut, pause, parallelIn); |
|
Java | apache-2.0 | c6395ba55348c2938051b4e979fc8997a2dd4a37 | 0 | josephcsible/GravityBox | /*
* Copyright (C) 2015 Peter Gregus for GravityBox Project (C3C076@xda)
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ceco.lollipop.gravitybox;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import de.robv.android.xposed.XC_MethodHook;
import de.robv.android.xposed.XSharedPreferences;
import de.robv.android.xposed.XposedBridge;
import de.robv.android.xposed.XposedHelpers;
import de.robv.android.xposed.XposedHelpers.ClassNotFoundError;
public class ModLauncher {
    // Launcher packages this module hooks into: AOSP Launcher3 and the
    // Google Now Launcher (shipped inside the Google search app).
    public static final List<String> PACKAGE_NAMES = new ArrayList<String>(Arrays.asList(
            "com.android.launcher3", "com.google.android.googlequicksearchbox"));
    private static final String TAG = "GB:ModLauncher";

    // Known DynamicGrid variants across launcher versions; each entry records the
    // (often ProGuard-obfuscated) class and field names probed at hook time.
    private static final List<DynamicGrid> CLASS_DYNAMIC_GRID;
    // Known "show all apps" method variants, one per supported launcher build.
    private static final List<ShowAllApps> METHOD_SHOW_ALL_APPS;
    private static final String CLASS_LAUNCHER = "com.android.launcher3.Launcher";
    private static final String CLASS_APP_WIDGET_HOST_VIEW = "android.appwidget.AppWidgetHostView";
    private static final boolean DEBUG = false;

    // Broadcast action that asks the hooked launcher to open its app drawer.
    public static final String ACTION_SHOW_APP_DRAWER = "gravitybox.launcher.intent.action.SHOW_APP_DRAWER";
private static final class DynamicGrid {
Class<?> clazz;
String className;
String fProfile;
String fNumRows;
String fNumCols;
String invariantProfile;
public DynamicGrid(String cN, String fp, String fnr, String fnc) {
this(cN, fp, fnr, fnc, null);
}
public DynamicGrid(String cN, String fp, String fnr, String fnc, String invDp) {
className = cN;
fProfile = fp;
fNumRows = fnr;
fNumCols = fnc;
invariantProfile = invDp;
}
}
private static final class ShowAllApps {
String methodName;
Object[] paramTypes;
Object[] paramValues;
String fLauncherCallbacks;
public ShowAllApps(String mName, Object[] pTypes, Object[] pValues) {
this(mName, pTypes, pValues, null);
}
public ShowAllApps(String mName, Object[] pTypes, Object[] pValues, String flc) {
methodName = mName;
paramTypes = pTypes;
paramValues = pValues;
fLauncherCallbacks = flc;
}
}
static {
CLASS_DYNAMIC_GRID = new ArrayList<DynamicGrid>();
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.DynamicGrid", "mProfile", "numRows", "numColumns"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("nw", "Bq", "yx", "yy"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("rf", "DU", "AW", "AX"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("sg", "Ez", "BB", "BC"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("ur", "Gi", "Dg", "Dh"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("wd", "Fe", "Ce", "Cf"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.cn", "KA", "Hz", "HA"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.cn", "Kz", "Hy", "Hz"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.cn", "KE", "HD", "HE"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.co", "KK", "HJ", "HK"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.co", "KV", "HU", "HV"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.bf", "KU", "HT", "HU"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.bf", "Mf", "Je", "Jf"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.Launcher", "mDeviceProfile", "numRows", "numColumns", "com.android.launcher3.InvariantDeviceProfile"));
METHOD_SHOW_ALL_APPS = new ArrayList<ShowAllApps>();
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("onClickAllAppsButton",
new Object[] { View.class },
new Object[] { null } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "tk", boolean.class },
new Object[] { false, "xJ", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.q", boolean.class },
new Object[] { false, "Dc", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.q", boolean.class },
new Object[] { false, "Db", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.q", boolean.class },
new Object[] { false, "Dg", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.s", boolean.class },
new Object[] { false, "Dm", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.s", boolean.class },
new Object[] { false, "Dx", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.h", boolean.class },
new Object[] { false, "Dw", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.h", boolean.class },
new Object[] { false, "EH", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("onClickAllAppsButton",
new Object[] { View.class },
new Object[] { "mAllAppsButton" },
"mLauncherCallbacks"));
}
private static void log(String message) {
XposedBridge.log(TAG + ": " + message);
}
private static boolean mShouldShowAppDrawer;
private static boolean mReceiverRegistered;
private static Method mShowAllAppsMethod;
private static Object[] mShowAllAppsParams;
private static Object mShowAllAppsObject;
private static BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Intent i = new Intent(Intent.ACTION_MAIN);
i.addCategory(Intent.CATEGORY_HOME);
i.putExtra("showAppDrawer", true);
context.startActivity(i);
}
};
public static void init(final XSharedPreferences prefs, final ClassLoader classLoader) {
prefs.reload();
boolean dynamicGridFound = false;
for (DynamicGrid dg : CLASS_DYNAMIC_GRID) {
final DynamicGrid dynamicGrid = dg;
try {
Class<?> cls = XposedHelpers.findClass(dg.className, classLoader);
Class<?> profileClass = dg.invariantProfile != null ?
XposedHelpers.findClass(dg.invariantProfile, classLoader) :
cls.getDeclaredField(dg.fProfile).getType();
Field numRows = profileClass.getDeclaredField(dg.fNumRows);
if (!numRows.getType().isAssignableFrom(dg.invariantProfile != null ? int.class : float.class))
throw new Exception("numRows doesn't seem to be of expected type");
Field numCols = profileClass.getDeclaredField(dg.fNumCols);
if (!numCols.getType().isAssignableFrom(dg.invariantProfile != null ? int.class : float.class))
throw new Exception("numCols doesn't seem to be of expected type");
if (DEBUG) log("Probably found DynamicGrid class as: " + dg.className +
"; numRows=" + dg.fNumRows + "; numCols=" + dg.fNumCols);
dg.clazz = dg.invariantProfile != null ? profileClass : cls;
} catch (Throwable t) {
if (DEBUG) log("search for dynamic grid " + dg.className + ": " + t.getMessage());
continue;
}
dynamicGridFound = true;
try {
XposedBridge.hookAllConstructors(dynamicGrid.clazz, new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
Object profile = dynamicGrid.invariantProfile != null ? param.thisObject :
XposedHelpers.getObjectField(param.thisObject, dynamicGrid.fProfile);
if (profile != null) {
final int rows = Integer.valueOf(prefs.getString(
GravityBoxSettings.PREF_KEY_LAUNCHER_DESKTOP_GRID_ROWS, "0"));
if (rows != 0) {
XposedHelpers.setIntField(profile, dynamicGrid.fNumRows, rows);
if (DEBUG) log("Launcher rows set to: " + rows);
}
final int cols = Integer.valueOf(prefs.getString(
GravityBoxSettings.PREF_KEY_LAUNCHER_DESKTOP_GRID_COLS, "0"));
if (cols != 0) {
XposedHelpers.setIntField(profile, dynamicGrid.fNumCols, cols);
if (DEBUG) log("Launcher cols set to: " + cols);
}
}
}
});
} catch (Throwable t) {
XposedBridge.log(t);
}
break;
}
if (!dynamicGridFound) {
log("Couldn't find dynamic grid. Incompatible Google Search?");
}
try {
Class<?> classLauncher = null;
try {
classLauncher = XposedHelpers.findClass(CLASS_LAUNCHER, classLoader);
} catch (ClassNotFoundError e) {
log("Launcher3.Launcher not found");
}
if (classLauncher != null) {
XposedHelpers.findAndHookMethod(classLauncher, "onCreate", Bundle.class, new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
IntentFilter intentFilter = new IntentFilter(ACTION_SHOW_APP_DRAWER);
((Activity)param.thisObject).registerReceiver(mBroadcastReceiver, intentFilter);
mReceiverRegistered = true;
}
});
XposedHelpers.findAndHookMethod(classLauncher, "onDestroy", new XC_MethodHook() {
@Override
protected void beforeHookedMethod(final MethodHookParam param) throws Throwable {
if (mReceiverRegistered) {
((Activity)param.thisObject).unregisterReceiver(mBroadcastReceiver);
mReceiverRegistered = false;
}
}
});
XposedHelpers.findAndHookMethod(classLauncher, "onNewIntent", Intent.class, new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
Intent i = (Intent) param.args[0];
mShouldShowAppDrawer = (i != null && i.hasExtra("showAppDrawer"));
}
});
XposedHelpers.findAndHookMethod(classLauncher, "onResume", new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
if (mShouldShowAppDrawer) {
mShouldShowAppDrawer = false;
if (mShowAllAppsMethod != null) {
mShowAllAppsMethod.invoke(mShowAllAppsObject, mShowAllAppsParams);
} else {
for (ShowAllApps sapm : METHOD_SHOW_ALL_APPS) {
try {
for (int i = 0; i < sapm.paramTypes.length; i++) {
if (sapm.paramTypes[i] instanceof String) {
sapm.paramTypes[i] = XposedHelpers.findClass(
(String) sapm.paramTypes[i], classLoader);
}
if (sapm.paramValues[i] instanceof String) {
if (sapm.fLauncherCallbacks != null) {
sapm.paramValues[i] = XposedHelpers.getObjectField(
param.thisObject, (String) sapm.paramValues[i]);
} else {
Object type = XposedHelpers.getStaticObjectField(
(Class<?>) sapm.paramTypes[i],
(String) sapm.paramValues[i]);
if (!"Applications".equals(type.toString()))
continue;
sapm.paramValues[i] = type;
}
}
}
Object o = sapm.fLauncherCallbacks == null ? param.thisObject :
XposedHelpers.getObjectField(param.thisObject, sapm.fLauncherCallbacks);
Class<?> clazz = o.getClass();
if (clazz.getName().equals(CLASS_LAUNCHER) || sapm.fLauncherCallbacks != null) {
mShowAllAppsMethod = XposedHelpers.findMethodExact(clazz,
sapm.methodName, sapm.paramTypes);
} else if (clazz.getSuperclass().getName().equals(CLASS_LAUNCHER)) {
mShowAllAppsMethod = XposedHelpers.findMethodExact(clazz.getSuperclass(),
sapm.methodName, sapm.paramTypes);
}
mShowAllAppsObject = o;
mShowAllAppsParams = sapm.paramValues;
mShowAllAppsMethod.invoke(mShowAllAppsObject, mShowAllAppsParams);
if (sapm.fLauncherCallbacks != null) {
Toast.makeText((Activity) param.thisObject,
"Such action might not be supported in this Launcher version",
Toast.LENGTH_LONG).show();
}
break;
} catch (Throwable t) {
if (DEBUG) log("Method name " + sapm.methodName +
" not found: " + t.getMessage());
}
}
if (mShowAllAppsMethod == null) {
log("Couldn't find method for opening app dawer. Incompatible Google Search?");
}
}
}
}
});
}
Class<?> classAppWidgetHostView = null;
try {
classAppWidgetHostView = XposedHelpers.findClass(CLASS_APP_WIDGET_HOST_VIEW, classLoader);
} catch (ClassNotFoundError e) {
log("AppWidgetHostView not found");
}
if (classAppWidgetHostView != null) {
XposedHelpers.findAndHookMethod(classAppWidgetHostView, "getAppWidgetInfo", new XC_MethodHook() {
@Override
protected void beforeHookedMethod(MethodHookParam param) throws Throwable {
if (prefs.getBoolean(
GravityBoxSettings.PREF_KEY_LAUNCHER_RESIZE_WIDGET, false)) {
Object info = XposedHelpers.getObjectField(param.thisObject, "mInfo");
if (info != null) {
XposedHelpers.setIntField(info, "resizeMode", 3);
XposedHelpers.setIntField(info, "minResizeWidth", 40);
XposedHelpers.setIntField(info, "minResizeHeight", 40);
}
}
}
});
}
} catch (Throwable t) {
XposedBridge.log(t);
}
}
}
| src/com/ceco/lollipop/gravitybox/ModLauncher.java | /*
* Copyright (C) 2015 Peter Gregus for GravityBox Project (C3C076@xda)
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ceco.lollipop.gravitybox;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.view.View;
import de.robv.android.xposed.XC_MethodHook;
import de.robv.android.xposed.XSharedPreferences;
import de.robv.android.xposed.XposedBridge;
import de.robv.android.xposed.XposedHelpers;
import de.robv.android.xposed.XposedHelpers.ClassNotFoundError;
public class ModLauncher {
public static final List<String> PACKAGE_NAMES = new ArrayList<String>(Arrays.asList(
"com.android.launcher3", "com.google.android.googlequicksearchbox"));
private static final String TAG = "GB:ModLauncher";
private static final List<DynamicGrid> CLASS_DYNAMIC_GRID;
private static final List<ShowAllApps> METHOD_SHOW_ALL_APPS;
private static final String CLASS_LAUNCHER = "com.android.launcher3.Launcher";
private static final String CLASS_APP_WIDGET_HOST_VIEW = "android.appwidget.AppWidgetHostView";
private static final boolean DEBUG = false;
public static final String ACTION_SHOW_APP_DRAWER = "gravitybox.launcher.intent.action.SHOW_APP_DRAWER";
private static final class DynamicGrid {
Class<?> clazz;
String className;
String fProfile;
String fNumRows;
String fNumCols;
public DynamicGrid(String cN, String fp, String fnr, String fnc) {
className = cN;
fProfile = fp;
fNumRows = fnr;
fNumCols = fnc;
}
}
private static final class ShowAllApps {
String methodName;
Object[] paramTypes;
Object[] paramValues;
public ShowAllApps(String mName, Object[] pTypes, Object[] pValues) {
methodName = mName;
paramTypes = pTypes;
paramValues = pValues;
}
}
static {
CLASS_DYNAMIC_GRID = new ArrayList<DynamicGrid>();
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.DynamicGrid", "mProfile", "numRows", "numColumns"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("nw", "Bq", "yx", "yy"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("rf", "DU", "AW", "AX"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("sg", "Ez", "BB", "BC"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("ur", "Gi", "Dg", "Dh"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("wd", "Fe", "Ce", "Cf"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.cn", "KA", "Hz", "HA"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.cn", "Kz", "Hy", "Hz"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.cn", "KE", "HD", "HE"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.co", "KK", "HJ", "HK"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.co", "KV", "HU", "HV"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.bf", "KU", "HT", "HU"));
CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.bf", "Mf", "Je", "Jf"));
METHOD_SHOW_ALL_APPS = new ArrayList<ShowAllApps>();
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("onClickAllAppsButton",
new Object[] { View.class },
new Object[] { null } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "tk", boolean.class },
new Object[] { false, "xJ", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.q", boolean.class },
new Object[] { false, "Dc", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.q", boolean.class },
new Object[] { false, "Db", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.q", boolean.class },
new Object[] { false, "Dg", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.s", boolean.class },
new Object[] { false, "Dm", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.s", boolean.class },
new Object[] { false, "Dx", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.h", boolean.class },
new Object[] { false, "Dw", false } ));
METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
new Object[] { boolean.class, "com.android.launcher3.h", boolean.class },
new Object[] { false, "EH", false } ));
}
private static void log(String message) {
XposedBridge.log(TAG + ": " + message);
}
private static boolean mShouldShowAppDrawer;
private static boolean mReceiverRegistered;
private static Method mShowAllAppsMethod;
private static Object[] mShowAllAppsParams;
private static BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Intent i = new Intent(Intent.ACTION_MAIN);
i.addCategory(Intent.CATEGORY_HOME);
i.putExtra("showAppDrawer", true);
context.startActivity(i);
}
};
public static void init(final XSharedPreferences prefs, final ClassLoader classLoader) {
boolean dynamicGridFound = false;
for (DynamicGrid dg : CLASS_DYNAMIC_GRID) {
final DynamicGrid dynamicGrid = dg;
try {
Class<?> cls = XposedHelpers.findClass(dg.className, classLoader);
Field profile = cls.getDeclaredField(dg.fProfile);
Class<?> profileClass = profile.getType();
Field numRows = profileClass.getDeclaredField(dg.fNumRows);
if (!numRows.getType().isAssignableFrom(float.class))
throw new Exception("numRows doesn't seem to be of float type");
Field numCols = profileClass.getDeclaredField(dg.fNumCols);
if (!numCols.getType().isAssignableFrom(float.class))
throw new Exception("numCols doesn't seem to be of float type");
if (DEBUG) log("Probably found DynamicGrid class as: " + dg.className +
"; numRows=" + dg.fNumRows + "; numCols=" + dg.fNumCols);
dg.clazz = cls;
} catch (Throwable t) {
if (DEBUG) log("search for dynamic grid " + dg.className + ": " + t.getMessage());
continue;
}
dynamicGridFound = true;
try {
XposedBridge.hookAllConstructors(dynamicGrid.clazz, new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
prefs.reload();
Object profile = XposedHelpers.getObjectField(param.thisObject, dynamicGrid.fProfile);
if (profile != null) {
final int rows = Integer.valueOf(prefs.getString(
GravityBoxSettings.PREF_KEY_LAUNCHER_DESKTOP_GRID_ROWS, "0"));
if (rows != 0) {
XposedHelpers.setIntField(profile, dynamicGrid.fNumRows, rows);
if (DEBUG) log("Launcher rows set to: " + rows);
}
final int cols = Integer.valueOf(prefs.getString(
GravityBoxSettings.PREF_KEY_LAUNCHER_DESKTOP_GRID_COLS, "0"));
if (cols != 0) {
XposedHelpers.setIntField(profile, dynamicGrid.fNumCols, cols);
if (DEBUG) log("Launcher cols set to: " + cols);
}
}
}
});
} catch (Throwable t) {
XposedBridge.log(t);
}
break;
}
if (!dynamicGridFound) {
log("Couldn't find dynamic grid. Incompatible Google Search?");
}
try {
Class<?> classLauncher = null;
try {
classLauncher = XposedHelpers.findClass(CLASS_LAUNCHER, classLoader);
} catch (ClassNotFoundError e) {
log("Launcher3.Launcher not found");
}
if (classLauncher != null) {
XposedHelpers.findAndHookMethod(classLauncher, "onCreate", Bundle.class, new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
IntentFilter intentFilter = new IntentFilter(ACTION_SHOW_APP_DRAWER);
((Activity)param.thisObject).registerReceiver(mBroadcastReceiver, intentFilter);
mReceiverRegistered = true;
}
});
XposedHelpers.findAndHookMethod(classLauncher, "onDestroy", new XC_MethodHook() {
@Override
protected void beforeHookedMethod(final MethodHookParam param) throws Throwable {
if (mReceiverRegistered) {
((Activity)param.thisObject).unregisterReceiver(mBroadcastReceiver);
mReceiverRegistered = false;
}
}
});
XposedHelpers.findAndHookMethod(classLauncher, "onNewIntent", Intent.class, new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
Intent i = (Intent) param.args[0];
mShouldShowAppDrawer = (i != null && i.hasExtra("showAppDrawer"));
}
});
XposedHelpers.findAndHookMethod(classLauncher, "onResume", new XC_MethodHook() {
@Override
protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
if (mShouldShowAppDrawer) {
mShouldShowAppDrawer = false;
if (mShowAllAppsMethod != null) {
mShowAllAppsMethod.invoke(param.thisObject, mShowAllAppsParams);
} else {
for (ShowAllApps sapm : METHOD_SHOW_ALL_APPS) {
try {
for (int i = 0; i < sapm.paramTypes.length; i++) {
if (sapm.paramTypes[i] instanceof String) {
sapm.paramTypes[i] = XposedHelpers.findClass(
(String) sapm.paramTypes[i], classLoader);
}
if (sapm.paramValues[i] instanceof String) {
Object type = XposedHelpers.getStaticObjectField(
(Class<?>) sapm.paramTypes[i],
(String) sapm.paramValues[i]);
if (!"Applications".equals(type.toString()))
continue;
sapm.paramValues[i] = type;
}
}
Class<?> clazz = param.thisObject.getClass();
if (clazz.getName().equals(CLASS_LAUNCHER)) {
mShowAllAppsMethod = XposedHelpers.findMethodExact(clazz,
sapm.methodName, sapm.paramTypes);
} else if (clazz.getSuperclass().getName().equals(CLASS_LAUNCHER)) {
mShowAllAppsMethod = XposedHelpers.findMethodExact(clazz.getSuperclass(),
sapm.methodName, sapm.paramTypes);
}
mShowAllAppsParams = sapm.paramValues;
mShowAllAppsMethod.invoke(param.thisObject, mShowAllAppsParams);
break;
} catch (Throwable t) {
if (DEBUG) log("Method name " + sapm.methodName +
" not found: " + t.getMessage());
}
}
if (mShowAllAppsMethod == null) {
log("Couldn't find method for opening app dawer. Incompatible Google Search?");
}
}
}
}
});
}
Class<?> classAppWidgetHostView = null;
try {
classAppWidgetHostView = XposedHelpers.findClass(CLASS_APP_WIDGET_HOST_VIEW, classLoader);
} catch (ClassNotFoundError e) {
log("AppWidgetHostView not found");
}
if (classAppWidgetHostView != null) {
XposedHelpers.findAndHookMethod(classAppWidgetHostView, "getAppWidgetInfo", new XC_MethodHook() {
@Override
protected void beforeHookedMethod(MethodHookParam param) throws Throwable {
if (prefs.getBoolean(
GravityBoxSettings.PREF_KEY_LAUNCHER_RESIZE_WIDGET, false)) {
Object info = XposedHelpers.getObjectField(param.thisObject, "mInfo");
if (info != null) {
XposedHelpers.setIntField(info, "resizeMode", 3);
XposedHelpers.setIntField(info, "minResizeWidth", 40);
XposedHelpers.setIntField(info, "minResizeHeight", 40);
}
}
}
});
}
} catch (Throwable t) {
XposedBridge.log(t);
}
}
}
| Launcher: adjusted for compatibility with Google App 5.23.19
Action to show app drawer doesn't seem to work anymore. Although proper
method is called for sure, app drawer just won't show. Looks like this
action is now strongly bound to all apps button interaction. | src/com/ceco/lollipop/gravitybox/ModLauncher.java | Launcher: adjusted for compatibility with Google App 5.23.19 Action to show app drawer doesn't seem to work anymore. Although proper method is called for sure, app drawer just won't show. Looks like this action is now strongly bound to all apps button interaction. | <ide><path>rc/com/ceco/lollipop/gravitybox/ModLauncher.java
<ide> import android.content.IntentFilter;
<ide> import android.os.Bundle;
<ide> import android.view.View;
<add>import android.widget.Toast;
<ide> import de.robv.android.xposed.XC_MethodHook;
<ide> import de.robv.android.xposed.XSharedPreferences;
<ide> import de.robv.android.xposed.XposedBridge;
<ide> String fProfile;
<ide> String fNumRows;
<ide> String fNumCols;
<add> String invariantProfile;
<ide> public DynamicGrid(String cN, String fp, String fnr, String fnc) {
<add> this(cN, fp, fnr, fnc, null);
<add> }
<add> public DynamicGrid(String cN, String fp, String fnr, String fnc, String invDp) {
<ide> className = cN;
<ide> fProfile = fp;
<ide> fNumRows = fnr;
<ide> fNumCols = fnc;
<add> invariantProfile = invDp;
<ide> }
<ide> }
<ide>
<ide> String methodName;
<ide> Object[] paramTypes;
<ide> Object[] paramValues;
<add> String fLauncherCallbacks;
<ide> public ShowAllApps(String mName, Object[] pTypes, Object[] pValues) {
<add> this(mName, pTypes, pValues, null);
<add> }
<add> public ShowAllApps(String mName, Object[] pTypes, Object[] pValues, String flc) {
<ide> methodName = mName;
<ide> paramTypes = pTypes;
<ide> paramValues = pValues;
<add> fLauncherCallbacks = flc;
<ide> }
<ide> }
<ide>
<ide> CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.co", "KV", "HU", "HV"));
<ide> CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.bf", "KU", "HT", "HU"));
<ide> CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.bf", "Mf", "Je", "Jf"));
<add> CLASS_DYNAMIC_GRID.add(new DynamicGrid("com.android.launcher3.Launcher", "mDeviceProfile", "numRows", "numColumns", "com.android.launcher3.InvariantDeviceProfile"));
<ide>
<ide> METHOD_SHOW_ALL_APPS = new ArrayList<ShowAllApps>();
<ide> METHOD_SHOW_ALL_APPS.add(new ShowAllApps("onClickAllAppsButton",
<ide> METHOD_SHOW_ALL_APPS.add(new ShowAllApps("a",
<ide> new Object[] { boolean.class, "com.android.launcher3.h", boolean.class },
<ide> new Object[] { false, "EH", false } ));
<add> METHOD_SHOW_ALL_APPS.add(new ShowAllApps("onClickAllAppsButton",
<add> new Object[] { View.class },
<add> new Object[] { "mAllAppsButton" },
<add> "mLauncherCallbacks"));
<ide> }
<ide>
<ide> private static void log(String message) {
<ide> private static boolean mReceiverRegistered;
<ide> private static Method mShowAllAppsMethod;
<ide> private static Object[] mShowAllAppsParams;
<add> private static Object mShowAllAppsObject;
<ide>
<ide> private static BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
<ide> @Override
<ide> };
<ide>
<ide> public static void init(final XSharedPreferences prefs, final ClassLoader classLoader) {
<add> prefs.reload();
<ide> boolean dynamicGridFound = false;
<ide> for (DynamicGrid dg : CLASS_DYNAMIC_GRID) {
<ide> final DynamicGrid dynamicGrid = dg;
<ide> try {
<del> Class<?> cls = XposedHelpers.findClass(dg.className, classLoader);
<del> Field profile = cls.getDeclaredField(dg.fProfile);
<del> Class<?> profileClass = profile.getType();
<add> Class<?> cls = XposedHelpers.findClass(dg.className, classLoader);
<add> Class<?> profileClass = dg.invariantProfile != null ?
<add> XposedHelpers.findClass(dg.invariantProfile, classLoader) :
<add> cls.getDeclaredField(dg.fProfile).getType();
<ide> Field numRows = profileClass.getDeclaredField(dg.fNumRows);
<del> if (!numRows.getType().isAssignableFrom(float.class))
<del> throw new Exception("numRows doesn't seem to be of float type");
<add> if (!numRows.getType().isAssignableFrom(dg.invariantProfile != null ? int.class : float.class))
<add> throw new Exception("numRows doesn't seem to be of expected type");
<ide> Field numCols = profileClass.getDeclaredField(dg.fNumCols);
<del> if (!numCols.getType().isAssignableFrom(float.class))
<del> throw new Exception("numCols doesn't seem to be of float type");
<add> if (!numCols.getType().isAssignableFrom(dg.invariantProfile != null ? int.class : float.class))
<add> throw new Exception("numCols doesn't seem to be of expected type");
<ide> if (DEBUG) log("Probably found DynamicGrid class as: " + dg.className +
<ide> "; numRows=" + dg.fNumRows + "; numCols=" + dg.fNumCols);
<del> dg.clazz = cls;
<add> dg.clazz = dg.invariantProfile != null ? profileClass : cls;
<ide> } catch (Throwable t) {
<ide> if (DEBUG) log("search for dynamic grid " + dg.className + ": " + t.getMessage());
<ide> continue;
<ide> XposedBridge.hookAllConstructors(dynamicGrid.clazz, new XC_MethodHook() {
<ide> @Override
<ide> protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
<del> prefs.reload();
<del> Object profile = XposedHelpers.getObjectField(param.thisObject, dynamicGrid.fProfile);
<add> Object profile = dynamicGrid.invariantProfile != null ? param.thisObject :
<add> XposedHelpers.getObjectField(param.thisObject, dynamicGrid.fProfile);
<ide> if (profile != null) {
<ide> final int rows = Integer.valueOf(prefs.getString(
<ide> GravityBoxSettings.PREF_KEY_LAUNCHER_DESKTOP_GRID_ROWS, "0"));
<ide> if (mShouldShowAppDrawer) {
<ide> mShouldShowAppDrawer = false;
<ide> if (mShowAllAppsMethod != null) {
<del> mShowAllAppsMethod.invoke(param.thisObject, mShowAllAppsParams);
<add> mShowAllAppsMethod.invoke(mShowAllAppsObject, mShowAllAppsParams);
<ide> } else {
<ide> for (ShowAllApps sapm : METHOD_SHOW_ALL_APPS) {
<ide> try {
<ide> (String) sapm.paramTypes[i], classLoader);
<ide> }
<ide> if (sapm.paramValues[i] instanceof String) {
<del> Object type = XposedHelpers.getStaticObjectField(
<del> (Class<?>) sapm.paramTypes[i],
<del> (String) sapm.paramValues[i]);
<del> if (!"Applications".equals(type.toString()))
<del> continue;
<del> sapm.paramValues[i] = type;
<add> if (sapm.fLauncherCallbacks != null) {
<add> sapm.paramValues[i] = XposedHelpers.getObjectField(
<add> param.thisObject, (String) sapm.paramValues[i]);
<add> } else {
<add> Object type = XposedHelpers.getStaticObjectField(
<add> (Class<?>) sapm.paramTypes[i],
<add> (String) sapm.paramValues[i]);
<add> if (!"Applications".equals(type.toString()))
<add> continue;
<add> sapm.paramValues[i] = type;
<add> }
<ide> }
<ide> }
<del> Class<?> clazz = param.thisObject.getClass();
<del> if (clazz.getName().equals(CLASS_LAUNCHER)) {
<add> Object o = sapm.fLauncherCallbacks == null ? param.thisObject :
<add> XposedHelpers.getObjectField(param.thisObject, sapm.fLauncherCallbacks);
<add> Class<?> clazz = o.getClass();
<add> if (clazz.getName().equals(CLASS_LAUNCHER) || sapm.fLauncherCallbacks != null) {
<ide> mShowAllAppsMethod = XposedHelpers.findMethodExact(clazz,
<ide> sapm.methodName, sapm.paramTypes);
<ide> } else if (clazz.getSuperclass().getName().equals(CLASS_LAUNCHER)) {
<ide> mShowAllAppsMethod = XposedHelpers.findMethodExact(clazz.getSuperclass(),
<ide> sapm.methodName, sapm.paramTypes);
<ide> }
<add> mShowAllAppsObject = o;
<ide> mShowAllAppsParams = sapm.paramValues;
<del> mShowAllAppsMethod.invoke(param.thisObject, mShowAllAppsParams);
<add> mShowAllAppsMethod.invoke(mShowAllAppsObject, mShowAllAppsParams);
<add> if (sapm.fLauncherCallbacks != null) {
<add> Toast.makeText((Activity) param.thisObject,
<add> "Such action might not be supported in this Launcher version",
<add> Toast.LENGTH_LONG).show();
<add> }
<ide> break;
<ide> } catch (Throwable t) {
<ide> if (DEBUG) log("Method name " + sapm.methodName + |
|
JavaScript | mit | 7d6df48dfebbf5f00c3bf775183280be5db45e75 | 0 | mcanthony/dygraphs,mcanthony/dygraphs,pshevtsov/dygraphs,panuhorsmalahti/dygraphs,jmptrader/dygraphs,kbaggott/dygraphs,witsa/dygraphs,panuhorsmalahti/dygraphs,kbaggott/dygraphs,vhotspur/dygraphs,Yong-Lee/dygraphs,danvk/dygraphs,reinert/dygraphs,mcanthony/dygraphs,jmptrader/dygraphs,timeu/dygraphs,klausw/dygraphs,pshevtsov/dygraphs,petechap/dygraphs,timeu/dygraphs,mantyr/dygraphs,klausw/dygraphs,Akiyah/dygraphs,davidmsibley/dygraphs,kbaggott/dygraphs,kbaggott/dygraphs,vhotspur/dygraphs,klausw/dygraphs,mcanthony/dygraphs,mcanthony/dygraphs,davidmsibley/dygraphs,klausw/dygraphs,vhotspur/dygraphs,witsa/dygraphs,reinert/dygraphs,Yong-Lee/dygraphs,Akiyah/dygraphs,petechap/dygraphs,mariolll/dygraphs,jmptrader/dygraphs,grantadesign/dygraphs,danvk/dygraphs,panuhorsmalahti/dygraphs,socib/dygraphs,socib/dygraphs,mantyr/dygraphs,grantadesign/dygraphs,danvk/dygraphs,pshevtsov/dygraphs,timeu/dygraphs,vhotspur/dygraphs,mariolll/dygraphs,danvk/dygraphs,reinert/dygraphs,mantyr/dygraphs,witsa/dygraphs,Akiyah/dygraphs,kbaggott/dygraphs,davidmsibley/dygraphs,petechap/dygraphs,Yong-Lee/dygraphs,Yong-Lee/dygraphs,jmptrader/dygraphs,klausw/dygraphs,socib/dygraphs,mantyr/dygraphs,petechap/dygraphs,pshevtsov/dygraphs,danvk/dygraphs,grantadesign/dygraphs,witsa/dygraphs,timeu/dygraphs,Akiyah/dygraphs,Yong-Lee/dygraphs,reinert/dygraphs,panuhorsmalahti/dygraphs,jmptrader/dygraphs,witsa/dygraphs,petechap/dygraphs,vhotspur/dygraphs,timeu/dygraphs,mariolll/dygraphs,grantadesign/dygraphs,davidmsibley/dygraphs,grantadesign/dygraphs,davidmsibley/dygraphs,mariolll/dygraphs,Akiyah/dygraphs,socib/dygraphs,mariolll/dygraphs,panuhorsmalahti/dygraphs,mantyr/dygraphs,pshevtsov/dygraphs,reinert/dygraphs | /**
* @license
* Copyright 2006 Dan Vanderkam ([email protected])
* MIT-licensed (http://opensource.org/licenses/MIT)
*/
/**
* @fileoverview Creates an interactive, zoomable graph based on a CSV file or
* string. Dygraph can handle multiple series with or without error bars. The
* date/value ranges will be automatically set. Dygraph uses the
* <canvas> tag, so it only works in FF1.5+.
* @author [email protected] (Dan Vanderkam)
Usage:
<div id="graphdiv" style="width:800px; height:500px;"></div>
<script type="text/javascript">
new Dygraph(document.getElementById("graphdiv"),
"datafile.csv", // CSV file with headers
{ }); // options
</script>
The CSV file is of the form
Date,SeriesA,SeriesB,SeriesC
YYYYMMDD,A1,B1,C1
YYYYMMDD,A2,B2,C2
If the 'errorBars' option is set in the constructor, the input should be of
the form
Date,SeriesA,SeriesB,...
YYYYMMDD,A1,sigmaA1,B1,sigmaB1,...
YYYYMMDD,A2,sigmaA2,B2,sigmaB2,...
If the 'fractions' option is set, the input should be of the form:
Date,SeriesA,SeriesB,...
YYYYMMDD,A1/B1,A2/B2,...
YYYYMMDD,A1/B1,A2/B2,...
And error bars will be calculated automatically using a binomial distribution.
For further documentation and examples, see http://dygraphs.com/
*/
/*jshint globalstrict: true */
/*global DygraphLayout:false, DygraphCanvasRenderer:false, DygraphOptions:false, G_vmlCanvasManager:false */
"use strict";
/**
 * Creates an interactive, zoomable chart.
 *
 * @constructor
 * @param {div | String} div A div or the id of a div into which to construct
 * the chart.
 * @param {String | Function} file A file containing CSV data or a function
 * that returns this data. The most basic expected format for each line is
 * "YYYY/MM/DD,val1,val2,...". For more information, see
 * http://dygraphs.com/data.html.
 * @param {Object} attrs Various other attributes, e.g. errorBars determines
 * whether the input data contains error ranges. For a complete list of
 * options, see http://dygraphs.com/options.html.
 */
var Dygraph = function(div, data, opts, opt_fourth_param) {
  // A fourth argument means the caller is using the legacy form
  // Dygraph(div, file, labels, attrs). Keep supporting it, but complain.
  if (opt_fourth_param === undefined) {
    this.__init__(div, data, opts);
  } else {
    this.warn("Using deprecated four-argument dygraph constructor");
    this.__old_init__(div, data, opts, opt_fourth_param);
  }
};
// Class name and version, exposed for introspection/debugging.
Dygraph.NAME = "Dygraph";
Dygraph.VERSION = "1.2";
// Returns e.g. "[Dygraph 1.2]".
Dygraph.__repr__ = function() {
  return "[" + this.NAME + " " + this.VERSION + "]";
};
/**
 * Returns information about the Dygraph class.
 */
Dygraph.toString = function() {
  return this.__repr__();
};
// Various default values
Dygraph.DEFAULT_ROLL_PERIOD = 1;  // rolling-average window, in data points
Dygraph.DEFAULT_WIDTH = 480;   // px; used when the container div has no size (see __init__)
Dygraph.DEFAULT_HEIGHT = 320;  // px
// For max 60 Hz. animation:
Dygraph.ANIMATION_STEPS = 12;
Dygraph.ANIMATION_DURATION = 200;  // ms (12 steps / 200ms ~= 60 frames/sec)
// These are defined before DEFAULT_ATTRS so that it can refer to them.
/**
 * @private
 * Return a string version of a number. This respects the digitsAfterDecimal
 * and maxNumberWidth options.
 * @param {Number} x The number to be formatted
 * @param {Dygraph} opts An options view
 * @param {String} name The name of the point's data series
 * @param {Dygraph} g The dygraph object
 */
Dygraph.numberValueFormatter = function(x, opts, pt, g) {
  var sigFigs = opts('sigFigs');
  // A fixed significant-figure count overrides the decimal-digit options.
  if (sigFigs !== null) {
    return Dygraph.floatFormat(x, sigFigs);
  }
  var digits = opts('digitsAfterDecimal');
  var maxNumberWidth = opts('maxNumberWidth');
  var tooBig = Math.abs(x) >= Math.pow(10, maxNumberWidth);
  var tooSmall = Math.abs(x) < Math.pow(10, -digits);
  // Switch to scientific notation when fixed display would over/underflow.
  if (x !== 0.0 && (tooBig || tooSmall)) {
    return x.toExponential(digits);
  }
  return '' + Dygraph.round_(x, digits);
};
/**
 * variant for use as an axisLabelFormatter.
 * @private
 */
Dygraph.numberAxisLabelFormatter = function(x, granularity, opts, g) {
  // NOTE(review): 'g' is passed in the 'pt' slot of numberValueFormatter's
  // (x, opts, pt, g) signature. Harmless today because that formatter only
  // reads x and opts, but confirm before relying on pt/g there.
  return Dygraph.numberValueFormatter(x, opts, g);
};
/**
 * Convert a JS date (millis since epoch) to a "YYYY/MM/DD[ HH:MM:SS]" string.
 * The time-of-day suffix is appended only when it is non-zero.
 * @param {Number} date The JavaScript date (ms since epoch)
 * @return {String} A date of the form "YYYY/MM/DD"
 * @private
 */
Dygraph.dateString_ = function(date) {
  var zeropad = Dygraph.zeropad;
  var d = new Date(date);
  var year = "" + d.getFullYear();
  var month = zeropad(d.getMonth() + 1);  // getMonth() is zero-based.
  var day = zeropad(d.getDate());
  // Seconds elapsed since midnight; non-zero means a time suffix is needed.
  var secondsIntoDay =
      d.getHours() * 3600 + d.getMinutes() * 60 + d.getSeconds();
  var suffix = secondsIntoDay ? " " + Dygraph.hmsString_(date) : "";
  return year + "/" + month + "/" + day + suffix;
};
/**
 * Convert a JS date to a string appropriate to display on an axis that
 * is displaying values at the stated granularity.
 * @param {Date} date The date to format
 * @param {Number} granularity One of the Dygraph granularity constants
 * @return {String} The formatted date
 * @private
 */
Dygraph.dateAxisFormatter = function(date, granularity) {
  if (granularity >= Dygraph.DECADAL) {
    // Coarse scales: year only.
    return date.strftime('%Y');
  } else if (granularity >= Dygraph.MONTHLY) {
    // Month + two-digit year, e.g. "Jan 12".
    return date.strftime('%b %y');
  } else {
    // Non-zero iff the date has a time-of-day component (down to ms).
    var frac = date.getHours() * 3600 + date.getMinutes() * 60 + date.getSeconds() + date.getMilliseconds();
    if (frac === 0 || granularity >= Dygraph.DAILY) {
      // NOTE(review): one hour is added before formatting '%d%b' —
      // presumably a DST/midnight-boundary workaround; confirm before
      // changing.
      return new Date(date.getTime() + 3600*1000).strftime('%d%b');
    } else {
      // Sub-day granularity with a time component: show HH:MM:SS instead.
      return Dygraph.hmsString_(date.getTime());
    }
  }
};
/**
 * Standard plotters. These may be used by clients.
 * Available plotters are:
 * - Dygraph.Plotters.linePlotter: draws central lines (most common)
 * - Dygraph.Plotters.errorPlotter: draws error bars
 * - Dygraph.Plotters.fillPlotter: draws fills under lines (used with fillGraph)
 *
 * By default, the plotter is [fillPlotter, errorPlotter, linePlotter].
 * This causes all the lines to be drawn over all the fills/error bars.
 */
// Re-exported from DygraphCanvasRenderer so clients need not reference the
// renderer directly.
Dygraph.Plotters = DygraphCanvasRenderer._Plotters;
// Default attribute values. Copied into this.attrs_ at construction time
// (see __init__); user-supplied options live in this.user_attrs_ and take
// precedence, so this object itself is never mutated.
Dygraph.DEFAULT_ATTRS = {
  highlightCircleSize: 3,
  highlightSeriesOpts: null,
  highlightSeriesBackgroundAlpha: 0.5,
  labelsDivWidth: 250,
  labelsDivStyles: {
    // TODO(danvk): move defaults from createStatusMessage_ here.
  },
  labelsSeparateLines: false,
  labelsShowZeroValues: true,
  labelsKMB: false,
  labelsKMG2: false,
  showLabelsOnHighlight: true,
  digitsAfterDecimal: 2,
  maxNumberWidth: 6,
  sigFigs: null,
  strokeWidth: 1.0,
  strokeBorderWidth: 0,
  strokeBorderColor: "white",
  axisTickSize: 3,
  axisLabelFontSize: 14,
  xAxisLabelWidth: 50,
  yAxisLabelWidth: 50,
  rightGap: 5,
  showRoller: false,
  xValueParser: Dygraph.dateParser,
  delimiter: ',',
  sigma: 2.0,
  errorBars: false,
  fractions: false,
  wilsonInterval: true, // only relevant if fractions is true
  customBars: false,
  fillGraph: false,
  fillAlpha: 0.15,
  connectSeparatedPoints: false,
  stackedGraph: false,
  hideOverlayOnMouseOut: true,
  // TODO(danvk): support 'onmouseover' and 'never', and remove synonyms.
  legend: 'onmouseover', // the only relevant value at the moment is 'always'.
  stepPlot: false,
  avoidMinZero: false,
  drawAxesAtZero: false,
  // Sizes of the various chart labels.
  titleHeight: 28,
  xLabelHeight: 18,
  yLabelWidth: 18,
  // Axis and grid rendering defaults.
  drawXAxis: true,
  drawYAxis: true,
  axisLineColor: "black",
  axisLineWidth: 0.3,
  gridLineWidth: 0.3,
  axisLabelColor: "black",
  axisLabelFont: "Arial", // TODO(danvk): is this implemented?
  axisLabelWidth: 50,
  drawYGrid: true,
  drawXGrid: true,
  gridLineColor: "rgb(128,128,128)",
  interactionModel: null, // will be set to Dygraph.Interaction.defaultModel
  animatedZooms: false, // (for now)
  // Range selector options
  showRangeSelector: false,
  rangeSelectorHeight: 40,
  rangeSelectorPlotStrokeColor: "#808FAB",
  rangeSelectorPlotFillColor: "#A7B1C4",
  // The ordering here ensures that central lines always appear above any
  // fill bars/error bars.
  plotter: [
    Dygraph.Plotters.fillPlotter,
    Dygraph.Plotters.errorPlotter,
    Dygraph.Plotters.linePlotter
  ],
  plugins: [ ],
  // per-axis options
  axes: {
    x: {
      pixelsPerLabel: 60,
      axisLabelFormatter: Dygraph.dateAxisFormatter,
      valueFormatter: Dygraph.dateString_,
      ticker: null // will be set in dygraph-tickers.js
    },
    y: {
      pixelsPerLabel: 30,
      valueFormatter: Dygraph.numberValueFormatter,
      axisLabelFormatter: Dygraph.numberAxisLabelFormatter,
      ticker: null // will be set in dygraph-tickers.js
    },
    y2: {
      pixelsPerLabel: 30,
      valueFormatter: Dygraph.numberValueFormatter,
      axisLabelFormatter: Dygraph.numberAxisLabelFormatter,
      ticker: null // will be set in dygraph-tickers.js
    }
  }
};
// Directions for panning and zooming. Use bit operations when combined
// values are possible (e.g. HORIZONTAL | VERTICAL covers both axes).
Dygraph.HORIZONTAL = 1;
Dygraph.VERTICAL = 2;
// Installed plugins, in order of precedence (most-general to most-specific).
// Plugins are installed after they are defined, in plugins/install.js.
Dygraph.PLUGINS = [
];
// Used for initializing annotation CSS rules only once.
Dygraph.addedAnnotationCSS = false;
/**
 * Legacy four-argument initializer: Dygraph(div, file, labels, attrs).
 * Labels is no longer a constructor parameter, since it's typically set
 * directly from the data source. It also contains a name for the x-axis,
 * which the previous constructor form did not.
 * @private
 */
Dygraph.prototype.__old_init__ = function(div, file, labels, attrs) {
  if (labels !== null) {
    // Prepend the implicit x-axis label, then fold the result into attrs.
    var new_labels = ["Date"].concat(labels);
    Dygraph.update(attrs, { 'labels': new_labels });
  }
  this.__init__(div, file, attrs);
};
/**
 * Initializes the Dygraph. This creates a new DIV and constructs the PlotKit
 * and context <canvas> inside of it. See the constructor for details
 * on the parameters.
 * @param {Element} div the Element to render the graph into.
 * @param {String | Function} file Source data
 * @param {Object} attrs Miscellaneous other options
 * @private
 */
Dygraph.prototype.__init__ = function(div, file, attrs) {
  // Hack for IE: if we're using excanvas and the document hasn't finished
  // loading yet (and hence may not have initialized whatever it needs to
  // initialize), then keep calling this routine periodically until it has.
  if (/MSIE/.test(navigator.userAgent) && !window.opera &&
      typeof(G_vmlCanvasManager) != 'undefined' &&
      document.readyState != 'complete') {
    var self = this;
    setTimeout(function() { self.__init__(div, file, attrs); }, 100);
    return;
  }
  // Support two-argument constructor
  if (attrs === null || attrs === undefined) { attrs = {}; }
  // Translate deprecated option names into their modern equivalents.
  attrs = Dygraph.mapLegacyOptions_(attrs);
  // The container may be passed as an element or as its id.
  if (typeof(div) == 'string') {
    div = document.getElementById(div);
  }
  if (!div) {
    Dygraph.error("Constructing dygraph with a non-existent div!");
    return;
  }
  this.isUsingExcanvas_ = typeof(G_vmlCanvasManager) != 'undefined';
  // Copy the important bits into the object
  // TODO(danvk): most of these should just stay in the attrs_ dictionary.
  this.maindiv_ = div;
  this.file_ = file;
  this.rollPeriod_ = attrs.rollPeriod || Dygraph.DEFAULT_ROLL_PERIOD;
  this.previousVerticalX_ = -1;
  this.fractions_ = attrs.fractions || false;
  this.dateWindow_ = attrs.dateWindow || null;
  this.is_initial_draw_ = true;
  this.annotations_ = [];
  // Zoomed indicators - These indicate when the graph has been zoomed and on what axis.
  this.zoomed_x_ = false;
  this.zoomed_y_ = false;
  // Clear the div. This ensure that, if multiple dygraphs are passed the same
  // div, then only one will be drawn.
  div.innerHTML = "";
  // For historical reasons, the 'width' and 'height' options trump all CSS
  // rules _except_ for an explicit 'width' or 'height' on the div.
  // As an added convenience, if the div has zero height (like <div></div> does
  // without any styles), then we use a default height/width.
  if (div.style.width === '' && attrs.width) {
    div.style.width = attrs.width + "px";
  }
  if (div.style.height === '' && attrs.height) {
    div.style.height = attrs.height + "px";
  }
  if (div.style.height === '' && div.clientHeight === 0) {
    div.style.height = Dygraph.DEFAULT_HEIGHT + "px";
    if (div.style.width === '') {
      div.style.width = Dygraph.DEFAULT_WIDTH + "px";
    }
  }
  // these will be zero if the dygraph's div is hidden.
  this.width_ = div.clientWidth;
  this.height_ = div.clientHeight;
  // TODO(danvk): set fillGraph to be part of attrs_ here, not user_attrs_.
  if (attrs.stackedGraph) {
    attrs.fillGraph = true;
    // TODO(nikhilk): Add any other stackedGraph checks here.
  }
  // DEPRECATION WARNING: All option processing should be moved from
  // attrs_ and user_attrs_ to options_, which holds all this information.
  //
  // Dygraphs has many options, some of which interact with one another.
  // To keep track of everything, we maintain two sets of options:
  //
  //  this.user_attrs_   only options explicitly set by the user.
  //  this.attrs_        defaults, options derived from user_attrs_, data.
  //
  // Options are then accessed this.attr_('attr'), which first looks at
  // user_attrs_ and then computed attrs_. This way Dygraphs can set intelligent
  // defaults without overriding behavior that the user specifically asks for.
  this.user_attrs_ = {};
  Dygraph.update(this.user_attrs_, attrs);
  // This sequence ensures that Dygraph.DEFAULT_ATTRS is never modified.
  this.attrs_ = {};
  Dygraph.updateDeep(this.attrs_, Dygraph.DEFAULT_ATTRS);
  this.boundaryIds_ = [];
  this.setIndexByName_ = {};
  this.datasetIndex_ = [];
  this.registeredEvents_ = [];
  this.eventListeners_ = {};
  this.attributes_ = new DygraphOptions(this);
  // Create the containing DIV and other interactive elements
  this.createInterface_();
  // Activate plugins.
  this.plugins_ = [];
  var plugins = Dygraph.PLUGINS.concat(this.getOption('plugins'));
  for (var i = 0; i < plugins.length; i++) {
    var Plugin = plugins[i];
    var pluginInstance = new Plugin();
    var pluginDict = {
      plugin: pluginInstance,
      events: {},
      options: {},
      pluginOptions: {}
    };
    // A plugin declares which events it handles by returning a map of
    // event name -> handler from activate().
    var handlers = pluginInstance.activate(this);
    for (var eventName in handlers) {
      // TODO(danvk): validate eventName.
      pluginDict.events[eventName] = handlers[eventName];
    }
    this.plugins_.push(pluginDict);
  }
  // At this point, plugins can no longer register event handlers.
  // Construct a map from event -> ordered list of [callback, plugin].
  for (var i = 0; i < this.plugins_.length; i++) {
    var plugin_dict = this.plugins_[i];
    for (var eventName in plugin_dict.events) {
      if (!plugin_dict.events.hasOwnProperty(eventName)) continue;
      var callback = plugin_dict.events[eventName];
      var pair = [plugin_dict.plugin, callback];
      if (!(eventName in this.eventListeners_)) {
        this.eventListeners_[eventName] = [pair];
      } else {
        this.eventListeners_[eventName].push(pair);
      }
    }
  }
  this.createDragInterface_();
  // Kick off the initial data load and draw.
  this.start_();
};
/**
 * Triggers a cascade of events to the various plugins which are interested in them.
 * Listeners are invoked in reverse registration order; a listener may call
 * event.stopPropagation() to halt the cascade.
 *
 * @param {String} name The event name (a key into this.eventListeners_).
 * @param {Object} extra_props Extra properties merged onto the event object.
 * @return {Boolean} e.defaultPrevented — true if some listener called
 *     event.preventDefault(), i.e. the default behavior should NOT be
 *     performed. (The previous comment here claimed the opposite meaning.)
 *     NOTE(review): when no listener is registered this returns true, which
 *     is inconsistent with that meaning — confirm intended semantics before
 *     relying on the return value.
 * @private
 */
Dygraph.prototype.cascadeEvents_ = function(name, extra_props) {
  if (!(name in this.eventListeners_)) return true;
  // QUESTION: can we use objects & prototypes to speed this up?
  var e = {
    dygraph: this,
    cancelable: false,
    defaultPrevented: false,
    preventDefault: function() {
      if (!e.cancelable) throw "Cannot call preventDefault on non-cancelable event.";
      e.defaultPrevented = true;
    },
    propagationStopped: false,
    stopPropagation: function() {
      e.propagationStopped = true;
    }
  };
  Dygraph.update(e, extra_props);
  var callback_plugin_pairs = this.eventListeners_[name];
  if (callback_plugin_pairs) {
    // Iterate backwards: most-recently-registered (most-specific) first.
    for (var i = callback_plugin_pairs.length - 1; i >= 0; i--) {
      var plugin = callback_plugin_pairs[i][0];
      var callback = callback_plugin_pairs[i][1];
      callback.call(plugin, e);
      if (e.propagationStopped) break;
    }
  }
  return e.defaultPrevented;
};
/**
 * Returns the zoomed status of the chart for one or both axes.
 *
 * Axis is an optional parameter. Can be set to 'x' or 'y'.
 *
 * The zoomed status for an axis is set whenever a user zooms using the mouse
 * or when the dateWindow or valueRange are updated (unless the
 * isZoomedIgnoreProgrammaticZoom option is also specified).
 */
Dygraph.prototype.isZoomed = function(axis) {
  if (axis === undefined || axis === null) {
    // No axis specified: zoomed if either axis is zoomed.
    return this.zoomed_x_ || this.zoomed_y_;
  }
  switch (axis) {
    case 'x': return this.zoomed_x_;
    case 'y': return this.zoomed_y_;
    default:
      throw "axis parameter is [" + axis + "] must be null, 'x' or 'y'.";
  }
};
/**
 * Returns information about the Dygraph object, including its containing ID.
 */
Dygraph.prototype.toString = function() {
  var container = this.maindiv_;
  var label = container;
  if (container && container.id) {
    label = container.id;
  }
  return "[Dygraph " + label + "]";
};
/**
 * @private
 * Returns the value of an option. This may be set by the user (either in the
 * constructor or by calling updateOptions) or by dygraphs, and may be set to a
 * per-series value.
 * @param { String } name The name of the option, e.g. 'rollPeriod'.
 * @param { String } [seriesName] The name of the series to which the option
 * will be applied. If no per-series value of this option is available, then
 * the global value is returned. This is optional.
 * @return { ... } The value of the option.
 */
Dygraph.prototype.attr_ = function(name, seriesName) {
  // The markers below suggest this validation block is stripped from the
  // combined/minified build; it sanity-checks option names against the
  // generated options reference during development.
  // <REMOVE_FOR_COMBINED>
  if (typeof(Dygraph.OPTIONS_REFERENCE) === 'undefined') {
    this.error('Must include options reference JS for testing');
  } else if (!Dygraph.OPTIONS_REFERENCE.hasOwnProperty(name)) {
    this.error('Dygraphs is using property ' + name + ', which has no entry ' +
               'in the Dygraphs.OPTIONS_REFERENCE listing.');
    // Only log this error once.
    Dygraph.OPTIONS_REFERENCE[name] = true;
  }
  // </REMOVE_FOR_COMBINED>
  return seriesName ? this.attributes_.getForSeries(name, seriesName) : this.attributes_.get(name);
};
/**
 * Returns the current value for an option, as set in the constructor or via
 * updateOptions. You may pass in an (optional) series name to get per-series
 * values for the option.
 *
 * All values returned by this method should be considered immutable. If you
 * modify them, there is no guarantee that the changes will be honored or that
 * dygraphs will remain in a consistent state. If you want to modify an option,
 * use updateOptions() instead.
 *
 * @param { String } name The name of the option (e.g. 'strokeWidth')
 * @param { String } [opt_seriesName] Series name to get per-series values.
 * @return { ... } The value of the option.
 */
Dygraph.prototype.getOption = function(name, opt_seriesName) {
  return this.attr_(name, opt_seriesName);
};
/**
 * Returns the value of an option for a particular axis.
 * @param { String } name The name of the option (e.g. 'gridLineColor')
 * @param { String } axis The axis name (i.e. 'x', 'y' or 'y2')
 * @return { ... } The per-axis value of the option.
 */
Dygraph.prototype.getOptionForAxis = function(name, axis) {
  return this.attributes_.getForAxis(name, axis);
};
/**
 * @private
 * @param String} axis The name of the axis (i.e. 'x', 'y' or 'y2')
 * @return { ... } A function mapping string -> option value
 */
Dygraph.prototype.optionsViewForAxis_ = function(axis) {
  var self = this;
  // Lookup order: user per-axis -> user global -> default per-axis ->
  // legacy axes_ entries -> global defaults (via attr_).
  return function(opt) {
    var userAxes = self.user_attrs_.axes;
    if (userAxes && userAxes[axis] && userAxes[axis][opt]) {
      return userAxes[axis][opt];
    }
    // user-specified attributes always trump defaults, even if they're less
    // specific.
    if (typeof(self.user_attrs_[opt]) != 'undefined') {
      return self.user_attrs_[opt];
    }
    var defaultAxes = self.attrs_.axes;
    if (defaultAxes && defaultAxes[axis] && defaultAxes[axis][opt]) {
      return defaultAxes[axis][opt];
    }
    // check old-style axis options
    // TODO(danvk): add a deprecation warning if either of these match.
    if (axis == 'y' && self.axes_[0].hasOwnProperty(opt)) {
      return self.axes_[0][opt];
    }
    if (axis == 'y2' && self.axes_[1].hasOwnProperty(opt)) {
      return self.axes_[1][opt];
    }
    return self.attr_(opt);
  };
};
/**
 * Returns the current rolling period, as set by the user or an option.
 * Backed by this.rollPeriod_, which is initialized in __init__.
 * @return {Number} The number of points in the rolling window
 */
Dygraph.prototype.rollPeriod = function() {
  return this.rollPeriod_;
};
/**
 * Returns the currently-visible x-range. This can be affected by zooming,
 * panning or a call to updateOptions.
 * Returns a two-element array: [left, right].
 * If the Dygraph has dates on the x-axis, these will be millis since epoch.
 */
Dygraph.prototype.xAxisRange = function() {
  // An explicit date window (zoom/pan) takes precedence over the data extent.
  if (this.dateWindow_) {
    return this.dateWindow_;
  }
  return this.xAxisExtremes();
};
/**
 * Returns the lower- and upper-bound x-axis values of the
 * data set, i.e. the x values of the first and last rows.
 */
Dygraph.prototype.xAxisExtremes = function() {
  var data = this.rawData_;
  return [data[0][0], data[data.length - 1][0]];
};
/**
 * Returns the currently-visible y-range for an axis. This can be affected by
 * zooming, panning or a call to updateOptions. Axis indices are zero-based. If
 * called with no arguments, returns the range of the first axis.
 * Returns a two-element array: [bottom, top], or null for an invalid index.
 */
Dygraph.prototype.yAxisRange = function(idx) {
  if (typeof(idx) == "undefined") idx = 0;
  if (idx < 0 || idx >= this.axes_.length) return null;
  // Return a fresh pair rather than the internal array.
  var range = this.axes_[idx].computedValueRange;
  return [range[0], range[1]];
};
/**
 * Returns the currently-visible y-ranges for each axis. This can be affected by
 * zooming, panning, calls to updateOptions, etc.
 * Returns an array of [bottom, top] pairs, one for each y-axis.
 */
Dygraph.prototype.yAxisRanges = function() {
  var ranges = [];
  for (var idx = 0; idx < this.axes_.length; idx++) {
    ranges.push(this.yAxisRange(idx));
  }
  return ranges;
};
// TODO(danvk): use these functions throughout dygraphs.
/**
 * Convert from data coordinates to canvas/div X/Y coordinates.
 * If specified, do this conversion for the coordinate system of a particular
 * axis. Uses the first axis by default.
 * Returns a two-element array: [X, Y]
 *
 * Note: use toDomXCoord instead of toDomCoords(x, null) and use toDomYCoord
 * instead of toDomCoords(null, y, axis).
 */
Dygraph.prototype.toDomCoords = function(x, y, axis) {
  var domX = this.toDomXCoord(x);
  var domY = this.toDomYCoord(y, axis);
  return [domX, domY];
};
/**
 * Convert from data x coordinates to canvas/div X coordinate.
 * If specified, do this conversion for the coordinate system of a particular
 * axis.
 * Returns a single value or null if x is null.
 */
Dygraph.prototype.toDomXCoord = function(x) {
  if (x === null) return null;
  var area = this.plotter_.area;
  var xRange = this.xAxisRange();
  // Fraction of the way across the visible x-range, mapped into the plot area.
  var pct = (x - xRange[0]) / (xRange[1] - xRange[0]);
  return area.x + pct * area.w;
};
/**
 * Convert from data y coordinates to canvas/div Y coordinate on an optional
 * axis. Uses the first axis by default.
 *
 * returns a single value or null if y is null.
 */
Dygraph.prototype.toDomYCoord = function(y, axis) {
  // toPercentYCoord handles both the null check and log-scale axes.
  var pct = this.toPercentYCoord(y, axis);
  if (pct === null) return null;
  var area = this.plotter_.area;
  return area.y + pct * area.h;
};
/**
 * Convert from canvas/div coords to data coordinates.
 * If specified, do this conversion for the coordinate system of a particular
 * axis. Uses the first axis by default.
 * Returns a two-element array: [X, Y].
 *
 * Note: use toDataXCoord instead of toDataCoords(x, null) and use toDataYCoord
 * instead of toDataCoords(null, y, axis).
 */
Dygraph.prototype.toDataCoords = function(x, y, axis) {
  var dataX = this.toDataXCoord(x);
  var dataY = this.toDataYCoord(y, axis);
  return [dataX, dataY];
};
/**
 * Convert from canvas/div x coordinate to data coordinate.
 *
 * If x is null, this returns null.
 */
Dygraph.prototype.toDataXCoord = function(x) {
  if (x === null) return null;
  var area = this.plotter_.area;
  var xRange = this.xAxisRange();
  // Fraction of the way across the plot area, mapped into the x-range.
  var frac = (x - area.x) / area.w;
  return xRange[0] + frac * (xRange[1] - xRange[0]);
};
/**
 * Convert from canvas/div y coord to value.
 *
 * If y is null, this returns null.
 * if axis is null, this uses the first axis.
 */
Dygraph.prototype.toDataYCoord = function(y, axis) {
  if (y === null) {
    return null;
  }
  var area = this.plotter_.area;
  var yRange = this.yAxisRange(axis);
  // Defaulting axis here, after the yAxisRange() call, is safe because
  // yAxisRange() itself treats an undefined index as 0.
  if (typeof(axis) == "undefined") axis = 0;
  if (!this.axes_[axis].logscale) {
    // Linear axis: invert the linear pixel -> value mapping directly.
    return yRange[0] + (area.y + area.h - y) / area.h * (yRange[1] - yRange[0]);
  } else {
    // Computing the inverse of toDomCoord.
    var pct = (y - area.y) / area.h;
    // Computing the inverse of toPercentYCoord. The function was arrived at with
    // the following steps:
    //
    // Original calculation:
    // pct = (logr1 - Dygraph.log10(y)) / (logr1 - Dygraph.log10(yRange[0]));
    //
    // Move denominator to both sides:
    // pct * (logr1 - Dygraph.log10(yRange[0])) = logr1 - Dygraph.log10(y);
    //
    // subtract logr1, and take the negative value.
    // logr1 - (pct * (logr1 - Dygraph.log10(yRange[0]))) = Dygraph.log10(y);
    //
    // Swap both sides of the equation, and we can compute the log of the
    // return value. Which means we just need to use that as the exponent in
    // e^exponent.
    // Dygraph.log10(y) = logr1 - (pct * (logr1 - Dygraph.log10(yRange[0])));
    var logr1 = Dygraph.log10(yRange[1]);
    var exponent = logr1 - (pct * (logr1 - Dygraph.log10(yRange[0])));
    var value = Math.pow(Dygraph.LOG_SCALE, exponent);
    return value;
  }
};
/**
 * Converts a y for an axis to a percentage from the top to the
 * bottom of the drawing area.
 *
 * If the coordinate represents a value visible on the canvas, then
 * the value will be between 0 and 1, where 0 is the top of the canvas.
 * However, this method will return values outside the range, as
 * values can fall outside the canvas.
 *
 * If y is null, this returns null.
 * if axis is null, this uses the first axis.
 *
 * @param { Number } y The data y-coordinate.
 * @param { Number } [axis] The axis number on which the data coordinate lives.
 * @return { Number } A fraction in [0, 1] where 0 = the top edge.
 */
Dygraph.prototype.toPercentYCoord = function(y, axis) {
  if (y === null) return null;
  if (typeof(axis) == "undefined") axis = 0;
  var yRange = this.yAxisRange(axis);
  var logscale = this.attributes_.getForAxis("logscale", axis);
  if (logscale) {
    // Same formula as the linear case, but in log10 space.
    var logTop = Dygraph.log10(yRange[1]);
    return (logTop - Dygraph.log10(y)) / (logTop - Dygraph.log10(yRange[0]));
  }
  // (yRange[1] - y) is the distance from the top of the range;
  // dividing by the full span gives the fraction from the top.
  return (yRange[1] - y) / (yRange[1] - yRange[0]);
};
/**
 * Converts an x value to a percentage from the left to the right of
 * the drawing area.
 *
 * If the coordinate represents a value visible on the canvas, then
 * the value will be between 0 and 1, where 0 is the left of the canvas.
 * However, this method will return values outside the range, as
 * values can fall outside the canvas.
 *
 * If x is null, this returns null.
 * @param { Number } x The data x-coordinate.
 * @return { Number } A fraction in [0, 1] where 0 = the left edge.
 */
Dygraph.prototype.toPercentXCoord = function(x) {
  if (x === null) return null;
  var xRange = this.xAxisRange();
  var span = xRange[1] - xRange[0];
  return (x - xRange[0]) / span;
};
/**
 * Returns the number of columns (including the independent variable).
 * Falls back to the label count when no data rows have been parsed yet.
 * @return { Integer } The number of columns.
 */
Dygraph.prototype.numColumns = function() {
  if (this.rawData_[0]) {
    return this.rawData_[0].length;
  }
  return this.attr_("labels").length;
};
/**
 * Returns the number of rows (excluding any header/label row).
 * @return { Integer } The number of rows, less any header.
 */
Dygraph.prototype.numRows = function() {
  var data = this.rawData_;
  return data.length;
};
/**
 * Returns the full range of the x-axis, as determined by the most extreme
 * values in the data set. Not affected by zooming, visibility, etc.
 * TODO(danvk): merge w/ xAxisExtremes
 * @return { Array<Number> } A [low, high] pair ([0, 1] when there is no data)
 * @private
 */
Dygraph.prototype.fullXRange_ = function() {
  if (this.numRows() === 0) {
    // No data yet: use an arbitrary non-degenerate range.
    return [0, 1];
  }
  return [this.rawData_[0][0], this.rawData_[this.numRows() - 1][0]];
};
/**
 * Returns the value in the given row and column. If the row and column exceed
 * the bounds on the data, returns null. Also returns null if the value is
 * missing.
 * @param { Number} row The row number of the data (0-based). Row 0 is the
 * first row of data, not a header row.
 * @param { Number} col The column number of the data (0-based)
 * @return { Number } The value in the specified cell or null if the row/col
 * were out of range.
 */
Dygraph.prototype.getValue = function(row, col) {
  // Use >= (not >) so that row == length / col == length is correctly
  // reported as out of bounds (returning null, as documented) rather than
  // triggering a TypeError on the undefined row below.
  if (row < 0 || row >= this.rawData_.length) return null;
  if (col < 0 || col >= this.rawData_[row].length) return null;
  return this.rawData_[row][col];
};
/**
 * Generates interface elements for the Dygraph: a containing div, a div to
 * display the current point, and a textbox to adjust the rolling average
 * period. Also creates the Renderer/Layout elements.
 * @private
 */
Dygraph.prototype.createInterface_ = function() {
  // Create the all-enclosing graph div
  var enclosing = this.maindiv_;
  this.graphDiv = document.createElement("div");
  this.graphDiv.style.width = this.width_ + "px";
  this.graphDiv.style.height = this.height_ + "px";
  enclosing.appendChild(this.graphDiv);
  // Create the canvas for interactive parts of the chart.
  this.canvas_ = Dygraph.createCanvas();
  this.canvas_.style.position = "absolute";
  this.canvas_.width = this.width_;
  this.canvas_.height = this.height_;
  this.canvas_.style.width = this.width_ + "px"; // for IE
  this.canvas_.style.height = this.height_ + "px"; // for IE
  this.canvas_ctx_ = Dygraph.getContext(this.canvas_);
  // ... and for static parts of the chart.
  this.hidden_ = this.createPlotKitCanvas_(this.canvas_);
  this.hidden_ctx_ = Dygraph.getContext(this.hidden_);
  // The interactive parts of the graph are drawn on top of the chart.
  this.graphDiv.appendChild(this.hidden_);
  this.graphDiv.appendChild(this.canvas_);
  this.mouseEventElement_ = this.createMouseEventElement_();
  // Create the grapher
  this.layout_ = new DygraphLayout(this);
  // Captured for the event closures below.
  var dygraph = this;
  // Don't recreate and register the handlers on subsequent calls.
  // This happens when the graph is resized.
  if (!this.mouseMoveHandler_) {
    this.mouseMoveHandler_ = function(e) {
      dygraph.mouseMove_(e);
    };
    this.addEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler_);
    this.mouseOutHandler_ = function(e) {
      dygraph.mouseOut_(e);
    };
    this.addEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler_);
    this.resizeHandler_ = function(e) {
      dygraph.resize();
    };
    // Update when the window is resized.
    // TODO(danvk): drop frames depending on complexity of the chart.
    this.addEvent(window, 'resize', this.resizeHandler_);
  }
};
/**
 * Detach DOM elements in the dygraph and null out all data references.
 * Calling this when you're done with a dygraph can dramatically reduce memory
 * usage. See, e.g., the tests/perf.html example.
 */
Dygraph.prototype.destroy = function() {
  // Depth-first removal of all DOM children under a node.
  var removeRecursive = function(node) {
    while (node.hasChildNodes()) {
      removeRecursive(node.firstChild);
      node.removeChild(node.firstChild);
    }
  };
  // Unregister everything tracked via this.registeredEvents_.
  for (var idx = 0; idx < this.registeredEvents_.length; idx++) {
    var reg = this.registeredEvents_[idx];
    Dygraph.removeEvent(reg.elem, reg.type, reg.fn);
  }
  this.registeredEvents_ = [];
  // remove mouse event handlers (This may not be necessary anymore)
  Dygraph.removeEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler_);
  Dygraph.removeEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler_);
  // NOTE(review): mouseUpHandler_ is removed under the 'mousemove' event type
  // here — presumably it was registered as 'mouseup' elsewhere; verify the
  // event name (and target element) before relying on this cleanup.
  Dygraph.removeEvent(this.mouseEventElement_, 'mousemove', this.mouseUpHandler_);
  // remove window handlers
  Dygraph.removeEvent(window,'resize',this.resizeHandler_);
  this.resizeHandler_ = null;
  removeRecursive(this.maindiv_);
  // Null out object-valued fields so large structures can be GC'd even if
  // something retains a reference to this Dygraph.
  var nullOut = function(obj) {
    for (var n in obj) {
      if (typeof(obj[n]) === 'object') {
        obj[n] = null;
      }
    }
  };
  // These may not all be necessary, but it can't hurt...
  nullOut(this.layout_);
  nullOut(this.plotter_);
  nullOut(this);
};
/**
 * Creates the canvas on which the chart will be drawn. Only the Renderer ever
 * draws on this particular canvas. All Dygraph work (i.e. drawing hover dots
 * or the zoom rectangles) is done on this.canvas_.
 * @param {Object} canvas The Dygraph canvas over which to overlay the plot
 * @return {Object} The newly-created canvas
 * @private
 */
Dygraph.prototype.createPlotKitCanvas_ = function(canvas) {
  var plotCanvas = Dygraph.createCanvas();
  // TODO(danvk): this canvas should be offset from the interaction canvas,
  // which needs extra area to make it easier to zoom in on the far left and
  // far right. This one must be precisely the plot area so clipping occurs.
  plotCanvas.style.position = "absolute";
  plotCanvas.style.top = canvas.style.top;
  plotCanvas.style.left = canvas.style.left;
  plotCanvas.width = this.width_;
  plotCanvas.height = this.height_;
  // IE needs explicit CSS dimensions in addition to the canvas attributes.
  plotCanvas.style.width = this.width_ + "px";
  plotCanvas.style.height = this.height_ + "px";
  return plotCanvas;
};
/**
 * Creates an overlay element used to handle mouse events.
 * Under excanvas (old IE) a transparent div is layered over the chart;
 * otherwise the interaction canvas itself receives the events.
 * @return {Object} The mouse event element.
 * @private
 */
Dygraph.prototype.createMouseEventElement_ = function() {
  if (!this.isUsingExcanvas_) {
    return this.canvas_;
  }
  var elem = document.createElement("div");
  elem.style.position = 'absolute';
  elem.style.backgroundColor = 'white';
  elem.style.filter = 'alpha(opacity=0)';  // fully transparent in IE
  elem.style.width = this.width_ + "px";
  elem.style.height = this.height_ + "px";
  this.graphDiv.appendChild(elem);
  return elem;
};
/**
 * Generate a set of distinct colors for the data series. This is done with a
 * color wheel. Saturation/Value are customizable, and the hue is
 * equally-spaced around the color wheel. If a custom set of colors is
 * specified, that is used instead. Invisible series are skipped.
 * @private
 */
Dygraph.prototype.setColors_ = function() {
  var labels = this.getLabels();
  var numSeries = labels.length - 1;
  this.colors_ = [];
  this.colorsMap_ = {};
  var userColors = this.attr_('colors');
  var i;
  if (userColors) {
    // Cycle through the user-supplied palette.
    for (i = 0; i < numSeries; i++) {
      if (!this.visibility()[i]) continue;
      var color = userColors[i % userColors.length];
      this.colors_.push(color);
      this.colorsMap_[labels[1 + i]] = color;
    }
  } else {
    var sat = this.attr_('colorSaturation') || 1.0;
    var val = this.attr_('colorValue') || 0.5;
    var half = Math.ceil(numSeries / 2);
    for (i = 1; i <= numSeries; i++) {
      if (!this.visibility()[i - 1]) continue;
      // alternate colors for high contrast.
      var idx = i % 2 ? Math.ceil(i / 2) : (half + i / 2);
      var hue = (1.0 * idx / (1 + numSeries));
      var color = Dygraph.hsvToRGB(hue, sat, val);
      this.colors_.push(color);
      this.colorsMap_[labels[i]] = color;
    }
  }
};
/**
 * Return the list of colors. This is either the list of colors passed in the
 * attributes or the autogenerated list of rgb(r,g,b) strings.
 * This does not return colors for invisible series.
 * NOTE: the internal array itself is returned (no defensive copy) — treat it
 * as read-only.
 * @return {Array<string>} The list of colors.
 */
Dygraph.prototype.getColors = function() {
  return this.colors_;
};
/**
 * Returns a few attributes of a series, i.e. its color, its visibility, which
 * axis it's assigned to, and its column in the original data.
 * Returns null if the series does not exist.
 * Otherwise, returns an object with column, visibility, color and axis properties.
 * The "axis" property will be set to 1 for y1 and 2 for y2.
 * The "column" property can be fed back into getValue(row, column) to get
 * values for this series.
 */
Dygraph.prototype.getPropertiesForSeries = function(series_name) {
  var labels = this.getLabels();
  // Find the data column for this series (column 0 is the x-axis).
  var column = -1;
  for (var i = 1; i < labels.length; i++) {
    if (labels[i] == series_name) {
      column = i;
      break;
    }
  }
  if (column == -1) return null;
  return {
    name: series_name,
    column: column,
    visible: this.visibility()[column - 1],
    color: this.colorsMap_[series_name],
    axis: 1 + this.attributes_.axisForSeries(series_name)
  };
};
/**
 * Create (or reposition) the text box used to adjust the rolling-average
 * period. The box sits in the bottom-left corner of the plot area and is
 * only shown when the 'showRoller' option is set.
 * @private
 */
Dygraph.prototype.createRollInterface_ = function() {
  // Lazily create the input element on first call.
  if (!this.roller_) {
    var input = document.createElement("input");
    input.type = "text";
    input.style.display = "none";
    this.graphDiv.appendChild(input);
    this.roller_ = input;
  }
  var area = this.plotter_.area;
  var cssProps = {
    "position": "absolute",
    "zIndex": 10,
    "top": (area.y + area.h - 25) + "px",
    "left": (area.x + 1) + "px",
    "display": this.attr_('showRoller') ? 'block' : 'none'
  };
  this.roller_.size = "2";
  this.roller_.value = this.rollPeriod_;
  for (var prop in cssProps) {
    if (cssProps.hasOwnProperty(prop)) {
      this.roller_.style[prop] = cssProps[prop];
    }
  }
  var self = this;
  this.roller_.onchange = function() { self.adjustRoll(self.roller_.value); };
};
/**
 * Converts the page x-coordinate of a mouse event to a pixel x-coordinate
 * on the canvas (DOM coords), using the canvas origin stored in context.px.
 * @private
 */
Dygraph.prototype.dragGetX_ = function(e, context) {
  var pageX = Dygraph.pageX(e);
  return pageX - context.px;
};
/**
 * Converts the page y-coordinate of a mouse event to a pixel y-coordinate
 * on the canvas (DOM coords), using the canvas origin stored in context.py.
 * @private
 */
Dygraph.prototype.dragGetY_ = function(e, context) {
  var pageY = Dygraph.pageY(e);
  return pageY - context.py;
};
/**
 * Set up all the mouse handlers needed to capture dragging behavior for zoom
 * events. Handlers come from the 'interactionModel' option: each key is a
 * DOM event name and each value a handler invoked as
 * handler(event, graph, context). The shared context object below carries
 * drag state between handler invocations; a document-level mouseup handler
 * cleans that state up when the button is released outside the canvas.
 * @private
 */
Dygraph.prototype.createDragInterface_ = function() {
  var context = {
    // Tracks whether the mouse is down right now
    isZooming: false,
    isPanning: false, // is this drag part of a pan?
    is2DPan: false, // if so, is that pan 1- or 2-dimensional?
    dragStartX: null, // pixel coordinates
    dragStartY: null, // pixel coordinates
    dragEndX: null, // pixel coordinates
    dragEndY: null, // pixel coordinates
    dragDirection: null,
    prevEndX: null, // pixel coordinates
    prevEndY: null, // pixel coordinates
    prevDragDirection: null,
    cancelNextDblclick: false, // see comment in dygraph-interaction-model.js
    // The value on the left side of the graph when a pan operation starts.
    initialLeftmostDate: null,
    // The number of units each pixel spans. (This won't be valid for log
    // scales)
    xUnitsPerPixel: null,
    // TODO(danvk): update this comment
    // The range in second/value units that the viewport encompasses during a
    // panning operation.
    dateRange: null,
    // Top-left corner of the canvas, in DOM coords
    // TODO(konigsberg): Rename topLeftCanvasX, topLeftCanvasY.
    px: 0,
    py: 0,
    // Values for use with panEdgeFraction, which limit how far outside the
    // graph's data boundaries it can be panned.
    boundedDates: null, // [minDate, maxDate]
    boundedValues: null, // [[minValue, maxValue] ...]
    // We cover iframes during mouse interactions. See comments in
    // dygraph-utils.js for more info on why this is a good idea.
    tarp: new Dygraph.IFrameTarp(),
    // contextB is the same thing as this context object but renamed.
    initializeMouseDown: function(event, g, contextB) {
      // prevents mouse drags from selecting page text.
      if (event.preventDefault) {
        event.preventDefault(); // Firefox, Chrome, etc.
      } else {
        event.returnValue = false; // IE
        event.cancelBubble = true;
      }
      // Record the canvas origin and the drag's starting point (in canvas
      // pixel coordinates) so later handlers can compute drag deltas.
      contextB.px = Dygraph.findPosX(g.canvas_);
      contextB.py = Dygraph.findPosY(g.canvas_);
      contextB.dragStartX = g.dragGetX_(event, contextB);
      contextB.dragStartY = g.dragGetY_(event, contextB);
      contextB.cancelNextDblclick = false;
      contextB.tarp.cover();
    }
  };
  var interactionModel = this.attr_("interactionModel");
  // Self is the graph.
  var self = this;
  // Function that binds the graph and context to the handler.
  var bindHandler = function(handler) {
    return function(event) {
      handler(event, self, context);
    };
  };
  // Register every handler from the interaction model on the mouse event
  // element, each bound to this graph and the shared context above.
  for (var eventName in interactionModel) {
    if (!interactionModel.hasOwnProperty(eventName)) continue;
    this.addEvent(this.mouseEventElement_, eventName,
        bindHandler(interactionModel[eventName]));
  }
  // If the user releases the mouse button during a drag, but not over the
  // canvas, then it doesn't count as a zooming action.
  this.mouseUpHandler_ = function(event) {
    if (context.isZooming || context.isPanning) {
      context.isZooming = false;
      context.dragStartX = null;
      context.dragStartY = null;
    }
    if (context.isPanning) {
      context.isPanning = false;
      context.draggingDate = null;
      context.dateRange = null;
      // Drop per-axis panning state that interaction handlers may have set.
      for (var i = 0; i < self.axes_.length; i++) {
        delete self.axes_[i].draggingValue;
        delete self.axes_[i].dragValueRange;
      }
    }
    context.tarp.uncover();
  };
  this.addEvent(document, 'mouseup', this.mouseUpHandler_);
};
/**
 * Draw a gray zoom rectangle over the desired area of the canvas, after
 * clearing any rectangle drawn by the previous call. This could be
 * optimized to avoid extra redrawing, but it's tricky to avoid
 * interactions with the status dots.
 *
 * @param {Number} direction the direction of the zoom rectangle. Acceptable
 *     values are Dygraph.HORIZONTAL and Dygraph.VERTICAL.
 * @param {Number} startX The X position where the drag started, in canvas
 *     coordinates.
 * @param {Number} endX The current X position of the drag, in canvas coords.
 * @param {Number} startY The Y position where the drag started, in canvas
 *     coordinates.
 * @param {Number} endY The current Y position of the drag, in canvas coords.
 * @param {Number} prevDirection the value of direction on the previous call
 *     to this function. Used to avoid excess redrawing.
 * @param {Number} prevEndX The value of endX on the previous call. Used to
 *     avoid excess redrawing.
 * @param {Number} prevEndY The value of endY on the previous call. Used to
 *     avoid excess redrawing.
 * @private
 */
Dygraph.prototype.drawZoomRect_ = function(direction, startX, endX, startY,
                                           endY, prevDirection, prevEndX,
                                           prevEndY) {
  var ctx = this.canvas_ctx_;
  var area = this.layout_.getPlotArea();
  // Erase the rectangle from the previous call, if there was one.
  if (prevDirection == Dygraph.HORIZONTAL) {
    ctx.clearRect(Math.min(startX, prevEndX), area.y,
                  Math.abs(startX - prevEndX), area.h);
  } else if (prevDirection == Dygraph.VERTICAL) {
    ctx.clearRect(area.x, Math.min(startY, prevEndY),
                  area.w, Math.abs(startY - prevEndY));
  }
  // Paint a light-grey rectangle showing the new viewing area.
  if (direction == Dygraph.HORIZONTAL) {
    if (endX && startX) {
      ctx.fillStyle = "rgba(128,128,128,0.33)";
      ctx.fillRect(Math.min(startX, endX), area.y,
                   Math.abs(endX - startX), area.h);
    }
  } else if (direction == Dygraph.VERTICAL) {
    if (endY && startY) {
      ctx.fillStyle = "rgba(128,128,128,0.33)";
      ctx.fillRect(area.x, Math.min(startY, endY),
                   area.w, Math.abs(endY - startY));
    }
  }
  // Excanvas (old IE) redraws clobber the rect; remember the args so
  // updateSelection_ can redraw it.
  if (this.isUsingExcanvas_) {
    this.currentZoomRectArgs_ = [direction, startX, endX, startY, endY, 0, 0, 0];
  }
};
/**
 * Clear the zoom rectangle without performing any zoom.
 * @private
 */
Dygraph.prototype.clearZoomRect_ = function() {
  this.currentZoomRectArgs_ = null;
  var canvas = this.canvas_;
  this.canvas_ctx_.clearRect(0, 0, canvas.width, canvas.height);
};
/**
 * Zoom to something containing [lowX, highX]. These are pixel coordinates
 * in the canvas; the exact zoom window may be slightly larger if there are
 * no data points near lowX or highX. Don't confuse this function with
 * doZoomXDates_, which accepts dates matching the raw data. Redraws the
 * graph.
 *
 * @param {Number} lowX The leftmost pixel value that should be visible.
 * @param {Number} highX The rightmost pixel value that should be visible.
 * @private
 */
Dygraph.prototype.doZoomX_ = function(lowX, highX) {
  this.currentZoomRectArgs_ = null;
  // Convert the pixel range to data-coordinate dates and delegate.
  var minDate = this.toDataXCoord(lowX);
  var maxDate = this.toDataXCoord(highX);
  this.doZoomXDates_(minDate, maxDate);
};
/**
 * Easing function used by animated zooms. For frame in [1, numFrames],
 * returns a fraction between 0.0 (totally old values) and 1.0 (totally new
 * values); the exponential base k controls how the steps are spaced.
 * @private
 */
Dygraph.zoomAnimationFunction = function(frame, numFrames) {
  var k = 1.5;
  var numerator = 1.0 - Math.pow(k, -frame);
  var denominator = 1.0 - Math.pow(k, -numFrames);
  return numerator / denominator;
};
/**
 * Zoom to something containing [minDate, maxDate] values. Don't confuse
 * this method with doZoomX_, which accepts pixel coordinates. Redraws the
 * graph (possibly animated) and then fires the zoomCallback, if any.
 *
 * @param {Number} minDate The minimum date that should be visible.
 * @param {Number} maxDate The maximum date that should be visible.
 * @private
 */
Dygraph.prototype.doZoomXDates_ = function(minDate, maxDate) {
  // TODO(danvk): when yAxisRange is null (i.e. "fit to data", the animation
  // can produce strange effects. Rather than the y-axis transitioning slowly
  // between values, it can jerk around.)
  var oldWindow = this.xAxisRange();
  var newWindow = [minDate, maxDate];
  this.zoomed_x_ = true;
  var self = this;
  this.doAnimatedZoom(oldWindow, newWindow, null, null, function() {
    var zoomCallback = self.attr_("zoomCallback");
    if (zoomCallback) {
      zoomCallback(minDate, maxDate, self.yAxisRanges());
    }
  });
};
/**
 * Zoom to something containing [lowY, highY]. These are pixel coordinates
 * in the canvas. Redraws the graph (possibly animated) and then fires the
 * zoomCallback, if any.
 *
 * @param {Number} lowY The topmost pixel value that should be visible.
 * @param {Number} highY The lowest pixel value that should be visible.
 * @private
 */
Dygraph.prototype.doZoomY_ = function(lowY, highY) {
  this.currentZoomRectArgs_ = null;
  // Pixel Y increases downward while data Y increases upward, so the top
  // pixel (lowY) maps to the larger data value on each axis.
  var oldValueRanges = this.yAxisRanges();
  var newValueRanges = [];
  for (var axisIdx = 0; axisIdx < this.axes_.length; axisIdx++) {
    var maxValue = this.toDataYCoord(lowY, axisIdx);
    var minValue = this.toDataYCoord(highY, axisIdx);
    newValueRanges.push([minValue, maxValue]);
  }
  this.zoomed_y_ = true;
  var self = this;
  this.doAnimatedZoom(null, null, oldValueRanges, newValueRanges, function() {
    var zoomCallback = self.attr_("zoomCallback");
    if (zoomCallback) {
      var xRange = self.xAxisRange();
      zoomCallback(xRange[0], xRange[1], self.yAxisRanges());
    }
  });
};
/**
 * Reset the zoom to the original view coordinates. This is the same as
 * double-clicking on the graph. Clears both the x-window and every axis's
 * valueWindow, redraws (animated if 'animatedZooms' is set) and fires the
 * zoomCallback, if any. A no-op when nothing is zoomed.
 */
Dygraph.prototype.resetZoom = function() {
  // Work out which dimensions are actually zoomed so we only reset those.
  var dirty = false, dirtyX = false, dirtyY = false;
  if (this.dateWindow_ !== null) {
    dirty = true;
    dirtyX = true;
  }
  for (var i = 0; i < this.axes_.length; i++) {
    if (typeof(this.axes_[i].valueWindow) !== 'undefined' && this.axes_[i].valueWindow !== null) {
      dirty = true;
      dirtyY = true;
    }
  }
  // Clear any selection, since it's likely to be drawn in the wrong place.
  this.clearSelection();
  if (dirty) {
    this.zoomed_x_ = false;
    this.zoomed_y_ = false;
    // Full extent of the raw data along x (first and last rows).
    var minDate = this.rawData_[0][0];
    var maxDate = this.rawData_[this.rawData_.length - 1][0];
    // With only one frame, don't bother calculating extreme ranges.
    // TODO(danvk): merge this block w/ the code below.
    if (!this.attr_("animatedZooms")) {
      // Non-animated path: clear windows, redraw once, fire the callback.
      this.dateWindow_ = null;
      for (i = 0; i < this.axes_.length; i++) {
        if (this.axes_[i].valueWindow !== null) {
          delete this.axes_[i].valueWindow;
        }
      }
      this.drawGraph_();
      if (this.attr_("zoomCallback")) {
        this.attr_("zoomCallback")(minDate, maxDate, this.yAxisRanges());
      }
      return;
    }
    // Animated path: compute old/new ranges only for the dirty dimensions
    // (a null range tells doAnimatedZoom to leave that dimension alone).
    var oldWindow=null, newWindow=null, oldValueRanges=null, newValueRanges=null;
    if (dirtyX) {
      oldWindow = this.xAxisRange();
      newWindow = [minDate, maxDate];
    }
    if (dirtyY) {
      oldValueRanges = this.yAxisRanges();
      // TODO(danvk): this is pretty inefficient
      var packed = this.gatherDatasets_(this.rolledSeries_, null);
      var extremes = packed[1];
      // this has the side-effect of modifying this.axes_.
      // this doesn't make much sense in this context, but it's convenient (we
      // need this.axes_[*].extremeValues) and not harmful since we'll be
      // calling drawGraph_ shortly, which clobbers these values.
      this.computeYAxisRanges_(extremes);
      newValueRanges = [];
      for (i = 0; i < this.axes_.length; i++) {
        var axis = this.axes_[i];
        newValueRanges.push((axis.valueRange !== null &&
                             axis.valueRange !== undefined) ?
                            axis.valueRange : axis.extremeRange);
      }
    }
    var that = this;
    this.doAnimatedZoom(oldWindow, newWindow, oldValueRanges, newValueRanges,
        function() {
          // Final frame: drop the zoom state for real, then notify.
          that.dateWindow_ = null;
          for (var i = 0; i < that.axes_.length; i++) {
            if (that.axes_[i].valueWindow !== null) {
              delete that.axes_[i].valueWindow;
            }
          }
          if (that.attr_("zoomCallback")) {
            that.attr_("zoomCallback")(minDate, maxDate, that.yAxisRanges());
          }
        });
  }
};
/**
 * Combined animation logic for all zoom functions.
 * Either the x parameters or y parameters may be null; a null pair means
 * that dimension is left untouched during the animation.
 * @param {Array|null} oldXRange [min, max] x-range before the zoom.
 * @param {Array|null} newXRange [min, max] x-range to animate to.
 * @param {Array|null} oldYRanges per-axis [min, max] y-ranges before.
 * @param {Array|null} newYRanges per-axis [min, max] y-ranges to animate to.
 * @param {function()} callback invoked after the final frame is drawn.
 * @private
 */
Dygraph.prototype.doAnimatedZoom = function(oldXRange, newXRange, oldYRanges, newYRanges, callback) {
  // A single step (i.e. no animation) unless 'animatedZooms' is enabled.
  var steps = this.attr_("animatedZooms") ? Dygraph.ANIMATION_STEPS : 1;
  var windows = [];
  var valueRanges = [];
  var step, frac;
  // Precompute the interpolated x-window for every animation frame.
  if (oldXRange !== null && newXRange !== null) {
    for (step = 1; step <= steps; step++) {
      frac = Dygraph.zoomAnimationFunction(step, steps);
      windows[step-1] = [oldXRange[0]*(1-frac) + frac*newXRange[0],
                         oldXRange[1]*(1-frac) + frac*newXRange[1]];
    }
  }
  // Precompute the interpolated y-ranges (one per axis) for every frame.
  if (oldYRanges !== null && newYRanges !== null) {
    for (step = 1; step <= steps; step++) {
      frac = Dygraph.zoomAnimationFunction(step, steps);
      var thisRange = [];
      for (var j = 0; j < this.axes_.length; j++) {
        thisRange.push([oldYRanges[j][0]*(1-frac) + frac*newYRanges[j][0],
                        oldYRanges[j][1]*(1-frac) + frac*newYRanges[j][1]]);
      }
      valueRanges[step-1] = thisRange;
    }
  }
  var that = this;
  Dygraph.repeatAndCleanup(function(step) {
    // Apply the precomputed ranges for this frame, then redraw.
    if (valueRanges.length) {
      for (var i = 0; i < that.axes_.length; i++) {
        var w = valueRanges[step][i];
        that.axes_[i].valueWindow = [w[0], w[1]];
      }
    }
    if (windows.length) {
      that.dateWindow_ = windows[step];
    }
    that.drawGraph_();
  }, steps, Dygraph.ANIMATION_DURATION / steps, callback);
};
/**
 * Get the current graph's plot area object.
 * @return {Object} an {x, y, w, h} rectangle.
 */
Dygraph.prototype.getArea = function() {
  return this.plotter_.area;
};
/**
 * Convert a mouse event to DOM coordinates relative to the graph origin
 * (the mouse event element).
 * @return {Array} a two-element array [X, Y].
 */
Dygraph.prototype.eventToDomCoords = function(event) {
  var origin = this.mouseEventElement_;
  var x = Dygraph.pageX(event) - Dygraph.findPosX(origin);
  var y = Dygraph.pageY(event) - Dygraph.findPosY(origin);
  return [x, y];
};
/**
 * Given a canvas X coordinate, find the closest row by scanning every
 * valid point in every series and minimizing the horizontal distance.
 * @param {Number} domX graph-relative DOM X coordinate
 * @return {Number} row number (integer)
 * @private
 */
Dygraph.prototype.findClosestRow = function(domX) {
  var bestDist = Infinity;
  var bestPointIdx = -1, bestSetIdx = -1;
  var sets = this.layout_.points;
  for (var setIdx = 0; setIdx < sets.length; setIdx++) {
    var points = sets[setIdx];
    for (var ptIdx = 0; ptIdx < points.length; ptIdx++) {
      var pt = points[ptIdx];
      if (!Dygraph.isValidPoint(pt, true)) continue;
      var dist = Math.abs(pt.canvasx - domX);
      if (dist < bestDist) {
        bestDist = dist;
        bestSetIdx = setIdx;
        bestPointIdx = ptIdx;
      }
    }
  }
  // TODO(danvk): remove this function; it's trivial and has only one use.
  return this.idxToRow_(bestSetIdx, bestPointIdx);
};
/**
 * Given canvas X,Y coordinates, find the closest point.
 *
 * This finds the individual data point across all visible series
 * that's closest to the supplied DOM coordinates using the standard
 * Euclidean X,Y distance.
 *
 * @param {Number} domX graph-relative DOM X coordinate
 * @param {Number} domY graph-relative DOM Y coordinate
 * @return {Object} {row, seriesName, point}; seriesName and point are
 *     undefined when no valid point exists.
 * @private
 */
Dygraph.prototype.findClosestPoint = function(domX, domY) {
  var minDist = Infinity;
  var idx = -1;
  var dist, dx, dy, point, closestPoint, closestSeries;
  // Iterate series in reverse; with strict '<' a distance tie keeps the
  // point seen first (i.e. the higher series index).
  for ( var setIdx = this.layout_.datasets.length - 1 ; setIdx >= 0 ; --setIdx ) {
    var points = this.layout_.points[setIdx];
    for (var i = 0; i < points.length; ++i) {
      // Fixed: 'point' was previously redeclared with 'var' here, shadowing
      // the function-scope declaration above (JS 'var' has no block scope).
      point = points[i];
      if (!Dygraph.isValidPoint(point)) continue;
      dx = point.canvasx - domX;
      dy = point.canvasy - domY;
      // Compare squared distances; sqrt is unnecessary for ordering.
      dist = dx * dx + dy * dy;
      if (dist < minDist) {
        minDist = dist;
        closestPoint = point;
        closestSeries = setIdx;
        idx = i;
      }
    }
  }
  var name = this.layout_.setNames[closestSeries];
  return {
    row: idx + this.getLeftBoundary_(),
    seriesName: name,
    point: closestPoint
  };
};
/**
 * Given canvas X,Y coordinates, find the touched area in a stacked graph.
 *
 * This first finds the X data point closest to the supplied DOM X coordinate,
 * then finds the series which puts the Y coordinate on top of its filled area,
 * using linear interpolation between adjacent point pairs.
 *
 * @param {Number} domX graph-relative DOM X coordinate
 * @param {Number} domY graph-relative DOM Y coordinate
 * Returns: {row, seriesName, point}
 * @private
 */
Dygraph.prototype.findStackedPoint = function(domX, domY) {
  var row = this.findClosestRow(domX);
  var boundary = this.getLeftBoundary_();
  // Convert the absolute row number into an index into each series' points.
  var rowIdx = row - boundary;
  var closestPoint, closestSeries;
  for (var setIdx = 0; setIdx < this.layout_.datasets.length; ++setIdx) {
    var points = this.layout_.points[setIdx];
    if (rowIdx >= points.length) continue;
    var p1 = points[rowIdx];
    if (!Dygraph.isValidPoint(p1)) continue;
    // py is the series' canvas-Y at domX, linearly interpolated toward the
    // neighboring point on whichever side domX falls.
    var py = p1.canvasy;
    if (domX > p1.canvasx && rowIdx + 1 < points.length) {
      // interpolate series Y value using next point
      var p2 = points[rowIdx + 1];
      if (Dygraph.isValidPoint(p2)) {
        var dx = p2.canvasx - p1.canvasx;
        if (dx > 0) {
          var r = (domX - p1.canvasx) / dx;
          py += r * (p2.canvasy - p1.canvasy);
        }
      }
    } else if (domX < p1.canvasx && rowIdx > 0) {
      // interpolate series Y value using previous point
      var p0 = points[rowIdx - 1];
      if (Dygraph.isValidPoint(p0)) {
        var dx = p1.canvasx - p0.canvasx;
        if (dx > 0) {
          var r = (p1.canvasx - domX) / dx;
          py += r * (p0.canvasy - p1.canvasy);
        }
      }
    }
    // Stop if the point (domX, py) is above this series' upper edge
    if (setIdx === 0 || py < domY) {
      closestPoint = p1;
      closestSeries = setIdx;
    }
  }
  var name = this.layout_.setNames[closestSeries];
  return {
    row: row,
    seriesName: name,
    point: closestPoint
  };
};
/**
 * When the mouse moves in the canvas, display information about a nearby
 * data point and draw dots over those points in the data series. Cleanup of
 * previously-drawn dots is handled by the selection machinery.
 * @param {Object} event The mousemove event from the browser.
 * @private
 */
Dygraph.prototype.mouseMove_ = function(event) {
  // This prevents JS errors when mousing over the canvas before data loads.
  var points = this.layout_.points;
  if (points === undefined || points === null) return;
  var coords = this.eventToDomCoords(event);
  var canvasx = coords[0];
  var canvasy = coords[1];
  var highlightSeriesOpts = this.attr_("highlightSeriesOpts");
  var selectionChanged = false;
  if (highlightSeriesOpts && !this.isSeriesLocked()) {
    // Series-highlight mode: select the single closest point/series.
    var closest = this.attr_("stackedGraph") ?
        this.findStackedPoint(canvasx, canvasy) :
        this.findClosestPoint(canvasx, canvasy);
    selectionChanged = this.setSelection(closest.row, closest.seriesName);
  } else {
    // Default mode: highlight the whole row nearest the cursor's X.
    var closestRow = this.findClosestRow(canvasx);
    selectionChanged = this.setSelection(closestRow);
  }
  var highlightCallback = this.attr_("highlightCallback");
  if (highlightCallback && selectionChanged) {
    highlightCallback(event, this.lastx_, this.selPoints_, this.lastRow_, this.highlightSet_);
  }
};
/**
 * Fetch left offset from first defined boundaryIds record (see bug #236).
 * @return {Number} the left boundary index, or 0 if none is defined.
 * @private
 */
Dygraph.prototype.getLeftBoundary_ = function() {
  var ids = this.boundaryIds_;
  for (var i = 0; i < ids.length; i++) {
    if (ids[i] !== undefined) {
      return ids[i][0];
    }
  }
  return 0;
};
/**
 * Transforms a layout_.points index into a data row number.
 * @param {Number} setIdx index of the point set (unused by the current
 *     implementation; kept for interface compatibility).
 * @param {Number} rowIdx index of the point within its set.
 * @return {Number} row number, or -1 if rowIdx is negative.
 * @private
 */
Dygraph.prototype.idxToRow_ = function(setIdx, rowIdx) {
  if (rowIdx < 0) return -1;
  return this.getLeftBoundary_() + rowIdx;
};
/**
 * Animate the selection overlay fading in (direction > 0) or out
 * (direction < 0) by stepping this.fadeLevel between 0 and totalSteps.
 * When fading out reaches level 0, the selection is cleared.
 * @param {Number} direction +1 to fade in, -1 to fade out.
 * @private
 */
Dygraph.prototype.animateSelection_ = function(direction) {
  var totalSteps = 10;
  var millis = 30;
  if (this.fadeLevel === undefined) this.fadeLevel = 0;
  if (this.animateId === undefined) this.animateId = 0;
  var start = this.fadeLevel;
  // Number of steps remaining in the requested direction.
  var steps = direction < 0 ? start : totalSteps - start;
  if (steps <= 0) {
    // Already at the target level; redraw the selection at full opacity if
    // one is currently visible.
    if (this.fadeLevel) {
      this.updateSelection_(1.0);
    }
    return;
  }
  // Bump the animation id so any in-flight animation becomes a no-op.
  var thisId = ++this.animateId;
  var that = this;
  Dygraph.repeatAndCleanup(
    function(n) {
      // ignore simultaneous animations
      if (that.animateId != thisId) return;
      that.fadeLevel += direction;
      if (that.fadeLevel === 0) {
        that.clearSelection();
      } else {
        that.updateSelection_(that.fadeLevel / totalSteps);
      }
    },
    steps, millis, function() {});
};
/**
 * Draw dots over the selected points in the data series. This function
 * takes care of cleanup of previously-drawn dots.
 * @param {Number=} opt_animFraction opacity fraction in (0, 1] for the
 *     background fade animation; undefined starts a new fade animation.
 * @private
 */
Dygraph.prototype.updateSelection_ = function(opt_animFraction) {
  /*var defaultPrevented = */
  this.cascadeEvents_('select', {
    selectedX: this.lastx_,
    selectedPoints: this.selPoints_
  });
  // TODO(danvk): use defaultPrevented here?
  // Clear the previously drawn vertical, if there is one
  var i;
  var ctx = this.canvas_ctx_;
  if (this.attr_('highlightSeriesOpts')) {
    // Series-highlight mode: wash out the whole chart, then redraw just the
    // highlighted series on the interactive canvas.
    ctx.clearRect(0, 0, this.width_, this.height_);
    var alpha = 1.0 - this.attr_('highlightSeriesBackgroundAlpha');
    if (alpha) {
      // Activating background fade includes an animation effect for a gradual
      // fade. TODO(klausw): make this independently configurable if it causes
      // issues? Use a shared preference to control animations?
      var animateBackgroundFade = true;
      if (animateBackgroundFade) {
        if (opt_animFraction === undefined) {
          // start a new animation
          this.animateSelection_(1);
          return;
        }
        alpha *= opt_animFraction;
      }
      ctx.fillStyle = 'rgba(255,255,255,' + alpha + ')';
      ctx.fillRect(0, 0, this.width_, this.height_);
    }
    // Redraw only the highlighted series in the interactive canvas (not the
    // static plot canvas, which is where series are usually drawn).
    this.plotter_._renderLineChart(this.highlightSet_, ctx);
  } else if (this.previousVerticalX_ >= 0) {
    // Determine the maximum highlight circle size.
    var maxCircleSize = 0;
    var labels = this.attr_('labels');
    for (i = 1; i < labels.length; i++) {
      var r = this.attr_('highlightCircleSize', labels[i]);
      if (r > maxCircleSize) maxCircleSize = r;
    }
    // Clear a vertical strip wide enough to cover the largest circle drawn
    // at the previous selection's x position.
    var px = this.previousVerticalX_;
    ctx.clearRect(px - maxCircleSize - 1, 0,
                  2 * maxCircleSize + 2, this.height_);
  }
  // Excanvas (old IE) loses the zoom rect on clear; redraw it from the
  // saved arguments.
  if (this.isUsingExcanvas_ && this.currentZoomRectArgs_) {
    Dygraph.prototype.drawZoomRect_.apply(this, this.currentZoomRectArgs_);
  }
  if (this.selPoints_.length > 0) {
    // Draw colored circles over the center of each selected point
    var canvasx = this.selPoints_[0].canvasx;
    ctx.save();
    for (i = 0; i < this.selPoints_.length; i++) {
      var pt = this.selPoints_[i];
      if (!Dygraph.isOK(pt.canvasy)) continue;
      // Circle size, draw callback and color are all per-series options.
      var circleSize = this.attr_('highlightCircleSize', pt.name);
      var callback = this.attr_("drawHighlightPointCallback", pt.name);
      var color = this.plotter_.colors[pt.name];
      if (!callback) {
        callback = Dygraph.Circles.DEFAULT;
      }
      ctx.lineWidth = this.attr_('strokeWidth', pt.name);
      ctx.strokeStyle = color;
      ctx.fillStyle = color;
      callback(this.g, pt.name, ctx, canvasx, pt.canvasy,
          color, circleSize);
    }
    ctx.restore();
    // Remember where we drew so the next call can erase just that strip.
    this.previousVerticalX_ = canvasx;
  }
};
/**
 * Manually set the selected points and display information about them in the
 * legend. The selection can be cleared using clearSelection() and queried
 * using getSelection().
 * @param { Integer } row number that should be highlighted (i.e. appear with
 * hover dots on the chart). Set to false to clear any selection.
 * @param { seriesName } optional series name to highlight that series with the
 * the highlightSeriesOpts setting.
 * @param { locked } optional If true, keep seriesName selected when mousing
 * over the graph, disabling closest-series highlighting. Call clearSelection()
 * to unlock it.
 * @return {boolean} true if the selection actually changed.
 */
Dygraph.prototype.setSelection = function(row, opt_seriesName, opt_locked) {
  // Extract the points we've selected
  this.selPoints_ = [];
  if (row !== false) {
    // Convert the absolute row number into an index into layout_.points.
    row -= this.getLeftBoundary_();
  }
  var changed = false;
  if (row !== false && row >= 0) {
    if (row != this.lastRow_) changed = true;
    this.lastRow_ = row;
    // Collect the point at this row from every series that has one.
    for (var setIdx = 0; setIdx < this.layout_.datasets.length; ++setIdx) {
      var set = this.layout_.datasets[setIdx];
      if (row < set.length) {
        var point = this.layout_.points[setIdx][row];
        if (this.attr_("stackedGraph")) {
          // For stacked graphs, report the unstacked (original) value.
          point = this.layout_.unstackPointAtIndex(setIdx, row);
        }
        if (point.yval !== null) this.selPoints_.push(point);
      }
    }
  } else {
    if (this.lastRow_ >= 0) changed = true;
    this.lastRow_ = -1;
  }
  if (this.selPoints_.length) {
    this.lastx_ = this.selPoints_[0].xval;
  } else {
    this.lastx_ = -1;
  }
  if (opt_seriesName !== undefined) {
    if (this.highlightSet_ !== opt_seriesName) changed = true;
    this.highlightSet_ = opt_seriesName;
  }
  if (opt_locked !== undefined) {
    this.lockedSet_ = opt_locked;
  }
  // Only redraw the overlay when the selection actually changed.
  if (changed) {
    this.updateSelection_(undefined);
  }
  return changed;
};
/**
 * The mouse has left the canvas. Clear out whatever artifacts remain,
 * unless the selection is locked or hiding is disabled.
 * @param {Object} event the mouseout event from the browser.
 * @private
 */
Dygraph.prototype.mouseOut_ = function(event) {
  var unhighlightCallback = this.attr_("unhighlightCallback");
  if (unhighlightCallback) {
    unhighlightCallback(event);
  }
  if (this.attr_("hideOverlayOnMouseOut") && !this.lockedSet_) {
    this.clearSelection();
  }
};
/**
 * Clears the current selection (i.e. points that were highlighted by moving
 * the mouse over the chart), unlocking any locked series.
 */
Dygraph.prototype.clearSelection = function() {
  this.cascadeEvents_('deselect', {});
  this.lockedSet_ = false;
  // If a highlight fade is visible, reverse it; the animation calls back
  // into clearSelection once fadeLevel reaches zero.
  if (this.fadeLevel) {
    this.animateSelection_(-1);
    return;
  }
  // Wipe the overlay canvas and reset all selection state.
  this.canvas_ctx_.clearRect(0, 0, this.width_, this.height_);
  this.fadeLevel = 0;
  this.selPoints_ = [];
  this.lastx_ = -1;
  this.lastRow_ = -1;
  this.highlightSet_ = null;
};
/**
 * Returns the number of the currently selected row. To get data for this
 * row, you can use the getValue method.
 * @return { Integer } row number, or -1 if nothing is selected
 */
Dygraph.prototype.getSelection = function() {
  if (!this.selPoints_ || this.selPoints_.length < 1) {
    return -1;
  }
  var targetX = this.selPoints_[0].x;
  // Scan every series for a point matching the selected x position.
  for (var setIdx = 0; setIdx < this.layout_.points.length; setIdx++) {
    var points = this.layout_.points[setIdx];
    for (var row = 0; row < points.length; row++) {
      if (points[row].x == targetX) {
        return row + this.getLeftBoundary_();
      }
    }
  }
  return -1;
};
/**
 * Returns the name of the currently-highlighted series.
 * Only meaningful when the highlightSeriesOpts option is in use.
 */
Dygraph.prototype.getHighlightSeries = function() {
  return this.highlightSet_;
};
/**
 * Returns true if the currently-highlighted series was locked via
 * setSelection(..., seriesName, true).
 */
Dygraph.prototype.isSeriesLocked = function() {
  return this.lockedSet_;
};
/**
 * Fires when there's data available to be graphed: parses the CSV into the
 * raw-data array and kicks off a full predraw/redraw cycle.
 * @param {String} data Raw CSV data to be plotted
 * @private
 */
Dygraph.prototype.loadedEvent_ = function(data) {
  this.rawData_ = this.parseCSV_(data);
  this.predraw_();
};
/**
 * Add ticks on the x-axis representing years, months, quarters, weeks, or
 * days. The granularity is chosen by the configured x-axis ticker.
 * @private
 */
Dygraph.prototype.addXTicks_ = function() {
  // Use the zoomed window if one is active, otherwise the full data range.
  var range;
  if (this.dateWindow_) {
    range = [this.dateWindow_[0], this.dateWindow_[1]];
  } else {
    range = this.fullXRange_();
  }
  var xAxisOptionsView = this.optionsViewForAxis_('x');
  var xTicks = xAxisOptionsView('ticker')(
      range[0],
      range[1],
      this.width_, // TODO(danvk): should be area.width
      xAxisOptionsView,
      this);
  this.layout_.setXTicks(xTicks);
};
/**
 * Computes the [min, max] range of a data series, including confidence
 * intervals when error/custom bars are enabled.
 * @param { [Array] } series either [ [x1, y1], [x2, y2], ... ] or
 *     [ [x1, [y1, dev_low, dev_high]], [x2, [y2, dev_low, dev_high]], ...
 * @return {Array} [low, high]; both null when the series has no valid values.
 * @private
 */
Dygraph.prototype.extremeValues_ = function(series) {
  var minY = null, maxY = null, j, y;
  var hasBars = this.attr_("errorBars") || this.attr_("customBars");
  if (hasBars) {
    // With bars, the range is taken over the low/high bar values.
    for (j = 0; j < series.length; j++) {
      y = series[j][1][0];
      if (y === null || isNaN(y)) continue;
      var low = y - series[j][1][1];
      var high = y + series[j][1][2];
      // Custom bars can put a bar on the "wrong" side of the value
      // (e.g. in tests/custom-bars.html); clamp so the range includes y.
      if (low > y) low = y;
      if (high < y) high = y;
      if (maxY === null || high > maxY) maxY = high;
      if (minY === null || low < minY) minY = low;
    }
  } else {
    for (j = 0; j < series.length; j++) {
      y = series[j][1];
      if (y === null || isNaN(y)) continue;
      if (maxY === null || y > maxY) maxY = y;
      if (minY === null || y < minY) minY = y;
    }
  }
  return [minY, maxY];
};
/**
 * This function is called once when the chart's data is changed or the options
 * dictionary is updated. It is _not_ called when the user pans or zooms. The
 * idea is that values derived from the chart's data can be computed here,
 * rather than every time the chart is drawn. This includes things like the
 * number of axes, rolling averages, etc. Ends by calling drawGraph_ and
 * records the elapsed time in this.drawingTimeMs_.
 * @private
 */
Dygraph.prototype.predraw_ = function() {
  var start = new Date();
  this.layout_.computePlotArea();
  // TODO(danvk): move more computations out of drawGraph_ and into here.
  this.computeYAxes_();
  // Create a new plotter, clearing the previous one (and notifying plugins
  // via the 'clearChart' event) if it exists.
  if (this.plotter_) {
    this.cascadeEvents_('clearChart');
    this.plotter_.clear();
  }
  this.plotter_ = new DygraphCanvasRenderer(this,
                                            this.hidden_,
                                            this.hidden_ctx_,
                                            this.layout_);
  // The roller sits in the bottom left corner of the chart. We don't know where
  // this will be until the options are available, so it's positioned here.
  this.createRollInterface_();
  this.cascadeEvents_('predraw');
  // Convert the raw data (a 2D array) into the internal format and compute
  // rolling averages.
  this.rolledSeries_ = [null]; // x-axis is the first series and it's special
  for (var i = 1; i < this.numColumns(); i++) {
    // var logScale = this.attr_('logscale', i); // TODO(klausw): this looks wrong // konigsberg thinks so too.
    var logScale = this.attr_('logscale');
    var series = this.extractSeries_(this.rawData_, i, logScale);
    series = this.rollingAverage(series, this.rollPeriod_);
    this.rolledSeries_.push(series);
  }
  // If the data or options have changed, then we'd better redraw.
  this.drawGraph_();
  // This is used to determine whether to do various animations.
  var end = new Date();
  this.drawingTimeMs_ = (end - start);
};
/**
 * Loop over all fields and create datasets, calculating extreme y-values for
 * each series and extreme x-indices as we go.
 *
 * dateWindow is passed in as an explicit parameter so that we can compute
 * extreme values "speculatively", i.e. without actually setting state on the
 * dygraph.
 *
 * TODO(danvk): make this more of a true function
 * @param {Array} rolledSeries Output of rollingAverage for each series; index
 *     0 is unused (the x-axis), indices 1..N hold [x, y]-tuple arrays.
 * @param {Array} dateWindow Optional [low, high] x-range to prune to.
 * @return [ datasets, seriesExtremes, boundaryIds ]
 * @private
 */
Dygraph.prototype.gatherDatasets_ = function(rolledSeries, dateWindow) {
  var boundaryIds = [];
  var cumulative_y = [];  // For stacked series: running sum keyed by x value.
  var datasets = [];
  var extremes = {};  // series name -> [low, high]
  var i, j, k;

  // Loop over the fields (series). Go from the last to the first,
  // because if they're stacked that's how we accumulate the values.
  var num_series = rolledSeries.length - 1;
  for (i = num_series; i >= 1; i--) {
    // Invisible series are skipped entirely; they contribute no extremes.
    if (!this.visibility()[i - 1]) continue;

    // Note: this copy _is_ necessary at the moment.
    // If you remove it, it breaks zooming with error bars on.
    // TODO(danvk): investigate further & write a test for this.
    var series = [];
    for (j = 0; j < rolledSeries[i].length; j++) {
      series.push(rolledSeries[i][j]);
    }

    // Prune down to the desired range, if necessary (for zooming)
    // Because there can be lines going to points outside of the visible area,
    // we actually prune to visible points, plus one on either side.
    var bars = this.attr_("errorBars") || this.attr_("customBars");
    if (dateWindow) {
      var low = dateWindow[0];
      var high = dateWindow[1];
      var pruned = [];
      // TODO(danvk): do binary search instead of linear search.
      // TODO(danvk): pass firstIdx and lastIdx directly to the renderer.
      var firstIdx = null, lastIdx = null;
      for (k = 0; k < series.length; k++) {
        if (series[k][0] >= low && firstIdx === null) {
          firstIdx = k;
        }
        if (series[k][0] <= high) {
          lastIdx = k;
        }
      }
      // Widen the window by one point on each side so that connecting lines
      // to off-screen points are still drawn.
      if (firstIdx === null) firstIdx = 0;
      if (firstIdx > 0) firstIdx--;
      if (lastIdx === null) lastIdx = series.length - 1;
      if (lastIdx < series.length - 1) lastIdx++;
      boundaryIds[i-1] = [firstIdx, lastIdx];
      for (k = firstIdx; k <= lastIdx; k++) {
        pruned.push(series[k]);
      }
      series = pruned;
    } else {
      boundaryIds[i-1] = [0, series.length-1];
    }

    var seriesExtremes = this.extremeValues_(series);

    if (bars) {
      // Flatten [x, [center, low, high]] into [x, center, low, high].
      for (j=0; j<series.length; j++) {
        series[j] = [series[j][0],
                     series[j][1][0],
                     series[j][1][1],
                     series[j][1][2]];
      }
    } else if (this.attr_("stackedGraph")) {
      var l = series.length;
      var actual_y;
      for (j = 0; j < l; j++) {
        // If one data set has a NaN, let all subsequent stacked
        // sets inherit the NaN -- only start at 0 for the first set.
        var x = series[j][0];
        if (cumulative_y[x] === undefined) {
          cumulative_y[x] = 0;
        }

        actual_y = series[j][1];
        if (actual_y === null) {
          series[j] = [x, null];
          continue;
        }

        cumulative_y[x] += actual_y;

        series[j] = [x, cumulative_y[x]];

        // Stacking can push the running sum past this series' own extremes,
        // so re-check against the accumulated value.
        if (cumulative_y[x] > seriesExtremes[1]) {
          seriesExtremes[1] = cumulative_y[x];
        }
        if (cumulative_y[x] < seriesExtremes[0]) {
          seriesExtremes[0] = cumulative_y[x];
        }
      }
    }

    var seriesName = this.attr_("labels")[i];
    extremes[seriesName] = seriesExtremes;
    datasets[i] = series;
  }

  // For stacked graphs, a NaN value for any point in the sum should create a
  // clean gap in the graph. Back-propagate NaNs to all points at this X value.
  if (this.attr_("stackedGraph")) {
    for (k = datasets.length - 1; k >= 0; --k) {
      // Use the first nonempty dataset to get X values.
      if (!datasets[k]) continue;
      for (j = 0; j < datasets[k].length; j++) {
        var x = datasets[k][j][0];
        if (isNaN(cumulative_y[x])) {
          // Set all Y values to NaN at that X value.
          for (i = datasets.length - 1; i >= 0; i--) {
            if (!datasets[i]) continue;
            datasets[i][j][1] = NaN;
          }
        }
      }
      break;
    }
  }

  return [ datasets, extremes, boundaryIds ];
};
/**
 * Update the graph with new data. This method is called when the viewing area
 * has changed. If the underlying data or options have changed, predraw_ will
 * be called before drawGraph_ is called.
 *
 * Gathers the (rolled, pruned) datasets, recomputes y-axis ranges and ticks,
 * hands everything to the layout and then renders.
 *
 * @private
 */
Dygraph.prototype.drawGraph_ = function() {
  var start = new Date();

  // This is used to set the second parameter to drawCallback, below.
  var is_initial_draw = this.is_initial_draw_;
  this.is_initial_draw_ = false;

  this.layout_.removeAllDatasets();
  this.setColors_();
  this.attrs_.pointSize = 0.5 * this.attr_('highlightCircleSize');

  var packed = this.gatherDatasets_(this.rolledSeries_, this.dateWindow_);
  var datasets = packed[0];
  var extremes = packed[1];
  this.boundaryIds_ = packed[2];

  this.setIndexByName_ = {};
  var labels = this.attr_("labels");
  if (labels.length > 0) {
    this.setIndexByName_[labels[0]] = 0;
  }
  var dataIdx = 0;
  for (var i = 1; i < datasets.length; i++) {
    this.setIndexByName_[labels[i]] = i;
    if (!this.visibility()[i - 1]) continue;
    this.layout_.addDataset(labels[i], datasets[i]);
    this.datasetIndex_[i] = dataIdx++;
  }

  this.computeYAxisRanges_(extremes);
  this.layout_.setYAxes(this.axes_);

  this.addXTicks_();

  // Save the X axis zoomed status as the updateOptions call will tend to set it erroneously
  var tmp_zoomed_x = this.zoomed_x_;
  // Tell PlotKit to use this new data and render itself
  this.layout_.setDateWindow(this.dateWindow_);
  this.zoomed_x_ = tmp_zoomed_x;
  this.layout_.evaluateWithError();
  this.renderGraph_(is_initial_draw);

  if (this.attr_("timingName")) {
    var end = new Date();
    // Guard with typeof: a bare `if (console)` throws a ReferenceError in
    // environments where the console object is not declared at all
    // (e.g. IE8/9 without developer tools open).
    if (typeof(console) !== 'undefined' && console) {
      console.log(this.attr_("timingName") + " - drawGraph: " + (end - start) + "ms");
    }
  }
};
/**
 * This does the work of drawing the chart. It assumes that the layout and axis
 * scales have already been set (e.g. by predraw_).
 *
 * Fires 'clearChart', 'willDrawChart' and 'didDrawChart' events, runs the
 * underlay and draw callbacks, and clears the interaction canvas.
 *
 * @private
 */
Dygraph.prototype.renderGraph_ = function(is_initial_draw) {
  this.cascadeEvents_('clearChart');
  this.plotter_.clear();

  var underlay = this.attr_('underlayCallback');
  if (underlay) {
    // NOTE: we pass the dygraph object to this callback twice to avoid
    // breaking users who expect a deprecated form of this callback.
    underlay(this.hidden_ctx_, this.layout_.getPlotArea(), this, this);
  }

  var evt = {
    canvas: this.hidden_,
    drawingContext: this.hidden_ctx_
  };
  this.cascadeEvents_('willDrawChart', evt);
  this.plotter_.render();
  this.cascadeEvents_('didDrawChart', evt);

  // TODO(danvk): is this a performance bottleneck when panning?
  // The interaction canvas should already be empty in that situation.
  this.canvas_.getContext('2d').clearRect(
      0, 0, this.canvas_.width, this.canvas_.height);

  var drawCallback = this.attr_("drawCallback");
  if (drawCallback !== null) {
    drawCallback(this, is_initial_draw);
  }
};
/**
 * @private
 * Determine properties of the y-axes which are independent of the data
 * currently being displayed. This includes things like the number of axes and
 * the style of the axes. It does not include the range of each axis and its
 * tick marks.
 * This fills in this.axes_.
 * axes_ = [ { options } ]
 *   indices are into the axes_ array.
 */
Dygraph.prototype.computeYAxes_ = function() {
  // Preserve valueWindow settings if they exist, and if the user hasn't
  // specified a new valueRange.
  var valueWindows, axis, index, opts, v;
  if (this.axes_ !== undefined && this.user_attrs_.hasOwnProperty("valueRange") === false) {
    valueWindows = [];
    for (index = 0; index < this.axes_.length; index++) {
      valueWindows.push(this.axes_[index].valueWindow);
    }
  }

  // this.axes_ doesn't match this.attributes_.axes_.options. It's used for
  // data computation as well as options storage.
  // Go through once and add all the axes.
  this.axes_ = [];
  for (axis = 0; axis < this.attributes_.numAxes(); axis++) {
    // Add a new axis, making a copy of its per-axis options.
    opts = { g : this };
    Dygraph.update(opts, this.attributes_.axisOptions(axis));
    this.axes_[axis] = opts;
  }

  // Copy global valueRange option over to the first axis.
  // NOTE(konigsberg): Are these two statements necessary?
  // I tried removing it. The automated tests pass, and manually
  // messing with tests/zoom.html showed no trouble.
  v = this.attr_('valueRange');
  if (v) this.axes_[0].valueRange = v;

  if (valueWindows !== undefined) {
    // Restore valueWindow settings (saved above), so a y-axis zoom survives
    // option updates.
    for (index = 0; index < valueWindows.length; index++) {
      this.axes_[index].valueWindow = valueWindows[index];
    }
  }

  // Apply any per-axis valueRange overrides.
  for (axis = 0; axis < this.axes_.length; axis++) {
    if (axis === 0) {
      opts = this.optionsViewForAxis_('y' + (axis ? '2' : ''));
      v = opts("valueRange");
      if (v) this.axes_[axis].valueRange = v;
    } else {  // To keep old behavior
      // Secondary axes only read valueRange from the explicit axes.y2 option.
      var axes = this.user_attrs_.axes;
      if (axes && axes.y2) {
        v = axes.y2.valueRange;
        if (v) this.axes_[axis].valueRange = v;
      }
    }
  }
};
/**
 * Returns the number of y-axes on the chart.
 *
 * Simply delegates to the attributes object, which owns axis bookkeeping.
 * @return {Number} the number of axes.
 */
Dygraph.prototype.numAxes = function() {
  return this.attributes_.numAxes();
};
/**
 * @private
 * Returns the per-axis properties object for the axis on which the given
 * series is plotted.
 * @param { String } series The name of the series for which to get axis
 * properties, e.g. 'Y1'.
 * @return { Object } The axis properties.
 */
Dygraph.prototype.axisPropertiesForSeries = function(series) {
  // TODO(danvk): handle errors.
  var axisIdx = this.attributes_.axisForSeries(series);
  return this.axes_[axisIdx];
};
/**
 * @private
 * Determine the value range and tick marks for each axis.
 * @param {Object} extremes A mapping from seriesName -> [low, high]
 * This fills in the valueRange and ticks fields in each entry of this.axes_.
 */
Dygraph.prototype.computeYAxisRanges_ = function(extremes) {
  var series;
  var numAxes = this.attributes_.numAxes();

  // Compute extreme values, a span and tick marks for each axis.
  for (var i = 0; i < numAxes; i++) {
    var axis = this.axes_[i];
    var logscale = this.attributes_.getForAxis("logscale", i);
    var includeZero = this.attributes_.getForAxis("includeZero", i);
    series = this.attributes_.seriesForAxis(i);

    if (series.length === 0) {
      // If no series are defined or visible then use a reasonable default
      axis.extremeRange = [0, 1];
    } else {
      // Calculate the extremes of extremes.
      var minY = Infinity;  // extremes[series[0]][0];
      var maxY = -Infinity;  // extremes[series[0]][1];
      var extremeMinY, extremeMaxY;

      for (var j = 0; j < series.length; j++) {
        // this skips invisible series
        if (!extremes.hasOwnProperty(series[j])) continue;

        // Only use valid extremes to stop null data series' from corrupting the scale.
        extremeMinY = extremes[series[j]][0];
        if (extremeMinY !== null) {
          minY = Math.min(extremeMinY, minY);
        }
        extremeMaxY = extremes[series[j]][1];
        if (extremeMaxY !== null) {
          maxY = Math.max(extremeMaxY, maxY);
        }
      }
      if (includeZero && minY > 0) minY = 0;

      // Ensure we have a valid scale, otherwise default to [0, 1] for safety.
      if (minY == Infinity) minY = 0;
      if (maxY == -Infinity) maxY = 1;

      // Add some padding and round up to an integer to be human-friendly.
      var span = maxY - minY;
      // special case: if we have no sense of scale, use +/-10% of the sole value.
      if (span === 0) { span = maxY; }

      var maxAxisY, minAxisY;
      if (logscale) {
        // On a log scale, only pad the top; padding the bottom could push it
        // to or below zero, which is unplottable.
        maxAxisY = maxY + 0.1 * span;
        minAxisY = minY;
      } else {
        maxAxisY = maxY + 0.1 * span;
        minAxisY = minY - 0.1 * span;

        // Try to include zero and make it minAxisY (or maxAxisY) if it makes sense.
        if (!this.attr_("avoidMinZero")) {
          if (minAxisY < 0 && minY >= 0) minAxisY = 0;
          if (maxAxisY > 0 && maxY <= 0) maxAxisY = 0;
        }

        if (this.attr_("includeZero")) {
          if (maxY < 0) maxAxisY = 0;
          if (minY > 0) minAxisY = 0;
        }
      }
      axis.extremeRange = [minAxisY, maxAxisY];
    }
    if (axis.valueWindow) {
      // This is only set if the user has zoomed on the y-axis. It is never set
      // by a user. It takes precedence over axis.valueRange because, if you set
      // valueRange, you'd still expect to be able to pan.
      axis.computedValueRange = [axis.valueWindow[0], axis.valueWindow[1]];
    } else if (axis.valueRange) {
      // This is a user-set value range for this axis.
      axis.computedValueRange = [axis.valueRange[0], axis.valueRange[1]];
    } else {
      axis.computedValueRange = axis.extremeRange;
    }

    // Add ticks. By default, all axes inherit the tick positions of the
    // primary axis. However, if an axis is specifically marked as having
    // independent ticks, then that is permissible as well.
    var opts = this.optionsViewForAxis_('y' + (i ? '2' : ''));
    var ticker = opts('ticker');
    if (i === 0 || axis.independentTicks) {
      axis.ticks = ticker(axis.computedValueRange[0],
                          axis.computedValueRange[1],
                          this.height_,  // TODO(danvk): should be area.height
                          opts,
                          this);
    } else {
      // Dependent axis: map the primary axis' tick positions into this axis'
      // value range, preserving their fractional vertical positions.
      var p_axis = this.axes_[0];
      var p_ticks = p_axis.ticks;
      var p_scale = p_axis.computedValueRange[1] - p_axis.computedValueRange[0];
      var scale = axis.computedValueRange[1] - axis.computedValueRange[0];
      var tick_values = [];
      for (var k = 0; k < p_ticks.length; k++) {
        var y_frac = (p_ticks[k].v - p_axis.computedValueRange[0]) / p_scale;
        var y_val = axis.computedValueRange[0] + y_frac * scale;
        tick_values.push(y_val);
      }

      axis.ticks = ticker(axis.computedValueRange[0],
                          axis.computedValueRange[1],
                          this.height_,  // TODO(danvk): should be area.height
                          opts,
                          this,
                          tick_values);
    }
  }
};
/**
 * Extracts one series from the raw data (a 2D array) into an array of (date,
 * value) tuples.
 *
 * This is where undesirable points (i.e. negative values on log scales and
 * missing values through which we wish to connect lines) are dropped.
 * TODO(danvk): the "missing values" bit above doesn't seem right.
 *
 * @private
 */
Dygraph.prototype.extractSeries_ = function(rawData, i, logScale) {
  // TODO(danvk): pre-allocate series here.
  var series = [];
  for (var rowIdx = 0; rowIdx < rawData.length; rowIdx++) {
    var xVal = rawData[rowIdx][0];
    var yVal = rawData[rowIdx][i];
    // On the log scale, points less than or equal to zero do not exist;
    // nulling them out creates a gap in the chart.
    if (logScale && yVal <= 0) {
      yVal = null;
    }
    series.push([xVal, yVal]);
  }
  return series;
};
/**
 * @private
 * Calculates the rolling average of a data set.
 * If originalData is [label, val], rolls the average of those.
 * If originalData is [label, [value, stddev]], the roll is returned in the
 * same form, with appropriately reduced stddev for each value.
 * Note that this is where fractional input (i.e. '5/10') is converted into
 * decimal values.
 * @param {Array} originalData The data in the appropriate format (see above)
 * @param {Number} rollPeriod The number of points over which to average the
 *                            data
 * @return {Array} The rolled data, one entry per input point.
 */
Dygraph.prototype.rollingAverage = function(originalData, rollPeriod) {
  if (originalData.length < 2)
    return originalData;
  rollPeriod = Math.min(rollPeriod, originalData.length);
  var rollingData = [];
  var sigma = this.attr_("sigma");

  var low, high, i, j, y, sum, num_ok, stddev;
  if (this.fractions_) {
    // Fractional data: each y is [numerator, denominator]. Maintain running
    // sums over the window and emit percentages (mult = 100).
    var num = 0;
    var den = 0;  // numerator/denominator
    var mult = 100.0;
    for (i = 0; i < originalData.length; i++) {
      num += originalData[i][1][0];
      den += originalData[i][1][1];
      // Slide the window: subtract the point that just fell out of it.
      if (i - rollPeriod >= 0) {
        num -= originalData[i - rollPeriod][1][0];
        den -= originalData[i - rollPeriod][1][1];
      }

      var date = originalData[i][0];
      var value = den ? num / den : 0.0;
      if (this.attr_("errorBars")) {
        if (this.attr_("wilsonInterval")) {
          // For more details on this confidence interval, see:
          // http://en.wikipedia.org/wiki/Binomial_confidence_interval
          if (den) {
            var p = value < 0 ? 0 : value, n = den;
            var pm = sigma * Math.sqrt(p*(1-p)/n + sigma*sigma/(4*n*n));
            var denom = 1 + sigma * sigma / den;
            low  = (p + sigma * sigma / (2 * den) - pm) / denom;
            high = (p + sigma * sigma / (2 * den) + pm) / denom;
            rollingData[i] = [date,
                              [p * mult, (p - low) * mult, (high - p) * mult]];
          } else {
            rollingData[i] = [date, [0, 0, 0]];
          }
        } else {
          // Normal-approximation error bars for a binomial proportion.
          stddev = den ? sigma * Math.sqrt(value * (1 - value) / den) : 1.0;
          rollingData[i] = [date, [mult * value, mult * stddev, mult * stddev]];
        }
      } else {
        rollingData[i] = [date, mult * value];
      }
    }
  } else if (this.attr_("customBars")) {
    // Custom bars: each y is [low, center, high]. Average each component
    // over the window, skipping null/NaN centers.
    low = 0;
    var mid = 0;
    high = 0;
    var count = 0;
    for (i = 0; i < originalData.length; i++) {
      var data = originalData[i][1];
      y = data[1];
      rollingData[i] = [originalData[i][0], [y, y - data[0], data[2] - y]];

      if (y !== null && !isNaN(y)) {
        low += data[0];
        mid += y;
        high += data[2];
        count += 1;
      }
      // Slide the window: remove the point that fell out, if it was valid.
      if (i - rollPeriod >= 0) {
        var prev = originalData[i - rollPeriod];
        if (prev[1][1] !== null && !isNaN(prev[1][1])) {
          low -= prev[1][0];
          mid -= prev[1][1];
          high -= prev[1][2];
          count -= 1;
        }
      }
      if (count) {
        rollingData[i] = [originalData[i][0], [ 1.0 * mid / count,
                                                1.0 * (mid - low) / count,
                                                1.0 * (high - mid) / count ]];
      } else {
        rollingData[i] = [originalData[i][0], [null, null, null]];
      }
    }
  } else {
    // Calculate the rolling average for the first rollPeriod - 1 points where
    // there is not enough data to roll over the full number of points
    if (!this.attr_("errorBars")){
      if (rollPeriod == 1) {
        return originalData;
      }

      for (i = 0; i < originalData.length; i++) {
        sum = 0;
        num_ok = 0;
        // Average over up to rollPeriod trailing points, ignoring nulls/NaNs.
        for (j = Math.max(0, i - rollPeriod + 1); j < i + 1; j++) {
          y = originalData[j][1];
          if (y === null || isNaN(y)) continue;
          num_ok++;
          sum += originalData[j][1];
        }
        if (num_ok) {
          rollingData[i] = [originalData[i][0], sum / num_ok];
        } else {
          rollingData[i] = [originalData[i][0], null];
        }
      }

    } else {
      // Error-bar data: each y is [value, stddev]. Combine stddevs in
      // quadrature and scale by sigma.
      for (i = 0; i < originalData.length; i++) {
        sum = 0;
        var variance = 0;
        num_ok = 0;
        for (j = Math.max(0, i - rollPeriod + 1); j < i + 1; j++) {
          y = originalData[j][1][0];
          if (y === null || isNaN(y)) continue;
          num_ok++;
          sum += originalData[j][1][0];
          variance += Math.pow(originalData[j][1][1], 2);
        }
        if (num_ok) {
          stddev = Math.sqrt(variance) / num_ok;
          rollingData[i] = [originalData[i][0],
                            [sum / num_ok, sigma * stddev, sigma * stddev]];
        } else {
          rollingData[i] = [originalData[i][0], [null, null, null]];
        }
      }
    }
  }

  return rollingData;
};
/**
 * Detects the type of the str (date or numeric) and sets the various
 * formatting attributes in this.attrs_ based on this type.
 * @param {String} str An x value.
 * @private
 */
Dygraph.prototype.detectTypeFromString_ = function(str) {
  var isDate = false;
  var dashPos = str.indexOf('-');  // could be 2006-01-01 _or_ 1.0e-2
  // A dash preceded by 'e'/'E' is an exponent's minus sign, not a date
  // separator.
  var dashIsExponent = (dashPos > 0 &&
                        (str[dashPos - 1] == 'e' || str[dashPos - 1] == 'E'));
  if ((dashPos > 0 && !dashIsExponent) ||
      str.indexOf('/') >= 0 ||
      isNaN(parseFloat(str))) {
    isDate = true;
  } else if (str.length == 8 && str > '19700101' && str < '20371231') {
    // Looks like a YYYYMMDD date.
    // TODO(danvk): remove support for this format.
    isDate = true;
  }

  this.setXAxisOptions_(isDate);
};
/**
 * Installs the x-axis parser, formatter and ticker appropriate for either
 * date-valued or numeric x values. Writes into this.attrs_ (defaults), not
 * user_attrs_.
 * @param {Boolean} isDate True if the x values are dates.
 * @private
 */
Dygraph.prototype.setXAxisOptions_ = function(isDate) {
  var xAxis = this.attrs_.axes.x;
  if (isDate) {
    this.attrs_.xValueParser = Dygraph.dateParser;
    xAxis.valueFormatter = Dygraph.dateString_;
    xAxis.ticker = Dygraph.dateTicker;
    xAxis.axisLabelFormatter = Dygraph.dateAxisFormatter;
  } else {
    /** @private (shut up, jsdoc!) */
    this.attrs_.xValueParser = function(x) { return parseFloat(x); };
    // TODO(danvk): use Dygraph.numberValueFormatter here?
    /** @private (shut up, jsdoc!) */
    xAxis.valueFormatter = function(x) { return x; };
    xAxis.ticker = Dygraph.numericLinearTicks;
    xAxis.axisLabelFormatter = xAxis.valueFormatter;
  }
};
/**
 * Parses the value as a floating point number. This is like the parseFloat()
 * built-in, but with a few differences:
 * - the empty string is parsed as null, rather than NaN.
 * - if the string cannot be parsed at all, an error is logged.
 * If the string can't be parsed, this method returns null.
 * @param {String} x The string to be parsed
 * @param {Number} opt_line_no The line number from which the string comes.
 * @param {String} opt_line The text of the line from which the string comes.
 * @return {?Number} The parsed value, NaN for a literal "NaN", or null.
 * @private
 */
// Parse the x as a float or return null if it's not a number.
Dygraph.prototype.parseFloat_ = function(x, opt_line_no, opt_line) {
  var val = parseFloat(x);
  if (!isNaN(val)) return val;

  // Try to figure out what happened.
  // If the value is the empty string, parse it as null.
  if (/^ *$/.test(x)) return null;

  // If it was actually "NaN", return it as NaN.
  if (/^ *nan *$/i.test(x)) return NaN;

  // Looks like a parsing error.
  var msg = "Unable to parse '" + x + "' as a number";
  // Use loose != so both null AND undefined (i.e. omitted arguments) skip
  // the line annotation. The previous strict (!== null) check let undefined
  // through, producing messages like "on line NaN ('undefined') of CSV.".
  if (opt_line != null && opt_line_no != null) {
    msg += " on line " + (1+opt_line_no) + " ('" + opt_line + "') of CSV.";
  }
  this.error(msg);

  return null;
};
/**
 * @private
 * Parses a string in a special csv format. We expect a csv file where each
 * line is a date point, and the first field in each line is the date string.
 * We also expect that all remaining fields represent series.
 * if the errorBars attribute is set, then interpret the fields as:
 * date, series1, stddev1, series2, stddev2, ...
 * @param {[Object]} data See above.
 *
 * @return [Object] An array with one entry for each row. These entries
 * are an array of cells in that row. The first entry is the parsed x-value for
 * the row. The second, third, etc. are the y-values. These can take on one of
 * three forms, depending on the CSV and constructor parameters:
 * 1. numeric value
 * 2. [ value, stddev ]
 * 3. [ low value, center value, high value ]
 */
Dygraph.prototype.parseCSV_ = function(data) {
  var ret = [];
  var line_delimiter = Dygraph.detectLineDelimiter(data);
  var lines = data.split(line_delimiter || "\n");
  var vals, j;

  // Use the default delimiter or fall back to a tab if that makes sense.
  var delim = this.attr_('delimiter');
  if (lines[0].indexOf(delim) == -1 && lines[0].indexOf('\t') >= 0) {
    delim = '\t';
  }

  var start = 0;
  if (!('labels' in this.user_attrs_)) {
    // User hasn't explicitly set labels, so they're (presumably) in the CSV.
    start = 1;
    this.attrs_.labels = lines[0].split(delim);  // NOTE: _not_ user_attrs_.
    this.attributes_.reparseSeries();
  }
  var line_no = 0;

  var xParser;
  var defaultParserSet = false;  // attempt to auto-detect x value type
  var expectedCols = this.attr_("labels").length;
  var outOfOrder = false;
  for (var i = start; i < lines.length; i++) {
    var line = lines[i];
    line_no = i;
    if (line.length === 0) continue;  // skip blank lines
    if (line[0] == '#') continue;    // skip comment lines
    var inFields = line.split(delim);
    if (inFields.length < 2) continue;

    var fields = [];
    if (!defaultParserSet) {
      // The x-value parser is chosen from the first data row and then reused
      // for the rest of the file.
      this.detectTypeFromString_(inFields[0]);
      xParser = this.attr_("xValueParser");
      defaultParserSet = true;
    }
    fields[0] = xParser(inFields[0], this);

    // If fractions are expected, parse the numbers as "A/B"
    if (this.fractions_) {
      for (j = 1; j < inFields.length; j++) {
        // TODO(danvk): figure out an appropriate way to flag parse errors.
        vals = inFields[j].split("/");
        if (vals.length != 2) {
          this.error('Expected fractional "num/den" values in CSV data ' +
                     "but found a value '" + inFields[j] + "' on line " +
                     (1 + i) + " ('" + line + "') which is not of this form.");
          fields[j] = [0, 0];
        } else {
          fields[j] = [this.parseFloat_(vals[0], i, line),
                       this.parseFloat_(vals[1], i, line)];
        }
      }
    } else if (this.attr_("errorBars")) {
      // If there are error bars, values are (value, stddev) pairs
      if (inFields.length % 2 != 1) {
        this.error('Expected alternating (value, stdev.) pairs in CSV data ' +
                   'but line ' + (1 + i) + ' has an odd number of values (' +
                   (inFields.length - 1) + "): '" + line + "'");
      }
      for (j = 1; j < inFields.length; j += 2) {
        fields[(j + 1) / 2] = [this.parseFloat_(inFields[j], i, line),
                               this.parseFloat_(inFields[j + 1], i, line)];
      }
    } else if (this.attr_("customBars")) {
      // Bars are a low;center;high tuple
      for (j = 1; j < inFields.length; j++) {
        var val = inFields[j];
        if (/^ *$/.test(val)) {
          // An all-whitespace cell is a missing value.
          fields[j] = [null, null, null];
        } else {
          vals = val.split(";");
          if (vals.length == 3) {
            fields[j] = [ this.parseFloat_(vals[0], i, line),
                          this.parseFloat_(vals[1], i, line),
                          this.parseFloat_(vals[2], i, line) ];
          } else {
            this.warn('When using customBars, values must be either blank ' +
                      'or "low;center;high" tuples (got "' + val +
                      '" on line ' + (1+i));
          }
        }
      }
    } else {
      // Values are just numbers
      for (j = 1; j < inFields.length; j++) {
        fields[j] = this.parseFloat_(inFields[j], i, line);
      }
    }
    if (ret.length > 0 && fields[0] < ret[ret.length - 1][0]) {
      outOfOrder = true;
    }

    if (fields.length != expectedCols) {
      this.error("Number of columns in line " + i + " (" + fields.length +
                 ") does not agree with number of labels (" + expectedCols +
                 ") " + line);
    }

    // If the user specified the 'labels' option and none of the cells of the
    // first row parsed correctly, then they probably double-specified the
    // labels. We go with the values set in the option, discard this row and
    // log a warning to the JS console.
    if (i === 0 && this.attr_('labels')) {
      var all_null = true;
      for (j = 0; all_null && j < fields.length; j++) {
        if (fields[j]) all_null = false;
      }
      if (all_null) {
        this.warn("The dygraphs 'labels' option is set, but the first row of " +
                  "CSV data ('" + line + "') appears to also contain labels. " +
                  "Will drop the CSV labels and use the option labels.");
        continue;
      }
    }
    ret.push(fields);
  }

  if (outOfOrder) {
    this.warn("CSV is out of order; order it correctly to speed loading.");
    ret.sort(function(a,b) { return a[0] - b[0]; });
  }

  return ret;
};
/**
 * @private
 * The user has provided their data as a pre-packaged JS array. If the x values
 * are numeric, this is the same as dygraphs' internal format. If the x values
 * are dates, we need to convert them from Date objects to ms since epoch.
 * @param {[Object]} data
 * @return {[Object]} data with numeric x values, or null on invalid input.
 */
Dygraph.prototype.parseArray_ = function(data) {
  // Peek at the first x value to see if it's numeric.
  if (data.length === 0) {
    this.error("Can't plot empty data set");
    return null;
  }
  if (data[0].length === 0) {
    this.error("Data set cannot contain an empty row");
    return null;
  }

  var i;
  if (this.attr_("labels") === null) {
    // No labels supplied; synthesize "X", "Y1", "Y2", ... defaults.
    this.warn("Using default labels. Set labels explicitly via 'labels' " +
              "in the options parameter");
    this.attrs_.labels = [ "X" ];
    for (i = 1; i < data[0].length; i++) {
      this.attrs_.labels.push("Y" + i);  // Not user_attrs_.
    }
    this.attributes_.reparseSeries();
  } else {
    var num_labels = this.attr_("labels");
    if (num_labels.length != data[0].length) {
      this.error("Mismatch between number of labels (" + num_labels +
                 ") and number of columns in array (" + data[0].length + ")");
      return null;
    }
  }

  if (Dygraph.isDateLike(data[0][0])) {
    // Some intelligent defaults for a date x-axis.
    this.attrs_.axes.x.valueFormatter = Dygraph.dateString_;
    this.attrs_.axes.x.ticker = Dygraph.dateTicker;
    this.attrs_.axes.x.axisLabelFormatter = Dygraph.dateAxisFormatter;

    // Assume they're all dates. Clone so the caller's array is untouched.
    var parsedData = Dygraph.clone(data);
    for (i = 0; i < data.length; i++) {
      if (parsedData[i].length === 0) {
        this.error("Row " + (1 + i) + " of data is empty");
        return null;
      }
      // Every x value must be a valid Date (has getTime, not NaN).
      if (parsedData[i][0] === null ||
          typeof(parsedData[i][0].getTime) != 'function' ||
          isNaN(parsedData[i][0].getTime())) {
        this.error("x value in row " + (1 + i) + " is not a Date");
        return null;
      }
      parsedData[i][0] = parsedData[i][0].getTime();
    }
    return parsedData;
  } else {
    // Some intelligent defaults for a numeric x-axis.
    /** @private (shut up, jsdoc!) */
    this.attrs_.axes.x.valueFormatter = function(x) { return x; };
    this.attrs_.axes.x.ticker = Dygraph.numericLinearTicks;
    this.attrs_.axes.x.axisLabelFormatter = Dygraph.numberAxisLabelFormatter;
    return data;
  }
};
/**
 * Parses a DataTable object from gviz.
 * The data is expected to have a first column that is either a date or a
 * number. All subsequent columns must be numbers. If there is a clear mismatch
 * between this.xValueParser_ and the type of the first column, it will be
 * fixed. Fills out rawData_.
 * @param {[Object]} data See above.
 * @private
 */
Dygraph.prototype.parseDataTable_ = function(data) {
  // Maps an annotation index to a short label: 0=A .. 25=Z, 26=Aa, 27=Ab,
  // and continues like.. Ba Bb .. Za .. Zz..Aaa...Zzz Aaaa Zzzz
  var shortTextForAnnotationNum = function(num) {
    // converts [0-9]+ [A-Z][a-z]*
    // example: 0=A, 1=B, 25=Z, 26=Aa, 27=Ab
    var shortText = String.fromCharCode(65 /* A */ + num % 26);
    num = Math.floor(num / 26);
    while ( num > 0 ) {
      shortText = String.fromCharCode(65 /* A */ + (num - 1) % 26 ) + shortText.toLowerCase();
      num = Math.floor((num - 1) / 26);
    }
    return shortText;
  };

  var cols = data.getNumberOfColumns();
  var rows = data.getNumberOfRows();

  // Configure x-axis parsing/formatting based on the type of column 0.
  var indepType = data.getColumnType(0);
  if (indepType == 'date' || indepType == 'datetime') {
    this.attrs_.xValueParser = Dygraph.dateParser;
    this.attrs_.axes.x.valueFormatter = Dygraph.dateString_;
    this.attrs_.axes.x.ticker = Dygraph.dateTicker;
    this.attrs_.axes.x.axisLabelFormatter = Dygraph.dateAxisFormatter;
  } else if (indepType == 'number') {
    this.attrs_.xValueParser = function(x) { return parseFloat(x); };
    this.attrs_.axes.x.valueFormatter = function(x) { return x; };
    this.attrs_.axes.x.ticker = Dygraph.numericLinearTicks;
    this.attrs_.axes.x.axisLabelFormatter = this.attrs_.axes.x.valueFormatter;
  } else {
    this.error("only 'date', 'datetime' and 'number' types are supported for " +
               "column 1 of DataTable input (Got '" + indepType + "')");
    return null;
  }

  // Array of the column indices which contain data (and not annotations).
  var colIdx = [];
  var annotationCols = {};  // data index -> [annotation cols]
  var hasAnnotations = false;
  var i, j;
  for (i = 1; i < cols; i++) {
    var type = data.getColumnType(i);
    if (type == 'number') {
      colIdx.push(i);
    } else if (type == 'string' && this.attr_('displayAnnotations')) {
      // This is OK -- it's an annotation column. It annotates the most
      // recent numeric column.
      var dataIdx = colIdx[colIdx.length - 1];
      if (!annotationCols.hasOwnProperty(dataIdx)) {
        annotationCols[dataIdx] = [i];
      } else {
        annotationCols[dataIdx].push(i);
      }
      hasAnnotations = true;
    } else {
      this.error("Only 'number' is supported as a dependent type with Gviz." +
                 " 'string' is only supported if displayAnnotations is true");
    }
  }

  // Read column labels
  // TODO(danvk): add support back for errorBars
  var labels = [data.getColumnLabel(0)];
  for (i = 0; i < colIdx.length; i++) {
    labels.push(data.getColumnLabel(colIdx[i]));
    if (this.attr_("errorBars")) i += 1;
  }
  this.attrs_.labels = labels;
  cols = labels.length;

  var ret = [];
  var outOfOrder = false;
  var annotations = [];
  for (i = 0; i < rows; i++) {
    var row = [];
    if (typeof(data.getValue(i, 0)) === 'undefined' ||
        data.getValue(i, 0) === null) {
      this.warn("Ignoring row " + i +
                " of DataTable because of undefined or null first column.");
      continue;
    }

    // The x value: convert Dates to ms-since-epoch, pass numbers through.
    if (indepType == 'date' || indepType == 'datetime') {
      row.push(data.getValue(i, 0).getTime());
    } else {
      row.push(data.getValue(i, 0));
    }
    if (!this.attr_("errorBars")) {
      for (j = 0; j < colIdx.length; j++) {
        var col = colIdx[j];
        row.push(data.getValue(i, col));
        if (hasAnnotations &&
            annotationCols.hasOwnProperty(col) &&
            data.getValue(i, annotationCols[col][0]) !== null) {
          // Build an annotation object from the string column(s) attached to
          // this data column; multiple columns are joined with newlines.
          var ann = {};
          ann.series = data.getColumnLabel(col);
          ann.xval = row[0];
          ann.shortText = shortTextForAnnotationNum(annotations.length);
          ann.text = '';
          for (var k = 0; k < annotationCols[col].length; k++) {
            if (k) ann.text += "\n";
            ann.text += data.getValue(i, annotationCols[col][k]);
          }
          annotations.push(ann);
        }
      }

      // Strip out infinities, which give dygraphs problems later on.
      for (j = 0; j < row.length; j++) {
        if (!isFinite(row[j])) row[j] = null;
      }
    } else {
      // With errorBars, values come in (value, stddev) column pairs.
      for (j = 0; j < cols - 1; j++) {
        row.push([ data.getValue(i, 1 + 2 * j), data.getValue(i, 2 + 2 * j) ]);
      }
    }
    if (ret.length > 0 && row[0] < ret[ret.length - 1][0]) {
      outOfOrder = true;
    }
    ret.push(row);
  }

  if (outOfOrder) {
    this.warn("DataTable is out of order; order it correctly to speed loading.");
    ret.sort(function(a,b) { return a[0] - b[0]; });
  }
  this.rawData_ = ret;

  if (annotations.length > 0) {
    this.setAnnotations(annotations, true);
  }
  this.attributes_.reparseSeries();
};
/**
 * Get the CSV data. If it's in a function, call that function. If it's in a
 * file, do an XMLHttpRequest to get it.
 *
 * Accepts: a function returning any of the below, an array (native format),
 * a gviz DataTable, a CSV string, or a URL to fetch CSV from.
 * @private
 */
Dygraph.prototype.start_ = function() {
  var data = this.file_;

  // Functions can return references of all other types.
  if (typeof data == 'function') {
    data = data();
  }

  if (Dygraph.isArrayLike(data)) {
    // Native array format.
    this.rawData_ = this.parseArray_(data);
    this.predraw_();
    return;
  }

  if (typeof data == 'object' && typeof data.getColumnRange == 'function') {
    // must be a DataTable from gviz.
    this.parseDataTable_(data);
    this.predraw_();
    return;
  }

  if (typeof data == 'string') {
    // Heuristic: a newline means it's CSV data. Otherwise it's an URL.
    var line_delimiter = Dygraph.detectLineDelimiter(data);
    if (line_delimiter) {
      this.loadedEvent_(data);
    } else {
      var req = new XMLHttpRequest();
      var self = this;
      req.onreadystatechange = function () {
        if (req.readyState == 4) {
          if (req.status === 200 ||  // Normal http
              req.status === 0) {    // Chrome w/ --allow-file-access-from-files
            self.loadedEvent_(req.responseText);
          }
        }
      };
      req.open("GET", data, true);
      req.send(null);
    }
    return;
  }

  this.error("Unknown data format: " + (typeof data));
};
/**
 * Changes various properties of the graph. These can include:
 * <ul>
 * <li>file: changes the source data for the graph</li>
 * <li>errorBars: changes whether the data contains stddev</li>
 * </ul>
 *
 * There's a huge variety of options that can be passed to this method. For a
 * full list, see http://dygraphs.com/options.html.
 *
 * @param {Object} attrs The new properties and values
 * @param {Boolean} [block_redraw] Usually the chart is redrawn after every
 * call to updateOptions(). If you know better, you can pass true to explicitly
 * block the redraw. This can be useful for chaining updateOptions() calls,
 * avoiding the occasional infinite loop and preventing redraws when it's not
 * necessary (e.g. when updating a callback).
 */
Dygraph.prototype.updateOptions = function(input_attrs, block_redraw) {
  if (typeof(block_redraw) == 'undefined') block_redraw = false;

  // mapLegacyOptions_ drops the "file" parameter as a convenience to us.
  var file = input_attrs.file;
  var attrs = Dygraph.mapLegacyOptions_(input_attrs);

  // TODO(danvk): this is a mess. Move these options into attr_.
  if ('rollPeriod' in attrs) {
    this.rollPeriod_ = attrs.rollPeriod;
  }
  if ('dateWindow' in attrs) {
    this.dateWindow_ = attrs.dateWindow;
    if (!('isZoomedIgnoreProgrammaticZoom' in attrs)) {
      // Setting a dateWindow counts as an x-zoom unless the caller
      // explicitly asks it not to.
      this.zoomed_x_ = (attrs.dateWindow !== null);
    }
  }
  if ('valueRange' in attrs && !('isZoomedIgnoreProgrammaticZoom' in attrs)) {
    this.zoomed_y_ = (attrs.valueRange !== null);
  }

  // TODO(danvk): validate per-series options.
  // Supported:
  // strokeWidth
  // pointSize
  // drawPoints
  // highlightCircleSize

  // Check if this set options will require new points.
  var requiresNewPoints = Dygraph.isPixelChangingOptionList(this.attr_("labels"), attrs);

  Dygraph.updateDeep(this.user_attrs_, attrs);

  this.attributes_.reparseSeries();

  if (file) {
    // New data source: reload it (unless redraw is blocked).
    this.file_ = file;
    if (!block_redraw) this.start_();
  } else {
    if (!block_redraw) {
      // Only re-derive points when an option that affects them changed;
      // otherwise a cheap re-render suffices.
      if (requiresNewPoints) {
        this.predraw_();
      } else {
        this.renderGraph_(false);
      }
    }
  }
};
/**
* Returns a copy of the options with deprecated names converted into current
* names. Also drops the (potentially-large) 'file' attribute. If the caller is
* interested in that, they should save a copy before calling this.
* @private
*/
/**
 * Converts deprecated option names into their current per-axis equivalents
 * and strips the (potentially large) 'file' attribute. Callers interested in
 * 'file' must save a copy before invoking this.
 * @private
 */
Dygraph.mapLegacyOptions_ = function(attrs) {
  var mapped = {};
  var k;
  for (k in attrs) {
    if (k == 'file') continue;
    if (attrs.hasOwnProperty(k)) mapped[k] = attrs[k];
  }

  // Stores value under mapped.axes[axis][opt], creating objects as needed.
  var set = function(axis, opt, value) {
    if (!mapped.axes) mapped.axes = {};
    if (!mapped.axes[axis]) mapped.axes[axis] = {};
    mapped.axes[axis][opt] = value;
  };

  // Moves a deprecated top-level option into its per-axis form, with a
  // deprecation warning.
  var map = function(opt, axis, new_opt) {
    if (typeof(attrs[opt]) != 'undefined') {
      Dygraph.warn("Option " + opt + " is deprecated. Use the " +
          new_opt + " option for the " + axis + " axis instead. " +
          "(e.g. { axes : { " + axis + " : { " + new_opt + " : ... } } } " +
          "(see http://dygraphs.com/per-axis.html for more information.");
      set(axis, new_opt, attrs[opt]);
      delete mapped[opt];
    }
  };

  // This maps, e.g., xValueFormatter -> axes: { x: { valueFormatter: ... } }
  map('xValueFormatter', 'x', 'valueFormatter');
  map('pixelsPerXLabel', 'x', 'pixelsPerLabel');
  map('xAxisLabelFormatter', 'x', 'axisLabelFormatter');
  map('xTicker', 'x', 'ticker');
  map('yValueFormatter', 'y', 'valueFormatter');
  map('pixelsPerYLabel', 'y', 'pixelsPerLabel');
  map('yAxisLabelFormatter', 'y', 'axisLabelFormatter');
  map('yTicker', 'y', 'ticker');
  return mapped;
};
/**
* Resizes the dygraph. If no parameters are specified, resizes to fill the
* containing div (which has presumably changed size since the dygraph was
* instantiated). If the width/height are specified, the div will be resized.
*
* This is far more efficient than destroying and re-instantiating a
* Dygraph, since it doesn't have to reparse the underlying data.
*
* @param {Number} [width] Width (in pixels)
* @param {Number} [height] Height (in pixels)
*/
Dygraph.prototype.resize = function(width, height) {
  if (this.resize_lock) {
    // Re-entrant call (e.g. triggered while rebuilding the interface); bail.
    return;
  }
  this.resize_lock = true;

  if ((width === null) != (height === null)) {
    // Callers must supply both dimensions or neither.
    this.warn("Dygraph.resize() should be called with zero parameters or " +
        "two non-NULL parameters. Pretending it was zero.");
    width = height = null;
  }

  var old_width = this.width_;
  var old_height = this.height_;

  if (width) {
    this.maindiv_.style.width = width + "px";
    this.maindiv_.style.height = height + "px";
    this.width_ = width;
    this.height_ = height;
  } else {
    // No explicit size: adopt the div's current CSS-driven dimensions.
    this.width_ = this.maindiv_.clientWidth;
    this.height_ = this.maindiv_.clientHeight;
  }

  if (old_width != this.width_ || old_height != this.height_) {
    // Size actually changed: tear down and rebuild the whole interface.
    // TODO(danvk): there should be a clear() method.
    this.maindiv_.innerHTML = "";
    this.roller_ = null;
    this.attrs_.labelsDiv = null;
    this.createInterface_();
    if (this.annotations_.length) {
      // createInterface_ reset the layout, so we need to do this.
      this.layout_.setAnnotations(this.annotations_);
    }
    this.createDragInterface_();
    this.predraw_();
  }

  this.resize_lock = false;
};
/**
* Adjusts the number of points in the rolling average. Updates the graph to
* reflect the new averaging period.
* @param {Number} length Number of points over which to average the data.
*/
/**
 * Adjusts the number of points in the rolling average and redraws the graph
 * to reflect the new averaging period.
 * @param {Number} length Number of points over which to average the data.
 */
Dygraph.prototype.adjustRoll = function(length) {
  this.rollPeriod_ = length;
  // Recompute points and redraw with the new window size.
  this.predraw_();
};
/**
* Returns a boolean array of visibility statuses.
*/
/**
 * Returns a boolean array of per-series visibility statuses, lazily
 * initializing it and extending it to cover every data series.
 * @return {Array.<Boolean>} one entry per data series (x-axis excluded).
 */
Dygraph.prototype.visibility = function() {
  // Do lazy-initialization, so that this happens after we know the number of
  // data series.
  if (!this.attr_("visibility")) {
    this.attrs_.visibility = [];
  }
  // BUG FIX: extend the array that attr_() actually returns. The old code
  // always pushed onto this.attrs_.visibility; if the user supplied a
  // (shorter) visibility array in user_attrs_, attr_() kept returning the
  // user's array while the loop grew a different one — an infinite loop
  // (flagged by the original TODO).
  var visibility = this.attr_("visibility");
  while (visibility.length < this.numColumns() - 1) {
    visibility.push(true);
  }
  return visibility;
};
/**
* Changes the visiblity of a series.
*/
/**
 * Changes the visibility of a single series and redraws the chart.
 * @param {Number} num zero-based series index (x-axis excluded).
 * @param {Boolean} value true to show the series, false to hide it.
 */
Dygraph.prototype.setVisibility = function(num, value) {
  var visibility = this.visibility();
  if (num >= 0 && num < visibility.length) {
    visibility[num] = value;
    this.predraw_();
  } else {
    this.warn("invalid series number in setVisibility: " + num);
  }
};
/**
* How large of an area will the dygraph render itself in?
* This is used for testing.
* @return A {width: w, height: h} object.
* @private
*/
/**
 * Reports the area (in pixels) in which the dygraph will render itself.
 * This is used for testing.
 * @return A {width: w, height: h} object.
 * @private
 */
Dygraph.prototype.size = function() {
  var dims = { width: this.width_, height: this.height_ };
  return dims;
};
/**
* Update the list of annotations and redraw the chart.
* See dygraphs.com/annotations.html for more info on how to use annotations.
* @param ann {Array} An array of annotation objects.
* @param suppressDraw {Boolean} Set to "true" to block chart redraw (optional).
*/
/**
 * Replaces the list of annotations and (optionally) redraws the chart.
 * See dygraphs.com/annotations.html for more info on how to use annotations.
 * @param ann {Array} An array of annotation objects.
 * @param suppressDraw {Boolean} Set to "true" to block chart redraw (optional).
 */
Dygraph.prototype.setAnnotations = function(ann, suppressDraw) {
  // The annotation CSS rule is only installed once it's actually needed.
  Dygraph.addAnnotationRule();
  this.annotations_ = ann;
  this.layout_.setAnnotations(ann);
  if (suppressDraw) {
    return;
  }
  this.predraw_();
};
/**
* Return the list of annotations.
*/
/**
 * Returns the current list of annotation objects.
 */
Dygraph.prototype.annotations = function() {
  var current = this.annotations_;
  return current;
};
/**
* Get the list of label names for this graph. The first column is the
* x-axis, so the data series names start at index 1.
*/
/**
 * Returns a copy of the label names for this graph. Index 0 is the x-axis
 * label; the data series names start at index 1.
 */
Dygraph.prototype.getLabels = function() {
  var labels = this.attr_("labels");
  return labels.slice();
};
/**
* Get the index of a series (column) given its name. The first column is the
* x-axis, so the data series start with index 1.
*/
/**
 * Looks up the column index of a series given its name. Column 0 is the
 * x-axis, so data series indices start at 1.
 */
Dygraph.prototype.indexFromSetName = function(name) {
  var idx = this.setIndexByName_[name];
  return idx;
};
/**
* Get the internal dataset index given its name. These are numbered starting from 0,
* and only count visible sets.
* @private
*/
/**
 * Maps a series name to its internal dataset index. These are numbered
 * starting from 0 and count only visible sets.
 * @private
 */
Dygraph.prototype.datasetIndexFromSetName_ = function(name) {
  var columnIdx = this.indexFromSetName(name);
  return this.datasetIndex_[columnIdx];
};
/**
* @private
* Adds a default style for the annotation CSS classes to the document. This is
* only executed when annotations are actually used. It is designed to only be
* called once -- all calls after the first will return immediately.
*/
/**
 * Adds a default style for the annotation CSS classes to the document. This
 * is only executed when annotations are actually used, and is designed to be
 * called once -- all calls after the first successful one return immediately.
 * @private
 */
Dygraph.addAnnotationRule = function() {
  // TODO(danvk): move this function into plugins/annotations.js?
  if (Dygraph.addedAnnotationCSS) return;

  var rule = "border: 1px solid black; " +
      "background-color: white; " +
      "text-align: center;";

  var styleSheetElement = document.createElement("style");
  styleSheetElement.type = "text/css";
  document.getElementsByTagName("head")[0].appendChild(styleSheetElement);

  // Find the first style sheet that we can access.
  // We may not add a rule to a style sheet from another domain for security
  // reasons. This sometimes comes up when using gviz, since the Google gviz JS
  // adds its own style sheets from google.com.
  for (var i = 0; i < document.styleSheets.length; i++) {
    if (document.styleSheets[i].disabled) continue;
    var mysheet = document.styleSheets[i];
    try {
      if (mysheet.insertRule) {  // Firefox
        var idx = mysheet.cssRules ? mysheet.cssRules.length : 0;
        mysheet.insertRule(".dygraphDefaultAnnotation { " + rule + " }", idx);
      } else if (mysheet.addRule) {  // IE
        mysheet.addRule(".dygraphDefaultAnnotation", rule);
      }
      Dygraph.addedAnnotationCSS = true;
      return;
    } catch(err) {
      // Was likely a security exception; try the next sheet.
    }
  }

  // BUG FIX: this is a static function, so `this` is Dygraph only when it is
  // invoked as Dygraph.addAnnotationRule(); a detached reference would make
  // `this.warn` fail. Refer to Dygraph.warn explicitly.
  Dygraph.warn("Unable to add default annotation CSS rule; display may be off.");
};
// Older pages may still use this name; kept as a backwards-compatible alias.
var DateGraph = Dygraph;
| dygraph.js | /**
* @license
* Copyright 2006 Dan Vanderkam ([email protected])
* MIT-licensed (http://opensource.org/licenses/MIT)
*/
/**
* @fileoverview Creates an interactive, zoomable graph based on a CSV file or
* string. Dygraph can handle multiple series with or without error bars. The
* date/value ranges will be automatically set. Dygraph uses the
* <canvas> tag, so it only works in FF1.5+.
* @author [email protected] (Dan Vanderkam)
Usage:
<div id="graphdiv" style="width:800px; height:500px;"></div>
<script type="text/javascript">
new Dygraph(document.getElementById("graphdiv"),
"datafile.csv", // CSV file with headers
{ }); // options
</script>
The CSV file is of the form
Date,SeriesA,SeriesB,SeriesC
YYYYMMDD,A1,B1,C1
YYYYMMDD,A2,B2,C2
If the 'errorBars' option is set in the constructor, the input should be of
the form
Date,SeriesA,SeriesB,...
YYYYMMDD,A1,sigmaA1,B1,sigmaB1,...
YYYYMMDD,A2,sigmaA2,B2,sigmaB2,...
If the 'fractions' option is set, the input should be of the form:
Date,SeriesA,SeriesB,...
YYYYMMDD,A1/B1,A2/B2,...
YYYYMMDD,A1/B1,A2/B2,...
And error bars will be calculated automatically using a binomial distribution.
For further documentation and examples, see http://dygraphs.com/
*/
/*jshint globalstrict: true */
/*global DygraphLayout:false, DygraphCanvasRenderer:false, DygraphOptions:false, G_vmlCanvasManager:false */
"use strict";
/**
* Creates an interactive, zoomable chart.
*
* @constructor
* @param {div | String} div A div or the id of a div into which to construct
* the chart.
* @param {String | Function} file A file containing CSV data or a function
* that returns this data. The most basic expected format for each line is
* "YYYY/MM/DD,val1,val2,...". For more information, see
* http://dygraphs.com/data.html.
* @param {Object} attrs Various other attributes, e.g. errorBars determines
* whether the input data contains error ranges. For a complete list of
* options, see http://dygraphs.com/options.html.
*/
var Dygraph = function(div, data, opts, opt_fourth_param) {
  // Old versions of dygraphs took the series labels as a fourth constructor
  // parameter. That usage no longer makes sense, but it's easy to keep
  // supporting it.
  if (opt_fourth_param === undefined) {
    this.__init__(div, data, opts);
  } else {
    this.warn("Using deprecated four-argument dygraph constructor");
    this.__old_init__(div, data, opts, opt_fourth_param);
  }
};
Dygraph.NAME = "Dygraph";  // class name, used by __repr__/toString
Dygraph.VERSION = "1.2";   // library version, used by __repr__/toString
// Returns e.g. "[Dygraph 1.2]".
Dygraph.__repr__ = function() {
  return "[" + this.NAME + " " + this.VERSION + "]";
};
/**
* Returns information about the Dygraph class.
*/
Dygraph.toString = function() {
  // Delegates to __repr__ for the "[Dygraph <version>]" description.
  return this.__repr__();
};
// Various default values
Dygraph.DEFAULT_ROLL_PERIOD = 1;   // points per rolling-average window
Dygraph.DEFAULT_WIDTH = 480;       // px, used when the div has no explicit size
Dygraph.DEFAULT_HEIGHT = 320;      // px, used when the div has no explicit size
// For max 60 Hz. animation:
Dygraph.ANIMATION_STEPS = 12;
Dygraph.ANIMATION_DURATION = 200;  // ms
// These are defined before DEFAULT_ATTRS so that it can refer to them.
/**
* @private
* Return a string version of a number. This respects the digitsAfterDecimal
* and maxNumberWidth options.
* @param {Number} x The number to be formatted
* @param {Dygraph} opts An options view
* @param {String} name The name of the point's data series
* @param {Dygraph} g The dygraph object
*/
/**
 * Formats a number for display, honoring the sigFigs, digitsAfterDecimal and
 * maxNumberWidth options.
 * @param {Number} x The number to be formatted
 * @param {Dygraph} opts An options view
 * @param {String} pt The name of the point's data series
 * @param {Dygraph} g The dygraph object
 * @private
 */
Dygraph.numberValueFormatter = function(x, opts, pt, g) {
  var sigFigs = opts('sigFigs');
  if (sigFigs !== null) {
    // The user has opted for a fixed number of significant figures.
    return Dygraph.floatFormat(x, sigFigs);
  }

  var digits = opts('digitsAfterDecimal');
  var maxNumberWidth = opts('maxNumberWidth');

  // Fall back to scientific notation when fixed-point display would overflow
  // (too many integer digits) or underflow (the value would round to zero).
  var tooBig = Math.abs(x) >= Math.pow(10, maxNumberWidth);
  var tooSmall = Math.abs(x) < Math.pow(10, -digits);
  if (x !== 0.0 && (tooBig || tooSmall)) {
    return x.toExponential(digits);
  }
  return '' + Dygraph.round_(x, digits);
};
/**
* variant for use as an axisLabelFormatter.
* @private
*/
/**
 * Variant of numberValueFormatter for use as an axisLabelFormatter.
 * @private
 */
Dygraph.numberAxisLabelFormatter = function(x, granularity, opts, g) {
  // BUG FIX: numberValueFormatter's signature is (x, opts, pt, g). The old
  // call passed g in the pt slot and left the g parameter undefined. Harmless
  // today (pt and g are unused there) but misaligned and fragile.
  return Dygraph.numberValueFormatter(x, opts, null, g);
};
/**
* Convert a JS date (millis since epoch) to YYYY/MM/DD
* @param {Number} date The JavaScript date (ms since epoch)
* @return {String} A date of the form "YYYY/MM/DD"
* @private
*/
/**
 * Converts a JS date (millis since epoch) to "YYYY/MM/DD", with an
 * " HH:MM:SS" suffix appended when the time-of-day is non-zero.
 * @param {Number} date The JavaScript date (ms since epoch)
 * @return {String} A date of the form "YYYY/MM/DD"
 * @private
 */
Dygraph.dateString_ = function(date) {
  var zeropad = Dygraph.zeropad;
  var d = new Date(date);

  var year = "" + d.getFullYear();
  var month = zeropad(d.getMonth() + 1);  // months are 0-offset, sigh
  var day = zeropad(d.getDate());

  // Append the time only when it is not exactly midnight.
  var secondsIntoDay = d.getHours() * 3600 + d.getMinutes() * 60 + d.getSeconds();
  var suffix = secondsIntoDay ? " " + Dygraph.hmsString_(date) : "";
  return year + "/" + month + "/" + day + suffix;
};
/**
* Convert a JS date to a string appropriate to display on an axis that
* is displaying values at the stated granularity.
* @param {Date} date The date to format
* @param {Number} granularity One of the Dygraph granularity constants
* @return {String} The formatted date
* @private
*/
Dygraph.dateAxisFormatter = function(date, granularity) {
  if (granularity >= Dygraph.DECADAL) {
    // Coarse ticks: year only.
    return date.strftime('%Y');
  } else if (granularity >= Dygraph.MONTHLY) {
    return date.strftime('%b %y');
  } else {
    // Milliseconds since local midnight; zero means a day boundary.
    var frac = date.getHours() * 3600 + date.getMinutes() * 60 + date.getSeconds() + date.getMilliseconds();
    if (frac === 0 || granularity >= Dygraph.DAILY) {
      // NOTE(review): an hour is added before formatting the day label —
      // presumably to keep midnight ticks on the intended day across DST
      // transitions; confirm before changing.
      return new Date(date.getTime() + 3600*1000).strftime('%d%b');
    } else {
      return Dygraph.hmsString_(date.getTime());
    }
  }
};
/**
* Standard plotters. These may be used by clients.
* Available plotters are:
* - Dygraph.Plotters.linePlotter: draws central lines (most common)
* - Dygraph.Plotters.errorPlotter: draws error bars
* - Dygraph.Plotters.fillPlotter: draws fills under lines (used with fillGraph)
*
* By default, the plotter is [fillPlotter, errorPlotter, linePlotter].
* This causes all the lines to be drawn over all the fills/error bars.
*/
// Re-export the canvas renderer's standard plotters under Dygraph.Plotters.
Dygraph.Plotters = DygraphCanvasRenderer._Plotters;
// Default attribute values.
Dygraph.DEFAULT_ATTRS = {
  // Legend / label display.
  highlightCircleSize: 3,
  highlightSeriesOpts: null,
  highlightSeriesBackgroundAlpha: 0.5,
  labelsDivWidth: 250,
  labelsDivStyles: {
    // TODO(danvk): move defaults from createStatusMessage_ here.
  },
  labelsSeparateLines: false,
  labelsShowZeroValues: true,
  labelsKMB: false,
  labelsKMG2: false,
  showLabelsOnHighlight: true,

  // Number formatting (see Dygraph.numberValueFormatter).
  digitsAfterDecimal: 2,
  maxNumberWidth: 6,
  sigFigs: null,

  // Line drawing.
  strokeWidth: 1.0,
  strokeBorderWidth: 0,
  strokeBorderColor: "white",

  // Axis layout.
  axisTickSize: 3,
  axisLabelFontSize: 14,
  xAxisLabelWidth: 50,
  yAxisLabelWidth: 50,
  rightGap: 5,

  showRoller: false,
  xValueParser: Dygraph.dateParser,
  delimiter: ',',

  // Error-bar / fractions handling.
  sigma: 2.0,
  errorBars: false,
  fractions: false,
  wilsonInterval: true,  // only relevant if fractions is true
  customBars: false,
  fillGraph: false,
  fillAlpha: 0.15,
  connectSeparatedPoints: false,

  stackedGraph: false,
  hideOverlayOnMouseOut: true,

  // TODO(danvk): support 'onmouseover' and 'never', and remove synonyms.
  legend: 'onmouseover',  // the only relevant value at the moment is 'always'.

  stepPlot: false,
  avoidMinZero: false,
  drawAxesAtZero: false,

  // Sizes of the various chart labels.
  titleHeight: 28,
  xLabelHeight: 18,
  yLabelWidth: 18,

  // Axis and grid appearance.
  drawXAxis: true,
  drawYAxis: true,
  axisLineColor: "black",
  axisLineWidth: 0.3,
  gridLineWidth: 0.3,
  axisLabelColor: "black",
  axisLabelFont: "Arial",  // TODO(danvk): is this implemented?
  axisLabelWidth: 50,
  drawYGrid: true,
  drawXGrid: true,
  gridLineColor: "rgb(128,128,128)",

  interactionModel: null,  // will be set to Dygraph.Interaction.defaultModel
  animatedZooms: false,  // (for now)

  // Range selector options
  showRangeSelector: false,
  rangeSelectorHeight: 40,
  rangeSelectorPlotStrokeColor: "#808FAB",
  rangeSelectorPlotFillColor: "#A7B1C4",

  // The ordering here ensures that central lines always appear above any
  // fill bars/error bars.
  plotter: [
    Dygraph.Plotters.fillPlotter,
    Dygraph.Plotters.errorPlotter,
    Dygraph.Plotters.linePlotter
  ],

  plugins: [ ],

  // per-axis options
  axes: {
    x: {
      pixelsPerLabel: 60,
      axisLabelFormatter: Dygraph.dateAxisFormatter,
      valueFormatter: Dygraph.dateString_,
      ticker: null  // will be set in dygraph-tickers.js
    },
    y: {
      pixelsPerLabel: 30,
      valueFormatter: Dygraph.numberValueFormatter,
      axisLabelFormatter: Dygraph.numberAxisLabelFormatter,
      ticker: null  // will be set in dygraph-tickers.js
    },
    y2: {
      pixelsPerLabel: 30,
      valueFormatter: Dygraph.numberValueFormatter,
      axisLabelFormatter: Dygraph.numberAxisLabelFormatter,
      ticker: null  // will be set in dygraph-tickers.js
    }
  }
};
// Directions for panning and zooming. Use bit operations when combined
// values are possible (e.g. HORIZONTAL | VERTICAL).
Dygraph.HORIZONTAL = 1;
Dygraph.VERTICAL = 2;
// Installed plugins, in order of precedence (most-general to most-specific).
// Plugins are installed after they are defined, in plugins/install.js.
Dygraph.PLUGINS = [
];
// Used for initializing annotation CSS rules only once.
Dygraph.addedAnnotationCSS = false;
Dygraph.prototype.__old_init__ = function(div, file, labels, attrs) {
  // Labels is no longer a constructor parameter: it's typically set directly
  // from the data source, and it also contains a name for the x-axis, which
  // the previous constructor form did not.
  if (labels !== null) {
    var new_labels = ["Date"];
    for (var i = 0; i < labels.length; i++) {
      new_labels.push(labels[i]);
    }
    Dygraph.update(attrs, { 'labels': new_labels });
  }
  this.__init__(div, file, attrs);
};
/**
* Initializes the Dygraph. This creates a new DIV and constructs the PlotKit
* and context <canvas> inside of it. See the constructor for details.
* on the parameters.
* @param {Element} div the Element to render the graph into.
* @param {String | Function} file Source data
* @param {Object} attrs Miscellaneous other options
* @private
*/
Dygraph.prototype.__init__ = function(div, file, attrs) {
  // Hack for IE: if we're using excanvas and the document hasn't finished
  // loading yet (and hence may not have initialized whatever it needs to
  // initialize), then keep calling this routine periodically until it has.
  if (/MSIE/.test(navigator.userAgent) && !window.opera &&
      typeof(G_vmlCanvasManager) != 'undefined' &&
      document.readyState != 'complete') {
    var self = this;
    setTimeout(function() { self.__init__(div, file, attrs); }, 100);
    return;
  }

  // Support two-argument constructor
  if (attrs === null || attrs === undefined) { attrs = {}; }

  attrs = Dygraph.mapLegacyOptions_(attrs);

  // The div may be given by id rather than as an element.
  if (typeof(div) == 'string') {
    div = document.getElementById(div);
  }

  if (!div) {
    Dygraph.error("Constructing dygraph with a non-existent div!");
    return;
  }

  this.isUsingExcanvas_ = typeof(G_vmlCanvasManager) != 'undefined';

  // Copy the important bits into the object
  // TODO(danvk): most of these should just stay in the attrs_ dictionary.
  this.maindiv_ = div;
  this.file_ = file;
  this.rollPeriod_ = attrs.rollPeriod || Dygraph.DEFAULT_ROLL_PERIOD;
  this.previousVerticalX_ = -1;
  this.fractions_ = attrs.fractions || false;
  this.dateWindow_ = attrs.dateWindow || null;

  this.is_initial_draw_ = true;
  this.annotations_ = [];

  // Zoomed indicators - These indicate when the graph has been zoomed and on what axis.
  this.zoomed_x_ = false;
  this.zoomed_y_ = false;

  // Clear the div. This ensure that, if multiple dygraphs are passed the same
  // div, then only one will be drawn.
  div.innerHTML = "";

  // For historical reasons, the 'width' and 'height' options trump all CSS
  // rules _except_ for an explicit 'width' or 'height' on the div.
  // As an added convenience, if the div has zero height (like <div></div> does
  // without any styles), then we use a default height/width.
  if (div.style.width === '' && attrs.width) {
    div.style.width = attrs.width + "px";
  }
  if (div.style.height === '' && attrs.height) {
    div.style.height = attrs.height + "px";
  }
  if (div.style.height === '' && div.clientHeight === 0) {
    div.style.height = Dygraph.DEFAULT_HEIGHT + "px";
    if (div.style.width === '') {
      div.style.width = Dygraph.DEFAULT_WIDTH + "px";
    }
  }
  // these will be zero if the dygraph's div is hidden.
  this.width_ = div.clientWidth;
  this.height_ = div.clientHeight;

  // TODO(danvk): set fillGraph to be part of attrs_ here, not user_attrs_.
  if (attrs.stackedGraph) {
    attrs.fillGraph = true;
    // TODO(nikhilk): Add any other stackedGraph checks here.
  }

  // DEPRECATION WARNING: All option processing should be moved from
  // attrs_ and user_attrs_ to options_, which holds all this information.
  //
  // Dygraphs has many options, some of which interact with one another.
  // To keep track of everything, we maintain two sets of options:
  //
  //  this.user_attrs_   only options explicitly set by the user.
  //  this.attrs_        defaults, options derived from user_attrs_, data.
  //
  // Options are then accessed this.attr_('attr'), which first looks at
  // user_attrs_ and then computed attrs_. This way Dygraphs can set intelligent
  // defaults without overriding behavior that the user specifically asks for.
  this.user_attrs_ = {};
  Dygraph.update(this.user_attrs_, attrs);

  // This sequence ensures that Dygraph.DEFAULT_ATTRS is never modified.
  this.attrs_ = {};
  Dygraph.updateDeep(this.attrs_, Dygraph.DEFAULT_ATTRS);

  this.boundaryIds_ = [];
  this.setIndexByName_ = {};
  this.datasetIndex_ = [];

  this.registeredEvents_ = [];
  this.eventListeners_ = {};

  this.attributes_ = new DygraphOptions(this);

  // Create the containing DIV and other interactive elements
  this.createInterface_();

  // Activate plugins.
  this.plugins_ = [];
  var plugins = Dygraph.PLUGINS.concat(this.getOption('plugins'));
  // (note: `var i` is function-scoped and intentionally re-used below)
  for (var i = 0; i < plugins.length; i++) {
    var Plugin = plugins[i];
    var pluginInstance = new Plugin();
    var pluginDict = {
      plugin: pluginInstance,
      events: {},
      options: {},
      pluginOptions: {}
    };

    var handlers = pluginInstance.activate(this);
    for (var eventName in handlers) {
      // TODO(danvk): validate eventName.
      pluginDict.events[eventName] = handlers[eventName];
    }

    this.plugins_.push(pluginDict);
  }

  // At this point, plugins can no longer register event handlers.
  // Construct a map from event -> ordered list of [callback, plugin].
  for (var i = 0; i < this.plugins_.length; i++) {
    var plugin_dict = this.plugins_[i];
    for (var eventName in plugin_dict.events) {
      if (!plugin_dict.events.hasOwnProperty(eventName)) continue;
      var callback = plugin_dict.events[eventName];

      var pair = [plugin_dict.plugin, callback];
      if (!(eventName in this.eventListeners_)) {
        this.eventListeners_[eventName] = [pair];
      } else {
        this.eventListeners_[eventName].push(pair);
      }
    }
  }

  this.createDragInterface_();

  // Kick off the initial data load / draw.
  this.start_();
};
/**
 * Triggers a cascade of events to the various plugins which are interested in them.
 * Returns true if there were no listeners registered for this event; otherwise
 * returns whether some listener called event.preventDefault(). (NOTE: these two
 * conventions disagree — see the implementation before relying on the
 * no-listener path.)
 * @private
 */
Dygraph.prototype.cascadeEvents_ = function(name, extra_props) {
  if (!(name in this.eventListeners_)) return true;

  // QUESTION: can we use objects & prototypes to speed this up?
  // The synthetic event object handed to each plugin callback.
  var e = {
    dygraph: this,
    cancelable: false,  // callers may override via extra_props
    defaultPrevented: false,
    preventDefault: function() {
      if (!e.cancelable) throw "Cannot call preventDefault on non-cancelable event.";
      e.defaultPrevented = true;
    },
    propagationStopped: false,
    stopPropagation: function() {
      e.propagationStopped = true;
    }
  };
  Dygraph.update(e, extra_props);

  var callback_plugin_pairs = this.eventListeners_[name];
  if (callback_plugin_pairs) {
    // Iterate in reverse so the most-specific (last-installed) plugin sees
    // the event first and can stop propagation to more general plugins.
    for (var i = callback_plugin_pairs.length - 1; i >= 0; i--) {
      var plugin = callback_plugin_pairs[i][0];
      var callback = callback_plugin_pairs[i][1];
      callback.call(plugin, e);
      if (e.propagationStopped) break;
    }
  }
  // NOTE(review): this returns true when preventDefault() WAS called, yet the
  // no-listener early return above also yields true — the two conventions
  // disagree. Callers appear to treat a truthy return as "suppress default";
  // confirm before changing either path.
  return e.defaultPrevented;
};
/**
* Returns the zoomed status of the chart for one or both axes.
*
* Axis is an optional parameter. Can be set to 'x' or 'y'.
*
* The zoomed status for an axis is set whenever a user zooms using the mouse
* or when the dateWindow or valueRange are updated (unless the
* isZoomedIgnoreProgrammaticZoom option is also specified).
*/
/**
 * Reports the zoomed status of the chart for one or both axes.
 *
 * With no argument (or null), returns whether either axis is zoomed; with
 * 'x' or 'y', reports that axis alone. The zoomed status for an axis is set
 * whenever a user zooms using the mouse or when the dateWindow or valueRange
 * are updated (unless isZoomedIgnoreProgrammaticZoom is also specified).
 */
Dygraph.prototype.isZoomed = function(axis) {
  switch (axis) {
    case null:
    case undefined:
      return this.zoomed_x_ || this.zoomed_y_;
    case 'x':
      return this.zoomed_x_;
    case 'y':
      return this.zoomed_y_;
    default:
      throw "axis parameter is [" + axis + "] must be null, 'x' or 'y'.";
  }
};
/**
* Returns information about the Dygraph object, including its containing ID.
*/
/**
 * Returns a string describing this Dygraph instance, including the id of its
 * containing div.
 */
Dygraph.prototype.toString = function() {
  var maindiv = this.maindiv_;
  // Prefer the div's id; fall back to the div itself (or null/undefined).
  var id = maindiv ? (maindiv.id || maindiv) : maindiv;
  return "[Dygraph " + id + "]";
};
/**
* @private
* Returns the value of an option. This may be set by the user (either in the
* constructor or by calling updateOptions) or by dygraphs, and may be set to a
* per-series value.
* @param { String } name The name of the option, e.g. 'rollPeriod'.
* @param { String } [seriesName] The name of the series to which the option
* will be applied. If no per-series value of this option is available, then
* the global value is returned. This is optional.
* @return { ... } The value of the option.
*/
Dygraph.prototype.attr_ = function(name, seriesName) {
  // <REMOVE_FOR_COMBINED>
  // Development-only sanity check: every option dygraphs reads must have an
  // entry in OPTIONS_REFERENCE. This section is stripped from the combined
  // (production) build.
  if (typeof(Dygraph.OPTIONS_REFERENCE) === 'undefined') {
    this.error('Must include options reference JS for testing');
  } else if (!Dygraph.OPTIONS_REFERENCE.hasOwnProperty(name)) {
    this.error('Dygraphs is using property ' + name + ', which has no entry ' +
        'in the Dygraphs.OPTIONS_REFERENCE listing.');
    // Only log this error once.
    Dygraph.OPTIONS_REFERENCE[name] = true;
  }
  // </REMOVE_FOR_COMBINED>
  return seriesName ? this.attributes_.getForSeries(name, seriesName) : this.attributes_.get(name);
};
/**
* Returns the current value for an option, as set in the constructor or via
* updateOptions. You may pass in an (optional) series name to get per-series
* values for the option.
*
* All values returned by this method should be considered immutable. If you
* modify them, there is no guarantee that the changes will be honored or that
* dygraphs will remain in a consistent state. If you want to modify an option,
* use updateOptions() instead.
*
* @param { String } name The name of the option (e.g. 'strokeWidth')
* @param { String } [opt_seriesName] Series name to get per-series values.
* @return { ... } The value of the option.
*/
Dygraph.prototype.getOption = function(name, opt_seriesName) {
  // Public wrapper around attr_; see attr_ for resolution order.
  return this.attr_(name, opt_seriesName);
};
/**
 * Returns the value of an option on a per-axis basis.
 * @param { String } name The name of the option (e.g. 'gridLineColor')
 * @param { String } axis The axis name ('x', 'y' or 'y2')
 */
Dygraph.prototype.getOptionForAxis = function(name, axis) {
  return this.attributes_.getForAxis(name, axis);
};
/**
* @private
* @param {String} axis The name of the axis (i.e. 'x', 'y' or 'y2')
* @return { ... } A function mapping string -> option value
*/
Dygraph.prototype.optionsViewForAxis_ = function(axis) {
  var self = this;
  // Returns a closure resolving an option for the given axis, in decreasing
  // order of precedence:
  //   1. user-specified per-axis option
  //   2. user-specified global option
  //   3. default per-axis option
  //   4. legacy this.axes_[] options (deprecated)
  //   5. global default via attr_()
  return function(opt) {
    var axis_opts = self.user_attrs_.axes;
    if (axis_opts && axis_opts[axis] && axis_opts[axis][opt]) {
      return axis_opts[axis][opt];
    }
    // user-specified attributes always trump defaults, even if they're less
    // specific.
    if (typeof(self.user_attrs_[opt]) != 'undefined') {
      return self.user_attrs_[opt];
    }
    axis_opts = self.attrs_.axes;
    if (axis_opts && axis_opts[axis] && axis_opts[axis][opt]) {
      return axis_opts[axis][opt];
    }
    // check old-style axis options
    // TODO(danvk): add a deprecation warning if either of these match.
    if (axis == 'y' && self.axes_[0].hasOwnProperty(opt)) {
      return self.axes_[0][opt];
    } else if (axis == 'y2' && self.axes_[1].hasOwnProperty(opt)) {
      return self.axes_[1][opt];
    }
    return self.attr_(opt);
  };
};
/**
* Returns the current rolling period, as set by the user or an option.
* @return {Number} The number of points in the rolling window
*/
/**
 * Returns the current rolling period, as set by the user or an option.
 * @return {Number} The number of points in the rolling window
 */
Dygraph.prototype.rollPeriod = function() {
  var period = this.rollPeriod_;
  return period;
};
/**
* Returns the currently-visible x-range. This can be affected by zooming,
* panning or a call to updateOptions.
* Returns a two-element array: [left, right].
* If the Dygraph has dates on the x-axis, these will be millis since epoch.
*/
/**
 * Returns the currently-visible x-range as [left, right]. Affected by
 * zooming, panning and updateOptions. For date x-axes the values are millis
 * since epoch.
 */
Dygraph.prototype.xAxisRange = function() {
  if (this.dateWindow_) {
    return this.dateWindow_;
  }
  return this.xAxisExtremes();
};
/**
* Returns the lower- and upper-bound x-axis values of the
* data set.
*/
/**
 * Returns the lower- and upper-bound x-axis values of the data set.
 */
Dygraph.prototype.xAxisExtremes = function() {
  var rows = this.rawData_;
  // First column of the first and last rows are the x extremes.
  return [rows[0][0], rows[rows.length - 1][0]];
};
/**
* Returns the currently-visible y-range for an axis. This can be affected by
* zooming, panning or a call to updateOptions. Axis indices are zero-based. If
* called with no arguments, returns the range of the first axis.
* Returns a two-element array: [bottom, top].
*/
/**
 * Returns the currently-visible y-range for an axis as [bottom, top], or
 * null for an out-of-range axis index. Axis indices are zero-based; with no
 * argument, the first axis is used.
 */
Dygraph.prototype.yAxisRange = function(idx) {
  if (typeof(idx) == "undefined") idx = 0;
  if (idx >= 0 && idx < this.axes_.length) {
    var range = this.axes_[idx].computedValueRange;
    return [range[0], range[1]];
  }
  return null;
};
/**
* Returns the currently-visible y-ranges for each axis. This can be affected by
* zooming, panning, calls to updateOptions, etc.
* Returns an array of [bottom, top] pairs, one for each y-axis.
*/
/**
 * Returns an array of [bottom, top] pairs, one per y-axis. Affected by
 * zooming, panning, calls to updateOptions, etc.
 */
Dygraph.prototype.yAxisRanges = function() {
  var ranges = [];
  for (var idx = 0; idx < this.axes_.length; idx++) {
    ranges.push(this.yAxisRange(idx));
  }
  return ranges;
};
// TODO(danvk): use these functions throughout dygraphs.
/**
* Convert from data coordinates to canvas/div X/Y coordinates.
* If specified, do this conversion for the coordinate system of a particular
* axis. Uses the first axis by default.
* Returns a two-element array: [X, Y]
*
* Note: use toDomXCoord instead of toDomCoords(x, null) and use toDomYCoord
* instead of toDomCoords(null, y, axis).
*/
/**
 * Converts data coordinates to canvas/div [X, Y] pixel coordinates,
 * optionally in the coordinate system of a particular axis (first axis by
 * default). Use toDomXCoord/toDomYCoord for a single coordinate.
 */
Dygraph.prototype.toDomCoords = function(x, y, axis) {
  var domX = this.toDomXCoord(x);
  var domY = this.toDomYCoord(y, axis);
  return [domX, domY];
};
/**
* Convert from data x coordinates to canvas/div X coordinate.
* If specified, do this conversion for the coordinate system of a particular
* axis.
* Returns a single value or null if x is null.
*/
/**
 * Converts a data x-value to a canvas/div X pixel coordinate.
 * Returns null if x is null.
 */
Dygraph.prototype.toDomXCoord = function(x) {
  if (x === null) {
    return null;
  }
  var area = this.plotter_.area;
  var xRange = this.xAxisRange();
  // Fraction of the way across the visible x-range.
  var fraction = (x - xRange[0]) / (xRange[1] - xRange[0]);
  return area.x + fraction * area.w;
};
/**
* Convert from data x coordinates to canvas/div Y coordinate and optional
* axis. Uses the first axis by default.
*
* returns a single value or null if y is null.
*/
/**
 * Converts a data y-value to a canvas/div Y pixel coordinate, optionally on
 * a particular axis (first axis by default). Returns null if y is null.
 */
Dygraph.prototype.toDomYCoord = function(y, axis) {
  var pct = this.toPercentYCoord(y, axis);
  if (pct === null) {
    return null;
  }
  var area = this.plotter_.area;
  return pct * area.h + area.y;
};
/**
* Convert from canvas/div coords to data coordinates.
* If specified, do this conversion for the coordinate system of a particular
* axis. Uses the first axis by default.
* Returns a two-element array: [X, Y].
*
* Note: use toDataXCoord instead of toDataCoords(x, null) and use toDataYCoord
* instead of toDataCoords(null, y, axis).
*/
/**
 * Converts canvas/div pixel coordinates to data [X, Y] coordinates,
 * optionally in the coordinate system of a particular axis (first axis by
 * default). Use toDataXCoord/toDataYCoord for a single coordinate.
 */
Dygraph.prototype.toDataCoords = function(x, y, axis) {
  var dataX = this.toDataXCoord(x);
  var dataY = this.toDataYCoord(y, axis);
  return [dataX, dataY];
};
/**
* Convert from canvas/div x coordinate to data coordinate.
*
* If x is null, this returns null.
*/
/**
 * Converts a canvas/div X pixel coordinate to a data x-value.
 * Returns null if x is null.
 */
Dygraph.prototype.toDataXCoord = function(x) {
  if (x === null) {
    return null;
  }
  var area = this.plotter_.area;
  var xRange = this.xAxisRange();
  // Fraction of the way across the plotting area.
  var fraction = (x - area.x) / area.w;
  return xRange[0] + fraction * (xRange[1] - xRange[0]);
};
/**
* Convert from canvas/div y coord to value.
*
* If y is null, this returns null.
* if axis is null, this uses the first axis.
*/
/**
 * Converts a canvas/div Y pixel coordinate to a data value.
 * Returns null if y is null; uses the first axis when axis is omitted.
 */
Dygraph.prototype.toDataYCoord = function(y, axis) {
  if (y === null) {
    return null;
  }
  // BUG FIX: default `axis` before its first use. The old code called
  // this.yAxisRange(axis) with axis still undefined and only then defaulted
  // it — which worked solely because yAxisRange applies the same default.
  if (typeof(axis) == "undefined") axis = 0;

  var area = this.plotter_.area;
  var yRange = this.yAxisRange(axis);

  if (!this.axes_[axis].logscale) {
    // Linear axis: invert the linear pixel->value mapping.
    return yRange[0] + (area.y + area.h - y) / area.h * (yRange[1] - yRange[0]);
  } else {
    // Log axis: invert toPercentYCoord. Starting from
    //   pct = (logr1 - log10(y)) / (logr1 - log10(yRange[0]))
    // solving for the value gives
    //   log10(value) = logr1 - pct * (logr1 - log10(yRange[0]))
    // so the value is LOG_SCALE raised to that exponent.
    var pct = (y - area.y) / area.h;
    var logr1 = Dygraph.log10(yRange[1]);
    var exponent = logr1 - (pct * (logr1 - Dygraph.log10(yRange[0])));
    return Math.pow(Dygraph.LOG_SCALE, exponent);
  }
};
/**
 * Converts a data y-value on a given axis into a fraction of the drawing
 * area measured from the top.
 *
 * Values visible on the canvas map into [0, 1] (0 = the top edge), but
 * off-canvas values yield fractions outside that range.
 *
 * If y is null, this returns null.
 * If axis is undefined, the first axis is used.
 *
 * @param { Number } y The data y-coordinate.
 * @param { Number } [axis] The axis number on which the data coordinate lives.
 * @return { Number } A fraction where 0 = the top edge.
 */
Dygraph.prototype.toPercentYCoord = function(y, axis) {
  if (y === null) {
    return null;
  }
  if (typeof(axis) == "undefined") axis = 0;
  var yRange = this.yAxisRange(axis);
  var logscale = this.attributes_.getForAxis("logscale", axis);
  if (logscale) {
    // Log scale: distance from the top in log units over the log span.
    var logTop = Dygraph.log10(yRange[1]);
    return (logTop - Dygraph.log10(y)) / (logTop - Dygraph.log10(yRange[0]));
  }
  // Linear scale: distance from the top over the full span of the range.
  return (yRange[1] - y) / (yRange[1] - yRange[0]);
};
/**
 * Converts a data x-value into a fraction of the drawing area measured from
 * the left edge.
 *
 * Visible values map into [0, 1] (0 = the left edge); off-canvas values map
 * outside that range.
 *
 * If x is null, this returns null.
 * @param { Number } x The data x-coordinate.
 * @return { Number } A fraction where 0 = the left edge.
 */
Dygraph.prototype.toPercentXCoord = function(x) {
  if (x === null) {
    return null;
  }
  var range = this.xAxisRange();
  var span = range[1] - range[0];
  return (x - range[0]) / span;
};
/**
 * Returns the number of columns (including the independent variable).
 * @return { Integer } The number of columns.
 */
Dygraph.prototype.numColumns = function() {
  // Before any data row exists, fall back to counting the labels.
  if (this.rawData_[0]) {
    return this.rawData_[0].length;
  }
  return this.attr_("labels").length;
};
/**
 * Returns the number of data rows. Header/label rows are not counted.
 * @return { Integer } The number of rows.
 */
Dygraph.prototype.numRows = function() {
  var data = this.rawData_;
  return data.length;
};
/**
 * Returns the full extent of the x-axis as determined by the most extreme
 * values in the data set. Unaffected by zooming, visibility, etc.
 * TODO(danvk): merge w/ xAxisExtremes
 * @return { Array<Number> } A [low, high] pair
 * @private
 */
Dygraph.prototype.fullXRange_ = function() {
  var rows = this.numRows();
  if (rows <= 0) {
    // No data: fall back to an arbitrary unit range.
    return [0, 1];
  }
  return [this.rawData_[0][0], this.rawData_[rows - 1][0]];
};
/**
 * Returns the value in the given row and column. Returns null if the row or
 * column is out of range, or if the stored value is itself missing (null).
 * @param { Number} row The row number of the data (0-based). Row 0 is the
 * first row of data, not a header row.
 * @param { Number} col The column number of the data (0-based)
 * @return { Number } The value in the specified cell or null if the row/col
 * were out of range.
 */
Dygraph.prototype.getValue = function(row, col) {
  // Bounds checks use >=: an index equal to the length is already out of
  // range. The previous '>' comparison let row == rawData_.length through,
  // which made the rawData_[row].length dereference below throw.
  if (row < 0 || row >= this.rawData_.length) return null;
  if (col < 0 || col >= this.rawData_[row].length) return null;
  return this.rawData_[row][col];
};
/**
 * Generates interface elements for the Dygraph: a containing div, a div to
 * display the current point, and a textbox to adjust the rolling average
 * period. Also creates the Renderer/Layout elements.
 * @private
 */
Dygraph.prototype.createInterface_ = function() {
  // Create the all-enclosing graph div
  var enclosing = this.maindiv_;
  this.graphDiv = document.createElement("div");
  this.graphDiv.style.width = this.width_ + "px";
  this.graphDiv.style.height = this.height_ + "px";
  enclosing.appendChild(this.graphDiv);
  // Create the canvas for interactive parts of the chart.
  this.canvas_ = Dygraph.createCanvas();
  this.canvas_.style.position = "absolute";
  this.canvas_.width = this.width_;
  this.canvas_.height = this.height_;
  this.canvas_.style.width = this.width_ + "px"; // for IE
  this.canvas_.style.height = this.height_ + "px"; // for IE
  this.canvas_ctx_ = Dygraph.getContext(this.canvas_);
  // ... and for static parts of the chart.
  this.hidden_ = this.createPlotKitCanvas_(this.canvas_);
  this.hidden_ctx_ = Dygraph.getContext(this.hidden_);
  // The interactive parts of the graph are drawn on top of the chart.
  this.graphDiv.appendChild(this.hidden_);
  this.graphDiv.appendChild(this.canvas_);
  this.mouseEventElement_ = this.createMouseEventElement_();
  // Create the grapher
  this.layout_ = new DygraphLayout(this);
  // Capture this graph in a closure so the handlers can reach it.
  var dygraph = this;
  this.mouseMoveHandler = function(e) {
    dygraph.mouseMove_(e);
  };
  this.addEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler);
  this.mouseOutHandler = function(e) {
    dygraph.mouseOut_(e);
  };
  this.addEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler);
  // Only install the window resize listener once — presumably this guards
  // against stacking duplicate listeners if this method runs again (TODO:
  // confirm against callers).
  if (!this.resizeHandler_) {
    this.resizeHandler_ = function(e) {
      dygraph.resize();
    };
    // Update when the window is resized.
    // TODO(danvk): drop frames depending on complexity of the chart.
    this.addEvent(window, 'resize', this.resizeHandler_);
  }
};
/**
 * Detach DOM elements in the dygraph and null out all data references.
 * Calling this when you're done with a dygraph can dramatically reduce memory
 * usage. See, e.g., the tests/perf.html example.
 */
Dygraph.prototype.destroy = function() {
  // Depth-first removal of every child node under a given node.
  var removeRecursive = function(node) {
    while (node.hasChildNodes()) {
      removeRecursive(node.firstChild);
      node.removeChild(node.firstChild);
    }
  };
  // Unhook every listener that was registered through this.addEvent().
  for (var idx = 0; idx < this.registeredEvents_.length; idx++) {
    var reg = this.registeredEvents_[idx];
    Dygraph.removeEvent(reg.elem, reg.type, reg.fn);
  }
  this.registeredEvents_ = [];
  // remove mouse event handlers (This may not be necessary anymore)
  Dygraph.removeEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler);
  Dygraph.removeEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler);
  // mouseUpHandler_ was registered on document's 'mouseup' event in
  // createDragInterface_, so it must be removed from there. The old code
  // tried to remove it from mouseEventElement_'s 'mousemove', which was a
  // no-op and leaked the handler.
  Dygraph.removeEvent(document, 'mouseup', this.mouseUpHandler_);
  removeRecursive(this.maindiv_);
  // Null out all object-valued members so large structures can be collected.
  var nullOut = function(obj) {
    for (var n in obj) {
      if (typeof(obj[n]) === 'object') {
        obj[n] = null;
      }
    }
  };
  // remove event handlers
  Dygraph.removeEvent(window,'resize',this.resizeHandler_);
  this.resizeHandler_ = null;
  // These may not all be necessary, but it can't hurt...
  nullOut(this.layout_);
  nullOut(this.plotter_);
  nullOut(this);
};
/**
 * Creates the canvas on which the static chart is drawn. Only the Renderer
 * ever draws on this canvas; interactive work (hover dots, zoom rectangles)
 * happens on this.canvas_.
 * @param {Object} canvas The Dygraph canvas over which to overlay the plot
 * @return {Object} The newly-created canvas
 * @private
 */
Dygraph.prototype.createPlotKitCanvas_ = function(canvas) {
  var overlay = Dygraph.createCanvas();
  overlay.style.position = "absolute";
  // TODO(danvk): this should be offset from canvas. canvas needs to include
  // some extra area to make it easier to zoom in on the far left and far
  // right. This one needs to be precisely the plot area, so clipping occurs.
  overlay.style.top = canvas.style.top;
  overlay.style.left = canvas.style.left;
  overlay.width = this.width_;
  overlay.height = this.height_;
  overlay.style.width = this.width_ + "px"; // for IE
  overlay.style.height = this.height_ + "px"; // for IE
  return overlay;
};
/**
 * Creates the element on which mouse events are captured. When excanvas is
 * in use, a transparent div is overlaid on the chart; otherwise the
 * interactive canvas itself serves as the target.
 * @return {Object} The mouse event element.
 * @private
 */
Dygraph.prototype.createMouseEventElement_ = function() {
  // The common case: events go straight to the interactive canvas.
  if (!this.isUsingExcanvas_) {
    return this.canvas_;
  }
  var elem = document.createElement("div");
  elem.style.position = 'absolute';
  elem.style.backgroundColor = 'white';
  elem.style.filter = 'alpha(opacity=0)';
  elem.style.width = this.width_ + "px";
  elem.style.height = this.height_ + "px";
  this.graphDiv.appendChild(elem);
  return elem;
};
/**
 * Generate a set of distinct colors for the data series. This is done with a
 * color wheel. Saturation/Value are customizable, and the hue is
 * equally-spaced around the color wheel. If a custom set of colors is
 * specified, that is used instead. Invisible series get no color entry.
 * @private
 */
Dygraph.prototype.setColors_ = function() {
  var labels = this.getLabels();
  var num = labels.length - 1;
  this.colors_ = [];
  this.colorsMap_ = {};
  var colors = this.attr_('colors');
  // Declare colorStr once; the previous code re-declared it with 'var' in
  // both branches, which JS hoisting turns into a redundant redeclaration.
  var i, colorStr;
  if (!colors) {
    var sat = this.attr_('colorSaturation') || 1.0;
    var val = this.attr_('colorValue') || 0.5;
    var half = Math.ceil(num / 2);
    for (i = 1; i <= num; i++) {
      if (!this.visibility()[i-1]) continue;
      // alternate colors for high contrast.
      var idx = i % 2 ? Math.ceil(i / 2) : (half + i / 2);
      var hue = (1.0 * idx/ (1 + num));
      colorStr = Dygraph.hsvToRGB(hue, sat, val);
      this.colors_.push(colorStr);
      this.colorsMap_[labels[i]] = colorStr;
    }
  } else {
    // A custom palette was supplied: cycle through it in order.
    for (i = 0; i < num; i++) {
      if (!this.visibility()[i]) continue;
      colorStr = colors[i % colors.length];
      this.colors_.push(colorStr);
      this.colorsMap_[labels[1 + i]] = colorStr;
    }
  }
};
/**
 * Return the list of colors: either the list passed in via the 'colors'
 * attribute or the autogenerated list of rgb(r,g,b) strings.
 * Colors for invisible series are not included.
 * @return {Array<string>} The list of colors.
 */
Dygraph.prototype.getColors = function() {
  var colors = this.colors_;
  return colors;
};
/**
 * Returns a few attributes of a series: its color, its visibility, the axis
 * it is assigned to, and its column in the original data.
 * Returns null if no series with the given name exists.
 * Otherwise returns an object with name, column, visible, color and axis
 * properties. "axis" is 1 for y1 and 2 for y2. "column" can be fed back into
 * getValue(row, column) to read values for this series.
 */
Dygraph.prototype.getPropertiesForSeries = function(series_name) {
  var labels = this.getLabels();
  // Column 0 is the x-axis label, so the search starts at column 1.
  var idx = -1;
  for (var col = 1; col < labels.length; col++) {
    if (labels[col] == series_name) {
      idx = col;
      break;
    }
  }
  if (idx < 0) return null;
  return {
    name: series_name,
    column: idx,
    visible: this.visibility()[idx - 1],
    color: this.colorsMap_[series_name],
    axis: 1 + this.attributes_.axisForSeries(series_name)
  };
};
/**
 * Create (or reposition) the text box used to adjust the rolling-average
 * period. The box is only displayed when the 'showRoller' option is set.
 * @private
 */
Dygraph.prototype.createRollInterface_ = function() {
  // Create a roller if one doesn't exist already.
  if (!this.roller_) {
    this.roller_ = document.createElement("input");
    this.roller_.type = "text";
    this.roller_.style.display = "none";
    this.graphDiv.appendChild(this.roller_);
  }
  var display = this.attr_('showRoller') ? 'block' : 'none';
  // Anchor the box near the bottom-left corner of the plot area.
  var area = this.plotter_.area;
  var textAttr = { "position": "absolute",
  "zIndex": 10,
  "top": (area.y + area.h - 25) + "px",
  "left": (area.x + 1) + "px",
  "display": display
  };
  this.roller_.size = "2";
  this.roller_.value = this.rollPeriod_;
  for (var name in textAttr) {
    if (textAttr.hasOwnProperty(name)) {
      this.roller_.style[name] = textAttr[name];
    }
  }
  // Re-roll and redraw whenever the user edits the period.
  var dygraph = this;
  this.roller_.onchange = function() { dygraph.adjustRoll(dygraph.roller_.value); };
};
/**
 * @private
 * Converts the page x-coordinate of an event into a pixel x-coordinate on
 * the canvas (i.e. DOM coords). context.px is the canvas's page offset.
 */
Dygraph.prototype.dragGetX_ = function(e, context) {
  var pageX = Dygraph.pageX(e);
  return pageX - context.px;
};
/**
 * @private
 * Converts the page y-coordinate of an event into a pixel y-coordinate on
 * the canvas (i.e. DOM coords). context.py is the canvas's page offset.
 */
Dygraph.prototype.dragGetY_ = function(e, context) {
  var pageY = Dygraph.pageY(e);
  return pageY - context.py;
};
/**
 * Set up all the mouse handlers needed to capture dragging behavior for zoom
 * events. Handlers come from the 'interactionModel' option; they all share a
 * single mutable context object describing the gesture in progress.
 * @private
 */
Dygraph.prototype.createDragInterface_ = function() {
  // Shared gesture state, read and written by the interaction-model handlers.
  var context = {
    // Tracks whether the mouse is down right now
    isZooming: false,
    isPanning: false, // is this drag part of a pan?
    is2DPan: false, // if so, is that pan 1- or 2-dimensional?
    dragStartX: null, // pixel coordinates
    dragStartY: null, // pixel coordinates
    dragEndX: null, // pixel coordinates
    dragEndY: null, // pixel coordinates
    dragDirection: null,
    prevEndX: null, // pixel coordinates
    prevEndY: null, // pixel coordinates
    prevDragDirection: null,
    cancelNextDblclick: false, // see comment in dygraph-interaction-model.js
    // The value on the left side of the graph when a pan operation starts.
    initialLeftmostDate: null,
    // The number of units each pixel spans. (This won't be valid for log
    // scales)
    xUnitsPerPixel: null,
    // TODO(danvk): update this comment
    // The range in second/value units that the viewport encompasses during a
    // panning operation.
    dateRange: null,
    // Top-left corner of the canvas, in DOM coords
    // TODO(konigsberg): Rename topLeftCanvasX, topLeftCanvasY.
    px: 0,
    py: 0,
    // Values for use with panEdgeFraction, which limit how far outside the
    // graph's data boundaries it can be panned.
    boundedDates: null, // [minDate, maxDate]
    boundedValues: null, // [[minValue, maxValue] ...]
    // We cover iframes during mouse interactions. See comments in
    // dygraph-utils.js for more info on why this is a good idea.
    tarp: new Dygraph.IFrameTarp(),
    // contextB is the same thing as this context object but renamed.
    initializeMouseDown: function(event, g, contextB) {
      // prevents mouse drags from selecting page text.
      if (event.preventDefault) {
        event.preventDefault(); // Firefox, Chrome, etc.
      } else {
        event.returnValue = false; // IE
        event.cancelBubble = true;
      }
      // Record the canvas origin so later page coords can be made relative.
      contextB.px = Dygraph.findPosX(g.canvas_);
      contextB.py = Dygraph.findPosY(g.canvas_);
      contextB.dragStartX = g.dragGetX_(event, contextB);
      contextB.dragStartY = g.dragGetY_(event, contextB);
      contextB.cancelNextDblclick = false;
      contextB.tarp.cover();
    }
  };
  var interactionModel = this.attr_("interactionModel");
  // Self is the graph.
  var self = this;
  // Function that binds the graph and context to the handler.
  var bindHandler = function(handler) {
    return function(event) {
      handler(event, self, context);
    };
  };
  // Wire up every event declared by the interaction model.
  for (var eventName in interactionModel) {
    if (!interactionModel.hasOwnProperty(eventName)) continue;
    this.addEvent(this.mouseEventElement_, eventName,
        bindHandler(interactionModel[eventName]));
  }
  // If the user releases the mouse button during a drag, but not over the
  // canvas, then it doesn't count as a zooming action.
  this.mouseUpHandler_ = function(event) {
    if (context.isZooming || context.isPanning) {
      context.isZooming = false;
      context.dragStartX = null;
      context.dragStartY = null;
    }
    if (context.isPanning) {
      context.isPanning = false;
      context.draggingDate = null;
      context.dateRange = null;
      for (var i = 0; i < self.axes_.length; i++) {
        delete self.axes_[i].draggingValue;
        delete self.axes_[i].dragValueRange;
      }
    }
    context.tarp.uncover();
  };
  // Registered on document (not the canvas) so the release is seen even when
  // it happens outside the graph.
  this.addEvent(document, 'mouseup', this.mouseUpHandler_);
};
/**
 * Draw a gray zoom rectangle over the desired area of the canvas. Also clears
 * up any previous zoom rectangles that were drawn. This could be optimized to
 * avoid extra redrawing, but it's tricky to avoid interactions with the status
 * dots.
 *
 * @param {Number} direction the direction of the zoom rectangle. Acceptable
 * values are Dygraph.HORIZONTAL and Dygraph.VERTICAL.
 * @param {Number} startX The X position where the drag started, in canvas
 * coordinates.
 * @param {Number} endX The current X position of the drag, in canvas coords.
 * @param {Number} startY The Y position where the drag started, in canvas
 * coordinates.
 * @param {Number} endY The current Y position of the drag, in canvas coords.
 * @param {Number} prevDirection the value of direction on the previous call to
 * this function. Used to avoid excess redrawing
 * @param {Number} prevEndX The value of endX on the previous call to this
 * function. Used to avoid excess redrawing
 * @param {Number} prevEndY The value of endY on the previous call to this
 * function. Used to avoid excess redrawing
 * @private
 */
Dygraph.prototype.drawZoomRect_ = function(direction, startX, endX, startY,
                                           endY, prevDirection, prevEndX,
                                           prevEndY) {
  var ctx = this.canvas_ctx_;
  // Clean up from the previous rect if necessary: erase only the strip that
  // the previous rectangle could have covered.
  if (prevDirection == Dygraph.HORIZONTAL) {
    ctx.clearRect(Math.min(startX, prevEndX), this.layout_.getPlotArea().y,
                  Math.abs(startX - prevEndX), this.layout_.getPlotArea().h);
  } else if (prevDirection == Dygraph.VERTICAL){
    ctx.clearRect(this.layout_.getPlotArea().x, Math.min(startY, prevEndY),
                  this.layout_.getPlotArea().w, Math.abs(startY - prevEndY));
  }
  // Draw a light-grey rectangle to show the new viewing area
  if (direction == Dygraph.HORIZONTAL) {
    if (endX && startX) {
      ctx.fillStyle = "rgba(128,128,128,0.33)";
      ctx.fillRect(Math.min(startX, endX), this.layout_.getPlotArea().y,
                   Math.abs(endX - startX), this.layout_.getPlotArea().h);
    }
  } else if (direction == Dygraph.VERTICAL) {
    if (endY && startY) {
      ctx.fillStyle = "rgba(128,128,128,0.33)";
      ctx.fillRect(this.layout_.getPlotArea().x, Math.min(startY, endY),
                   this.layout_.getPlotArea().w, Math.abs(endY - startY));
    }
  }
  // Remember the args so updateSelection_ can redraw the rect after it
  // clears the canvas (excanvas only).
  if (this.isUsingExcanvas_) {
    this.currentZoomRectArgs_ = [direction, startX, endX, startY, endY, 0, 0, 0];
  }
};
/**
 * Clear the zoom rectangle (and perform no zoom).
 * @private
 */
Dygraph.prototype.clearZoomRect_ = function() {
  this.currentZoomRectArgs_ = null;
  // Wipe the entire interactive canvas, not just the rectangle's strip.
  var canvas = this.canvas_;
  this.canvas_ctx_.clearRect(0, 0, canvas.width, canvas.height);
};
/**
 * Zoom to something containing [lowX, highX]. These are pixel coordinates in
 * the canvas; the exact zoom window may be slightly larger if there are no
 * data points near lowX or highX. Don't confuse this function with
 * doZoomXDates, which accepts dates matching the raw data. Redraws the graph.
 *
 * @param {Number} lowX The leftmost pixel value that should be visible.
 * @param {Number} highX The rightmost pixel value that should be visible.
 * @private
 */
Dygraph.prototype.doZoomX_ = function(lowX, highX) {
  this.currentZoomRectArgs_ = null;
  // Translate the pixel edges into data-space dates and delegate.
  this.doZoomXDates_(this.toDataXCoord(lowX), this.toDataXCoord(highX));
};
/**
 * Transition function to use in animations. Returns values between 0.0
 * (totally old values) and 1.0 (totally new values) for each frame.
 * @private
 */
Dygraph.zoomAnimationFunction = function(frame, numFrames) {
  var k = 1.5;
  // Exponential ease-out, normalized so frame == numFrames maps to 1.0.
  var numerator = 1.0 - Math.pow(k, -frame);
  var denominator = 1.0 - Math.pow(k, -numFrames);
  return numerator / denominator;
};
/**
 * Zoom to something containing [minDate, maxDate] values. Don't confuse this
 * method with doZoomX_, which accepts pixel coordinates. Redraws the graph.
 *
 * @param {Number} minDate The minimum date that should be visible.
 * @param {Number} maxDate The maximum date that should be visible.
 * @private
 */
Dygraph.prototype.doZoomXDates_ = function(minDate, maxDate) {
  // TODO(danvk): when yAxisRange is null (i.e. "fit to data", the animation
  // can produce strange effects. Rather than the y-axis transitioning slowly
  // between values, it can jerk around.)
  var oldWindow = this.xAxisRange();
  var newWindow = [minDate, maxDate];
  this.zoomed_x_ = true;
  var self = this;
  this.doAnimatedZoom(oldWindow, newWindow, null, null, function() {
    // Fire the user's zoomCallback, if any, once the animation completes.
    var cb = self.attr_("zoomCallback");
    if (cb) {
      cb(minDate, maxDate, self.yAxisRanges());
    }
  });
};
/**
 * Zoom to something containing [lowY, highY] (canvas pixel coordinates).
 * Redraws the graph.
 *
 * Note that lowY (in pixels) corresponds to the largest data value on each
 * axis, because pixel y grows downward while data values grow upward.
 *
 * @param {Number} lowY The topmost pixel value that should be visible.
 * @param {Number} highY The lowest pixel value that should be visible.
 * @private
 */
Dygraph.prototype.doZoomY_ = function(lowY, highY) {
  this.currentZoomRectArgs_ = null;
  var oldValueRanges = this.yAxisRanges();
  var newValueRanges = [];
  for (var i = 0; i < this.axes_.length; i++) {
    // highY (bottom of the rect) maps to the low end of the data range.
    var low = this.toDataYCoord(highY, i);
    var hi = this.toDataYCoord(lowY, i);
    newValueRanges.push([low, hi]);
  }
  this.zoomed_y_ = true;
  var self = this;
  this.doAnimatedZoom(null, null, oldValueRanges, newValueRanges, function() {
    // Fire the user's zoomCallback, if any, once the animation completes.
    var cb = self.attr_("zoomCallback");
    if (cb) {
      var xRange = self.xAxisRange();
      cb(xRange[0], xRange[1], self.yAxisRanges());
    }
  });
};
/**
 * Reset the zoom to the original view coordinates. This is the same as
 * double-clicking on the graph. No-op when neither axis is zoomed.
 */
Dygraph.prototype.resetZoom = function() {
  // Figure out which axes actually have a zoom window applied.
  var dirty = false, dirtyX = false, dirtyY = false;
  if (this.dateWindow_ !== null) {
    dirty = true;
    dirtyX = true;
  }
  for (var i = 0; i < this.axes_.length; i++) {
    if (typeof(this.axes_[i].valueWindow) !== 'undefined' && this.axes_[i].valueWindow !== null) {
      dirty = true;
      dirtyY = true;
    }
  }
  // Clear any selection, since it's likely to be drawn in the wrong place.
  this.clearSelection();
  if (dirty) {
    this.zoomed_x_ = false;
    this.zoomed_y_ = false;
    var minDate = this.rawData_[0][0];
    var maxDate = this.rawData_[this.rawData_.length - 1][0];
    // With only one frame, don't bother calculating extreme ranges.
    // TODO(danvk): merge this block w/ the code below.
    if (!this.attr_("animatedZooms")) {
      // Non-animated path: drop the windows and redraw once.
      this.dateWindow_ = null;
      for (i = 0; i < this.axes_.length; i++) {
        if (this.axes_[i].valueWindow !== null) {
          delete this.axes_[i].valueWindow;
        }
      }
      this.drawGraph_();
      if (this.attr_("zoomCallback")) {
        this.attr_("zoomCallback")(minDate, maxDate, this.yAxisRanges());
      }
      return;
    }
    // Animated path: interpolate from the current windows to the full extents.
    var oldWindow=null, newWindow=null, oldValueRanges=null, newValueRanges=null;
    if (dirtyX) {
      oldWindow = this.xAxisRange();
      newWindow = [minDate, maxDate];
    }
    if (dirtyY) {
      oldValueRanges = this.yAxisRanges();
      // TODO(danvk): this is pretty inefficient
      var packed = this.gatherDatasets_(this.rolledSeries_, null);
      var extremes = packed[1];
      // this has the side-effect of modifying this.axes_.
      // this doesn't make much sense in this context, but it's convenient (we
      // need this.axes_[*].extremeValues) and not harmful since we'll be
      // calling drawGraph_ shortly, which clobbers these values.
      this.computeYAxisRanges_(extremes);
      newValueRanges = [];
      for (i = 0; i < this.axes_.length; i++) {
        var axis = this.axes_[i];
        newValueRanges.push((axis.valueRange !== null &&
                             axis.valueRange !== undefined) ?
                            axis.valueRange : axis.extremeRange);
      }
    }
    var that = this;
    this.doAnimatedZoom(oldWindow, newWindow, oldValueRanges, newValueRanges,
        function() {
          // Once the animation lands, clear the windows for real.
          that.dateWindow_ = null;
          for (var i = 0; i < that.axes_.length; i++) {
            if (that.axes_[i].valueWindow !== null) {
              delete that.axes_[i].valueWindow;
            }
          }
          if (that.attr_("zoomCallback")) {
            that.attr_("zoomCallback")(minDate, maxDate, that.yAxisRanges());
          }
        });
  }
};
/**
 * Combined animation logic for all zoom functions.
 * Either the x parameters or the y parameters may be null. Precomputes the
 * per-frame windows/value-ranges, then plays them back via
 * Dygraph.repeatAndCleanup, redrawing on every frame. When 'animatedZooms'
 * is off, a single step is used (i.e. an immediate jump).
 * @private
 */
Dygraph.prototype.doAnimatedZoom = function(oldXRange, newXRange, oldYRanges, newYRanges, callback) {
  var steps = this.attr_("animatedZooms") ? Dygraph.ANIMATION_STEPS : 1;
  var windows = [];
  var valueRanges = [];
  var step, frac;
  // Precompute the interpolated x-window for each frame.
  if (oldXRange !== null && newXRange !== null) {
    for (step = 1; step <= steps; step++) {
      frac = Dygraph.zoomAnimationFunction(step, steps);
      windows[step-1] = [oldXRange[0]*(1-frac) + frac*newXRange[0],
                         oldXRange[1]*(1-frac) + frac*newXRange[1]];
    }
  }
  // Precompute the interpolated y-range for each frame and each axis.
  if (oldYRanges !== null && newYRanges !== null) {
    for (step = 1; step <= steps; step++) {
      frac = Dygraph.zoomAnimationFunction(step, steps);
      var thisRange = [];
      for (var j = 0; j < this.axes_.length; j++) {
        thisRange.push([oldYRanges[j][0]*(1-frac) + frac*newYRanges[j][0],
                        oldYRanges[j][1]*(1-frac) + frac*newYRanges[j][1]]);
      }
      valueRanges[step-1] = thisRange;
    }
  }
  var that = this;
  // Apply each precomputed frame in turn, then invoke the caller's callback.
  Dygraph.repeatAndCleanup(function(step) {
    if (valueRanges.length) {
      for (var i = 0; i < that.axes_.length; i++) {
        var w = valueRanges[step][i];
        that.axes_[i].valueWindow = [w[0], w[1]];
      }
    }
    if (windows.length) {
      that.dateWindow_ = windows[step];
    }
    that.drawGraph_();
  }, steps, Dygraph.ANIMATION_DURATION / steps, callback);
};
/**
 * Get the current graph's area object.
 *
 * Returns: {x, y, w, h}
 */
Dygraph.prototype.getArea = function() {
  var area = this.plotter_.area;
  return area;
};
/**
 * Convert a mouse event to DOM coordinates relative to the graph origin.
 *
 * Returns a two-element array: [X, Y].
 */
Dygraph.prototype.eventToDomCoords = function(event) {
  var elem = this.mouseEventElement_;
  var x = Dygraph.pageX(event) - Dygraph.findPosX(elem);
  var y = Dygraph.pageY(event) - Dygraph.findPosY(elem);
  return [x, y];
};
/**
 * Given a canvas X coordinate, find the row of the point (across all series)
 * whose canvas x is nearest to it.
 * @param {Number} domX graph-relative DOM X coordinate
 * Returns: row number, integer
 * @private
 */
Dygraph.prototype.findClosestRow = function(domX) {
  var bestDist = Infinity;
  var bestPoint = -1;
  var bestSet = -1;
  var sets = this.layout_.points;
  for (var setIdx = 0; setIdx < sets.length; setIdx++) {
    var points = sets[setIdx];
    for (var j = 0; j < points.length; j++) {
      var p = points[j];
      if (!Dygraph.isValidPoint(p, true)) continue;
      var dist = Math.abs(p.canvasx - domX);
      if (dist < bestDist) {
        bestDist = dist;
        bestSet = setIdx;
        bestPoint = j;
      }
    }
  }
  // TODO(danvk): remove this function; it's trivial and has only one use.
  return this.idxToRow_(bestSet, bestPoint);
};
/**
 * Given canvas X,Y coordinates, find the closest point.
 *
 * This finds the individual data point across all visible series
 * that's closest to the supplied DOM coordinates using the standard
 * Euclidean X,Y distance.
 *
 * @param {Number} domX graph-relative DOM X coordinate
 * @param {Number} domY graph-relative DOM Y coordinate
 * Returns: {row, seriesName, point}
 * @private
 */
Dygraph.prototype.findClosestPoint = function(domX, domY) {
  var minDist = Infinity;
  var idx = -1;
  // 'point' is declared once here; the old code re-declared it with 'var'
  // inside the loop, a redundant redeclaration of the same hoisted variable.
  var dist, dx, dy, point, closestPoint, closestSeries;
  // Iterate series from last to first, preserving the original scan order.
  for ( var setIdx = this.layout_.datasets.length - 1 ; setIdx >= 0 ; --setIdx ) {
    var points = this.layout_.points[setIdx];
    for (var i = 0; i < points.length; ++i) {
      point = points[i];
      if (!Dygraph.isValidPoint(point)) continue;
      // Squared Euclidean distance; no sqrt needed for comparison.
      dx = point.canvasx - domX;
      dy = point.canvasy - domY;
      dist = dx * dx + dy * dy;
      if (dist < minDist) {
        minDist = dist;
        closestPoint = point;
        closestSeries = setIdx;
        idx = i;
      }
    }
  }
  var name = this.layout_.setNames[closestSeries];
  return {
    row: idx + this.getLeftBoundary_(),
    seriesName: name,
    point: closestPoint
  };
};
/**
 * Given canvas X,Y coordinates, find the touched area in a stacked graph.
 *
 * This first finds the X data point closest to the supplied DOM X coordinate,
 * then finds the series which puts the Y coordinate on top of its filled area,
 * using linear interpolation between adjacent point pairs.
 *
 * @param {Number} domX graph-relative DOM X coordinate
 * @param {Number} domY graph-relative DOM Y coordinate
 * Returns: {row, seriesName, point}
 * @private
 */
Dygraph.prototype.findStackedPoint = function(domX, domY) {
  var row = this.findClosestRow(domX);
  var boundary = this.getLeftBoundary_();
  var rowIdx = row - boundary;
  var closestPoint, closestSeries;
  for (var setIdx = 0; setIdx < this.layout_.datasets.length; ++setIdx) {
    var points = this.layout_.points[setIdx];
    if (rowIdx >= points.length) continue;
    var p1 = points[rowIdx];
    if (!Dygraph.isValidPoint(p1)) continue;
    var py = p1.canvasy;
    if (domX > p1.canvasx && rowIdx + 1 < points.length) {
      // interpolate series Y value using next point
      var p2 = points[rowIdx + 1];
      if (Dygraph.isValidPoint(p2)) {
        var dx = p2.canvasx - p1.canvasx;
        if (dx > 0) {
          var r = (domX - p1.canvasx) / dx;
          py += r * (p2.canvasy - p1.canvasy);
        }
      }
    } else if (domX < p1.canvasx && rowIdx > 0) {
      // interpolate series Y value using previous point
      var p0 = points[rowIdx - 1];
      if (Dygraph.isValidPoint(p0)) {
        var dx = p1.canvasx - p0.canvasx;
        if (dx > 0) {
          var r = (p1.canvasx - domX) / dx;
          py += r * (p0.canvasy - p1.canvasy);
        }
      }
    }
    // Stop if the point (domX, py) is above this series' upper edge.
    // setIdx === 0 guarantees at least one series is selected.
    if (setIdx === 0 || py < domY) {
      closestPoint = p1;
      closestSeries = setIdx;
    }
  }
  var name = this.layout_.setNames[closestSeries];
  return {
    row: row,
    seriesName: name,
    point: closestPoint
  };
};
/**
 * When the mouse moves in the canvas, display information about a nearby data
 * point and draw dots over those points in the data series. This function
 * takes care of cleanup of previously-drawn dots.
 * @param {Object} event The mousemove event from the browser.
 * @private
 */
Dygraph.prototype.mouseMove_ = function(event) {
  // This prevents JS errors when mousing over the canvas before data loads.
  var points = this.layout_.points;
  if (points === undefined || points === null) return;
  var canvasCoords = this.eventToDomCoords(event);
  var canvasx = canvasCoords[0];
  var canvasy = canvasCoords[1];
  var highlightSeriesOpts = this.attr_("highlightSeriesOpts");
  var selectionChanged = false;
  if (highlightSeriesOpts && !this.isSeriesLocked()) {
    // Per-series highlighting: select the nearest point/series, using the
    // stacked-aware lookup for stacked graphs.
    var closest;
    if (this.attr_("stackedGraph")) {
      closest = this.findStackedPoint(canvasx, canvasy);
    } else {
      closest = this.findClosestPoint(canvasx, canvasy);
    }
    selectionChanged = this.setSelection(closest.row, closest.seriesName);
  } else {
    // Default behavior: select the whole row nearest the cursor's x.
    var idx = this.findClosestRow(canvasx);
    selectionChanged = this.setSelection(idx);
  }
  // Only fire the user's highlightCallback when the selection moved.
  var callback = this.attr_("highlightCallback");
  if (callback && selectionChanged) {
    callback(event, this.lastx_, this.selPoints_, this.lastRow_, this.highlightSet_);
  }
};
/**
 * Fetch left offset from the first defined boundaryIds record (see bug #236).
 * Returns 0 when no record is defined.
 * @private
 */
Dygraph.prototype.getLeftBoundary_ = function() {
  var ids = this.boundaryIds_;
  for (var i = 0; i < ids.length; i++) {
    var entry = ids[i];
    if (entry !== undefined) {
      return entry[0];
    }
  }
  return 0;
};
/**
 * Transforms a layout_.points index into a data row number.
 * @param {Number} setIdx The series index. Currently unused, but kept so the
 *     signature stays compatible with existing callers (e.g. findClosestRow).
 * @param {Number} rowIdx Index of the point within its series.
 * @return {Number} The row number, or -1 if rowIdx is negative.
 * @private
 */
Dygraph.prototype.idxToRow_ = function(setIdx, rowIdx) {
  if (rowIdx < 0) return -1;
  // Every series shares the same left boundary offset, so the row is just
  // the offset plus the in-series index. (A stale commented-out per-series
  // implementation was removed here.)
  var boundary = this.getLeftBoundary_();
  return boundary + rowIdx;
};
/**
 * Animate the selection-highlight fade: direction > 0 fades in, direction < 0
 * fades out. this.fadeLevel (0..totalSteps) tracks the current opacity step
 * and this.animateId is used to cancel superseded animations.
 * @param {Number} direction +1 to fade in, -1 to fade out.
 * @private
 */
Dygraph.prototype.animateSelection_ = function(direction) {
  var totalSteps = 10;
  var millis = 30;
  // Lazily initialize animation state on first use.
  if (this.fadeLevel === undefined) this.fadeLevel = 0;
  if (this.animateId === undefined) this.animateId = 0;
  var start = this.fadeLevel;
  var steps = direction < 0 ? start : totalSteps - start;
  if (steps <= 0) {
    // Already at the target level; just redraw at full strength if visible.
    if (this.fadeLevel) {
      this.updateSelection_(1.0);
    }
    return;
  }
  // Bumping the id invalidates any animation still in flight.
  var thisId = ++this.animateId;
  var that = this;
  Dygraph.repeatAndCleanup(
    function(n) {
      // ignore simultaneous animations
      if (that.animateId != thisId) return;
      that.fadeLevel += direction;
      if (that.fadeLevel === 0) {
        that.clearSelection();
      } else {
        that.updateSelection_(that.fadeLevel / totalSteps);
      }
    },
    steps, millis, function() {});
};
/**
 * Draw dots over the selected points in the data series. This function
 * takes care of cleanup of previously-drawn dots.
 * @param {Number} [opt_animFraction] Background-fade progress in [0, 1];
 *     undefined starts a new fade animation when one is configured.
 * @private
 */
Dygraph.prototype.updateSelection_ = function(opt_animFraction) {
  /*var defaultPrevented = */
  this.cascadeEvents_('select', {
    selectedX: this.lastx_,
    selectedPoints: this.selPoints_
  });
  // TODO(danvk): use defaultPrevented here?
  // Clear the previously drawn vertical, if there is one
  var i;
  var ctx = this.canvas_ctx_;
  if (this.attr_('highlightSeriesOpts')) {
    // Series-highlight mode: fade the whole chart, then redraw only the
    // highlighted series on the interactive canvas.
    ctx.clearRect(0, 0, this.width_, this.height_);
    var alpha = 1.0 - this.attr_('highlightSeriesBackgroundAlpha');
    if (alpha) {
      // Activating background fade includes an animation effect for a gradual
      // fade. TODO(klausw): make this independently configurable if it causes
      // issues? Use a shared preference to control animations?
      var animateBackgroundFade = true;
      if (animateBackgroundFade) {
        if (opt_animFraction === undefined) {
          // start a new animation
          this.animateSelection_(1);
          return;
        }
        alpha *= opt_animFraction;
      }
      ctx.fillStyle = 'rgba(255,255,255,' + alpha + ')';
      ctx.fillRect(0, 0, this.width_, this.height_);
    }
    // Redraw only the highlighted series in the interactive canvas (not the
    // static plot canvas, which is where series are usually drawn).
    this.plotter_._renderLineChart(this.highlightSet_, ctx);
  } else if (this.previousVerticalX_ >= 0) {
    // Determine the maximum highlight circle size so the clear strip is wide
    // enough to erase every previously-drawn dot.
    var maxCircleSize = 0;
    var labels = this.attr_('labels');
    for (i = 1; i < labels.length; i++) {
      var r = this.attr_('highlightCircleSize', labels[i]);
      if (r > maxCircleSize) maxCircleSize = r;
    }
    var px = this.previousVerticalX_;
    ctx.clearRect(px - maxCircleSize - 1, 0,
                  2 * maxCircleSize + 2, this.height_);
  }
  // The clear above also wiped any zoom rectangle (excanvas); restore it.
  if (this.isUsingExcanvas_ && this.currentZoomRectArgs_) {
    Dygraph.prototype.drawZoomRect_.apply(this, this.currentZoomRectArgs_);
  }
  if (this.selPoints_.length > 0) {
    // Draw colored circles over the center of each selected point
    var canvasx = this.selPoints_[0].canvasx;
    ctx.save();
    for (i = 0; i < this.selPoints_.length; i++) {
      var pt = this.selPoints_[i];
      if (!Dygraph.isOK(pt.canvasy)) continue;
      var circleSize = this.attr_('highlightCircleSize', pt.name);
      var callback = this.attr_("drawHighlightPointCallback", pt.name);
      var color = this.plotter_.colors[pt.name];
      if (!callback) {
        callback = Dygraph.Circles.DEFAULT;
      }
      ctx.lineWidth = this.attr_('strokeWidth', pt.name);
      ctx.strokeStyle = color;
      ctx.fillStyle = color;
      // NOTE(review): passes this.g as the graph argument — confirm 'g' is
      // actually set on this object; callers might expect 'this' here.
      callback(this.g, pt.name, ctx, canvasx, pt.canvasy,
          color, circleSize);
    }
    ctx.restore();
    this.previousVerticalX_ = canvasx;
  }
};
/**
 * Manually set the selected points and display information about them in the
 * legend. The selection can be cleared using clearSelection() and queried
 * using getSelection().
 * @param {number|boolean} row row number that should be highlighted (i.e.
 * appear with hover dots on the chart). Set to false to clear any selection.
 * @param {string=} opt_seriesName optional series name to highlight that
 * series with the highlightSeriesOpts setting.
 * @param {boolean=} opt_locked optional If true, keep seriesName selected when
 * mousing over the graph, disabling closest-series highlighting. Call
 * clearSelection() to unlock it.
 * @return {boolean} whether the selection actually changed (a redraw was done).
 */
Dygraph.prototype.setSelection = function(row, opt_seriesName, opt_locked) {
// Extract the points we've selected
this.selPoints_ = [];
if (row !== false) {
// Convert from absolute row index to an index within the visible window.
row -= this.getLeftBoundary_();
}
var changed = false;
if (row !== false && row >= 0) {
if (row != this.lastRow_) changed = true;
this.lastRow_ = row;
// Collect the point at this row from every dataset that has one.
for (var setIdx = 0; setIdx < this.layout_.datasets.length; ++setIdx) {
var set = this.layout_.datasets[setIdx];
if (row < set.length) {
var point = this.layout_.points[setIdx][row];
if (this.attr_("stackedGraph")) {
// For stacked graphs, report the original (un-stacked) value.
point = this.layout_.unstackPointAtIndex(setIdx, row);
}
// Points with null y values are not selectable.
if (point.yval !== null) this.selPoints_.push(point);
}
}
} else {
// row === false or out of range: clearing the selection.
if (this.lastRow_ >= 0) changed = true;
this.lastRow_ = -1;
}
if (this.selPoints_.length) {
this.lastx_ = this.selPoints_[0].xval;
} else {
this.lastx_ = -1;
}
if (opt_seriesName !== undefined) {
if (this.highlightSet_ !== opt_seriesName) changed = true;
this.highlightSet_ = opt_seriesName;
}
if (opt_locked !== undefined) {
this.lockedSet_ = opt_locked;
}
if (changed) {
// Only redraw the overlay when something actually changed.
this.updateSelection_(undefined);
}
return changed;
};
/**
 * The mouse has left the canvas. Fire the unhighlight callback (if any) and,
 * unless a series is locked, clear whatever highlight artifacts remain.
 * @param {Object} event the mouseout event from the browser.
 * @private
 */
Dygraph.prototype.mouseOut_ = function(event) {
  var unhighlightCallback = this.attr_("unhighlightCallback");
  if (unhighlightCallback) {
    unhighlightCallback(event);
  }
  if (!this.lockedSet_ && this.attr_("hideOverlayOnMouseOut")) {
    this.clearSelection();
  }
};
/**
 * Clears the current selection (i.e. points that were highlighted by moving
 * the mouse over the chart). Also unlocks any series locked via
 * setSelection(..., seriesName, true).
 */
Dygraph.prototype.clearSelection = function() {
this.cascadeEvents_('deselect', {});
this.lockedSet_ = false;
// Get rid of the overlay data
// If a background fade is in progress, animate it out instead; the
// animation's final step re-enters this method with fadeLevel === 0.
if (this.fadeLevel) {
this.animateSelection_(-1);
return;
}
this.canvas_ctx_.clearRect(0, 0, this.width_, this.height_);
this.fadeLevel = 0;
this.selPoints_ = [];
this.lastx_ = -1;
this.lastRow_ = -1;
this.highlightSet_ = null;
};
/**
 * Returns the number of the currently selected row. To get data for this row,
 * you can use the getValue method.
 * @return {number} absolute row number, or -1 if nothing is selected.
 */
Dygraph.prototype.getSelection = function() {
  if (!this.selPoints_ || this.selPoints_.length < 1) {
    return -1;
  }
  // Find the row whose x coordinate matches the first selected point.
  var targetX = this.selPoints_[0].x;
  for (var setIdx = 0; setIdx < this.layout_.points.length; setIdx++) {
    var pts = this.layout_.points[setIdx];
    for (var rowIdx = 0; rowIdx < pts.length; rowIdx++) {
      if (pts[rowIdx].x == targetX) {
        // Convert back from window-relative to absolute row index.
        return rowIdx + this.getLeftBoundary_();
      }
    }
  }
  return -1;
};
/**
 * Returns the name of the currently-highlighted series.
 * Only available when the highlightSeriesOpts option is in use.
 * @return {?string} the highlighted series name, or null if none.
 */
Dygraph.prototype.getHighlightSeries = function() {
return this.highlightSet_;
};
/**
 * Returns true if the currently-highlighted series was locked
 * via setSelection(..., seriesName, true).
 * @return {boolean} whether the highlight is locked.
 */
Dygraph.prototype.isSeriesLocked = function() {
return this.lockedSet_;
};
/**
 * Fires when there's data available to be graphed.
 * Parses the CSV payload and triggers a full recompute/redraw.
 * @param {String} data Raw CSV data to be plotted
 * @private
 */
Dygraph.prototype.loadedEvent_ = function(data) {
this.rawData_ = this.parseCSV_(data);
this.predraw_();
};
/**
 * Add ticks on the x-axis representing years, months, quarters, weeks, or days
 * (the configured 'ticker' option chooses the granularity).
 * @private
 */
Dygraph.prototype.addXTicks_ = function() {
  // Use the zoomed date window if one is set; otherwise the full x range.
  var range = this.dateWindow_ ?
      [this.dateWindow_[0], this.dateWindow_[1]] :
      this.fullXRange_();
  var xOpts = this.optionsViewForAxis_('x');
  var ticks = xOpts('ticker')(
      range[0],
      range[1],
      this.width_,  // TODO(danvk): should be area.width
      xOpts,
      this);
  this.layout_.setXTicks(ticks);
};
/**
 * @private
 * Computes the range of the data series (including confidence intervals).
 * @param {Array} series either [ [x1, y1], [x2, y2], ... ] or
 * [ [x1, [y1, dev_low, dev_high]], [x2, [y2, dev_low, dev_high]], ... ]
 * @return {Array} [low, high]; entries are null if no valid values were seen.
 */
Dygraph.prototype.extremeValues_ = function(series) {
  var lo = null, hi = null;
  var hasBars = this.attr_("errorBars") || this.attr_("customBars");
  for (var idx = 0; idx < series.length; idx++) {
    // Center value; with bars the y entry is [center, dev_low, dev_high].
    var center = hasBars ? series[idx][1][0] : series[idx][1];
    if (center === null || isNaN(center)) continue;
    var lowCand = center, highCand = center;
    if (hasBars) {
      var barLow = center - series[idx][1][1];
      var barHigh = center + series[idx][1][2];
      // With custom bars the low/high can invert (e.g. tests/custom-bars.html);
      // never let the range exclude the center value itself.
      if (barLow < center) lowCand = barLow;
      if (barHigh > center) highCand = barHigh;
    }
    if (hi === null || highCand > hi) hi = highCand;
    if (lo === null || lowCand < lo) lo = lowCand;
  }
  return [lo, hi];
};
/**
 * @private
 * This function is called once when the chart's data is changed or the options
 * dictionary is updated. It is _not_ called when the user pans or zooms. The
 * idea is that values derived from the chart's data can be computed here,
 * rather than every time the chart is drawn. This includes things like the
 * number of axes, rolling averages, etc.
 */
Dygraph.prototype.predraw_ = function() {
var start = new Date();
this.layout_.computePlotArea();
// TODO(danvk): move more computations out of drawGraph_ and into here.
this.computeYAxes_();
// Create a new plotter.
if (this.plotter_) {
// Dispose of the previous plotter before replacing it.
this.cascadeEvents_('clearChart');
this.plotter_.clear();
}
this.plotter_ = new DygraphCanvasRenderer(this,
this.hidden_,
this.hidden_ctx_,
this.layout_);
// The roller sits in the bottom left corner of the chart. We don't know where
// this will be until the options are available, so it's positioned here.
this.createRollInterface_();
this.cascadeEvents_('predraw');
// Convert the raw data (a 2D array) into the internal format and compute
// rolling averages.
this.rolledSeries_ = [null]; // x-axis is the first series and it's special
for (var i = 1; i < this.numColumns(); i++) {
// var logScale = this.attr_('logscale', i); // TODO(klausw): this looks wrong // konigsberg thinks so too.
var logScale = this.attr_('logscale');
var series = this.extractSeries_(this.rawData_, i, logScale);
series = this.rollingAverage(series, this.rollPeriod_);
this.rolledSeries_.push(series);
}
// If the data or options have changed, then we'd better redraw.
this.drawGraph_();
// This is used to determine whether to do various animations.
var end = new Date();
this.drawingTimeMs_ = (end - start);
};
/**
 * Loop over all fields and create datasets, calculating extreme y-values for
 * each series and extreme x-indices as we go.
 *
 * dateWindow is passed in as an explicit parameter so that we can compute
 * extreme values "speculatively", i.e. without actually setting state on the
 * dygraph.
 *
 * TODO(danvk): make this more of a true function
 * @param {Array} rolledSeries per-column rolled series; index 0 is unused.
 * @param {?Array} dateWindow [low, high] x range to prune to, or falsy for all.
 * @return [ datasets, seriesExtremes, boundaryIds ]
 * @private
 */
Dygraph.prototype.gatherDatasets_ = function(rolledSeries, dateWindow) {
var boundaryIds = [];
var cumulative_y = []; // For stacked series.
var datasets = [];
var extremes = {}; // series name -> [low, high]
var i, j, k;
// Loop over the fields (series). Go from the last to the first,
// because if they're stacked that's how we accumulate the values.
var num_series = rolledSeries.length - 1;
for (i = num_series; i >= 1; i--) {
// Hidden series contribute neither data nor extremes.
if (!this.visibility()[i - 1]) continue;
// Note: this copy _is_ necessary at the moment.
// If you remove it, it breaks zooming with error bars on.
// TODO(danvk): investigate further & write a test for this.
var series = [];
for (j = 0; j < rolledSeries[i].length; j++) {
series.push(rolledSeries[i][j]);
}
// Prune down to the desired range, if necessary (for zooming)
// Because there can be lines going to points outside of the visible area,
// we actually prune to visible points, plus one on either side.
var bars = this.attr_("errorBars") || this.attr_("customBars");
if (dateWindow) {
var low = dateWindow[0];
var high = dateWindow[1];
var pruned = [];
// TODO(danvk): do binary search instead of linear search.
// TODO(danvk): pass firstIdx and lastIdx directly to the renderer.
var firstIdx = null, lastIdx = null;
for (k = 0; k < series.length; k++) {
if (series[k][0] >= low && firstIdx === null) {
firstIdx = k;
}
if (series[k][0] <= high) {
lastIdx = k;
}
}
// Widen by one point on each side so lines connect off-screen neighbors.
if (firstIdx === null) firstIdx = 0;
if (firstIdx > 0) firstIdx--;
if (lastIdx === null) lastIdx = series.length - 1;
if (lastIdx < series.length - 1) lastIdx++;
boundaryIds[i-1] = [firstIdx, lastIdx];
for (k = firstIdx; k <= lastIdx; k++) {
pruned.push(series[k]);
}
series = pruned;
} else {
boundaryIds[i-1] = [0, series.length-1];
}
var seriesExtremes = this.extremeValues_(series);
if (bars) {
// Flatten [x, [y, devLo, devHi]] rows to [x, y, devLo, devHi].
for (j=0; j<series.length; j++) {
series[j] = [series[j][0],
series[j][1][0],
series[j][1][1],
series[j][1][2]];
}
} else if (this.attr_("stackedGraph")) {
var l = series.length;
var actual_y;
for (j = 0; j < l; j++) {
// If one data set has a NaN, let all subsequent stacked
// sets inherit the NaN -- only start at 0 for the first set.
var x = series[j][0];
if (cumulative_y[x] === undefined) {
cumulative_y[x] = 0;
}
actual_y = series[j][1];
if (actual_y === null) {
series[j] = [x, null];
continue;
}
cumulative_y[x] += actual_y;
series[j] = [x, cumulative_y[x]];
// Stacked totals can exceed the per-series extremes; widen them.
if (cumulative_y[x] > seriesExtremes[1]) {
seriesExtremes[1] = cumulative_y[x];
}
if (cumulative_y[x] < seriesExtremes[0]) {
seriesExtremes[0] = cumulative_y[x];
}
}
}
var seriesName = this.attr_("labels")[i];
extremes[seriesName] = seriesExtremes;
datasets[i] = series;
}
// For stacked graphs, a NaN value for any point in the sum should create a
// clean gap in the graph. Back-propagate NaNs to all points at this X value.
if (this.attr_("stackedGraph")) {
for (k = datasets.length - 1; k >= 0; --k) {
// Use the first nonempty dataset to get X values.
if (!datasets[k]) continue;
for (j = 0; j < datasets[k].length; j++) {
var x = datasets[k][j][0];
if (isNaN(cumulative_y[x])) {
// Set all Y values to NaN at that X value.
for (i = datasets.length - 1; i >= 0; i--) {
if (!datasets[i]) continue;
datasets[i][j][1] = NaN;
}
}
}
break;
}
}
return [ datasets, extremes, boundaryIds ];
};
/**
 * Update the graph with new data. This method is called when the viewing area
 * has changed. If the underlying data or options have changed, predraw_ will
 * be called before drawGraph_ is called.
 *
 * @private
 */
Dygraph.prototype.drawGraph_ = function() {
var start = new Date();
// This is used to set the second parameter to drawCallback, below.
var is_initial_draw = this.is_initial_draw_;
this.is_initial_draw_ = false;
this.layout_.removeAllDatasets();
this.setColors_();
this.attrs_.pointSize = 0.5 * this.attr_('highlightCircleSize');
// Assemble visible datasets, per-series extremes and boundary indices.
var packed = this.gatherDatasets_(this.rolledSeries_, this.dateWindow_);
var datasets = packed[0];
var extremes = packed[1];
this.boundaryIds_ = packed[2];
// Rebuild the label -> column index map.
this.setIndexByName_ = {};
var labels = this.attr_("labels");
if (labels.length > 0) {
this.setIndexByName_[labels[0]] = 0;
}
var dataIdx = 0;
for (var i = 1; i < datasets.length; i++) {
this.setIndexByName_[labels[i]] = i;
// Hidden series keep their name mapping but aren't added to the layout.
if (!this.visibility()[i - 1]) continue;
this.layout_.addDataset(labels[i], datasets[i]);
this.datasetIndex_[i] = dataIdx++;
}
this.computeYAxisRanges_(extremes);
this.layout_.setYAxes(this.axes_);
this.addXTicks_();
// Save the X axis zoomed status as the updateOptions call will tend to set it erroneously
var tmp_zoomed_x = this.zoomed_x_;
// Tell PlotKit to use this new data and render itself
this.layout_.setDateWindow(this.dateWindow_);
this.zoomed_x_ = tmp_zoomed_x;
this.layout_.evaluateWithError();
this.renderGraph_(is_initial_draw);
if (this.attr_("timingName")) {
// Optional timing instrumentation, logged under the given name.
var end = new Date();
if (console) {
console.log(this.attr_("timingName") + " - drawGraph: " + (end - start) + "ms");
}
}
};
/**
 * This does the work of drawing the chart. It assumes that the layout and axis
 * scales have already been set (e.g. by predraw_).
 * @param {boolean} is_initial_draw forwarded as drawCallback's second argument.
 * @private
 */
Dygraph.prototype.renderGraph_ = function(is_initial_draw) {
  this.cascadeEvents_('clearChart');
  this.plotter_.clear();
  var underlayCallback = this.attr_('underlayCallback');
  if (underlayCallback) {
    // NOTE: we pass the dygraph object to this callback twice to avoid
    // breaking users who expect a deprecated form of this callback.
    underlayCallback(this.hidden_ctx_, this.layout_.getPlotArea(), this, this);
  }
  var evt = {
    canvas: this.hidden_,
    drawingContext: this.hidden_ctx_
  };
  this.cascadeEvents_('willDrawChart', evt);
  this.plotter_.render();
  this.cascadeEvents_('didDrawChart', evt);
  // TODO(danvk): is this a performance bottleneck when panning?
  // The interaction canvas should already be empty in that situation.
  this.canvas_.getContext('2d').clearRect(0, 0, this.canvas_.width,
                                          this.canvas_.height);
  var drawCallback = this.attr_("drawCallback");
  if (drawCallback !== null) {
    drawCallback(this, is_initial_draw);
  }
};
/**
 * @private
 * Determine properties of the y-axes which are independent of the data
 * currently being displayed. This includes things like the number of axes and
 * the style of the axes. It does not include the range of each axis and its
 * tick marks.
 * This fills in this.axes_.
 * axes_ = [ { options } ]
 * indices are into the axes_ array.
 */
Dygraph.prototype.computeYAxes_ = function() {
// Preserve valueWindow settings if they exist, and if the user hasn't
// specified a new valueRange.
var valueWindows, axis, index, opts, v;
if (this.axes_ !== undefined && this.user_attrs_.hasOwnProperty("valueRange") === false) {
valueWindows = [];
for (index = 0; index < this.axes_.length; index++) {
valueWindows.push(this.axes_[index].valueWindow);
}
}
// this.axes_ doesn't match this.attributes_.axes_.options. It's used for
// data computation as well as options storage.
// Go through once and add all the axes.
this.axes_ = [];
for (axis = 0; axis < this.attributes_.numAxes(); axis++) {
// Add a new axis, making a copy of its per-axis options.
opts = { g : this };
Dygraph.update(opts, this.attributes_.axisOptions(axis));
this.axes_[axis] = opts;
}
// Copy global valueRange option over to the first axis.
// NOTE(konigsberg): Are these two statements necessary?
// I tried removing it. The automated tests pass, and manually
// messing with tests/zoom.html showed no trouble.
v = this.attr_('valueRange');
if (v) this.axes_[0].valueRange = v;
if (valueWindows !== undefined) {
// Restore valueWindow settings.
for (index = 0; index < valueWindows.length; index++) {
this.axes_[index].valueWindow = valueWindows[index];
}
}
// Per-axis valueRange overrides: axis 0 reads from its options view; later
// axes only honor the legacy user_attrs_.axes.y2.valueRange form.
for (axis = 0; axis < this.axes_.length; axis++) {
if (axis === 0) {
opts = this.optionsViewForAxis_('y' + (axis ? '2' : ''));
v = opts("valueRange");
if (v) this.axes_[axis].valueRange = v;
} else { // To keep old behavior
var axes = this.user_attrs_.axes;
if (axes && axes.y2) {
v = axes.y2.valueRange;
if (v) this.axes_[axis].valueRange = v;
}
}
}
};
/**
 * Returns the number of y-axes on the chart.
 * @return {Number} the number of axes.
 */
Dygraph.prototype.numAxes = function() {
return this.attributes_.numAxes();
};
/**
 * @private
 * Returns axis properties for the given series.
 * @param { String } series The name of the series for which to get axis
 * properties, e.g. 'Y1'.
 * @return { Object } The axis properties (an entry of this.axes_).
 */
Dygraph.prototype.axisPropertiesForSeries = function(series) {
// TODO(danvk): handle errors.
return this.axes_[this.attributes_.axisForSeries(series)];
};
/**
 * @private
 * Determine the value range and tick marks for each axis.
 * @param {Object} extremes A mapping from seriesName -> [low, high]
 * This fills in the valueRange and ticks fields in each entry of this.axes_.
 */
Dygraph.prototype.computeYAxisRanges_ = function(extremes) {
var series;
var numAxes = this.attributes_.numAxes();
// Compute extreme values, a span and tick marks for each axis.
for (var i = 0; i < numAxes; i++) {
var axis = this.axes_[i];
var logscale = this.attributes_.getForAxis("logscale", i);
var includeZero = this.attributes_.getForAxis("includeZero", i);
series = this.attributes_.seriesForAxis(i);
if (series.length === 0) {
// If no series are defined or visible then use a reasonable default
axis.extremeRange = [0, 1];
} else {
// Calculate the extremes of extremes.
var minY = Infinity; // extremes[series[0]][0];
var maxY = -Infinity; // extremes[series[0]][1];
var extremeMinY, extremeMaxY;
for (var j = 0; j < series.length; j++) {
// this skips invisible series
if (!extremes.hasOwnProperty(series[j])) continue;
// Only use valid extremes to stop null data series' from corrupting the scale.
extremeMinY = extremes[series[j]][0];
if (extremeMinY !== null) {
minY = Math.min(extremeMinY, minY);
}
extremeMaxY = extremes[series[j]][1];
if (extremeMaxY !== null) {
maxY = Math.max(extremeMaxY, maxY);
}
}
if (includeZero && minY > 0) minY = 0;
// Ensure we have a valid scale, otherwise default to [0, 1] for safety.
if (minY == Infinity) minY = 0;
if (maxY == -Infinity) maxY = 1;
// Add some padding and round up to an integer to be human-friendly.
var span = maxY - minY;
// special case: if we have no sense of scale, use +/-10% of the sole value.
if (span === 0) { span = maxY; }
var maxAxisY, minAxisY;
if (logscale) {
// On a log scale, never pad below the minimum (could go non-positive).
maxAxisY = maxY + 0.1 * span;
minAxisY = minY;
} else {
maxAxisY = maxY + 0.1 * span;
minAxisY = minY - 0.1 * span;
// Try to include zero and make it minAxisY (or maxAxisY) if it makes sense.
if (!this.attr_("avoidMinZero")) {
if (minAxisY < 0 && minY >= 0) minAxisY = 0;
if (maxAxisY > 0 && maxY <= 0) maxAxisY = 0;
}
if (this.attr_("includeZero")) {
if (maxY < 0) maxAxisY = 0;
if (minY > 0) minAxisY = 0;
}
}
axis.extremeRange = [minAxisY, maxAxisY];
}
// Precedence: user zoom (valueWindow) > configured valueRange > computed.
if (axis.valueWindow) {
// This is only set if the user has zoomed on the y-axis. It is never set
// by a user. It takes precedence over axis.valueRange because, if you set
// valueRange, you'd still expect to be able to pan.
axis.computedValueRange = [axis.valueWindow[0], axis.valueWindow[1]];
} else if (axis.valueRange) {
// This is a user-set value range for this axis.
axis.computedValueRange = [axis.valueRange[0], axis.valueRange[1]];
} else {
axis.computedValueRange = axis.extremeRange;
}
// Add ticks. By default, all axes inherit the tick positions of the
// primary axis. However, if an axis is specifically marked as having
// independent ticks, then that is permissible as well.
var opts = this.optionsViewForAxis_('y' + (i ? '2' : ''));
var ticker = opts('ticker');
if (i === 0 || axis.independentTicks) {
axis.ticks = ticker(axis.computedValueRange[0],
axis.computedValueRange[1],
this.height_, // TODO(danvk): should be area.height
opts,
this);
} else {
// Dependent axis: place ticks at the same fractional heights as the
// primary axis so grid lines align across axes.
var p_axis = this.axes_[0];
var p_ticks = p_axis.ticks;
var p_scale = p_axis.computedValueRange[1] - p_axis.computedValueRange[0];
var scale = axis.computedValueRange[1] - axis.computedValueRange[0];
var tick_values = [];
for (var k = 0; k < p_ticks.length; k++) {
var y_frac = (p_ticks[k].v - p_axis.computedValueRange[0]) / p_scale;
var y_val = axis.computedValueRange[0] + y_frac * scale;
tick_values.push(y_val);
}
axis.ticks = ticker(axis.computedValueRange[0],
axis.computedValueRange[1],
this.height_, // TODO(danvk): should be area.height
opts,
this,
tick_values);
}
}
};
/**
 * Extracts one series from the raw data (a 2D array) into an array of (date,
 * value) tuples.
 *
 * This is where undesirable points (i.e. negative values on log scales and
 * missing values through which we wish to connect lines) are dropped.
 * TODO(danvk): the "missing values" bit above doesn't seem right.
 *
 * @param {Array} rawData the full 2D data array.
 * @param {number} i column index of the series to extract.
 * @param {boolean} logScale whether the y-axis uses a log scale.
 * @return {Array} array of [x, y] pairs.
 * @private
 */
Dygraph.prototype.extractSeries_ = function(rawData, i, logScale) {
  // TODO(danvk): pre-allocate series here.
  var series = [];
  for (var rowIdx = 0; rowIdx < rawData.length; rowIdx++) {
    var xVal = rawData[rowIdx][0];
    var yVal = rawData[rowIdx][i];
    // On the log scale, points less than or equal to zero do not exist;
    // nulling them out creates a gap in the chart.
    if (logScale && yVal <= 0) {
      yVal = null;
    }
    series.push([xVal, yVal]);
  }
  return series;
};
/**
 * @private
 * Calculates the rolling average of a data set.
 * If originalData is [label, val], rolls the average of those.
 * If originalData is [label, [value, stddev]], the roll is returned in the
 * same form, with appropriately reduced stddev for each value.
 * Note that this is where fractional input (i.e. '5/10') is converted into
 * decimal values.
 * @param {Array} originalData The data in the appropriate format (see above)
 * @param {Number} rollPeriod The number of points over which to average the
 * data
 * @return {Array} the rolled data, same shape as the input.
 */
Dygraph.prototype.rollingAverage = function(originalData, rollPeriod) {
if (originalData.length < 2)
return originalData;
rollPeriod = Math.min(rollPeriod, originalData.length);
var rollingData = [];
var sigma = this.attr_("sigma");
var low, high, i, j, y, sum, num_ok, stddev;
if (this.fractions_) {
// Fractional input: each y is [numerator, denominator]; maintain running
// sums over the rolling window and emit percentages.
var num = 0;
var den = 0; // numerator/denominator
var mult = 100.0;
for (i = 0; i < originalData.length; i++) {
num += originalData[i][1][0];
den += originalData[i][1][1];
if (i - rollPeriod >= 0) {
// Drop the value that just left the window.
num -= originalData[i - rollPeriod][1][0];
den -= originalData[i - rollPeriod][1][1];
}
var date = originalData[i][0];
var value = den ? num / den : 0.0;
if (this.attr_("errorBars")) {
if (this.attr_("wilsonInterval")) {
// For more details on this confidence interval, see:
// http://en.wikipedia.org/wiki/Binomial_confidence_interval
if (den) {
var p = value < 0 ? 0 : value, n = den;
var pm = sigma * Math.sqrt(p*(1-p)/n + sigma*sigma/(4*n*n));
var denom = 1 + sigma * sigma / den;
low = (p + sigma * sigma / (2 * den) - pm) / denom;
high = (p + sigma * sigma / (2 * den) + pm) / denom;
rollingData[i] = [date,
[p * mult, (p - low) * mult, (high - p) * mult]];
} else {
rollingData[i] = [date, [0, 0, 0]];
}
} else {
// Normal-approximation interval for a binomial proportion.
stddev = den ? sigma * Math.sqrt(value * (1 - value) / den) : 1.0;
rollingData[i] = [date, [mult * value, mult * stddev, mult * stddev]];
}
} else {
rollingData[i] = [date, mult * value];
}
}
} else if (this.attr_("customBars")) {
// Custom bars: each y is [low, center, high]; roll all three via running
// sums over the window, skipping null/NaN centers.
low = 0;
var mid = 0;
high = 0;
var count = 0;
for (i = 0; i < originalData.length; i++) {
var data = originalData[i][1];
y = data[1];
rollingData[i] = [originalData[i][0], [y, y - data[0], data[2] - y]];
if (y !== null && !isNaN(y)) {
low += data[0];
mid += y;
high += data[2];
count += 1;
}
if (i - rollPeriod >= 0) {
// Drop the value that just left the window (if it was counted).
var prev = originalData[i - rollPeriod];
if (prev[1][1] !== null && !isNaN(prev[1][1])) {
low -= prev[1][0];
mid -= prev[1][1];
high -= prev[1][2];
count -= 1;
}
}
if (count) {
rollingData[i] = [originalData[i][0], [ 1.0 * mid / count,
1.0 * (mid - low) / count,
1.0 * (high - mid) / count ]];
} else {
rollingData[i] = [originalData[i][0], [null, null, null]];
}
}
} else {
// Calculate the rolling average for the first rollPeriod - 1 points where
// there is not enough data to roll over the full number of points
if (!this.attr_("errorBars")){
if (rollPeriod == 1) {
// No averaging needed; return the input untouched.
return originalData;
}
for (i = 0; i < originalData.length; i++) {
sum = 0;
num_ok = 0;
for (j = Math.max(0, i - rollPeriod + 1); j < i + 1; j++) {
y = originalData[j][1];
if (y === null || isNaN(y)) continue;
num_ok++;
sum += originalData[j][1];
}
if (num_ok) {
rollingData[i] = [originalData[i][0], sum / num_ok];
} else {
rollingData[i] = [originalData[i][0], null];
}
}
} else {
// Error bars: each y is [value, stddev]; combine stddevs in quadrature.
for (i = 0; i < originalData.length; i++) {
sum = 0;
var variance = 0;
num_ok = 0;
for (j = Math.max(0, i - rollPeriod + 1); j < i + 1; j++) {
y = originalData[j][1][0];
if (y === null || isNaN(y)) continue;
num_ok++;
sum += originalData[j][1][0];
variance += Math.pow(originalData[j][1][1], 2);
}
if (num_ok) {
stddev = Math.sqrt(variance) / num_ok;
rollingData[i] = [originalData[i][0],
[sum / num_ok, sigma * stddev, sigma * stddev]];
} else {
rollingData[i] = [originalData[i][0], [null, null, null]];
}
}
}
}
return rollingData;
};
/**
 * Detects the type of the str (date or numeric) and sets the various
 * formatting attributes in this.attrs_ based on this type.
 * @param {String} str An x value.
 * @private
 */
Dygraph.prototype.detectTypeFromString_ = function(str) {
  var isDate = false;
  var dashPos = str.indexOf('-');  // could be 2006-01-01 _or_ 1.0e-2
  // A dash that isn't part of an exponent ("e-2"/"E-2") suggests a date.
  var dashIsNotExponent =
      dashPos > 0 && str[dashPos - 1] != 'e' && str[dashPos - 1] != 'E';
  if (dashIsNotExponent ||
      str.indexOf('/') >= 0 ||
      isNaN(parseFloat(str))) {
    isDate = true;
  } else if (str.length == 8 && str > '19700101' && str < '20371231') {
    // Bare YYYYMMDD in the 32-bit epoch range.
    // TODO(danvk): remove support for this format.
    isDate = true;
  }
  this.setXAxisOptions_(isDate);
};
/**
 * Installs the x-axis parser, formatters and ticker appropriate for either
 * date-valued or numeric x values.
 * @param {boolean} isDate whether x values should be treated as dates.
 * @private
 */
Dygraph.prototype.setXAxisOptions_ = function(isDate) {
  var xAxis = this.attrs_.axes.x;
  if (isDate) {
    this.attrs_.xValueParser = Dygraph.dateParser;
    xAxis.valueFormatter = Dygraph.dateString_;
    xAxis.ticker = Dygraph.dateTicker;
    xAxis.axisLabelFormatter = Dygraph.dateAxisFormatter;
  } else {
    /** @private (shut up, jsdoc!) */
    this.attrs_.xValueParser = function(x) { return parseFloat(x); };
    // TODO(danvk): use Dygraph.numberValueFormatter here?
    /** @private (shut up, jsdoc!) */
    xAxis.valueFormatter = function(x) { return x; };
    xAxis.ticker = Dygraph.numericLinearTicks;
    xAxis.axisLabelFormatter = xAxis.valueFormatter;
  }
};
/**
 * Parses the value as a floating point number. This is like the parseFloat()
 * built-in, but with a few differences:
 * - the empty string is parsed as null, rather than NaN.
 * - if the string cannot be parsed at all, an error is logged.
 * If the string can't be parsed, this method returns null.
 * @param {String} x The string to be parsed
 * @param {Number} opt_line_no The (0-based) line number from which the string comes.
 * @param {String} opt_line The text of the line from which the string comes.
 * @return {?number} the parsed number, NaN for a literal "NaN", or null.
 * @private
 */
// Parse the x as a float or return null if it's not a number.
Dygraph.prototype.parseFloat_ = function(x, opt_line_no, opt_line) {
  var val = parseFloat(x);
  if (!isNaN(val)) return val;
  // Try to figure out what happened.
  // If the value is the empty string, parse it as null.
  if (/^ *$/.test(x)) return null;
  // If it was actually "NaN", return it as NaN.
  if (/^ *nan *$/i.test(x)) return NaN;
  // Looks like a parsing error.
  var msg = "Unable to parse '" + x + "' as a number";
  // Use loose != null so that omitted (undefined) optional arguments are
  // excluded too. The previous strict `!== null` checks let `undefined`
  // through, producing messages like "on line NaN ('undefined') of CSV."
  // when the optional args were not supplied.
  if (opt_line != null && opt_line_no != null) {
    msg += " on line " + (1+opt_line_no) + " ('" + opt_line + "') of CSV.";
  }
  this.error(msg);
  return null;
};
/**
 * @private
 * Parses a string in a special csv format. We expect a csv file where each
 * line is a date point, and the first field in each line is the date string.
 * We also expect that all remaining fields represent series.
 * if the errorBars attribute is set, then interpret the fields as:
 * date, series1, stddev1, series2, stddev2, ...
 * @param {String} data Raw CSV text (see above).
 *
 * @return [Object] An array with one entry for each row. These entries
 * are an array of cells in that row. The first entry is the parsed x-value for
 * the row. The second, third, etc. are the y-values. These can take on one of
 * three forms, depending on the CSV and constructor parameters:
 * 1. numeric value
 * 2. [ value, stddev ]
 * 3. [ low value, center value, high value ]
 */
Dygraph.prototype.parseCSV_ = function(data) {
var ret = [];
var line_delimiter = Dygraph.detectLineDelimiter(data);
var lines = data.split(line_delimiter || "\n");
var vals, j;
// Use the default delimiter or fall back to a tab if that makes sense.
var delim = this.attr_('delimiter');
if (lines[0].indexOf(delim) == -1 && lines[0].indexOf('\t') >= 0) {
delim = '\t';
}
var start = 0;
if (!('labels' in this.user_attrs_)) {
// User hasn't explicitly set labels, so they're (presumably) in the CSV.
start = 1;
this.attrs_.labels = lines[0].split(delim); // NOTE: _not_ user_attrs_.
this.attributes_.reparseSeries();
}
var line_no = 0;
var xParser;
var defaultParserSet = false; // attempt to auto-detect x value type
var expectedCols = this.attr_("labels").length;
var outOfOrder = false;
for (var i = start; i < lines.length; i++) {
var line = lines[i];
line_no = i;
if (line.length === 0) continue; // skip blank lines
if (line[0] == '#') continue; // skip comment lines
var inFields = line.split(delim);
// A row needs at least an x value and one y value.
if (inFields.length < 2) continue;
var fields = [];
if (!defaultParserSet) {
// Sniff the x-value type (date vs. numeric) from the first data row.
this.detectTypeFromString_(inFields[0]);
xParser = this.attr_("xValueParser");
defaultParserSet = true;
}
fields[0] = xParser(inFields[0], this);
// If fractions are expected, parse the numbers as "A/B"
if (this.fractions_) {
for (j = 1; j < inFields.length; j++) {
// TODO(danvk): figure out an appropriate way to flag parse errors.
vals = inFields[j].split("/");
if (vals.length != 2) {
this.error('Expected fractional "num/den" values in CSV data ' +
"but found a value '" + inFields[j] + "' on line " +
(1 + i) + " ('" + line + "') which is not of this form.");
fields[j] = [0, 0];
} else {
fields[j] = [this.parseFloat_(vals[0], i, line),
this.parseFloat_(vals[1], i, line)];
}
}
} else if (this.attr_("errorBars")) {
// If there are error bars, values are (value, stddev) pairs
if (inFields.length % 2 != 1) {
this.error('Expected alternating (value, stdev.) pairs in CSV data ' +
'but line ' + (1 + i) + ' has an odd number of values (' +
(inFields.length - 1) + "): '" + line + "'");
}
for (j = 1; j < inFields.length; j += 2) {
fields[(j + 1) / 2] = [this.parseFloat_(inFields[j], i, line),
this.parseFloat_(inFields[j + 1], i, line)];
}
} else if (this.attr_("customBars")) {
// Bars are a low;center;high tuple
for (j = 1; j < inFields.length; j++) {
var val = inFields[j];
if (/^ *$/.test(val)) {
// Blank cell means a missing bar.
fields[j] = [null, null, null];
} else {
vals = val.split(";");
if (vals.length == 3) {
fields[j] = [ this.parseFloat_(vals[0], i, line),
this.parseFloat_(vals[1], i, line),
this.parseFloat_(vals[2], i, line) ];
} else {
this.warn('When using customBars, values must be either blank ' +
'or "low;center;high" tuples (got "' + val +
'" on line ' + (1+i));
}
}
}
} else {
// Values are just numbers
for (j = 1; j < inFields.length; j++) {
fields[j] = this.parseFloat_(inFields[j], i, line);
}
}
// Track whether the x values are monotonically increasing.
if (ret.length > 0 && fields[0] < ret[ret.length - 1][0]) {
outOfOrder = true;
}
if (fields.length != expectedCols) {
this.error("Number of columns in line " + i + " (" + fields.length +
") does not agree with number of labels (" + expectedCols +
") " + line);
}
// If the user specified the 'labels' option and none of the cells of the
// first row parsed correctly, then they probably double-specified the
// labels. We go with the values set in the option, discard this row and
// log a warning to the JS console.
if (i === 0 && this.attr_('labels')) {
var all_null = true;
for (j = 0; all_null && j < fields.length; j++) {
if (fields[j]) all_null = false;
}
if (all_null) {
this.warn("The dygraphs 'labels' option is set, but the first row of " +
"CSV data ('" + line + "') appears to also contain labels. " +
"Will drop the CSV labels and use the option labels.");
continue;
}
}
ret.push(fields);
}
if (outOfOrder) {
// Sort by x so downstream code can assume ordered data.
this.warn("CSV is out of order; order it correctly to speed loading.");
ret.sort(function(a,b) { return a[0] - b[0]; });
}
return ret;
};
/**
 * @private
 * The user has provided their data as a pre-packaged JS array. If the x values
 * are numeric, this is the same as dygraphs' internal format. If the x values
 * are dates, we need to convert them from Date objects to ms since epoch.
 * @param {[Object]} data Rows of [x, y1, y2, ...]; x is a Number or Date.
 * @return {[Object]} data with numeric x values, or null if validation fails
 *     (empty data set, empty row, label/column count mismatch, non-Date x).
 */
Dygraph.prototype.parseArray_ = function(data) {
// Peek at the first x value to see if it's numeric.
if (data.length === 0) {
this.error("Can't plot empty data set");
return null;
}
if (data[0].length === 0) {
this.error("Data set cannot contain an empty row");
return null;
}
var i;
if (this.attr_("labels") === null) {
// No labels supplied: synthesize defaults "X", "Y1", "Y2", ...
this.warn("Using default labels. Set labels explicitly via 'labels' " +
"in the options parameter");
this.attrs_.labels = [ "X" ];
for (i = 1; i < data[0].length; i++) {
this.attrs_.labels.push("Y" + i); // Not user_attrs_.
}
this.attributes_.reparseSeries();
} else {
// Labels were supplied: their count must match the column count.
var num_labels = this.attr_("labels");
if (num_labels.length != data[0].length) {
this.error("Mismatch between number of labels (" + num_labels +
") and number of columns in array (" + data[0].length + ")");
return null;
}
}
if (Dygraph.isDateLike(data[0][0])) {
// Some intelligent defaults for a date x-axis.
this.attrs_.axes.x.valueFormatter = Dygraph.dateString_;
this.attrs_.axes.x.ticker = Dygraph.dateTicker;
this.attrs_.axes.x.axisLabelFormatter = Dygraph.dateAxisFormatter;
// Assume they're all dates.
var parsedData = Dygraph.clone(data);
for (i = 0; i < data.length; i++) {
if (parsedData[i].length === 0) {
this.error("Row " + (1 + i) + " of data is empty");
return null;
}
if (parsedData[i][0] === null ||
typeof(parsedData[i][0].getTime) != 'function' ||
isNaN(parsedData[i][0].getTime())) {
this.error("x value in row " + (1 + i) + " is not a Date");
return null;
}
// Replace the Date object with its numeric ms-since-epoch value.
parsedData[i][0] = parsedData[i][0].getTime();
}
return parsedData;
} else {
// Some intelligent defaults for a numeric x-axis.
/** @private (shut up, jsdoc!) */
this.attrs_.axes.x.valueFormatter = function(x) { return x; };
this.attrs_.axes.x.ticker = Dygraph.numericLinearTicks;
this.attrs_.axes.x.axisLabelFormatter = Dygraph.numberAxisLabelFormatter;
return data;
}
};
/**
 * Parses a DataTable object from gviz.
 * The data is expected to have a first column that is either a date or a
 * number. All subsequent columns must be numbers. If there is a clear mismatch
 * between this.xValueParser_ and the type of the first column, it will be
 * fixed. Fills out rawData_.
 * String columns are treated as annotation columns (attached to the
 * preceding numeric column) when 'displayAnnotations' is set.
 * @param {[Object]} data See above.
 * @private
 */
Dygraph.prototype.parseDataTable_ = function(data) {
// Generates a short label (A, B, ..., Z, Aa, Ab, ...) for the nth annotation.
var shortTextForAnnotationNum = function(num) {
// converts [0-9]+ [A-Z][a-z]*
// example: 0=A, 1=B, 25=Z, 26=Aa, 27=Ab
// and continues like.. Ba Bb .. Za .. Zz..Aaa...Zzz Aaaa Zzzz
var shortText = String.fromCharCode(65 /* A */ + num % 26);
num = Math.floor(num / 26);
while ( num > 0 ) {
shortText = String.fromCharCode(65 /* A */ + (num - 1) % 26 ) + shortText.toLowerCase();
num = Math.floor((num - 1) / 26);
}
return shortText;
};
var cols = data.getNumberOfColumns();
var rows = data.getNumberOfRows();
// Column 0 determines the x-axis type: date/datetime or number.
var indepType = data.getColumnType(0);
if (indepType == 'date' || indepType == 'datetime') {
this.attrs_.xValueParser = Dygraph.dateParser;
this.attrs_.axes.x.valueFormatter = Dygraph.dateString_;
this.attrs_.axes.x.ticker = Dygraph.dateTicker;
this.attrs_.axes.x.axisLabelFormatter = Dygraph.dateAxisFormatter;
} else if (indepType == 'number') {
this.attrs_.xValueParser = function(x) { return parseFloat(x); };
this.attrs_.axes.x.valueFormatter = function(x) { return x; };
this.attrs_.axes.x.ticker = Dygraph.numericLinearTicks;
this.attrs_.axes.x.axisLabelFormatter = this.attrs_.axes.x.valueFormatter;
} else {
this.error("only 'date', 'datetime' and 'number' types are supported for " +
"column 1 of DataTable input (Got '" + indepType + "')");
return null;
}
// Array of the column indices which contain data (and not annotations).
var colIdx = [];
var annotationCols = {};  // data index -> [annotation cols]
var hasAnnotations = false;
var i, j;
for (i = 1; i < cols; i++) {
var type = data.getColumnType(i);
if (type == 'number') {
colIdx.push(i);
} else if (type == 'string' && this.attr_('displayAnnotations')) {
// This is OK -- it's an annotation column.
// Annotation columns attach to the most recent numeric column.
var dataIdx = colIdx[colIdx.length - 1];
if (!annotationCols.hasOwnProperty(dataIdx)) {
annotationCols[dataIdx] = [i];
} else {
annotationCols[dataIdx].push(i);
}
hasAnnotations = true;
} else {
this.error("Only 'number' is supported as a dependent type with Gviz." +
" 'string' is only supported if displayAnnotations is true");
}
}
// Read column labels
// TODO(danvk): add support back for errorBars
var labels = [data.getColumnLabel(0)];
for (i = 0; i < colIdx.length; i++) {
labels.push(data.getColumnLabel(colIdx[i]));
if (this.attr_("errorBars")) i += 1;
}
this.attrs_.labels = labels;
cols = labels.length;
var ret = [];
var outOfOrder = false;
var annotations = [];
for (i = 0; i < rows; i++) {
var row = [];
// Rows without an x value cannot be plotted; skip them with a warning.
if (typeof(data.getValue(i, 0)) === 'undefined' ||
data.getValue(i, 0) === null) {
this.warn("Ignoring row " + i +
" of DataTable because of undefined or null first column.");
continue;
}
if (indepType == 'date' || indepType == 'datetime') {
row.push(data.getValue(i, 0).getTime());
} else {
row.push(data.getValue(i, 0));
}
if (!this.attr_("errorBars")) {
for (j = 0; j < colIdx.length; j++) {
var col = colIdx[j];
row.push(data.getValue(i, col));
// Collect any annotation attached to this cell.
if (hasAnnotations &&
annotationCols.hasOwnProperty(col) &&
data.getValue(i, annotationCols[col][0]) !== null) {
var ann = {};
ann.series = data.getColumnLabel(col);
ann.xval = row[0];
ann.shortText = shortTextForAnnotationNum(annotations.length);
ann.text = '';
for (var k = 0; k < annotationCols[col].length; k++) {
if (k) ann.text += "\n";
ann.text += data.getValue(i, annotationCols[col][k]);
}
annotations.push(ann);
}
}
// Strip out infinities, which give dygraphs problems later on.
for (j = 0; j < row.length; j++) {
if (!isFinite(row[j])) row[j] = null;
}
} else {
// With errorBars, values come as (value, stddev) column pairs.
for (j = 0; j < cols - 1; j++) {
row.push([ data.getValue(i, 1 + 2 * j), data.getValue(i, 2 + 2 * j) ]);
}
}
if (ret.length > 0 && row[0] < ret[ret.length - 1][0]) {
outOfOrder = true;
}
ret.push(row);
}
if (outOfOrder) {
this.warn("DataTable is out of order; order it correctly to speed loading.");
ret.sort(function(a,b) { return a[0] - b[0]; });
}
this.rawData_ = ret;
if (annotations.length > 0) {
this.setAnnotations(annotations, true);
}
this.attributes_.reparseSeries();
};
/**
 * Get the CSV data. If it's in a function, call that function. If it's in a
 * file, do an XMLHttpRequest to get it.
 * Dispatches on the type of this.file_: function (call it), array
 * (parseArray_), gviz DataTable (parseDataTable_), or string (inline CSV
 * if it contains a line delimiter, otherwise treated as a URL to fetch).
 * @private
 */
Dygraph.prototype.start_ = function() {
var data = this.file_;
// Functions can return references of all other types.
if (typeof data == 'function') {
data = data();
}
if (Dygraph.isArrayLike(data)) {
this.rawData_ = this.parseArray_(data);
this.predraw_();
} else if (typeof data == 'object' &&
typeof data.getColumnRange == 'function') {
// must be a DataTable from gviz.
this.parseDataTable_(data);
this.predraw_();
} else if (typeof data == 'string') {
// Heuristic: a newline means it's CSV data. Otherwise it's an URL.
var line_delimiter = Dygraph.detectLineDelimiter(data);
if (line_delimiter) {
this.loadedEvent_(data);
} else {
// Asynchronous fetch; loadedEvent_ runs when the response arrives.
var req = new XMLHttpRequest();
var caller = this;
req.onreadystatechange = function () {
if (req.readyState == 4) {
if (req.status === 200 ||  // Normal http
req.status === 0) {      // Chrome w/ --allow-file-access-from-files
caller.loadedEvent_(req.responseText);
}
}
};
req.open("GET", data, true);
req.send(null);
}
} else {
this.error("Unknown data format: " + (typeof data));
}
};
/**
 * Changes various properties of the graph. These can include:
 * <ul>
 * <li>file: changes the source data for the graph</li>
 * <li>errorBars: changes whether the data contains stddev</li>
 * </ul>
 *
 * There's a huge variety of options that can be passed to this method. For a
 * full list, see http://dygraphs.com/options.html.
 *
 * @param {Object} attrs The new properties and values
 * @param {Boolean} [block_redraw] Usually the chart is redrawn after every
 * call to updateOptions(). If you know better, you can pass true to explicitly
 * block the redraw. This can be useful for chaining updateOptions() calls,
 * avoiding the occasional infinite loop and preventing redraws when it's not
 * necessary (e.g. when updating a callback).
 */
Dygraph.prototype.updateOptions = function(input_attrs, block_redraw) {
if (typeof(block_redraw) == 'undefined') block_redraw = false;
// mapLegacyOptions_ drops the "file" parameter as a convenience to us.
var file = input_attrs.file;
var attrs = Dygraph.mapLegacyOptions_(input_attrs);
// TODO(danvk): this is a mess. Move these options into attr_.
if ('rollPeriod' in attrs) {
this.rollPeriod_ = attrs.rollPeriod;
}
if ('dateWindow' in attrs) {
this.dateWindow_ = attrs.dateWindow;
if (!('isZoomedIgnoreProgrammaticZoom' in attrs)) {
// Setting a dateWindow programmatically counts as an x-axis zoom
// unless the caller explicitly opts out.
this.zoomed_x_ = (attrs.dateWindow !== null);
}
}
if ('valueRange' in attrs && !('isZoomedIgnoreProgrammaticZoom' in attrs)) {
this.zoomed_y_ = (attrs.valueRange !== null);
}
// TODO(danvk): validate per-series options.
// Supported:
// strokeWidth
// pointSize
// drawPoints
// highlightCircleSize
// Check if this set options will require new points.
var requiresNewPoints = Dygraph.isPixelChangingOptionList(this.attr_("labels"), attrs);
Dygraph.updateDeep(this.user_attrs_, attrs);
this.attributes_.reparseSeries();
if (file) {
// New data source: reload (and redraw unless blocked).
this.file_ = file;
if (!block_redraw) this.start_();
} else {
if (!block_redraw) {
// Full predraw only when the changed options affect point layout;
// otherwise a cheaper re-render suffices.
if (requiresNewPoints) {
this.predraw_();
} else {
this.renderGraph_(false);
}
}
}
};
/**
 * Returns a copy of the options with deprecated names converted into current
 * names. Also drops the (potentially-large) 'file' attribute. If the caller is
 * interested in that, they should save a copy before calling this.
 * Legacy per-axis options (e.g. xValueFormatter) are moved under
 * axes: { x: { ... } } / axes: { y: { ... } } with a deprecation warning.
 * @param {Object} attrs user-supplied options, possibly with legacy names.
 * @return {Object} a shallow copy with legacy names remapped.
 * @private
 */
Dygraph.mapLegacyOptions_ = function(attrs) {
var my_attrs = {};
for (var k in attrs) {
if (k == 'file') continue;
if (attrs.hasOwnProperty(k)) my_attrs[k] = attrs[k];
}
// Writes value into my_attrs.axes[axis][opt], creating objects as needed.
var set = function(axis, opt, value) {
if (!my_attrs.axes) my_attrs.axes = {};
if (!my_attrs.axes[axis]) my_attrs.axes[axis] = {};
my_attrs.axes[axis][opt] = value;
};
// Moves a deprecated top-level option to its per-axis home, warning once.
var map = function(opt, axis, new_opt) {
if (typeof(attrs[opt]) != 'undefined') {
Dygraph.warn("Option " + opt + " is deprecated. Use the " +
new_opt + " option for the " + axis + " axis instead. " +
"(e.g. { axes : { " + axis + " : { " + new_opt + " : ... } } } " +
"(see http://dygraphs.com/per-axis.html for more information.");
set(axis, new_opt, attrs[opt]);
delete my_attrs[opt];
}
};
// This maps, e.g., xValueFormater -> axes: { x: { valueFormatter: ... } }
map('xValueFormatter', 'x', 'valueFormatter');
map('pixelsPerXLabel', 'x', 'pixelsPerLabel');
map('xAxisLabelFormatter', 'x', 'axisLabelFormatter');
map('xTicker', 'x', 'ticker');
map('yValueFormatter', 'y', 'valueFormatter');
map('pixelsPerYLabel', 'y', 'pixelsPerLabel');
map('yAxisLabelFormatter', 'y', 'axisLabelFormatter');
map('yTicker', 'y', 'ticker');
return my_attrs;
};
/**
 * Resizes the dygraph. If no parameters are specified, resizes to fill the
 * containing div (which has presumably changed size since the dygraph was
 * instantiated. If the width/height are specified, the div will be resized.
 *
 * This is far more efficient than destroying and re-instantiating a
 * Dygraph, since it doesn't have to reparse the underlying data.
 *
 * Pass both parameters or neither; a single non-null parameter is treated
 * as if none were given (with a warning).
 *
 * @param {Number} [width] Width (in pixels)
 * @param {Number} [height] Height (in pixels)
 */
Dygraph.prototype.resize = function(width, height) {
// Guard against re-entrant calls (resize triggers layout work that can
// itself fire resize events).
if (this.resize_lock) {
return;
}
this.resize_lock = true;
if ((width === null) != (height === null)) {
this.warn("Dygraph.resize() should be called with zero parameters or " +
"two non-NULL parameters. Pretending it was zero.");
width = height = null;
}
var old_width = this.width_;
var old_height = this.height_;
if (width) {
this.maindiv_.style.width = width + "px";
this.maindiv_.style.height = height + "px";
this.width_ = width;
this.height_ = height;
} else {
// No explicit size: adopt the container's current dimensions.
this.width_ = this.maindiv_.clientWidth;
this.height_ = this.maindiv_.clientHeight;
}
if (old_width != this.width_ || old_height != this.height_) {
// TODO(danvk): there should be a clear() method.
// Rebuild the interface from scratch at the new size.
this.maindiv_.innerHTML = "";
this.roller_ = null;
this.attrs_.labelsDiv = null;
this.createInterface_();
if (this.annotations_.length) {
// createInterface_ reset the layout, so we need to do this.
this.layout_.setAnnotations(this.annotations_);
}
this.createDragInterface_();
this.predraw_();
}
this.resize_lock = false;
};
/**
 * Adjusts the number of points in the rolling average. Updates the graph to
 * reflect the new averaging period.
 * @param {Number} length Number of points over which to average the data.
 */
Dygraph.prototype.adjustRoll = function(length) {
this.rollPeriod_ = length;
// Recompute points and redraw with the new averaging window.
this.predraw_();
};
/**
 * Returns a boolean array of visibility statuses, one entry per data series.
 * The array is created and padded lazily so that the number of series is
 * known by the time it is consulted.
 * NOTE (from upstream TODO): if 'visibility' lives in user_attrs, pushing
 * onto attrs_.visibility never lengthens it — potential infinite loop.
 */
Dygraph.prototype.visibility = function() {
// Lazily create the backing array on first use.
if (!this.attr_("visibility")) {
this.attrs_.visibility = [];
}
// Pad with `true` until every series is covered.
// (Series count is numColumns() - 1, since column 0 is the x-axis.)
var seriesCount = this.numColumns() - 1;
while (this.attr_("visibility").length < seriesCount) {
this.attrs_.visibility.push(true);
}
return this.attr_("visibility");
};
/**
 * Changes the visibility of a single series and redraws the chart.
 * Out-of-range series numbers are rejected with a warning.
 * @param num zero-based series index (x-axis excluded).
 * @param value true to show the series, false to hide it.
 */
Dygraph.prototype.setVisibility = function(num, value) {
var statuses = this.visibility();
var outOfRange = (num < 0 || num >= statuses.length);
if (outOfRange) {
this.warn("invalid series number in setVisibility: " + num);
return;
}
statuses[num] = value;
this.predraw_();
};
/**
 * How large of an area will the dygraph render itself in?
 * This is used for testing.
 * @return A {width: w, height: h} object.
 * @private
 */
Dygraph.prototype.size = function() {
return { width: this.width_, height: this.height_ };
};
/**
 * Update the list of annotations and redraw the chart.
 * See dygraphs.com/annotations.html for more info on how to use annotations.
 * @param ann {Array} An array of annotation objects.
 * @param suppressDraw {Boolean} Set to "true" to block chart redraw (optional).
 */
Dygraph.prototype.setAnnotations = function(ann, suppressDraw) {
// Only add the annotation CSS rule once we know it will be used.
Dygraph.addAnnotationRule();
this.annotations_ = ann;
this.layout_.setAnnotations(this.annotations_);
if (!suppressDraw) {
this.predraw_();
}
};
/**
 * Return the list of annotations.
 * Note: returns the internal array itself, not a copy.
 */
Dygraph.prototype.annotations = function() {
return this.annotations_;
};
/**
 * Get the list of label names for this graph. The first column is the
 * x-axis, so the data series names start at index 1.
 * Returns a defensive copy (slice), so callers may mutate it freely.
 */
Dygraph.prototype.getLabels = function() {
return this.attr_("labels").slice();
};
/**
 * Get the index of a series (column) given its name. The first column is the
 * x-axis, so the data series start with index 1.
 * Returns undefined for an unknown name (plain map lookup).
 */
Dygraph.prototype.indexFromSetName = function(name) {
return this.setIndexByName_[name];
};
/**
 * Get the internal dataset index given its name. These are numbered starting from 0,
 * and only count visible sets.
 * @private
 */
Dygraph.prototype.datasetIndexFromSetName_ = function(name) {
return this.datasetIndex_[this.indexFromSetName(name)];
};
/**
 * @private
 * Adds a default style for the annotation CSS classes to the document. This is
 * only executed when annotations are actually used. It is designed to only be
 * called once -- all calls after the first will return immediately.
 */
Dygraph.addAnnotationRule = function() {
// TODO(danvk): move this function into plugins/annotations.js?
if (Dygraph.addedAnnotationCSS) return;

var rule = "border: 1px solid black; " +
"background-color: white; " +
"text-align: center;";

var styleSheetElement = document.createElement("style");
styleSheetElement.type = "text/css";
document.getElementsByTagName("head")[0].appendChild(styleSheetElement);

// Find the first style sheet that we can access.
// We may not add a rule to a style sheet from another domain for security
// reasons. This sometimes comes up when using gviz, since the Google gviz JS
// adds its own style sheets from google.com.
for (var i = 0; i < document.styleSheets.length; i++) {
if (document.styleSheets[i].disabled) continue;
var mysheet = document.styleSheets[i];
try {
if (mysheet.insertRule) {  // Firefox
var idx = mysheet.cssRules ? mysheet.cssRules.length : 0;
mysheet.insertRule(".dygraphDefaultAnnotation { " + rule + " }", idx);
} else if (mysheet.addRule) {  // IE
mysheet.addRule(".dygraphDefaultAnnotation", rule);
}
Dygraph.addedAnnotationCSS = true;
return;
} catch(err) {
// Was likely a security exception.
}
}

// This is a static function: `this` is only Dygraph when invoked exactly as
// Dygraph.addAnnotationRule(), and would break via a detached reference.
// Use Dygraph.warn explicitly (matches the static usage elsewhere in this
// file, e.g. in mapLegacyOptions_).
Dygraph.warn("Unable to add default annotation CSS rule; display may be off.");
};
// Older pages may still use this name.
// Legacy alias kept for backwards compatibility with pre-rename pages.
var DateGraph = Dygraph;
| Resize event causes several event handlers to be added. Make the events private, and prevent multiple registration.
| dygraph.js | Resize event causes several event handlers to be added. Make the events private, and prevent multiple registration. | <ide><path>ygraph.js
<ide>
<ide> var dygraph = this;
<ide>
<del> this.mouseMoveHandler = function(e) {
<del> dygraph.mouseMove_(e);
<del> };
<del> this.addEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler);
<del>
<del> this.mouseOutHandler = function(e) {
<del> dygraph.mouseOut_(e);
<del> };
<del> this.addEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler);
<del>
<del> if (!this.resizeHandler_) {
<add> // Don't recreate and register the handlers on subsequent calls.
<add> // This happens when the graph is resized.
<add> if (!this.mouseMoveHandler_) {
<add> this.mouseMoveHandler_ = function(e) {
<add> dygraph.mouseMove_(e);
<add> };
<add> this.addEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler_);
<add>
<add> this.mouseOutHandler_ = function(e) {
<add> dygraph.mouseOut_(e);
<add> };
<add> this.addEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler_);
<add>
<ide> this.resizeHandler_ = function(e) {
<ide> dygraph.resize();
<ide> };
<ide> this.registeredEvents_ = [];
<ide>
<ide> // remove mouse event handlers (This may not be necessary anymore)
<del> Dygraph.removeEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler);
<del> Dygraph.removeEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler);
<add> Dygraph.removeEvent(this.mouseEventElement_, 'mouseout', this.mouseOutHandler_);
<add> Dygraph.removeEvent(this.mouseEventElement_, 'mousemove', this.mouseMoveHandler_);
<ide> Dygraph.removeEvent(this.mouseEventElement_, 'mousemove', this.mouseUpHandler_);
<add>
<add> // remove window handlers
<add> Dygraph.removeEvent(window,'resize',this.resizeHandler_);
<add> this.resizeHandler_ = null;
<add>
<ide> removeRecursive(this.maindiv_);
<ide>
<ide> var nullOut = function(obj) {
<ide> }
<ide> }
<ide> };
<del> // remove event handlers
<del> Dygraph.removeEvent(window,'resize',this.resizeHandler_);
<del> this.resizeHandler_ = null;
<ide> // These may not all be necessary, but it can't hurt...
<ide> nullOut(this.layout_);
<ide> nullOut(this.plotter_); |
|
Java | apache-2.0 | 6fc6e4881c22d44ec757137882f6de4b23ebdd94 | 0 | magnet/bnd,psoreide/bnd,mcculls/bnd,magnet/bnd,magnet/bnd,mcculls/bnd,mcculls/bnd,psoreide/bnd,lostiniceland/bnd,lostiniceland/bnd,lostiniceland/bnd,psoreide/bnd | package aQute.bnd.osgi.repository;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.osgi.resource.Resource;
import aQute.bnd.header.Attrs;
import aQute.bnd.osgi.Processor;
import aQute.bnd.osgi.resource.CapReqBuilder;
import aQute.bnd.osgi.resource.ResourceBuilder;
import aQute.libg.gzip.GZipUtils;
/**
 * Streaming (StAX) parser for OSGi Repository XML documents in the
 * {@code http://www.osgi.org/xmlns/repository/v1.0.0} namespace. Each
 * {@code <resource>} element is converted into a {@link Resource}; any
 * {@code <referral>} elements are followed recursively, bounded by a depth
 * counter and a set of already-visited URIs to avoid cycles.
 * <p>
 * Parse problems are reported through the inherited {@link Processor} error
 * list rather than thrown; callers should consult {@code isOk()}.
 */
public class XMLResourceParser extends Processor {
	final static XMLInputFactory inputFactory = XMLInputFactory.newInstance();

	static {
		// Namespace-aware, non-validating, and DTD support disabled
		// (the latter guards against XXE-style external entity processing).
		inputFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, true);
		inputFactory.setProperty(XMLInputFactory.IS_VALIDATING, false);
		inputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
	}

	private static final String NS_URI = "http://www.osgi.org/xmlns/repository/v1.0.0";
	private static final String TAG_REPOSITORY = "repository";
	private static final String TAG_REFERRAL = "referral";
	private static final String TAG_RESOURCE = "resource";
	private static final String TAG_CAPABILITY = "capability";
	private static final String TAG_REQUIREMENT = "requirement";
	private static final String TAG_ATTRIBUTE = "attribute";
	private static final String TAG_DIRECTIVE = "directive";
	private static final String ATTR_REFERRAL_URL = "url";
	private static final String ATTR_REFERRAL_DEPTH = "depth";
	private static final String ATTR_NAMESPACE = "namespace";
	private static final String ATTR_NAME = "name";
	private static final String ATTR_VALUE = "value";
	private static final String ATTR_TYPE = "type";

	// Resources collected so far, including those gathered from referrals.
	final private List<Resource> resources = new ArrayList<>();
	// Pull parser positioned on the (possibly gzip-compressed) input.
	final private XMLStreamReader reader;
	// URIs already visited via referrals, shared across nested parsers.
	final private Set<URI> traversed;
	// Human-readable description of the input, used in error messages.
	final private String what;
	@SuppressWarnings("unused")
	final private URI url;
	// Remaining referral depth; referrals are rejected once this goes below 0.
	private int depth;

	/**
	 * Convenience entry point: parse the repository at {@code uri} and
	 * return its resources, closing the parser afterwards.
	 *
	 * @param uri location of the repository index (may be gzipped)
	 * @return the parsed resources, or null if errors were reported
	 */
	public static List<Resource> getResources(URI uri) throws Exception {
		try (XMLResourceParser parser = new XMLResourceParser(uri)) {
			return parser.parse();
		}
	}

	/**
	 * Creates a parser reading from the given URI (opened immediately).
	 */
	public XMLResourceParser(URI url) throws Exception {
		this(url.toURL().openStream(), url.toString(), url);
	}

	/**
	 * Creates a parser with the default referral depth (100) and a fresh
	 * visited-URI set.
	 */
	public XMLResourceParser(InputStream in, String what, URI uri) throws Exception {
		this(in, what, 100, new HashSet<URI>(), uri);
	}

	/**
	 * Sets the maximum remaining referral depth.
	 */
	public void setDepth(int n) {
		this.depth = n;
	}

	/**
	 * Full constructor used for nested referral parsing.
	 *
	 * @param in raw input; gzip compression is detected transparently
	 * @param what description of the input for error messages
	 * @param depth remaining referral depth budget
	 * @param traversed shared set of already-visited referral URIs
	 * @param url URI of this document (currently unused)
	 */
	public XMLResourceParser(InputStream in, String what, int depth, Set<URI> traversed, URI url) throws Exception {
		this.what = what;
		this.depth = depth;
		this.traversed = traversed;
		this.url = url;
		reader = inputFactory.createXMLStreamReader(GZipUtils.detectCompression(in));
	}

	/**
	 * Returns the collected resources, or null if any error was reported.
	 */
	List<Resource> getResources() {
		if (!isOk())
			return null;
		return resources;
	}

	/**
	 * Parses the document. Expects a top-level {@code <repository>} element
	 * containing {@code <resource>} and/or {@code <referral>} children.
	 *
	 * @return the parsed resources, or null if errors were reported
	 */
	public List<Resource> parse() throws Exception {
		if (!check(reader.hasNext(), "No content found"))
			return null;

		next();
		if (!check(reader.isStartElement(), "Expected a start element at the root, is %s", reader.getEventType()))
			return null;

		String name = reader.getLocalName();
		if (!check(TAG_REPOSITORY.equals(name), "Invalid tag name of top element, expected %s, got %s", TAG_REPOSITORY,
			name))
			return null;

		// A missing namespace is tolerated; a wrong one is reported but
		// parsing continues anyway.
		String nsUri = reader.getNamespaceURI();
		if (nsUri != null) {
			check(NS_URI.equals(nsUri), "Incorrect namespace. Expected %s, got %s", NS_URI, nsUri);
		}

		next(); // either start resource/referral or end

		while (reader.isStartElement()) {
			String localName = reader.getLocalName();
			if (localName.equals(TAG_REFERRAL))
				parseReferral();
			else if (localName.equals(TAG_RESOURCE))
				parseResource(resources);
			else {
				check(false, "Unexpected element %s", localName);
				next();
			}
		}

		check(reader.isEndElement() && reader.getLocalName().equals(TAG_REPOSITORY),
			"Expected to be at the end but are on %s", reader.getLocalName());
		return getResources();
	}

	/**
	 * Advances the reader to the next start or end tag, tracing the current
	 * position first.
	 */
	public void next() throws XMLStreamException {
		report();
		reader.nextTag();
	}

	// Trace helper: logs the element the reader is currently positioned on.
	private void report() {
		if (reader.isStartElement()) {
			trace("<%s>", reader.getLocalName());
		} else if (reader.isEndElement()) {
			trace("</%s>", reader.getLocalName());
		} else {
			trace("** unknown element %s", reader.getEventType());
		}
	}

	/**
	 * Handles a {@code <referral>} element: opens the referenced repository
	 * and merges its resources into ours, honoring the depth budget.
	 */
	private void parseReferral() throws Exception {
		if (--depth < 0)
			error("Too deep, traversed %s", traversed);
		else {
			// NOTE(review): these lookups use the repository namespace URI,
			// but unprefixed XML attributes normally have no namespace —
			// confirm against real repository files before changing.
			String depthString = reader.getAttributeValue(NS_URI, ATTR_REFERRAL_DEPTH);
			String urlString = reader.getAttributeValue(NS_URI, ATTR_REFERRAL_URL);
			if (check(urlString != null, "Expected URL in referral")) {
				// TODO resolve url relative to the current document
				URI referral = new URI(urlString);
				traversed.add(referral);

				// Referral may carry its own depth limit; default to 100.
				// (Renamed from `depth` to avoid shadowing the field.)
				int referralDepth = 100;
				if (depthString != null) {
					referralDepth = Integer.parseInt(depthString);
				}

				InputStream in = referral.toURL().openStream();
				try (XMLResourceParser referralParser = new XMLResourceParser(in, urlString, referralDepth, traversed,
					referral)) {
					referralParser.parse();
					resources.addAll(referralParser.resources);
				}
			}
		}
		tagEnd(TAG_REFERRAL);
	}

	// Verifies we are on the expected end tag, then advances past it.
	private void tagEnd(String tag) throws XMLStreamException {
		if (!check(reader.isEndElement(), "Expected end element, got %s for %s (%s)", reader.getEventType(), tag,
			reader.getLocalName())) {
			trace("oops, invalid end %s", tag);
		}
		next();
	}

	/**
	 * Parses one {@code <resource>} element, adding the built Resource to
	 * the given list.
	 */
	private void parseResource(List<Resource> resources) throws Exception {
		ResourceBuilder resourceBuilder = new ResourceBuilder();

		next();
		while (reader.isStartElement()) {
			parseCapabilityOrRequirement(resourceBuilder);
		}
		Resource resource = resourceBuilder.build();
		resources.add(resource);
		tagEnd(TAG_RESOURCE);
	}

	/**
	 * Parses one {@code <capability>} or {@code <requirement>} element and
	 * adds it to the resource under construction.
	 */
	private void parseCapabilityOrRequirement(ResourceBuilder resourceBuilder) throws Exception {
		String name = reader.getLocalName();
		check(TAG_REQUIREMENT.equals(name) || TAG_CAPABILITY.equals(name), "Expected <%s> or <%s> tag, got <%s>",
			TAG_REQUIREMENT, TAG_CAPABILITY, name);

		String namespace = reader.getAttributeValue(null, ATTR_NAMESPACE);
		CapReqBuilder capReqBuilder = new CapReqBuilder(namespace);

		next();
		while (reader.isStartElement()) {
			parseAttributesOrDirectives(capReqBuilder);
		}

		if (TAG_REQUIREMENT.equals(name)) {
			resourceBuilder.addRequirement(capReqBuilder);
		} else {
			resourceBuilder.addCapability(capReqBuilder);
		}
		tagEnd(name);
	}

	/**
	 * Parses one {@code <attribute>} or {@code <directive>} child.
	 */
	private void parseAttributesOrDirectives(CapReqBuilder capReqBuilder) throws Exception {
		String name = reader.getLocalName();
		switch (name) {
			case TAG_ATTRIBUTE :
				parseAttribute(capReqBuilder);
				break;
			case TAG_DIRECTIVE :
				parseDirective(capReqBuilder);
				break;
			default :
				// BUGFIX: the format string has three placeholders; the
				// original call passed only two arguments, which made
				// String.format throw whenever this branch was hit.
				check(false, "Invalid tag, expected either <%s> or <%s>, got <%s>", TAG_ATTRIBUTE, TAG_DIRECTIVE,
					name);
		}

		next();
		tagEnd(name);
	}

	/**
	 * Reports a formatted error when {@code check} is false.
	 *
	 * @return the value of {@code check}
	 */
	private boolean check(boolean check, String format, Object... args) {
		if (check)
			return true;
		String message = String.format(format, args);
		error("%s: %s", what, message);
		return false;
	}

	// Reads name/value/type and adds a typed attribute to the builder.
	private void parseAttribute(CapReqBuilder capReqBuilder) throws Exception {
		String attributeName = reader.getAttributeValue(null, ATTR_NAME);
		String attributeValue = reader.getAttributeValue(null, ATTR_VALUE);
		String attributeType = reader.getAttributeValue(null, ATTR_TYPE);
		Object value = Attrs.convert(attributeType, attributeValue);
		capReqBuilder.addAttribute(attributeName, value);
	}

	// Reads name/value and adds a directive; directives must not be typed.
	private void parseDirective(CapReqBuilder capReqBuilder) throws XMLStreamException {
		String attributeName = reader.getAttributeValue(null, ATTR_NAME);
		String attributeValue = reader.getAttributeValue(null, ATTR_VALUE);
		String attributeType = reader.getAttributeValue(null, ATTR_TYPE);
		check(attributeType == null, "Expected a directive to have no type: %s:%s=%s", attributeName, attributeType,
			attributeValue);
		capReqBuilder.addDirective(attributeName, attributeValue);
	}
}
| biz.aQute.bndlib/src/aQute/bnd/osgi/repository/XMLResourceParser.java | package aQute.bnd.osgi.repository;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.osgi.resource.Resource;
import aQute.bnd.header.Attrs;
import aQute.bnd.osgi.Processor;
import aQute.bnd.osgi.resource.CapReqBuilder;
import aQute.bnd.osgi.resource.ResourceBuilder;
import aQute.libg.gzip.GZipUtils;
public class XMLResourceParser extends Processor {
final static XMLInputFactory inputFactory = XMLInputFactory.newInstance();
static {
inputFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, true);
inputFactory.setProperty(XMLInputFactory.IS_VALIDATING, false);
inputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
}
private static final String NS_URI = "http://www.osgi.org/xmlns/repository/v1.0.0";
private static final String TAG_REPOSITORY = "repository";
private static final String TAG_REFERRAL = "referral";
private static final String TAG_RESOURCE = "resource";
private static final String TAG_CAPABILITY = "capability";
private static final String TAG_REQUIREMENT = "requirement";
private static final String TAG_ATTRIBUTE = "attribute";
private static final String TAG_DIRECTIVE = "directive";
private static final String ATTR_REFERRAL_URL = "url";
private static final String ATTR_REFERRAL_DEPTH = "depth";
private static final String ATTR_NAMESPACE = "namespace";
private static final String ATTR_NAME = "name";
private static final String ATTR_VALUE = "value";
private static final String ATTR_TYPE = "type";
final private List<Resource> resources = new ArrayList<>();
final private XMLStreamReader reader;
final private Set<URI> traversed;
final private String what;
@SuppressWarnings("unused")
final private URI url;
private int depth;
	/** Creates a parser reading from the given URI (stream opened immediately). */
	public XMLResourceParser(URI url) throws Exception {
		this(url.toURL().openStream(), url.toString(), url);
	}
	/** Creates a parser with the default referral depth (100) and a fresh visited-URI set. */
	public XMLResourceParser(InputStream in, String what, URI uri) throws Exception {
		this(in, what, 100, new HashSet<URI>(), uri);
	}
	/** Sets the maximum remaining referral depth. */
	public void setDepth(int n) {
		this.depth = n;
	}
	/**
	 * Full constructor used for nested referral parsing.
	 *
	 * @param in raw input; gzip compression is detected transparently
	 * @param what description of the input for error messages
	 * @param depth remaining referral depth budget
	 * @param traversed shared set of already-visited referral URIs
	 * @param url URI of this document (currently unused)
	 */
	public XMLResourceParser(InputStream in, String what, int depth, Set<URI> traversed, URI url) throws Exception {
		this.what = what;
		this.depth = depth;
		this.traversed = traversed;
		this.url = url;
		// GZipUtils transparently handles gzip-compressed repository indexes.
		reader = inputFactory.createXMLStreamReader(GZipUtils.detectCompression(in));
	}
	/** Returns the collected resources, or null if any error was reported. */
	List<Resource> getResources() {
		if (!isOk())
			return null;
		return resources;
	}
	/**
	 * Parses the document. Expects a top-level <repository> element containing
	 * <resource> and/or <referral> children. Errors are reported via the
	 * Processor error list rather than thrown.
	 *
	 * @return the parsed resources, or null if errors were reported
	 */
	public List<Resource> parse() throws Exception {
		if (!check(reader.hasNext(), "No content found"))
			return null;
		next();
		if (!check(reader.isStartElement(), "Expected a start element at the root, is %s", reader.getEventType()))
			return null;
		String name = reader.getLocalName();
		if (!check(TAG_REPOSITORY.equals(name), "Invalid tag name of top element, expected %s, got %s", TAG_REPOSITORY,
			name))
			return null;
		// A missing namespace is tolerated; a wrong one is reported but parsing continues.
		String nsUri = reader.getNamespaceURI();
		if (nsUri != null) {
			check(NS_URI.equals(nsUri), "Incorrect namespace. Expected %s, got %s", NS_URI, nsUri);
		}
		next(); // either start resource/referral or end
		while (reader.isStartElement()) {
			String localName = reader.getLocalName();
			if (localName.equals(TAG_REFERRAL))
				parseReferral();
			else if (localName.equals(TAG_RESOURCE))
				parseResource(resources);
			else {
				check(false, "Unexpected element %s", localName);
				next();
			}
		}
		check(reader.isEndElement() && reader.getLocalName().equals(TAG_REPOSITORY),
			"Expected to be at the end but are on %s", reader.getLocalName());
		return getResources();
	}
	/** Advances the reader to the next start or end tag, tracing the current position first. */
	public void next() throws XMLStreamException {
		report();
		reader.nextTag();
	}
	// Trace helper: logs the element the reader is currently positioned on.
	private void report() {
		if (reader.isStartElement()) {
			trace("<%s>", reader.getLocalName());
		} else if (reader.isEndElement()) {
			trace("</%s>", reader.getLocalName());
		} else {
			trace("** unknown element %s", reader.getEventType());
		}
	}
	/**
	 * Handles a <referral> element: opens the referenced repository and merges
	 * its resources into ours, honoring the remaining depth budget. Visited
	 * URIs are recorded in the shared `traversed` set.
	 */
	private void parseReferral() throws Exception {
		if (--depth < 0)
			error("Too deep, traversed %s", traversed);
		else {
			// NOTE(review): these lookups use the repository namespace URI, but
			// unprefixed XML attributes normally have no namespace — confirm
			// against real repository files.
			String depthString = reader.getAttributeValue(NS_URI, ATTR_REFERRAL_DEPTH);
			String urlString = reader.getAttributeValue(NS_URI, ATTR_REFERRAL_URL);
			if (check(urlString != null, "Expected URL in referral")) {
				// TODO resolve url
				URI url = new URI(urlString);
				traversed.add(url);
				// Referral may carry its own depth limit; default is 100.
				// (This local intentionally shadows the field for the nested parser.)
				int depth = 100;
				if (depthString != null) {
					depth = Integer.parseInt(depthString);
				}
				InputStream in = url.toURL().openStream();
				try (XMLResourceParser referralParser = new XMLResourceParser(in, urlString, depth, traversed, url);) {
					referralParser.parse();
					resources.addAll(referralParser.resources);
				}
			}
		}
		tagEnd(TAG_REFERRAL);
	}
private void tagEnd(String tag) throws XMLStreamException {
if (!check(reader.isEndElement(), "Expected end element, got %s for %s (%s)", reader.getEventType(), tag,
reader.getLocalName())) {
trace("oops, invalid end %s", tag);
}
next();
}
private void parseResource(List<Resource> resources) throws Exception {
ResourceBuilder resourceBuilder = new ResourceBuilder();
next();
while (reader.isStartElement()) {
parseCapabilityOrRequirement(resourceBuilder);
}
Resource resource = resourceBuilder.build();
resources.add(resource);
tagEnd(TAG_RESOURCE);
}
private void parseCapabilityOrRequirement(ResourceBuilder resourceBuilder) throws Exception {
String name = reader.getLocalName();
check(TAG_REQUIREMENT.equals(name) || TAG_CAPABILITY.equals(name), "Expected <%s> or <%s> tag, got <%s>",
TAG_REQUIREMENT, TAG_CAPABILITY, name);
String namespace = reader.getAttributeValue(null, ATTR_NAMESPACE);
CapReqBuilder capReqBuilder = new CapReqBuilder(namespace);
next();
while (reader.isStartElement()) {
parseAttributesOrDirectives(capReqBuilder);
}
if (TAG_REQUIREMENT.equals(name)) {
resourceBuilder.addRequirement(capReqBuilder);
} else {
resourceBuilder.addCapability(capReqBuilder);
}
tagEnd(name);
}
private void parseAttributesOrDirectives(CapReqBuilder capReqBuilder) throws Exception {
String name = reader.getLocalName();
switch (name) {
case TAG_ATTRIBUTE :
parseAttribute(capReqBuilder);
break;
case TAG_DIRECTIVE :
parseDirective(capReqBuilder);
break;
default :
check(false, "Invalid tag, expected either <%s> or <%s>, got <%s>", TAG_ATTRIBUTE, TAG_DIRECTIVE);
}
next();
tagEnd(name);
}
private boolean check(boolean check, String format, Object... args) {
if (check)
return true;
String message = String.format(format, args);
error("%s: %s", what, message);
return false;
}
private void parseAttribute(CapReqBuilder capReqBuilder) throws Exception {
String attributeName = reader.getAttributeValue(null, ATTR_NAME);
String attributeValue = reader.getAttributeValue(null, ATTR_VALUE);
String attributeType = reader.getAttributeValue(null, ATTR_TYPE);
Object value = Attrs.convert(attributeType, attributeValue);
capReqBuilder.addAttribute(attributeName, value);
}
private void parseDirective(CapReqBuilder capReqBuilder) throws XMLStreamException {
String attributeName = reader.getAttributeValue(null, ATTR_NAME);
String attributeValue = reader.getAttributeValue(null, ATTR_VALUE);
String attributeType = reader.getAttributeValue(null, ATTR_TYPE);
check(attributeType == null, "Expected a directive to have no type: %s:%s=%s", attributeName, attributeType,
attributeValue);
capReqBuilder.addDirective(attributeName, attributeValue);
}
}
| [repository] add static function to get Resource from URI
Signed-off-by: Raymond Auge <[email protected]>
| biz.aQute.bndlib/src/aQute/bnd/osgi/repository/XMLResourceParser.java | [repository] add static function to get Resource from URI | <ide><path>iz.aQute.bndlib/src/aQute/bnd/osgi/repository/XMLResourceParser.java
<ide> final private URI url;
<ide>
<ide> private int depth;
<add>
<add> public static List<Resource> getResources(URI uri) throws Exception {
<add> try (XMLResourceParser parser = new XMLResourceParser(uri)) {
<add> return parser.parse();
<add> }
<add> }
<ide>
<ide> public XMLResourceParser(URI url) throws Exception {
<ide> this(url.toURL().openStream(), url.toString(), url); |
|
Java | apache-2.0 | 3d701102173e1b3ec7aaa16c60472ce874e19a44 | 0 | wso2/carbon-device-mgt,madhawap/carbon-device-mgt,madhawap/carbon-device-mgt,rasika90/carbon-device-mgt,prithvi66/carbon-device-mgt,sameeragunarathne/carbon-device-mgt,dilee/carbon-device-mgt,sameeragunarathne/carbon-device-mgt,dilee/carbon-device-mgt,geethkokila/carbon-device-mgt,hasuniea/carbon-device-mgt,dilee/carbon-device-mgt,prithvi66/carbon-device-mgt,ruwany/carbon-device-mgt,ruwany/carbon-device-mgt,Kamidu/carbon-device-mgt,madawas/carbon-device-mgt,hasuniea/carbon-device-mgt,hasuniea/carbon-device-mgt,madhawap/carbon-device-mgt,wso2/carbon-device-mgt,menakaj/carbon-device-mgt,geethkokila/carbon-device-mgt,wso2/carbon-device-mgt,Kamidu/carbon-device-mgt,madawas/carbon-device-mgt,rasika/carbon-device-mgt,rasika/carbon-device-mgt,charithag/carbon-device-mgt,prithvi66/carbon-device-mgt,rasika90/carbon-device-mgt,menakaj/carbon-device-mgt,madawas/carbon-device-mgt,charithag/carbon-device-mgt,ruwany/carbon-device-mgt,menakaj/carbon-device-mgt,rasika90/carbon-device-mgt,charithag/carbon-device-mgt,sameeragunarathne/carbon-device-mgt,rasika/carbon-device-mgt,geethkokila/carbon-device-mgt,Kamidu/carbon-device-mgt | /*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.jaxrs.service.impl;
import org.apache.axis2.AxisFault;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.testng.Assert;
import org.testng.IObjectFactory;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.ObjectFactory;
import org.testng.annotations.Test;
import org.wso2.carbon.caching.impl.CacheImpl;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.device.mgt.analytics.data.publisher.exception.DataPublisherConfigurationException;
import org.wso2.carbon.device.mgt.analytics.data.publisher.service.EventsPublisherService;
import org.wso2.carbon.device.mgt.analytics.data.publisher.service.EventsPublisherServiceImpl;
import org.wso2.carbon.device.mgt.common.Device;
import org.wso2.carbon.device.mgt.common.DeviceIdentifier;
import org.wso2.carbon.device.mgt.common.DeviceManagementException;
import org.wso2.carbon.device.mgt.common.EnrolmentInfo;
import org.wso2.carbon.device.mgt.common.authorization.DeviceAccessAuthorizationException;
import org.wso2.carbon.device.mgt.common.authorization.DeviceAccessAuthorizationService;
import org.wso2.carbon.device.mgt.common.operation.mgt.OperationManagementException;
import org.wso2.carbon.device.mgt.core.authorization.DeviceAccessAuthorizationServiceImpl;
import org.wso2.carbon.device.mgt.core.service.DeviceManagementProviderService;
import org.wso2.carbon.device.mgt.core.service.DeviceManagementProviderServiceImpl;
import org.wso2.carbon.device.mgt.jaxrs.service.api.DeviceAgentService;
import org.wso2.carbon.device.mgt.jaxrs.service.api.admin.DeviceTypeManagementAdminService;
import org.wso2.carbon.device.mgt.jaxrs.service.impl.util.DeviceMgtAPITestHelper;
import org.wso2.carbon.device.mgt.jaxrs.util.DeviceMgtAPIUtils;
import org.wso2.carbon.event.stream.stub.EventStreamAdminServiceStub;
import org.wso2.carbon.event.stream.stub.types.EventStreamAttributeDto;
import org.wso2.carbon.event.stream.stub.types.EventStreamDefinitionDto;
import org.wso2.carbon.identity.jwt.client.extension.exception.JWTClientException;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.utils.CarbonUtils;
import javax.cache.CacheManager;
import javax.ws.rs.core.Response;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.mockito.MockitoAnnotations.initMocks;
/**
* This class holds the unit tests for the class {@link DeviceAgentServiceImpl}
*/
@PowerMockIgnore("javax.ws.rs.*")
@SuppressStaticInitializationFor({"org.wso2.carbon.device.mgt.jaxrs.util.DeviceMgtAPIUtils",
"org.wso2.carbon.context.CarbonContext", "org.wso2.carbon.context.internal.CarbonContextDataHolder"})
@PrepareForTest({DeviceMgtAPIUtils.class, DeviceManagementProviderService.class,
DeviceAccessAuthorizationService.class, EventStreamAdminServiceStub.class, PrivilegedCarbonContext.class,
CarbonContext.class, CarbonUtils.class})
public class DeviceAgentServiceTest {
private static final Log log = LogFactory.getLog(DeviceTypeManagementAdminService.class);
private DeviceManagementProviderService deviceManagementProviderService;
private DeviceAgentService deviceAgentService;
private EventStreamAdminServiceStub eventStreamAdminServiceStub;
private PrivilegedCarbonContext privilegedCarbonContext;
private CarbonContext carbonContext;
private CacheManager cacheManager;
private DeviceAccessAuthorizationService deviceAccessAuthorizationService;
private static final String TEST_DEVICE_TYPE = "TEST-DEVICE-TYPE";
private static final String TEST_DEVICE_IDENTIFIER = "11222334455";
private static final String AUTHENTICATED_USER = "admin";
private static final String TENANT_DOMAIN = "carbon.super";
private static Device demoDevice;
@ObjectFactory
public IObjectFactory getObjectFactory() {
return new org.powermock.modules.testng.PowerMockObjectFactory();
}
@BeforeClass
public void init() {
log.info("Initializing DeviceAgent tests");
initMocks(this);
this.deviceManagementProviderService = Mockito
.mock(DeviceManagementProviderServiceImpl.class, Mockito.RETURNS_MOCKS);
this.deviceAgentService = new DeviceAgentServiceImpl();
this.deviceAccessAuthorizationService = Mockito.mock(DeviceAccessAuthorizationServiceImpl.class,
Mockito.RETURNS_MOCKS);
this.privilegedCarbonContext = Mockito.mock(PrivilegedCarbonContext.class, Mockito.RETURNS_MOCKS);
this.carbonContext = Mockito.mock(CarbonContext.class, Mockito.RETURNS_MOCKS);
this.eventStreamAdminServiceStub = Mockito.mock(EventStreamAdminServiceStub.class, Mockito.RETURNS_MOCKS);
demoDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
this.cacheManager = Mockito.mock(CacheManager.class, Mockito.RETURNS_MOCKS);
}
@Test(description = "Test device Enrollment when the device is null")
public void testEnrollDeviceWithDeviceIsNULL() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Response response = this.deviceAgentService.enrollDevice(null);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test device enrollment when device type is null.")
public void testEnrollDeviceWithDeviceTypeNull() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(null, TEST_DEVICE_IDENTIFIER);
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test device enrollment of a device with null device identifier.")
public void testEnrollNewDeviceWithDeviceIdentifierIsNull() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, null);
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test an already enrolled device")
public void testEnrollExistingDevice() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(demoDevice);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(device);
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the device enrollment success scenario.")
public void testEnrollDeviceSuccess() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getAuthenticatedUser"))
.toReturn(AUTHENTICATED_USER);
EnrolmentInfo enrolmentInfo = demoDevice.getEnrolmentInfo();
enrolmentInfo.setStatus(EnrolmentInfo.Status.INACTIVE);
demoDevice.setEnrolmentInfo(enrolmentInfo);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(demoDevice);
Response response = this.deviceAgentService.enrollDevice(demoDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the device enrollment with device management exception.")
public void testEnrollDeviceWithException() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getAuthenticatedUser"))
.toReturn(AUTHENTICATED_USER);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
EnrolmentInfo enrolmentInfo = device.getEnrolmentInfo();
enrolmentInfo.setStatus(EnrolmentInfo.Status.INACTIVE);
device.setEnrolmentInfo(enrolmentInfo);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(device);
Mockito.when(this.deviceManagementProviderService.enrollDevice(Mockito.any()))
.thenThrow(new DeviceManagementException());
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test dis-enrolling the device success scenario.")
public void testDisEnrollDeviceSuccess() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.disenrollDevice(Mockito.any())).thenReturn(true);
Response response = deviceAgentService.disEnrollDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test dis-enrolling non existing device.")
public void testDisEnrollNonExistingDevice() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Response response = deviceAgentService.disEnrollDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NO_CONTENT.getStatusCode(),
"The response status should be 204");
}
@Test(description = "Test dis-enrolling device error")
public void testDisEnrollingDeviceError() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.disenrollDevice(Mockito.any())).thenThrow(new
DeviceManagementException());
Response response = deviceAgentService.disEnrollDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test device update scenario with device management exception.")
public void testUpdateDeviceDMException() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenThrow(new
DeviceManagementException());
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test update device scenario when the device is null.")
public void testUpdateDeviceWithNoDevice() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, null);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test the update device scenario when there is no enrolled device.")
public void testUpdatingNonExistingDevice() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(null);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NOT_FOUND.getStatusCode(),
"The response status should be 404");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test update device with device access authorization exception.")
public void testEnrollDeviceWithDeviceAccessAuthException() throws DeviceManagementException,
DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenThrow(new DeviceAccessAuthorizationException());
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test update device when user does not have device access permission.")
public void testUpdateDeviceWithNoDeviceAccessPermission() throws DeviceManagementException,
DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(false);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.UNAUTHORIZED.getStatusCode(),
"The response status should be 401");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test update device when device modification is unsuccessful.")
public void testUpdateDeviceNOTModify() throws DeviceManagementException, DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getAuthenticatedUser")).toReturn(AUTHENTICATED_USER);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
Mockito.when(this.deviceManagementProviderService.modifyEnrollment(Mockito.any())).thenReturn(false);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NOT_MODIFIED.getStatusCode(),
"The response status should be 304");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test updating device when modify enrollment throws exception")
public void testUpdateDeviceWithModifyEnrollmentFailure() throws DeviceManagementException, DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getAuthenticatedUser")).toReturn(AUTHENTICATED_USER);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
Mockito.when(this.deviceManagementProviderService.modifyEnrollment(Mockito.any())).thenThrow(new DeviceManagementException());
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test updating device success scenario.")
public void testUpdateDeviceSuccess() throws DeviceManagementException, DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getAuthenticatedUser")).toReturn(AUTHENTICATED_USER);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
Mockito.when(this.deviceManagementProviderService.modifyEnrollment(Mockito.any())).thenReturn((true));
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.ACCEPTED.getStatusCode(),
"The response status should be 202");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test publish events with null payload.")
public void testPublishEventsWithNullPayload() {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
Mockito.when(this.privilegedCarbonContext.getTenantDomain()).thenReturn(TENANT_DOMAIN);
Map<String, Object> payload = null;
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
List<Object> payloadList = null;
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE,
TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test publish events with no device access authorization.")
public void testPublishEventsWithOutAuthorization() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(false);
Mockito.when(this.privilegedCarbonContext.getTenantDomain()).thenReturn(TENANT_DOMAIN);
Map<String, Object> payload = new HashMap<>();
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.UNAUTHORIZED.getStatusCode(),
"The response status should be 401");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.UNAUTHORIZED.getStatusCode(),
"The response status should be 401");
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test
public void testPublishEventsWithDeviceAccessAuthException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceAccessAuthorizationService"))
.toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenThrow(new DeviceAccessAuthorizationException());
Mockito.when(this.privilegedCarbonContext.getTenantDomain()).thenReturn(TENANT_DOMAIN);
Map<String, Object> payload = new HashMap<>();
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test event publishing when the event stream dao is null.")
public void testEventPublishWithNullEventAttributesAndNullEventStreamDefDAO() throws DeviceAccessAuthorizationException, RemoteException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceAccessAuthorizationService"))
.toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString())).thenReturn(null);
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(eventStreamAdminServiceStub);
}
@Test(description ="Test the error scenario of Publishing Events with null event attributes.")
public void testEventPublishWithEventAttributesNULLAndPublishEventsFailure() throws
DeviceAccessAuthorizationException, RemoteException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
EventStreamAttributeDto eventStreamAttributeDto = Mockito.mock(EventStreamAttributeDto.class,
Mockito.RETURNS_MOCKS);
EventStreamDefinitionDto eventStreamDefinitionDto = Mockito.mock(EventStreamDefinitionDto.class,
Mockito.RETURNS_MOCKS);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString()))
.thenReturn(eventStreamDefinitionDto);
Mockito.when(eventStreamDefinitionDto.getPayloadData()).thenReturn(new EventStreamAttributeDto[]{});
EventsPublisherService eventPublisherService = Mockito.mock(EventsPublisherServiceImpl.class,
Mockito.RETURNS_MOCKS);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventPublisherService")).toReturn
(eventPublisherService);
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Event publishing success scenario.")
public void testEventPublishWithEventAttributesNULLAndPublishEventsSuccess()
throws DeviceAccessAuthorizationException, RemoteException, DataPublisherConfigurationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
EventStreamAttributeDto eventStreamAttributeDto = Mockito.mock(EventStreamAttributeDto.class,
Mockito.RETURNS_MOCKS);
EventStreamDefinitionDto eventStreamDefinitionDto = Mockito.mock(EventStreamDefinitionDto.class,
Mockito.RETURNS_MOCKS);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString()))
.thenReturn(eventStreamDefinitionDto);
Mockito.when(eventStreamDefinitionDto.getPayloadData()).thenReturn(new EventStreamAttributeDto[]{});
EventsPublisherService eventPublisherService = Mockito.mock(EventsPublisherServiceImpl.class,
Mockito.RETURNS_MOCKS);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventPublisherService")).toReturn
(eventPublisherService);
Mockito.when(eventPublisherService.publishEvent(Mockito.anyString(), Mockito.anyString(), Mockito.any(),
Mockito.any(), Mockito.any())).thenReturn(true);
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
}
@Test(description = "Test event publishing when PublishEvents throws DataPublisherConfigurationException.")
public void testPublishEventsDataPublisherConfig() throws DeviceAccessAuthorizationException, RemoteException, DataPublisherConfigurationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
EventStreamAttributeDto eventStreamAttributeDto = Mockito.mock(EventStreamAttributeDto.class,
Mockito.RETURNS_MOCKS);
EventStreamDefinitionDto eventStreamDefinitionDto = Mockito.mock(EventStreamDefinitionDto.class,
Mockito.RETURNS_MOCKS);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString()))
.thenReturn(eventStreamDefinitionDto);
Mockito.when(eventStreamDefinitionDto.getPayloadData()).thenReturn(new EventStreamAttributeDto[]{});
EventsPublisherService eventPublisherService = Mockito.mock(EventsPublisherServiceImpl.class,
Mockito.RETURNS_MOCKS);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventPublisherService"))
.toReturn(eventPublisherService);
Mockito.when(eventPublisherService.publishEvent(Mockito.anyString(), Mockito.anyString(), Mockito.any(),
Mockito.any(), Mockito.any())).thenThrow(
new DataPublisherConfigurationException("meta data[0] should have the device Id field"));
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publish events with Axis Fault.")
public void testPublishEventsWithAxisFault() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new AxisFault(""));
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publishing events when EventStreamAdminService throws Remote exception.")
public void testPublishEventsWithRemoteException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new RemoteException());
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publishing events when EventStreamAdminService throws JWT exception.")
public void testPublishEventsWithJWTException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new JWTClientException());
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publishing events when EventStreamAdminService throws User Store exception.")
public void testPublishEventsWithUserStoreException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new UserStoreException());
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test the get pending operation method which return empty device type list.")
public void testGetPendingOperationsWithNoDeviceType() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(new ArrayList<String>(){});
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the get pending operation method with invalid device identifier.")
public void testGetPendingOperationsWithInvalidDeviceIdentifier() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(false);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NO_CONTENT.getStatusCode(),
"The response status should be 204");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the get pending operations success scenario.")
public void testGetPendingOperationsSuccess() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the scenario when get pending operations throw OperationManagementException.")
public void testGetPendingOperationsWithOperationManagementException() throws DeviceManagementException, OperationManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Mockito.when(this.deviceManagementProviderService.getPendingOperations(Mockito.any())).thenThrow(new
OperationManagementException());
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the scenario when getAvailableDeviceTypes throw DeviceManagementException.")
public void testGetPendingOperationsWithDeviceManagementException() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenThrow(new DeviceManagementException());
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test get next pending operation with device type is invalid.")
public void getNextPendingOperationWithInvalidDeviceType() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(new ArrayList<String>(){});
Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test get next pending operation with invalid device identifier.")
public void getNextPendingOperationWithInvalidDeviceIdentifier() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(false);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the getNextPendingOperation success scenario.")
public void testGetNextPendingOperationSuccess() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test get next pending operation with operation management exception.")
public void getNextPendingOperationWithOperationManagementException() throws DeviceManagementException, OperationManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Mockito.when(this.deviceManagementProviderService.getNextPendingOperation(Mockito.any())).thenThrow(new
OperationManagementException());
Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the get next pending operation method with device management exception.")
public void getNextPendingOperationWithDeviceManagementException() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenThrow(new DeviceManagementException());
Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
}
| components/device-mgt/org.wso2.carbon.device.mgt.api/src/test/java/org/wso2/carbon/device/mgt/jaxrs/service/impl/DeviceAgentServiceTest.java | /*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.jaxrs.service.impl;
import org.apache.axis2.AxisFault;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.testng.Assert;
import org.testng.IObjectFactory;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.ObjectFactory;
import org.testng.annotations.Test;
import org.wso2.carbon.caching.impl.CacheImpl;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.device.mgt.analytics.data.publisher.exception.DataPublisherConfigurationException;
import org.wso2.carbon.device.mgt.analytics.data.publisher.service.EventsPublisherService;
import org.wso2.carbon.device.mgt.analytics.data.publisher.service.EventsPublisherServiceImpl;
import org.wso2.carbon.device.mgt.common.Device;
import org.wso2.carbon.device.mgt.common.DeviceIdentifier;
import org.wso2.carbon.device.mgt.common.DeviceManagementException;
import org.wso2.carbon.device.mgt.common.EnrolmentInfo;
import org.wso2.carbon.device.mgt.common.authorization.DeviceAccessAuthorizationException;
import org.wso2.carbon.device.mgt.common.authorization.DeviceAccessAuthorizationService;
import org.wso2.carbon.device.mgt.common.operation.mgt.OperationManagementException;
import org.wso2.carbon.device.mgt.core.authorization.DeviceAccessAuthorizationServiceImpl;
import org.wso2.carbon.device.mgt.core.service.DeviceManagementProviderService;
import org.wso2.carbon.device.mgt.core.service.DeviceManagementProviderServiceImpl;
import org.wso2.carbon.device.mgt.jaxrs.service.api.DeviceAgentService;
import org.wso2.carbon.device.mgt.jaxrs.service.api.admin.DeviceTypeManagementAdminService;
import org.wso2.carbon.device.mgt.jaxrs.service.impl.util.DeviceMgtAPITestHelper;
import org.wso2.carbon.device.mgt.jaxrs.util.DeviceMgtAPIUtils;
import org.wso2.carbon.event.stream.stub.EventStreamAdminServiceStub;
import org.wso2.carbon.event.stream.stub.types.EventStreamAttributeDto;
import org.wso2.carbon.event.stream.stub.types.EventStreamDefinitionDto;
import org.wso2.carbon.identity.jwt.client.extension.exception.JWTClientException;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.utils.CarbonUtils;
import javax.cache.CacheManager;
import javax.ws.rs.core.Response;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.mockito.MockitoAnnotations.initMocks;
/**
* This class holds the unit tests for the class {@link DeviceAgentServiceImpl}
*/
@PowerMockIgnore("javax.ws.rs.*")
@SuppressStaticInitializationFor({"org.wso2.carbon.device.mgt.jaxrs.util.DeviceMgtAPIUtils",
"org.wso2.carbon.context.CarbonContext", "org.wso2.carbon.context.internal.CarbonContextDataHolder"})
@PrepareForTest({DeviceMgtAPIUtils.class, DeviceManagementProviderService.class,
DeviceAccessAuthorizationService.class, EventStreamAdminServiceStub.class, PrivilegedCarbonContext.class,
CarbonContext.class, CarbonUtils.class})
public class DeviceAgentServiceTest {
    private static final Log log = LogFactory.getLog(DeviceTypeManagementAdminService.class);
    // Mocked collaborators of the service under test; all are initialised in init().
    private DeviceManagementProviderService deviceManagementProviderService;
    private DeviceAgentService deviceAgentService;
    private EventStreamAdminServiceStub eventStreamAdminServiceStub;
    private PrivilegedCarbonContext privilegedCarbonContext;
    private CarbonContext carbonContext;
    private CacheManager cacheManager;
    private DeviceAccessAuthorizationService deviceAccessAuthorizationService;
    // Common fixture values shared by the tests below.
    private static final String TEST_DEVICE_TYPE = "TEST-DEVICE-TYPE";
    private static final String TEST_DEVICE_IDENTIFIER = "11222334455";
    private static final String AUTHENTICATED_USER = "admin";
    private static final String TENANT_DOMAIN = "carbon.super";
    // Shared demo device fixture; NOTE(review): testEnrollDeviceSuccess mutates its enrolment info.
    private static Device demoDevice;
    /**
     * Supplies the PowerMock object factory so TestNG creates this test class through
     * PowerMock, enabling the static mocking declared via {@code @PrepareForTest}.
     */
    @ObjectFactory
    public IObjectFactory getObjectFactory() {
        return new org.powermock.modules.testng.PowerMockObjectFactory();
    }
    /**
     * Creates the service under test and its mocked collaborators once per class.
     * RETURNS_MOCKS makes unstubbed calls return non-null defaults instead of null.
     */
    @BeforeClass
    public void init() {
        log.info("Initializing DeviceAgent tests");
        initMocks(this);
        this.deviceManagementProviderService = Mockito
                .mock(DeviceManagementProviderServiceImpl.class, Mockito.RETURNS_MOCKS);
        this.deviceAgentService = new DeviceAgentServiceImpl();
        this.deviceAccessAuthorizationService = Mockito.mock(DeviceAccessAuthorizationServiceImpl.class,
                Mockito.RETURNS_MOCKS);
        this.privilegedCarbonContext = Mockito.mock(PrivilegedCarbonContext.class, Mockito.RETURNS_MOCKS);
        this.carbonContext = Mockito.mock(CarbonContext.class, Mockito.RETURNS_MOCKS);
        this.eventStreamAdminServiceStub = Mockito.mock(EventStreamAdminServiceStub.class, Mockito.RETURNS_MOCKS);
        // Shared device fixture used (and in one test mutated) across test methods.
        demoDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
        this.cacheManager = Mockito.mock(CacheManager.class, Mockito.RETURNS_MOCKS);
    }
@Test(description = "Test device Enrollment when the device is null")
public void testEnrollDeviceWithDeviceIsNULL() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Response response = this.deviceAgentService.enrollDevice(null);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test device enrollment when device type is null.")
public void testEnrollDeviceWithDeviceTypeNull() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(null, TEST_DEVICE_IDENTIFIER);
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test device enrollment of a device with null device identifier.")
public void testEnrollNewDeviceWithDeviceIdentifierIsNull() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, null);
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test an already enrolled device")
public void testEnrollExistingDevice() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(demoDevice);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(device);
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the device enrollment success scenario.")
public void testEnrollDeviceSuccess() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getAuthenticatedUser"))
.toReturn(AUTHENTICATED_USER);
EnrolmentInfo enrolmentInfo = demoDevice.getEnrolmentInfo();
enrolmentInfo.setStatus(EnrolmentInfo.Status.INACTIVE);
demoDevice.setEnrolmentInfo(enrolmentInfo);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(demoDevice);
Response response = this.deviceAgentService.enrollDevice(demoDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the device enrollment with device management exception.")
public void testEnrollDeviceWithException() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getAuthenticatedUser"))
.toReturn(AUTHENTICATED_USER);
Device device = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
EnrolmentInfo enrolmentInfo = device.getEnrolmentInfo();
enrolmentInfo.setStatus(EnrolmentInfo.Status.INACTIVE);
device.setEnrolmentInfo(enrolmentInfo);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(device);
Mockito.when(this.deviceManagementProviderService.enrollDevice(Mockito.any()))
.thenThrow(new DeviceManagementException());
Response response = this.deviceAgentService.enrollDevice(device);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test dis-enrolling the device success scenario.")
public void testDisEnrollDeviceSuccess() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.disenrollDevice(Mockito.any())).thenReturn(true);
Response response = deviceAgentService.disEnrollDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test dis-enrolling non existing device.")
public void testDisEnrollNonExistingDevice() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Response response = deviceAgentService.disEnrollDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NO_CONTENT.getStatusCode(),
"The response status should be 204");
}
@Test(description = "Test dis-enrolling device error")
public void testDisEnrollingDeviceError() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.disenrollDevice(Mockito.any())).thenThrow(new
DeviceManagementException());
Response response = deviceAgentService.disEnrollDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test device update scenario with device management exception.")
public void testUpdateDeviceDMException() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenThrow(new
DeviceManagementException());
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test update device scenario when the device is null.")
public void testUpdateDeviceWithNoDevice() {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, null);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test the update device scenario when there is no enrolled device.")
public void testUpdatingNonExistingDevice() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(null);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NOT_FOUND.getStatusCode(),
"The response status should be 404");
Mockito.reset(this.deviceManagementProviderService);
}
    // NOTE(review): the method name says "EnrollDevice" but this scenario exercises
    // updateDevice(), matching the @Test description below — consider renaming.
    @Test(description = "Test update device with device access authorization exception.")
    public void testEnrollDeviceWithDeviceAccessAuthException() throws DeviceManagementException,
            DeviceAccessAuthorizationException {
        // Route both static utility lookups to the mocked services.
        PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
                .toReturn(this.deviceManagementProviderService);
        PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
                "getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
        Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
        Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
        // The authorization check itself throws, so the service must answer 500.
        Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
                .thenThrow(new DeviceAccessAuthorizationException());
        Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
        Assert.assertNotNull(response, "Response should not be null");
        Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
                "The response status should be 500");
        Mockito.reset(this.deviceManagementProviderService);
        Mockito.reset(this.deviceAccessAuthorizationService);
    }
@Test(description = "Test update device when user does not have device access permission.")
public void testUpdateDeviceWithNoDeviceAccessPermission() throws DeviceManagementException,
DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(false);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.UNAUTHORIZED.getStatusCode(),
"The response status should be 401");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test update device when device modification is unsuccessful.")
public void testUpdateDeviceNOTModify() throws DeviceManagementException, DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getAuthenticatedUser")).toReturn(AUTHENTICATED_USER);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
Mockito.when(this.deviceManagementProviderService.modifyEnrollment(Mockito.any())).thenReturn(false);
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NOT_MODIFIED.getStatusCode(),
"The response status should be 304");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test updating device when modify enrollment throws exception")
public void testUpdateDeviceWithModifyEnrollmentFailure() throws DeviceManagementException, DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getAuthenticatedUser")).toReturn(AUTHENTICATED_USER);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
Mockito.when(this.deviceManagementProviderService.modifyEnrollment(Mockito.any())).thenThrow(new DeviceManagementException());
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test updating device success scenario.")
public void testUpdateDeviceSuccess() throws DeviceManagementException, DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getAuthenticatedUser")).toReturn(AUTHENTICATED_USER);
Device testDevice = DeviceMgtAPITestHelper.generateDummyDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Mockito.when(this.deviceManagementProviderService.getDevice(Mockito.any())).thenReturn(testDevice);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
Mockito.when(this.deviceManagementProviderService.modifyEnrollment(Mockito.any())).thenReturn((true));
Response response = deviceAgentService.updateDevice(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER, testDevice);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.ACCEPTED.getStatusCode(),
"The response status should be 202");
Mockito.reset(this.deviceManagementProviderService);
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test publish events with null payload.")
public void testPublishEventsWithNullPayload() {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
Mockito.when(this.privilegedCarbonContext.getTenantDomain()).thenReturn(TENANT_DOMAIN);
Map<String, Object> payload = null;
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
List<Object> payloadList = null;
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE,
TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
}
@Test(description = "Test publish events with no device access authorization.")
public void testPublishEventsWithOutAuthorization() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(false);
Mockito.when(this.privilegedCarbonContext.getTenantDomain()).thenReturn(TENANT_DOMAIN);
Map<String, Object> payload = new HashMap<>();
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.UNAUTHORIZED.getStatusCode(),
"The response status should be 401");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.UNAUTHORIZED.getStatusCode(),
"The response status should be 401");
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test
public void testPublishEventsWithDeviceAccessAuthException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceAccessAuthorizationService"))
.toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenThrow(new DeviceAccessAuthorizationException());
Mockito.when(this.privilegedCarbonContext.getTenantDomain()).thenReturn(TENANT_DOMAIN);
Map<String, Object> payload = new HashMap<>();
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
Mockito.reset(this.deviceAccessAuthorizationService);
}
@Test(description = "Test event publishing when the event stream dao is null.")
public void testEventPublishWithNullEventAttributesAndNullEventStreamDefDAO() throws DeviceAccessAuthorizationException, RemoteException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceAccessAuthorizationService"))
.toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString())).thenReturn(null);
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(eventStreamAdminServiceStub);
}
@Test(description ="Test the error scenario of Publishing Events with null event attributes.")
public void testEventPublishWithEventAttributesNULLAndPublishEventsFailure() throws
DeviceAccessAuthorizationException, RemoteException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
EventStreamAttributeDto eventStreamAttributeDto = Mockito.mock(EventStreamAttributeDto.class,
Mockito.RETURNS_MOCKS);
EventStreamDefinitionDto eventStreamDefinitionDto = Mockito.mock(EventStreamDefinitionDto.class,
Mockito.RETURNS_MOCKS);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString()))
.thenReturn(eventStreamDefinitionDto);
Mockito.when(eventStreamDefinitionDto.getPayloadData()).thenReturn(new EventStreamAttributeDto[]{});
EventsPublisherService eventPublisherService = Mockito.mock(EventsPublisherServiceImpl.class,
Mockito.RETURNS_MOCKS);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventPublisherService")).toReturn
(eventPublisherService);
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Event publishing success scenario.")
public void testEventPublishWithEventAttributesNULLAndPublishEventsSuccess()
throws DeviceAccessAuthorizationException, RemoteException, DataPublisherConfigurationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
EventStreamAttributeDto eventStreamAttributeDto = Mockito.mock(EventStreamAttributeDto.class,
Mockito.RETURNS_MOCKS);
EventStreamDefinitionDto eventStreamDefinitionDto = Mockito.mock(EventStreamDefinitionDto.class,
Mockito.RETURNS_MOCKS);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString()))
.thenReturn(eventStreamDefinitionDto);
Mockito.when(eventStreamDefinitionDto.getPayloadData()).thenReturn(new EventStreamAttributeDto[]{});
EventsPublisherService eventPublisherService = Mockito.mock(EventsPublisherServiceImpl.class,
Mockito.RETURNS_MOCKS);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventPublisherService")).toReturn
(eventPublisherService);
Mockito.when(eventPublisherService.publishEvent(Mockito.anyString(), Mockito.anyString(), Mockito.any(),
Mockito.any(), Mockito.any())).thenReturn(true);
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
}
@Test(description = "Test event publishing when PublishEvents throws DataPublisherConfigurationException.")
public void testPublishEventsDataPublisherConfig() throws DeviceAccessAuthorizationException, RemoteException, DataPublisherConfigurationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toReturn(this.eventStreamAdminServiceStub);
EventStreamAttributeDto eventStreamAttributeDto = Mockito.mock(EventStreamAttributeDto.class,
Mockito.RETURNS_MOCKS);
EventStreamDefinitionDto eventStreamDefinitionDto = Mockito.mock(EventStreamDefinitionDto.class,
Mockito.RETURNS_MOCKS);
Mockito.when(this.eventStreamAdminServiceStub.getStreamDefinitionDto(Mockito.anyString()))
.thenReturn(eventStreamDefinitionDto);
Mockito.when(eventStreamDefinitionDto.getPayloadData()).thenReturn(new EventStreamAttributeDto[]{});
EventsPublisherService eventPublisherService = Mockito.mock(EventsPublisherServiceImpl.class,
Mockito.RETURNS_MOCKS);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventPublisherService"))
.toReturn(eventPublisherService);
Mockito.when(eventPublisherService.publishEvent(Mockito.anyString(), Mockito.anyString(), Mockito.any(),
Mockito.any(), Mockito.any())).thenThrow(
new DataPublisherConfigurationException("meta data[0] should have the device Id field"));
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publish events with Axis Fault.")
public void testPublishEventsWithAxisFault() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new AxisFault(""));
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publishing events when EventStreamAdminService throws Remote exception.")
public void testPublishEventsWithRemoteException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new RemoteException());
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publishing events when EventStreamAdminService throws JWT exception.")
public void testPublishEventsWithJWTException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new JWTClientException());
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test Publishing events when EventStreamAdminService throws User Store exception.")
public void testPublishEventsWithUserStoreException() throws DeviceAccessAuthorizationException {
PowerMockito.stub(PowerMockito.method(PrivilegedCarbonContext.class, "getThreadLocalCarbonContext"))
.toReturn(this.privilegedCarbonContext);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class,
"getDeviceAccessAuthorizationService")).toReturn(this.deviceAccessAuthorizationService);
Mockito.when(this.deviceAccessAuthorizationService.isUserAuthorized(Mockito.any(DeviceIdentifier.class)))
.thenReturn(true);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getEventStreamAdminServiceStub"))
.toThrow(new UserStoreException());
Map<String, Object> payload = new HashMap<>();
CacheImpl cache = Mockito.mock(CacheImpl.class);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDynamicEventCache"))
.toReturn(cache);
Response response = this.deviceAgentService.publishEvents(payload, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
List<Object> payloadList = new ArrayList<>();
Response response2 = this.deviceAgentService.publishEvents(payloadList, TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response2, "Response should not be null");
Assert.assertEquals(response2.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 500");
}
@Test(description = "Test the get pending operation method which return empty device type list.")
public void testGetPendingOperationsWithNoDeviceType() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(new ArrayList<String>(){});
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
"The response status should be 400");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the get pending operation method with invalid device identifier.")
public void testGetPendingOperationsWithInvalidDeviceIdentifier() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(false);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertEquals(response.getStatus(), Response.Status.NO_CONTENT.getStatusCode(),
"The response status should be 204");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the get pending operations success scenario.")
public void testGetPendingOperationsSuccess() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the scenario when get pending operations throw OperationManagementException.")
public void testGetPendingOperationsWithOperationManagementException() throws DeviceManagementException, OperationManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
List<String> deviceTypes = new ArrayList<>();
deviceTypes.add(TEST_DEVICE_TYPE);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenReturn(deviceTypes);
Mockito.when(this.deviceManagementProviderService.getPendingOperations(Mockito.any())).thenThrow(new
OperationManagementException());
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
@Test(description = "Test the scenario when getAvailableDeviceTypes throw DeviceManagementException.")
public void testGetPendingOperationsWithDeviceManagementException() throws DeviceManagementException {
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
.toReturn(this.deviceManagementProviderService);
PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
.toReturn(true);
Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
.thenThrow(new DeviceManagementException());
Response response = this.deviceAgentService.getPendingOperations(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
Assert.assertNotNull(response, "Response should not be null");
Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"The response status should be 200");
Mockito.reset(this.deviceManagementProviderService);
}
}
| Test cases for getNextPendingOperation method.
| components/device-mgt/org.wso2.carbon.device.mgt.api/src/test/java/org/wso2/carbon/device/mgt/jaxrs/service/impl/DeviceAgentServiceTest.java | Test cases for getNextPendingOperation method. | <ide><path>omponents/device-mgt/org.wso2.carbon.device.mgt.api/src/test/java/org/wso2/carbon/device/mgt/jaxrs/service/impl/DeviceAgentServiceTest.java
<ide> Mockito.reset(this.deviceManagementProviderService);
<ide> Mockito.reset(this.deviceAccessAuthorizationService);
<ide> }
<add>
<ide> @Test(description = "Test updating device success scenario.")
<ide> public void testUpdateDeviceSuccess() throws DeviceManagementException, DeviceAccessAuthorizationException {
<ide> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
<ide> Assert.assertNotNull(response, "Response should not be null");
<ide> Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
<ide> Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
<del> "The response status should be 200");
<add> "The response status should be 500");
<ide> Mockito.reset(this.deviceManagementProviderService);
<ide> }
<ide>
<ide> Assert.assertNotNull(response, "Response should not be null");
<ide> Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
<ide> Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
<add> "The response status should be 500");
<add> Mockito.reset(this.deviceManagementProviderService);
<add> }
<add>
<add> @Test(description = "Test get next pending operation with device type is invalid.")
<add> public void getNextPendingOperationWithInvalidDeviceType() throws DeviceManagementException {
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
<add> .toReturn(this.deviceManagementProviderService);
<add> Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
<add> .thenReturn(new ArrayList<String>(){});
<add> Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
<add> Assert.assertNotNull(response, "Response should not be null");
<add> Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
<add> "The response status should be 400");
<add> Mockito.reset(this.deviceManagementProviderService);
<add> }
<add>
<add> @Test(description = "Test get next pending operation with invalid device identifier.")
<add> public void getNextPendingOperationWithInvalidDeviceIdentifier() throws DeviceManagementException {
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
<add> .toReturn(this.deviceManagementProviderService);
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
<add> .toReturn(false);
<add> List<String> deviceTypes = new ArrayList<>();
<add> deviceTypes.add(TEST_DEVICE_TYPE);
<add>
<add> Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
<add> .thenReturn(deviceTypes);
<add> Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
<add> Assert.assertNotNull(response, "Response should not be null");
<add> Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode(),
<add> "The response status should be 400");
<add> Mockito.reset(this.deviceManagementProviderService);
<add> }
<add>
<add> @Test(description = "Test the getNextPendingOperation success scenario.")
<add> public void testGetNextPendingOperationSuccess() throws DeviceManagementException {
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
<add> .toReturn(this.deviceManagementProviderService);
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
<add> .toReturn(true);
<add> List<String> deviceTypes = new ArrayList<>();
<add> deviceTypes.add(TEST_DEVICE_TYPE);
<add>
<add> Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
<add> .thenReturn(deviceTypes);
<add> Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
<add> Assert.assertNotNull(response, "Response should not be null");
<add> Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
<add> Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode(),
<ide> "The response status should be 200");
<ide> Mockito.reset(this.deviceManagementProviderService);
<ide> }
<add>
<add> @Test(description = "Test get next pending operation with operation management exception.")
<add> public void getNextPendingOperationWithOperationManagementException() throws DeviceManagementException, OperationManagementException {
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
<add> .toReturn(this.deviceManagementProviderService);
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
<add> .toReturn(true);
<add> List<String> deviceTypes = new ArrayList<>();
<add> deviceTypes.add(TEST_DEVICE_TYPE);
<add>
<add> Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
<add> .thenReturn(deviceTypes);
<add> Mockito.when(this.deviceManagementProviderService.getNextPendingOperation(Mockito.any())).thenThrow(new
<add> OperationManagementException());
<add> Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
<add> Assert.assertNotNull(response, "Response should not be null");
<add> Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
<add> Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
<add> "The response status should be 500");
<add> Mockito.reset(this.deviceManagementProviderService);
<add> }
<add>
<add> @Test(description = "Test the get next pending operation method with device management exception.")
<add> public void getNextPendingOperationWithDeviceManagementException() throws DeviceManagementException {
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "getDeviceManagementService"))
<add> .toReturn(this.deviceManagementProviderService);
<add> PowerMockito.stub(PowerMockito.method(DeviceMgtAPIUtils.class, "isValidDeviceIdentifier"))
<add> .toReturn(true);
<add> Mockito.when(this.deviceManagementProviderService.getAvailableDeviceTypes())
<add> .thenThrow(new DeviceManagementException());
<add>
<add> Response response = this.deviceAgentService.getNextPendingOperation(TEST_DEVICE_TYPE, TEST_DEVICE_IDENTIFIER);
<add> Assert.assertNotNull(response, "Response should not be null");
<add> Assert.assertNotNull(response.getEntity(), "Response entity should not be null.");
<add> Assert.assertEquals(response.getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
<add> "The response status should be 500");
<add> Mockito.reset(this.deviceManagementProviderService);
<add> }
<ide> } |
|
JavaScript | apache-2.0 | 5da423862c8d4f8978bdc2ee7942a75b3d0071b9 | 0 | rweverwijk/photoGraph,rweverwijk/photoGraph,rweverwijk/photoGraph | var _ = require('underscore');
var neo4j = require("neo4j");
var db = new neo4j.GraphDatabase('http://localhost:7474');
exports.getRandomPhotos = function(options, callback) {
var queryPartial = [
'MATCH (p:Photo)',
'WITH p',
'MATCH (p)-[:HAS_TAG]->(t)',
'WITH p, collect(t.name) as t',
'WITH p,t, rand() as random',
'RETURN p.fileName as fileName, p.directory as directory, t as tags',
'LIMIT 90'
];
var ordering = 'ORDER BY fileName';
if (options.order && options.order === "random") {
ordering = 'ORDER BY random';
}
queryPartial.splice(6,0, ordering);
if (options.tags) {
var tagArray = [].concat(options.tags);
_.each(tagArray, function(item) {
queryPartial.splice(1,0, ', (p)-[:HAS_TAG]->(`' + item + '` {name: "' + item + '"})');
});
}
var query = queryPartial.join('\n');
console.log(query);
// var params = {
// order: options.order ? options.order : "random"
// };
db.query(query, {}, function (err, results) {
//console.log("p" + JSON.stringify(results));
if (err) throw err;
var photos = results;
photos = transformImageNames(photos);
callback(photos);
});
};
exports.getTags = function(callback) {
var query = [
'MATCH (t:Tag)',
'RETURN t.name as name',
'ORDER BY name'
].join('\n');
db.query(query, {}, function (err, results) {
console.log("p" + JSON.stringify(results));
if (err) throw err;
callback(results);
});
};
exports.getAllPhotos = function(callback) {
var query = [
'MATCH (p:Photo)',
'RETURN p.fileName as fileName, p.directory as directory, p.location as location',
].join('\n');
db.query(query, {}, function (err, results) {
if (err) throw err;
callback(transformImageNames(results));
});
};
var transformImageNames = function(photos) {
return _.each(photos, function(photo) {
photo.thumbnailUrl = photo.fileName.replace(".JPG", "_t.JPG").replace(".jpg", "_t.jpg");
photo.fileName = photo.fileName.replace(".JPG", "_b.JPG").replace(".jpg", "_b.jpg");
});
}; | photo.js | var _ = require('underscore');
var neo4j = require("neo4j");
var db = new neo4j.GraphDatabase('http://localhost:7474');
exports.getRandomPhotos = function(options, callback) {
var queryPartial = [
'MATCH (p:Photo)',
'WITH p',
'MATCH (p)-[:HAS_TAG]->(t)',
'WITH p, collect(t.name) as t',
'WITH p,t, rand() as random',
'RETURN p.fileName as fileName, p.directory as directory, t as tags',
'LIMIT 90'
];
var ordering = 'ORDER BY fileName';
if (options.order && options.order === "random") {
ordering = 'ORDER BY random';
}
queryPartial.splice(6,0, ordering);
if (options.tags) {
var tagArray = [].concat(options.tags);
_.each(tagArray, function(item) {
queryPartial.splice(1,0, ', (p)-[:HAS_TAG]->(`' + item + '` {name: "' + item + '"})');
});
}
var query = queryPartial.join('\n');
console.log(query);
// var params = {
// order: options.order ? options.order : "random"
// };
db.query(query, {}, function (err, results) {
//console.log("p" + JSON.stringify(results));
if (err) throw err;
var photos = results;
photos = transformImageNames(photos);
callback(photos);
});
};
exports.getTags = function(callback) {
var query = [
'MATCH (t:Tag)',
'RETURN t.name as name',
'ORDER BY name'
].join('\n');
db.query(query, {}, function (err, results) {
console.log("p" + JSON.stringify(results));
if (err) throw err;
callback(results);
});
};
exports.getAllPhotos = function(callback) {
var query = [
'MATCH (p:Photo)--(:Tag {name: "06_juni"})',
'RETURN p.fileName as fileName, p.directory as directory, p.location as location',
].join('\n');
db.query(query, {}, function (err, results) {
if (err) throw err;
callback(transformImageNames(results));
});
};
var transformImageNames = function(photos) {
return _.each(photos, function(photo) {
photo.thumbnailUrl = photo.fileName.replace(".JPG", "_t.JPG").replace(".jpg", "_t.jpg");
photo.fileName = photo.fileName.replace(".JPG", "_b.JPG").replace(".jpg", "_b.jpg");
});
}; | all photos should not filter on a tag
| photo.js | all photos should not filter on a tag | <ide><path>hoto.js
<ide>
<ide> exports.getAllPhotos = function(callback) {
<ide> var query = [
<del> 'MATCH (p:Photo)--(:Tag {name: "06_juni"})',
<add> 'MATCH (p:Photo)',
<ide> 'RETURN p.fileName as fileName, p.directory as directory, p.location as location',
<ide> ].join('\n');
<ide> |
|
Java | lgpl-2.1 | 80eba0f8d7a0b977c2bba1464ddcedb832a0fba7 | 0 | gillg/qz-print,dsanders11/qz-print,klabarge/qz-print,tresf/qz-print,gillg/qz-print,lzpfmh/qz-print,klabarge/qz-print,cbondo/qz-print,klabarge/qz-print,dsanders11/qz-print,dsanders11/qz-print,lzpfmh/qz-print,klabarge/qz-print,gillg/qz-print,gillg/qz-print,cbondo/qz-print,dsanders11/qz-print,dsanders11/qz-print,lzpfmh/qz-print,gillg/qz-print,tresf/qz-print,qzind/qz-print,lzpfmh/qz-print,lzpfmh/qz-print,qzind/qz-print,tresf/qz-print,klabarge/qz-print,qzind/qz-print,gillg/qz-print,klabarge/qz-print,klabarge/qz-print,tresf/qz-print,qzind/qz-print,qzind/qz-print,qzind/qz-print,cbondo/qz-print,qzindustries/qz-print,qzindustries/qz-print,tresf/qz-print,dsanders11/qz-print,dsanders11/qz-print,dsanders11/qz-print,qzind/qz-print,lzpfmh/qz-print,gillg/qz-print,klabarge/qz-print,lzpfmh/qz-print,cbondo/qz-print,klabarge/qz-print,tresf/qz-print,qzindustries/qz-print,cbondo/qz-print,cbondo/qz-print,tresf/qz-print,cbondo/qz-print,tresf/qz-print,qzind/qz-print,qzind/qz-print,tresf/qz-print,gillg/qz-print,cbondo/qz-print,lzpfmh/qz-print | /**
* @author Tres Finocchiaro
*
* Copyright (C) 2013 Tres Finocchiaro, QZ Industries
*
* IMPORTANT: This software is dual-licensed
*
* LGPL 2.1 This is free software. This software and source code are released
* under the "LGPL 2.1 License". A copy of this license should be distributed
* with this software. http://www.gnu.org/licenses/lgpl-2.1.html
*
* QZ INDUSTRIES SOURCE CODE LICENSE This software and source code *may* instead
* be distributed under the "QZ Industries Source Code License", available by
* request ONLY. If source code for this project is to be made proprietary for
* an individual and/or a commercial entity, written permission via a copy of
* the "QZ Industries Source Code License" must be obtained first. If you've
* obtained a copy of the proprietary license, the terms and conditions of the
* license apply only to the licensee identified in the agreement. Only THEN may
* the LGPL 2.1 license be voided.
*
*/
package qz;
import java.applet.Applet;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.awt.print.PrinterException;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.SocketException;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.LinkedList;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import javax.imageio.ImageIO;
import javax.print.PrintException;
import javax.print.PrintService;
import javax.print.PrintServiceLookup;
import netscape.javascript.JSException;
import netscape.javascript.JSObject;
import qz.exception.InvalidFileTypeException;
import qz.exception.NullCommandException;
import qz.exception.NullPrintServiceException;
import qz.exception.SerialException;
import qz.json.JSONArray;
import qz.reflection.ReflectException;
/**
* An invisible web applet for use with JavaScript functions to send raw
* commands to your thermal, receipt, shipping, barcode, card printer and much
* more.
*
* @author A. Tres Finocchiaro
*/
public class PrintApplet extends Applet implements Runnable {
private static final AtomicReference<Thread> thisThread = new AtomicReference<Thread>(null);
public static final String VERSION = "1.8.6";
private static final long serialVersionUID = 2787955484074291340L;
public static final int APPEND_XML = 1;
public static final int APPEND_RAW = 2;
public static final int APPEND_IMAGE = 3;
public static final int APPEND_IMAGE_PS = 4;
public static final int APPEND_PDF = 8;
public static final int APPEND_HTML = 16;
private JSObject window = null;
private LanguageType lang;
private int appendType;
private long sleep;
private PrintService ps;
private PrintRaw printRaw;
private SerialIO serialIO;
private PrintPostScript printPS;
private PrintHTML printHTML;
//private NetworkHashMap networkHashMap;
private NetworkUtilities networkUtilities;
private Throwable t;
private PaperFormat paperSize;
private boolean startFindingPrinters;
private boolean doneFindingPrinters;
private boolean startPrinting;
private boolean donePrinting;
private boolean startFindingNetwork;
private boolean doneFindingNetwork;
private boolean startAppending;
private boolean doneAppending;
private boolean startFindingPorts;
private boolean doneFindingPorts;
private boolean startSending;
private boolean doneSending;
private boolean autoSetSerialProperties = false;
private boolean startOpeningPort;
private boolean doneOpeningPort;
private boolean startClosingPort;
private boolean doneClosingPort;
private String serialPortName;
private int serialPortIndex = -1;
private boolean running;
private boolean reprint;
private boolean psPrint;
private boolean htmlPrint;
private boolean alternatePrint;
private boolean logFeaturesPS;
private int imageX = 0;
private int imageY = 0;
private int dotDensity = 32;
private boolean allowMultiple;
//private double[] psMargin;
private String jobName;
private String file;
private String xmlTag;
private String printer;
//private String orientation;
//private Boolean maintainAspect;
private int copies = -1;
private Charset charset = Charset.defaultCharset();
//private String pageBreak; // For spooling pages one at a time
private int documentsPerSpool = 0;
private String endOfDocument;
// private String manualBreak = "%/SPOOL/%";
/**
* Create a privileged thread that will listen for JavaScript events
*
* @since 1.1.7
*/
//@Override
public void run() {
final PrintApplet instance = this;
window = JSObject.getWindow(instance);
logStart();
try {
AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
//@Override
public Object run() throws Exception {
startJavaScriptListener();
return null;
}
});
} catch (PrivilegedActionException e) {
LogIt.log("Error starting main JavaScript thread. All else will fail.", e);
set(e);
} finally {
logStop();
}
}
/**
* Starts the Applet and runs the JavaScript listener thread
*/
private void startJavaScriptListener() {
notifyBrowser("qzReady");
while (running) {
try {
Thread.sleep(sleep); // Wait 100 milli before running again
if (startAppending) {
try {
switch (appendType) {
case APPEND_HTML:
appendHTML(new String(FileUtilities.readRawFile(file), charset.name()));
case APPEND_XML:
append64(FileUtilities.readXMLFile(file, xmlTag));
break;
case APPEND_RAW:
getPrintRaw().append(FileUtilities.readRawFile(file));
break;
case APPEND_IMAGE_PS:
readImage();
break;
case APPEND_IMAGE:
BufferedImage bi;
ImageWrapper iw;
if (ByteUtilities.isBase64Image(file)) {
byte[] imageData = Base64.decode(file.split(",")[1]);
bi = ImageIO.read(new ByteArrayInputStream(imageData));
} else {
bi = ImageIO.read(new URL(file));
}
iw = new ImageWrapper(bi, lang);
iw.setCharset(charset);
// Image density setting (ESCP only)
iw.setDotDensity(dotDensity);
// Image coordinates, (EPL only)
iw.setxPos(imageX);
iw.setyPos(imageY);
getPrintRaw().append(iw.getImageCommand());
break;
case APPEND_PDF:
getPrintPS().setPDF(ByteBuffer.wrap(ByteUtilities.readBinaryFile(file)));
break;
default: // Do nothing
}
} catch (Throwable t) {
LogIt.log("Error appending data", t);
set(t);
}
startAppending = false;
setDoneAppending(true);
}
if (startFindingPorts) {
logFindPorts();
startFindingPorts = false;
getSerialIO().fetchSerialPorts();
setDoneFindingPorts(true);
}
if (startFindingNetwork) {
logFindingNetwork();
startFindingNetwork = false;
//getNetworkHashMap().clear();
try {
// Gather the network information and store in a custom HashMap
//for (Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); en.hasMoreElements();) {
// getNetworkHashMap().put(en.nextElement());
//}
getNetworkUtilities().gatherNetworkInfo();
} catch (IOException e) {
set(e);
} catch (ReflectException e) {
LogIt.log(Level.SEVERE, "getHardwareAddress not supported on Java 1.5", e);
set(e);
}
//getNetworkUtilities().fetchNetworkInfo();
setDoneFindingNetwork(true);
}
if (startOpeningPort) {
logOpeningPort();
startOpeningPort = false;
try {
if (serialPortIndex != -1) {
getSerialIO().open(serialPortIndex);
} else {
getSerialIO().open(serialPortName);
}
// Currently a Windows-only feature
if (autoSetSerialProperties) {
getSerialIO().autoSetProperties();
}
} catch (Throwable t) {
//notifyBrowser("qzPortNotOpened", getSerialIO().getPortName());
set(t);
}
setDoneOpeningPort(true);
}
if (startClosingPort) {
logClosingPort();
startClosingPort = false;
try {
getSerialIO().close();
} catch (Throwable t) {
this.set(t);
}
setDoneClosingPort(true);
}
if (startFindingPrinters) {
logFindPrinter();
startFindingPrinters = false;
if (printer == null) {
PrintApplet.this.setPrintService(PrintServiceLookup.lookupDefaultPrintService());
} else {
PrintApplet.this.setPrintService(PrintServiceMatcher.findPrinter(printer));
}
setDoneFindingPrinters(true);
}
// Serial Port Stuff
if (startSending) {
try {
startSending = false;
logCommands(new String(getSerialIO().getInputBuffer().getByteArray(), charset.name()));
getSerialIO().send();
doneSending = true;
} catch (Throwable t) {
this.set(t);
}
}
if (serialIO != null && serialIO.getOutput() != null) {
try {
notifyBrowser("qzSerialReturned",
new Object[]{serialIO.getPortName(),
new String(serialIO.getOutput(), charset.name())});
} catch (UnsupportedEncodingException ex) {
this.set(ex);
}
serialIO.clearOutput();
}
if (startPrinting) {
logPrint();
try {
startPrinting = false;
if (htmlPrint) {
logAndPrint(getPrintHTML());
} else if (psPrint) {
logAndPrint(getPrintPS());
} else if (isRawAutoSpooling()) {
LinkedList<ByteArrayBuilder> pages = ByteUtilities.splitByteArray(
getPrintRaw().getByteArray(),
endOfDocument.getBytes(charset.name()),
documentsPerSpool);
//FIXME: Remove this debug line
LogIt.log(Level.INFO, "Automatically spooling to "
+ pages.size() + " separate print job(s)");
for (ByteArrayBuilder b : pages) {
logAndPrint(getPrintRaw(), b.getByteArray());
}
if (!reprint) {
getPrintRaw().clear();
}
} else {
logAndPrint(getPrintRaw());
}
} catch (PrintException e) {
set(e);
} catch (PrinterException e) {
set(e);
} catch (UnsupportedEncodingException e) {
set(e);
} catch (IOException e) {
set(e);
} finally {
setDonePrinting(true);
if (this.printRaw != null) {
getPrintRaw().clear();
}
}
}
} catch (InterruptedException e) {
set(e);
}
}
}
public void useAlternatePrinting() {
this.useAlternatePrinting(true);
}
public void useAlternatePrinting(boolean alternatePrint) {
this.alternatePrint = alternatePrint;
}
public boolean isAlternatePrinting() {
return this.alternatePrint;
}
private boolean isRawAutoSpooling() throws UnsupportedEncodingException {
return documentsPerSpool > 0 && endOfDocument != null && !getPrintRaw().isClear() && getPrintRaw().contains(endOfDocument);
}
private void setDonePrinting(boolean donePrinting) {
this.donePrinting = donePrinting;
this.copies = -1;
this.notifyBrowser("qzDonePrinting");
}
private void setDoneFindingPrinters(boolean doneFindingPrinters) {
this.doneFindingPrinters = doneFindingPrinters;
this.notifyBrowser("qzDoneFinding");
}
private void setDoneOpeningPort(boolean doneOpeningPort) {
this.doneOpeningPort = doneOpeningPort;
this.notifyBrowser("qzDoneOpeningPort", getSerialIO() == null ? null : getSerialIO().getPortName());
}
private void setDoneClosingPort(boolean doneClosingPort) {
this.doneClosingPort = doneClosingPort;
this.notifyBrowser("qzDoneClosingPort", serialPortName);
}
private void setDoneFindingNetwork(boolean doneFindingNetwork) {
this.doneFindingNetwork = doneFindingNetwork;
this.notifyBrowser("qzDoneFindingNetwork");
}
private void setDoneFindingPorts(boolean doneFindingPorts) {
this.doneFindingPorts = doneFindingPorts;
this.notifyBrowser("qzDoneFindingPorts");
}
private void setDoneAppending(boolean doneAppending) {
this.doneAppending = doneAppending;
this.notifyBrowser("qzDoneAppending");
}
public void logPostScriptFeatures(boolean logFeaturesPS) {
setLogPostScriptFeatures(logFeaturesPS);
}
public void setLogPostScriptFeatures(boolean logFeaturesPS) {
this.logFeaturesPS = logFeaturesPS;
LogIt.log("Console logging of PostScript printing features set to \"" + logFeaturesPS + "\"");
}
public boolean getLogPostScriptFeatures() {
return this.logFeaturesPS;
}
private void processParameters() {
jobName = "QZ-PRINT ___ Printing";
running = true;
startPrinting = false;
donePrinting = true;
startFindingPrinters = false;
doneFindingPrinters = true;
startFindingPorts = false;
doneFindingPorts = true;
startOpeningPort = false;
startClosingPort = false;
startSending = false;
doneSending = true;
startFindingNetwork = false;
doneFindingNetwork = true;
startAppending = false;
doneAppending = true;
sleep = getParameter("sleep", 100);
psPrint = false;
appendType = 0;
allowMultiple = false;
logFeaturesPS = false;
alternatePrint = false;
String printer = getParameter("printer", null);
LogIt.disableLogging = getParameter("disable_logging", false);
if (printer != null) {
findPrinter(printer);
}
}
/**
* Convenience method for calling a JavaScript function with a single
* <code>String</code> parameter. The functional equivalent of
* notifyBrowser(String function, new Object[]{String s})
*
* @param function
* @param s
* @return
*/
public boolean notifyBrowser(String function, String s) {
return notifyBrowser(function, new Object[]{s});
}
/**
 * Calls a JavaScript function (i.e. "qzReady()") from the web browser. For a
 * period of time, will call "jzebraReady()" as well as "qzReady()" but fail
 * silently on the old "jzebra" prefixed functions. If the "jzebra"
 * equivalent is used, it will display a deprecation warning.
 *
 * @param function The JavaScript function to call
 * @param o The parameter or array of parameters to send to the JavaScript
 * function
 * @return true if the function (or a deprecated jzebra fallback) was invoked
 */
public boolean notifyBrowser(String function, Object[] o) {
    try {
        String type = (String)window.eval("typeof(" + function + ")");
        // Ubuntu doesn't properly raise exceptions when calling invalid
        // functions, so this is the work-around
        if (!type.equals("function")) {
            throw new Exception("Object \"" + function + "\" does not "
                    + "exist or is not a function.");
        }
        window.call(function, o);
        LogIt.log(Level.INFO, "Successfully called JavaScript function \""
                + function + "(...)\"...");
        if (function.startsWith("jzebra")) {
            LogIt.log(Level.WARNING, "JavaScript function \"" + function
                    + "(...)\" is deprecated and will be removed in future releases. "
                    + "Please use \"" + function.replaceFirst("jzebra", "qz")
                    + "(...)\" instead.");
        }
        return true;
    } catch (Throwable e) {
        //} catch (Throwable t) {
        boolean success = false;
        if (function.startsWith("qz")) {
            // Try to call the old jzebra function
            success = notifyBrowser(function.replaceFirst("qz", "jzebra"), o);
        }
        // NOTE(review): "jebraDoneFinding" (missing 'z') looks like a typo for
        // "jzebraDoneFinding" -- confirm against callers before changing; kept
        // as-is for compatibility.
        if (function.equals("jebraDoneFinding")) {
            // Try to call yet another deprecated jzebra function
            success = notifyBrowser("jzebraDoneFindingPrinters", o);
        }
        // Warn about the function missing only if it wasn't recovered using the old jzebra name
        if (!success && !function.startsWith("jzebra")) {
            LogIt.log(Level.WARNING, "Tried calling JavaScript function \""
                    + function + "(...)\" through web browser but it has not "
                    + "been implemented (" + e.getLocalizedMessage() + ")");
        }
        return success;
    }
}
/**
 * Convenience overload for calling a JavaScript function with no meaningful
 * arguments; equivalent to notifyBrowser(function, new Object[]{null}).
 *
 * @param function name of the JavaScript function to invoke
 * @return true when the browser call succeeded
 */
private boolean notifyBrowser(String function) {
    Object[] noArgs = {null};
    return notifyBrowser(function, noArgs);
}
/**
 * Overrides getParameter() to allow all upper or all lowercase parameter
 * names. Falls back to the upper-cased name when the exact name is blank.
 *
 * @param name parameter name (may be null)
 * @param defaultVal value returned when the parameter is missing or blank
 * @return the parameter value, or defaultVal
 */
private String getParameter(String name, String defaultVal) {
    if (name != null) {
        try {
            String retVal = super.getParameter(name);
            retVal = isBlank(retVal) ? super.getParameter(name.toUpperCase()) : retVal;
            return isBlank(retVal) ? defaultVal : retVal;
        } catch (NullPointerException e) {
            return defaultVal;
        }
    }
    return defaultVal;
}

/**
 * Same as <code>getParameter(String, String)</code> except for a
 * <code>long</code> type.
 *
 * @param name parameter name
 * @param defaultVal fallback value
 * @return parsed long value of the parameter, or defaultVal
 */
private long getParameter(String name, long defaultVal) {
    return Long.parseLong(getParameter(name, "" + defaultVal));
}

/** Boolean variant of {@code getParameter(String, String)}. */
private boolean getParameter(String name, boolean defaultVal) {
    return Boolean.parseBoolean(getParameter(name, Boolean.toString(defaultVal)));
}
/**
 * Reports whether the given String carries no usable content.
 *
 * @param s candidate value
 * @return true if s is null, empty, or whitespace-only
 */
private boolean isBlank(String s) {
    if (s == null) {
        return true;
    }
    return s.trim().isEmpty();
}
/** @return a listing of installed printers, as produced by PrintServiceMatcher */
public String getPrinters() {
    return PrintServiceMatcher.getPrinterListing();
}

/** @return a listing of serial ports detected on the system */
public String getPorts() {
    return getSerialIO().getSerialPorts();
}
/**
* Tells jZebra to spool a new document when the raw data matches
* <code>pageBreak</code>
*
* @param pageBreak
*/
// @Deprecated
// public void setPageBreak(String pageBreak) {
// this.pageBreak = pageBreak;
// }
/**
 * Decodes the supplied base64 content and appends the resulting raw bytes
 * to the print buffer; decode failures are recorded via set(Throwable).
 *
 * @param base64 base64-encoded raw data
 */
public void append64(String base64) {
    try {
        getPrintRaw().append(Base64.decode(base64));
    } catch (IOException e) {
        set(e);
    }
}

/** Queues the HTML file at the given URL for appending on the applet thread. */
public void appendHTMLFile(String url) {
    this.appendType = APPEND_HTML;
    this.appendFromThread(url, appendType);
    //throw new UnsupportedOperationException("Sorry, not yet supported.");
}

/** Alias for {@link #appendHTMLFile(String)}. */
public void appendHtmlFile(String url) {
    this.appendHTMLFile(url);
}

/** Alias for {@link #appendHTML(String)}. */
public void appendHtml(String html) {
    this.appendHTML(html);
}

/** Appends the given HTML markup to the HTML print buffer. */
public void appendHTML(String html) {
    getPrintHTML().append(html);
}
/**
 * Gets the first xml node identified by <code>xmlTag</code>, reads its
 * contents and appends it to the buffer. Assumes XML content is base64
 * formatted.
 *
 * @param url URL of the XML document to fetch
 * @param xmlTag tag name whose text content will be appended
 */
public void appendXML(String url, String xmlTag) {
    appendFromThread(url, APPEND_XML);
    //this.startAppending = true;
    //this.doneAppending = false;
    //this.appendType = APPEND_XML;
    //this.file = xmlFile;
    this.xmlTag = xmlTag;
}

/**
 * Appends the entire contents of the specified file to the buffer
 *
 * @param url URL of the raw data file
 */
public void appendFile(String url) {
    appendFromThread(url, APPEND_RAW);
}

/**
 * Queues an image at the given URL for PostScript appending.
 *
 * @param url URL of the image file
 */
public void appendImage(String url) {
    appendFromThread(url, APPEND_IMAGE_PS);
}

/** Queues a PDF at the given URL for PostScript appending. */
public void appendPDF(String url) {
    appendFromThread(url, APPEND_PDF);
}

/** Sets the raw printer language (e.g. "ESCP", "EPL", "ZPL") used for image conversion. */
public void setLanguage(String lang) {
    this.lang = LanguageType.getType(lang);
}
/**
 * Appends a raw image from URL specified in the language format specified.
 *
 * @param imageFile URL path to the image to be appended. Can be .PNG, .JPG,
 * .GIF, .BMP (anything that can be converted to a
 * <code>BufferedImage</code>) Cannot be a relative path, since there's no
 * guarantee that the applet is aware of the browser's location.href.
 * @param lang Usually "ESCP", "EPL", "ZPL", etc. Parsed by
 * <code>LanguageType</code> class.
 */
public void appendImage(String imageFile, String lang) {
    setLanguage(lang);
    appendFromThread(imageFile, APPEND_IMAGE);
}

/**
 * ESCP only. Appends a raw image from URL specified in the language format
 * specified using the <code>dotDensity</code> specified.
 *
 * @param imageFile URL path to the image to be appended. Can be .PNG, .JPG,
 * .GIF, .BMP (anything that can be converted to a
 * <code>BufferedImage</code>) Cannot be a relative path, since there's no
 * guarantee that the applet is aware of the browser's location.href.
 * @param lang Usually "ESCP", "EPL", "ZPL", etc. Parsed by
 * <code>LanguageType</code> class.
 * @param dotDensity From the <code>ESC *</code> section of the ESC/P
 * programmer's manual. Default = 32
 */
public void appendImage(String imageFile, String lang, int dotDensity) {
    this.dotDensity = dotDensity;
    setLanguage(lang);
    appendFromThread(imageFile, APPEND_IMAGE);
}
/**
 * ESCP only. Appends a raw image from URL in the given language, translating
 * a friendly density name into its ESC/P dot-density code. Convenience
 * method for <code>appendImage(String imageFile, String lang, int dotDensity)</code>
 * where dotDensity is <code>32</code> "single", <code>33</code> "double" or
 * <code>39</code> "triple".
 *
 * @param imageFile URL path to the image to be appended (.PNG, .JPG, .GIF,
 * .BMP -- anything convertible to a <code>BufferedImage</code>). Must not be
 * a relative path.
 * @param lang Usually "ESCP", "EPL", "ZPL", etc. Parsed by
 * <code>LanguageType</code> class.
 * @param dotDensity "single", "double" or "triple"; triple is the highest
 * resolution. Unknown values keep the current density and log a warning.
 */
public void appendImage(String imageFile, String lang, String dotDensity) {
    // Map the friendly name onto the ESC/P dot-density code.
    if ("single".equalsIgnoreCase(dotDensity)) {
        this.dotDensity = 32;
    } else if ("double".equalsIgnoreCase(dotDensity)) {
        this.dotDensity = 33;
    } else if ("triple".equalsIgnoreCase(dotDensity)) {
        this.dotDensity = 39;
    } else {
        LogIt.log(Level.WARNING, "Cannot translate dotDensity value of '"
                + dotDensity + "'. Using '" + this.dotDensity + "'.");
    }
    setLanguage(lang);
    appendFromThread(imageFile, APPEND_IMAGE);
}
/**
 * Appends a raw image from URL specified in the language format specified.
 * For CPCL and EPL, x and y coordinates should *always* be supplied. If
 * they are not supplied, they will default to position 0,0.
 *
 * @param imageFile URL of the image
 * @param lang raw printer language, e.g. "ESCP", "EPL", "ZPL"
 * @param image_x horizontal position
 * @param image_y vertical position
 */
public void appendImage(String imageFile, String lang, int image_x, int image_y) {
    this.imageX = image_x;
    this.imageY = image_y;
    appendImage(imageFile, lang);
}

/**
 * Flags the main applet thread to append a file of the specified type.
 *
 * @param file URL of the file to append
 * @param appendType one of the APPEND_* constants
 */
private void appendFromThread(String file, int appendType) {
    this.startAppending = true;
    this.doneAppending = false;
    this.appendType = appendType;
    this.file = file;
}

/**
 * Returns the orientation as it has been recently defined. Default is null
 * which will allow the printer configuration to decide.
 *
 * @return human-readable orientation description of the current paper size
 */
public String getOrientation() {
    return this.paperSize.getOrientationDescription();
}
/*
// Due to applet security, can only be invoked by run() thread
private String readXMLFile() {
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db;
Document doc;
db = dbf.newDocumentBuilder();
doc = db.parse(file);
doc.getDocumentElement().normalize();
LogIt.log("Root element " + doc.getDocumentElement().getNodeName());
NodeList nodeList = doc.getElementsByTagName(xmlTag);
if (nodeList.getLength() > 0) {
return nodeList.item(0).getTextContent();
} else {
LogIt.log("Node \"" + xmlTag + "\" could not be found in XML file specified");
}
} catch (Exception e) {
LogIt.log(Level.WARNING, "Error reading/parsing specified XML file", e);
}
return "";
}
*/
// NOTE(review): delegating with null ends up reporting "Blank output path
// supplied" in printToFile(String) -- confirm this no-arg form is intended.
public void printToFile() {
    printToFile(null);
}

/** Prints the raw buffer to the given host on the conventional raw port 9100. */
public void printToHost(String host) {
    printToHost(host, 9100);
}
/**
 * Prints the raw buffer to the given host, parsing the port from its String
 * form. Parse failures are recorded via set(Throwable).
 *
 * @param host hostname or IP address of the raw print device
 * @param port TCP port as a String, e.g. "9100"
 */
public void printToHost(String host, String port) {
    try {
        // Fix: previously parsed the HOST string as the port number,
        // which always threw NumberFormatException for real hostnames.
        printToHost(host, Integer.parseInt(port));
    } catch (Throwable t) {
        this.set(t);
    }
}
/**
 * Streams the raw print buffer over a TCP socket to host:port. Blank host
 * or non-positive port records a NullPrintServiceException and aborts.
 *
 * @param host non-empty hostname or IP address
 * @param port positive TCP port
 */
public void printToHost(String host, int port) {
    if (!ByteUtilities.isBlank(host) && port > 0) {
        getPrintRaw().setOutputSocket(host, port);
    } else {
        this.set(new NullPrintServiceException("Invalid port or host specified. "
                + "Port values must be non-zero posistive integers. "
                + "Host values must not be empty"));
        this.clear();
        this.setDonePrinting(true);
        return;
    }
    this.print();
}

/**
 * Writes the raw print buffer to the supplied file path. A blank path or
 * invalid file type records an exception and aborts without printing.
 *
 * @param outputPath destination file path
 */
public void printToFile(String outputPath) {
    if (!ByteUtilities.isBlank(outputPath)) {
        try {
            getPrintRaw().setOutputPath(outputPath);
        } catch (InvalidFileTypeException e) {
            this.set(e);
            this.clear();
            this.setDonePrinting(true);
            return;
        }
    } else {
        this.set(new NullPrintServiceException("Blank output path supplied"));
        this.clear();
        this.setDonePrinting(true);
        return;
    }
    this.print();
}
// Due to applet security, can only be invoked by run() thread
/**
 * Loads the pending image (inline base64 data URI or remote URL) into the
 * PostScript printer; read failures are logged but not rethrown.
 */
private void readImage() {
    try {
        // Use the in-line base64 content as our image
        if (ByteUtilities.isBase64Image(file)) {
            getPrintPS().setImage(Base64.decode(file.split(",")[1]));
        } else {
            getPrintPS().setImage(ImageIO.read(new URL(file)));
        }
    } catch (IOException ex) {
        LogIt.log(Level.WARNING, "Error reading specified image", ex);
    }
}

// Use this instead of calling p2d directly. This will allow 2d graphics
// to only be used when absolutely needed
/** Lazily creates the PostScript printer helper bound to this applet. */
private PrintPostScript getPrintPS() {
    if (this.printPS == null) {
        this.printPS = new PrintPostScript();
        this.printPS.setPrintParameters(this);
    }
    return printPS;
}

/** Lazily creates the HTML printer helper bound to this applet. */
private PrintHTML getPrintHTML() {
    if (this.printHTML == null) {
        this.printHTML = new PrintHTML();
        this.printHTML.setPrintParameters(this);
    }
    return printHTML;
}
/*
public double[] getPSMargin() {
return psMargin;
}
public void setPSMargin(int psMargin) {
this.psMargin = new double[]{psMargin};
}
public void setPSMargin(double psMargin) {
this.psMargin = new double[]{psMargin};
}
public void setPSMargin(int top, int left, int bottom, int right) {
this.psMargin = new double[]{top, left, bottom, right};
}
public void setPSMargin(double top, double left, double bottom, double right) {
this.psMargin = new double[]{top, left, bottom, right};
}*/
/*
// Due to applet security, can only be invoked by run() thread
private String readRawFile() {
String rawData = "";
try {
byte[] buffer = new byte[512];
DataInputStream in = new DataInputStream(new URL(file).openStream());
//String inputLine;
while (true) {
int len = in.read(buffer);
if (len == -1) {
break;
}
rawData += new String(buffer, 0, len, charset.name());
}
in.close();
} catch (Exception e) {
LogIt.log(Level.WARNING, "Error reading/parsing specified RAW file", e);
}
return rawData;
}*/
/**
 * Prints the appended data without clearing the print buffer afterward.
 */
public void printPersistent() {
    startPrinting = true;
    donePrinting = false;
    reprint = true;
}

/**
 * Appends raw hexadecimal bytes in the format "x1Bx00", etc.
 * Malformed hex is recorded via set(Throwable).
 *
 * @param s hex-encoded raw data
 */
public void appendHex(String s) {
    try {
        getPrintRaw().append(ByteUtilities.hexStringToByteArray(s));
    } catch (NumberFormatException e) {
        this.set(e);
    }
}
/**
 * Interprets the supplied JSON formatted <code>String</code> value into a
 * <code>byte</code> array or a <code>String</code> array and appends it to
 * the raw print buffer. Only Integer and String element types are supported;
 * anything else records a NullCommandException.
 *
 * @param s JSON array literal, e.g. "[27,0,65]" or "[\"a\",\"b\"]"
 */
public void appendJSONArray(String s) {
    JSONArray array = new JSONArray(s);
    // Fix: the old guard used `array == null || array.length() < 0`, which is
    // never true (the constructor throws rather than returning null and
    // length() is non-negative), so an empty array fell through to get(0)
    // and threw an uncaught JSONException.
    if (array.length() == 0) {
        this.set(new NullCommandException("Empty or null JSON Array provided. "
                + "Cannot append raw data."));
        return;
    }
    // The first element decides how the whole array is interpreted.
    Object o = array.get(0);
    if (o instanceof Integer) {
        LogIt.log("Interpreting JSON data as Integer array. "
                + "Will automatically convert to bytes.");
        byte[] b = new byte[array.length()];
        for (int i = 0; i < b.length; i++) {
            if (!array.isNull(i)) {
                b[i] = (byte) array.getInt(i);
            } else {
                LogIt.log(Level.WARNING, "Cannot parse null byte value. "
                        + "Defaulting to 0x00");
                b[i] = (byte) 0;
            }
        }
        getPrintRaw().append(b);
    } else if (o instanceof String) {
        LogIt.log("Interpreting JSON data as String array");
        for (int i = 0; i < array.length(); i++) {
            if (!array.isNull(i)) {
                try {
                    getPrintRaw().append(array.getString(i));
                } catch (UnsupportedEncodingException e) {
                    LogIt.log(Level.WARNING, "String encoding exception "
                            + "occured while parsing JSON.", e);
                }
            } else {
                LogIt.log(Level.WARNING, "Cannot parse null String value. "
                        + "Defaulting to blank");
            }
        }
    } else {
        this.set(new NullCommandException("JSON Arrays of type "
                + o.getClass().getName() + " are not yet supported"));
    }
}
/**
 * Appends the given String to the raw print buffer using the applet's
 * current charset encoding; encoding failures are recorded via set(Throwable).
 *
 * @param s raw data to append
 */
public void append(String s) {
    try {
        // Fix null character for ESC/P syntax
        /*if (s.contains("\\x00")) {
         LogIt.log("Replacing \\\\x00 with NUL character");
         s = s.replace("\\x00", NUL_CHAR);
         } else if (s.contains("\\0")) {
         LogIt.log("Replacing \\\\0 with NUL character");
         s = s.replace("\\0", NUL_CHAR);
         } */
        // JavaScript hates the NUL, perhaps we can allow the excaped version?
        /*if (s.contains("\\x00")) {
         String[] split = s.split("\\\\\\\\x00");
         for (String ss : split) {
         getPrintRaw().append(ss.getBytes(charset.name()));
         getPrintRaw().append(new byte[]{'\0'});
         }
         } else {
         getPrintRaw().append(s.getBytes(charset.name()));
         }*/
        getPrintRaw().append(s.getBytes(charset.name()));
    } catch (UnsupportedEncodingException ex) {
        this.set(ex);
    }
}

/*
 * Makes appending the unicode null character possible by appending
 * the equivelant of <code>\x00</code> in JavaScript, which is syntatically
 * invalid in JavaScript (no errors will be thrown, but Strings will be
 * terminated prematurely
 */
/** Appends a single NUL (0x00) byte to the raw print buffer. */
public void appendNull() {
    getPrintRaw().append(new byte[]{'\0'});
}

/** Alias for {@link #appendNull()}. */
public void appendNUL() {
    appendNull();
}

/** Alias for {@link #appendNull()}. */
public void appendNul() {
    appendNull();
}
/**
* Replaces a String with the specified value. PrintRaw only.
*
* @param tag
* @param value
*
* public void replace(String tag, String value) { replaceAll(tag, value); }
*/
/**
* Replaces a String with the specified value. PrintRaw only.
*
* @param tag
* @param value
*
* public void replaceAll(String tag, String value) {
* getPrintRaw().set(printRaw.get().replaceAll(tag, value)); }
*/
/**
* Replaces the first occurance of a String with a specified value. PrintRaw
* only.
*
* @param tag
* @param value
*
* public void replaceFirst(String tag, String value) {
* getPrintRaw().set(printRaw.get().replaceFirst(tag, value)); }
*/
/**
* Sets/overwrites the cached raw commands. PrintRaw only.
*
* @param s
*
* public void set(String s) { getPrintRaw().set(s); }
*/
/**
 * Clears the cached raw commands. PrintRaw only.
 */
public void clear() {
    getPrintRaw().clear();
}

/**
 * Performs an asyncronous print and handles the output of exceptions and
 * debugging. Important: print() clears any raw buffers after printing. Use
 * printPersistent() to save the buffer to be used/appended to later.
 */
public void print() {
    startPrinting = true;
    donePrinting = false;
    reprint = false;
}

/** Flags the pending print as an HTML print, then starts it. */
public void printHTML() {
    htmlPrint = true;
    print();
}

/** Flags the pending print as a PostScript print, then starts it. */
public void printPS() {
    psPrint = true;
    print();
}
/**
 * Get our main thread ready, but don't start it until <code>start()</code>
 * has been called.
 */
@Override
public void init() {
    // Guard against re-entrant init unless multiple instances are allowed.
    if (!allowMultiple && thisThread.get() != null && thisThread.get().isAlive()) {
        LogIt.log(Level.WARNING, "init() called, but applet already "
                + "seems to be running. Ignoring.");
        return;
    }
    if (allowMultiple && thisThread.get() != null && thisThread.get().isAlive()) {
        LogIt.log(Level.INFO, "init() called, but applet already "
                + "seems to be running. Allowing.");
    }
    processParameters();
    thisThread.set(new Thread(this));
    super.init();
}

/**
 * No need to paint, the applet is invisible
 *
 * @param g graphics context (ignored)
 */
@Override
public void paint(Graphics g) {
    // Do nothing
}

/**
 * Start our main thread. LiveConnect/JSException failures are recorded
 * via set(Throwable) rather than propagated.
 */
@Override
public void start() {
    try {
        thisThread.get().start();
    } catch (JSException e) {
        set(e);
        LogIt.log(Level.SEVERE, "Error setting applet object in JavaScript using LiveConnect. "
                + "This is usally caused by Java Security Settings. In Windows, enable the Java "
                + "Console and hit 5 to show verbose messages.");
    } catch (Exception e) {
        set(e);
    }
    super.start();
}

/**
 * Stops the listener thread and closes any open serial port before
 * delegating to the applet's own stop().
 */
@Override
public void stop() {
    running = false;
    thisThread.set(null);
    if (serialIO != null) {
        try {
            serialIO.close();
        } catch (Throwable t) {
            LogIt.log(Level.SEVERE, "Could not close port [" + serialIO.getPortName() + "].", t);
        }
    }
    super.stop();
}

/** Stops the applet, then delegates to the superclass teardown. */
@Override
public void destroy() {
    this.stop();
    super.destroy();
}
/** Starts an asynchronous search for the default printer (no name filter). */
public void findPrinter() {
    findPrinter(null);
}

/**
 * Creates the print service by iterating through printers until finding
 * matching printer containing "printer" in its description. The actual
 * search runs asynchronously on the applet thread.
 *
 * @param printer substring of the printer description to match, or null
 */
public void findPrinter(String printer) {
    this.startFindingPrinters = true;
    this.doneFindingPrinters = false;
    this.printer = printer;
}

/**
 * Uses the JSSC JNI library to retreive a comma separated list of serial
 * ports from the system, i.e. "COM1,COM2,COM3" or "/dev/tty0,/dev/tty1",
 * etc. The search runs asynchronously on the applet thread.
 */
public void findPorts() {
    this.startFindingPorts = true;
    this.doneFindingPorts = false;
}

/** Sets the byte sequence that marks the start of a serial response. */
public void setSerialBegin(String begin) {
    try {
        getSerialIO().setBegin(begin.getBytes(charset.name()));
    } catch (UnsupportedEncodingException ex) {
        this.set(ex);
    }
}

/** Sets the byte sequence that marks the end of a serial response. */
public void setSerialEnd(String end) {
    try {
        getSerialIO().setEnd(end.getBytes(charset.name()));
    } catch (UnsupportedEncodingException ex) {
        this.set(ex);
    }
}
/**
 * Queues data for transmission on the currently open serial port. Fails
 * (via set(Throwable)) when no port is open or a different port is named:
 * only one open port at a time is supported.
 *
 * @param portName must match the previously opened port's name
 * @param data payload, encoded with the applet's current charset
 */
public void send(String portName, String data) {
    try {
        if (!getSerialIO().isOpen()) {
            throw new SerialException("A port has not yet been opened.");
        } else if (getSerialIO().getPortName().equals(portName)) {
            getSerialIO().append(data.getBytes(charset.name()));
            this.startSending = true;
            this.doneSending = false;
        } else {
            throw new SerialException("Port specified [" + portName + "] "
                    + "differs from previously opened port "
                    + "[" + getSerialIO().getPortName() + "]. Applet currently "
                    + "supports only one open port at a time. Data not sent.");
        }
    } catch (Throwable t) {
        this.set(t);
    }
}

/** Hex-encoded variant of {@link #send(String, String)}. */
public void sendHex(String portName, String data) {
    try {
        send(portName, new String(ByteUtilities.hexStringToByteArray(data), charset.name()));
    } catch (UnsupportedEncodingException ex) {
        this.set(ex);
    }
}

/** Numeric-argument convenience overload of setSerialProperties. */
public void setSerialProperties(int baud, int dataBits, String stopBits, int parity, String flowControl) {
    setSerialProperties(Integer.toString(baud), Integer.toString(dataBits),
            stopBits, Integer.toString(parity), flowControl);
}

/** Applies serial line settings; failures are recorded via set(Throwable). */
public void setSerialProperties(String baud, String dataBits, String stopBits, String parity, String flowControl) {
    try {
        getSerialIO().setProperties(baud, dataBits, stopBits, parity, flowControl);
    } catch (Throwable t) {
        this.set(t);
    }
}
/** Opens the named serial port without auto-detecting line settings. */
public void openPort(String serialPortName) {
    this.openPort(serialPortName, false);
}

/**
 * Requests closure of the currently open serial port; a mismatched name is
 * rejected because only one open port at a time is supported.
 *
 * @param portName must match the previously opened port's name
 */
public void closePort(String portName) {
    if (getSerialIO().getPortName().equals(portName)) {
        this.startClosingPort = true;
        this.doneClosingPort = false;
    } else {
        this.set(new SerialException("Port specified [" + portName + "] "
                + "could not be closed. Please close "
                + "[" + getSerialIO().getPortName() + "] instead. "
                + "Applet currently supports only one open port at a time."));
    }
}

/**
 * Asynchronously opens the named serial port.
 *
 * @param serialPortName port name, e.g. "COM1" or "/dev/tty0"
 * @param autoSetSerialProperties whether to auto-detect line settings
 */
public void openPort(String serialPortName, boolean autoSetSerialProperties) {
    this.serialPortIndex = -1;
    this.serialPortName = serialPortName;
    this.startOpeningPort = true;
    this.doneOpeningPort = false;
    this.autoSetSerialProperties = autoSetSerialProperties;
}

/** Opens the serial port at the given index without auto-detecting line settings. */
public void openPort(int serialPortIndex) {
    this.openPort(serialPortIndex, false);
}
/**
 * Asynchronously opens the serial port at the given index.
 *
 * @param serialPortIndex zero-based index into the detected port list
 * @param autoSetSerialProperties whether to auto-detect line settings
 */
public void openPort(int serialPortIndex, boolean autoSetSerialProperties) {
    this.serialPortName = null;
    this.serialPortIndex = serialPortIndex;
    this.startOpeningPort = true;
    this.doneOpeningPort = false;
    // Fix: this parameter was previously ignored; mirror the
    // openPort(String, boolean) overload's behavior.
    this.autoSetSerialProperties = autoSetSerialProperties;
}
/** @return true once the asynchronous printer search has completed */
public boolean isDoneFinding() {
    return doneFindingPrinters;
}

/** @return true once the asynchronous serial port search has completed */
public boolean isDoneFindingPorts() {
    return doneFindingPorts;
}

/** @return true once the asynchronous port open has completed */
public boolean isDoneOpeningPort() {
    return doneOpeningPort;
}

/** @return true once the asynchronous port close has completed */
public boolean isDoneClosingPort() {
    return doneClosingPort;
}

/** @return true once network information gathering has completed */
public boolean isDoneFindingNetwork() {
    return doneFindingNetwork;
}

/** @return true once the asynchronous print has completed */
public boolean isDonePrinting() {
    return donePrinting;
}

/** @return true once the asynchronous append has completed */
public boolean isDoneAppending() {
    return doneAppending;
}

/** @return true once the asynchronous serial send has completed */
public boolean isDoneSending() {
    return doneSending;
}

/**
 * Returns the PrintService's name (the printer name) associated with this
 * applet, if any. Returns null if none is set.
 *
 * @return the selected printer's name, or null
 */
public String getPrinter() {
    return ps == null ? null : ps.getName();
    //return ps.getName();
}
/**
 * Lazily creates the serial I/O helper. Verifies the JSSC native library is
 * on the classpath first; when it is missing, all serial state flags are
 * reset to idle, the ClassNotFoundException is recorded via set(Throwable),
 * and null is returned.
 *
 * @return the shared SerialIO instance, or null when JSSC is unavailable
 */
public SerialIO getSerialIO() {
    try {
        Class.forName("jssc.SerialPort");
        if (this.serialIO == null) {
            this.serialIO = new SerialIO();
        }
        return serialIO;
    } catch (ClassNotFoundException e) {
        // Stop whatever is happening
        this.startFindingPorts = false;
        this.doneFindingPorts = true;
        this.startSending = false;
        this.doneSending = true;
        this.startOpeningPort = false;
        this.doneOpeningPort = true;
        // Raise our exception
        this.set(e);
    }
    return null;
}

/**
 * Returns the PrintRaw object associated with this applet, creating it on
 * first use.
 *
 * @return the shared PrintRaw instance (never null)
 */
private PrintRaw getPrintRaw() {
    if (this.printRaw == null) {
        this.printRaw = new PrintRaw();
        this.printRaw.setPrintParameters(this);
    }
    return printRaw;
}

/** Lazily creates the network utilities helper. */
public NetworkUtilities getNetworkUtilities() throws SocketException, ReflectException, UnknownHostException {
    if (this.networkUtilities == null) {
        this.networkUtilities = new NetworkUtilities();
    }
    return this.networkUtilities;
}
/* private NetworkHashMap getNetworkHashMap() {
if (this.networkHashMap == null) {
this.networkHashMap = new NetworkHashMap();
}
return this.networkHashMap;
}*/
/*private NetworkUtilities getNetworkUtilities() {
if (this.networkUtilities == null) {
this.networkUtilities = new NetworkUtilities();
}
return this.networkUtilities;
}*/
/**
* Returns a comma delimited <code>String</code> containing the IP Addresses
* found for the specified MAC address. The format of these (IPv4 vs. IPv6)
* may vary depending on the system.
*
* @param macAddress
* @return
*/
/* public String getIPAddresses(String macAddress) {
return getNetworkHashMap().get(macAddress).getInetAddressesCSV();
}*/
/*public String getIpAddresses() {
return getIpAddresses();
}*/
/** Alias for {@link #getIPAddress()}. */
public String getIP() {
    return this.getIPAddress();
}
/**
* Returns a comma separated <code>String</code> containing all MAC
* Addresses found on the system, or <code>null</code> if none are found.
*
* @return
*/
/*
public String getMacAddresses() {
return getNetworkHashMap().getKeysCSV();
}*/
/** Alias for {@link #getMacAddress()}. */
public String getMac() {
    return this.getMacAddress();
}
/**
 * Retrieves a <code>String</code> containing a single MAC address. i.e.
 * 0A1B2C3D4E5F. This attempts to get the quickest and most appropriate
 * match for systems with a single adapter by attempting to choose an
 * enabled and non-loopback adapter first if possible.
 * <strong>Note:</strong> If running JRE 1.5, Java won't be able to
 * determine "enabled" or "loopback", so it will attempt to use other methods
 * such as filtering out the 127.0.0.1s, etc.
 * information. Returns <code>null</code> if no adapters are found.
 *
 * @return MAC address string, or null on any failure
 */
public String getMacAddress() {
    try {
        return getNetworkUtilities().getHardwareAddress();
    } catch (Throwable t) {
        // Deliberately swallowed: callers expect null when lookup fails.
        return null;
    }
    //return getNetworkHashMap().getLightestNetworkObject().getMacAddress();
}

/**
 * Retrieves a <code>String</code> containing a single IP address. i.e.
 * 192.168.1.101 or fe80::81ca:bcae:d6c4:9a16%25 (formatted IPv4 or IPv6)
 * This attempts to get the most appropriate match for
 * systems with a single adapter by attempting to choose an enabled and
 * non-loopback adapter first if possible, however if multiple IPs exist,
 * it will return the first found, regardless of protocol or use.
 * <strong>Note:</strong> If running JRE 1.5, Java won't be able to
 * determine "enabled" or "loopback", so it will attempt to use other methods
 * such as filtering out the 127.0.0.1 addresses, etc.
 * information. Returns <code>null</code> if no adapters are found.
 *
 * @return IP address string, or null on any failure
 */
public String getIPAddress() {
    //return getNetworkHashMap().getLightestNetworkObject().getInetAddress();
    try {
        return getNetworkUtilities().getInetAddress();
    } catch (Throwable t) {
        // Deliberately swallowed: callers expect null when lookup fails.
        return null;
    }
}
/*public String getIpAddress() {
return getIPAddress();
}*/
/**
* Retrieves a <code>String</code> containing a single IP address. i.e.
* 192.168.1.101. This attempts to get the most appropriate match for
* systems with a single adapter by attempting to choose an enabled and
* non-loopback adapter first if possible.
* <strong>Note:</strong> If running JRE 1.5, Java won't be able to
* determine "enabled" or "loopback", so it will attempt to use other methods
* such as filtering out the 127.0.0.1 addresses, etc.
* information. Returns <code>null</code> if no adapters are found.
*
* @return
*/
/* public String getIPV4Address() {
return getNetworkHashMap().getLightestNetworkObject().getInet4Address();
}
public String getIpV4Address() {
return getIpV4Address();
}*/
/**
* Retrieves a <code>String</code> containing a single IP address. i.e.
* fe80::81ca:bcae:d6c4:9a16%25. This attempts to get the most appropriate
* match for systems with a single adapter by attempting to choose an
* enabled and non-loopback adapter first if possible.
* <strong>Note:</strong> If running JRE 1.5, Java won't be able to
* determine "enabled" or "loopback", so it will attempt to use other methods
* such as filtering out the 127.0.0.1 addresses, etc.
* information. Returns <code>null</code> if no adapters are found.
*
* @return
*/
/*
public String getIPV6Address() {
return getNetworkHashMap().getLightestNetworkObject().getInet6Address();
}
public String getIpV6Address() {
return getIpV4Address();
}*/
/**
 * Returns the PrintService object associated with this applet, if any.
 * Returns null if none is set.
 *
 * @return the selected PrintService, or null
 */
public PrintService getPrintService() {
    return ps;
}

/**
 * Returns the PrintService's name (the printer name) associated with this
 * applet, if any. Returns null if none is set.
 *
 * @return the selected printer's name, or null
 * @deprecated use {@link #getPrinter()} instead
 */
@Deprecated
public String getPrinterName() {
    LogIt.log(Level.WARNING, "Function \"getPrinterName()\" has been deprecated since v. 1.2.3."
            + " Please use \"getPrinter()\" instead.");
    return getPrinter();
}

/** Alias for {@link #getException()}. */
public Throwable getError() {
    return getException();
}

/** @return the most recently recorded exception, or null */
public Throwable getException() {
    return t;
}

/** Discards the most recently recorded exception. */
public void clearException() {
    this.t = null;
}
/**
 * Returns the localized message of the most recently recorded exception.
 *
 * @return the message, or null when no exception has been recorded
 */
public String getExceptionMessage() {
    // Fix: previously threw NullPointerException when no exception was set.
    return t == null ? null : t.getLocalizedMessage();
}
/** @return the polling interval (ms) of the listener thread */
public long getSleepTime() {
    return sleep;
}

/** @return the applet version string */
public String getVersion() {
    return VERSION;
}

/**
 * Sets the time the listener thread will wait between actions
 *
 * @param sleep interval in milliseconds
 */
public void setSleepTime(long sleep) {
    this.sleep = sleep;
}

/** @return the marker string that terminates a spooled document */
public String getEndOfDocument() {
    return endOfDocument;
}

/** Sets the marker string that terminates a spooled document. */
public void setEndOfDocument(String endOfPage) {
    this.endOfDocument = endOfPage;
}

/**
 * Selects the printer at the given index of the system printer list.
 *
 * @param index position in PrintServiceMatcher.getPrinterList()
 */
public void setPrinter(int index) {
    setPrintService(PrintServiceMatcher.getPrinterList()[index]);
    LogIt.log("Printer set to index: " + index + ", Name: " + ps.getName());
    //PrinterState state = (PrinterState)this.ps.getAttribute(PrinterState.class);
    //return state == PrinterState.IDLE || state == PrinterState.PROCESSING;
}
// Generally called internally only after a printer is found.
/**
 * Stores the selected PrintService and propagates it to any helpers
 * (HTML/PostScript/raw printers) that already exist. A null service is
 * accepted but logged as a warning.
 *
 * @param ps the print service to use, or null
 */
private void setPrintService(PrintService ps) {
    if (ps == null) {
        LogIt.log(Level.WARNING, "Setting null PrintService");
        this.ps = ps;
        return;
    }
    this.ps = ps;
    if (printHTML != null) {
        printHTML.setPrintService(ps);
    }
    if (printPS != null) {
        printPS.setPrintService(ps);
    }
    if (printRaw != null) {
        printRaw.setPrintService(ps);
    }
}
/* public String getManualBreak() {
return manualBreak;
}*/
/* public void setManualBreak(String manualBreak) {
this.manualBreak = manualBreak;
}*/
/** @return how many documents are grouped into one spool job */
public int getDocumentsPerSpool() {
    return documentsPerSpool;
}

/** Sets how many documents are grouped into one spool job. */
public void setDocumentsPerSpool(int pagesPer) {
    this.documentsPerSpool = pagesPer;
}

/** Sets the name reported to the print spooler for subsequent jobs. */
public void setJobName(String jobName) {
    this.jobName = jobName;
}

/** @return the name reported to the print spooler */
public String getJobName() {
    return jobName;
}

/** Starts asynchronous gathering of network (MAC/IP) information. */
public void findNetworkInfo() {
    this.startFindingNetwork = true;
    this.doneFindingNetwork = false;
}

/** Records the given Throwable as the applet's last error and logs it. */
private void set(Throwable t) {
    this.t = t;
    LogIt.log(t);
}
/** Logs the version banner when the listener thread starts. */
private void logStart() {
    LogIt.log("QZ-PRINT " + VERSION);
    LogIt.log("===== JAVASCRIPT LISTENER THREAD STARTED =====");
}

/** Logs the listener thread shutdown banner. */
private void logStop() {
    LogIt.log("===== JAVASCRIPT LISTENER THREAD STOPPED =====");
}

/** Logs the start-of-print banner. */
private void logPrint() {
    LogIt.log("===== SENDING DATA TO THE PRINTER =====");
}

/** Logs the printer-search banner. */
private void logFindPrinter() {
    LogIt.log("===== SEARCHING FOR PRINTER =====");
}

/** Logs the serial-port-search banner. */
private void logFindPorts() {
    LogIt.log("===== SEARCHING FOR SERIAL PORTS =====");
}

/** Logs the network-information banner. */
private void logFindingNetwork() {
    LogIt.log("===== GATHERING NETWORK INFORMATION =====");
}

/** Logs the port-opening banner for the current serial port. */
private void logOpeningPort() {
    LogIt.log("===== OPENING SERIAL PORT " + serialPortName + " =====");
}

/** Logs the port-closing banner for the current serial port. */
private void logClosingPort() {
    LogIt.log("===== CLOSING SERIAL PORT " + serialPortName + " =====");
}

/** Logs the pending HTML print content. */
private void logCommands(PrintHTML ph) {
    logCommands(ph.get());
}

/** Logs the pending raw print content. */
private void logCommands(PrintRaw pr) {
    logCommands(pr.getOutput());
}

/**
 * Logs raw bytes decoded with the applet's charset; decode failures are
 * reported as a warning but never abort the print.
 */
private void logCommands(byte[] commands) {
    try {
        logCommands(new String(commands, charset.name()));
    } catch (UnsupportedEncodingException ex) {
        LogIt.log(Level.WARNING, "Cannot decode raw bytes for debug output. "
                + "This could be due to incompatible charset for this JVM "
                + "or mixed charsets within one byte stream. Ignore this message"
                + " if printing seems fine.");
    }
}

/** Logs the command text, padded with blank lines for readability. */
private void logCommands(String commands) {
    LogIt.log("\r\n\r\n" + commands + "\r\n\r\n");
}
/** Logs then prints the given raw bytes through the raw printer. */
private void logAndPrint(PrintRaw pr, byte[] data) throws IOException, InterruptedException, PrintException, UnsupportedEncodingException {
    logCommands(data);
    pr.print(data);
}

/**
 * Logs then prints the raw buffer; the buffer is preserved when a
 * persistent (reprint) job was requested, otherwise cleared afterward.
 */
private void logAndPrint(PrintRaw pr) throws IOException, PrintException, InterruptedException, UnsupportedEncodingException {
    logCommands(pr);
    if (reprint) {
        pr.print();
    } else {
        pr.print();
        pr.clear();
    }
}

/** Logs then runs the pending PostScript job, applying paper size and copies. */
private void logAndPrint(PrintPostScript printPS) throws PrinterException {
    logCommands(" <<" + file + ">>");
    // Fix GitHub Bug #24
    if (paperSize != null) {
        printPS.setPaperSize(paperSize);
    }
    // Fix GitHub Bug #30, #31
    if (copies > 0) {
        printPS.setCopies(copies);
    } else {
        printPS.setCopies(1);
    }
    printPS.print();
    psPrint = false;
}

/** Logs then runs the pending HTML job and clears the html-print flag. */
private void logAndPrint(PrintHTML printHTML) throws PrinterException {
    if (file != null) {
        logCommands(" <<" + file + ">>");
    }
    logCommands(printHTML);
    printHTML.print();
    htmlPrint = false;
}
/*private void logAndPrint(String commands) throws PrintException, InterruptedException, UnsupportedEncodingException {
logCommands(commands);
getPrintRaw().print(commands);
}*/
/**
 * Sets character encoding for raw printing only. Unknown or illegal charset
 * names are logged as a warning and the previous/default charset is kept.
 *
 * @param charset canonical charset name, e.g. "US-ASCII", "UTF-8"
 */
public void setEncoding(String charset) {
    // Example: Charset.forName("US-ASCII");
    // Consistency fix: route through LogIt like every other message in this
    // class instead of System.out.println.
    LogIt.log("Default charset encoding: " + Charset.defaultCharset().name());
    try {
        // Parse once and reuse instead of calling Charset.forName twice.
        this.charset = Charset.forName(charset);
        getPrintRaw().setCharset(this.charset);
        LogIt.log("Current applet charset encoding: " + this.charset.name());
    } catch (IllegalCharsetNameException e) {
        LogIt.log(Level.WARNING, "Could not find specified charset encoding: "
                + charset + ". Using default.", e);
    }
}
/** @return the display name of the charset used for raw printing */
public String getEncoding() {
    return this.charset.displayName();
}

/** @return the charset used for raw printing */
public Charset getCharset() {
    return this.charset;
}
/**
* Can't seem to get this to work, removed from sample.html
*
* @param orientation
*
* @Deprecated public void setImageOrientation(String orientation) {
* getPrintPS().setOrientation(orientation); }
*/
/**
 * Sets orientation (Portrait/Landscape) as to be picked up by PostScript
 * printing only. Some documents (such as PDFs) can supply their own
 * orientation; others leave it to the printer definition.
 * <p>
 * Example:</p>
 * <code>setOrientation("landscape");</code>
 * <code>setOrientation("portrait");</code>
 * <code>setOrientation("reverse_landscape");</code>
 *
 * @param orientation orientation keyword, parsed by PaperFormat
 */
public void setOrientation(String orientation) {
    // Orientation is stored on the paper size, so one must exist first.
    if (this.paperSize == null) {
        LogIt.log(Level.WARNING, "A paper size must be specified before setting orientation!");
        return;
    }
    this.paperSize.setOrientation(orientation);
}
/** Enables or disables running several applet instances side by side. */
public void allowMultipleInstances(boolean allowMultiple) {
    this.allowMultiple = allowMultiple;
    LogIt.log("Allow multiple applet instances set to \"" + allowMultiple + "\"");
}

/** JavaBean-style alias of {@link #allowMultipleInstances(boolean)}. */
public void setAllowMultipleInstances(boolean allowMultiple) {
    this.allowMultipleInstances(allowMultiple);
}

/** @return whether multiple applet instances are permitted */
public boolean getAllowMultipleInstances() {
    return this.allowMultiple;
}
/*public Boolean getMaintainAspect() {
return maintainAspect;
}*/
/** Enables auto-sizing on the current paper format; requires a paper size to be set first. */
public void setAutoSize(boolean autoSize) {
    if (this.paperSize == null) {
        LogIt.log(Level.WARNING, "A paper size must be specified before setting auto-size!");
        return;
    }
    this.paperSize.setAutoSize(autoSize);
}
/*@Deprecated
public void setMaintainAspect(boolean maintainAspect) {
setAutoSize(maintainAspect);
}*/
/** @return the configured copy count, defaulting to 1 when unset */
public int getCopies() {
    return copies > 0 ? copies : 1;
}

/**
 * Sets the copy count; non-positive values are rejected with a logged warning.
 *
 * @param copies number of copies, must be &gt; 0
 */
public void setCopies(int copies) {
    if (copies <= 0) {
        LogIt.log(Level.WARNING, "Copies must be greater than zero", new UnsupportedOperationException("Copies must be greater than zero"));
        return;
    }
    this.copies = copies;
}
/** @return the active paper format, or null when none has been set */
public PaperFormat getPaperSize() {
    return this.paperSize;
}
/**
 * Sets the paper size from string dimensions (unit suffix parsed by PaperFormat).
 */
public void setPaperSize(String width, String height) {
    this.paperSize = PaperFormat.parseSize(width, height);
    logPaperSize();
}

/** Sets the paper size from numeric dimensions in the default unit. */
public void setPaperSize(float width, float height) {
    this.paperSize = new PaperFormat(width, height);
    logPaperSize();
}

/** Sets the paper size from numeric dimensions in the given unit. */
public void setPaperSize(float width, float height, String units) {
    this.paperSize = PaperFormat.parseSize("" + width, "" + height, units);
    logPaperSize();
}

/** Logs the freshly assigned paper size (shared by all setPaperSize overloads). */
private void logPaperSize() {
    LogIt.log(Level.INFO, "Set paper size to " + paperSize.getWidth()
            + paperSize.getUnitDescription() + "x"
            + paperSize.getHeight() + paperSize.getUnitDescription());
}
}
/**
* @author Tres Finocchiaro
*
* Copyright (C) 2013 Tres Finocchiaro, QZ Industries
*
* IMPORTANT: This software is dual-licensed
*
* LGPL 2.1 This is free software. This software and source code are released
* under the "LGPL 2.1 License". A copy of this license should be distributed
* with this software. http://www.gnu.org/licenses/lgpl-2.1.html
*
* QZ INDUSTRIES SOURCE CODE LICENSE This software and source code *may* instead
* be distributed under the "QZ Industries Source Code License", available by
* request ONLY. If source code for this project is to be made proprietary for
* an individual and/or a commercial entity, written permission via a copy of
* the "QZ Industries Source Code License" must be obtained first. If you've
* obtained a copy of the proprietary license, the terms and conditions of the
* license apply only to the licensee identified in the agreement. Only THEN may
* the LGPL 2.1 license be voided.
*
*/
package qz;
import java.applet.Applet;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.awt.print.PrinterException;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.SocketException;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.LinkedList;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import javax.imageio.ImageIO;
import javax.print.PrintException;
import javax.print.PrintService;
import javax.print.PrintServiceLookup;
import netscape.javascript.JSException;
import netscape.javascript.JSObject;
import qz.exception.InvalidFileTypeException;
import qz.exception.NullCommandException;
import qz.exception.NullPrintServiceException;
import qz.exception.SerialException;
import qz.json.JSONArray;
import qz.reflection.ReflectException;
/**
* An invisible web applet for use with JavaScript functions to send raw
* commands to your thermal, receipt, shipping, barcode, card printer and much
* more.
*
* @author A. Tres Finocchiaro
*/
public class PrintApplet extends Applet implements Runnable {
// Single listener thread shared across init()/start()/stop() lifecycle calls.
private static final AtomicReference<Thread> thisThread = new AtomicReference<Thread>(null);
public static final String VERSION = "1.8.5";
private static final long serialVersionUID = 2787955484074291340L;
// Append-mode selectors consumed by the listener loop's switch(appendType).
public static final int APPEND_XML = 1;
public static final int APPEND_RAW = 2;
public static final int APPEND_IMAGE = 3;
public static final int APPEND_IMAGE_PS = 4;
public static final int APPEND_PDF = 8;
public static final int APPEND_HTML = 16;
// LiveConnect handle to the hosting page, used for qz* callbacks.
private JSObject window = null;
private LanguageType lang;
private int appendType;
// Poll interval (ms) for the listener loop; "sleep" applet parameter.
private long sleep;
private PrintService ps;
private PrintRaw printRaw;
private SerialIO serialIO;
private PrintPostScript printPS;
private PrintHTML printHTML;
//private NetworkHashMap networkHashMap;
private NetworkUtilities networkUtilities;
// Last error captured via set(Throwable); surfaced to JavaScript.
private Throwable t;
private PaperFormat paperSize;
// start*/done* flag pairs: public methods set start*, the listener thread
// performs the work and flips the matching done* flag, then notifies JS.
private boolean startFindingPrinters;
private boolean doneFindingPrinters;
private boolean startPrinting;
private boolean donePrinting;
private boolean startFindingNetwork;
private boolean doneFindingNetwork;
private boolean startAppending;
private boolean doneAppending;
private boolean startFindingPorts;
private boolean doneFindingPorts;
private boolean startSending;
private boolean doneSending;
private boolean autoSetSerialProperties = false;
private boolean startOpeningPort;
private boolean doneOpeningPort;
private boolean startClosingPort;
private boolean doneClosingPort;
private String serialPortName;
private int serialPortIndex = -1;
// Main-loop control and per-job mode flags.
private boolean running;
private boolean reprint;
private boolean psPrint;
private boolean htmlPrint;
private boolean alternatePrint;
private boolean logFeaturesPS;
// Image placement (EPL) and density (ESCP) for raw image appends.
private int imageX = 0;
private int imageY = 0;
private int dotDensity = 32;
private boolean allowMultiple;
//private double[] psMargin;
private String jobName;
// Pending file/URL and XML tag for the next append operation.
private String file;
private String xmlTag;
private String printer;
//private String orientation;
//private Boolean maintainAspect;
// -1 means "unset"; getCopies() treats it as 1.
private int copies = -1;
private Charset charset = Charset.defaultCharset();
//private String pageBreak; // For spooling pages one at a time
// Raw auto-spooling configuration: split buffer at endOfDocument markers.
private int documentsPerSpool = 0;
private String endOfDocument;
// private String manualBreak = "%/SPOOL/%";
/**
 * Create a privileged thread that will listen for JavaScript events
 *
 * @since 1.1.7
 */
//@Override
public void run() {
    final PrintApplet instance = this;
    // Grab the LiveConnect window handle before entering the loop; all
    // notifyBrowser() callbacks go through it.
    window = JSObject.getWindow(instance);
    logStart();
    try {
        // The listener needs elevated privileges for file/socket/serial access.
        AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
            //@Override
            public Object run() throws Exception {
                startJavaScriptListener();
                return null;
            }
        });
    } catch (PrivilegedActionException e) {
        LogIt.log("Error starting main JavaScript thread. All else will fail.", e);
        set(e);
    } finally {
        logStop();
    }
}
/**
 * Main polling loop of the applet thread. Services the start* flags set by
 * the public (JavaScript-facing) methods — appending, port/printer/network
 * discovery, serial I/O and printing — and fires qz* browser callbacks as
 * each task completes.
 */
private void startJavaScriptListener() {
    notifyBrowser("qzReady");
    while (running) {
        try {
            Thread.sleep(sleep); // Wait 100 milli before running again
            if (startAppending) {
                try {
                    switch (appendType) {
                        case APPEND_HTML:
                            appendHTML(new String(FileUtilities.readRawFile(file), charset.name()));
                            // FIX: missing break fell through into APPEND_XML,
                            // re-reading the file as XML with a stale/null xmlTag
                            break;
                        case APPEND_XML:
                            append64(FileUtilities.readXMLFile(file, xmlTag));
                            break;
                        case APPEND_RAW:
                            getPrintRaw().append(FileUtilities.readRawFile(file));
                            break;
                        case APPEND_IMAGE_PS:
                            readImage();
                            break;
                        case APPEND_IMAGE:
                            BufferedImage bi;
                            ImageWrapper iw;
                            if (ByteUtilities.isBase64Image(file)) {
                                byte[] imageData = Base64.decode(file.split(",")[1]);
                                bi = ImageIO.read(new ByteArrayInputStream(imageData));
                            } else {
                                bi = ImageIO.read(new URL(file));
                            }
                            iw = new ImageWrapper(bi, lang);
                            iw.setCharset(charset);
                            // Image density setting (ESCP only)
                            iw.setDotDensity(dotDensity);
                            // Image coordinates, (EPL only)
                            iw.setxPos(imageX);
                            iw.setyPos(imageY);
                            getPrintRaw().append(iw.getImageCommand());
                            break;
                        case APPEND_PDF:
                            getPrintPS().setPDF(ByteBuffer.wrap(ByteUtilities.readBinaryFile(file)));
                            break;
                        default: // Do nothing
                    }
                } catch (Throwable t) {
                    LogIt.log("Error appending data", t);
                    set(t);
                }
                startAppending = false;
                setDoneAppending(true);
            }
            if (startFindingPorts) {
                logFindPorts();
                startFindingPorts = false;
                getSerialIO().fetchSerialPorts();
                setDoneFindingPorts(true);
            }
            if (startFindingNetwork) {
                logFindingNetwork();
                startFindingNetwork = false;
                //getNetworkHashMap().clear();
                try {
                    // Gather the network information and store in a custom HashMap
                    //for (Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); en.hasMoreElements();) {
                    //    getNetworkHashMap().put(en.nextElement());
                    //}
                    getNetworkUtilities().gatherNetworkInfo();
                } catch (IOException e) {
                    set(e);
                } catch (ReflectException e) {
                    LogIt.log(Level.SEVERE, "getHardwareAddress not supported on Java 1.5", e);
                    set(e);
                }
                //getNetworkUtilities().fetchNetworkInfo();
                setDoneFindingNetwork(true);
            }
            if (startOpeningPort) {
                logOpeningPort();
                startOpeningPort = false;
                try {
                    // Prefer the index form when one was supplied
                    if (serialPortIndex != -1) {
                        getSerialIO().open(serialPortIndex);
                    } else {
                        getSerialIO().open(serialPortName);
                    }
                    // Currently a Windows-only feature
                    if (autoSetSerialProperties) {
                        getSerialIO().autoSetProperties();
                    }
                } catch (Throwable t) {
                    //notifyBrowser("qzPortNotOpened", getSerialIO().getPortName());
                    set(t);
                }
                setDoneOpeningPort(true);
            }
            if (startClosingPort) {
                logClosingPort();
                startClosingPort = false;
                try {
                    getSerialIO().close();
                } catch (Throwable t) {
                    this.set(t);
                }
                setDoneClosingPort(true);
            }
            if (startFindingPrinters) {
                logFindPrinter();
                startFindingPrinters = false;
                // null printer name means "use the system default"
                if (printer == null) {
                    PrintApplet.this.setPrintService(PrintServiceLookup.lookupDefaultPrintService());
                } else {
                    PrintApplet.this.setPrintService(PrintServiceMatcher.findPrinter(printer));
                }
                setDoneFindingPrinters(true);
            }
            // Serial Port Stuff
            if (startSending) {
                try {
                    startSending = false;
                    logCommands(new String(getSerialIO().getInputBuffer().getByteArray(), charset.name()));
                    getSerialIO().send();
                    doneSending = true;
                } catch (Throwable t) {
                    this.set(t);
                }
            }
            // Forward any serial data received since the last poll to the page
            if (serialIO != null && serialIO.getOutput() != null) {
                try {
                    notifyBrowser("qzSerialReturned",
                            new Object[]{serialIO.getPortName(),
                                new String(serialIO.getOutput(), charset.name())});
                } catch (UnsupportedEncodingException ex) {
                    this.set(ex);
                }
                serialIO.clearOutput();
            }
            if (startPrinting) {
                logPrint();
                try {
                    startPrinting = false;
                    if (htmlPrint) {
                        logAndPrint(getPrintHTML());
                    } else if (psPrint) {
                        logAndPrint(getPrintPS());
                    } else if (isRawAutoSpooling()) {
                        LinkedList<ByteArrayBuilder> pages = ByteUtilities.splitByteArray(
                                getPrintRaw().getByteArray(),
                                endOfDocument.getBytes(charset.name()),
                                documentsPerSpool);
                        //FIXME: Remove this debug line
                        LogIt.log(Level.INFO, "Automatically spooling to "
                                + pages.size() + " separate print job(s)");
                        for (ByteArrayBuilder b : pages) {
                            logAndPrint(getPrintRaw(), b.getByteArray());
                        }
                        if (!reprint) {
                            getPrintRaw().clear();
                        }
                    } else {
                        logAndPrint(getPrintRaw());
                    }
                } catch (PrintException e) {
                    set(e);
                } catch (PrinterException e) {
                    set(e);
                } catch (UnsupportedEncodingException e) {
                    set(e);
                } catch (IOException e) {
                    set(e);
                } finally {
                    setDonePrinting(true);
                    if (this.printRaw != null) {
                        getPrintRaw().clear();
                    }
                }
            }
        } catch (InterruptedException e) {
            set(e);
        }
    }
}
/** Enables the alternate (CUPS command-line) printing path. */
public void useAlternatePrinting() {
    useAlternatePrinting(true);
}

/** Toggles the alternate printing path. */
public void useAlternatePrinting(boolean alternatePrint) {
    this.alternatePrint = alternatePrint;
}

/** @return whether alternate printing is enabled */
public boolean isAlternatePrinting() {
    return alternatePrint;
}

/**
 * True when raw auto-spooling is fully configured: a spool size and an
 * end-of-document marker exist, and the buffer is non-empty and contains
 * that marker.
 */
private boolean isRawAutoSpooling() throws UnsupportedEncodingException {
    if (documentsPerSpool <= 0 || endOfDocument == null) {
        return false;
    }
    return !getPrintRaw().isClear() && getPrintRaw().contains(endOfDocument);
}
/** Marks printing finished, resets the copy count and notifies the page. */
private void setDonePrinting(boolean donePrinting) {
    this.donePrinting = donePrinting;
    this.copies = -1;
    notifyBrowser("qzDonePrinting");
}

/** Marks printer discovery finished and notifies the page. */
private void setDoneFindingPrinters(boolean doneFindingPrinters) {
    this.doneFindingPrinters = doneFindingPrinters;
    notifyBrowser("qzDoneFinding");
}

/** Marks the serial-port open finished and reports the port name (null when no port). */
private void setDoneOpeningPort(boolean doneOpeningPort) {
    this.doneOpeningPort = doneOpeningPort;
    notifyBrowser("qzDoneOpeningPort", getSerialIO() == null ? null : getSerialIO().getPortName());
}

/** Marks the serial-port close finished and reports the port name. */
private void setDoneClosingPort(boolean doneClosingPort) {
    this.doneClosingPort = doneClosingPort;
    notifyBrowser("qzDoneClosingPort", serialPortName);
}

/** Marks network discovery finished and notifies the page. */
private void setDoneFindingNetwork(boolean doneFindingNetwork) {
    this.doneFindingNetwork = doneFindingNetwork;
    notifyBrowser("qzDoneFindingNetwork");
}

/** Marks serial-port discovery finished and notifies the page. */
private void setDoneFindingPorts(boolean doneFindingPorts) {
    this.doneFindingPorts = doneFindingPorts;
    notifyBrowser("qzDoneFindingPorts");
}

/** Marks the append operation finished and notifies the page. */
private void setDoneAppending(boolean doneAppending) {
    this.doneAppending = doneAppending;
    notifyBrowser("qzDoneAppending");
}
/** Alias of {@link #setLogPostScriptFeatures(boolean)}. */
public void logPostScriptFeatures(boolean logFeaturesPS) {
    this.setLogPostScriptFeatures(logFeaturesPS);
}

/** Toggles console logging of PostScript printing features. */
public void setLogPostScriptFeatures(boolean logFeaturesPS) {
    this.logFeaturesPS = logFeaturesPS;
    LogIt.log("Console logging of PostScript printing features set to \"" + logFeaturesPS + "\"");
}

/** @return whether PostScript feature logging is enabled */
public boolean getLogPostScriptFeatures() {
    return logFeaturesPS;
}
// Initializes all state and reads applet <param> tags. Called from init();
// the start*/done* pairs are reset so the listener loop starts idle.
private void processParameters() {
    jobName = "QZ-PRINT ___ Printing";
    running = true;
    startPrinting = false;
    donePrinting = true;
    startFindingPrinters = false;
    doneFindingPrinters = true;
    startFindingPorts = false;
    doneFindingPorts = true;
    startOpeningPort = false;
    startClosingPort = false;
    startSending = false;
    doneSending = true;
    startFindingNetwork = false;
    doneFindingNetwork = true;
    startAppending = false;
    doneAppending = true;
    // Poll interval for the listener loop; "sleep" applet parameter, default 100ms.
    sleep = getParameter("sleep", 100);
    psPrint = false;
    appendType = 0;
    allowMultiple = false;
    logFeaturesPS = false;
    alternatePrint = false;
    // Optional "printer" parameter triggers an immediate async lookup.
    String printer = getParameter("printer", null);
    LogIt.disableLogging = getParameter("disable_logging", false);
    if (printer != null) {
        findPrinter(printer);
    }
}
/**
 * Convenience overload for calling a JavaScript function with a single
 * <code>String</code> argument. Equivalent to
 * notifyBrowser(function, new Object[]{s}).
 *
 * @param function JavaScript function name
 * @param s        the lone argument
 * @return true if the browser call succeeded
 */
public boolean notifyBrowser(String function, String s) {
    Object[] args = new Object[]{s};
    return notifyBrowser(function, args);
}
/**
 * Calls JavaScript function (i.e. "qzReady()") from the web browser. For a
 * period of time, will call "jzebraReady()" as well as "qzReady()" but fail
 * silently on the old "jzebra" prefixed functions. If the "jzebra"
 * equivalent is used, it will display a deprecation warning.
 *
 * @param function The JavaScript function to call
 * @param o The parameter or array of parameters to send to the JavaScript
 * function
 * @return true if the function (or a deprecated jzebra fallback) was invoked
 */
public boolean notifyBrowser(String function, Object[] o) {
    try {
        String type = (String)window.eval("typeof(" + function + ")");
        // Ubuntu doesn't properly raise exceptions when calling invalid
        // functions, so this is the work-around
        if (!type.equals("function")) {
            throw new JSException("Object \"" + function + "\" does not "
                    + "exist or is not a function.");
        }
        window.call(function, o);
        LogIt.log(Level.INFO, "Successfully called JavaScript function \""
                + function + "(...)\"...");
        if (function.startsWith("jzebra")) {
            LogIt.log(Level.WARNING, "JavaScript function \"" + function
                    + "(...)\" is deprecated and will be removed in future releases. "
                    + "Please use \"" + function.replaceFirst("jzebra", "qz")
                    + "(...)\" instead.");
        }
        return true;
    } catch (JSException e) {
    //} catch (Throwable t) {
        boolean success = false;
        // Retry once under the legacy "jzebra" prefix before giving up.
        if (function.startsWith("qz")) {
            // Try to call the old jzebra function
            success = notifyBrowser(function.replaceFirst("qz", "jzebra"), o);
        }
        if (function.equals("jebraDoneFinding")) {
            // Try to call yet another deprecated jzebra function
            success = notifyBrowser("jzebraDoneFindingPrinters", o);
        }
        // Warn about the function missing only if it wasn't recovered using the old jzebra name
        if (!success && !function.startsWith("jzebra")) {
            LogIt.log(Level.WARNING, "Tried calling JavaScript function \""
                    + function + "(...)\" through web browser but it has not "
                    + "been implemented (" + e.getLocalizedMessage() + ")");
        }
        return success;
    }
}
/**
 * Convenience overload for calling a JavaScript function with no arguments.
 * Equivalent to notifyBrowser(function, new Object[]{null}).
 */
private boolean notifyBrowser(String function) {
    Object[] noArgs = new Object[]{null};
    return notifyBrowser(function, noArgs);
}
/**
 * Looks up an applet parameter, also trying the all-uppercase spelling,
 * and falls back to the given default when absent or blank.
 *
 * @param name       parameter name (case-tolerant)
 * @param defaultVal value returned when the parameter is missing/blank
 * @return the parameter value or the default
 */
private String getParameter(String name, String defaultVal) {
    if (name == null) {
        return defaultVal;
    }
    try {
        String value = super.getParameter(name);
        if (isBlank(value)) {
            // Some containers only expose the upper-cased name.
            value = super.getParameter(name.toUpperCase());
        }
        return isBlank(value) ? defaultVal : value;
    } catch (NullPointerException e) {
        return defaultVal;
    }
}
/**
 * Same as <code>getParameter(String, String)</code> except for a
 * <code>long</code> type.
 *
 * @param name       parameter name
 * @param defaultVal default when missing
 * @return the parsed long value
 */
private long getParameter(String name, long defaultVal) {
    String raw = getParameter(name, String.valueOf(defaultVal));
    return Long.parseLong(raw);
}

/** Boolean variant of {@code getParameter(String, String)}. */
private boolean getParameter(String name, boolean defaultVal) {
    String raw = getParameter(name, Boolean.toString(defaultVal));
    return Boolean.parseBoolean(raw);
}
/**
 * @param s candidate string
 * @return true when the string is null or contains only whitespace
 */
private boolean isBlank(String s) {
    if (s == null) {
        return true;
    }
    return s.trim().length() == 0;
}

/** @return a listing of available printers, formatted by PrintServiceMatcher */
public String getPrinters() {
    return PrintServiceMatcher.getPrinterListing();
}

/** @return a comma separated list of the system's serial ports */
public String getPorts() {
    return getSerialIO().getSerialPorts();
}
/**
* Tells jZebra to spool a new document when the raw data matches
* <code>pageBreak</code>
*
* @param pageBreak
*/
// @Deprecated
// public void setPageBreak(String pageBreak) {
// this.pageBreak = pageBreak;
// }
/** Decodes the base64 payload and appends the raw bytes to the print buffer. */
public void append64(String base64) {
    try {
        byte[] decoded = Base64.decode(base64);
        getPrintRaw().append(decoded);
    } catch (IOException e) {
        set(e);
    }
}
/** Queues an HTML file for appending on the listener thread. */
public void appendHTMLFile(String url) {
    this.appendType = APPEND_HTML;
    this.appendFromThread(url, appendType);
    //throw new UnsupportedOperationException("Sorry, not yet supported.");
}

/** Case-variant alias of {@link #appendHTMLFile(String)}. */
public void appendHtmlFile(String url) {
    appendHTMLFile(url);
}

/** Case-variant alias of {@link #appendHTML(String)}. */
public void appendHtml(String html) {
    appendHTML(html);
}

/** Appends HTML markup directly to the HTML print buffer. */
public void appendHTML(String html) {
    getPrintHTML().append(html);
}
/**
 * Gets the first xml node identified by {@code xmlTag}, reads its
 * contents and appends it to the buffer. Assumes XML content is base64
 * formatted.
 *
 * @param url    location of the XML file to read
 * @param xmlTag name of the tag whose text content holds the base64 data
 */
public void appendXML(String url, String xmlTag) {
    appendFromThread(url, APPEND_XML);
    //this.startAppending = true;
    //this.doneAppending = false;
    //this.appendType = APPEND_XML;
    //this.file = xmlFile;
    // The tag is stashed here; the listener thread reads it when it services
    // the APPEND_XML request.
    this.xmlTag = xmlTag;
}
/**
 * Appends the entire contents of the specified file to the raw buffer.
 *
 * @param url location of the raw data file
 */
public void appendFile(String url) {
    appendFromThread(url, APPEND_RAW);
}

/**
 * Appends an image for PostScript printing.
 *
 * @param url location of the image file
 */
public void appendImage(String url) {
    appendFromThread(url, APPEND_IMAGE_PS);
}

/** Appends a PDF for PostScript printing. */
public void appendPDF(String url) {
    appendFromThread(url, APPEND_PDF);
}

/** Sets the raw printer language (e.g. "ESCP", "EPL", "ZPL") for image conversion. */
public void setLanguage(String lang) {
    this.lang = LanguageType.getType(lang);
}
/**
 * Appends a raw image from the given URL, converted to the specified
 * printer language.
 *
 * @param imageFile URL of the image (.PNG, .JPG, .GIF, .BMP — anything
 *                  readable as a <code>BufferedImage</code>). Must not be a
 *                  relative path: the applet cannot see location.href.
 * @param lang      raw language, e.g. "ESCP", "EPL", "ZPL"; parsed by
 *                  <code>LanguageType</code>.
 */
public void appendImage(String imageFile, String lang) {
    setLanguage(lang);
    appendFromThread(imageFile, APPEND_IMAGE);
}

/**
 * ESCP only. Same as {@link #appendImage(String, String)} but with an
 * explicit dot density.
 *
 * @param imageFile  URL of the image
 * @param lang       raw language name
 * @param dotDensity value from the <code>ESC *</code> section of the ESC/P
 *                   programmer's manual. Default = 32
 */
public void appendImage(String imageFile, String lang, int dotDensity) {
    this.dotDensity = dotDensity;
    setLanguage(lang);
    appendFromThread(imageFile, APPEND_IMAGE);
}

/**
 * ESCP only. Convenience form mapping "single"/"double"/"triple" to dot
 * densities 32/33/39 respectively; an unknown value keeps the current
 * density and logs a warning.
 *
 * @param imageFile  URL of the image
 * @param lang       raw language name
 * @param dotDensity "single", "double" or "triple"
 */
public void appendImage(String imageFile, String lang, String dotDensity) {
    int density = this.dotDensity;
    if ("single".equalsIgnoreCase(dotDensity)) {
        density = 32;
    } else if ("double".equalsIgnoreCase(dotDensity)) {
        density = 33;
    } else if ("triple".equalsIgnoreCase(dotDensity)) {
        density = 39;
    } else {
        LogIt.log(Level.WARNING, "Cannot translate dotDensity value of '"
                + dotDensity + "'. Using '" + this.dotDensity + "'.");
    }
    this.dotDensity = density;
    setLanguage(lang);
    appendFromThread(imageFile, APPEND_IMAGE);
}

/**
 * Appends a raw image at explicit coordinates. For CPCL and EPL, x and y
 * should *always* be supplied; they default to 0,0 otherwise.
 *
 * @param imageFile URL of the image
 * @param lang      raw language name
 * @param image_x   horizontal position (EPL)
 * @param image_y   vertical position (EPL)
 */
public void appendImage(String imageFile, String lang, int image_x, int image_y) {
    this.imageX = image_x;
    this.imageY = image_y;
    appendImage(imageFile, lang);
}
/**
 * Queues a file of the specified type for appending; the work is performed
 * by the listener thread on its next poll.
 *
 * @param file       URL/path of the file to append
 * @param appendType one of the APPEND_* constants
 */
private void appendFromThread(String file, int appendType) {
    this.doneAppending = false;
    this.appendType = appendType;
    this.file = file;
    // FIX: set the trigger flag LAST. The listener thread polls
    // startAppending; raising it before file/appendType were written let it
    // observe the flag with stale job data.
    this.startAppending = true;
}
/**
 * Returns the orientation as it has been recently defined. Default is null
 * which will allow the printer configuration to decide.
 *
 * @return the orientation description, or null when no paper size was set
 */
public String getOrientation() {
    // FIX: guard against NPE — setOrientation() checks for a missing paper
    // size, but this getter previously dereferenced it unconditionally.
    if (this.paperSize == null) {
        return null;
    }
    return this.paperSize.getOrientationDescription();
}
/*
// Due to applet security, can only be invoked by run() thread
private String readXMLFile() {
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db;
Document doc;
db = dbf.newDocumentBuilder();
doc = db.parse(file);
doc.getDocumentElement().normalize();
LogIt.log("Root element " + doc.getDocumentElement().getNodeName());
NodeList nodeList = doc.getElementsByTagName(xmlTag);
if (nodeList.getLength() > 0) {
return nodeList.item(0).getTextContent();
} else {
LogIt.log("Node \"" + xmlTag + "\" could not be found in XML file specified");
}
} catch (Exception e) {
LogIt.log(Level.WARNING, "Error reading/parsing specified XML file", e);
}
return "";
}
*/
/** Prints the raw buffer to a file chosen by the backend (no explicit path). */
public void printToFile() {
    this.printToFile(null);
}

/** Prints the raw buffer to a network host on the default raw port 9100. */
public void printToHost(String host) {
    this.printToHost(host, 9100);
}
/**
 * Prints the raw buffer to a network host using a string port number.
 *
 * @param host target hostname or IP
 * @param port target port as a decimal string
 */
public void printToHost(String host, String port) {
    try {
        // FIX: was Integer.parseInt(host) — parsed the hostname instead of
        // the port, so every call with a non-numeric host failed.
        printToHost(host, Integer.parseInt(port));
    } catch (Throwable t) {
        this.set(t);
    }
}
/**
 * Prints the raw buffer to a network socket. Invalid arguments record an
 * error, clear the buffer and complete the job without printing.
 *
 * @param host target hostname or IP (must be non-blank)
 * @param port target port (must be positive)
 */
public void printToHost(String host, int port) {
    if (ByteUtilities.isBlank(host) || port <= 0) {
        this.set(new NullPrintServiceException("Invalid port or host specified. "
                + "Port values must be non-zero posistive integers. "
                + "Host values must not be empty"));
        this.clear();
        this.setDonePrinting(true);
        return;
    }
    getPrintRaw().setOutputSocket(host, port);
    this.print();
}
/**
 * Prints the raw buffer to the given file path. A blank path or an invalid
 * file type records an error, clears the buffer and completes the job
 * without printing.
 *
 * @param outputPath destination file path (must be non-blank)
 */
public void printToFile(String outputPath) {
    if (ByteUtilities.isBlank(outputPath)) {
        this.set(new NullPrintServiceException("Blank output path supplied"));
        this.clear();
        this.setDonePrinting(true);
        return;
    }
    try {
        getPrintRaw().setOutputPath(outputPath);
    } catch (InvalidFileTypeException e) {
        this.set(e);
        this.clear();
        this.setDonePrinting(true);
        return;
    }
    this.print();
}
// Due to applet security, can only be invoked by run() thread.
// Loads the pending image (inline base64 data-URI or remote URL) into the
// PostScript backend; failures are logged rather than propagated.
private void readImage() {
    try {
        if (ByteUtilities.isBase64Image(file)) {
            // Use the in-line base64 content as our image
            String encoded = file.split(",")[1];
            getPrintPS().setImage(Base64.decode(encoded));
        } else {
            getPrintPS().setImage(ImageIO.read(new URL(file)));
        }
    } catch (IOException ex) {
        LogIt.log(Level.WARNING, "Error reading specified image", ex);
    }
}
// Lazy accessor — use this instead of touching the field directly so the
// 2D graphics machinery is only initialized when actually needed.
private PrintPostScript getPrintPS() {
    if (printPS == null) {
        printPS = new PrintPostScript();
        printPS.setPrintParameters(this);
    }
    return printPS;
}

// Lazy accessor for the HTML print backend.
private PrintHTML getPrintHTML() {
    if (printHTML == null) {
        printHTML = new PrintHTML();
        printHTML.setPrintParameters(this);
    }
    return printHTML;
}
/*
public double[] getPSMargin() {
return psMargin;
}
public void setPSMargin(int psMargin) {
this.psMargin = new double[]{psMargin};
}
public void setPSMargin(double psMargin) {
this.psMargin = new double[]{psMargin};
}
public void setPSMargin(int top, int left, int bottom, int right) {
this.psMargin = new double[]{top, left, bottom, right};
}
public void setPSMargin(double top, double left, double bottom, double right) {
this.psMargin = new double[]{top, left, bottom, right};
}*/
/*
// Due to applet security, can only be invoked by run() thread
private String readRawFile() {
String rawData = "";
try {
byte[] buffer = new byte[512];
DataInputStream in = new DataInputStream(new URL(file).openStream());
//String inputLine;
while (true) {
int len = in.read(buffer);
if (len == -1) {
break;
}
rawData += new String(buffer, 0, len, charset.name());
}
in.close();
} catch (Exception e) {
LogIt.log(Level.WARNING, "Error reading/parsing specified RAW file", e);
}
return rawData;
}*/
/**
 * Prints the appended data without clearing the print buffer afterward.
 */
public void printPersistent() {
    reprint = true;
    donePrinting = false;
    startPrinting = true;
}

/**
 * Appends raw hexadecimal bytes in the format "x1Bx00", etc.
 *
 * @param s hex-encoded byte string
 */
public void appendHex(String s) {
    try {
        byte[] raw = ByteUtilities.hexStringToByteArray(s);
        getPrintRaw().append(raw);
    } catch (NumberFormatException e) {
        this.set(e);
    }
}
/**
 * Interprets the supplied JSON formatted <code>String</code> value into a
 * <code>byte</code> array or a <code>String</code> array and appends it to
 * the raw buffer.
 *
 * @param s JSON array literal, either of integers (treated as bytes) or
 *          of strings
 */
public void appendJSONArray(String s) {
    JSONArray array = new JSONArray(s);
    // FIX: the original tested "array == null || array.length() < 0" — the
    // constructor never returns null and length() is never negative, so an
    // empty array bypassed this guard and crashed on array.get(0) below.
    if (array.length() == 0) {
        this.set(new NullCommandException("Empty or null JSON Array provided. "
                + "Cannot append raw data."));
        return;
    }
    Object o = array.get(0);
    if (o instanceof Integer) {
        LogIt.log("Interpreting JSON data as Integer array. "
                + "Will automatically convert to bytes.");
        byte[] b = new byte[array.length()];
        for (int i = 0; i < b.length; i++) {
            if (!array.isNull(i)) {
                b[i] = (byte) array.getInt(i);
            } else {
                LogIt.log(Level.WARNING, "Cannot parse null byte value. "
                        + "Defaulting to 0x00");
                b[i] = (byte) 0;
            }
        }
        getPrintRaw().append(b);
    } else if (o instanceof String) {
        LogIt.log("Interpreting JSON data as String array");
        for (int i = 0; i < array.length(); i++) {
            if (!array.isNull(i)) {
                try {
                    getPrintRaw().append(array.getString(i));
                } catch (UnsupportedEncodingException e) {
                    LogIt.log(Level.WARNING, "String encoding exception "
                            + "occured while parsing JSON.", e);
                }
            } else {
                LogIt.log(Level.WARNING, "Cannot parse null String value. "
                        + "Defaulting to blank");
            }
        }
    } else {
        this.set(new NullCommandException("JSON Arrays of type "
                + o.getClass().getName() + " are not yet supported"));
    }
}
/**
 * Appends the string to the raw buffer, encoded with the applet's current
 * charset. Encoding failures are recorded via set().
 *
 * @param s text to append
 */
public void append(String s) {
    try {
        byte[] encoded = s.getBytes(charset.name());
        getPrintRaw().append(encoded);
    } catch (UnsupportedEncodingException ex) {
        this.set(ex);
    }
}
/*
 * Makes appending the unicode null character possible. JavaScript cannot
 * pass "\x00" reliably (strings terminate prematurely), so this appends the
 * NUL byte directly.
 */
public void appendNull() {
    byte[] nul = {'\0'};
    getPrintRaw().append(nul);
}

/** Alias of {@link #appendNull()}. */
public void appendNUL() {
    this.appendNull();
}

/** Alias of {@link #appendNull()}. */
public void appendNul() {
    this.appendNull();
}
/**
* Replaces a String with the specified value. PrintRaw only.
*
* @param tag
* @param value
*
* public void replace(String tag, String value) { replaceAll(tag, value); }
*/
/**
* Replaces a String with the specified value. PrintRaw only.
*
* @param tag
* @param value
*
* public void replaceAll(String tag, String value) {
* getPrintRaw().set(printRaw.get().replaceAll(tag, value)); }
*/
/**
* Replaces the first occurance of a String with a specified value. PrintRaw
* only.
*
* @param tag
* @param value
*
* public void replaceFirst(String tag, String value) {
* getPrintRaw().set(printRaw.get().replaceFirst(tag, value)); }
*/
/**
* Sets/overwrites the cached raw commands. PrintRaw only.
*
* @param s
*
* public void set(String s) { getPrintRaw().set(s); }
*/
/**
 * Clears the cached raw commands. PrintRaw only.
 */
public void clear() {
    this.getPrintRaw().clear();
}
/**
 * Performs an asynchronous print and handles the output of exceptions and
 * debugging. Important: print() clears any raw buffers after printing. Use
 * printPersistent() to save the buffer to be used/appended to later.
 */
public void print() {
    reprint = false;
    donePrinting = false;
    startPrinting = true;
}

/** Asynchronously prints the HTML buffer. */
public void printHTML() {
    htmlPrint = true;
    this.print();
}

/** Asynchronously prints via the PostScript backend. */
public void printPS() {
    psPrint = true;
    this.print();
}
/**
 * Get our main thread ready, but don't start it until <code>start()</code>
 * has been called.
 */
@Override
public void init() {
    Thread current = thisThread.get();
    if (current != null && current.isAlive()) {
        if (!allowMultiple) {
            LogIt.log(Level.WARNING, "init() called, but applet already "
                    + "seems to be running. Ignoring.");
            return;
        }
        LogIt.log(Level.INFO, "init() called, but applet already "
                + "seems to be running. Allowing.");
    }
    processParameters();
    thisThread.set(new Thread(this));
    super.init();
}
/**
* No need to paint, the applet is invisible
*
* @param g
*/
@Override
public void paint(Graphics g) {
// Do nothing
}
/**
* Start our main thread
*/
@Override
public void start() {
try {
thisThread.get().start();
} catch (JSException e) {
set(e);
LogIt.log(Level.SEVERE, "Error setting applet object in JavaScript using LiveConnect. "
+ "This is usally caused by Java Security Settings. In Windows, enable the Java "
+ "Console and hit 5 to show verbose messages.");
} catch (Exception e) {
set(e);
}
super.start();
}
@Override
public void stop() {
running = false;
thisThread.set(null);
if (serialIO != null) {
try {
serialIO.close();
} catch (Throwable t) {
LogIt.log(Level.SEVERE, "Could not close port [" + serialIO.getPortName() + "].", t);
}
}
super.stop();
}
@Override
public void destroy() {
this.stop();
super.destroy();
}
public void findPrinter() {
findPrinter(null);
}
/**
* Creates the print service by iterating through printers until finding
* matching printer containing "printerName" in its description
*
* @param printerName
* @return
*/
public void findPrinter(String printer) {
this.startFindingPrinters = true;
this.doneFindingPrinters = false;
this.printer = printer;
}
/**
* Uses the JSSC JNI library to retreive a comma separated list of serial
* ports from the system, i.e. "COM1,COM2,COM3" or "/dev/tty0,/dev/tty1",
* etc.
*/
public void findPorts() {
this.startFindingPorts = true;
this.doneFindingPorts = false;
}
public void setSerialBegin(String begin) {
try {
getSerialIO().setBegin(begin.getBytes(charset.name()));
} catch (UnsupportedEncodingException ex) {
this.set(ex);
}
}
public void setSerialEnd(String end) {
try {
getSerialIO().setEnd(end.getBytes(charset.name()));
} catch (UnsupportedEncodingException ex) {
this.set(ex);
}
}
public void send(String portName, String data) {
try {
if (!getSerialIO().isOpen()) {
throw new SerialException("A port has not yet been opened.");
} else if (getSerialIO().getPortName().equals(portName)) {
getSerialIO().append(data.getBytes(charset.name()));
this.startSending = true;
this.doneSending = false;
} else {
throw new SerialException("Port specified [" + portName + "] "
+ "differs from previously opened port "
+ "[" + getSerialIO().getPortName() + "]. Applet currently "
+ "supports only one open port at a time. Data not sent.");
}
} catch (Throwable t) {
this.set(t);
}
}
public void sendHex(String portName, String data) {
try {
send(portName, new String(ByteUtilities.hexStringToByteArray(data), charset.name()));
} catch (UnsupportedEncodingException ex) {
this.set(ex);
}
}
public void setSerialProperties(int baud, int dataBits, String stopBits, int parity, String flowControl) {
setSerialProperties(Integer.toString(baud), Integer.toString(dataBits),
stopBits, Integer.toString(parity), flowControl);
}
public void setSerialProperties(String baud, String dataBits, String stopBits, String parity, String flowControl) {
try {
getSerialIO().setProperties(baud, dataBits, stopBits, parity, flowControl);
} catch (Throwable t) {
this.set(t);
}
}
public void openPort(String serialPortName) {
this.openPort(serialPortName, false);
}
public void closePort(String portName) {
if (getSerialIO().getPortName().equals(portName)) {
this.startClosingPort = true;
this.doneClosingPort = false;
} else {
this.set(new SerialException("Port specified [" + portName + "] "
+ "could not be closed. Please close "
+ "[" + getSerialIO().getPortName() + "] instead. "
+ "Applet currently supports only one open port at a time."));
}
}
public void openPort(String serialPortName, boolean autoSetSerialProperties) {
this.serialPortIndex = -1;
this.serialPortName = serialPortName;
this.startOpeningPort = true;
this.doneOpeningPort = false;
this.autoSetSerialProperties = autoSetSerialProperties;
}
public void openPort(int serialPortIndex) {
this.openPort(serialPortIndex, false);
}
public void openPort(int serialPortIndex, boolean autoSetSerialProperties) {
this.serialPortName = null;
this.serialPortIndex = serialPortIndex;
this.startOpeningPort = true;
this.doneOpeningPort = false;
}
public boolean isDoneFinding() {
return doneFindingPrinters;
}
public boolean isDoneFindingPorts() {
return doneFindingPorts;
}
public boolean isDoneOpeningPort() {
return doneOpeningPort;
}
public boolean isDoneClosingPort() {
return doneClosingPort;
}
public boolean isDoneFindingNetwork() {
return doneFindingNetwork;
}
public boolean isDonePrinting() {
return donePrinting;
}
public boolean isDoneAppending() {
return doneAppending;
}
public boolean isDoneSending() {
return doneSending;
}
/**
* Returns the PrintService's name (the printer name) associated with this
* applet, if any. Returns null if none is set.
*
* @return
*/
public String getPrinter() {
return ps == null ? null : ps.getName();
//return ps.getName();
}
public SerialIO getSerialIO() {
try {
Class.forName("jssc.SerialPort");
if (this.serialIO == null) {
this.serialIO = new SerialIO();
}
return serialIO;
} catch (ClassNotFoundException e) {
// Stop whatever is happening
this.startFindingPorts = false;
this.doneFindingPorts = true;
this.startSending = false;
this.doneSending = true;
this.startOpeningPort = false;
this.doneOpeningPort = true;
// Raise our exception
this.set(e);
}
return null;
}
/**
* Returns the PrintRaw object associated with this applet, if any. Returns
* null if none is set.
*
* @return
*/
private PrintRaw getPrintRaw() {
if (this.printRaw == null) {
this.printRaw = new PrintRaw();
this.printRaw.setPrintParameters(this);
}
return printRaw;
}
public NetworkUtilities getNetworkUtilities() throws SocketException, ReflectException, UnknownHostException {
if (this.networkUtilities == null) {
this.networkUtilities = new NetworkUtilities();
}
return this.networkUtilities;
}
/* private NetworkHashMap getNetworkHashMap() {
if (this.networkHashMap == null) {
this.networkHashMap = new NetworkHashMap();
}
return this.networkHashMap;
}*/
/*private NetworkUtilities getNetworkUtilities() {
if (this.networkUtilities == null) {
this.networkUtilities = new NetworkUtilities();
}
return this.networkUtilities;
}*/
/**
* Returns a comma delimited <code>String</code> containing the IP Addresses
* found for the specified MAC address. The format of these (IPv4 vs. IPv6)
* may vary depending on the system.
*
* @param macAddress
* @return
*/
/* public String getIPAddresses(String macAddress) {
return getNetworkHashMap().get(macAddress).getInetAddressesCSV();
}*/
/*public String getIpAddresses() {
return getIpAddresses();
}*/
public String getIP() {
return this.getIPAddress();
}
/**
* Returns a comma separated <code>String</code> containing all MAC
* Addresses found on the system, or <code>null</code> if none are found.
*
* @return
*/
/*
public String getMacAddresses() {
return getNetworkHashMap().getKeysCSV();
}*/
public String getMac() {
return this.getMacAddress();
}
/**
* Retrieves a <code>String</code> containing a single MAC address. i.e.
* 0A1B2C3D4E5F. This attempts to get the quickest and most appropriate
* match for systems with a single adapter by attempting to choose an
* enabled and non-loopback adapter first if possible.
* <strong>Note:</strong> If running JRE 1.5, Java won't be able to
* determine "enabled" or "loopback", so it will attempt to use other methods
* such as filtering out the 127.0.0.1s, etc.
* information. Returns <code>null</code> if no adapters are found.
*
* @return
*/
public String getMacAddress() {
try {
return getNetworkUtilities().getHardwareAddress();
} catch (Throwable t) {
return null;
}
//return getNetworkHashMap().getLightestNetworkObject().getMacAddress();
}
/**
* Retrieves a <code>String</code> containing a single IP address. i.e.
* 192.168.1.101 or fe80::81ca:bcae:d6c4:9a16%25 (formatted IPv4 or IPv6)
* This attempts to get the most appropriate match for
* systems with a single adapter by attempting to choose an enabled and
* non-loopback adapter first if possible, however if multiple IPs exist,
* it will return the first found, regardless of protocol or use.
* <strong>Note:</strong> If running JRE 1.5, Java won't be able to
* determine "enabled" or "loopback", so it will attempt to use other methods
* such as filtering out the 127.0.0.1 addresses, etc.
* information. Returns <code>null</code> if no adapters are found.
*
* @return
*/
public String getIPAddress() {
//return getNetworkHashMap().getLightestNetworkObject().getInetAddress();
try {
return getNetworkUtilities().getInetAddress();
} catch (Throwable t) {
return null;
}
}
/*public String getIpAddress() {
return getIPAddress();
}*/
/**
* Retrieves a <code>String</code> containing a single IP address. i.e.
* 192.168.1.101. This attempts to get the most appropriate match for
* systems with a single adapter by attempting to choose an enabled and
* non-loopback adapter first if possible.
* <strong>Note:</strong> If running JRE 1.5, Java won't be able to
* determine "enabled" or "loopback", so it will attempt to use other methods
* such as filtering out the 127.0.0.1 addresses, etc.
* information. Returns <code>null</code> if no adapters are found.
*
* @return
*/
/* public String getIPV4Address() {
return getNetworkHashMap().getLightestNetworkObject().getInet4Address();
}
public String getIpV4Address() {
return getIpV4Address();
}*/
/**
* Retrieves a <code>String</code> containing a single IP address. i.e.
* fe80::81ca:bcae:d6c4:9a16%25. This attempts to get the most appropriate
* match for systems with a single adapter by attempting to choose an
* enabled and non-loopback adapter first if possible.
* <strong>Note:</strong> If running JRE 1.5, Java won't be able to
* determine "enabled" or "loopback", so it will attempt to use other methods
* such as filtering out the 127.0.0.1 addresses, etc.
* information. Returns <code>null</code> if no adapters are found.
*
* @return
*/
/*
public String getIPV6Address() {
return getNetworkHashMap().getLightestNetworkObject().getInet6Address();
}
public String getIpV6Address() {
return getIpV4Address();
}*/
/**
* Returns the PrintService object associated with this applet, if any.
* Returns null if none is set.
*
* @return
*/
public PrintService getPrintService() {
return ps;
}
/**
* Returns the PrintService's name (the printer name) associated with this
* applet, if any. Returns null if none is set.
*
* @return
*/
@Deprecated
public String getPrinterName() {
LogIt.log(Level.WARNING, "Function \"getPrinterName()\" has been deprecated since v. 1.2.3."
+ " Please use \"getPrinter()\" instead.");
return getPrinter();
}
public Throwable getError() {
return getException();
}
public Throwable getException() {
return t;
}
public void clearException() {
this.t = null;
}
public String getExceptionMessage() {
return t.getLocalizedMessage();
}
public long getSleepTime() {
return sleep;
}
public String getVersion() {
return VERSION;
}
/**
* Sets the time the listener thread will wait between actions
*
* @param sleep
*/
public void setSleepTime(long sleep) {
this.sleep = sleep;
}
public String getEndOfDocument() {
return endOfDocument;
}
public void setEndOfDocument(String endOfPage) {
this.endOfDocument = endOfPage;
}
public void setPrinter(int index) {
setPrintService(PrintServiceMatcher.getPrinterList()[index]);
LogIt.log("Printer set to index: " + index + ", Name: " + ps.getName());
//PrinterState state = (PrinterState)this.ps.getAttribute(PrinterState.class);
//return state == PrinterState.IDLE || state == PrinterState.PROCESSING;
}
// Generally called internally only after a printer is found.
private void setPrintService(PrintService ps) {
if (ps == null) {
LogIt.log(Level.WARNING, "Setting null PrintService");
this.ps = ps;
return;
}
this.ps = ps;
if (printHTML != null) {
printHTML.setPrintService(ps);
}
if (printPS != null) {
printPS.setPrintService(ps);
}
if (printRaw != null) {
printRaw.setPrintService(ps);
}
}
/* public String getManualBreak() {
return manualBreak;
}*/
/* public void setManualBreak(String manualBreak) {
this.manualBreak = manualBreak;
}*/
public int getDocumentsPerSpool() {
return documentsPerSpool;
}
public void setDocumentsPerSpool(int pagesPer) {
this.documentsPerSpool = pagesPer;
}
public void setJobName(String jobName) {
this.jobName = jobName;
}
public String getJobName() {
return jobName;
}
public void findNetworkInfo() {
this.startFindingNetwork = true;
this.doneFindingNetwork = false;
}
private void set(Throwable t) {
this.t = t;
LogIt.log(t);
}
private void logStart() {
LogIt.log("QZ-PRINT " + VERSION);
LogIt.log("===== JAVASCRIPT LISTENER THREAD STARTED =====");
}
private void logStop() {
LogIt.log("===== JAVASCRIPT LISTENER THREAD STOPPED =====");
}
private void logPrint() {
LogIt.log("===== SENDING DATA TO THE PRINTER =====");
}
private void logFindPrinter() {
LogIt.log("===== SEARCHING FOR PRINTER =====");
}
private void logFindPorts() {
LogIt.log("===== SEARCHING FOR SERIAL PORTS =====");
}
private void logFindingNetwork() {
LogIt.log("===== GATHERING NETWORK INFORMATION =====");
}
private void logOpeningPort() {
LogIt.log("===== OPENING SERIAL PORT " + serialPortName + " =====");
}
private void logClosingPort() {
LogIt.log("===== CLOSING SERIAL PORT " + serialPortName + " =====");
}
private void logCommands(PrintHTML ph) {
logCommands(ph.get());
}
private void logCommands(PrintRaw pr) {
logCommands(pr.getOutput());
}
private void logCommands(byte[] commands) {
try {
logCommands(new String(commands, charset.name()));
} catch (UnsupportedEncodingException ex) {
LogIt.log(Level.WARNING, "Cannot decode raw bytes for debug output. "
+ "This could be due to incompatible charset for this JVM "
+ "or mixed charsets within one byte stream. Ignore this message"
+ " if printing seems fine.");
}
}
private void logCommands(String commands) {
LogIt.log("\r\n\r\n" + commands + "\r\n\r\n");
}
private void logAndPrint(PrintRaw pr, byte[] data) throws IOException, InterruptedException, PrintException, UnsupportedEncodingException {
logCommands(data);
pr.print(data);
}
private void logAndPrint(PrintRaw pr) throws IOException, PrintException, InterruptedException, UnsupportedEncodingException {
logCommands(pr);
if (reprint) {
pr.print();
} else {
pr.print();
pr.clear();
}
}
private void logAndPrint(PrintPostScript printPS) throws PrinterException {
logCommands(" <<" + file + ">>");
// Fix GitHub Bug #24
if (paperSize != null) {
printPS.setPaperSize(paperSize);
}
// Fix GitHub Bug #30, #31
if (copies > 0) {
printPS.setCopies(copies);
} else {
printPS.setCopies(1);
}
printPS.print();
psPrint = false;
}
private void logAndPrint(PrintHTML printHTML) throws PrinterException {
if (file != null) {
logCommands(" <<" + file + ">>");
}
logCommands(printHTML);
printHTML.print();
htmlPrint = false;
}
/*private void logAndPrint(String commands) throws PrintException, InterruptedException, UnsupportedEncodingException {
logCommands(commands);
getPrintRaw().print(commands);
}*/
/**
* Sets character encoding for raw printing only
*
* @param charset
*/
public void setEncoding(String charset) {
// Example: Charset.forName("US-ASCII");
System.out.println("Default charset encoding: " + Charset.defaultCharset().name());
try {
this.charset = Charset.forName(charset);
getPrintRaw().setCharset(Charset.forName(charset));
LogIt.log("Current applet charset encoding: " + this.charset.name());
} catch (IllegalCharsetNameException e) {
LogIt.log(Level.WARNING, "Could not find specified charset encoding: "
+ charset + ". Using default.", e);
}
}
public String getEncoding() {
return this.charset.displayName();
}
public Charset getCharset() {
return this.charset;
}
/**
* Can't seem to get this to work, removed from sample.html
*
* @param orientation
*
* @Deprecated public void setImageOrientation(String orientation) {
* getPrintPS().setOrientation(orientation); }
*/
/**
* Sets orientation (Portrait/Landscape) as to be picked up by PostScript
* printing only. Some documents (such as PDFs) have capabilities of
* supplying their own orientation in the document format. Some choose to
* allow the orientation to be defined by the printer definition (Advanced
* Printing Features, etc).
* <p>
* Example:</p>
* <code>setOrientation("landscape");</code>
* <code>setOrientation("portrait");</code>
* <code>setOrientation("reverse_landscape");</code>
*
* @param orientation
*/
public void setOrientation(String orientation) {
if (this.paperSize == null) {
LogIt.log(Level.WARNING, "A paper size must be specified before setting orientation!");
} else {
this.paperSize.setOrientation(orientation);
}
}
public void allowMultipleInstances(boolean allowMultiple) {
this.allowMultiple = allowMultiple;
LogIt.log("Allow multiple applet instances set to \"" + allowMultiple + "\"");
}
public void setAllowMultipleInstances(boolean allowMultiple) {
allowMultipleInstances(allowMultiple);
}
public boolean getAllowMultipleInstances() {
return allowMultiple;
}
/*public Boolean getMaintainAspect() {
return maintainAspect;
}*/
public void setAutoSize(boolean autoSize) {
if (this.paperSize == null) {
LogIt.log(Level.WARNING, "A paper size must be specified before setting auto-size!");
} else {
this.paperSize.setAutoSize(autoSize);
}
}
/*@Deprecated
public void setMaintainAspect(boolean maintainAspect) {
setAutoSize(maintainAspect);
}*/
public int getCopies() {
if (copies > 0) {
return copies;
} else {
return 1;
}
}
public void setCopies(int copies) {
if (copies > 0) {
this.copies = copies;
} else {
LogIt.log(Level.WARNING, "Copies must be greater than zero", new UnsupportedOperationException("Copies must be greater than zero"));
}
}
public PaperFormat getPaperSize() {
return paperSize;
}
public void setPaperSize(String width, String height) {
this.paperSize = PaperFormat.parseSize(width, height);
LogIt.log(Level.INFO, "Set paper size to " + paperSize.getWidth()
+ paperSize.getUnitDescription() + "x"
+ paperSize.getHeight() + paperSize.getUnitDescription());
}
public void setPaperSize(float width, float height) {
this.paperSize = new PaperFormat(width, height);
LogIt.log(Level.INFO, "Set paper size to " + paperSize.getWidth()
+ paperSize.getUnitDescription() + "x"
+ paperSize.getHeight() + paperSize.getUnitDescription());
}
public void setPaperSize(float width, float height, String units) {
this.paperSize = PaperFormat.parseSize("" + width, "" + height, units);
LogIt.log(Level.INFO, "Set paper size to " + paperSize.getWidth()
+ paperSize.getUnitDescription() + "x"
+ paperSize.getHeight() + paperSize.getUnitDescription());
}
}
| Fixes Chrome 36/bug #33
| qz-print/src/qz/PrintApplet.java | Fixes Chrome 36/bug #33 | <ide><path>z-print/src/qz/PrintApplet.java
<ide> public class PrintApplet extends Applet implements Runnable {
<ide>
<ide> private static final AtomicReference<Thread> thisThread = new AtomicReference<Thread>(null);
<del> public static final String VERSION = "1.8.5";
<add> public static final String VERSION = "1.8.6";
<ide> private static final long serialVersionUID = 2787955484074291340L;
<ide> public static final int APPEND_XML = 1;
<ide> public static final int APPEND_RAW = 2;
<ide> // Ubuntu doesn't properly raise exceptions when calling invalid
<ide> // functions, so this is the work-around
<ide> if (!type.equals("function")) {
<del> throw new JSException("Object \"" + function + "\" does not "
<add> throw new Exception("Object \"" + function + "\" does not "
<ide> + "exist or is not a function.");
<ide> }
<ide>
<ide> + "(...)\" instead.");
<ide> }
<ide> return true;
<del> } catch (JSException e) {
<add> } catch (Throwable e) {
<ide> //} catch (Throwable t) {
<ide> boolean success = false;
<ide> if (function.startsWith("qz")) { |
|
JavaScript | agpl-3.0 | 52b1f737c501cb31b114a5d8207a2c1fa1228e4b | 0 | agoravoting/agora-core-view,agoravoting/agora-core-view | angular.module('avRegistration')
.directive('avLogin', ['Authmethod', 'Patterns', '$location', '$parse', '$state', '$cookies', '$i18next', function(Authmethod, Patterns, $location, $parse, $state, $cookies, $i18next) {
// we use it as something similar to a controller here
function link(scope, element, attrs) {
var splitUrl = $location.absUrl().split('/');
var autheventid = splitUrl[splitUrl.length - 2];
scope.isAdmin = false;
if (autheventid === 'admin') {
scope.isAdmin = true;
}
scope.login = {};
scope.view = function(id) {
Authmethod.viewEvent(id)
.success(function(data) {
if (data.status === "ok") {
scope.apply(data.events);
} else {
scope.status = 'Not found';
document.querySelector(".error").style.display = "block";
}
})
.error(function(error) {
scope.status = 'Scan error: ' + error.message;
document.querySelector(".error").style.display = "block";
});
};
if (!scope.isAdmin) {
scope.view(autheventid);
}
scope.apply = function(authevent) {
scope.method = authevent['auth_method'];
scope.name = authevent['name'];
};
scope.patterns = function(name) {
return Patterns.get(name);
};
scope.loginUser = function(valid) {
if (!valid) {
return;
}
var data = {
'auth-method': scope.method,
'auth-data': {
'email': scope.login.email,
'password': scope.login.password,
'authevent': autheventid,
}
};
Authmethod.login(data)
.success(function(data) {
if (data.status === "ok") {
scope.khmac = data.khmac;
$cookies.authevent = autheventid;
$cookies.userid = data['username'];
$cookies.user = scope.login.email;
$cookies.auth = data['auth-token'];
Authmethod.setAuth($cookies.auth);
if (scope.isAdmin) {
$state.go('admin.elections');
} else {
$state.go('registration.success');
}
} else {
scope.status = 'Not found';
scope.error = $i18next('avRegistration.invalidCredentials');
}
})
.error(function(error) {
scope.status = 'Registration error: ' + error.message;
scope.error = $i18next('avRegistration.errorRegistration');
});
};
scope.goSignup = function() {
$state.go('registration.register', {id: autheventid});
};
scope.forgotPassword = function() {
console.log('forgotPassword');
};
}
return {
restrict: 'AE',
scope: {
},
link: link,
templateUrl: 'avRegistration/login-directive/login-directive.html'
};
}]);
| avRegistration/login-directive/login-directive.js | angular.module('avRegistration')
.directive('avLogin', ['Authmethod', '$location', '$parse', '$state', '$cookies', '$i18next', function(Authmethod, $location, $parse, $state, $cookies, $i18next) {
// we use it as something similar to a controller here
function link(scope, element, attrs) {
var splitUrl = $location.absUrl().split('/');
var autheventid = splitUrl[splitUrl.length - 2];
scope.isAdmin = false;
if (autheventid === 'admin') {
scope.isAdmin = true;
}
scope.login = {};
scope.view = function(id) {
Authmethod.viewEvent(id)
.success(function(data) {
if (data.status === "ok") {
scope.apply(data.events);
} else {
scope.status = 'Not found';
document.querySelector(".error").style.display = "block";
}
})
.error(function(error) {
scope.status = 'Scan error: ' + error.message;
document.querySelector(".error").style.display = "block";
});
};
if (!scope.isAdmin) {
scope.view(autheventid);
}
scope.apply = function(authevent) {
scope.method = authevent['auth_method'];
scope.name = authevent['name'];
};
scope.patterns = function(name) {
if (name === 'mail' || name === 'email') {
return /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
} else {
return /.*/;
}
};
scope.loginUser = function(valid) {
if (!valid) {
return;
}
var data = {
'auth-method': scope.method,
'auth-data': {
'email': scope.login.email,
'password': scope.login.password,
'authevent': autheventid,
}
};
Authmethod.login(data)
.success(function(data) {
if (data.status === "ok") {
scope.khmac = data.khmac;
$cookies.authevent = autheventid;
$cookies.userid = data['username'];
$cookies.user = scope.login.email;
$cookies.auth = data['auth-token'];
Authmethod.setAuth($cookies.auth);
if (scope.isAdmin) {
$state.go('admin.elections');
} else {
$state.go('registration.success');
}
} else {
scope.status = 'Not found';
scope.error = $i18next('avRegistration.invalidCredentials');
}
})
.error(function(error) {
scope.status = 'Registration error: ' + error.message;
scope.error = $i18next('avRegistration.errorRegistration');
});
};
scope.goSignup = function() {
$state.go('registration.register', {id: autheventid});
};
scope.forgotPassword = function() {
console.log('forgotPassword');
};
}
return {
restrict: 'AE',
scope: {
},
link: link,
templateUrl: 'avRegistration/login-directive/login-directive.html'
};
}]);
| Used patterns service in login-directive.
| avRegistration/login-directive/login-directive.js | Used patterns service in login-directive. | <ide><path>vRegistration/login-directive/login-directive.js
<ide> angular.module('avRegistration')
<del> .directive('avLogin', ['Authmethod', '$location', '$parse', '$state', '$cookies', '$i18next', function(Authmethod, $location, $parse, $state, $cookies, $i18next) {
<add> .directive('avLogin', ['Authmethod', 'Patterns', '$location', '$parse', '$state', '$cookies', '$i18next', function(Authmethod, Patterns, $location, $parse, $state, $cookies, $i18next) {
<ide> // we use it as something similar to a controller here
<ide> function link(scope, element, attrs) {
<ide> var splitUrl = $location.absUrl().split('/');
<ide> };
<ide>
<ide> scope.patterns = function(name) {
<del> if (name === 'mail' || name === 'email') {
<del> return /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
<del> } else {
<del> return /.*/;
<del> }
<add> return Patterns.get(name);
<ide> };
<ide>
<ide> scope.loginUser = function(valid) { |
|
Java | mit | 6eba5a87bc82bcc4895486e3905b10cd0956b142 | 0 | damianszczepanik/jenkins,keyurpatankar/hudson,v1v/jenkins,goldchang/jenkins,deadmoose/jenkins,tfennelly/jenkins,pselle/jenkins,CodeShane/jenkins,olivergondza/jenkins,kohsuke/hudson,alvarolobato/jenkins,rlugojr/jenkins,FarmGeek4Life/jenkins,recena/jenkins,everyonce/jenkins,deadmoose/jenkins,guoxu0514/jenkins,SebastienGllmt/jenkins,DoctorQ/jenkins,paulmillar/jenkins,ndeloof/jenkins,mdonohue/jenkins,MadsNielsen/jtemp,verbitan/jenkins,mpeltonen/jenkins,dbroady1/jenkins,tfennelly/jenkins,chbiel/jenkins,escoem/jenkins,mrooney/jenkins,Wilfred/jenkins,bkmeneguello/jenkins,bpzhang/jenkins,khmarbaise/jenkins,pjanouse/jenkins,andresrc/jenkins,nandan4/Jenkins,NehemiahMi/jenkins,MichaelPranovich/jenkins_sc,singh88/jenkins,hemantojhaa/jenkins,AustinKwang/jenkins,gitaccountforprashant/gittest,seanlin816/jenkins,samatdav/jenkins,lilyJi/jenkins,goldchang/jenkins,guoxu0514/jenkins,nandan4/Jenkins,bpzhang/jenkins,viqueen/jenkins,jcsirot/jenkins,godfath3r/jenkins,dbroady1/jenkins,wangyikai/jenkins,ChrisA89/jenkins,1and1/jenkins,shahharsh/jenkins,aduprat/jenkins,everyonce/jenkins,ajshastri/jenkins,msrb/jenkins,fbelzunc/jenkins,AustinKwang/jenkins,ChrisA89/jenkins,keyurpatankar/hudson,khmarbaise/jenkins,protazy/jenkins,arcivanov/jenkins,vjuranek/jenkins,viqueen/jenkins,soenter/jenkins,vijayto/jenkins,dariver/jenkins,jcarrothers-sap/jenkins,soenter/jenkins,liupugong/jenkins,patbos/jenkins,lordofthejars/jenkins,mdonohue/jenkins,luoqii/jenkins,soenter/jenkins,protazy/jenkins,albers/jenkins,arunsingh/jenkins,Jimilian/jenkins,paulwellnerbou/jenkins,Krasnyanskiy/jenkins,bkmeneguello/jenkins,morficus/jenkins,christ66/jenkins,ns163/jenkins,christ66/jenkins,MadsNielsen/jtemp,chbiel/jenkins,akshayabd/jenkins,Jimilian/jenkins,jhoblitt/jenkins,dennisjlee/jenkins,goldchang/jenkins,my7seven/jenkins,Vlatombe/jenkins,mrooney/jenkins,batmat/jenkins,lordofthejars/jenkins,escoem/jenkins,Krasnyanskiy/jenkins,verbitan/jenkins,scoheb/jenkins,tangkun7
5/jenkins,maikeffi/hudson,MichaelPranovich/jenkins_sc,deadmoose/jenkins,AustinKwang/jenkins,aheritier/jenkins,svanoort/jenkins,verbitan/jenkins,vvv444/jenkins,kzantow/jenkins,iqstack/jenkins,maikeffi/hudson,aquarellian/jenkins,bkmeneguello/jenkins,SenolOzer/jenkins,luoqii/jenkins,svanoort/jenkins,batmat/jenkins,aquarellian/jenkins,rashmikanta-1984/jenkins,batmat/jenkins,morficus/jenkins,Wilfred/jenkins,sathiya-mit/jenkins,jcsirot/jenkins,ns163/jenkins,FarmGeek4Life/jenkins,vlajos/jenkins,arunsingh/jenkins,Krasnyanskiy/jenkins,SenolOzer/jenkins,github-api-test-org/jenkins,verbitan/jenkins,Wilfred/jenkins,bpzhang/jenkins,dennisjlee/jenkins,oleg-nenashev/jenkins,v1v/jenkins,christ66/jenkins,ydubreuil/jenkins,tfennelly/jenkins,damianszczepanik/jenkins,DoctorQ/jenkins,ndeloof/jenkins,kohsuke/hudson,escoem/jenkins,pjanouse/jenkins,nandan4/Jenkins,shahharsh/jenkins,soenter/jenkins,vlajos/jenkins,jglick/jenkins,tastatur/jenkins,rlugojr/jenkins,hplatou/jenkins,mcanthony/jenkins,gitaccountforprashant/gittest,hashar/jenkins,h4ck3rm1k3/jenkins,gorcz/jenkins,ajshastri/jenkins,FTG-003/jenkins,1and1/jenkins,KostyaSha/jenkins,nandan4/Jenkins,hplatou/jenkins,samatdav/jenkins,lilyJi/jenkins,Vlatombe/jenkins,lordofthejars/jenkins,synopsys-arc-oss/jenkins,intelchen/jenkins,luoqii/jenkins,jcarrothers-sap/jenkins,6WIND/jenkins,varmenise/jenkins,dennisjlee/jenkins,mcanthony/jenkins,mrooney/jenkins,bpzhang/jenkins,ChrisA89/jenkins,6WIND/jenkins,andresrc/jenkins,hemantojhaa/jenkins,msrb/jenkins,1and1/jenkins,liorhson/jenkins,paulmillar/jenkins,wuwen5/jenkins,hplatou/jenkins,stephenc/jenkins,chbiel/jenkins,arunsingh/jenkins,ns163/jenkins,shahharsh/jenkins,AustinKwang/jenkins,paulwellnerbou/jenkins,ydubreuil/jenkins,msrb/jenkins,svanoort/jenkins,petermarcoen/jenkins,my7seven/jenkins,Ykus/jenkins,v1v/jenkins,lilyJi/jenkins,iqstack/jenkins,synopsys-arc-oss/jenkins,khmarbaise/jenkins,ErikVerheul/jenkins,ikedam/jenkins,tangkun75/jenkins,hemantojhaa/jenkins,stephenc/jenkins,wangyikai/jenkins,vvv44
4/jenkins,jenkinsci/jenkins,jenkinsci/jenkins,thomassuckow/jenkins,fbelzunc/jenkins,dbroady1/jenkins,ErikVerheul/jenkins,jpbriend/jenkins,jpederzolli/jenkins-1,SebastienGllmt/jenkins,gorcz/jenkins,jenkinsci/jenkins,github-api-test-org/jenkins,liupugong/jenkins,kohsuke/hudson,liorhson/jenkins,albers/jenkins,mattclark/jenkins,DanielWeber/jenkins,goldchang/jenkins,ydubreuil/jenkins,albers/jenkins,ndeloof/jenkins,1and1/jenkins,ns163/jenkins,dbroady1/jenkins,everyonce/jenkins,brunocvcunha/jenkins,oleg-nenashev/jenkins,nandan4/Jenkins,gusreiber/jenkins,synopsys-arc-oss/jenkins,andresrc/jenkins,stephenc/jenkins,ChrisA89/jenkins,andresrc/jenkins,maikeffi/hudson,jzjzjzj/jenkins,ndeloof/jenkins,SebastienGllmt/jenkins,jcarrothers-sap/jenkins,paulmillar/jenkins,soenter/jenkins,vvv444/jenkins,keyurpatankar/hudson,kzantow/jenkins,luoqii/jenkins,recena/jenkins,goldchang/jenkins,gitaccountforprashant/gittest,DoctorQ/jenkins,kohsuke/hudson,FTG-003/jenkins,albers/jenkins,liorhson/jenkins,rsandell/jenkins,oleg-nenashev/jenkins,pselle/jenkins,sathiya-mit/jenkins,batmat/jenkins,aheritier/jenkins,rashmikanta-1984/jenkins,mdonohue/jenkins,jglick/jenkins,akshayabd/jenkins,scoheb/jenkins,azweb76/jenkins,ikedam/jenkins,hemantojhaa/jenkins,lilyJi/jenkins,arcivanov/jenkins,recena/jenkins,aquarellian/jenkins,arcivanov/jenkins,kzantow/jenkins,recena/jenkins,ChrisA89/jenkins,petermarcoen/jenkins,ns163/jenkins,Jimilian/jenkins,aheritier/jenkins,noikiy/jenkins,bkmeneguello/jenkins,maikeffi/hudson,ajshastri/jenkins,FarmGeek4Life/jenkins,ErikVerheul/jenkins,aldaris/jenkins,godfath3r/jenkins,jcarrothers-sap/jenkins,synopsys-arc-oss/jenkins,synopsys-arc-oss/jenkins,verbitan/jenkins,mattclark/jenkins,petermarcoen/jenkins,ikedam/jenkins,NehemiahMi/jenkins,varmenise/jenkins,amuniz/jenkins,Ykus/jenkins,ydubreuil/jenkins,rsandell/jenkins,recena/jenkins,DoctorQ/jenkins,jpbriend/jenkins,olivergondza/jenkins,guoxu0514/jenkins,noikiy/jenkins,vijayto/jenkins,viqueen/jenkins,wangyikai/jenkins,fbelzunc/jenkins,pja
nouse/jenkins,mpeltonen/jenkins,liorhson/jenkins,github-api-test-org/jenkins,khmarbaise/jenkins,Wilfred/jenkins,thomassuckow/jenkins,dennisjlee/jenkins,NehemiahMi/jenkins,fbelzunc/jenkins,bkmeneguello/jenkins,azweb76/jenkins,stephenc/jenkins,vjuranek/jenkins,rlugojr/jenkins,seanlin816/jenkins,aduprat/jenkins,ErikVerheul/jenkins,dennisjlee/jenkins,dariver/jenkins,fbelzunc/jenkins,ajshastri/jenkins,vjuranek/jenkins,h4ck3rm1k3/jenkins,amruthsoft9/Jenkis,everyonce/jenkins,1and1/jenkins,daniel-beck/jenkins,evernat/jenkins,khmarbaise/jenkins,svanoort/jenkins,292388900/jenkins,dennisjlee/jenkins,arcivanov/jenkins,recena/jenkins,SebastienGllmt/jenkins,varmenise/jenkins,jcarrothers-sap/jenkins,csimons/jenkins,lilyJi/jenkins,jcarrothers-sap/jenkins,damianszczepanik/jenkins,mpeltonen/jenkins,jk47/jenkins,gorcz/jenkins,aheritier/jenkins,SenolOzer/jenkins,stephenc/jenkins,guoxu0514/jenkins,paulwellnerbou/jenkins,MichaelPranovich/jenkins_sc,ErikVerheul/jenkins,vijayto/jenkins,oleg-nenashev/jenkins,rashmikanta-1984/jenkins,Jochen-A-Fuerbacher/jenkins,wangyikai/jenkins,Krasnyanskiy/jenkins,escoem/jenkins,svanoort/jenkins,intelchen/jenkins,daspilker/jenkins,sathiya-mit/jenkins,pselle/jenkins,lindzh/jenkins,jenkinsci/jenkins,jzjzjzj/jenkins,huybrechts/hudson,hemantojhaa/jenkins,duzifang/my-jenkins,CodeShane/jenkins,thomassuckow/jenkins,dbroady1/jenkins,rashmikanta-1984/jenkins,Jochen-A-Fuerbacher/jenkins,jcsirot/jenkins,singh88/jenkins,vvv444/jenkins,guoxu0514/jenkins,everyonce/jenkins,csimons/jenkins,MadsNielsen/jtemp,daspilker/jenkins,lindzh/jenkins,elkingtonmcb/jenkins,duzifang/my-jenkins,liupugong/jenkins,godfath3r/jenkins,pselle/jenkins,yonglehou/jenkins,gorcz/jenkins,FarmGeek4Life/jenkins,SenolOzer/jenkins,rlugojr/jenkins,DanielWeber/jenkins,oleg-nenashev/jenkins,Jochen-A-Fuerbacher/jenkins,amuniz/jenkins,dariver/jenkins,wuwen5/jenkins,evernat/jenkins,Ykus/jenkins,albers/jenkins,aduprat/jenkins,yonglehou/jenkins,aldaris/jenkins,gitaccountforprashant/gittest,khmarbaise/jenkins,i
ntelchen/jenkins,dbroady1/jenkins,protazy/jenkins,jcsirot/jenkins,pjanouse/jenkins,thomassuckow/jenkins,csimons/jenkins,AustinKwang/jenkins,jk47/jenkins,rlugojr/jenkins,seanlin816/jenkins,samatdav/jenkins,jk47/jenkins,liupugong/jenkins,damianszczepanik/jenkins,noikiy/jenkins,github-api-test-org/jenkins,aduprat/jenkins,scoheb/jenkins,wuwen5/jenkins,iqstack/jenkins,jhoblitt/jenkins,MichaelPranovich/jenkins_sc,lindzh/jenkins,amuniz/jenkins,Jimilian/jenkins,huybrechts/hudson,DoctorQ/jenkins,petermarcoen/jenkins,MarkEWaite/jenkins,kohsuke/hudson,mrooney/jenkins,ajshastri/jenkins,aheritier/jenkins,paulwellnerbou/jenkins,lindzh/jenkins,morficus/jenkins,wuwen5/jenkins,arcivanov/jenkins,maikeffi/hudson,arunsingh/jenkins,SebastienGllmt/jenkins,keyurpatankar/hudson,rlugojr/jenkins,aldaris/jenkins,DoctorQ/jenkins,ikedam/jenkins,ErikVerheul/jenkins,goldchang/jenkins,msrb/jenkins,liupugong/jenkins,duzifang/my-jenkins,escoem/jenkins,KostyaSha/jenkins,gorcz/jenkins,rlugojr/jenkins,deadmoose/jenkins,singh88/jenkins,daniel-beck/jenkins,mattclark/jenkins,hemantojhaa/jenkins,Ykus/jenkins,SebastienGllmt/jenkins,vlajos/jenkins,vlajos/jenkins,Jochen-A-Fuerbacher/jenkins,tfennelly/jenkins,ikedam/jenkins,KostyaSha/jenkins,evernat/jenkins,mcanthony/jenkins,aduprat/jenkins,verbitan/jenkins,dariver/jenkins,azweb76/jenkins,aheritier/jenkins,noikiy/jenkins,csimons/jenkins,viqueen/jenkins,pselle/jenkins,gusreiber/jenkins,akshayabd/jenkins,jpbriend/jenkins,jenkinsci/jenkins,daspilker/jenkins,guoxu0514/jenkins,CodeShane/jenkins,mattclark/jenkins,goldchang/jenkins,daniel-beck/jenkins,dennisjlee/jenkins,verbitan/jenkins,lindzh/jenkins,paulmillar/jenkins,amruthsoft9/Jenkis,hashar/jenkins,bpzhang/jenkins,intelchen/jenkins,oleg-nenashev/jenkins,everyonce/jenkins,AustinKwang/jenkins,noikiy/jenkins,noikiy/jenkins,SenolOzer/jenkins,elkingtonmcb/jenkins,ndeloof/jenkins,ns163/jenkins,MarkEWaite/jenkins,rsandell/jenkins,liorhson/jenkins,damianszczepanik/jenkins,morficus/jenkins,shahharsh/jenkins,jzjzjzj/jenki
ns,Vlatombe/jenkins,sathiya-mit/jenkins,amruthsoft9/Jenkis,hplatou/jenkins,keyurpatankar/hudson,lordofthejars/jenkins,synopsys-arc-oss/jenkins,MarkEWaite/jenkins,stephenc/jenkins,v1v/jenkins,MichaelPranovich/jenkins_sc,6WIND/jenkins,tfennelly/jenkins,yonglehou/jenkins,KostyaSha/jenkins,wuwen5/jenkins,christ66/jenkins,ns163/jenkins,amuniz/jenkins,mattclark/jenkins,nandan4/Jenkins,sathiya-mit/jenkins,aldaris/jenkins,wuwen5/jenkins,jpederzolli/jenkins-1,vlajos/jenkins,noikiy/jenkins,varmenise/jenkins,my7seven/jenkins,guoxu0514/jenkins,elkingtonmcb/jenkins,hashar/jenkins,jenkinsci/jenkins,daspilker/jenkins,evernat/jenkins,CodeShane/jenkins,gusreiber/jenkins,v1v/jenkins,scoheb/jenkins,singh88/jenkins,morficus/jenkins,jglick/jenkins,scoheb/jenkins,FTG-003/jenkins,jpederzolli/jenkins-1,mdonohue/jenkins,wangyikai/jenkins,vjuranek/jenkins,liupugong/jenkins,keyurpatankar/hudson,ajshastri/jenkins,Wilfred/jenkins,varmenise/jenkins,hashar/jenkins,jk47/jenkins,alvarolobato/jenkins,Jochen-A-Fuerbacher/jenkins,samatdav/jenkins,MadsNielsen/jtemp,hashar/jenkins,csimons/jenkins,rsandell/jenkins,daniel-beck/jenkins,brunocvcunha/jenkins,github-api-test-org/jenkins,godfath3r/jenkins,tastatur/jenkins,jpbriend/jenkins,escoem/jenkins,brunocvcunha/jenkins,singh88/jenkins,msrb/jenkins,vvv444/jenkins,csimons/jenkins,oleg-nenashev/jenkins,gusreiber/jenkins,batmat/jenkins,NehemiahMi/jenkins,godfath3r/jenkins,rashmikanta-1984/jenkins,morficus/jenkins,gitaccountforprashant/gittest,wuwen5/jenkins,DanielWeber/jenkins,tangkun75/jenkins,ChrisA89/jenkins,Vlatombe/jenkins,bpzhang/jenkins,DanielWeber/jenkins,alvarolobato/jenkins,jhoblitt/jenkins,jcarrothers-sap/jenkins,jzjzjzj/jenkins,olivergondza/jenkins,albers/jenkins,FarmGeek4Life/jenkins,deadmoose/jenkins,thomassuckow/jenkins,godfath3r/jenkins,amuniz/jenkins,christ66/jenkins,lindzh/jenkins,paulwellnerbou/jenkins,chbiel/jenkins,iqstack/jenkins,jcsirot/jenkins,brunocvcunha/jenkins,mdonohue/jenkins,tangkun75/jenkins,aheritier/jenkins,everyonce/jenkins,v
1v/jenkins,lilyJi/jenkins,vvv444/jenkins,1and1/jenkins,Ykus/jenkins,aduprat/jenkins,jpederzolli/jenkins-1,KostyaSha/jenkins,vijayto/jenkins,FarmGeek4Life/jenkins,1and1/jenkins,patbos/jenkins,huybrechts/hudson,ajshastri/jenkins,iqstack/jenkins,yonglehou/jenkins,CodeShane/jenkins,h4ck3rm1k3/jenkins,rsandell/jenkins,amuniz/jenkins,shahharsh/jenkins,h4ck3rm1k3/jenkins,jpederzolli/jenkins-1,paulmillar/jenkins,paulmillar/jenkins,amruthsoft9/Jenkis,vvv444/jenkins,petermarcoen/jenkins,kohsuke/hudson,mpeltonen/jenkins,ndeloof/jenkins,hemantojhaa/jenkins,godfath3r/jenkins,tastatur/jenkins,chbiel/jenkins,akshayabd/jenkins,msrb/jenkins,h4ck3rm1k3/jenkins,dariver/jenkins,intelchen/jenkins,khmarbaise/jenkins,MichaelPranovich/jenkins_sc,thomassuckow/jenkins,daspilker/jenkins,dariver/jenkins,damianszczepanik/jenkins,singh88/jenkins,petermarcoen/jenkins,SenolOzer/jenkins,jglick/jenkins,viqueen/jenkins,arunsingh/jenkins,gorcz/jenkins,lordofthejars/jenkins,jpederzolli/jenkins-1,brunocvcunha/jenkins,synopsys-arc-oss/jenkins,mcanthony/jenkins,aldaris/jenkins,elkingtonmcb/jenkins,jhoblitt/jenkins,arcivanov/jenkins,tastatur/jenkins,hashar/jenkins,duzifang/my-jenkins,paulwellnerbou/jenkins,mattclark/jenkins,my7seven/jenkins,jenkinsci/jenkins,wangyikai/jenkins,my7seven/jenkins,sathiya-mit/jenkins,daspilker/jenkins,tastatur/jenkins,chbiel/jenkins,ErikVerheul/jenkins,MarkEWaite/jenkins,evernat/jenkins,mdonohue/jenkins,tastatur/jenkins,jenkinsci/jenkins,duzifang/my-jenkins,my7seven/jenkins,tfennelly/jenkins,mcanthony/jenkins,azweb76/jenkins,6WIND/jenkins,kzantow/jenkins,vjuranek/jenkins,v1v/jenkins,maikeffi/hudson,andresrc/jenkins,keyurpatankar/hudson,petermarcoen/jenkins,evernat/jenkins,rsandell/jenkins,KostyaSha/jenkins,FTG-003/jenkins,olivergondza/jenkins,fbelzunc/jenkins,elkingtonmcb/jenkins,morficus/jenkins,Krasnyanskiy/jenkins,Vlatombe/jenkins,huybrechts/hudson,DanielWeber/jenkins,KostyaSha/jenkins,huybrechts/hudson,intelchen/jenkins,Wilfred/jenkins,vijayto/jenkins,mpeltonen/jenkins,nand
an4/Jenkins,Vlatombe/jenkins,amuniz/jenkins,gitaccountforprashant/gittest,kzantow/jenkins,gorcz/jenkins,jpbriend/jenkins,amruthsoft9/Jenkis,Wilfred/jenkins,patbos/jenkins,FarmGeek4Life/jenkins,292388900/jenkins,pjanouse/jenkins,liorhson/jenkins,rashmikanta-1984/jenkins,FTG-003/jenkins,seanlin816/jenkins,gorcz/jenkins,huybrechts/hudson,mattclark/jenkins,292388900/jenkins,protazy/jenkins,6WIND/jenkins,lordofthejars/jenkins,azweb76/jenkins,bkmeneguello/jenkins,lilyJi/jenkins,aduprat/jenkins,my7seven/jenkins,gusreiber/jenkins,ikedam/jenkins,luoqii/jenkins,ikedam/jenkins,rashmikanta-1984/jenkins,batmat/jenkins,6WIND/jenkins,jzjzjzj/jenkins,patbos/jenkins,mcanthony/jenkins,MarkEWaite/jenkins,elkingtonmcb/jenkins,shahharsh/jenkins,maikeffi/hudson,olivergondza/jenkins,singh88/jenkins,aldaris/jenkins,rsandell/jenkins,Vlatombe/jenkins,luoqii/jenkins,NehemiahMi/jenkins,mdonohue/jenkins,jcarrothers-sap/jenkins,Jochen-A-Fuerbacher/jenkins,pjanouse/jenkins,pselle/jenkins,daniel-beck/jenkins,yonglehou/jenkins,huybrechts/hudson,christ66/jenkins,292388900/jenkins,liupugong/jenkins,damianszczepanik/jenkins,daniel-beck/jenkins,h4ck3rm1k3/jenkins,NehemiahMi/jenkins,damianszczepanik/jenkins,azweb76/jenkins,github-api-test-org/jenkins,ndeloof/jenkins,jcsirot/jenkins,hplatou/jenkins,thomassuckow/jenkins,akshayabd/jenkins,aquarellian/jenkins,jhoblitt/jenkins,jglick/jenkins,KostyaSha/jenkins,alvarolobato/jenkins,fbelzunc/jenkins,patbos/jenkins,wangyikai/jenkins,evernat/jenkins,daspilker/jenkins,rsandell/jenkins,seanlin816/jenkins,AustinKwang/jenkins,tangkun75/jenkins,jzjzjzj/jenkins,tangkun75/jenkins,batmat/jenkins,MadsNielsen/jtemp,CodeShane/jenkins,DoctorQ/jenkins,jglick/jenkins,MichaelPranovich/jenkins_sc,hplatou/jenkins,alvarolobato/jenkins,mrooney/jenkins,andresrc/jenkins,jk47/jenkins,arcivanov/jenkins,kzantow/jenkins,andresrc/jenkins,deadmoose/jenkins,christ66/jenkins,soenter/jenkins,scoheb/jenkins,292388900/jenkins,aldaris/jenkins,patbos/jenkins,tangkun75/jenkins,duzifang/my-jenkins,
Ykus/jenkins,Krasnyanskiy/jenkins,samatdav/jenkins,varmenise/jenkins,arunsingh/jenkins,viqueen/jenkins,recena/jenkins,ikedam/jenkins,dariver/jenkins,ydubreuil/jenkins,lordofthejars/jenkins,daniel-beck/jenkins,tastatur/jenkins,aquarellian/jenkins,jhoblitt/jenkins,kohsuke/hudson,albers/jenkins,elkingtonmcb/jenkins,olivergondza/jenkins,DanielWeber/jenkins,lindzh/jenkins,deadmoose/jenkins,github-api-test-org/jenkins,viqueen/jenkins,amruthsoft9/Jenkis,patbos/jenkins,vijayto/jenkins,iqstack/jenkins,daniel-beck/jenkins,jk47/jenkins,protazy/jenkins,mpeltonen/jenkins,svanoort/jenkins,Jimilian/jenkins,brunocvcunha/jenkins,jzjzjzj/jenkins,goldchang/jenkins,ChrisA89/jenkins,gusreiber/jenkins,Ykus/jenkins,bpzhang/jenkins,aquarellian/jenkins,SenolOzer/jenkins,jcsirot/jenkins,alvarolobato/jenkins,jzjzjzj/jenkins,iqstack/jenkins,DoctorQ/jenkins,shahharsh/jenkins,yonglehou/jenkins,MarkEWaite/jenkins,chbiel/jenkins,paulwellnerbou/jenkins,Jimilian/jenkins,vlajos/jenkins,akshayabd/jenkins,mrooney/jenkins,jhoblitt/jenkins,paulmillar/jenkins,jglick/jenkins,soenter/jenkins,arunsingh/jenkins,jpbriend/jenkins,bkmeneguello/jenkins,CodeShane/jenkins,gitaccountforprashant/gittest,292388900/jenkins,msrb/jenkins,escoem/jenkins,Krasnyanskiy/jenkins,duzifang/my-jenkins,vjuranek/jenkins,sathiya-mit/jenkins,MarkEWaite/jenkins,azweb76/jenkins,ydubreuil/jenkins,jk47/jenkins,csimons/jenkins,vjuranek/jenkins,hashar/jenkins,tfennelly/jenkins,samatdav/jenkins,keyurpatankar/hudson,SebastienGllmt/jenkins,jpbriend/jenkins,Jimilian/jenkins,varmenise/jenkins,seanlin816/jenkins,seanlin816/jenkins,MadsNielsen/jtemp,pjanouse/jenkins,MadsNielsen/jtemp,aquarellian/jenkins,mpeltonen/jenkins,MarkEWaite/jenkins,amruthsoft9/Jenkis,maikeffi/hudson,FTG-003/jenkins,svanoort/jenkins,stephenc/jenkins,alvarolobato/jenkins,akshayabd/jenkins,vijayto/jenkins,FTG-003/jenkins,protazy/jenkins,liorhson/jenkins,mcanthony/jenkins,luoqii/jenkins,gusreiber/jenkins,yonglehou/jenkins,shahharsh/jenkins,intelchen/jenkins,samatdav/jenkins,p
selle/jenkins,kohsuke/hudson,github-api-test-org/jenkins,vlajos/jenkins,jpederzolli/jenkins-1,olivergondza/jenkins,hplatou/jenkins,NehemiahMi/jenkins,292388900/jenkins,mrooney/jenkins,DanielWeber/jenkins,brunocvcunha/jenkins,h4ck3rm1k3/jenkins,ydubreuil/jenkins,kzantow/jenkins,scoheb/jenkins,protazy/jenkins,6WIND/jenkins,Jochen-A-Fuerbacher/jenkins,dbroady1/jenkins | /*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, id:cactusman, Seiji Sogabe, Olivier Lamy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.maven;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.maven.reporters.MavenAbstractArtifactRecord;
import hudson.maven.reporters.MavenArtifactRecord;
import hudson.maven.settings.SettingConfig;
import hudson.maven.settings.SettingsProviderUtils;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.Node;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.remoting.Callable;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Maven.MavenInstallation;
import hudson.tasks.Publisher;
import hudson.tasks.Recorder;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.Map.Entry;
import java.util.Properties;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.deployer.ArtifactDeploymentException;
import org.apache.maven.artifact.metadata.ArtifactMetadata;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import org.apache.maven.artifact.repository.Authentication;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.apache.maven.cli.BatchModeMavenTransferListener;
import org.apache.maven.repository.Proxy;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
/**
* {@link Publisher} for {@link MavenModuleSetBuild} to deploy artifacts
* after a build is fully succeeded.
*
* @author Kohsuke Kawaguchi
* @since 1.191
*/
public class RedeployPublisher extends Recorder {
/**
* Repository ID. This is matched up with <tt>~/.m2/settings.xml</tt> for authentication related information.
*/
public final String id;
/**
* Repository URL to deploy artifacts to.
*/
public final String url;
public final boolean uniqueVersion;
public final boolean evenIfUnstable;
public final String releaseEnvVar;
/**
* For backward compatibility
*/
@Deprecated
public RedeployPublisher(String id, String url, boolean uniqueVersion) {
this(id, url, uniqueVersion, false, null);
}
/**
* @since 1.347
*/
@Deprecated
public RedeployPublisher(String id, String url, boolean uniqueVersion, boolean evenIfUnstable) {
this(id, url, uniqueVersion, evenIfUnstable, null);
}
@DataBoundConstructor
public RedeployPublisher(String id, String url, boolean uniqueVersion, boolean evenIfUnstable, String releaseEnvVar) {
this.id = id;
this.url = Util.fixEmptyAndTrim(url);
this.uniqueVersion = uniqueVersion;
this.evenIfUnstable = evenIfUnstable;
this.releaseEnvVar = Util.fixEmptyAndTrim(releaseEnvVar);
}
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
if (build.getResult().isWorseThan(getTreshold()))
return true; // build failed. Don't publish
/**
* Check if we should skip or not
*/
if (releaseEnvVar != null) {
String envVarValue = build.getEnvironment(listener).get(releaseEnvVar);
if ("true".equals(envVarValue)) { // null or false are ignored
listener.getLogger().println("[INFO] Skipping deploying artifact as release build is in progress.");
return true; // skip the deploy
}
}
List<MavenAbstractArtifactRecord> mavenAbstractArtifactRecords = getActions(build, listener);
if (mavenAbstractArtifactRecords == null || mavenAbstractArtifactRecords.isEmpty()) {
listener.getLogger().println("[ERROR] No artifacts are recorded. Is this a Maven project?");
build.setResult(Result.FAILURE);
return true;
}
if(build instanceof MavenModuleSetBuild && ((MavenModuleSetBuild)build).getParent().isArchivingDisabled()){
listener.getLogger().println("[ERROR] You cannot use the \"Deploy artifacts to Maven repository\" feature if you " +
"disabled automatic artifact archiving");
build.setResult(Result.FAILURE);
return true;
}
long startupTime = Calendar.getInstance().getTimeInMillis();
try {
MavenEmbedder embedder = createEmbedder(listener, build);
ArtifactRepositoryLayout layout =
(ArtifactRepositoryLayout) embedder.lookup(ArtifactRepositoryLayout.ROLE, "default");
ArtifactRepositoryFactory factory =
(ArtifactRepositoryFactory) embedder.lookup(ArtifactRepositoryFactory.ROLE);
ArtifactRepository artifactRepository = null;
if (url != null) {
// By default we try to get the repository definition from the job configuration
artifactRepository = getDeploymentRepository(factory, layout, id, url);
}
for (MavenAbstractArtifactRecord mavenAbstractArtifactRecord : mavenAbstractArtifactRecords) {
if (artifactRepository == null && mavenAbstractArtifactRecord instanceof MavenArtifactRecord) {
// If no repository definition is set on the job level we try to take it from the POM
MavenArtifactRecord mavenArtifactRecord = (MavenArtifactRecord) mavenAbstractArtifactRecord;
artifactRepository = getDeploymentRepository(factory, layout, mavenArtifactRecord.repositoryId, mavenArtifactRecord.repositoryUrl);
}
if (artifactRepository == null) {
listener.getLogger().println("[ERROR] No Repository settings defined in the job configuration or distributionManagement of the module.");
build.setResult(Result.FAILURE);
return true;
}
mavenAbstractArtifactRecord.deploy(embedder, artifactRepository, listener);
}
listener.getLogger().println("[INFO] Deployment done in " + Util.getTimeSpanString(Calendar.getInstance().getTimeInMillis() - startupTime));
return true;
} catch (MavenEmbedderException e) {
e.printStackTrace(listener.error(e.getMessage()));
} catch (ComponentLookupException e) {
e.printStackTrace(listener.error(e.getMessage()));
} catch (ArtifactDeploymentException e) {
e.printStackTrace(listener.error(e.getMessage()));
}
// failed
build.setResult(Result.FAILURE);
listener.getLogger().println("[INFO] Deployment failed after " + Util.getTimeSpanString(Calendar.getInstance().getTimeInMillis() - startupTime));
return true;
}
private ArtifactRepository getDeploymentRepository(ArtifactRepositoryFactory factory, ArtifactRepositoryLayout layout, String repositoryId, String repositoryUrl) throws ComponentLookupException {
if (repositoryUrl == null) return null;
final ArtifactRepository repository = factory.createDeploymentArtifactRepository(
repositoryId, repositoryUrl, layout, uniqueVersion);
return new WrappedArtifactRepository(repository, uniqueVersion);
}
/**
*
* copy from MavenUtil but here we have to ignore localRepo path and setting as thoses paths comes
* from the remote node and can not exist in master see http://issues.jenkins-ci.org/browse/JENKINS-8711
*
*/
private MavenEmbedder createEmbedder(TaskListener listener, AbstractBuild<?,?> build) throws MavenEmbedderException, IOException, InterruptedException {
MavenInstallation m=null;
File settingsLoc = null, remoteGlobalSettingsFromConfig = null;
String profiles = null;
Properties systemProperties = null;
String privateRepository = null;
FilePath remoteSettingsFromConfig = null;
File tmpSettings = File.createTempFile( "jenkins", "temp-settings.xml" );
try {
AbstractProject project = build.getProject();
if (project instanceof MavenModuleSet) {
MavenModuleSet mavenModuleSet = ((MavenModuleSet) project);
profiles = mavenModuleSet.getProfiles();
systemProperties = mavenModuleSet.getMavenProperties();
// olamy see
// we have to take about the settings use for the project
// order tru configuration
// TODO maybe in goals with -s,--settings last wins but not done in during pom parsing
// or -Dmaven.repo.local
// if not we must get ~/.m2/settings.xml then $M2_HOME/conf/settings.xml
// TODO check if the remoteSettings has a localRepository configured and disabled it
String settingsConfigId = mavenModuleSet.getSettingConfigId();
String altSettingsPath = null;
if (!StringUtils.isBlank(settingsConfigId)) {
SettingConfig config = SettingsProviderUtils.findSettings(settingsConfigId);
if (config == null) {
listener.getLogger().println(
" your Apache Maven build is setup to use a config with id " + settingsConfigId
+ " but cannot find the config" );
} else {
listener.getLogger().println( "redeploy publisher using settings config with name " + config.name );
if (config.content != null ) {
remoteSettingsFromConfig = SettingsProviderUtils.copyConfigContentToFilePath( config, build.getWorkspace() );
altSettingsPath = remoteSettingsFromConfig.getRemote();
}
}
}
if (mavenModuleSet.getAlternateSettings() != null ) {
altSettingsPath = mavenModuleSet.getAlternateSettings();
}
String globalSettingsConfigId = mavenModuleSet.getGlobalSettingConfigId();
if (!StringUtils.isBlank(globalSettingsConfigId)) {
SettingConfig config = SettingsProviderUtils.findSettings(globalSettingsConfigId);
if (config == null) {
listener.getLogger().println(
" your Apache Maven build is setup to use a global settings config with id "
+ globalSettingsConfigId + " but cannot find the config" );
} else {
listener.getLogger().println( "redeploy publisher using global settings config with name " + config.name );
if (config.content != null ) {
remoteGlobalSettingsFromConfig = SettingsProviderUtils.copyConfigContentToFile( config );
}
}
}
Node buildNode = build.getBuiltOn();
if(buildNode == null) {
// assume that build was made on master
buildNode = Jenkins.getInstance();
}
if (StringUtils.isBlank( altSettingsPath ) ) {
// get userHome from the node where job has been executed
String remoteUserHome = build.getWorkspace().act( new GetUserHome() );
altSettingsPath = remoteUserHome + "/.m2/settings.xml";
}
// we copy this file in the master in a temporary file
FilePath filePath = new FilePath( tmpSettings );
FilePath remoteSettings = build.getWorkspace().child( altSettingsPath );
if (!remoteSettings.exists()) {
// JENKINS-9084 we finally use $M2_HOME/conf/settings.xml as maven do
String mavenHome =
((MavenModuleSet) project).getMaven().forNode(buildNode, listener ).getHome();
String settingsPath = mavenHome + "/conf/settings.xml";
remoteSettings = build.getWorkspace().child( settingsPath);
}
listener.getLogger().println( "Maven RedeployPublisher use remote " + (buildNode != null ? buildNode.getNodeName() : "local" )
+ " maven settings from : " + remoteSettings.getRemote() );
remoteSettings.copyTo( filePath );
settingsLoc = tmpSettings;
}
MavenEmbedderRequest mavenEmbedderRequest = new MavenEmbedderRequest(listener,
m!=null?m.getHomeDir():null,
profiles,
systemProperties,
privateRepository,
settingsLoc );
if (remoteGlobalSettingsFromConfig != null) {
mavenEmbedderRequest.setGlobalSettings( remoteGlobalSettingsFromConfig );
}
mavenEmbedderRequest.setTransferListener(new BatchModeMavenTransferListener(listener.getLogger()));
return MavenUtil.createEmbedder(mavenEmbedderRequest);
} finally {
if (tmpSettings != null) {
tmpSettings.delete();
}
if (remoteSettingsFromConfig != null) {
remoteSettingsFromConfig.delete();
}
FileUtils.deleteQuietly(remoteGlobalSettingsFromConfig);
}
}
private static final class GetUserHome implements Callable<String,IOException> {
private static final long serialVersionUID = -8755705771716056636L;
public String call() throws IOException {
return System.getProperty("user.home");
}
}
/**
* Obtains the {@link MavenModuleSetBuild} that we'll work on, or null.
* <p>
* This allows promoted-builds plugin to reuse the code for delayed deployment.
*/
protected MavenModuleSetBuild getMavenBuild(AbstractBuild<?, ?> build) {
return (build instanceof MavenModuleSetBuild)
? (MavenModuleSetBuild) build
: null;
}
protected List<MavenAbstractArtifactRecord> getActions(AbstractBuild<?, ?> build, BuildListener listener) {
List<MavenAbstractArtifactRecord> actions = new ArrayList<MavenAbstractArtifactRecord>();
MavenModuleSetBuild mavenBuild = getMavenBuild(build);
if (mavenBuild == null) {
return actions;
}
for (Entry<MavenModule, MavenBuild> e : mavenBuild.getModuleLastBuilds().entrySet()) {
MavenAbstractArtifactRecord a = e.getValue().getAction( MavenAbstractArtifactRecord.class );
if (a == null) {
listener.getLogger().println("No artifacts are recorded for module" + e.getKey().getName() + ". Is this a Maven project?");
} else {
actions.add( a );
}
}
return actions;
}
public BuildStepMonitor getRequiredMonitorService() {
return BuildStepMonitor.NONE;
}
protected Result getTreshold() {
if (evenIfUnstable) {
return Result.UNSTABLE;
} else {
return Result.SUCCESS;
}
}
@Extension
public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {
public DescriptorImpl() {
}
/**
* @deprecated as of 1.290
* Use the default constructor.
*/
protected DescriptorImpl(Class<? extends Publisher> clazz) {
super(clazz);
}
public boolean isApplicable(Class<? extends AbstractProject> jobType) {
return jobType==MavenModuleSet.class;
}
public RedeployPublisher newInstance(StaplerRequest req, JSONObject formData) throws FormException {
return req.bindJSON(RedeployPublisher.class,formData);
}
public String getDisplayName() {
return Messages.RedeployPublisher_getDisplayName();
}
public boolean showEvenIfUnstableOption() {
// little hack to avoid showing this option on the redeploy action's screen
return true;
}
}
//---------------------------------------------
@SuppressWarnings("deprecation") // as we're restricted to Maven 2.x API here, but compile against Maven 3.x we cannot avoid deprecations
public static class WrappedArtifactRepository implements ArtifactRepository {
private ArtifactRepository artifactRepository;
private boolean uniqueVersion;
public WrappedArtifactRepository (ArtifactRepository artifactRepository, boolean uniqueVersion)
{
this.artifactRepository = artifactRepository;
this.uniqueVersion = uniqueVersion;
}
public String pathOf( Artifact artifact )
{
return artifactRepository.pathOf( artifact );
}
public String pathOfRemoteRepositoryMetadata( ArtifactMetadata artifactMetadata )
{
return artifactRepository.pathOfRemoteRepositoryMetadata( artifactMetadata );
}
public String pathOfLocalRepositoryMetadata( ArtifactMetadata metadata, ArtifactRepository repository )
{
return artifactRepository.pathOfLocalRepositoryMetadata( metadata, repository );
}
public String getUrl()
{
return artifactRepository.getUrl();
}
public void setUrl( String url )
{
artifactRepository.setUrl( url );
}
public String getBasedir()
{
return artifactRepository.getBasedir();
}
public String getProtocol()
{
return artifactRepository.getProtocol();
}
public String getId()
{
return artifactRepository.getId();
}
public void setId( String id )
{
artifactRepository.setId( id );
}
public ArtifactRepositoryPolicy getSnapshots()
{
return artifactRepository.getSnapshots();
}
public void setSnapshotUpdatePolicy( ArtifactRepositoryPolicy policy )
{
artifactRepository.setSnapshotUpdatePolicy( policy );
}
public ArtifactRepositoryPolicy getReleases()
{
return artifactRepository.getReleases();
}
public void setReleaseUpdatePolicy( ArtifactRepositoryPolicy policy )
{
artifactRepository.setReleaseUpdatePolicy( policy );
}
public ArtifactRepositoryLayout getLayout()
{
return artifactRepository.getLayout();
}
public void setLayout( ArtifactRepositoryLayout layout )
{
artifactRepository.setLayout( layout );
}
public String getKey()
{
return artifactRepository.getKey();
}
public boolean isUniqueVersion()
{
return this.uniqueVersion;
}
public void setUniqueVersion(boolean uniqueVersion) {
this.uniqueVersion = uniqueVersion;
}
public boolean isBlacklisted()
{
return artifactRepository.isBlacklisted();
}
public void setBlacklisted( boolean blackListed )
{
artifactRepository.setBlacklisted( blackListed );
}
public Artifact find( Artifact artifact )
{
return artifactRepository.find( artifact );
}
public List<String> findVersions( Artifact artifact )
{
return artifactRepository.findVersions( artifact );
}
public boolean isProjectAware()
{
return artifactRepository.isProjectAware();
}
public void setAuthentication( Authentication authentication )
{
artifactRepository.setAuthentication( authentication );
}
public Authentication getAuthentication()
{
return artifactRepository.getAuthentication();
}
public void setProxy( Proxy proxy )
{
artifactRepository.setProxy( proxy );
}
public Proxy getProxy()
{
return artifactRepository.getProxy();
}
public List<ArtifactRepository> getMirroredRepositories()
{
return Collections.emptyList();
}
public void setMirroredRepositories( List<ArtifactRepository> arg0 )
{
// noop
}
}
}
| maven-plugin/src/main/java/hudson/maven/RedeployPublisher.java | /*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, id:cactusman, Seiji Sogabe, Olivier Lamy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.maven;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.maven.reporters.MavenAbstractArtifactRecord;
import hudson.maven.reporters.MavenArtifactRecord;
import hudson.maven.settings.SettingConfig;
import hudson.maven.settings.SettingsProviderUtils;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.Node;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.remoting.Callable;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Maven.MavenInstallation;
import hudson.tasks.Publisher;
import hudson.tasks.Recorder;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.Map.Entry;
import java.util.Properties;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.deployer.ArtifactDeploymentException;
import org.apache.maven.artifact.metadata.ArtifactMetadata;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import org.apache.maven.artifact.repository.Authentication;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.apache.maven.cli.BatchModeMavenTransferListener;
import org.apache.maven.repository.Proxy;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
/**
* {@link Publisher} for {@link MavenModuleSetBuild} to deploy artifacts
* after a build is fully succeeded.
*
* @author Kohsuke Kawaguchi
* @since 1.191
*/
public class RedeployPublisher extends Recorder {
/**
* Repository ID. This is matched up with <tt>~/.m2/settings.xml</tt> for authentication related information.
*/
public final String id;
/**
* Repository URL to deploy artifacts to.
*/
public final String url;
public final boolean uniqueVersion;
public final boolean evenIfUnstable;
public final String releaseEnvVar;
/**
* For backward compatibility
*/
@Deprecated
public RedeployPublisher(String id, String url, boolean uniqueVersion) {
this(id, url, uniqueVersion, false, null);
}
/**
* @since 1.347
*/
@Deprecated
public RedeployPublisher(String id, String url, boolean uniqueVersion, boolean evenIfUnstable) {
this(id, url, uniqueVersion, evenIfUnstable, null);
}
@DataBoundConstructor
public RedeployPublisher(String id, String url, boolean uniqueVersion, boolean evenIfUnstable, String releaseEnvVar) {
this.id = id;
this.url = Util.fixEmptyAndTrim(url);
this.uniqueVersion = uniqueVersion;
this.evenIfUnstable = evenIfUnstable;
this.releaseEnvVar = Util.fixEmptyAndTrim(releaseEnvVar);
}
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
if (build.getResult().isWorseThan(getTreshold()))
return true; // build failed. Don't publish
/**
* Check if we should skip or not
*/
if (releaseEnvVar != null) {
String envVarValue = build.getEnvironment(listener).get(releaseEnvVar);
if ("true".equals(envVarValue)) { // null or false are ignored
listener.getLogger().println("[INFO] Skipping deploying artifact as release build is in progress.");
return true; // skip the deploy
}
}
List<MavenAbstractArtifactRecord> mavenAbstractArtifactRecords = getActions(build, listener);
if (mavenAbstractArtifactRecords == null || mavenAbstractArtifactRecords.isEmpty()) {
listener.getLogger().println("[ERROR] No artifacts are recorded. Is this a Maven project?");
build.setResult(Result.FAILURE);
return true;
}
if(build instanceof MavenModuleSetBuild && ((MavenModuleSetBuild)build).getParent().isArchivingDisabled()){
listener.getLogger().println("[ERROR] You cannot use the \"Deploy artifacts to Maven repository\" feature if you " +
"disabled automatic artifact archiving");
build.setResult(Result.FAILURE);
return true;
}
long startupTime = Calendar.getInstance().getTimeInMillis();
try {
MavenEmbedder embedder = createEmbedder(listener, build);
ArtifactRepositoryLayout layout =
(ArtifactRepositoryLayout) embedder.lookup(ArtifactRepositoryLayout.ROLE, "default");
ArtifactRepositoryFactory factory =
(ArtifactRepositoryFactory) embedder.lookup(ArtifactRepositoryFactory.ROLE);
ArtifactRepository artifactRepository = null;
if (url != null) {
// By default we try to get the repository definition from the job configuration
artifactRepository = getDeploymentRepository(factory, layout, id, url);
}
for (MavenAbstractArtifactRecord mavenAbstractArtifactRecord : mavenAbstractArtifactRecords) {
if (artifactRepository == null && mavenAbstractArtifactRecord instanceof MavenArtifactRecord) {
// If no repository definition is set on the job level we try to take it from the POM
MavenArtifactRecord mavenArtifactRecord = (MavenArtifactRecord) mavenAbstractArtifactRecord;
artifactRepository = getDeploymentRepository(factory, layout, mavenArtifactRecord.repositoryId, mavenArtifactRecord.repositoryUrl);
}
if (artifactRepository == null) {
listener.getLogger().println("[ERROR] No Repository settings defined in the job configuration or distributionManagement of the module.");
build.setResult(Result.FAILURE);
return true;
}
mavenAbstractArtifactRecord.deploy(embedder, artifactRepository, listener);
}
listener.getLogger().println("[INFO] Deployment done in " + Util.getTimeSpanString(Calendar.getInstance().getTimeInMillis() - startupTime));
return true;
} catch (MavenEmbedderException e) {
e.printStackTrace(listener.error(e.getMessage()));
} catch (ComponentLookupException e) {
e.printStackTrace(listener.error(e.getMessage()));
} catch (ArtifactDeploymentException e) {
e.printStackTrace(listener.error(e.getMessage()));
}
// failed
build.setResult(Result.FAILURE);
listener.getLogger().println("[INFO] Deployment failed after " + Util.getTimeSpanString(Calendar.getInstance().getTimeInMillis() - startupTime));
return true;
}
private ArtifactRepository getDeploymentRepository(ArtifactRepositoryFactory factory, ArtifactRepositoryLayout layout, String repositoryId, String repositoryUrl) throws ComponentLookupException {
if (repositoryUrl == null) return null;
final ArtifactRepository repository = factory.createDeploymentArtifactRepository(
repositoryId, repositoryUrl, layout, uniqueVersion);
return new WrappedArtifactRepository(repository, uniqueVersion);
}
/**
*
* copy from MavenUtil but here we have to ignore localRepo path and setting as thoses paths comes
* from the remote node and can not exist in master see http://issues.jenkins-ci.org/browse/JENKINS-8711
*
*/
private MavenEmbedder createEmbedder(TaskListener listener, AbstractBuild<?,?> build) throws MavenEmbedderException, IOException, InterruptedException {
MavenInstallation m=null;
File settingsLoc = null, remoteGlobalSettingsFromConfig = null;
String profiles = null;
Properties systemProperties = null;
String privateRepository = null;
FilePath remoteSettingsFromConfig = null;
File tmpSettings = null;
try {
AbstractProject project = build.getProject();
if (project instanceof MavenModuleSet) {
MavenModuleSet mavenModuleSet = ((MavenModuleSet) project);
profiles = mavenModuleSet.getProfiles();
systemProperties = mavenModuleSet.getMavenProperties();
// olamy see
// we have to take about the settings use for the project
// order tru configuration
// TODO maybe in goals with -s,--settings last wins but not done in during pom parsing
// or -Dmaven.repo.local
// if not we must get ~/.m2/settings.xml then $M2_HOME/conf/settings.xml
// TODO check if the remoteSettings has a localRepository configured and disabled it
String settingsConfigId = mavenModuleSet.getSettingConfigId();
String altSettingsPath = null;
if (!StringUtils.isBlank(settingsConfigId)) {
SettingConfig config = SettingsProviderUtils.findSettings(settingsConfigId);
if (config == null) {
listener.getLogger().println(
" your Apache Maven build is setup to use a config with id " + settingsConfigId
+ " but cannot find the config" );
} else {
listener.getLogger().println( "redeploy publisher using settings config with name " + config.name );
if (config.content != null ) {
remoteSettingsFromConfig = SettingsProviderUtils.copyConfigContentToFilePath( config, build.getWorkspace() );
altSettingsPath = remoteSettingsFromConfig.getRemote();
}
}
}
if (mavenModuleSet.getAlternateSettings() != null ) {
altSettingsPath = mavenModuleSet.getAlternateSettings();
}
String globalSettingsConfigId = mavenModuleSet.getGlobalSettingConfigId();
if (!StringUtils.isBlank(globalSettingsConfigId)) {
SettingConfig config = SettingsProviderUtils.findSettings(globalSettingsConfigId);
if (config == null) {
listener.getLogger().println(
" your Apache Maven build is setup to use a global settings config with id "
+ globalSettingsConfigId + " but cannot find the config" );
} else {
listener.getLogger().println( "redeploy publisher using global settings config with name " + config.name );
if (config.content != null ) {
remoteGlobalSettingsFromConfig = SettingsProviderUtils.copyConfigContentToFile( config );
}
}
}
Node buildNode = build.getBuiltOn();
if(buildNode == null) {
// assume that build was made on master
buildNode = Jenkins.getInstance();
}
m = mavenModuleSet.getMaven().forNode(buildNode, listener);
if (StringUtils.isBlank( altSettingsPath ) ) {
// get userHome from the node where job has been executed
String remoteUserHome = build.getWorkspace().act( new GetUserHome() );
altSettingsPath = remoteUserHome + "/.m2/settings.xml";
}
// we copy this file in the master in a temporary file
FilePath remoteSettings = build.getWorkspace().child( altSettingsPath );
if (remoteSettings != null) {
listener.getLogger().println( "Maven RedeployPublisher use " + (buildNode != null ? buildNode.getNodeName() : "local" )
+ " maven settings from : " + remoteSettings.getRemote() );
tmpSettings = File.createTempFile( "jenkins", "temp-settings.xml" );
FilePath filePath = new FilePath( tmpSettings );
remoteSettings.copyTo( filePath );
settingsLoc = tmpSettings;
}
}
MavenEmbedderRequest mavenEmbedderRequest = new MavenEmbedderRequest(listener,
m!=null?m.getHomeDir():null,
profiles,
systemProperties,
privateRepository,
settingsLoc );
if (remoteGlobalSettingsFromConfig != null) {
mavenEmbedderRequest.setGlobalSettings( remoteGlobalSettingsFromConfig );
}
mavenEmbedderRequest.setTransferListener(new BatchModeMavenTransferListener(listener.getLogger()));
return MavenUtil.createEmbedder(mavenEmbedderRequest);
} finally {
if (tmpSettings != null) {
tmpSettings.delete();
}
if (remoteSettingsFromConfig != null) {
remoteSettingsFromConfig.delete();
}
FileUtils.deleteQuietly(remoteGlobalSettingsFromConfig);
}
}
private static final class GetUserHome implements Callable<String,IOException> {
private static final long serialVersionUID = -8755705771716056636L;
public String call() throws IOException {
return System.getProperty("user.home");
}
}
/**
* Obtains the {@link MavenModuleSetBuild} that we'll work on, or null.
* <p>
* This allows promoted-builds plugin to reuse the code for delayed deployment.
*/
protected MavenModuleSetBuild getMavenBuild(AbstractBuild<?, ?> build) {
return (build instanceof MavenModuleSetBuild)
? (MavenModuleSetBuild) build
: null;
}
protected List<MavenAbstractArtifactRecord> getActions(AbstractBuild<?, ?> build, BuildListener listener) {
List<MavenAbstractArtifactRecord> actions = new ArrayList<MavenAbstractArtifactRecord>();
MavenModuleSetBuild mavenBuild = getMavenBuild(build);
if (mavenBuild == null) {
return actions;
}
for (Entry<MavenModule, MavenBuild> e : mavenBuild.getModuleLastBuilds().entrySet()) {
MavenAbstractArtifactRecord a = e.getValue().getAction( MavenAbstractArtifactRecord.class );
if (a == null) {
listener.getLogger().println("No artifacts are recorded for module" + e.getKey().getName() + ". Is this a Maven project?");
} else {
actions.add( a );
}
}
return actions;
}
public BuildStepMonitor getRequiredMonitorService() {
return BuildStepMonitor.NONE;
}
protected Result getTreshold() {
if (evenIfUnstable) {
return Result.UNSTABLE;
} else {
return Result.SUCCESS;
}
}
@Extension
public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {
public DescriptorImpl() {
}
/**
* @deprecated as of 1.290
* Use the default constructor.
*/
protected DescriptorImpl(Class<? extends Publisher> clazz) {
super(clazz);
}
public boolean isApplicable(Class<? extends AbstractProject> jobType) {
return jobType==MavenModuleSet.class;
}
public RedeployPublisher newInstance(StaplerRequest req, JSONObject formData) throws FormException {
return req.bindJSON(RedeployPublisher.class,formData);
}
public String getDisplayName() {
return Messages.RedeployPublisher_getDisplayName();
}
public boolean showEvenIfUnstableOption() {
// little hack to avoid showing this option on the redeploy action's screen
return true;
}
}
//---------------------------------------------
@SuppressWarnings("deprecation") // as we're restricted to Maven 2.x API here, but compile against Maven 3.x we cannot avoid deprecations
public static class WrappedArtifactRepository implements ArtifactRepository {
private ArtifactRepository artifactRepository;
private boolean uniqueVersion;
public WrappedArtifactRepository (ArtifactRepository artifactRepository, boolean uniqueVersion)
{
this.artifactRepository = artifactRepository;
this.uniqueVersion = uniqueVersion;
}
public String pathOf( Artifact artifact )
{
return artifactRepository.pathOf( artifact );
}
public String pathOfRemoteRepositoryMetadata( ArtifactMetadata artifactMetadata )
{
return artifactRepository.pathOfRemoteRepositoryMetadata( artifactMetadata );
}
public String pathOfLocalRepositoryMetadata( ArtifactMetadata metadata, ArtifactRepository repository )
{
return artifactRepository.pathOfLocalRepositoryMetadata( metadata, repository );
}
public String getUrl()
{
return artifactRepository.getUrl();
}
public void setUrl( String url )
{
artifactRepository.setUrl( url );
}
public String getBasedir()
{
return artifactRepository.getBasedir();
}
public String getProtocol()
{
return artifactRepository.getProtocol();
}
public String getId()
{
return artifactRepository.getId();
}
public void setId( String id )
{
artifactRepository.setId( id );
}
public ArtifactRepositoryPolicy getSnapshots()
{
return artifactRepository.getSnapshots();
}
public void setSnapshotUpdatePolicy( ArtifactRepositoryPolicy policy )
{
artifactRepository.setSnapshotUpdatePolicy( policy );
}
public ArtifactRepositoryPolicy getReleases()
{
return artifactRepository.getReleases();
}
public void setReleaseUpdatePolicy( ArtifactRepositoryPolicy policy )
{
artifactRepository.setReleaseUpdatePolicy( policy );
}
public ArtifactRepositoryLayout getLayout()
{
return artifactRepository.getLayout();
}
public void setLayout( ArtifactRepositoryLayout layout )
{
artifactRepository.setLayout( layout );
}
public String getKey()
{
return artifactRepository.getKey();
}
public boolean isUniqueVersion()
{
return this.uniqueVersion;
}
public void setUniqueVersion(boolean uniqueVersion) {
this.uniqueVersion = uniqueVersion;
}
public boolean isBlacklisted()
{
return artifactRepository.isBlacklisted();
}
public void setBlacklisted( boolean blackListed )
{
artifactRepository.setBlacklisted( blackListed );
}
public Artifact find( Artifact artifact )
{
return artifactRepository.find( artifact );
}
public List<String> findVersions( Artifact artifact )
{
return artifactRepository.findVersions( artifact );
}
public boolean isProjectAware()
{
return artifactRepository.isProjectAware();
}
public void setAuthentication( Authentication authentication )
{
artifactRepository.setAuthentication( authentication );
}
public Authentication getAuthentication()
{
return artifactRepository.getAuthentication();
}
public void setProxy( Proxy proxy )
{
artifactRepository.setProxy( proxy );
}
public Proxy getProxy()
{
return artifactRepository.getProxy();
}
public List<ArtifactRepository> getMirroredRepositories()
{
return Collections.emptyList();
}
public void setMirroredRepositories( List<ArtifactRepository> arg0 )
{
// noop
}
}
}
| [FIXED JENKINS-15292] Revert "setting maven home is enough for global settings to be m.getHome()/conf/settings.xml"
This reverts commit 51d5220f6c19c4e7d5c35a07b62219472b2a7870.
hudson.maven.MavenUtil#createEmbedder set GlobalSettingsFile to MavenHome/conf/settings.xml by default so I can't explain this regression
| maven-plugin/src/main/java/hudson/maven/RedeployPublisher.java | [FIXED JENKINS-15292] Revert "setting maven home is enough for global settings to be m.getHome()/conf/settings.xml" | <ide><path>aven-plugin/src/main/java/hudson/maven/RedeployPublisher.java
<ide> String privateRepository = null;
<ide> FilePath remoteSettingsFromConfig = null;
<ide>
<del> File tmpSettings = null;
<add> File tmpSettings = File.createTempFile( "jenkins", "temp-settings.xml" );
<ide> try {
<ide> AbstractProject project = build.getProject();
<ide>
<ide> // assume that build was made on master
<ide> buildNode = Jenkins.getInstance();
<ide> }
<del> m = mavenModuleSet.getMaven().forNode(buildNode, listener);
<ide>
<ide> if (StringUtils.isBlank( altSettingsPath ) ) {
<ide> // get userHome from the node where job has been executed
<ide> String remoteUserHome = build.getWorkspace().act( new GetUserHome() );
<ide> altSettingsPath = remoteUserHome + "/.m2/settings.xml";
<ide> }
<del>
<del> // we copy this file in the master in a temporary file
<add>
<add> // we copy this file in the master in a temporary file
<add> FilePath filePath = new FilePath( tmpSettings );
<ide> FilePath remoteSettings = build.getWorkspace().child( altSettingsPath );
<del> if (remoteSettings != null) {
<del> listener.getLogger().println( "Maven RedeployPublisher use " + (buildNode != null ? buildNode.getNodeName() : "local" )
<del> + " maven settings from : " + remoteSettings.getRemote() );
<del> tmpSettings = File.createTempFile( "jenkins", "temp-settings.xml" );
<del> FilePath filePath = new FilePath( tmpSettings );
<del> remoteSettings.copyTo( filePath );
<del> settingsLoc = tmpSettings;
<del> }
<del>
<add> if (!remoteSettings.exists()) {
<add> // JENKINS-9084 we finally use $M2_HOME/conf/settings.xml as maven do
<add>
<add> String mavenHome =
<add> ((MavenModuleSet) project).getMaven().forNode(buildNode, listener ).getHome();
<add> String settingsPath = mavenHome + "/conf/settings.xml";
<add> remoteSettings = build.getWorkspace().child( settingsPath);
<add> }
<add> listener.getLogger().println( "Maven RedeployPublisher use remote " + (buildNode != null ? buildNode.getNodeName() : "local" )
<add> + " maven settings from : " + remoteSettings.getRemote() );
<add> remoteSettings.copyTo( filePath );
<add> settingsLoc = tmpSettings;
<add>
<ide> }
<ide>
<ide> MavenEmbedderRequest mavenEmbedderRequest = new MavenEmbedderRequest(listener, |
|
Java | mit | 37e8eee877d4bbe54c99c21d300212fc1e0c60e4 | 0 | DevPhilB/Algorithms_and_Data_Structures | package problemset4;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigInteger;
/**
* Program takes a natural number (n) from user input, calculates the amount of all points on the
* path to this point (0, 0) -> (n, n) and prints it out.
*
* @author Philipp Backes, 191710
* @author Homa Alavi, 191720
* @author Jannis Scholz, 191481
*
*/
public class PathNodes {
public static BigInteger amountOfPoints = BigInteger.valueOf(0);
public static int n = 0;
/**
* Main method
*
* @param args
*/
public static void main(String[] args) {
String userInput = "";
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
System.out.print("n = ");
try {
userInput = br.readLine();
} catch (IOException e) {
e.printStackTrace();
}
n = Integer.valueOf(userInput);
BigInteger nResult = b(n, n, 0, "");
System.out.println("For n = " + n + ": " + nResult + " Pathes with " + amountOfPoints + " Points");
}
public static BigInteger b(int x, int y, int t, String s) {
// Think about this implementation
if (x < 0 || y < 0) {
return BigInteger.ZERO;
} else if (x == 0 && y == 0) {
amountOfPoints = amountOfPoints.add(BigInteger.valueOf(s.length() + 1));
return BigInteger.ONE;
}
if (x < 0 || y < 0) {
return BigInteger.ZERO;
}
if (x == 0 && y == 0) {
System.out.println(" " + s + " . "); // print path
return BigInteger.ONE;
}
BigInteger r = BigInteger.ZERO;
if (y < x) {
r = r.add(b(x - 1, y, 0, "R" + s));
}
if (y <= x) {
r = r.add(b(x, y - 1, 0, "U" + s));
}
if (y >= x) {
r = r.add(b(x - 1, y - 1, 0, "F" + s));
}
if (y > x + 1 && t != 2) {
r = r.add(b(x + 1, y - 1, 1, "L" + s));
}
if (y >= x && t != 1) {
r = r.add(b(x - 1, y + 1, 2, "D" + s));
}
return r;
}
}
| src/problemset4/PathNodes.java | package problemset4;
/**
* Program takes a natural number (n) from user input,
* calculates the amount of all points on the path to this point (0, 0) -> (n, n)
* and prints it out.
*
* @author Philipp Backes, 191710
* @author Homa Alavi, 191720
* @author Jannis Scholz, 191481
*
*/
public class PathNodes {
/**
* Main method
* @param args
*/
public static void main(String[] args) {
}
}
| Solved exercise 1
| src/problemset4/PathNodes.java | Solved exercise 1 | <ide><path>rc/problemset4/PathNodes.java
<ide> package problemset4;
<ide>
<add>import java.io.BufferedReader;
<add>import java.io.IOException;
<add>import java.io.InputStreamReader;
<add>import java.math.BigInteger;
<add>
<ide> /**
<del> * Program takes a natural number (n) from user input,
<del> * calculates the amount of all points on the path to this point (0, 0) -> (n, n)
<del> * and prints it out.
<add> * Program takes a natural number (n) from user input, calculates the amount of all points on the
<add> * path to this point (0, 0) -> (n, n) and prints it out.
<ide> *
<ide> * @author Philipp Backes, 191710
<ide> * @author Homa Alavi, 191720
<ide> *
<ide> */
<ide> public class PathNodes {
<add> public static BigInteger amountOfPoints = BigInteger.valueOf(0);
<add> public static int n = 0;
<ide>
<ide> /**
<ide> * Main method
<add> *
<ide> * @param args
<ide> */
<ide> public static void main(String[] args) {
<del>
<add> String userInput = "";
<add> BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
<add> System.out.print("n = ");
<add> try {
<add> userInput = br.readLine();
<add> } catch (IOException e) {
<add> e.printStackTrace();
<add> }
<add> n = Integer.valueOf(userInput);
<add> BigInteger nResult = b(n, n, 0, "");
<add> System.out.println("For n = " + n + ": " + nResult + " Pathes with " + amountOfPoints + " Points");
<add> }
<add>
<add> public static BigInteger b(int x, int y, int t, String s) {
<add> // Think about this implementation
<add> if (x < 0 || y < 0) {
<add> return BigInteger.ZERO;
<add> } else if (x == 0 && y == 0) {
<add> amountOfPoints = amountOfPoints.add(BigInteger.valueOf(s.length() + 1));
<add> return BigInteger.ONE;
<add> }
<add>
<add> if (x < 0 || y < 0) {
<add> return BigInteger.ZERO;
<add> }
<add> if (x == 0 && y == 0) {
<add> System.out.println(" " + s + " . "); // print path
<add> return BigInteger.ONE;
<add> }
<add> BigInteger r = BigInteger.ZERO;
<add> if (y < x) {
<add> r = r.add(b(x - 1, y, 0, "R" + s));
<add> }
<add> if (y <= x) {
<add> r = r.add(b(x, y - 1, 0, "U" + s));
<add> }
<add> if (y >= x) {
<add> r = r.add(b(x - 1, y - 1, 0, "F" + s));
<add> }
<add> if (y > x + 1 && t != 2) {
<add> r = r.add(b(x + 1, y - 1, 1, "L" + s));
<add> }
<add> if (y >= x && t != 1) {
<add> r = r.add(b(x - 1, y + 1, 2, "D" + s));
<add> }
<add> return r;
<ide> }
<ide>
<ide> } |
|
Java | apache-2.0 | 5888371402b67a5a308bb11d6790594acb868820 | 0 | gpolitis/jitsi-videobridge,davidertel/jitsi-videobridge,matteocampana/jitsi-videobridge,parlaylabs/jitsi-videobridge,matteocampana/jitsi-videobridge,matteocampana/jitsi-videobridge,jitsi/jitsi-videobridge,jitsi/jitsi-videobridge,jitsi/jitsi-videobridge,davidertel/jitsi-videobridge,jitsi/jitsi-videobridge,parlaylabs/jitsi-videobridge,davidertel/jitsi-videobridge,jitsi/jitsi-videobridge,jitsi/jitsi-videobridge,gpolitis/jitsi-videobridge,jitsi/jitsi-videobridge,gpolitis/jitsi-videobridge,parlaylabs/jitsi-videobridge,parlaylabs/jitsi-videobridge | /*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.videobridge;
import java.beans.*;
import java.io.*;
import java.net.*;
import java.util.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.CandidateType;
import net.java.sip.communicator.service.netaddr.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.*;
import org.ice4j.*;
import org.ice4j.ice.*;
import org.ice4j.ice.harvest.*;
import org.ice4j.socket.*;
import org.jitsi.eventadmin.*;
import org.jitsi.impl.neomedia.transform.dtls.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.util.Logger;
import org.osgi.framework.*;
/**
* Implements the Jingle ICE-UDP transport.
*
* @author Lyubomir Marinov
* @author Pawel Domas
* @author Boris Grozev
*/
public class IceUdpTransportManager
extends TransportManager
{
    /**
     * The default name of the single <tt>IceStream</tt> that this
     * <tt>TransportManager</tt> will create/use.
     */
    private static final String DEFAULT_ICE_STREAM_NAME = "stream";
    /**
     * The name of the property which disables the use of a
     * <tt>TcpHarvester</tt>.
     */
    private static final String DISABLE_TCP_HARVESTER
        = "org.jitsi.videobridge.DISABLE_TCP_HARVESTER";
    /**
     * The name of the property which controls the port number used for
     * <tt>SinglePortUdpHarvester</tt>s.
     */
    private static final String SINGLE_PORT_HARVESTER_PORT
        = "org.jitsi.videobridge.SINGLE_PORT_HARVESTER_PORT";
    /**
     * The default value of the port to be used for
     * {@code SinglePortUdpHarvester}.
     */
    private static final int SINGLE_PORT_DEFAULT_VALUE = 10000;
    /**
     * The <tt>Logger</tt> used by the <tt>IceUdpTransportManager</tt> class and
     * its instances to print debug information.
     */
    private static final Logger logger
        = Logger.getLogger(IceUdpTransportManager.class);
    /**
     * The default port that the <tt>TcpHarvester</tt> will
     * bind to.
     */
    private static final int TCP_DEFAULT_PORT = 443;
    /**
     * The port on which the <tt>TcpHarvester</tt> will bind to
     * if no port is specifically configured, and binding to
     * <tt>TCP_DEFAULT_PORT</tt> fails (for example, if the process doesn't have
     * the required privileges to bind to a port below 1024).
     */
    private static final int TCP_FALLBACK_PORT = 4443;
    /**
     * The name of the property which specifies an additional port to be
     * advertised by the TCP harvester.
     */
    private static final String TCP_HARVESTER_MAPPED_PORT
        = "org.jitsi.videobridge.TCP_HARVESTER_MAPPED_PORT";
    /**
     * The name of the property which controls the port to which the
     * <tt>TcpHarvester</tt> will bind.
     */
    private static final String TCP_HARVESTER_PORT
        = "org.jitsi.videobridge.TCP_HARVESTER_PORT";
    /**
     * The name of the property which controls the use of ssltcp candidates by
     * <tt>TcpHarvester</tt>.
     */
    private static final String TCP_HARVESTER_SSLTCP
        = "org.jitsi.videobridge.TCP_HARVESTER_SSLTCP";
    /**
     * The default value of the <tt>TCP_HARVESTER_SSLTCP</tt> property.
     */
    private static final boolean TCP_HARVESTER_SSLTCP_DEFAULT = true;
    /**
     * The single <tt>TcpHarvester</tt> instance for the
     * application. Shared by all <tt>IceUdpTransportManager</tt> instances.
     */
    private static TcpHarvester tcpHostHarvester = null;
    /**
     * The <tt>SinglePortUdpHarvester</tt>s which will be appended to ICE
     * <tt>Agent</tt>s managed by <tt>IceUdpTransportManager</tt> instances.
     */
    private static List<SinglePortUdpHarvester> singlePortHarvesters = null;
    /**
     * The flag which indicates whether application-wide harvesters, stored
     * in the static fields {@link #tcpHostHarvester} and
     * {@link #singlePortHarvesters} have been initialized.
     */
    private static boolean staticHarvestersInitialized = false;
    /**
     * The "mapped port" added to {@link #tcpHostHarvester}, or -1 if no
     * mapped port is configured.
     */
    private static int tcpHostHarvesterMappedPort = -1;
    /**
     * Logs a specific <tt>String</tt>. NOTE: despite the method's name, the
     * message is currently logged at <tt>INFO</tt> level (not debug), so it is
     * visible with the default logging configuration.
     *
     * @param s the <tt>String</tt> to log
     */
    private static void logd(String s)
    {
        logger.info(s);
    }
    /**
     * The single (if any) <tt>Channel</tt> instance, whose sockets are
     * currently configured to accept DTLS packets.
     */
    private Channel channelForDtls = null;
    /**
     * Whether this <tt>TransportManager</tt> has been closed.
     */
    private boolean closed = false;
    /**
     * The <tt>Conference</tt> object that this <tt>TransportManager</tt> is
     * associated with.
     */
    private final Conference conference;
    /**
     * The <tt>Thread</tt> used by this <tt>TransportManager</tt> to wait until
     * {@link #iceAgent} has established a connection.
     */
    private Thread connectThread;
    /**
     * Used to synchronize access to {@link #connectThread}.
     */
    private final Object connectThreadSyncRoot = new Object();
    /**
     * The <tt>DtlsControl</tt> that this <tt>TransportManager</tt> uses.
     * Shared by all channels of this transport manager.
     */
    private final DtlsControlImpl dtlsControl;
    /**
     * The <tt>Agent</tt> which implements the ICE protocol and which is used
     * by this instance to implement the Jingle ICE-UDP transport. Set to
     * <tt>null</tt> when this instance is closed.
     */
    private Agent iceAgent;
    /**
     * The <tt>PropertyChangeListener</tt> which is (to be) notified about
     * changes in the <tt>state</tt> of {@link #iceAgent}.
     */
    private final PropertyChangeListener iceAgentStateChangeListener
        = new PropertyChangeListener()
                {
                    @Override
                    public void propertyChange(PropertyChangeEvent ev)
                    {
                        iceAgentStateChange(ev);
                    }
                };
    /**
     * Whether ICE connectivity has been established.
     */
    private boolean iceConnected = false;
    /**
     * The <tt>IceMediaStream</tt> of {@link #iceAgent} associated with the
     * <tt>Channel</tt> of this instance.
     */
    private final IceMediaStream iceStream;
    /**
     * The <tt>PropertyChangeListener</tt> which is (to be) notified about
     * changes in the properties of the <tt>CandidatePair</tt>s of
     * {@link #iceStream}.
     */
    private final PropertyChangeListener iceStreamPairChangeListener
        = new PropertyChangeListener()
                {
                    @Override
                    public void propertyChange(PropertyChangeEvent ev)
                    {
                        iceStreamPairChange(ev);
                    }
                };
    /**
     * Whether this <tt>IceUdpTransportManager</tt> will serve as the
     * controlling or controlled ICE agent.
     */
    private final boolean controlling;
    /**
     * The number of {@link org.ice4j.ice.Component}-s to create in
     * {@link #iceStream}.
     */
    private int numComponents;
    /**
     * Whether we're using rtcp-mux or not. Initialized from
     * <tt>numComponents</tt> and possibly flipped to <tt>true</tt> later if
     * the remote description indicates rtcp-mux.
     */
    private boolean rtcpmux;
    /**
     * The <tt>SctpConnection</tt> instance, if any, added as a <tt>Channel</tt>
     * to this <tt>IceUdpTransportManager</tt>.
     *
     * Currently we support a single <tt>SctpConnection</tt> in one
     * <tt>IceUdpTransportManager</tt> and if it exists, it will receive all
     * DTLS packets.
     */
    private SctpConnection sctpConnection = null;
    /**
     * Initializes a new <tt>IceUdpTransportManager</tt> instance.
     *
     * @param conference the <tt>Conference</tt> which created this
     * <tt>TransportManager</tt>.
     * @param controlling {@code true} if the new instance is to serve as a
     * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
     * @throws IOException if initializing the ICE <tt>Agent</tt> fails
     */
    public IceUdpTransportManager(Conference conference,
                                  boolean controlling)
        throws IOException
    {
        // Default to 2 ICE components (RTP + RTCP) and the default stream
        // name.
        this(conference, controlling, 2, DEFAULT_ICE_STREAM_NAME);
    }
    /**
     * Initializes a new <tt>IceUdpTransportManager</tt> instance, using the
     * default ICE stream name.
     *
     * @param conference the <tt>Conference</tt> which created this
     * <tt>TransportManager</tt>.
     * @param controlling {@code true} if the new instance is to serve as a
     * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
     * @param numComponents the number of ICE components that this instance is
     * to start with.
     * @throws IOException if initializing the ICE <tt>Agent</tt> fails
     */
    public IceUdpTransportManager(Conference conference,
                                  boolean controlling,
                                  int numComponents)
        throws IOException
    {
        this(conference, controlling, numComponents, DEFAULT_ICE_STREAM_NAME);
    }
    /**
     * Initializes a new <tt>IceUdpTransportManager</tt> instance.
     *
     * @param conference the <tt>Conference</tt> which created this
     * <tt>TransportManager</tt>.
     * @param controlling {@code true} if the new instance is to serve as a
     * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
     * @param numComponents the number of ICE components that this instance is
     * to start with.
     * @param iceStreamName the name of the ICE stream to be created by this
     * instance.
     * @throws IOException if initializing the ICE <tt>Agent</tt> fails
     */
    public IceUdpTransportManager(Conference conference,
                                  boolean controlling,
                                  int numComponents,
                                  String iceStreamName)
        throws IOException
    {
        this.conference = conference;
        this.controlling = controlling;
        this.numComponents = numComponents;
        // A single ICE component implies RTP/RTCP multiplexing.
        this.rtcpmux = numComponents == 1;
        dtlsControl = createDtlsControl();
        iceAgent = createIceAgent(controlling, iceStreamName, rtcpmux);
        iceAgent.addStateChangeListener(iceAgentStateChangeListener);
        iceStream = iceAgent.getStream(iceStreamName);
        iceStream.addPairChangeListener(iceStreamPairChangeListener);
        // Notify interested parties of the newly created transport.
        EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
        if (eventAdmin != null)
            eventAdmin.sendEvent(EventFactory.transportCreated(this));
    }
    /**
     * Initializes a new <tt>IceUdpTransportManager</tt> instance.
     *
     * @param conference the <tt>Conference</tt> which created this
     * <tt>TransportManager</tt>.
     * @param controlling {@code true} if the new instance is to serve as a
     * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
     * @param iceStreamName the name of the ICE stream to be created by this
     * instance.
     * @throws IOException if initializing the ICE <tt>Agent</tt> fails
     */
    public IceUdpTransportManager(Conference conference,
                                  boolean controlling,
                                  String iceStreamName)
        throws IOException
    {
        // Default to 2 ICE components (RTP + RTCP).
        this(conference, controlling, 2, iceStreamName);
    }
    /**
     * {@inheritDoc}
     *
     * Assures that no more than one <tt>SctpConnection</tt> is added. Keeps
     * {@link #sctpConnection} and {@link #channelForDtls} up to date.
     *
     * @param channel the <tt>Channel</tt> to add to this transport manager
     * @return <tt>true</tt> if <tt>channel</tt> was added; <tt>false</tt> if
     * this instance is closed, a second <tt>SctpConnection</tt> was offered,
     * or the superclass rejected the channel
     */
    @Override
    public boolean addChannel(Channel channel)
    {
        if (closed)
            return false;
        // Only a single SctpConnection per transport manager is supported.
        if (channel instanceof SctpConnection
                && sctpConnection != null
                && sctpConnection != channel)
        {
            logd("Not adding a second SctpConnection to TransportManager.");
            return false;
        }
        if (!super.addChannel(channel))
            return false;
        if (channel instanceof SctpConnection)
        {
            // When an SctpConnection is added, it automatically replaces
            // channelForDtls, because it needs DTLS packets for the application
            // data inside them.
            sctpConnection = (SctpConnection) channel;
            if (channelForDtls != null)
            {
                // channelForDtls is necessarily an RtpChannel, because we don't
                // add more than one SctpConnection. The SctpConnection socket
                // will automatically accept DTLS.
                RtpChannel rtpChannelForDtls = (RtpChannel) channelForDtls;
                rtpChannelForDtls.getDatagramFilter(false).setAcceptNonRtp(
                        false);
                rtpChannelForDtls.getDatagramFilter(true).setAcceptNonRtp(
                        false);
            }
            channelForDtls = sctpConnection;
        }
        else if (channelForDtls == null)
        {
            channelForDtls = channel;
            RtpChannel rtpChannel = (RtpChannel) channel;
            // The new channelForDtls will always accept DTLS packets on its
            // RTP socket.
            rtpChannel.getDatagramFilter(false).setAcceptNonRtp(true);
            // If we use rtcpmux, we don't want to accept DTLS packets on the
            // RTCP socket, because they will be duplicated from the RTP socket,
            // because both sockets are actually filters on the same underlying
            // socket.
            rtpChannel.getDatagramFilter(true).setAcceptNonRtp(!rtcpmux);
        }
        // Channels added after ICE connectivity has already been established
        // are notified immediately.
        if (iceConnected)
            channel.transportConnected();
        EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
        if (eventAdmin != null)
            eventAdmin.sendEvent(EventFactory.transportChannelAdded(channel));
        return true;
    }
    /**
     * Adds to <tt>iceAgent</tt> videobridge specific candidate harvesters such
     * as an Amazon AWS EC2 specific harvester.
     *
     * @param iceAgent the {@link Agent} that we'd like to append new harvesters
     * to.
     * @param rtcpmux whether rtcp-mux (RTP/RTCP multiplexing) will be used by
     * this <tt>IceUdpTransportManager</tt>.
     */
    private void appendVideobridgeHarvesters(Agent iceAgent,
                                             boolean rtcpmux)
    {
        boolean enableDynamicHostHarvester = true;
        if (rtcpmux)
        {
            // The application-wide TCP and single-port UDP harvesters are only
            // added when rtcp-mux is in use.
            if (tcpHostHarvester != null)
                iceAgent.addCandidateHarvester(tcpHostHarvester);
            if (singlePortHarvesters != null)
            {
                for (CandidateHarvester harvester : singlePortHarvesters)
                {
                    iceAgent.addCandidateHarvester(harvester);
                    enableDynamicHostHarvester = false;
                }
            }
        }
        // Use dynamic ports iff we're not using "single port" mode.
        iceAgent.setUseHostHarvester(enableDynamicHostHarvester);
        ConfigurationService cfg
            = ServiceUtils.getService(
                    getBundleContext(),
                    ConfigurationService.class);
        //if no configuration is found then we simply log and bail
        if (cfg == null)
        {
            logger.info("No configuration found. "
                        + "Will continue without custom candidate harvesters");
            return;
        }
        HarvesterConfiguration addressesConfig
            = HarvesterConfiguration.getInstance(cfg);
        MappingCandidateHarvester mappingHarvester
            = addressesConfig.getCandidateHarvester();
        //append the mapping harvester
        if( mappingHarvester != null)
        {
            iceAgent.addCandidateHarvester(mappingHarvester);
        }
        if(addressesConfig.getPublicAddress() != null
            && addressesConfig.getLocalAddress() != null)
        {
            //if configured, append a mapping harvester.
            MappingCandidateHarvester natHarvester
                = new MappingCandidateHarvester(
                        addressesConfig.getPublicAddress(),
                        addressesConfig.getLocalAddress());
            iceAgent.addCandidateHarvester(natHarvester);
        }
    }
/**
* Determines whether at least one <tt>LocalCandidate</tt> of a specific ICE
* <tt>Component</tt> can reach (in the terms of the ice4j library) a
* specific <tt>RemoteCandidate</tt>
*
* @param component the ICE <tt>Component</tt> which contains the
* <tt>LocalCandidate</tt>s to check whether at least one of them can reach
* the specified <tt>remoteCandidate</tt>
* @param remoteCandidate the <tt>RemoteCandidate</tt> to check whether at
* least one of the <tt>LocalCandidate</tt>s of the specified
* <tt>component</tt> can reach it
* @return <tt>true</tt> if at least one <tt>LocalCandidate</tt> of the
* specified <tt>component</tt> can reach the specified
* <tt>remoteCandidate</tt>
*/
private boolean canReach(
Component component,
RemoteCandidate remoteCandidate)
{
for (LocalCandidate localCandidate : component.getLocalCandidates())
{
if (localCandidate.canReach(remoteCandidate))
return true;
}
return false;
}
    /**
     * {@inheritDoc}
     *
     * TODO: In the case of multiple {@code Channel}s in one
     * {@code TransportManager} it is unclear how to handle changes to the
     * {@code initiator} property from individual channels.
     */
    @Override
    protected void channelPropertyChange(PropertyChangeEvent ev)
    {
        super.channelPropertyChange(ev);
        // The block below used to propagate a channel's "initiator" property
        // onto the shared ICE agent's controlling role; it is disabled because
        // of the ambiguity described in the TODO above.
        /*
        if (Channel.INITIATOR_PROPERTY.equals(ev.getPropertyName())
                && iceAgent != null)
        {
            Channel channel = (Channel) ev.getSource();
            iceAgent.setControlling(channel.isInitiator());
        }
        */
    }
    /**
     * {@inheritDoc}
     *
     * Closes all channels, stops and cleans up the DTLS control, removes the
     * listeners installed on the ICE stream/agent and frees the ICE agent.
     */
    @Override
    public synchronized void close()
    {
        if (!closed)
        {
            // Set this early to prevent double closing when the last channel
            // is removed.
            closed = true;
            for (Channel channel : getChannels())
                close(channel);
            if (dtlsControl != null)
            {
                dtlsControl.start(null); //stop
                dtlsControl.cleanup(this);
            }
            // DatagramSocket[] datagramSockets = getStreamConnectorSockets();
            if (iceStream != null)
            {
                iceStream.removePairStateChangeListener(
                        iceStreamPairChangeListener);
            }
            if (iceAgent != null)
            {
                iceAgent.removeStateChangeListener(iceAgentStateChangeListener);
                iceAgent.free();
                iceAgent = null;
            }
            /*
             * It seems that the ICE agent takes care of closing these.
             *
            if (datagramSockets != null)
            {
                if (datagramSockets[0] != null)
                    datagramSockets[0].close();
                if (datagramSockets[1] != null)
                    datagramSockets[1].close();
            }
            */
            synchronized (connectThreadSyncRoot)
            {
                // Wake up a thread (if any) which is waiting for ICE to
                // establish a connection.
                if (connectThread != null)
                    connectThread.interrupt();
            }
            super.close();
        }
    }
    /**
     * {@inheritDoc}
     *
     * Keeps {@link #sctpConnection} and {@link #channelForDtls} up to date,
     * closes the channel's sockets, and closes this transport manager itself
     * when its last channel has been removed.
     *
     * @param channel the <tt>Channel</tt> to close and remove
     * @return <tt>true</tt> if <tt>channel</tt> was removed by this call
     */
    @Override
    public boolean close(Channel channel)
    {
        boolean removed = super.close(channel);
        if (removed)
        {
            if (channel == sctpConnection)
            {
                sctpConnection = null;
            }
            if (channel == channelForDtls)
            {
                // Elect a new channel to receive DTLS packets: prefer the
                // SctpConnection (if any), otherwise any remaining RtpChannel.
                if (sctpConnection != null)
                {
                    channelForDtls = sctpConnection;
                }
                else if (channel instanceof RtpChannel)
                {
                    RtpChannel newChannelForDtls = null;
                    for (Channel c : getChannels())
                    {
                        if (c instanceof RtpChannel)
                            newChannelForDtls = (RtpChannel) c;
                    }
                    if (newChannelForDtls != null)
                    {
                        newChannelForDtls.getDatagramFilter(false)
                                .setAcceptNonRtp(true);
                        newChannelForDtls.getDatagramFilter(true)
                                .setAcceptNonRtp(!rtcpmux);
                    }
                    channelForDtls = newChannelForDtls;
                }
                if (channel instanceof RtpChannel)
                {
                    // The channel being closed must no longer accept DTLS.
                    RtpChannel rtpChannel = (RtpChannel) channel;
                    rtpChannel.getDatagramFilter(false).setAcceptNonRtp(false);
                    rtpChannel.getDatagramFilter(true).setAcceptNonRtp(false);
                }
            }
            try
            {
                StreamConnector connector = channel.getStreamConnector();
                if (connector != null)
                {
                    DatagramSocket datagramSocket = connector.getDataSocket();
                    if (datagramSocket != null)
                        datagramSocket.close();
                    datagramSocket = connector.getControlSocket();
                    if (datagramSocket != null)
                        datagramSocket.close();
                    Socket socket = connector.getDataTCPSocket();
                    if (socket != null)
                        socket.close();
                    socket = connector.getControlTCPSocket();
                    if (socket != null)
                        socket.close();
                }
            }
            catch (IOException ioe)
            {
                logd("Failed to close sockets when closing a channel:" + ioe);
            }
            EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
            if (eventAdmin != null)
            {
                eventAdmin.sendEvent(
                        EventFactory.transportChannelRemoved(channel));
            }
            channel.transportClosed();
        }
        // Close this transport manager as a whole when no channels remain.
        if (getChannels().isEmpty())
            close();
        return removed;
    }
/**
* Initializes a new {@code DtlsControlImpl} instance.
*
* @return a new {@code DtlsControlImpl} instance
*/
private DtlsControlImpl createDtlsControl()
{
DtlsControlImpl dtlsControl
= new DtlsControlImpl(/* srtpDisabled */ false);
dtlsControl.registerUser(this);
dtlsControl.setSetup(
controlling
? DtlsControl.Setup.PASSIVE
: DtlsControl.Setup.ACTIVE);
// XXX For DTLS, the media type doesn't matter (as long as it's not
// null).
// XXX The actual start of the DTLS servers/clients will be delayed
// until an rtpConnector is set (when a MediaStream with this
// SrtpControl starts or is assigned a target).
dtlsControl.start(MediaType.AUDIO);
return dtlsControl;
}
    /**
     * Initializes a new <tt>Agent</tt> instance which implements the ICE
     * protocol and which is to be used by this instance to implement the Jingle
     * ICE-UDP transport.
     *
     * @param controlling whether the new agent is to be the controlling ICE
     * agent
     * @param iceStreamName the name of the single ICE stream to create
     * @param rtcpmux whether rtcp-mux (RTP/RTCP multiplexing) will be used
     * @return a new <tt>Agent</tt> instance which implements the ICE protocol
     * and which is to be used by this instance to implement the Jingle ICE-UDP
     * transport
     * @throws IOException if initializing a new <tt>Agent</tt> instance for the
     * purposes of this <tt>TransportManager</tt> fails
     */
    private Agent createIceAgent(boolean controlling,
                                 String iceStreamName,
                                 boolean rtcpmux)
        throws IOException
    {
        NetworkAddressManagerService nams
            = ServiceUtils.getService(
                    getBundleContext(),
                    NetworkAddressManagerService.class);
        Agent iceAgent = nams.createIceAgent();
        //add videobridge specific harvesters such as a mapping and an Amazon
        //AWS EC2 harvester
        appendVideobridgeHarvesters(iceAgent, rtcpmux);
        iceAgent.setControlling(controlling);
        iceAgent.setPerformConsentFreshness(true);
        PortTracker portTracker = JitsiTransportManager.getPortTracker(null);
        int portBase = portTracker.getPort();
        IceMediaStream iceStream
            = nams.createIceStream(
                    numComponents,
                    portBase,
                    iceStreamName,
                    iceAgent);
        // Attempt to minimize subsequent bind retries: see if we have allocated
        // any ports from the dynamic range, and if so update the port tracker.
        // Do NOT update the port tracker with non-dynamic ports (e.g. 4443
        // coming from TCP) because this will force it to revert back to its
        // configured min port. When maxPort is reached, allocation will begin
        // from minPort again, so we don't have to worry about wraps.
        int maxAllocatedPort
            = getMaxAllocatedPort(
                    iceStream,
                    portTracker.getMinPort(),
                    portTracker.getMaxPort());
        if (maxAllocatedPort > 0)
        {
            int nextPort = 1 + maxAllocatedPort;
            portTracker.setNextPort(nextPort);
            if (logger.isDebugEnabled())
                logger.debug("Updating the port tracker min port: " + nextPort);
        }
        return iceAgent;
    }
/**
* @return the highest local port used by any of the local candidates of
* {@code iceStream}, which falls in the range [{@code min}, {@code max}].
*/
private int getMaxAllocatedPort(IceMediaStream iceStream, int min, int max)
{
return
Math.max(
getMaxAllocatedPort(
iceStream.getComponent(Component.RTP),
min, max),
getMaxAllocatedPort(
iceStream.getComponent(Component.RTCP),
min, max));
}
/**
* @return the highest local port used by any of the local candidates of
* {@code component}, which falls in the range [{@code min}, {@code max}].
*/
private int getMaxAllocatedPort(Component component, int min, int max)
{
int maxAllocatedPort = -1;
if (component != null)
{
for (LocalCandidate candidate : component.getLocalCandidates())
{
int candidatePort = candidate.getTransportAddress().getPort();
if (min <= candidatePort
&& candidatePort <= max
&& maxAllocatedPort < candidatePort)
{
maxAllocatedPort = candidatePort;
}
}
}
return maxAllocatedPort;
}
    /**
     * {@inheritDoc}
     *
     * Describes on <tt>pe</tt> the local ICE state (ufrag, password and
     * candidates), the rtcp-mux indication and the local DTLS fingerprint.
     * Does nothing if this instance has been closed.
     */
    @Override
    protected void describe(IceUdpTransportPacketExtension pe)
    {
        if (!closed)
        {
            pe.setPassword(iceAgent.getLocalPassword());
            pe.setUfrag(iceAgent.getLocalUfrag());
            for (Component component : iceStream.getComponents())
            {
                List<LocalCandidate> candidates
                    = component.getLocalCandidates();
                if (candidates != null && !candidates.isEmpty())
                {
                    for (LocalCandidate candidate : candidates)
                    {
                        if (candidate.getTransport() == Transport.TCP
                            && tcpHostHarvesterMappedPort != -1
                            && candidate.getTransportAddress().getPort()
                                != tcpHostHarvesterMappedPort)
                        {
                            // In case we use a mapped port with the TCP
                            // harvester, do not advertise the candidates with
                            // the actual port that we listen on.
                            continue;
                        }
                        describe(candidate, pe);
                    }
                }
            }
            if (rtcpmux)
                pe.addChildExtension(new RtcpmuxPacketExtension());
            describeDtlsControl(pe);
        }
    }
    /**
     * Adds a new <tt>CandidatePacketExtension</tt> to <tt>pe</tt>, sets the
     * values of its properties to the values of the respective properties of
     * <tt>candidate</tt>.
     *
     * @param candidate the <tt>LocalCandidate</tt> from which to take the
     * values of the properties to set.
     * @param pe the <tt>IceUdpTransportPacketExtension</tt> to which to add a
     * new <tt>CandidatePacketExtension</tt>.
     */
    private void describe(
            LocalCandidate candidate,
            IceUdpTransportPacketExtension pe)
    {
        CandidatePacketExtension candidatePE = new CandidatePacketExtension();
        org.ice4j.ice.Component component = candidate.getParentComponent();
        candidatePE.setComponent(component.getComponentID());
        candidatePE.setFoundation(candidate.getFoundation());
        candidatePE.setGeneration(
                component.getParentStream().getParentAgent().getGeneration());
        candidatePE.setID(generateCandidateID(candidate));
        candidatePE.setNetwork(0);
        candidatePE.setPriority(candidate.getPriority());
        // Advertise 'tcp' candidates for which SSL is enabled as 'ssltcp'
        // (although internally their transport protocol remains "tcp")
        Transport transport = candidate.getTransport();
        if (transport == Transport.TCP && candidate.isSSL())
        {
            transport = Transport.SSLTCP;
        }
        candidatePE.setProtocol(transport.toString());
        if (transport == Transport.TCP || transport == Transport.SSLTCP)
        {
            candidatePE.setTcpType(candidate.getTcpType());
        }
        candidatePE.setType(
                CandidateType.valueOf(candidate.getType().toString()));
        TransportAddress transportAddress = candidate.getTransportAddress();
        candidatePE.setIP(transportAddress.getHostAddress());
        candidatePE.setPort(transportAddress.getPort());
        TransportAddress relatedAddress = candidate.getRelatedAddress();
        // rel-addr/rel-port are only described for candidates which actually
        // have a related address (e.g. reflexive/relayed candidates).
        if (relatedAddress != null)
        {
            candidatePE.setRelAddr(relatedAddress.getHostAddress());
            candidatePE.setRelPort(relatedAddress.getPort());
        }
        pe.addChildExtension(candidatePE);
    }
/**
* Sets the values of the properties of a specific
* <tt>IceUdpTransportPacketExtension</tt> to the values of the
* respective properties of {@link #dtlsControl}
*
* @param transportPE the <tt>IceUdpTransportPacketExtension</tt> on which
* to set the values of the properties of <tt>dtlsControl</tt>
*/
private void describeDtlsControl(IceUdpTransportPacketExtension transportPE)
{
String fingerprint = dtlsControl.getLocalFingerprint();
String hash = dtlsControl.getLocalFingerprintHashFunction();
DtlsFingerprintPacketExtension fingerprintPE
= transportPE.getFirstChildOfType(
DtlsFingerprintPacketExtension.class);
if (fingerprintPE == null)
{
fingerprintPE = new DtlsFingerprintPacketExtension();
transportPE.addChildExtension(fingerprintPE);
}
fingerprintPE.setFingerprint(fingerprint);
fingerprintPE.setHash(hash);
}
    /**
     * Sets up {@link #dtlsControl} according to <tt>transport</tt>, adds all
     * (supported) remote candidates from <tt>transport</tt> to
     * {@link #iceAgent} and starts {@link #iceAgent} if it isn't started
     * already.
     *
     * @param transport the <tt>IceUdpTransportPacketExtension</tt> which
     * describes the remote transport (rtcp-mux indication, DTLS fingerprints,
     * ufrag/password and candidates)
     */
    private synchronized void doStartConnectivityEstablishment(
            IceUdpTransportPacketExtension transport)
    {
        if (closed)
            return;
        // Reflect the transport's rtcpmux onto this instance.
        if (transport.isRtcpMux())
        {
            rtcpmux = true;
            if (channelForDtls != null && channelForDtls instanceof RtpChannel)
            {
                // With rtcp-mux, DTLS packets must not be accepted on the RTCP
                // socket (they would be duplicates of the RTP socket's).
                ((RtpChannel) channelForDtls)
                    .getDatagramFilter(true)
                        .setAcceptNonRtp(false);
            }
        }
        dtlsControl.setRtcpmux(rtcpmux);
        // Reflect the transport's remote fingerprints onto this instance.
        List<DtlsFingerprintPacketExtension> dfpes
            = transport.getChildExtensionsOfType(
                    DtlsFingerprintPacketExtension.class);
        if (!dfpes.isEmpty())
        {
            Map<String, String> remoteFingerprints = new LinkedHashMap<>();
            for (DtlsFingerprintPacketExtension dfpe : dfpes)
            {
                remoteFingerprints.put(
                        dfpe.getHash(),
                        dfpe.getFingerprint());
            }
            dtlsControl.setRemoteFingerprints(remoteFingerprints);
        }
        IceProcessingState state = iceAgent.getState();
        if (IceProcessingState.COMPLETED.equals(state)
                || IceProcessingState.TERMINATED.equals(state))
        {
            // Adding candidates to a completed Agent is unnecessary and has
            // been observed to cause problems.
            return;
        }
        // If ICE is running already, we try to update the checklists with the
        // candidates. Note that this is a best effort.
        boolean iceAgentStateIsRunning
            = IceProcessingState.RUNNING.equals(state);
        int remoteCandidateCount = 0;
        if (rtcpmux)
        {
            // With rtcp-mux there is no need for a separate RTCP component.
            Component rtcpComponent = iceStream.getComponent(Component.RTCP);
            if (rtcpComponent != null)
                iceStream.removeComponent(rtcpComponent);
        }
        // Different stream may have different ufrag/password
        String ufrag = transport.getUfrag();
        if (ufrag != null)
            iceStream.setRemoteUfrag(ufrag);
        String password = transport.getPassword();
        if (password != null)
            iceStream.setRemotePassword(password);
        List<CandidatePacketExtension> candidates
            = transport.getChildExtensionsOfType(
                    CandidatePacketExtension.class);
        if (iceAgentStateIsRunning && candidates.isEmpty())
            return;
        // Sort the remote candidates (host < reflexive < relayed) in order
        // to create first the host, then the reflexive, the relayed
        // candidates and thus be able to set the relative-candidate
        // matching the rel-addr/rel-port attribute.
        Collections.sort(candidates);
        int generation = iceAgent.getGeneration();
        for (CandidatePacketExtension candidate : candidates)
        {
            // Is the remote candidate from the current generation of the
            // iceAgent?
            if (candidate.getGeneration() != generation)
                continue;
            if (rtcpmux && Component.RTCP == candidate.getComponent())
            {
                logger.warn("Received an RTCP candidate, but we're using"
                            + " rtcp-mux. Ignoring.");
                continue;
            }
            Component component
                = iceStream.getComponent(candidate.getComponent());
            String relAddr;
            int relPort;
            TransportAddress relatedAddress = null;
            if (((relAddr = candidate.getRelAddr()) != null)
                    && ((relPort = candidate.getRelPort()) != -1))
            {
                relatedAddress
                    = new TransportAddress(
                            relAddr,
                            relPort,
                            Transport.parse(candidate.getProtocol()));
            }
            RemoteCandidate relatedCandidate
                = component.findRemoteCandidate(relatedAddress);
            RemoteCandidate remoteCandidate
                = new RemoteCandidate(
                        new TransportAddress(
                                candidate.getIP(),
                                candidate.getPort(),
                                Transport.parse(
                                        candidate.getProtocol())),
                        component,
                        org.ice4j.ice.CandidateType.parse(
                                candidate.getType().toString()),
                        candidate.getFoundation(),
                        candidate.getPriority(),
                        relatedCandidate);
            // XXX IceUdpTransportManager harvests host candidates only and
            // the ICE Components utilize the UDP protocol/transport only at
            // the time of this writing. The ice4j library will, of course,
            // check the theoretical reachability between the local and the
            // remote candidates. However, we would like (1) to not mess
            // with a possibly running iceAgent and (2) to return a
            // consistent return value.
            if (!canReach(component, remoteCandidate))
                continue;
            if (iceAgentStateIsRunning)
                component.addUpdateRemoteCandidates(remoteCandidate);
            else
                component.addRemoteCandidate(remoteCandidate);
            remoteCandidateCount++;
        }
        if (iceAgentStateIsRunning)
        {
            if (remoteCandidateCount == 0)
            {
                // XXX Effectively, the check above but realizing that all
                // candidates were ignored:
                // iceAgentStateIsRunning && candidates.isEmpty().
            }
            else
            {
                // update all components of all streams
                for (IceMediaStream stream : iceAgent.getStreams())
                {
                    for (Component component : stream.getComponents())
                        component.updateRemoteCandidates();
                }
            }
        }
        else if (remoteCandidateCount != 0)
        {
            // Once again because the ICE Agent does not support adding
            // candidates after the connectivity establishment has been started
            // and because multiple transport-info JingleIQs may be used to send
            // the whole set of transport candidates from the remote peer to the
            // local peer, do not really start the connectivity establishment
            // until we have at least one remote candidate per ICE Component.
            for (IceMediaStream stream : iceAgent.getStreams())
            {
                for (Component component : stream.getComponents())
                {
                    if (component.getRemoteCandidateCount() < 1)
                    {
                        remoteCandidateCount = 0;
                        break;
                    }
                }
                if (remoteCandidateCount == 0)
                    break;
            }
            if (remoteCandidateCount != 0)
                iceAgent.startConnectivityEstablishment();
        }
        else if (iceStream.getRemoteUfrag() != null
                && iceStream.getRemotePassword() != null)
        {
            // We don't have any remote candidates, but we already know the
            // remote ufrag and password, so we can start ICE.
            logger.info("Starting ICE agent without remote candidates.");
            iceAgent.startConnectivityEstablishment();
        }
    }
/**
* Generates an ID to be set on a <tt>CandidatePacketExtension</tt> to
* represent a specific <tt>LocalCandidate</tt>.
*
* @param candidate the <tt>LocalCandidate</tt> whose ID is to be generated
* @return an ID to be set on a <tt>CandidatePacketExtension</tt> to
* represent the specified <tt>candidate</tt>
*/
private String generateCandidateID(LocalCandidate candidate)
{
StringBuilder candidateID = new StringBuilder();
candidateID.append(conference.getID());
candidateID.append(Long.toHexString(hashCode()));
Agent iceAgent
= candidate.getParentComponent().getParentStream().getParentAgent();
candidateID.append(Long.toHexString(iceAgent.hashCode()));
candidateID.append(Long.toHexString(iceAgent.getGeneration()));
candidateID.append(Long.toHexString(candidate.hashCode()));
return candidateID.toString();
}
    /**
     * Gets the <tt>Conference</tt> object that this <tt>TransportManager</tt>
     * is associated with.
     *
     * @return the <tt>Conference</tt> which created this instance
     */
    public Conference getConference()
    {
        return conference;
    }
    /**
     * Gets the number of {@link org.ice4j.ice.Component}-s to create in
     * {@link #iceStream}.
     *
     * @return the number of ICE components
     */
    public int getNumComponents()
    {
        return numComponents;
    }
    /**
     * Gets the <tt>Agent</tt> which implements the ICE protocol and which is
     * used by this instance to implement the Jingle ICE-UDP transport.
     *
     * @return the ICE <tt>Agent</tt>, or <tt>null</tt> after this instance has
     * been closed
     */
    public Agent getAgent()
    {
        return iceAgent;
    }
    /**
     * Gets the <tt>IceMediaStream</tt> of {@link #iceAgent} associated with the
     * <tt>Channel</tt> of this instance.
     *
     * @return the <tt>IceMediaStream</tt> of this instance
     */
    public IceMediaStream getIceStream()
    {
        return iceStream;
    }
    /**
     * Returns a boolean value determining whether this
     * <tt>IceUdpTransportManager</tt> will serve as the controlling or the
     * controlled ICE agent.
     *
     * @return <tt>true</tt> if this instance serves as the controlling ICE
     * agent; <tt>false</tt> if it serves as the controlled one
     */
    public boolean isControlling()
    {
        return controlling;
    }
    /**
     * Returns whether this {@code IceUdpTransportManager} is using rtcp-mux.
     * Note that the value may change after construction, if the remote
     * description indicates rtcp-mux.
     *
     * @return {@code true} if this {@code IceUdpTransportManager} is using
     * rtcp-mux; otherwise, {@code false}
     */
    public boolean isRtcpmux()
    {
        return rtcpmux;
    }
/**
* Gets the <tt>BundleContext</tt> associated with the <tt>Channel</tt>
* that this {@link net.java.sip.communicator.service.protocol.media
* .TransportManager} is servicing. The method is a
* convenience which gets the <tt>BundleContext</tt> associated with the
* XMPP component implementation in which the <tt>Videobridge</tt>
* associated with this instance is executing.
*
* @return the <tt>BundleContext</tt> associated with this
* <tt>IceUdpTransportManager</tt>
*/
public BundleContext getBundleContext()
{
return conference != null ? conference.getBundleContext() : null;
}
    /**
     * {@inheritDoc}
     *
     * All channels of this <tt>TransportManager</tt> share the single
     * <tt>DtlsControlImpl</tt> instance of this instance.
     */
    @Override
    public DtlsControlImpl getDtlsControl(Channel channel)
    {
        return dtlsControl;
    }
/**
 * Extracts the <tt>IceSocketWrapper</tt> from the selected
 * <tt>CandidatePair</tt> (if any) of a specific
 * {@link org.ice4j.ice.Component}.
 *
 * @param component the <tt>Component</tt> whose selected pair's socket is
 * to be returned.
 * @return the <tt>IceSocketWrapper</tt> of the selected pair of
 * <tt>component</tt>, or <tt>null</tt> if no pair has been selected.
 */
private IceSocketWrapper getSocketForComponent(Component component)
{
    CandidatePair pair = component.getSelectedPair();
    if (pair == null)
    {
        return null;
    }
    return pair.getIceSocketWrapper();
}
/**
 * {@inheritDoc}
 *
 * For an <tt>SctpConnection</tt>, a connector carrying only DTLS packets
 * (selected via {@link DTLSDatagramFilter}) is returned. For an
 * <tt>RtpChannel</tt>, a UDP or TCP connector is returned depending on
 * the transport of the ICE-selected socket.
 */
@Override
public StreamConnector getStreamConnector(Channel channel)
{
    // Only serve channels actually managed by this transport manager.
    if (!getChannels().contains(channel))
        return null;

    IceSocketWrapper[] iceSockets = getStreamConnectorSockets();
    IceSocketWrapper iceSocket0;

    // Without at least the RTP-component socket nothing can be built.
    if (iceSockets == null || (iceSocket0 = iceSockets[0]) == null)
        return null;

    if (channel instanceof SctpConnection)
    {
        // SCTP runs over DTLS, so hand the SctpConnection a socket which
        // accepts only DTLS packets from the multiplexed ICE socket.
        DatagramSocket udpSocket = iceSocket0.getUDPSocket();

        if (udpSocket != null)
        {
            if (udpSocket instanceof MultiplexingDatagramSocket)
            {
                MultiplexingDatagramSocket multiplexing
                    = (MultiplexingDatagramSocket) udpSocket;
                try
                {
                    DatagramSocket dtlsSocket
                        = multiplexing.getSocket(new DTLSDatagramFilter());

                    return new DefaultStreamConnector(dtlsSocket, null);
                }
                catch (IOException ioe)
                {
                    logger.warn("Failed to create DTLS socket: " + ioe);
                }
            }
        }
        else
        {
            // No UDP socket: ICE selected a TCP candidate pair.
            Socket tcpSocket = iceSocket0.getTCPSocket();

            if (tcpSocket != null
                    && tcpSocket instanceof MultiplexingSocket)
            {
                MultiplexingSocket multiplexing
                    = (MultiplexingSocket) tcpSocket;
                try
                {
                    Socket dtlsSocket
                        = multiplexing.getSocket(new DTLSDatagramFilter());

                    return new DefaultTCPStreamConnector(dtlsSocket, null);
                }
                catch(IOException ioe)
                {
                    logger.warn("Failed to create DTLS socket: " + ioe);
                }
            }
        }
        return null;
    }

    if (! (channel instanceof RtpChannel))
        return null;

    DatagramSocket udpSocket0;
    IceSocketWrapper iceSocket1 = iceSockets[1];
    RtpChannel rtpChannel = (RtpChannel) channel;

    // Choose UDP or TCP based on the type of the ICE-selected socket.
    if ((udpSocket0 = iceSocket0.getUDPSocket()) != null)
    {
        DatagramSocket udpSocket1
            = (iceSocket1 == null) ? null : iceSocket1.getUDPSocket();

        return
            getUDPStreamConnector(
                    rtpChannel,
                    new DatagramSocket[] { udpSocket0, udpSocket1 });
    }
    else
    {
        Socket tcpSocket0 = iceSocket0.getTCPSocket();
        Socket tcpSocket1
            = (iceSocket1 == null) ? null : iceSocket1.getTCPSocket();

        return
            getTCPStreamConnector(
                    rtpChannel,
                    new Socket[]{tcpSocket0, tcpSocket1});
    }
}
/**
 * Gets the <tt>IceSocketWrapper</tt>s of the selected
 * <tt>CandidatePair</tt>(s) of the ICE agent: index 0 holds the RTP
 * component's socket, index 1 the RTCP component's (or <tt>null</tt> when
 * rtcp-mux is in use or no pair has been selected).
 * TODO cache them in this instance?
 *
 * @return a two-element array with the RTP and RTCP sockets (entries may
 * be <tt>null</tt>).
 */
private IceSocketWrapper[] getStreamConnectorSockets()
{
    IceSocketWrapper[] sockets = new IceSocketWrapper[2];

    // RTP component (index 0).
    Component rtpComponent = iceStream.getComponent(Component.RTP);
    if (rtpComponent != null)
    {
        sockets[0] = getSocketForComponent(rtpComponent);
    }

    // RTCP component (index 1) -- only when it exists and RTCP is not
    // multiplexed onto the RTP component.
    if (!rtcpmux && numComponents > 1)
    {
        Component rtcpComponent = iceStream.getComponent(Component.RTCP);
        if (rtcpComponent != null)
        {
            sockets[1] = getSocketForComponent(rtcpComponent);
        }
    }
    return sockets;
}
/**
 * Builds the <tt>MediaStreamTarget</tt> (remote RTP and RTCP addresses)
 * from the remote candidates of the selected <tt>CandidatePair</tt>s of
 * {@link #iceStream}.
 *
 * @return the <tt>MediaStreamTarget</tt> of the established ICE
 * connectivity, or <tt>null</tt> if no address could be determined.
 */
private MediaStreamTarget getStreamTarget()
{
    MediaStreamTarget streamTarget = null;
    // [0] = RTP address, [1] = RTCP address.
    InetSocketAddress[] streamTargetAddresses = new InetSocketAddress[2];
    int streamTargetAddressCount = 0;

    Component rtpComponent = iceStream.getComponent(Component.RTP);
    if (rtpComponent != null)
    {
        CandidatePair selectedPair = rtpComponent.getSelectedPair();

        if (selectedPair != null)
        {
            InetSocketAddress streamTargetAddress
                = selectedPair
                    .getRemoteCandidate()
                        .getTransportAddress();

            if (streamTargetAddress != null)
            {
                streamTargetAddresses[0] = streamTargetAddress;
                streamTargetAddressCount++;
            }
        }
    }
    if (rtcpmux)
    {
        // With rtcp-mux, RTCP flows to the same remote address as RTP.
        streamTargetAddresses[1] = streamTargetAddresses[0];
        streamTargetAddressCount++;
    }
    else if (numComponents > 1)
    {
        Component rtcpComponent = iceStream.getComponent(Component.RTCP);

        if (rtcpComponent != null)
        {
            CandidatePair selectedPair = rtcpComponent.getSelectedPair();

            if (selectedPair != null)
            {
                InetSocketAddress streamTargetAddress
                    = selectedPair
                        .getRemoteCandidate()
                            .getTransportAddress();

                if (streamTargetAddress != null)
                {
                    streamTargetAddresses[1] = streamTargetAddress;
                    streamTargetAddressCount++;
                }
            }
        }
    }
    if (streamTargetAddressCount > 0)
    {
        streamTarget
            = new MediaStreamTarget(
                    streamTargetAddresses[0 /* RTP */],
                    streamTargetAddresses[1 /* RTCP */]);
    }
    return streamTarget;
}
/**
 * {@inheritDoc}
 *
 * The <tt>channel</tt> argument is ignored: all channels of this
 * transport manager share the same stream target derived from the
 * ICE-selected candidate pairs.
 */
@Override
public MediaStreamTarget getStreamTarget(Channel channel)
{
    return getStreamTarget();
}
/**
 * Creates and returns a TCP <tt>StreamConnector</tt> to be used by a
 * specific <tt>RtpChannel</tt>, using <tt>iceSockets</tt> as the
 * underlying <tt>Socket</tt>s.
 *
 * Does not use <tt>iceSockets</tt> directly, but creates
 * <tt>MultiplexedSocket</tt> instances on top of them, filtered by the
 * channel's RTP/RTCP <tt>DatagramFilter</tt>s.
 *
 * @param rtpChannel the <tt>RtpChannel</tt> which is to use the created
 * <tt>StreamConnector</tt>.
 * @param iceSockets the <tt>Socket</tt>s which are to be used by the
 * created <tt>StreamConnector</tt>.
 * @return a TCP <tt>StreamConnector</tt> with the <tt>Socket</tt>s
 * given in <tt>iceSockets</tt> to be used by a specific
 * <tt>RtpChannel</tt>, or <tt>null</tt> if no filtered socket could be
 * created.
 */
private StreamConnector getTCPStreamConnector(RtpChannel rtpChannel,
                                              Socket[] iceSockets)
{
    StreamConnector connector = null;

    if (iceSockets != null)
    {
        Socket iceSocket0 = iceSockets[0];
        Socket channelSocket0 = null;

        if (iceSocket0 != null && iceSocket0 instanceof MultiplexingSocket)
        {
            MultiplexingSocket multiplexing
                = (MultiplexingSocket) iceSocket0;

            try
            {
                channelSocket0
                    = multiplexing.getSocket(
                            rtpChannel.getDatagramFilter(false /* RTP */));
            }
            catch (SocketException se) // never thrown
            {}
        }

        // With rtcp-mux, the RTCP sub-socket is carved out of the same
        // underlying socket as RTP.
        Socket iceSocket1 = rtcpmux ? iceSocket0 : iceSockets[1];
        Socket channelSocket1 = null;

        if (iceSocket1 != null && iceSocket1 instanceof MultiplexingSocket)
        {
            MultiplexingSocket multiplexing
                = (MultiplexingSocket) iceSocket1;

            try
            {
                channelSocket1
                    = multiplexing.getSocket(
                            rtpChannel.getDatagramFilter(true /* RTCP */));
            }
            catch (SocketException se) // never thrown
            {}
        }

        if (channelSocket0 != null || channelSocket1 != null)
        {
            connector
                = new DefaultTCPStreamConnector(
                        channelSocket0,
                        channelSocket1,
                        rtcpmux);
        }
    }
    return connector;
}
/**
 * Creates and returns a UDP <tt>StreamConnector</tt> to be used by a
 * specific <tt>RtpChannel</tt>, using <tt>iceSockets</tt> as the
 * underlying <tt>DatagramSocket</tt>s.
 *
 * Does not use <tt>iceSockets</tt> directly, but creates
 * <tt>MultiplexedDatagramSocket</tt> instances on top of them, filtered
 * by the channel's RTP/RTCP <tt>DatagramFilter</tt>s.
 *
 * @param rtpChannel the <tt>RtpChannel</tt> which is to use the created
 * <tt>StreamConnector</tt>.
 * @param iceSockets the <tt>DatagramSocket</tt>s which are to be used by the
 * created <tt>StreamConnector</tt>.
 * @return a UDP <tt>StreamConnector</tt> with the <tt>DatagramSocket</tt>s
 * given in <tt>iceSockets</tt> to be used by a specific
 * <tt>RtpChannel</tt>, or <tt>null</tt> if no filtered socket could be
 * created.
 */
private StreamConnector getUDPStreamConnector(RtpChannel rtpChannel,
                                              DatagramSocket[] iceSockets)
{
    StreamConnector connector = null;

    if (iceSockets != null)
    {
        DatagramSocket iceSocket0 = iceSockets[0];
        DatagramSocket channelSocket0 = null;

        if (iceSocket0 != null
                && iceSocket0 instanceof MultiplexingDatagramSocket)
        {
            MultiplexingDatagramSocket multiplexing
                = (MultiplexingDatagramSocket) iceSocket0;

            try
            {
                channelSocket0
                    = multiplexing.getSocket(
                            rtpChannel.getDatagramFilter(false /* RTP */));
            }
            catch (SocketException se) // never thrown
            {}
        }

        // With rtcp-mux, the RTCP sub-socket is carved out of the same
        // underlying socket as RTP.
        DatagramSocket iceSocket1 = rtcpmux ? iceSocket0 : iceSockets[1];
        DatagramSocket channelSocket1 = null;

        if (iceSocket1 != null
                && iceSocket1 instanceof MultiplexingDatagramSocket)
        {
            MultiplexingDatagramSocket multiplexing
                = (MultiplexingDatagramSocket) iceSocket1;

            try
            {
                channelSocket1
                    = multiplexing.getSocket(
                            rtpChannel.getDatagramFilter(true /* RTCP */));
            }
            catch (SocketException se) // never thrown
            {}
        }

        if (channelSocket0 != null || channelSocket1 != null)
        {
            connector
                = new DefaultStreamConnector(
                        channelSocket0,
                        channelSocket1,
                        rtcpmux);
        }
    }
    return connector;
}
/**
 * {@inheritDoc}
 *
 * @return the XML namespace of the Jingle ICE-UDP transport, i.e.
 * {@link IceUdpTransportPacketExtension#NAMESPACE}.
 */
@Override
public String getXmlNamespace()
{
    return IceUdpTransportPacketExtension.NAMESPACE;
}
/**
 * Notifies this instance about a change of the value of the <tt>state</tt>
 * property of {@link #iceAgent}: logs the transition and fires a
 * transport-state-changed event via the <tt>EventAdmin</tt> (if any).
 *
 * This is a notification/debugging path, so failures are not propagated
 * (except <tt>ThreadDeath</tt>); they are, however, logged rather than
 * silently swallowed so genuine bugs remain visible.
 *
 * @param ev a <tt>PropertyChangeEvent</tt> which specifies the old and new
 * values of the <tt>state</tt> property of {@link #iceAgent}.
 */
private void iceAgentStateChange(PropertyChangeEvent ev)
{
    // Log the changes in the ICE processing state of this
    // IceUdpTransportManager for the purposes of debugging.
    boolean interrupted = false;

    try
    {
        IceProcessingState oldState = (IceProcessingState) ev.getOldValue();
        IceProcessingState newState = (IceProcessingState) ev.getNewValue();

        StringBuilder s
            = new StringBuilder("ICE processing state of ")
                .append(getClass().getSimpleName()).append(" #")
                .append(Integer.toHexString(hashCode()))
                .append(" (for channels");
        for (Channel channel : getChannels())
            s.append(" ").append(channel.getID());
        s.append(") of conference ").append(conference.getID())
            .append(" changed from ").append(oldState).append(" to ")
            .append(newState).append(".");
        logd(s.toString());

        EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
        if (eventAdmin != null)
        {
            eventAdmin.sendEvent(
                    EventFactory.transportStateChanged(
                            this,
                            oldState,
                            newState));
        }
    }
    catch (Throwable t)
    {
        if (t instanceof InterruptedException)
        {
            interrupted = true;
        }
        else if (t instanceof ThreadDeath)
        {
            throw (ThreadDeath) t;
        }
        else
        {
            // Previously swallowed silently, which hides real bugs. Log
            // it, but still do not propagate from this listener.
            logger.warn("Failed to handle ICE agent state change: " + t, t);
        }
    }
    finally
    {
        if (interrupted)
            Thread.currentThread().interrupt();
    }
}
/**
 * Notifies this instance about a change of the value of a property of a
 * <tt>CandidatePair</tt> of {@link #iceStream}. Consent-freshness changes
 * are used to keep the channels of this transport manager alive.
 *
 * @param ev a <tt>PropertyChangeEvent</tt> which specifies the
 * <tt>CandidatePair</tt>, the name of the <tt>CandidatePair</tt> property,
 * and its old and new values
 */
private void iceStreamPairChange(PropertyChangeEvent ev)
{
    String propertyName = ev.getPropertyName();

    if (!IceMediaStream.PROPERTY_PAIR_CONSENT_FRESHNESS_CHANGED.equals(
            propertyName))
    {
        return;
    }

    // TODO we might not necessarily want to keep all channels alive by
    // the ICE connection.
    for (Channel channel : getChannels())
    {
        channel.touch();
    }
}
/**
 * Initializes the static <tt>Harvester</tt> instances used by all
 * <tt>IceUdpTransportManager</tt> instances, that is
 * {@link #tcpHostHarvester} and {@link #singlePortHarvesters}. Runs at
 * most once per JVM (guarded by {@link #staticHarvestersInitialized}).
 *
 * @param cfg the {@link ConfigurationService} which provides values to
 * configurable properties of the behavior/logic of the method
 * implementation
 */
static void initializeStaticHarvesters(ConfigurationService cfg)
{
    synchronized (IceUdpTransportManager.class)
    {
        if (staticHarvestersInitialized)
            return;
        staticHarvestersInitialized = true;

        // Single-port UDP harvesters (disabled by configuring port -1).
        int singlePort = cfg.getInt(SINGLE_PORT_HARVESTER_PORT,
                                    SINGLE_PORT_DEFAULT_VALUE);
        if (singlePort != -1)
        {
            singlePortHarvesters
                = SinglePortUdpHarvester.createHarvesters(singlePort);
            if (singlePortHarvesters.isEmpty())
            {
                singlePortHarvesters = null;
                logger.info("No single-port harvesters created.");
            }
        }

        if (!cfg.getBoolean(DISABLE_TCP_HARVESTER, false))
        {
            int port = cfg.getInt(TCP_HARVESTER_PORT, -1);
            // Fall back to TCP_FALLBACK_PORT only when no port was
            // explicitly configured.
            boolean fallback = false;
            boolean ssltcp = cfg.getBoolean(TCP_HARVESTER_SSLTCP,
                                            TCP_HARVESTER_SSLTCP_DEFAULT);

            if (port == -1)
            {
                port = TCP_DEFAULT_PORT;
                fallback = true;
            }

            try
            {
                tcpHostHarvester = new TcpHarvester(port, ssltcp);
            }
            catch (IOException ioe)
            {
                logger.warn(
                        "Failed to initialize TCP harvester on port " + port
                            + ": " + ioe
                            + (fallback
                                ? ". Retrying on port " + TCP_FALLBACK_PORT
                                : "")
                            + ".");
                // If no fallback is allowed, the method will return.
            }
            if (tcpHostHarvester == null)
            {
                // If TCP_HARVESTER_PORT specified a port, then fallback was
                // disabled. However, if the binding on the port (above)
                // fails, then the method should return.
                if (!fallback)
                    return;

                port = TCP_FALLBACK_PORT;
                try
                {
                    tcpHostHarvester
                        = new TcpHarvester(port, ssltcp);
                }
                catch (IOException ioe)
                {
                    logger.warn(
                            "Failed to initialize TCP harvester on fallback"
                                + " port " + port + ": " + ioe);
                    return;
                }
            }

            if (logger.isInfoEnabled())
            {
                logger.info("Initialized TCP harvester on port " + port
                            + ", using SSLTCP:" + ssltcp);
            }

            HarvesterConfiguration addressesConfig
                = HarvesterConfiguration.getInstance(cfg);
            // if there is mapping addresses configured or discovered
            // use them
            if(addressesConfig.getPublicAddress() != null
                && addressesConfig.getLocalAddress() != null)
            {
                tcpHostHarvester.addMappedAddress(
                        addressesConfig.getPublicAddress().getAddress(),
                        addressesConfig.getLocalAddress().getAddress());
            }

            // Optionally advertise an additional (mapped) port.
            int mappedPort = cfg.getInt(TCP_HARVESTER_MAPPED_PORT, -1);
            if (mappedPort != -1)
            {
                tcpHostHarvesterMappedPort = mappedPort;
                tcpHostHarvester.addMappedPort(mappedPort);
            }
        }
    }
}
/**
 * Notifies all channels of this <tt>TransportManager</tt> that connectivity
 * has been established (and they can now obtain valid values through
 * {@link #getStreamConnector(Channel)} and
 * {@link #getStreamTarget(Channel)}). Also fires a transport-connected
 * event via the <tt>EventAdmin</tt>, if one is available.
 */
private void onIceConnected()
{
    iceConnected = true;

    EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
    if (eventAdmin != null)
    {
        eventAdmin.sendEvent(EventFactory.transportConnected(this));
    }

    for (Channel channel : getChannels())
    {
        channel.transportConnected();
    }
}
/**
 * {@inheritDoc}
 *
 * Starts ICE connectivity establishment and, on first invocation, spawns
 * a single daemon thread which waits for ICE to finish and then notifies
 * the channels via {@link #onIceConnected()}.
 */
@Override
public void startConnectivityEstablishment(
        IceUdpTransportPacketExtension transport)
{
    doStartConnectivityEstablishment(transport);

    synchronized (connectThreadSyncRoot)
    {
        // Only one connect thread per transport manager.
        if (connectThread == null)
        {
            connectThread = new Thread()
            {
                @Override
                public void run()
                {
                    try
                    {
                        wrapupConnectivityEstablishment();
                    }
                    catch (OperationFailedException ofe)
                    {
                        logd("Failed to connect IceUdpTransportManager: "
                                 + ofe);
                        synchronized (connectThreadSyncRoot)
                        {
                            // Allow a subsequent attempt to spawn a new
                            // connect thread.
                            connectThread = null;
                            return;
                        }
                    }

                    // XXX The value of the field iceAgent is null at times.
                    Agent iceAgent = getAgent();
                    if (iceAgent == null)
                    {
                        // This TransportManager has (probably) been closed.
                        return;
                    }

                    IceProcessingState state = iceAgent.getState();

                    if (IceProcessingState.COMPLETED.equals(state)
                            || IceProcessingState.TERMINATED.equals(state))
                    {
                        onIceConnected();
                    }
                    else
                    {
                        logger.warn("Failed to establish ICE connectivity,"
                                    + " state: " + state);
                    }
                }
            };

            connectThread.setDaemon(true);
            connectThread.setName("IceUdpTransportManager connect thread");
            connectThread.start();
        }
    }
}
/**
 * Waits until {@link #iceAgent} exits the RUNNING or WAITING state, i.e.
 * until ICE processing either succeeds or fails, or until this transport
 * manager is closed.
 *
 * @throws OperationFailedException if the transport manager is closed
 * while waiting, or if ICE processing ends in the FAILED state
 */
private void wrapupConnectivityEstablishment()
    throws OperationFailedException
{
    final Object syncRoot = new Object();
    PropertyChangeListener propertyChangeListener
        = new PropertyChangeListener()
        {
            @Override
            public void propertyChange(PropertyChangeEvent ev)
            {
                // Wait for ICE to finish establishing connectivity (or to
                // determine that no connectivity can be successfully
                // established, of course).
                Agent iceAgent = (Agent) ev.getSource();

                if (iceAgent.isOver())
                {
                    iceAgent.removeStateChangeListener(this);
                    if (iceAgent == IceUdpTransportManager.this.iceAgent)
                    {
                        // Wake up the waiting loop below.
                        synchronized (syncRoot)
                        {
                            syncRoot.notify();
                        }
                    }
                }
            }
        };

    // Take a local reference: close() may set the field to null at any
    // moment.
    Agent iceAgent = this.iceAgent;
    if (iceAgent == null)
    {
        // The TransportManager has been closed, so we should return and
        // let the thread finish.
        return;
    }
    iceAgent.addStateChangeListener(propertyChangeListener);

    // Wait for the connectivity checks to finish if they have been started.
    boolean interrupted = false;
    IceProcessingState state = iceAgent.getState();

    synchronized (syncRoot)
    {
        while (IceProcessingState.RUNNING.equals(state)
                || IceProcessingState.WAITING.equals(state))
        {
            try
            {
                // Bounded wait so a missed notify or a close() is noticed.
                syncRoot.wait(1000);
            }
            catch (InterruptedException ie)
            {
                interrupted = true;
            }
            finally
            {
                state = iceAgent.getState();
                if (this.iceAgent == null)
                    break;
            }
        }
    }
    if (interrupted)
        Thread.currentThread().interrupt();

    // Make sure stateChangeListener is removed from iceAgent in case its
    // #propertyChange(PropertyChangeEvent) has never been executed.
    iceAgent.removeStateChangeListener(propertyChangeListener);

    // Check the state of ICE processing and throw an exception if failed.
    if (this.iceAgent == null)
    {
        throw new OperationFailedException(
                "TransportManager closed",
                OperationFailedException.GENERAL_ERROR);
    }
    else if (IceProcessingState.FAILED.equals(state))
    {
        throw new OperationFailedException(
                "ICE failed",
                OperationFailedException.GENERAL_ERROR);
    }
}
/**
 * {@inheritDoc}
 *
 * @return <tt>true</tt> once ICE connectivity has been established (i.e.
 * after {@link #onIceConnected()} has run); otherwise, <tt>false</tt>
 */
@Override
public boolean isConnected()
{
    return iceConnected;
}
}
/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.videobridge;
import java.beans.*;
import java.io.*;
import java.net.*;
import java.util.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.CandidateType;
import net.java.sip.communicator.service.netaddr.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.*;
import org.ice4j.*;
import org.ice4j.ice.*;
import org.ice4j.ice.harvest.*;
import org.ice4j.socket.*;
import org.jitsi.eventadmin.*;
import org.jitsi.impl.neomedia.transform.dtls.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.util.Logger;
import org.osgi.framework.*;
/**
* Implements the Jingle ICE-UDP transport.
*
* @author Lyubomir Marinov
* @author Pawel Domas
* @author Boris Grozev
*/
public class IceUdpTransportManager
extends TransportManager
{
/**
 * The default name of the single <tt>IceStream</tt> that this
 * <tt>TransportManager</tt> will create/use.
 */
private static final String DEFAULT_ICE_STREAM_NAME = "stream";

/**
 * The name of the property which disables the use of a
 * <tt>TcpHarvester</tt>.
 */
private static final String DISABLE_TCP_HARVESTER
    = "org.jitsi.videobridge.DISABLE_TCP_HARVESTER";

/**
 * The name of the property which controls the port number used for
 * <tt>SinglePortUdpHarvester</tt>s.
 */
private static final String SINGLE_PORT_HARVESTER_PORT
    = "org.jitsi.videobridge.SINGLE_PORT_HARVESTER_PORT";

/**
 * The default value of the port to be used for
 * {@code SinglePortUdpHarvester}.
 */
private static final int SINGLE_PORT_DEFAULT_VALUE = 10000;

/**
 * The <tt>Logger</tt> used by the <tt>IceUdpTransportManager</tt> class and
 * its instances to print debug information.
 */
private static final Logger logger
    = Logger.getLogger(IceUdpTransportManager.class);

/**
 * The default port that the <tt>TcpHarvester</tt> will
 * bind to.
 */
private static final int TCP_DEFAULT_PORT = 443;

/**
 * The port on which the <tt>TcpHarvester</tt> will bind to
 * if no port is specifically configured, and binding to
 * <tt>DEFAULT_TCP_PORT</tt> fails (for example, if the process doesn't have
 * the required privileges to bind to a port below 1024).
 */
private static final int TCP_FALLBACK_PORT = 4443;

/**
 * The name of the property which specifies an additional port to be
 * advertised by the TCP harvester.
 */
private static final String TCP_HARVESTER_MAPPED_PORT
    = "org.jitsi.videobridge.TCP_HARVESTER_MAPPED_PORT";

/**
 * The name of the property which controls the port to which the
 * <tt>TcpHarvester</tt> will bind.
 */
private static final String TCP_HARVESTER_PORT
    = "org.jitsi.videobridge.TCP_HARVESTER_PORT";

/**
 * The name of the property which controls the use of ssltcp candidates by
 * <tt>TcpHarvester</tt>.
 */
private static final String TCP_HARVESTER_SSLTCP
    = "org.jitsi.videobridge.TCP_HARVESTER_SSLTCP";

/**
 * The default value of the <tt>TCP_HARVESTER_SSLTCP</tt> property.
 */
private static final boolean TCP_HARVESTER_SSLTCP_DEFAULT = true;

/**
 * The single <tt>TcpHarvester</tt> instance for the
 * application. Initialized (at most once) by
 * {@link #initializeStaticHarvesters(ConfigurationService)}.
 */
private static TcpHarvester tcpHostHarvester = null;

/**
 * The <tt>SinglePortUdpHarvester</tt>s which will be appended to ICE
 * <tt>Agent</tt>s managed by <tt>IceUdpTransportManager</tt> instances.
 */
private static List<SinglePortUdpHarvester> singlePortHarvesters = null;

/**
 * The flag which indicates whether application-wide harvesters, stored
 * in the static fields {@link #tcpHostHarvester} and
 * {@link #singlePortHarvesters} have been initialized.
 */
private static boolean staticHarvestersInitialized = false;

/**
 * The "mapped port" added to {@link #tcpHostHarvester}, or -1.
 */
private static int tcpHostHarvesterMappedPort = -1;
/**
 * Logs a specific <tt>String</tt>. Note: despite the method's name, the
 * message is logged at <em>info</em> level (the body delegates to
 * {@link Logger#info}).
 *
 * @param s the <tt>String</tt> to log
 */
private static void logd(String s)
{
    logger.info(s);
}
/**
 * The single (if any) <tt>Channel</tt> instance, whose sockets are
 * currently configured to accept DTLS packets.
 */
private Channel channelForDtls = null;

/**
 * Whether this <tt>TransportManager</tt> has been closed.
 */
private boolean closed = false;

/**
 * The <tt>Conference</tt> object that this <tt>TransportManager</tt> is
 * associated with.
 */
private final Conference conference;

/**
 * The <tt>Thread</tt> used by this <tt>TransportManager</tt> to wait until
 * {@link #iceAgent} has established a connection.
 */
private Thread connectThread;

/**
 * Used to synchronize access to {@link #connectThread}.
 */
private final Object connectThreadSyncRoot = new Object();

/**
 * The <tt>DtlsControl</tt> that this <tt>TransportManager</tt> uses.
 */
private final DtlsControlImpl dtlsControl;

/**
 * The <tt>Agent</tt> which implements the ICE protocol and which is used
 * by this instance to implement the Jingle ICE-UDP transport. Set to
 * <tt>null</tt> by {@link #close()}.
 */
private Agent iceAgent;

/**
 * The <tt>PropertyChangeListener</tt> which is (to be) notified about
 * changes in the <tt>state</tt> of {@link #iceAgent}.
 */
private final PropertyChangeListener iceAgentStateChangeListener
    = new PropertyChangeListener()
    {
        @Override
        public void propertyChange(PropertyChangeEvent ev)
        {
            iceAgentStateChange(ev);
        }
    };

/**
 * Whether ICE connectivity has been established.
 */
private boolean iceConnected = false;

/**
 * The <tt>IceMediaStream</tt> of {@link #iceAgent} associated with the
 * <tt>Channel</tt> of this instance.
 */
private final IceMediaStream iceStream;

/**
 * The <tt>PropertyChangeListener</tt> which is (to be) notified about
 * changes in the properties of the <tt>CandidatePair</tt>s of
 * {@link #iceStream}.
 */
private final PropertyChangeListener iceStreamPairChangeListener
    = new PropertyChangeListener()
    {
        @Override
        public void propertyChange(PropertyChangeEvent ev)
        {
            iceStreamPairChange(ev);
        }
    };

/**
 * Whether this <tt>IceUdpTransportManager</tt> will serve as the
 * controlling or controlled ICE agent.
 */
private final boolean controlling;

/**
 * The number of {@link org.ice4j.ice.Component}-s to create in
 * {@link #iceStream}.
 */
private int numComponents;

/**
 * Whether we're using rtcp-mux or not. Set by the constructor when a
 * single ICE component is requested.
 */
private boolean rtcpmux = false;

/**
 * The <tt>SctpConnection</tt> instance, if any, added as a <tt>Channel</tt>
 * to this <tt>IceUdpTransportManager</tt>.
 *
 * Currently we support a single <tt>SctpConnection</tt> in one
 * <tt>IceUdpTransportManager</tt> and if it exists, it will receive all
 * DTLS packets.
 */
private SctpConnection sctpConnection = null;
/**
 * Initializes a new <tt>IceUdpTransportManager</tt> instance with the
 * default number of ICE components (2) and the default ICE stream name.
 *
 * @param conference the <tt>Conference</tt> which created this
 * <tt>TransportManager</tt>.
 * @param controlling {@code true} if the new instance is to serve as a
 * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
 * @throws IOException if initializing the ICE agent fails
 */
public IceUdpTransportManager(Conference conference,
                              boolean controlling)
    throws IOException
{
    this(conference, controlling, 2, DEFAULT_ICE_STREAM_NAME);
}
/**
 * Initializes a new <tt>IceUdpTransportManager</tt> instance which uses
 * the default ICE stream name.
 *
 * @param conference the <tt>Conference</tt> which created this
 * <tt>TransportManager</tt>.
 * @param controlling {@code true} if the new instance is to serve as a
 * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
 * @param numComponents the number of ICE components that this instance is
 * to start with.
 * @throws IOException if initializing the ICE agent fails
 */
public IceUdpTransportManager(Conference conference,
                              boolean controlling,
                              int numComponents)
    throws IOException
{
    this(conference, controlling, numComponents, DEFAULT_ICE_STREAM_NAME);
}
/**
 * Initializes a new <tt>IceUdpTransportManager</tt> instance.
 *
 * @param conference the <tt>Conference</tt> which created this
 * <tt>TransportManager</tt>.
 * @param controlling {@code true} if the new instance is to serve as a
 * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
 * @param numComponents the number of ICE components that this instance is
 * to start with.
 * @param iceStreamName the name of the ICE stream to be created by this
 * instance.
 * @throws IOException if initializing the ICE agent fails
 */
public IceUdpTransportManager(Conference conference,
                              boolean controlling,
                              int numComponents,
                              String iceStreamName)
    throws IOException
{
    this.conference = conference;
    this.controlling = controlling;
    this.numComponents = numComponents;
    // A single ICE component implies multiplexing RTP and RTCP
    // (rtcp-mux).
    this.rtcpmux = numComponents == 1;

    dtlsControl = createDtlsControl();

    iceAgent = createIceAgent(controlling, iceStreamName, rtcpmux);
    iceAgent.addStateChangeListener(iceAgentStateChangeListener);
    iceStream = iceAgent.getStream(iceStreamName);
    iceStream.addPairChangeListener(iceStreamPairChangeListener);

    EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
    if (eventAdmin != null)
        eventAdmin.sendEvent(EventFactory.transportCreated(this));
}
/**
 * Initializes a new <tt>IceUdpTransportManager</tt> instance with the
 * default number of ICE components (2).
 *
 * @param conference the <tt>Conference</tt> which created this
 * <tt>TransportManager</tt>.
 * @param controlling {@code true} if the new instance is to serve as a
 * controlling ICE agent and passive DTLS endpoint; otherwise, {@code false}
 * @param iceStreamName the name of the ICE stream to be created by this
 * instance.
 * @throws IOException if initializing the ICE agent fails
 */
public IceUdpTransportManager(Conference conference,
                              boolean controlling,
                              String iceStreamName)
    throws IOException
{
    this(conference, controlling, 2, iceStreamName);
}
/**
 * {@inheritDoc}
 *
 * Assures that no more than one <tt>SctpConnection</tt> is added. Keeps
 * {@link #sctpConnection} and {@link #channelForDtls} up to date.
 *
 * @return <tt>true</tt> if the channel was added; <tt>false</tt> if this
 * transport manager is closed, a second <tt>SctpConnection</tt> was
 * offered, or the superclass rejected the channel.
 */
@Override
public boolean addChannel(Channel channel)
{
    if (closed)
        return false;

    if (channel instanceof SctpConnection
            && sctpConnection != null
            && sctpConnection != channel)
    {
        logd("Not adding a second SctpConnection to TransportManager.");
        return false;
    }

    if (!super.addChannel(channel))
        return false;

    if (channel instanceof SctpConnection)
    {
        // When an SctpConnection is added, it automatically replaces
        // channelForDtls, because it needs DTLS packets for the application
        // data inside them.
        sctpConnection = (SctpConnection) channel;

        if (channelForDtls != null)
        {
            // channelForDtls is necessarily an RtpChannel, because we don't
            // add more than one SctpConnection. The SctpConnection socket
            // will automatically accept DTLS.
            RtpChannel rtpChannelForDtls = (RtpChannel) channelForDtls;

            rtpChannelForDtls.getDatagramFilter(false).setAcceptNonRtp(
                    false);
            rtpChannelForDtls.getDatagramFilter(true).setAcceptNonRtp(
                    false);
        }
        channelForDtls = sctpConnection;
    }
    else if (channelForDtls == null)
    {
        // First RTP channel: it becomes responsible for accepting DTLS.
        channelForDtls = channel;

        RtpChannel rtpChannel = (RtpChannel) channel;

        // The new channelForDtls will always accept DTLS packets on its
        // RTP socket.
        rtpChannel.getDatagramFilter(false).setAcceptNonRtp(true);
        // If we use rtcpmux, we don't want to accept DTLS packets on the
        // RTCP socket, because they will be duplicated from the RTP socket,
        // because both sockets are actually filters on the same underlying
        // socket.
        rtpChannel.getDatagramFilter(true).setAcceptNonRtp(!rtcpmux);
    }

    if (iceConnected)
        channel.transportConnected();

    EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
    if (eventAdmin != null)
        eventAdmin.sendEvent(EventFactory.transportChannelAdded(channel));

    return true;
}
/**
 * Adds to <tt>iceAgent</tt> videobridge specific candidate harvesters such
 * as an Amazon AWS EC2 specific harvester.
 *
 * @param iceAgent the {@link Agent} that we'd like to append new harvesters
 * to.
 * @param rtcpmux whether rtcp-mux will be used by this
 * <tt>IceUdpTransportManager</tt>.
 */
private void appendVideobridgeHarvesters(Agent iceAgent,
                                         boolean rtcpmux)
{
    boolean enableDynamicHostHarvester = true;

    if (rtcpmux)
    {
        // The static TCP and single-port harvesters only support a single
        // component, hence they are added only in the rtcp-mux case.
        if (tcpHostHarvester != null)
            iceAgent.addCandidateHarvester(tcpHostHarvester);
        if (singlePortHarvesters != null)
        {
            for (CandidateHarvester harvester : singlePortHarvesters)
            {
                iceAgent.addCandidateHarvester(harvester);
                enableDynamicHostHarvester = false;
            }
        }
    }

    // Use dynamic ports iff we're not using "single port".
    iceAgent.setUseHostHarvester(enableDynamicHostHarvester);

    ConfigurationService cfg
        = ServiceUtils.getService(
                getBundleContext(),
                ConfigurationService.class);

    //if no configuration is found then we simply log and bail
    if (cfg == null)
    {
        logger.info("No configuration found. "
                    + "Will continue without custom candidate harvesters");
        return;
    }

    HarvesterConfiguration addressesConfig
        = HarvesterConfiguration.getInstance(cfg);

    MappingCandidateHarvester mappingHarvester
        = addressesConfig.getCandidateHarvester();

    //append the mapping harvester
    if( mappingHarvester != null)
    {
        iceAgent.addCandidateHarvester(mappingHarvester);
    }

    if(addressesConfig.getPublicAddress() != null
        && addressesConfig.getLocalAddress() != null)
    {
        //if configured, append a mapping harvester.
        MappingCandidateHarvester natHarvester
            = new MappingCandidateHarvester(
                    addressesConfig.getPublicAddress(),
                    addressesConfig.getLocalAddress());

        iceAgent.addCandidateHarvester(natHarvester);
    }
}
/**
 * Determines whether at least one <tt>LocalCandidate</tt> of a specific ICE
 * <tt>Component</tt> can reach (in the terms of the ice4j library) a
 * specific <tt>RemoteCandidate</tt>.
 *
 * @param component the ICE <tt>Component</tt> whose
 * <tt>LocalCandidate</tt>s are to be checked
 * @param remoteCandidate the <tt>RemoteCandidate</tt> to check
 * reachability against
 * @return <tt>true</tt> if at least one <tt>LocalCandidate</tt> of
 * <tt>component</tt> can reach <tt>remoteCandidate</tt>; otherwise,
 * <tt>false</tt>
 */
private boolean canReach(
        Component component,
        RemoteCandidate remoteCandidate)
{
    boolean reachable = false;

    for (LocalCandidate localCandidate : component.getLocalCandidates())
    {
        if (localCandidate.canReach(remoteCandidate))
        {
            reachable = true;
            break;
        }
    }
    return reachable;
}
/**
 * {@inheritDoc}
 *
 * TODO: In the case of multiple {@code Channel}s in one
 * {@code TransportManager} it is unclear how to handle changes to the
 * {@code initiator} property from individual channels. The commented-out
 * code below illustrates the per-channel handling under consideration.
 */
@Override
protected void channelPropertyChange(PropertyChangeEvent ev)
{
    super.channelPropertyChange(ev);

    /*
    if (Channel.INITIATOR_PROPERTY.equals(ev.getPropertyName())
            && iceAgent != null)
    {
        Channel channel = (Channel) ev.getSource();

        iceAgent.setControlling(channel.isInitiator());
    }
    */
}
/**
 * {@inheritDoc}
 *
 * Closes all channels, stops and cleans up the DTLS control, detaches the
 * listeners from and frees the ICE agent, and interrupts the connect
 * thread (if any). The teardown order matters: channels are closed first,
 * then DTLS, then ICE.
 */
@Override
public synchronized void close()
{
    if (!closed)
    {
        // Set this early to prevent double closing when the last channel
        // is removed.
        closed = true;

        for (Channel channel : getChannels())
            close(channel);

        if (dtlsControl != null)
        {
            dtlsControl.start(null); //stop
            dtlsControl.cleanup(this);
        }

        // DatagramSocket[] datagramSockets = getStreamConnectorSockets();

        if (iceStream != null)
        {
            iceStream.removePairStateChangeListener(
                    iceStreamPairChangeListener);
        }
        if (iceAgent != null)
        {
            iceAgent.removeStateChangeListener(iceAgentStateChangeListener);
            iceAgent.free();
            iceAgent = null;
        }

        /*
         * It seems that the ICE agent takes care of closing these.
         *
        if (datagramSockets != null)
        {
            if (datagramSockets[0] != null)
                datagramSockets[0].close();
            if (datagramSockets[1] != null)
                datagramSockets[1].close();
        }
        */

        synchronized (connectThreadSyncRoot)
        {
            if (connectThread != null)
                connectThread.interrupt();
        }

        super.close();
    }
}
/**
 * {@inheritDoc}
 *
 * Keeps {@link #sctpConnection} and {@link #channelForDtls} up to date,
 * closes the channel's sockets, fires a channel-removed event, and closes
 * this whole transport manager when the last channel is removed.
 */
@Override
public boolean close(Channel channel)
{
    boolean removed = super.close(channel);

    if (removed)
    {
        if (channel == sctpConnection)
        {
            sctpConnection = null;
        }

        if (channel == channelForDtls)
        {
            // Re-elect the channel which accepts DTLS: prefer the
            // SctpConnection, otherwise any remaining RtpChannel.
            if (sctpConnection != null)
            {
                channelForDtls = sctpConnection;
            }
            else if (channel instanceof RtpChannel)
            {
                RtpChannel newChannelForDtls = null;

                for (Channel c : getChannels())
                {
                    if (c instanceof RtpChannel)
                        newChannelForDtls = (RtpChannel) c;
                }
                if (newChannelForDtls != null)
                {
                    newChannelForDtls.getDatagramFilter(false)
                        .setAcceptNonRtp(true);
                    newChannelForDtls.getDatagramFilter(true)
                        .setAcceptNonRtp(!rtcpmux);
                }
                channelForDtls = newChannelForDtls;
            }

            if (channel instanceof RtpChannel)
            {
                // The closed channel must no longer accept DTLS.
                RtpChannel rtpChannel = (RtpChannel) channel;

                rtpChannel.getDatagramFilter(false).setAcceptNonRtp(false);
                rtpChannel.getDatagramFilter(true).setAcceptNonRtp(false);
            }
        }

        try
        {
            StreamConnector connector = channel.getStreamConnector();

            if (connector != null)
            {
                DatagramSocket datagramSocket = connector.getDataSocket();

                if (datagramSocket != null)
                    datagramSocket.close();

                datagramSocket = connector.getControlSocket();
                if (datagramSocket != null)
                    datagramSocket.close();

                Socket socket = connector.getDataTCPSocket();

                if (socket != null)
                    socket.close();

                socket = connector.getControlTCPSocket();
                if (socket != null)
                    socket.close();
            }
        }
        catch (IOException ioe)
        {
            logd("Failed to close sockets when closing a channel:" + ioe);
        }

        EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
        if (eventAdmin != null)
        {
            eventAdmin.sendEvent(
                    EventFactory.transportChannelRemoved(channel));
        }

        channel.transportClosed();
    }

    // Closing the last channel closes this transport manager as a whole.
    if (getChannels().isEmpty())
        close();

    return removed;
}
/**
* Initializes a new {@code DtlsControlImpl} instance.
*
* @return a new {@code DtlsControlImpl} instance
*/
private DtlsControlImpl createDtlsControl()
{
DtlsControlImpl dtlsControl
= new DtlsControlImpl(/* srtpDisabled */ false);
dtlsControl.registerUser(this);
dtlsControl.setSetup(
controlling
? DtlsControl.Setup.PASSIVE
: DtlsControl.Setup.ACTIVE);
// XXX For DTLS, the media type doesn't matter (as long as it's not
// null).
// XXX The actual start of the DTLS servers/clients will be delayed
// until an rtpConnector is set (when a MediaStream with this
// SrtpControl starts or is assigned a target).
dtlsControl.start(MediaType.AUDIO);
return dtlsControl;
}
    /**
     * Initializes a new <tt>Agent</tt> instance which implements the ICE
     * protocol and which is to be used by this instance to implement the Jingle
     * ICE-UDP transport.
     *
     * @param controlling {@code true} if the new agent is to act as the
     * controlling ICE agent; {@code false} for controlled
     * @param iceStreamName the name of the {@code IceMediaStream} to create
     * within the new agent
     * @param rtcpmux whether RTP and RTCP are multiplexed (passed on to the
     * Videobridge-specific harvesters)
     * @return a new <tt>Agent</tt> instance which implements the ICE protocol
     * and which is to be used by this instance to implement the Jingle ICE-UDP
     * transport
     * @throws IOException if initializing a new <tt>Agent</tt> instance for the
     * purposes of this <tt>TransportManager</tt> fails
     */
    private Agent createIceAgent(boolean controlling,
                                 String iceStreamName,
                                 boolean rtcpmux)
        throws IOException
    {
        NetworkAddressManagerService nams
            = ServiceUtils.getService(
                    getBundleContext(),
                    NetworkAddressManagerService.class);
        Agent iceAgent = nams.createIceAgent();

        // Add videobridge-specific harvesters such as a mapping and an Amazon
        // AWS EC2 harvester.
        appendVideobridgeHarvesters(iceAgent, rtcpmux);
        iceAgent.setControlling(controlling);
        iceAgent.setPerformConsentFreshness(true);

        PortTracker portTracker = JitsiTransportManager.getPortTracker(null);
        int portBase = portTracker.getPort();

        IceMediaStream iceStream
            = nams.createIceStream(
                    numComponents,
                    portBase,
                    iceStreamName,
                    iceAgent);

        // Attempt to minimize subsequent bind retries: see if we have allocated
        // any ports from the dynamic range, and if so update the port tracker.
        // Do NOT update the port tracker with non-dynamic ports (e.g. 4443
        // coming from TCP) because this will force it to revert back it its
        // configured min port. When maxPort is reached, allocation will begin
        // from minPort again, so we don't have to worry about wraps.
        int maxAllocatedPort
            = getMaxAllocatedPort(
                    iceStream,
                    portTracker.getMinPort(),
                    portTracker.getMaxPort());

        if (maxAllocatedPort > 0)
        {
            int nextPort = 1 + maxAllocatedPort;

            portTracker.setNextPort(nextPort);
            if (logger.isDebugEnabled())
                logger.debug("Updating the port tracker min port: " + nextPort);
        }

        return iceAgent;
    }
/**
* @return the highest local port used by any of the local candidates of
* {@code iceStream}, which falls in the range [{@code min}, {@code max}].
*/
private int getMaxAllocatedPort(IceMediaStream iceStream, int min, int max)
{
return
Math.max(
getMaxAllocatedPort(
iceStream.getComponent(Component.RTP),
min, max),
getMaxAllocatedPort(
iceStream.getComponent(Component.RTCP),
min, max));
}
/**
* @return the highest local port used by any of the local candidates of
* {@code component}, which falls in the range [{@code min}, {@code max}].
*/
private int getMaxAllocatedPort(Component component, int min, int max)
{
int maxAllocatedPort = -1;
if (component != null)
{
for (LocalCandidate candidate : component.getLocalCandidates())
{
int candidatePort = candidate.getTransportAddress().getPort();
if (min <= candidatePort
&& candidatePort <= max
&& maxAllocatedPort < candidatePort)
{
maxAllocatedPort = candidatePort;
}
}
}
return maxAllocatedPort;
}
    /**
     * {@inheritDoc}
     *
     * Describes the local ICE credentials and candidates of this instance in
     * {@code pe}, and appends the rtcp-mux and DTLS fingerprint extensions.
     */
    @Override
    protected void describe(IceUdpTransportPacketExtension pe)
    {
        if (!closed)
        {
            pe.setPassword(iceAgent.getLocalPassword());
            pe.setUfrag(iceAgent.getLocalUfrag());

            for (Component component : iceStream.getComponents())
            {
                List<LocalCandidate> candidates
                    = component.getLocalCandidates();

                if (candidates != null && !candidates.isEmpty())
                {
                    for (LocalCandidate candidate : candidates)
                    {
                        if (candidate.getTransport() == Transport.TCP
                            && tcpHostHarvesterMappedPort != -1
                            && candidate.getTransportAddress().getPort()
                                != tcpHostHarvesterMappedPort)
                        {
                            // In case we use a mapped port with the TCP
                            // harvester, do not advertise the candidates with
                            // the actual port that we listen on.
                            continue;
                        }

                        describe(candidate, pe);
                    }
                }
            }
            if (rtcpmux)
                pe.addChildExtension(new RtcpmuxPacketExtension());

            describeDtlsControl(pe);
        }
    }
    /**
     * Adds a new <tt>CandidatePacketExtension</tt> to <tt>pe</tt>, sets the
     * values of its properties to the values of the respective properties of
     * <tt>candidate</tt>.
     *
     * @param candidate the <tt>LocalCandidate</tt> from which to take the values
     * of the properties to set.
     * @param pe the <tt>IceUdpTransportPacketExtension</tt> to which to add a
     * new <tt>CandidatePacketExtension</tt>.
     */
    private void describe(
            LocalCandidate candidate,
            IceUdpTransportPacketExtension pe)
    {
        CandidatePacketExtension candidatePE = new CandidatePacketExtension();
        org.ice4j.ice.Component component = candidate.getParentComponent();

        candidatePE.setComponent(component.getComponentID());
        candidatePE.setFoundation(candidate.getFoundation());
        candidatePE.setGeneration(
                component.getParentStream().getParentAgent().getGeneration());
        candidatePE.setID(generateCandidateID(candidate));
        // XXX The network ID is hard-coded to 0 here.
        candidatePE.setNetwork(0);
        candidatePE.setPriority(candidate.getPriority());

        // Advertise 'tcp' candidates for which SSL is enabled as 'ssltcp'
        // (although internally their transport protocol remains "tcp")
        Transport transport = candidate.getTransport();
        if (transport == Transport.TCP && candidate.isSSL())
        {
            transport = Transport.SSLTCP;
        }
        candidatePE.setProtocol(transport.toString());

        if (transport == Transport.TCP || transport == Transport.SSLTCP)
        {
            candidatePE.setTcpType(candidate.getTcpType());
        }

        candidatePE.setType(
                CandidateType.valueOf(candidate.getType().toString()));

        TransportAddress transportAddress = candidate.getTransportAddress();

        candidatePE.setIP(transportAddress.getHostAddress());
        candidatePE.setPort(transportAddress.getPort());

        TransportAddress relatedAddress = candidate.getRelatedAddress();

        if (relatedAddress != null)
        {
            candidatePE.setRelAddr(relatedAddress.getHostAddress());
            candidatePE.setRelPort(relatedAddress.getPort());
        }

        pe.addChildExtension(candidatePE);
    }
/**
* Sets the values of the properties of a specific
* <tt>IceUdpTransportPacketExtension</tt> to the values of the
* respective properties of {@link #dtlsControl}
*
* @param transportPE the <tt>IceUdpTransportPacketExtension</tt> on which
* to set the values of the properties of <tt>dtlsControl</tt>
*/
private void describeDtlsControl(IceUdpTransportPacketExtension transportPE)
{
String fingerprint = dtlsControl.getLocalFingerprint();
String hash = dtlsControl.getLocalFingerprintHashFunction();
DtlsFingerprintPacketExtension fingerprintPE
= transportPE.getFirstChildOfType(
DtlsFingerprintPacketExtension.class);
if (fingerprintPE == null)
{
fingerprintPE = new DtlsFingerprintPacketExtension();
transportPE.addChildExtension(fingerprintPE);
}
fingerprintPE.setFingerprint(fingerprint);
fingerprintPE.setHash(hash);
}
    /**
     * Sets up {@link #dtlsControl} according to <tt>transport</tt>, adds all
     * (supported) remote candidates from <tt>transport</tt> to
     * {@link #iceAgent} and starts {@link #iceAgent} if it isn't started
     * already.
     *
     * @param transport the remote transport description (rtcp-mux flag, DTLS
     * fingerprints, ICE ufrag/password and candidates) to apply to this
     * instance
     */
    private synchronized void doStartConnectivityEstablishment(
            IceUdpTransportPacketExtension transport)
    {
        if (closed)
            return;

        // Reflect the transport's rtcpmux onto this instance.
        if (transport.isRtcpMux())
        {
            rtcpmux = true;
            if (channelForDtls != null && channelForDtls instanceof RtpChannel)
            {
                // With rtcp-mux, non-RTP (DTLS) packets are no longer expected
                // on the RTCP filter of the channel which handles DTLS.
                ((RtpChannel) channelForDtls)
                    .getDatagramFilter(true)
                    .setAcceptNonRtp(false);
            }
        }
        dtlsControl.setRtcpmux(rtcpmux);

        // Reflect the transport's remote fingerprints onto this instance.
        List<DtlsFingerprintPacketExtension> dfpes
            = transport.getChildExtensionsOfType(
                    DtlsFingerprintPacketExtension.class);

        if (!dfpes.isEmpty())
        {
            Map<String, String> remoteFingerprints = new LinkedHashMap<>();

            for (DtlsFingerprintPacketExtension dfpe : dfpes)
            {
                remoteFingerprints.put(
                        dfpe.getHash(),
                        dfpe.getFingerprint());
            }

            dtlsControl.setRemoteFingerprints(remoteFingerprints);
        }

        IceProcessingState state = iceAgent.getState();

        if (IceProcessingState.COMPLETED.equals(state)
                || IceProcessingState.TERMINATED.equals(state))
        {
            // Adding candidates to a completed Agent is unnecessary and has
            // been observed to cause problems.
            return;
        }

        // If ICE is running already, we try to update the checklists with the
        // candidates. Note that this is a best effort.
        boolean iceAgentStateIsRunning
            = IceProcessingState.RUNNING.equals(state);
        int remoteCandidateCount = 0;

        if (rtcpmux)
        {
            // With rtcp-mux the RTCP component is not used; remove it from the
            // stream.
            Component rtcpComponent = iceStream.getComponent(Component.RTCP);

            if (rtcpComponent != null)
                iceStream.removeComponent(rtcpComponent);
        }

        // Different stream may have different ufrag/password
        String ufrag = transport.getUfrag();

        if (ufrag != null)
            iceStream.setRemoteUfrag(ufrag);

        String password = transport.getPassword();

        if (password != null)
            iceStream.setRemotePassword(password);

        List<CandidatePacketExtension> candidates
            = transport.getChildExtensionsOfType(
                    CandidatePacketExtension.class);

        if (iceAgentStateIsRunning && candidates.isEmpty())
            return;

        // Sort the remote candidates (host < reflexive < relayed) in order
        // to create first the host, then the reflexive, the relayed
        // candidates and thus be able to set the relative-candidate
        // matching the rel-addr/rel-port attribute.
        Collections.sort(candidates);

        int generation = iceAgent.getGeneration();

        for (CandidatePacketExtension candidate : candidates)
        {
            // Is the remote candidate from the current generation of the
            // iceAgent?
            if (candidate.getGeneration() != generation)
                continue;

            if (rtcpmux && Component.RTCP == candidate.getComponent())
            {
                logger.warn("Received an RTCP candidate, but we're using"
                            + " rtcp-mux. Ignoring.");
                continue;
            }

            Component component
                = iceStream.getComponent(candidate.getComponent());
            String relAddr;
            int relPort;
            TransportAddress relatedAddress = null;

            // Reconstruct the related (rel-addr/rel-port) address, if given.
            if (((relAddr = candidate.getRelAddr()) != null)
                    && ((relPort = candidate.getRelPort()) != -1))
            {
                relatedAddress
                    = new TransportAddress(
                            relAddr,
                            relPort,
                            Transport.parse(candidate.getProtocol()));
            }

            RemoteCandidate relatedCandidate
                = component.findRemoteCandidate(relatedAddress);
            RemoteCandidate remoteCandidate
                = new RemoteCandidate(
                        new TransportAddress(
                                candidate.getIP(),
                                candidate.getPort(),
                                Transport.parse(
                                        candidate.getProtocol())),
                        component,
                        org.ice4j.ice.CandidateType.parse(
                                candidate.getType().toString()),
                        candidate.getFoundation(),
                        candidate.getPriority(),
                        relatedCandidate);

            // XXX IceUdpTransportManager harvests host candidates only and
            // the ICE Components utilize the UDP protocol/transport only at
            // the time of this writing. The ice4j library will, of course,
            // check the theoretical reachability between the local and the
            // remote candidates. However, we would like (1) to not mess
            // with a possibly running iceAgent and (2) to return a
            // consistent return value.
            if (!canReach(component, remoteCandidate))
                continue;

            if (iceAgentStateIsRunning)
                component.addUpdateRemoteCandidates(remoteCandidate);
            else
                component.addRemoteCandidate(remoteCandidate);
            remoteCandidateCount++;
        }

        if (iceAgentStateIsRunning)
        {
            if (remoteCandidateCount == 0)
            {
                // XXX Effectively, the check above but realizing that all
                // candidates were ignored:
                // iceAgentStateIsRunning && candidates.isEmpty().
            }
            else
            {
                // update all components of all streams
                for (IceMediaStream stream : iceAgent.getStreams())
                {
                    for (Component component : stream.getComponents())
                        component.updateRemoteCandidates();
                }
            }
        }
        else if (remoteCandidateCount != 0)
        {
            // Once again because the ICE Agent does not support adding
            // candidates after the connectivity establishment has been started
            // and because multiple transport-info JingleIQs may be used to send
            // the whole set of transport candidates from the remote peer to the
            // local peer, do not really start the connectivity establishment
            // until we have at least one remote candidate per ICE Component.
            for (IceMediaStream stream : iceAgent.getStreams())
            {
                for (Component component : stream.getComponents())
                {
                    if (component.getRemoteCandidateCount() < 1)
                    {
                        remoteCandidateCount = 0;
                        break;
                    }
                }
                if (remoteCandidateCount == 0)
                    break;
            }
            if (remoteCandidateCount != 0)
                iceAgent.startConnectivityEstablishment();
        }
        else if (iceStream.getRemoteUfrag() != null
                && iceStream.getRemotePassword() != null)
        {
            // We don't have any remote candidates, but we already know the
            // remote ufrag and password, so we can start ICE.
            logger.info("Starting ICE agent without remote candidates.");
            iceAgent.startConnectivityEstablishment();
        }
    }
/**
* Generates an ID to be set on a <tt>CandidatePacketExtension</tt> to
* represent a specific <tt>LocalCandidate</tt>.
*
* @param candidate the <tt>LocalCandidate</tt> whose ID is to be generated
* @return an ID to be set on a <tt>CandidatePacketExtension</tt> to
* represent the specified <tt>candidate</tt>
*/
private String generateCandidateID(LocalCandidate candidate)
{
StringBuilder candidateID = new StringBuilder();
candidateID.append(conference.getID());
candidateID.append(Long.toHexString(hashCode()));
Agent iceAgent
= candidate.getParentComponent().getParentStream().getParentAgent();
candidateID.append(Long.toHexString(iceAgent.hashCode()));
candidateID.append(Long.toHexString(iceAgent.getGeneration()));
candidateID.append(Long.toHexString(candidate.hashCode()));
return candidateID.toString();
}
    /**
     * Gets the <tt>Conference</tt> object that this <tt>TransportManager</tt>
     * is associated with.
     *
     * @return the <tt>Conference</tt> associated with this instance
     */
    public Conference getConference()
    {
        return conference;
    }
    /**
     * Gets the number of {@link org.ice4j.ice.Component}-s to create in
     * {@link #iceStream}.
     *
     * @return the number of ICE components created in {@link #iceStream}
     */
    public int getNumComponents()
    {
        return numComponents;
    }
    /**
     * Gets the <tt>Agent</tt> which implements the ICE protocol and which is
     * used by this instance to implement the Jingle ICE-UDP transport.
     *
     * @return the ICE <tt>Agent</tt>, or <tt>null</tt> after this instance
     * has been closed
     */
    public Agent getAgent()
    {
        return iceAgent;
    }
    /**
     * Gets the <tt>IceMediaStream</tt> of {@link #iceAgent} associated with the
     * <tt>Channel</tt> of this instance.
     *
     * @return the <tt>IceMediaStream</tt> used by this instance
     */
    public IceMediaStream getIceStream()
    {
        return iceStream;
    }
    /**
     * Returns a boolean value determining whether this
     * <tt>IceUdpTransportManager</tt> will serve as the controlling or
     * the controlled ICE agent.
     *
     * @return <tt>true</tt> if this instance acts as the controlling ICE
     * agent; <tt>false</tt> if it is controlled
     */
    public boolean isControlling()
    {
        return controlling;
    }
    /**
     * Gets the <tt>BundleContext</tt> associated with the <tt>Channel</tt>
     * that this {@link net.java.sip.communicator.service.protocol.media
     * .TransportManager} is servicing. The method is a
     * convenience which gets the <tt>BundleContext</tt> associated with the
     * XMPP component implementation in which the <tt>Videobridge</tt>
     * associated with this instance is executing.
     *
     * @return the <tt>BundleContext</tt> associated with this
     * <tt>IceUdpTransportManager</tt>, or <tt>null</tt> if this instance has
     * no associated <tt>Conference</tt>
     */
    public BundleContext getBundleContext()
    {
        return conference != null ? conference.getBundleContext() : null;
    }
    /**
     * {@inheritDoc}
     *
     * All channels of this <tt>TransportManager</tt> share a single
     * <tt>DtlsControlImpl</tt> instance, so <tt>channel</tt> is effectively
     * ignored.
     */
    @Override
    public DtlsControlImpl getDtlsControl(Channel channel)
    {
        return dtlsControl;
    }
/**
* Gets the <tt>IceSocketWrapper</tt> from the selected pair (if any)
* from a specific {@link org.ice4j.ice.Component}.
*
* @param component the <tt>Component</tt> from which to get a socket.
* @return the <tt>IceSocketWrapper</tt> from the selected pair (if any)
* from a specific {@link org.ice4j.ice.Component}.
*/
private IceSocketWrapper getSocketForComponent(Component component)
{
CandidatePair selectedPair = component.getSelectedPair();
return
(selectedPair == null) ? null : selectedPair.getIceSocketWrapper();
}
    /**
     * {@inheritDoc}
     *
     * For an <tt>SctpConnection</tt>, returns a connector wrapping a socket
     * which accepts only DTLS packets. For an <tt>RtpChannel</tt>, delegates
     * to {@link #getUDPStreamConnector(RtpChannel, DatagramSocket[])} or
     * {@link #getTCPStreamConnector(RtpChannel, Socket[])} depending on the
     * transport of the ICE-selected pair.
     */
    @Override
    public StreamConnector getStreamConnector(Channel channel)
    {
        if (!getChannels().contains(channel))
            return null;

        IceSocketWrapper[] iceSockets = getStreamConnectorSockets();
        IceSocketWrapper iceSocket0;

        // Without at least the RTP-component socket nothing can be built.
        if (iceSockets == null || (iceSocket0 = iceSockets[0]) == null)
            return null;

        if (channel instanceof SctpConnection)
        {
            // SCTP runs over DTLS, so hand the SctpConnection a socket which
            // sees only DTLS packets.
            DatagramSocket udpSocket = iceSocket0.getUDPSocket();

            if (udpSocket != null)
            {
                if (udpSocket instanceof MultiplexingDatagramSocket)
                {
                    MultiplexingDatagramSocket multiplexing
                        = (MultiplexingDatagramSocket) udpSocket;

                    try
                    {
                        DatagramSocket dtlsSocket
                            = multiplexing.getSocket(new DTLSDatagramFilter());

                        return new DefaultStreamConnector(dtlsSocket, null);
                    }
                    catch (IOException ioe)
                    {
                        logger.warn("Failed to create DTLS socket: " + ioe);
                    }
                }
            }
            else
            {
                // No UDP socket: try the TCP socket of the selected pair.
                Socket tcpSocket = iceSocket0.getTCPSocket();

                if (tcpSocket != null
                        && tcpSocket instanceof MultiplexingSocket)
                {
                    MultiplexingSocket multiplexing
                        = (MultiplexingSocket) tcpSocket;

                    try
                    {
                        Socket dtlsSocket
                            = multiplexing.getSocket(new DTLSDatagramFilter());

                        return new DefaultTCPStreamConnector(dtlsSocket, null);
                    }
                    catch(IOException ioe)
                    {
                        logger.warn("Failed to create DTLS socket: " + ioe);
                    }
                }
            }
            return null;
        }

        if (! (channel instanceof RtpChannel))
            return null;

        DatagramSocket udpSocket0;
        IceSocketWrapper iceSocket1 = iceSockets[1];
        RtpChannel rtpChannel = (RtpChannel) channel;

        if ((udpSocket0 = iceSocket0.getUDPSocket()) != null)
        {
            DatagramSocket udpSocket1
                = (iceSocket1 == null) ? null : iceSocket1.getUDPSocket();

            return
                getUDPStreamConnector(
                        rtpChannel,
                        new DatagramSocket[] { udpSocket0, udpSocket1 });
        }
        else
        {
            Socket tcpSocket0 = iceSocket0.getTCPSocket();
            Socket tcpSocket1
                = (iceSocket1 == null) ? null : iceSocket1.getTCPSocket();

            return
                getTCPStreamConnector(
                        rtpChannel,
                        new Socket[]{tcpSocket0, tcpSocket1});
        }
    }
/**
* Gets the <tt>IceSocketWrapper</tt>s from the selected
* <tt>CandidatePair</tt>(s) of the ICE agent.
* TODO cache them in this instance?
* @return the <tt>IceSocketWrapper</tt>s from the selected
* <tt>CandidatePair</tt>(s) of the ICE agent.
*/
private IceSocketWrapper[] getStreamConnectorSockets()
{
IceSocketWrapper[] streamConnectorSockets = new IceSocketWrapper[2];
// RTP
Component rtpComponent = iceStream.getComponent(Component.RTP);
if (rtpComponent != null)
{
streamConnectorSockets[0 /* RTP */]
= getSocketForComponent(rtpComponent);
}
// RTCP
if (numComponents > 1 && !rtcpmux)
{
Component rtcpComponent = iceStream.getComponent(Component.RTCP);
if (rtcpComponent != null)
{
streamConnectorSockets[1 /* RTCP */]
= getSocketForComponent(rtcpComponent);
}
}
return streamConnectorSockets;
}
    /**
     * Determines the <tt>MediaStreamTarget</tt> (the remote RTP and RTCP
     * transport addresses) resulting from the ICE-selected candidate pairs of
     * {@link #iceStream}.
     *
     * @return a <tt>MediaStreamTarget</tt> describing the remote addresses,
     * or <tt>null</tt> if no address has been selected yet
     */
    private MediaStreamTarget getStreamTarget()
    {
        MediaStreamTarget streamTarget = null;
        InetSocketAddress[] streamTargetAddresses
            = new InetSocketAddress[2];
        int streamTargetAddressCount = 0;

        // RTP: the remote address of the selected pair of the RTP component.
        Component rtpComponent = iceStream.getComponent(Component.RTP);

        if (rtpComponent != null)
        {
            CandidatePair selectedPair = rtpComponent.getSelectedPair();

            if (selectedPair != null)
            {
                InetSocketAddress streamTargetAddress
                    = selectedPair
                        .getRemoteCandidate()
                        .getTransportAddress();

                if (streamTargetAddress != null)
                {
                    streamTargetAddresses[0] = streamTargetAddress;
                    streamTargetAddressCount++;
                }
            }
        }
        // RTCP: only relevant with a separate (non-muxed) RTCP component.
        if (numComponents > 1 && !rtcpmux)
        {
            Component rtcpComponent = iceStream.getComponent(Component.RTCP);

            if (rtcpComponent != null)
            {
                CandidatePair selectedPair = rtcpComponent.getSelectedPair();

                if (selectedPair != null)
                {
                    InetSocketAddress streamTargetAddress
                        = selectedPair
                            .getRemoteCandidate()
                            .getTransportAddress();

                    if (streamTargetAddress != null)
                    {
                        streamTargetAddresses[1] = streamTargetAddress;
                        streamTargetAddressCount++;
                    }
                }
            }
        }

        // With rtcp-mux, RTCP flows to the same remote address as RTP.
        // NOTE(review): if no RTP pair has been selected yet, this copies
        // null and still increments the count, yielding a target with null
        // addresses -- confirm this is acceptable to MediaStreamTarget.
        if (rtcpmux)
        {
            streamTargetAddresses[1] = streamTargetAddresses[0];
            streamTargetAddressCount++;
        }

        if (streamTargetAddressCount > 0)
        {
            streamTarget
                = new MediaStreamTarget(
                        streamTargetAddresses[0 /* RTP */],
                        streamTargetAddresses[1 /* RTCP */]);
        }
        return streamTarget;
    }
    /**
     * {@inheritDoc}
     *
     * The target is shared by all channels of this transport manager, so the
     * <tt>channel</tt> argument is effectively ignored.
     */
    @Override
    public MediaStreamTarget getStreamTarget(Channel channel)
    {
        return getStreamTarget();
    }
/**
* Creates and returns a TCP <tt>StreamConnector</tt> to be used by a
* specific <tt>RtpChannel</tt>, using <tt>iceSockets</tt> as the
* underlying <tt>Socket</tt>s.
*
* Does not use <tt>iceSockets</tt> directly, but creates
* <tt>MultiplexedSocket</tt> instances on top of them.
*
* @param rtpChannel the <tt>RtpChannel</tt> which is to use the created
* <tt>StreamConnector</tt>.
* @param iceSockets the <tt>Socket</tt>s which are to be used by the
* created <tt>StreamConnector</tt>.
* @return a TCP <tt>StreamConnector</tt> with the <tt>Socket</tt>s
* given in <tt>iceSockets</tt> to be used by a specific
* <tt>RtpChannel</tt>.
*/
private StreamConnector getTCPStreamConnector(RtpChannel rtpChannel,
Socket[] iceSockets)
{
StreamConnector connector = null;
if (iceSockets != null)
{
Socket iceSocket0 = iceSockets[0];
Socket channelSocket0 = null;
if (iceSocket0 != null && iceSocket0 instanceof MultiplexingSocket)
{
MultiplexingSocket multiplexing
= (MultiplexingSocket) iceSocket0;
try
{
channelSocket0
= multiplexing.getSocket(
rtpChannel.getDatagramFilter(false /* RTP */));
}
catch (SocketException se) // never thrown
{}
}
Socket iceSocket1 = rtcpmux ? iceSocket0 : iceSockets[1];
Socket channelSocket1 = null;
if (iceSocket1 != null && iceSocket1 instanceof MultiplexingSocket)
{
MultiplexingSocket multiplexing
= (MultiplexingSocket) iceSocket1;
try
{
channelSocket1
= multiplexing.getSocket(
rtpChannel.getDatagramFilter(true /* RTCP */));
}
catch (SocketException se) // never thrown
{}
}
if (channelSocket0 != null || channelSocket1 != null)
{
connector
= new DefaultTCPStreamConnector(
channelSocket0,
channelSocket1,
rtcpmux);
}
}
return connector;
}
/**
* Creates and returns a UDP <tt>StreamConnector</tt> to be used by a
* specific <tt>RtpChannel</tt>, using <tt>iceSockets</tt> as the
* underlying <tt>DatagramSocket</tt>s.
*
* Does not use <tt>iceSockets</tt> directly, but creates
* <tt>MultiplexedDatagramSocket</tt> instances on top of them.
*
* @param rtpChannel the <tt>RtpChannel</tt> which is to use the created
* <tt>StreamConnector</tt>.
* @param iceSockets the <tt>DatagramSocket</tt>s which are to be used by the
* created <tt>StreamConnector</tt>.
* @return a UDP <tt>StreamConnector</tt> with the <tt>DatagramSocket</tt>s
* given in <tt>iceSockets</tt> to be used by a specific
* <tt>RtpChannel</tt>.
*/
private StreamConnector getUDPStreamConnector(RtpChannel rtpChannel,
DatagramSocket[] iceSockets)
{
StreamConnector connector = null;
if (iceSockets != null)
{
DatagramSocket iceSocket0 = iceSockets[0];
DatagramSocket channelSocket0 = null;
if (iceSocket0 != null
&& iceSocket0 instanceof MultiplexingDatagramSocket)
{
MultiplexingDatagramSocket multiplexing
= (MultiplexingDatagramSocket) iceSocket0;
try
{
channelSocket0
= multiplexing.getSocket(
rtpChannel.getDatagramFilter(false /* RTP */));
}
catch (SocketException se) // never thrown
{}
}
DatagramSocket iceSocket1 = rtcpmux ? iceSocket0 : iceSockets[1];
DatagramSocket channelSocket1 = null;
if (iceSocket1 != null
&& iceSocket1 instanceof MultiplexingDatagramSocket)
{
MultiplexingDatagramSocket multiplexing
= (MultiplexingDatagramSocket) iceSocket1;
try
{
channelSocket1
= multiplexing.getSocket(
rtpChannel.getDatagramFilter(true /* RTCP */));
}
catch (SocketException se) // never thrown
{}
}
if (channelSocket0 != null || channelSocket1 != null)
{
connector
= new DefaultStreamConnector(
channelSocket0,
channelSocket1,
rtcpmux);
}
}
return connector;
}
    /**
     * {@inheritDoc}
     *
     * @return the XML namespace of the Jingle ICE-UDP transport
     */
    @Override
    public String getXmlNamespace()
    {
        return IceUdpTransportPacketExtension.NAMESPACE;
    }
    /**
     * Notifies this instance about a change of the value of the <tt>state</tt>
     * property of {@link #iceAgent}. Logs the transition and publishes a
     * transport-state-changed event.
     *
     * @param ev a <tt>PropertyChangeEvent</tt> which specifies the old and new
     * values of the <tt>state</tt> property of {@link #iceAgent}.
     */
    private void iceAgentStateChange(PropertyChangeEvent ev)
    {
        // Log the changes in the ICE processing state of this
        // IceUdpTransportManager for the purposes of debugging.

        boolean interrupted = false;

        try
        {
            IceProcessingState oldState = (IceProcessingState) ev.getOldValue();
            IceProcessingState newState = (IceProcessingState) ev.getNewValue();

            StringBuilder s
                = new StringBuilder("ICE processing state of ")
                    .append(getClass().getSimpleName()).append(" #")
                    .append(Integer.toHexString(hashCode()))
                    .append(" (for channels");
            for (Channel channel : getChannels())
                s.append(" ").append(channel.getID());
            s.append(") of conference ").append(conference.getID())
                .append(" changed from ").append(oldState).append(" to ")
                .append(newState).append(".");
            logd(s.toString());

            EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
            if (eventAdmin != null)
            {
                eventAdmin.sendEvent(
                        EventFactory.transportStateChanged(
                                this,
                                oldState,
                                newState));
            }
        }
        catch (Throwable t)
        {
            // NOTE(review): Throwables other than InterruptedException and
            // ThreadDeath are silently swallowed here -- presumably so that a
            // logging/event failure cannot break the ICE state-change
            // callback; confirm this is intentional.
            if (t instanceof InterruptedException)
                interrupted = true;
            else if (t instanceof ThreadDeath)
                throw (ThreadDeath) t;
        }
        finally
        {
            if (interrupted)
                Thread.currentThread().interrupt();
        }
    }
/**
* Notifies this instance about a change of the value of a property of a
* <tt>CandidatePair</tt> of {@link #iceStream}.
*
* @param ev a <tt>PropertyChangeEvent</tt> which specifies the
* <tt>CandidatePair</tt>, the name of the <tt>CandidatePair</tt> property,
* and its old and new values
*/
private void iceStreamPairChange(PropertyChangeEvent ev)
{
if (IceMediaStream.PROPERTY_PAIR_CONSENT_FRESHNESS_CHANGED.equals(
ev.getPropertyName()))
{
// TODO we might not necessarily want to keep all channels alive by
// the ICE connection.
for (Channel channel : getChannels())
channel.touch();
}
}
    /**
     * Initializes the static <tt>Harvester</tt> instances used by all
     * <tt>IceUdpTransportManager</tt> instances, that is
     * {@link #tcpHostHarvester} and {@link #singlePortHarvesters}.
     *
     * @param cfg the {@link ConfigurationService} which provides values to
     * configurable properties of the behavior/logic of the method
     * implementation
     */
    static void initializeStaticHarvesters(ConfigurationService cfg)
    {
        synchronized (IceUdpTransportManager.class)
        {
            if (staticHarvestersInitialized)
                return;
            // Mark initialized up front so that a failure below is not
            // retried on every subsequent call.
            staticHarvestersInitialized = true;

            int singlePort = cfg.getInt(SINGLE_PORT_HARVESTER_PORT,
                                        SINGLE_PORT_DEFAULT_VALUE);
            if (singlePort != -1)
            {
                singlePortHarvesters
                    = SinglePortUdpHarvester.createHarvesters(singlePort);
                if (singlePortHarvesters.isEmpty())
                {
                    singlePortHarvesters = null;
                    logger.info("No single-port harvesters created.");
                }
            }

            if (!cfg.getBoolean(DISABLE_TCP_HARVESTER, false))
            {
                int port = cfg.getInt(TCP_HARVESTER_PORT, -1);
                boolean fallback = false;
                boolean ssltcp = cfg.getBoolean(TCP_HARVESTER_SSLTCP,
                                                TCP_HARVESTER_SSLTCP_DEFAULT);

                if (port == -1)
                {
                    // No port explicitly configured: try the default port and
                    // allow falling back to TCP_FALLBACK_PORT on failure.
                    port = TCP_DEFAULT_PORT;
                    fallback = true;
                }

                try
                {
                    tcpHostHarvester = new TcpHarvester(port, ssltcp);
                }
                catch (IOException ioe)
                {
                    logger.warn(
                        "Failed to initialize TCP harvester on port " + port
                            + ": " + ioe
                            + (fallback
                                ? ". Retrying on port " + TCP_FALLBACK_PORT
                                : "")
                            + ".");
                    // If no fallback is allowed, the method will return.
                }
                if (tcpHostHarvester == null)
                {
                    // If TCP_HARVESTER_PORT specified a port, then fallback was
                    // disabled. However, if the binding on the port (above)
                    // fails, then the method should return.
                    if (!fallback)
                        return;

                    port = TCP_FALLBACK_PORT;
                    try
                    {
                        tcpHostHarvester
                            = new TcpHarvester(port, ssltcp);
                    }
                    catch (IOException ioe)
                    {
                        logger.warn(
                            "Failed to initialize TCP harvester on fallback"
                                + " port " + port + ": " + ioe);
                        return;
                    }
                }

                if (logger.isInfoEnabled())
                {
                    logger.info("Initialized TCP harvester on port " + port
                                + ", using SSLTCP:" + ssltcp);
                }

                HarvesterConfiguration addressesConfig
                    = HarvesterConfiguration.getInstance(cfg);
                // If there are mapped addresses configured or discovered,
                // advertise them via the TCP harvester.
                if(addressesConfig.getPublicAddress() != null
                    && addressesConfig.getLocalAddress() != null)
                {
                    tcpHostHarvester.addMappedAddress(
                        addressesConfig.getPublicAddress().getAddress(),
                        addressesConfig.getLocalAddress().getAddress());
                }

                int mappedPort = cfg.getInt(TCP_HARVESTER_MAPPED_PORT, -1);
                if (mappedPort != -1)
                {
                    tcpHostHarvesterMappedPort = mappedPort;
                    tcpHostHarvester.addMappedPort(mappedPort);
                }
            }
        }
    }
/**
* Notifies all channels of this <tt>TransportManager</tt> that connectivity
* has been established (and they can now obtain valid values through
* {@link #getStreamConnector(Channel)} and
* {@link #getStreamTarget(Channel)}.
*/
private void onIceConnected()
{
iceConnected = true;
EventAdmin eventAdmin = conference.getVideobridge().getEventAdmin();
if (eventAdmin != null)
eventAdmin.sendEvent(EventFactory.transportConnected(this));
for (Channel channel : getChannels())
channel.transportConnected();
}
    /**
     * {@inheritDoc}
     *
     * Applies {@code transport} and, if not already running, spawns a daemon
     * thread which waits for ICE to finish and then notifies the channels of
     * this instance.
     */
    @Override
    public void startConnectivityEstablishment(
            IceUdpTransportPacketExtension transport)
    {
        doStartConnectivityEstablishment(transport);

        synchronized (connectThreadSyncRoot)
        {
            // Only one connect thread at a time for this instance.
            if (connectThread == null)
            {
                connectThread = new Thread()
                {
                    @Override
                    public void run()
                    {
                        try
                        {
                            wrapupConnectivityEstablishment();
                        }
                        catch (OperationFailedException ofe)
                        {
                            logd("Failed to connect IceUdpTransportManager: "
                                     + ofe);
                            synchronized (connectThreadSyncRoot)
                            {
                                connectThread = null;
                                return;
                            }
                        }

                        // XXX The value of the field iceAgent is null at times.
                        Agent iceAgent = getAgent();

                        if (iceAgent == null)
                        {
                            // This TransportManager has (probably) been closed.
                            return;
                        }

                        IceProcessingState state = iceAgent.getState();

                        if (IceProcessingState.COMPLETED.equals(state)
                                || IceProcessingState.TERMINATED.equals(state))
                        {
                            onIceConnected();
                        }
                        else
                        {
                            logger.warn("Failed to establish ICE connectivity,"
                                            + " state: " + state);
                        }
                    }
                };

                connectThread.setDaemon(true);
                connectThread.setName("IceUdpTransportManager connect thread");
                connectThread.start();
            }
        }
    }
    /**
     * Waits until {@link #iceAgent} exits the RUNNING or WAITING state.
     *
     * @throws OperationFailedException if ICE processing fails or this
     * <tt>TransportManager</tt> is closed while waiting
     */
    private void wrapupConnectivityEstablishment()
        throws OperationFailedException
    {
        final Object syncRoot = new Object();
        PropertyChangeListener propertyChangeListener
            = new PropertyChangeListener()
            {
                @Override
                public void propertyChange(PropertyChangeEvent ev)
                {
                    Object newValue = ev.getNewValue();

                    if (IceProcessingState.COMPLETED.equals(newValue)
                            || IceProcessingState.FAILED.equals(newValue)
                            || IceProcessingState.TERMINATED.equals(newValue))
                    {
                        Agent iceAgent = (Agent) ev.getSource();

                        iceAgent.removeStateChangeListener(this);
                        if (iceAgent == IceUdpTransportManager.this.iceAgent)
                        {
                            // Wake up the waiting loop below.
                            synchronized (syncRoot)
                            {
                                syncRoot.notify();
                            }
                        }
                    }
                }
            };
        Agent iceAgent = this.iceAgent;

        if (iceAgent == null)
        {
            // The TransportManager has been closed, so we should return and
            // let the thread finish.
            return;
        }
        iceAgent.addStateChangeListener(propertyChangeListener);

        // Wait for the connectivity checks to finish if they have been started.
        boolean interrupted = false;
        IceProcessingState state = iceAgent.getState();

        synchronized (syncRoot)
        {
            while (IceProcessingState.RUNNING.equals(state)
                    || IceProcessingState.WAITING.equals(state))
            {
                try
                {
                    // A timed wait serves as a defense against a missed
                    // notification.
                    syncRoot.wait(1000);
                }
                catch (InterruptedException ie)
                {
                    interrupted = true;
                }
                finally
                {
                    // Re-read the state and bail out if this instance was
                    // closed concurrently.
                    state = iceAgent.getState();
                    if (this.iceAgent == null)
                        break;
                }
            }
        }
        if (interrupted)
            Thread.currentThread().interrupt();

        // Make sure stateChangeListener is removed from iceAgent in case its
        // #propertyChange(PropertyChangeEvent) has never been executed.
        iceAgent.removeStateChangeListener(propertyChangeListener);

        // Check the state of ICE processing and throw an exception if failed.
        if (this.iceAgent == null)
        {
            throw new OperationFailedException(
                    "TransportManager closed",
                    OperationFailedException.GENERAL_ERROR);
        }
        else if (IceProcessingState.FAILED.equals(state))
        {
            throw new OperationFailedException(
                    "ICE failed",
                    OperationFailedException.GENERAL_ERROR);
        }
    }
    /**
     * {@inheritDoc}
     *
     * @return {@code true} if ICE connectivity has been established for this
     * transport manager; {@code false} otherwise
     */
    @Override
    public boolean isConnected()
    {
        return iceConnected;
    }
}
| Simplifies and/or clarifies and manually formats source code, mostly for readability (while reading the source code in the context of early DTLS support).
| src/main/java/org/jitsi/videobridge/IceUdpTransportManager.java | Simplifies and/or clarifies and manually formats source code, mostly for readability (while reading the source code in the context of early DTLS support). | <ide><path>rc/main/java/org/jitsi/videobridge/IceUdpTransportManager.java
<ide> /**
<ide> * Whether we're using rtcp-mux or not.
<ide> */
<del> private boolean rtcpmux = false;
<add> private boolean rtcpmux;
<ide>
<ide> /**
<ide> * The <tt>SctpConnection</tt> instance, if any, added as a <tt>Channel</tt>
<ide> }
<ide>
<ide> /**
<add> * Returns whether this {@code IceUdpTransportManager} is using rtcp-mux.
<add> *
<add> * @return {@code true} if this {@code IceUdpTransportManager} is using
<add> * rtcp-mux; otherwise, {@code false}
<add> */
<add> public boolean isRtcpmux()
<add> {
<add> return rtcpmux;
<add> }
<add>
<add> /**
<ide> * Gets the <tt>BundleContext</tt> associated with the <tt>Channel</tt>
<ide> * that this {@link net.java.sip.communicator.service.protocol.media
<ide> * .TransportManager} is servicing. The method is a
<ide> if (numComponents > 1 && !rtcpmux)
<ide> {
<ide> Component rtcpComponent = iceStream.getComponent(Component.RTCP);
<add>
<ide> if (rtcpComponent != null)
<ide> {
<ide> streamConnectorSockets[1 /* RTCP */]
<ide> private MediaStreamTarget getStreamTarget()
<ide> {
<ide> MediaStreamTarget streamTarget = null;
<del> InetSocketAddress[] streamTargetAddresses
<del> = new InetSocketAddress[2];
<add> InetSocketAddress[] streamTargetAddresses = new InetSocketAddress[2];
<ide> int streamTargetAddressCount = 0;
<ide>
<ide> Component rtpComponent = iceStream.getComponent(Component.RTP);
<add>
<ide> if (rtpComponent != null)
<ide> {
<ide> CandidatePair selectedPair = rtpComponent.getSelectedPair();
<ide> if (selectedPair != null)
<ide> {
<ide> InetSocketAddress streamTargetAddress
<del> = selectedPair
<add> = selectedPair
<ide> .getRemoteCandidate()
<del> .getTransportAddress();
<add> .getTransportAddress();
<ide>
<ide> if (streamTargetAddress != null)
<ide> {
<ide> }
<ide> }
<ide>
<del> if (numComponents > 1 && !rtcpmux)
<add> if (rtcpmux)
<add> {
<add> streamTargetAddresses[1] = streamTargetAddresses[0];
<add> streamTargetAddressCount++;
<add> }
<add> else if (numComponents > 1)
<ide> {
<ide> Component rtcpComponent = iceStream.getComponent(Component.RTCP);
<add>
<ide> if (rtcpComponent != null)
<ide> {
<ide> CandidatePair selectedPair = rtcpComponent.getSelectedPair();
<ide> if (selectedPair != null)
<ide> {
<ide> InetSocketAddress streamTargetAddress
<del> = selectedPair
<add> = selectedPair
<ide> .getRemoteCandidate()
<del> .getTransportAddress();
<add> .getTransportAddress();
<ide>
<ide> if (streamTargetAddress != null)
<ide> {
<ide> }
<ide> }
<ide>
<del> if (rtcpmux)
<del> {
<del> streamTargetAddresses[1] = streamTargetAddresses[0];
<del> streamTargetAddressCount++;
<del> }
<del>
<ide> if (streamTargetAddressCount > 0)
<ide> {
<ide> streamTarget
<del> = new MediaStreamTarget(
<add> = new MediaStreamTarget(
<ide> streamTargetAddresses[0 /* RTP */],
<ide> streamTargetAddresses[1 /* RTCP */]);
<ide> }
<ide> @Override
<ide> public void propertyChange(PropertyChangeEvent ev)
<ide> {
<del> Object newValue = ev.getNewValue();
<del>
<del> if (IceProcessingState.COMPLETED.equals(newValue)
<del> || IceProcessingState.FAILED.equals(newValue)
<del> || IceProcessingState.TERMINATED.equals(newValue))
<add> // Wait for ICE to finish establishing connectivity (or to
<add> // determine that no connectivity can be successfully
<add> // established, of course).
<add> Agent iceAgent = (Agent) ev.getSource();
<add>
<add> if (iceAgent.isOver())
<ide> {
<del> Agent iceAgent = (Agent) ev.getSource();
<del>
<ide> iceAgent.removeStateChangeListener(this);
<ide> if (iceAgent == IceUdpTransportManager.this.iceAgent)
<ide> { |
|
Java | mit | 3afad6499be2d6c47b003b72f350899938fa97df | 0 | GluuFederation/oxCore,madumlao/oxCore | package org.xdi.service.cache;
import net.spy.memcached.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author yuriyz on 02/02/2017.
*/
public class MemcachedProvider extends AbstractCacheProvider<MemcachedClient> {

    private static final Logger log = LoggerFactory.getLogger(MemcachedProvider.class);

    // Connection settings (servers, buffer size, queue length, default expiration).
    // Made final: it is required at construction time and never reassigned.
    private final MemcachedConfiguration memcachedConfiguration;

    // Underlying spymemcached client; initialized in create(), released in destroy().
    private MemcachedClient client;

    public MemcachedProvider(MemcachedConfiguration memcachedConfiguration) {
        this.memcachedConfiguration = memcachedConfiguration;
    }

    /**
     * Builds the memcached client using either a binary or a text (default)
     * protocol connection factory, then verifies connectivity with a probe
     * entry.
     *
     * @throws IllegalStateException if the client cannot be created or the
     *                               connection test fails
     */
    public void create() {
        log.debug("Starting MemcachedProvider ...");
        try {
            final ConnectionFactory connectionFactory;
            if (memcachedConfiguration.getConnectionFactoryType() == MemcachedConnectionFactoryType.BINARY) {
                connectionFactory = new BinaryConnectionFactory(
                        memcachedConfiguration.getMaxOperationQueueLength(),
                        memcachedConfiguration.getBufferSize());
            } else {
                connectionFactory = new DefaultConnectionFactory(
                        memcachedConfiguration.getMaxOperationQueueLength(),
                        memcachedConfiguration.getBufferSize());
            }
            client = new MemcachedClient(connectionFactory,
                    AddrUtil.getAddresses(memcachedConfiguration.getServers()));
            testConnection();
            log.debug("MemcachedProvider started.");
        } catch (Exception e) {
            throw new IllegalStateException("Error starting MemcachedProvider", e);
        }
    }

    /**
     * Writes and reads back a short-lived probe entry to make sure the
     * configured memcached servers are reachable. The probe key is removed
     * afterwards (it would expire after 2 seconds anyway).
     */
    private void testConnection() {
        final String testKey = "connectionTest";
        put("2", testKey, "connectionTestValue");
        if (!"connectionTestValue".equals(get(testKey))) {
            throw new IllegalStateException("Error starting MemcachedProvider. Please check memcached configuration: " + memcachedConfiguration);
        }
        // Best-effort cleanup of the probe entry; fix: the original left it behind.
        remove(null, testKey);
    }

    /**
     * Shuts the client down. Safe to call even if {@link #create()} failed
     * before the client was constructed.
     *
     * @throws IllegalStateException if shutdown fails
     */
    public void destroy() {
        log.debug("Destroying MemcachedProvider");
        try {
            // Guard against NPE when create() threw before assigning client.
            if (client != null) {
                client.shutdown();
            }
        } catch (RuntimeException e) {
            throw new IllegalStateException("Error destroying MemcachedProvider", e);
        }
    }

    @Override
    public MemcachedClient getDelegate() {
        return client;
    }

    /**
     * Fetches a value by key. The {@code region} argument is ignored.
     *
     * @return the cached value or {@code null} if absent or on error
     */
    @Override
    public Object get(String region, String key) {
        try {
            return client.get(key);
        } catch (Exception e) {
            // Parameterized logging instead of string concatenation.
            log.error("Failed to fetch object by key: {}", key, e);
            return null;
        }
    }

    @Override // it is so weird but we use as workaround "region" field to pass "expiration" for put operation
    public void put(String expirationInSeconds, String key, Object object) {
        try {
            client.set(key, putExpiration(expirationInSeconds), object);
        } catch (Exception e) {
            log.error("Failed to put object in cache, key: {}", key, e);
        }
    }

    // Parses the expiration; falls back to the configured default when the
    // value is missing or not a number.
    private int putExpiration(String expirationInSeconds) {
        try {
            return Integer.parseInt(expirationInSeconds);
        } catch (Exception e) {
            return memcachedConfiguration.getDefaultPutExpiration();
        }
    }

    /**
     * Deletes a key. The {@code region} argument is ignored.
     */
    @Override
    public void remove(String region, String key) {
        try {
            client.delete(key);
        } catch (Exception e) {
            log.error("Failed to remove object from cache, key: {}", key, e);
        }
    }

    /**
     * Flushes the whole memcached instance (all keys, not just this "region").
     */
    @Override
    public void clear() {
        client.flush();
    }
}
| oxService/src/main/java/org/xdi/service/cache/MemcachedProvider.java | package org.xdi.service.cache;
import net.spy.memcached.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author yuriyz on 02/02/2017.
*/
public class MemcachedProvider extends AbstractCacheProvider<MemcachedClient> {
    private static final Logger log = LoggerFactory.getLogger(MemcachedProvider.class);
    // Connection settings (servers, buffer size, queue length, default expiration).
    private MemcachedConfiguration memcachedConfiguration;
    public MemcachedProvider(MemcachedConfiguration memcachedConfiguration) {
        this.memcachedConfiguration = memcachedConfiguration;
    }
    // Underlying spymemcached client; initialized in create(), released in destroy().
    private MemcachedClient client;
    /**
     * Builds the memcached client using either a binary or a text (default)
     * protocol connection factory, depending on the configuration.
     *
     * @throws IllegalStateException if the client cannot be created
     */
    public void create() {
        log.debug("Starting MemcachedProvider ...");
        try {
            final ConnectionFactory connectionFactory;
            if (memcachedConfiguration.getConnectionFactoryType() == MemcachedConnectionFactoryType.BINARY) {
                connectionFactory = new BinaryConnectionFactory(memcachedConfiguration.getMaxOperationQueueLength(), memcachedConfiguration.getBufferSize());
            } else {
                connectionFactory = new DefaultConnectionFactory(memcachedConfiguration.getMaxOperationQueueLength(), memcachedConfiguration.getBufferSize());
            }
            client = new MemcachedClient(connectionFactory, AddrUtil.getAddresses(memcachedConfiguration.getServers()));
            log.debug("MemcachedProvider started.");
        } catch (Exception e) {
            throw new IllegalStateException("Error starting MemcachedProvider", e);
        }
    }
    /**
     * Shuts the client down.
     *
     * @throws IllegalStateException if shutdown fails
     */
    public void destroy() {
        log.debug("Destroying MemcachedProvider");
        try {
            client.shutdown();
        } catch (RuntimeException e) {
            throw new IllegalStateException("Error destroying MemcachedProvider", e);
        }
    }
    @Override
    public MemcachedClient getDelegate() {
        return client;
    }
    /**
     * Fetches a value by key. The {@code region} argument is ignored.
     *
     * @return the cached value or {@code null} if absent or on error
     */
    @Override
    public Object get(String region, String key) {
        try {
            return client.get(key);
        } catch (Exception e) {
            log.error("Failed to fetch object by key: " + key, e);
            return null;
        }
    }
    @Override // it is so weird but we use as workaround "region" field to pass "expiration" for put operation
    public void put(String expirationInSeconds, String key, Object object) {
        try {
            client.set(key, putExpiration(expirationInSeconds), object);
        } catch (Exception e) {
            log.error("Failed to put object in cache, key: " + key, e);
        }
    }
    // Parses the expiration; falls back to the configured default when the
    // value is missing or not a number.
    private int putExpiration(String expirationInSeconds) {
        try {
            return Integer.parseInt(expirationInSeconds);
        } catch (Exception e) {
            return memcachedConfiguration.getDefaultPutExpiration();
        }
    }
    /**
     * Deletes a key. The {@code region} argument is ignored.
     */
    @Override
    public void remove(String region, String key) {
        try {
            client.delete(key);
        } catch (Exception e) {
            log.error("Failed to remove object from cache, key: " + key, e);
        }
    }
    /**
     * Flushes the whole memcached instance (all keys, not just this "region").
     */
    @Override
    public void clear() {
        client.flush();
    }
}
| memcached : added test connection method
| oxService/src/main/java/org/xdi/service/cache/MemcachedProvider.java | memcached : added test connection method | <ide><path>xService/src/main/java/org/xdi/service/cache/MemcachedProvider.java
<ide> }
<ide>
<ide> client = new MemcachedClient(connectionFactory, AddrUtil.getAddresses(memcachedConfiguration.getServers()));
<add> testConnection();
<ide> log.debug("MemcachedProvider started.");
<ide> } catch (Exception e) {
<ide> throw new IllegalStateException("Error starting MemcachedProvider", e);
<add> }
<add> }
<add>
<add> private void testConnection() {
<add> put("2", "connectionTest", "connectionTestValue");
<add> if (!"connectionTestValue".equals(get("connectionTest"))) {
<add> throw new IllegalStateException("Error starting MemcachedProvider. Please check memcached configuration: " + memcachedConfiguration);
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | df347868c4e68f0c1ec59a69a765e13ac699e3ff | 0 | hs-web/hsweb-framework,asiaon123/hsweb-framework,asiaon123/hsweb-framework,hs-web/hsweb-framework,hs-web/hsweb-framework,asiaon123/hsweb-framework | /*
* Copyright 2016 http://www.hswebframework.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.hswebframework.web.service.organizational.simple;
import org.hswebframework.web.commons.entity.DataStatus;
import org.hswebframework.web.commons.entity.TreeSupportEntity;
import org.hswebframework.web.dao.dynamic.QueryByEntityDao;
import org.hswebframework.web.dao.organizational.*;
import org.hswebframework.web.entity.authorization.UserEntity;
import org.hswebframework.web.entity.authorization.bind.BindRoleUserEntity;
import org.hswebframework.web.entity.organizational.*;
import org.hswebframework.web.id.IDGenerator;
import org.hswebframework.web.organizational.authorization.Personnel;
import org.hswebframework.web.organizational.authorization.PersonnelAuthorization;
import org.hswebframework.web.organizational.authorization.PersonnelAuthorizationManager;
import org.hswebframework.web.organizational.authorization.TreeNode;
import org.hswebframework.web.organizational.authorization.relation.Relation;
import org.hswebframework.web.organizational.authorization.relation.SimpleRelation;
import org.hswebframework.web.organizational.authorization.relation.SimpleRelations;
import org.hswebframework.web.organizational.authorization.simple.SimplePersonnel;
import org.hswebframework.web.organizational.authorization.simple.SimplePersonnelAuthorization;
import org.hswebframework.web.service.DefaultDSLQueryService;
import org.hswebframework.web.service.EnableCacheGenericEntityService;
import org.hswebframework.web.service.authorization.AuthorizationSettingTypeSupplier;
import org.hswebframework.web.service.authorization.UserService;
import org.hswebframework.web.service.organizational.PersonService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.Caching;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.springframework.util.StringUtils.isEmpty;
/**
 * Default implementation of the person service. Manages person records, the
 * person-position bindings, the optional link between a person and a user
 * account, and assembles {@link PersonnelAuthorization} information
 * (position/department/organization/district trees and person relations).
 *
 * @author hsweb-generator-online
 */
@Service("personService")
@CacheConfig(cacheNames = "person")
public class SimplePersonService extends EnableCacheGenericEntityService<PersonEntity, String>
        implements PersonService, PersonnelAuthorizationManager, AuthorizationSettingTypeSupplier {
    // Authorization-setting type for settings attached directly to a person.
    private static String SETTING_TYPE_PERSON = "person";
    // Authorization-setting type for settings attached to a position.
    private static String SETTING_TYPE_POSITION = "position";
    @Autowired
    private PersonDao personDao;
    @Autowired
    private PersonPositionDao personPositionDao;
    @Autowired
    private PositionDao positionDao;
    @Autowired
    private DepartmentDao departmentDao;
    @Autowired
    private OrganizationalDao organizationalDao;
    @Autowired
    private DistrictDao districtDao;
    // Optional: only injected when the authorization module is deployed.
    @Autowired(required = false)
    private UserService userService;
    @Autowired
    private RelationInfoDao relationInfoDao;
    @Override
    protected IDGenerator<String> getIDGenerator() {
        return IDGenerator.MD5;
    }
    @Override
    public PersonDao getDao() {
        return personDao;
    }
    /**
     * Creates a person together with its position bindings and (optionally)
     * a linked user account.
     * <p>
     * NOTE(review): the cache key fragment "persion-id" is a typo, but it is
     * consistent with {@link #getPersonnelAuthorizationByPersonId(String)}
     * and must not be "fixed" on one side only.
     *
     * @param authBindEntity person data plus position ids and user info
     * @return the generated person id
     */
    @Override
    @Caching(evict = {
            @CacheEvict(key = "'id:'+#result"),
            @CacheEvict(key = "'auth:persion-id'+#result"),
            @CacheEvict(key = "'auth-bind'+#result")
    })
    public String insert(PersonAuthBindEntity authBindEntity) {
        authBindEntity.setStatus(DataStatus.STATUS_ENABLED);
        // TODO: 17-6-1 a lock should be used here to prevent concurrent user
        // synchronization from binding the same user to several persons
        if (authBindEntity.getPersonUser() != null) {
            syncUserInfo(authBindEntity);
        }
        String id = this.insert(((PersonEntity) authBindEntity));
        if (authBindEntity.getPositionIds() != null) {
            syncPositionInfo(id, authBindEntity.getPositionIds());
        }
        return id;
    }
    /**
     * Updates a person; re-creates its position bindings and re-synchronizes
     * the linked user account when the corresponding data is supplied.
     *
     * @param authBindEntity person data plus position ids and user info
     * @return number of updated rows
     */
    @Override
    @Caching(evict = {
            @CacheEvict(key = "'id:'+#authBindEntity.id"),
            @CacheEvict(key = "'auth:persion-id'+#authBindEntity.id"),
            @CacheEvict(key = "'auth-bind'+#authBindEntity.id")
    })
    public int updateByPk(PersonAuthBindEntity authBindEntity) {
        // TODO: 17-6-1 a lock should be used here to prevent concurrent user
        // synchronization from binding the same user to several persons
        if (authBindEntity.getPositionIds() != null) {
            // Replace all existing position bindings with the supplied set.
            personPositionDao.deleteByPersonId(authBindEntity.getId());
            syncPositionInfo(authBindEntity.getId(), authBindEntity.getPositionIds());
        }
        if (authBindEntity.getPersonUser() != null) {
            syncUserInfo(authBindEntity);
        }
        return this.updateByPk(((PersonEntity) authBindEntity));
    }
    /**
     * Loads a person together with its position ids and, when the
     * authorization module is available, the username of the linked user.
     *
     * @param id person id
     * @return the bound entity or {@code null} if the person does not exist
     */
    @Override
    @Cacheable(key = "'auth-bind'+#id")
    public PersonAuthBindEntity selectAuthBindByPk(String id) {
        PersonEntity personEntity = this.selectByPk(id);
        if (personEntity == null) return null;
        if (personEntity instanceof PersonAuthBindEntity) return ((PersonAuthBindEntity) personEntity);
        PersonAuthBindEntity bindEntity = entityFactory.newInstance(PersonAuthBindEntity.class, personEntity);
        Set<String> positionIds = DefaultDSLQueryService.createQuery(personPositionDao)
                .where(PersonPositionEntity.personId, id)
                .listNoPaging().stream()
                .map(PersonPositionEntity::getPositionId)
                .collect(Collectors.toSet());
        bindEntity.setPositionIds(positionIds);
        if (null != userService && null != personEntity.getUserId()) {
            UserEntity userEntity = userService.selectByPk(personEntity.getUserId());
            if (null != userEntity) {
                // Only the username is exposed, never the password.
                PersonUserEntity entity = entityFactory.newInstance(PersonUserEntity.class);
                entity.setUsername(userEntity.getUsername());
                bindEntity.setPersonUser(entity);
            }
        }
        return bindEntity;
    }
    /**
     * Lists all persons bound to the given position.
     *
     * @param positionId position id, must not be {@code null}
     */
    @Override
    public List<PersonEntity> selectByPositionId(String positionId) {
        Objects.requireNonNull(positionId);
        return personDao.selectByPositionId(positionId);
    }
    /**
     * Inserts one person-position binding row per position id.
     */
    protected void syncPositionInfo(String personId, Set<String> positionIds) {
        for (String positionId : positionIds) {
            PersonPositionEntity positionEntity = entityFactory.newInstance(PersonPositionEntity.class);
            positionEntity.setPersonId(personId);
            positionEntity.setPositionId(positionId);
            this.personPositionDao.insert(positionEntity);
        }
    }
    /**
     * Synchronizes the linked user account for the given person: validates
     * that the username is not bound to another person, collects the role ids
     * from all of the person's positions and inserts or updates the user via
     * the authorization module's {@link UserService}.
     */
    protected void syncUserInfo(PersonAuthBindEntity bindEntity) {
        // An empty username means "unbind the user".
        if (isEmpty(bindEntity.getPersonUser().getUsername())) {
            bindEntity.setUserId("");
            return;
        }
        // All positions of the person.
        Set<String> positionIds = bindEntity.getPositionIds();
        if (positionIds.isEmpty()) return;
        // Is the authorization module's userService deployed?
        if (null == userService) {
            logger.warn("userService not ready!");
            return;
        }
        // Load the position entities.
        List<PositionEntity> positionEntities = DefaultDSLQueryService.createQuery(positionDao)
                .where().in(PositionEntity.id, positionIds)
                .listNoPaging();
        if (positionEntities.isEmpty()) return;
        // Does the user already exist?
        UserEntity oldUser = userService.selectByUsername(bindEntity.getPersonUser().getUsername());
        if (null != oldUser) {
            // Make sure the user is not already bound to a different person.
            int userBindSize = createQuery().where()
                    .is(PersonEntity.userId, oldUser.getId())
                    .not(PersonEntity.id, bindEntity.getId())
                    .total();
            tryValidateProperty(userBindSize == 0, "personUser.username", "用户已绑定其他人员");
        }
        // Operation to apply once the user entity is prepared:
        Function<UserEntity, String> userOperationFunction =
                oldUser == null ? userService::insert : // insert when absent, update when present
                        user -> {
                            userService.update(oldUser.getId(), user);
                            return oldUser.getId();
                        };
        // Prepare the user entity.
        // Union of the role ids of all of the person's positions.
        Set<String> roleIds = positionEntities.stream()
                .map(PositionEntity::getRoles)
                .filter(Objects::nonNull)
                .flatMap(List::stream)
                .collect(Collectors.toSet());
        BindRoleUserEntity userEntity = entityFactory.newInstance(BindRoleUserEntity.class);
        userEntity.setUsername(bindEntity.getPersonUser().getUsername());
        userEntity.setPassword(bindEntity.getPersonUser().getPassword());
        userEntity.setName(bindEntity.getName());
        userEntity.setRoles(new ArrayList<>(roleIds));
        String userId = userOperationFunction.apply(userEntity);
        bindEntity.setUserId(userId);
    }
    /**
     * Deletes a person and its position bindings.
     */
    @Override
    public int deleteByPk(String id) {
        personPositionDao.deleteByPersonId(id);
        return super.deleteByPk(id);
    }
    /**
     * Builds the full {@link PersonnelAuthorization} for a person: the
     * personnel data, the trees of positions, departments, organizations and
     * districts the person belongs to, and the person's relations.
     *
     * @param personId id of an existing person
     */
    @Override
    @Cacheable(key = "'auth:persion-id'+#personId")
    public PersonnelAuthorization getPersonnelAuthorizationByPersonId(String personId) {
        SimplePersonnelAuthorization authorization = new SimplePersonnelAuthorization();
        PersonEntity entity = selectByPk(personId);
        assertNotNull(entity);
        Personnel personnel = entityFactory.newInstance(Personnel.class, SimplePersonnel.class, entity);
        authorization.setPersonnel(personnel);
        // Ids of the positions held by the person (a person may hold several).
        Set<String> positionIds = DefaultDSLQueryService.createQuery(personPositionDao)
                .where(PersonPositionEntity.personId, personId)
                .listNoPaging().stream()
                .map(PersonPositionEntity::getPositionId)
                .collect(Collectors.toSet());
        // Load the position trees, then walk up the hierarchy from the roots:
        // positions -> departments -> organizations -> districts.
        List<PositionEntity> positionEntities = getAllChildrenAndReturnRootNode(positionDao, positionIds, PositionEntity::setChildren, rootPosList -> {
            // Departments owning the root positions.
            Set<String> departmentIds = rootPosList.stream().map(PositionEntity::getDepartmentId).collect(Collectors.toSet());
            if (!CollectionUtils.isEmpty(departmentIds)) {
                List<DepartmentEntity> departmentEntities = getAllChildrenAndReturnRootNode(departmentDao, departmentIds, DepartmentEntity::setChildren, rootDepList -> {
                    // Organizations owning the root departments.
                    Set<String> orgIds = rootDepList.stream().map(DepartmentEntity::getOrgId).collect(Collectors.toSet());
                    if (!CollectionUtils.isEmpty(orgIds)) {
                        List<OrganizationalEntity> orgEntities = getAllChildrenAndReturnRootNode(organizationalDao, orgIds, OrganizationalEntity::setChildren, rootOrgList -> {
                            // Districts of the root organizations.
                            Set<String> districtIds = rootOrgList.stream().map(OrganizationalEntity::getDistrictId).filter(Objects::nonNull).collect(Collectors.toSet());
                            if (!CollectionUtils.isEmpty(districtIds)) {
                                List<DistrictEntity> districtEntities =
                                        getAllChildrenAndReturnRootNode(districtDao, districtIds, DistrictEntity::setChildren, rootDistrictList -> {
                                        });
                                authorization.setDistrictIds(transformationTreeNode(null, districtEntities));
                            }
                        });
                        authorization.setOrgIds(transformationTreeNode(null, orgEntities));
                    }
                });
                authorization.setDepartmentIds(transformationTreeNode(null, departmentEntities));
            }
        });
        authorization.setPositionIds(transformationTreeNode(null, positionEntities));
        // Relations where the person appears at either end.
        List<RelationInfoEntity> relationInfoList = DefaultDSLQueryService.createQuery(relationInfoDao)
                .where(RelationInfoEntity.relationFrom, personId)
                .or(RelationInfoEntity.relationTo, personId)
                .listNoPaging();
        List<Relation> relations = relationInfoList.stream()
                .map(info -> {
                    SimpleRelation relation = new SimpleRelation();
                    relation.setType(info.getRelationTypeFrom());
                    relation.setTarget(info.getRelationTo());
                    relation.setRelation(info.getRelationId());
                    // Direction depends on which end of the relation the
                    // person occupies.
                    if (personId.equals(info.getRelationFrom())) {
                        relation.setDirection(Relation.Direction.POSITIVE);
                    } else {
                        relation.setDirection(Relation.Direction.REVERSE);
                    }
                    return relation;
                }).collect(Collectors.toList());
        authorization.setRelations(new SimpleRelations(relations));
        return authorization;
    }
    /**
     * Loads a tree structure and returns it as a list of trees rooted at the
     * given ids.
     *
     * @param dao           query dao of the node type
     * @param rootIds       ids of the root nodes
     * @param childAccepter setter used to attach children to a node
     * @param rootConsumer  callback invoked with the root node entities
     * @param <T>           node type
     * @return the root nodes with their children attached
     */
    protected <T extends TreeSupportEntity<String>> List<T> getAllChildrenAndReturnRootNode(QueryByEntityDao<T> dao,
                                                                                            Set<String> rootIds,
                                                                                            BiConsumer<T, List<T>> childAccepter,
                                                                                            Consumer<List<T>> rootConsumer) {
        // Load the root nodes themselves.
        List<T> root = DefaultDSLQueryService.createQuery(dao)
                .where().in(TreeSupportEntity.id, rootIds)
                .listNoPaging();
        // Any roots found?
        if (!root.isEmpty()) {
            // All descendants: fast lookup via the node path property; the
            // result also contains the root nodes themselves.
            List<T> allNode = DefaultDSLQueryService
                    .createQuery(dao)
                    // Generates: path like ?||'%' or path like ?||'%' ...
                    .each(root, (query, data) -> query.or().like$(TreeSupportEntity.path, data.getPath()))
                    .listNoPaging();
            // Convert the flat list into trees.
            List<T> tree = TreeSupportEntity
                    .list2tree(allNode, childAccepter,
                            (Predicate<T>) node -> rootIds.contains(node.getId())); // root predicate
            rootConsumer.accept(root);
            return tree;
        }
        return Collections.emptyList();
    }
    /**
     * Converts {@link TreeSupportEntity} trees into {@link TreeNode} trees of
     * ids, recursively preserving the parent/children structure.
     */
    public static <V extends TreeSupportEntity<String>> Set<TreeNode<String>> transformationTreeNode(V parent, List<V> data) {
        Set<TreeNode<String>> treeNodes = new HashSet<>();
        data.forEach(node -> {
            TreeNode<String> treeNode = new TreeNode<>();
            if (parent != null) {
                TreeNode<String> parentNode = new TreeNode<>();
                parentNode.setValue(parent.getId());
                parentNode.setChildren(treeNodes);
                treeNode.setParent(parentNode);
            }
            treeNode.setValue(node.getId());
            if (node.getChildren() != null && !node.getChildren().isEmpty()) {
                treeNode.setChildren(transformationTreeNode(node, node.getChildren()));
            }
            treeNodes.add(treeNode);
        });
        return treeNodes;
    }
    /**
     * Resolves the person bound to the given user id and delegates to
     * {@link #getPersonnelAuthorizationByPersonId(String)}.
     */
    @Override
    @Cacheable(cacheNames = "person", key = "'auth:user-id'+#userId")
    public PersonnelAuthorization getPersonnelAuthorizationByUserId(String userId) {
        PersonEntity entity = createQuery().where(PersonEntity.userId, userId).single();
        assertNotNull(entity);
        return getPersonnelAuthorizationByPersonId(entity.getId());
    }
    /**
     * Supplies the authorization-setting scopes supported for a user: the
     * person itself plus each of the person's positions.
     */
    @Override
    public Set<SettingInfo> get(String userId) {
        // Settings may be attached to a position or directly to a person.
        PersonEntity entity = createQuery().where(PersonEntity.userId, userId).single();
        if (entity == null) return new HashSet<>();
        Set<SettingInfo> settingInfo = new HashSet<>();
        // Position-level settings.
        // TODO 2017/06/08 should settings of child positions be included too?
        DefaultDSLQueryService.createQuery(personPositionDao)
                .where(PersonPositionEntity.personId, entity.getId())
                .listNoPaging()
                .stream()
                .map(position -> new SettingInfo(SETTING_TYPE_POSITION, position.getPositionId()))
                .forEach(settingInfo::add);
        // Other setting types could be supported here.
        // Person-level setting.
        settingInfo.add(new SettingInfo(SETTING_TYPE_PERSON, entity.getId()));
        return settingInfo;
    }
}
| hsweb-system/hsweb-system-organizational/hsweb-system-organizational-service/hsweb-system-organizational-service-simple/src/main/java/org/hswebframework/web/service/organizational/simple/SimplePersonService.java | /*
* Copyright 2016 http://www.hswebframework.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.hswebframework.web.service.organizational.simple;
import org.hswebframework.web.commons.entity.DataStatus;
import org.hswebframework.web.commons.entity.TreeSupportEntity;
import org.hswebframework.web.dao.dynamic.QueryByEntityDao;
import org.hswebframework.web.dao.organizational.*;
import org.hswebframework.web.entity.authorization.UserEntity;
import org.hswebframework.web.entity.authorization.bind.BindRoleUserEntity;
import org.hswebframework.web.entity.organizational.*;
import org.hswebframework.web.id.IDGenerator;
import org.hswebframework.web.organizational.authorization.Personnel;
import org.hswebframework.web.organizational.authorization.PersonnelAuthorization;
import org.hswebframework.web.organizational.authorization.PersonnelAuthorizationManager;
import org.hswebframework.web.organizational.authorization.TreeNode;
import org.hswebframework.web.organizational.authorization.relation.Relation;
import org.hswebframework.web.organizational.authorization.relation.SimpleRelation;
import org.hswebframework.web.organizational.authorization.relation.SimpleRelations;
import org.hswebframework.web.organizational.authorization.simple.SimplePersonnel;
import org.hswebframework.web.organizational.authorization.simple.SimplePersonnelAuthorization;
import org.hswebframework.web.service.DefaultDSLQueryService;
import org.hswebframework.web.service.EnableCacheGenericEntityService;
import org.hswebframework.web.service.authorization.AuthorizationSettingTypeSupplier;
import org.hswebframework.web.service.authorization.UserService;
import org.hswebframework.web.service.organizational.PersonService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.Caching;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.springframework.util.StringUtils.isEmpty;
/**
* 默认的服务实现
*
* @author hsweb-generator-online
*/
@Service("personService")
@CacheConfig(cacheNames = "person")
public class SimplePersonService extends EnableCacheGenericEntityService<PersonEntity, String>
implements PersonService, PersonnelAuthorizationManager, AuthorizationSettingTypeSupplier {
private static String SETTING_TYPE_PERSON = "person";
private static String SETTING_TYPE_POSITION = "position";
@Autowired
private PersonDao personDao;
@Autowired
private PersonPositionDao personPositionDao;
@Autowired
private PositionDao positionDao;
@Autowired
private DepartmentDao departmentDao;
@Autowired
private OrganizationalDao organizationalDao;
@Autowired
private DistrictDao districtDao;
@Autowired(required = false)
private UserService userService;
@Autowired
private RelationInfoDao relationInfoDao;
@Override
protected IDGenerator<String> getIDGenerator() {
return IDGenerator.MD5;
}
@Override
public PersonDao getDao() {
return personDao;
}
@Override
@Caching(evict = {
@CacheEvict(key = "'id:'+#result"),
@CacheEvict(key = "'auth-bind'+#result")
})
public String insert(PersonAuthBindEntity authBindEntity) {
authBindEntity.setStatus(DataStatus.STATUS_ENABLED);
// TODO: 17-6-1 应该使用锁,防止并发同步用户,导致多个人员使用相同的用户
if (authBindEntity.getPersonUser() != null) {
syncUserInfo(authBindEntity);
}
String id = this.insert(((PersonEntity) authBindEntity));
if (authBindEntity.getPositionIds() != null) {
syncPositionInfo(id, authBindEntity.getPositionIds());
}
return id;
}
@Override
@Caching(evict = {
@CacheEvict(key = "'id:'+#authBindEntity.id"),
@CacheEvict(key = "'auth-bind'+#authBindEntity.id")
})
public int updateByPk(PersonAuthBindEntity authBindEntity) {
// TODO: 17-6-1 应该使用锁,防止并发同步用户,导致多个人员使用相同的用户
if (authBindEntity.getPositionIds() != null) {
personPositionDao.deleteByPersonId(authBindEntity.getId());
syncPositionInfo(authBindEntity.getId(), authBindEntity.getPositionIds());
}
if (authBindEntity.getPersonUser() != null) {
syncUserInfo(authBindEntity);
}
return this.updateByPk(((PersonEntity) authBindEntity));
}
@Override
@Cacheable(key = "'auth-bind'+#id")
public PersonAuthBindEntity selectAuthBindByPk(String id) {
PersonEntity personEntity = this.selectByPk(id);
if (personEntity == null) return null;
if (personEntity instanceof PersonAuthBindEntity) return ((PersonAuthBindEntity) personEntity);
PersonAuthBindEntity bindEntity = entityFactory.newInstance(PersonAuthBindEntity.class, personEntity);
Set<String> positionIds = DefaultDSLQueryService.createQuery(personPositionDao)
.where(PersonPositionEntity.personId, id)
.listNoPaging().stream()
.map(PersonPositionEntity::getPositionId)
.collect(Collectors.toSet());
bindEntity.setPositionIds(positionIds);
if (null != userService && null != personEntity.getUserId()) {
UserEntity userEntity = userService.selectByPk(personEntity.getUserId());
if (null != userEntity) {
PersonUserEntity entity = entityFactory.newInstance(PersonUserEntity.class);
entity.setUsername(userEntity.getUsername());
bindEntity.setPersonUser(entity);
}
}
return bindEntity;
}
@Override
public List<PersonEntity> selectByPositionId(String positionId) {
Objects.requireNonNull(positionId);
return personDao.selectByPositionId(positionId);
}
protected void syncPositionInfo(String personId, Set<String> positionIds) {
for (String positionId : positionIds) {
PersonPositionEntity positionEntity = entityFactory.newInstance(PersonPositionEntity.class);
positionEntity.setPersonId(personId);
positionEntity.setPositionId(positionId);
this.personPositionDao.insert(positionEntity);
}
}
protected void syncUserInfo(PersonAuthBindEntity bindEntity) {
if (isEmpty(bindEntity.getPersonUser().getUsername())) {
bindEntity.setUserId("");
return;
}
//获取所有职位
Set<String> positionIds = bindEntity.getPositionIds();
if (positionIds.isEmpty()) return;
//是否使用了权限管理的userService.
if (null == userService) {
logger.warn("userService not ready!");
return;
}
//获取职位实体
List<PositionEntity> positionEntities = DefaultDSLQueryService.createQuery(positionDao)
.where().in(PositionEntity.id, positionIds)
.listNoPaging();
if (positionEntities.isEmpty()) return;
//获取用户是否存在
UserEntity oldUser = userService.selectByUsername(bindEntity.getPersonUser().getUsername());
if (null != oldUser) {
//判断用户是否已经绑定了其他人员
int userBindSize = createQuery().where()
.is(PersonEntity.userId, oldUser.getId())
.not(PersonEntity.id, bindEntity.getId())
.total();
tryValidateProperty(userBindSize == 0, "personUser.username", "用户已绑定其他人员");
}
// 初始化用户后的操作方式
Function<UserEntity, String> userOperationFunction =
oldUser == null ? userService::insert : //为空新增,不为空修改
user -> {
userService.update(oldUser.getId(), user);
return oldUser.getId();
};
//初始化用户信息
//全部角色信息
Set<String> roleIds = positionEntities.stream()
.map(PositionEntity::getRoles)
.filter(Objects::nonNull)
.flatMap(List::stream)
.collect(Collectors.toSet());
BindRoleUserEntity userEntity = entityFactory.newInstance(BindRoleUserEntity.class);
userEntity.setUsername(bindEntity.getPersonUser().getUsername());
userEntity.setPassword(bindEntity.getPersonUser().getPassword());
userEntity.setName(bindEntity.getName());
userEntity.setRoles(new ArrayList<>(roleIds));
String userId = userOperationFunction.apply(userEntity);
bindEntity.setUserId(userId);
}
@Override
public int deleteByPk(String id) {
personPositionDao.deleteByPersonId(id);
return super.deleteByPk(id);
}
@Override
@Cacheable(key = "'auth:persion-id'+#personId")
public PersonnelAuthorization getPersonnelAuthorizationByPersonId(String personId) {
SimplePersonnelAuthorization authorization = new SimplePersonnelAuthorization();
PersonEntity entity = selectByPk(personId);
assertNotNull(entity);
Personnel personnel = entityFactory.newInstance(Personnel.class, SimplePersonnel.class, entity);
authorization.setPersonnel(personnel);
// 获取用户的职位ID集合(多个职位)
Set<String> positionIds = DefaultDSLQueryService.createQuery(personPositionDao)
.where(PersonPositionEntity.personId, personId)
.listNoPaging().stream()
.map(PersonPositionEntity::getPositionId)
.collect(Collectors.toSet());
//获取所有职位,并得到根职位(树结构)
List<PositionEntity> positionEntities = getAllChildrenAndReturnRootNode(positionDao, positionIds, PositionEntity::setChildren, rootPosList -> {
//根据职位获取部门
Set<String> departmentIds = rootPosList.stream().map(PositionEntity::getDepartmentId).collect(Collectors.toSet());
if (!CollectionUtils.isEmpty(departmentIds)) {
List<DepartmentEntity> departmentEntities = getAllChildrenAndReturnRootNode(departmentDao, departmentIds, DepartmentEntity::setChildren, rootDepList -> {
//根据部门获取机构
Set<String> orgIds = rootDepList.stream().map(DepartmentEntity::getOrgId).collect(Collectors.toSet());
if (!CollectionUtils.isEmpty(orgIds)) {
List<OrganizationalEntity> orgEntities = getAllChildrenAndReturnRootNode(organizationalDao, orgIds, OrganizationalEntity::setChildren, rootOrgList -> {
//根据机构获取行政区域
Set<String> districtIds = rootOrgList.stream().map(OrganizationalEntity::getDistrictId).filter(Objects::nonNull).collect(Collectors.toSet());
if (!CollectionUtils.isEmpty(districtIds)) {
List<DistrictEntity> districtEntities =
getAllChildrenAndReturnRootNode(districtDao, districtIds, DistrictEntity::setChildren, rootDistrictList -> {
});
authorization.setDistrictIds(transformationTreeNode(null, districtEntities));
}
});
authorization.setOrgIds(transformationTreeNode(null, orgEntities));
}
});
authorization.setDepartmentIds(transformationTreeNode(null, departmentEntities));
}
});
authorization.setPositionIds(transformationTreeNode(null, positionEntities));
//获取关系
List<RelationInfoEntity> relationInfoList = DefaultDSLQueryService.createQuery(relationInfoDao)
.where(RelationInfoEntity.relationFrom, personId)
.or(RelationInfoEntity.relationTo, personId)
.listNoPaging();
List<Relation> relations = relationInfoList.stream()
.map(info -> {
SimpleRelation relation = new SimpleRelation();
relation.setType(info.getRelationTypeFrom());
relation.setTarget(info.getRelationTo());
relation.setRelation(info.getRelationId());
if (personId.equals(info.getRelationFrom())) {
relation.setDirection(Relation.Direction.POSITIVE);
} else {
relation.setDirection(Relation.Direction.REVERSE);
}
return relation;
}).collect(Collectors.toList());
authorization.setRelations(new SimpleRelations(relations));
return authorization;
}
/**
* 获取一个树形结构的数据,并返回根节点集合
*
* @param dao 查询dao接口
* @param rootIds 根节点ID集合
* @param childAccepter 子节点接收方法
* @param rootConsumer 根节点消费回调
* @param <T> 节点类型
* @return 根节点集合
*/
protected <T extends TreeSupportEntity<String>> List<T> getAllChildrenAndReturnRootNode(QueryByEntityDao<T> dao,
Set<String> rootIds,
BiConsumer<T, List<T>> childAccepter,
Consumer<List<T>> rootConsumer) {
//获取根节点
List<T> root = DefaultDSLQueryService.createQuery(dao)
.where().in(TreeSupportEntity.id, rootIds)
.listNoPaging();
//节点不存在?
if (!root.isEmpty()) {
//所有子节点,使用节点的path属性进行快速查询,查询结果包含了根节点
List<T> allNode = DefaultDSLQueryService
.createQuery(dao)
//遍历生成查询条件: like path like ?||'%' or path like ?||'%' ....
.each(root, (query, data) -> query.or().like$(TreeSupportEntity.path, data.getPath()))
.listNoPaging();
//转为树形结构
List<T> tree = TreeSupportEntity
.list2tree(allNode, childAccepter,
(Predicate<T>) node -> rootIds.contains(node.getId())); // 根节点判定
rootConsumer.accept(root);
return tree;
}
return Collections.emptyList();
}
public static <V extends TreeSupportEntity<String>> Set<TreeNode<String>> transformationTreeNode(V parent, List<V> data) {
Set<TreeNode<String>> treeNodes = new HashSet<>();
data.forEach(node -> {
TreeNode<String> treeNode = new TreeNode<>();
if (parent != null) {
TreeNode<String> parentNode = new TreeNode<>();
parentNode.setValue(parent.getId());
parentNode.setChildren(treeNodes);
treeNode.setParent(parentNode);
}
treeNode.setValue(node.getId());
if (node.getChildren() != null && !node.getChildren().isEmpty()) {
treeNode.setChildren(transformationTreeNode(node, node.getChildren()));
}
treeNodes.add(treeNode);
});
return treeNodes;
}
@Override
@Cacheable(cacheNames = "person", key = "'auth:user-id'+#userId")
public PersonnelAuthorization getPersonnelAuthorizationByUserId(String userId) {
PersonEntity entity = createQuery().where(PersonEntity.userId, userId).single();
assertNotNull(entity);
return getPersonnelAuthorizationByPersonId(entity.getId());
}
@Override
public Set<SettingInfo> get(String userId) {
//支持职位和人员 设置权限
PersonEntity entity = createQuery().where(PersonEntity.userId, userId).single();
if (entity == null) return new HashSet<>();
Set<SettingInfo> settingInfo = new HashSet<>();
//岗位设置
//TODO 2017/06/08 是否将子级岗位的设置也放进来??
DefaultDSLQueryService.createQuery(personPositionDao)
.where(PersonPositionEntity.personId, entity.getId())
.listNoPaging()
.stream()
.map(position -> new SettingInfo(SETTING_TYPE_POSITION, position.getPositionId()))
.forEach(settingInfo::add);
//其他设置支持?
//人员配置
settingInfo.add(new SettingInfo(SETTING_TYPE_PERSON, entity.getId()));
return settingInfo;
}
}
| 修复变更了人员权限缓存未清除问题
| hsweb-system/hsweb-system-organizational/hsweb-system-organizational-service/hsweb-system-organizational-service-simple/src/main/java/org/hswebframework/web/service/organizational/simple/SimplePersonService.java | 修复变更了人员权限缓存未清除问题 | <ide><path>sweb-system/hsweb-system-organizational/hsweb-system-organizational-service/hsweb-system-organizational-service-simple/src/main/java/org/hswebframework/web/service/organizational/simple/SimplePersonService.java
<ide> @Override
<ide> @Caching(evict = {
<ide> @CacheEvict(key = "'id:'+#result"),
<add> @CacheEvict(key = "'auth:persion-id'+#result"),
<ide> @CacheEvict(key = "'auth-bind'+#result")
<ide> })
<ide> public String insert(PersonAuthBindEntity authBindEntity) {
<ide> @Override
<ide> @Caching(evict = {
<ide> @CacheEvict(key = "'id:'+#authBindEntity.id"),
<add> @CacheEvict(key = "'auth:persion-id'+#authBindEntity.id"),
<ide> @CacheEvict(key = "'auth-bind'+#authBindEntity.id")
<ide> })
<ide> public int updateByPk(PersonAuthBindEntity authBindEntity) { |
|
Java | bsd-2-clause | 6a4fb301f4b0337f2c8b7e8769805939d2a22076 | 0 | carlosrafaelgn/FPlayAndroid,carlosrafaelgn/FPlayAndroid,carlosrafaelgn/FPlayAndroid | //
// FPlayAndroid is distributed under the FreeBSD License
//
// Copyright (c) 2013-2014, Carlos Rafael Gimenes das Neves
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
//
// https://github.com/carlosrafaelgn/FPlayAndroid
//
package br.com.carlosrafaelgn.fplay;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProvider;
import android.content.ComponentName;
import android.content.Context;
import android.widget.RemoteViews;
import br.com.carlosrafaelgn.fplay.playback.Player;
import br.com.carlosrafaelgn.fplay.ui.UI;
public final class WidgetMain extends AppWidgetProvider {
private static AppWidgetManager appWidgetManager;
private static ComponentName widgetComponent;
private static void updateAppWidget(AppWidgetManager appWidgetManager, int appWidgetId) {
appWidgetManager.updateAppWidget(appWidgetId, Player.prepareRemoteViews(new RemoteViews(Player.theApplication.getPackageName(), UI.widgetTransparentBg ? R.layout.main_widget_transparent : R.layout.main_widget), true, false, false));
}
public static void updateWidgets() {
if (appWidgetManager == null) {
appWidgetManager = AppWidgetManager.getInstance(Player.theApplication);
if (appWidgetManager == null)
return;
}
if (widgetComponent == null)
widgetComponent = new ComponentName(Player.theApplication, WidgetMain.class);
final int[] appWidgetIds = appWidgetManager.getAppWidgetIds(widgetComponent);
if (appWidgetIds == null)
return;
for (int i = appWidgetIds.length - 1; i >= 0; i--)
updateAppWidget(appWidgetManager, appWidgetIds[i]);
}
@Override
public void onUpdate(Context context, AppWidgetManager appWidgetManager, int[] appWidgetIds) {
UI.loadWidgetRelatedSettings(context);
for (int i = appWidgetIds.length - 1; i >= 0; i--)
updateAppWidget(appWidgetManager, appWidgetIds[i]);
}
}
| src/br/com/carlosrafaelgn/fplay/WidgetMain.java | //
// FPlayAndroid is distributed under the FreeBSD License
//
// Copyright (c) 2013-2014, Carlos Rafael Gimenes das Neves
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
//
// https://github.com/carlosrafaelgn/FPlayAndroid
//
package br.com.carlosrafaelgn.fplay;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProvider;
import android.content.ComponentName;
import android.content.Context;
import android.widget.RemoteViews;
import br.com.carlosrafaelgn.fplay.playback.Player;
import br.com.carlosrafaelgn.fplay.ui.UI;
public final class WidgetMain extends AppWidgetProvider {
private static AppWidgetManager appWidgetManager;
private static ComponentName widgetComponent;
private static void updateAppWidget(AppWidgetManager appWidgetManager, int appWidgetId) {
appWidgetManager.updateAppWidget(appWidgetId, Player.prepareRemoteViews(new RemoteViews(Player.theApplication.getPackageName(), UI.widgetTransparentBg ? R.layout.main_widget_transparent : R.layout.main_widget), true, false, false));
}
public static void updateWidgets() {
if (appWidgetManager == null)
appWidgetManager = AppWidgetManager.getInstance(Player.theApplication);
if (widgetComponent == null)
widgetComponent = new ComponentName(Player.theApplication, WidgetMain.class);
final int[] appWidgetIds = appWidgetManager.getAppWidgetIds(widgetComponent);
if (appWidgetIds == null)
return;
for (int i = appWidgetIds.length - 1; i >= 0; i--)
updateAppWidget(appWidgetManager, appWidgetIds[i]);
}
@Override
public void onUpdate(Context context, AppWidgetManager appWidgetManager, int[] appWidgetIds) {
UI.loadWidgetRelatedSettings(context);
for (int i = appWidgetIds.length - 1; i >= 0; i--)
updateAppWidget(appWidgetManager, appWidgetIds[i]);
}
}
| Checking if appWidgetManager is null (it is null on devices without a home screen, like Android Things)
| src/br/com/carlosrafaelgn/fplay/WidgetMain.java | Checking if appWidgetManager is null (it is null on devices without a home screen, like Android Things) | <ide><path>rc/br/com/carlosrafaelgn/fplay/WidgetMain.java
<ide> }
<ide>
<ide> public static void updateWidgets() {
<del> if (appWidgetManager == null)
<add> if (appWidgetManager == null) {
<ide> appWidgetManager = AppWidgetManager.getInstance(Player.theApplication);
<add> if (appWidgetManager == null)
<add> return;
<add> }
<ide> if (widgetComponent == null)
<ide> widgetComponent = new ComponentName(Player.theApplication, WidgetMain.class);
<ide> final int[] appWidgetIds = appWidgetManager.getAppWidgetIds(widgetComponent); |
|
Java | epl-1.0 | c56e416bc4788556e59e9aa0a7321324544a9577 | 0 | takari/takari-lifecycle | package io.takari.maven.plugins.compile;
import static io.takari.maven.testing.TestResources.assertFileContents;
import static io.takari.maven.testing.TestResources.cp;
import static io.takari.maven.testing.TestResources.rm;
import static io.takari.maven.testing.TestResources.touch;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameters;
import io.takari.maven.plugins.compile.AbstractCompileMojo.Proc;
import io.takari.maven.plugins.compile.javac.CompilerJavac;
import io.takari.maven.plugins.compile.jdt.CompilerJdt;
public class AnnotationProcessingTest extends AbstractCompileTest {
public AnnotationProcessingTest(String compilerId) {
super(compilerId);
}
@Parameters(name = "{0}")
public static Iterable<Object[]> compilers() {
List<Object[]> compilers = new ArrayList<Object[]>();
compilers.add(new Object[] {"javac"});
compilers.add(new Object[] {"forked-javac"});
compilers.add(new Object[] {"jdt"});
return compilers;
}
private File procCompile(String projectName, Proc proc, Xpp3Dom... parameters) throws Exception, IOException {
File basedir = resources.getBasedir(projectName);
return procCompile(basedir, proc, parameters);
}
private File procCompile(File basedir, Proc proc, Xpp3Dom... parameters) throws Exception, IOException {
File processor = compileAnnotationProcessor();
return processAnnotations(basedir, proc, processor, parameters);
}
private File processAnnotations(File basedir, Proc proc, File processor, Xpp3Dom... parameters) throws Exception {
MavenProject project = mojos.readMavenProject(basedir);
processAnnotations(project, processor, proc, parameters);
return basedir;
}
protected void processAnnotations(MavenProject project, File processor, Proc proc, Xpp3Dom... parameters) throws Exception {
MavenSession session = mojos.newMavenSession(project);
processAnnotations(session, project, "compile", processor, proc, parameters);
}
protected void processAnnotations(MavenSession session, MavenProject project, String goal, File processor, Proc proc, Xpp3Dom... parameters) throws Exception {
MojoExecution execution = mojos.newMojoExecution(goal);
addDependency(project, "processor", new File(processor, "target/classes"));
Xpp3Dom configuration = execution.getConfiguration();
if (proc != null) {
configuration.addChild(newParameter("proc", proc.name()));
}
if (parameters != null) {
for (Xpp3Dom parameter : parameters) {
configuration.addChild(parameter);
}
}
mojos.executeMojo(session, project, execution);
}
private File compileAnnotationProcessor() throws Exception, IOException {
File processor = compile("compile-proc/processor");
cp(processor, "src/main/resources/META-INF/services/javax.annotation.processing.Processor", "target/classes/META-INF/services/javax.annotation.processing.Processor");
return processor;
}
@Test
public void testProc_only() throws Exception {
File basedir = procCompile("compile-proc/proc", Proc.only);
mojos.assertBuildOutputs(new File(basedir, "target/generated-sources/annotations"), "proc/GeneratedSource.java", "proc/AnotherGeneratedSource.java");
}
@Test
public void testProc_none() throws Exception {
File basedir = procCompile("compile-proc/proc", Proc.none);
mojos.assertBuildOutputs(new File(basedir, "target"), "classes/proc/Source.class");
}
@Test
public void testProc_proc() throws Exception {
File basedir = procCompile("compile-proc/proc", Proc.proc);
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class", //
"generated-sources/annotations/proc/AnotherGeneratedSource.java", //
"classes/proc/AnotherGeneratedSource.class");
}
@Test
public void testProc_incrementalProcessorChange() throws Exception {
File processor = compileAnnotationProcessor();
File basedir = resources.getBasedir("compile-proc/proc");
processAnnotations(basedir, Proc.proc, processor);
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class", //
"generated-sources/annotations/proc/AnotherGeneratedSource.java", //
"classes/proc/AnotherGeneratedSource.class");
rm(processor, "target/classes/META-INF/services/javax.annotation.processing.Processor");
mojos.flushClasspathCaches();
processAnnotations(basedir, Proc.proc, processor);
mojos.assertBuildOutputs(new File(basedir, "target"), "classes/proc/Source.class");
mojos.assertDeletedOutputs(new File(basedir, "target"), //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class", //
"generated-sources/annotations/proc/AnotherGeneratedSource.java", //
"classes/proc/AnotherGeneratedSource.class");
}
@Test
public void testProc_dummyOutput() throws Exception {
File basedir = procCompile("compile-proc/proc", Proc.proc, newProcessors("processor.Processor_dummyOutput"));
mojos.assertBuildOutputs(new File(basedir, "target"), "classes/proc/Source.class");
}
@Test
public void testProcTypeReference() throws Exception {
File basedir = procCompile("compile-proc/proc-type-reference", Proc.proc);
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"classes/proc/GeneratedSourceSubclass.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class", //
"generated-sources/annotations/proc/AnotherGeneratedSource.java", //
"classes/proc/AnotherGeneratedSource.class");
}
@Test
public void testProc_createResource() throws Exception {
File basedir = procCompile("compile-proc/proc", Proc.proc, newProcessors("processor.ProcessorCreateResource"));
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java");
}
@Test
public void testProc_annotationProcessors() throws Exception {
Xpp3Dom processors = newProcessors("processor.Processor");
File basedir = procCompile("compile-proc/proc", Proc.proc, processors);
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class");
}
@Test
public void testProc_processorErrorMessage() throws Exception {
Xpp3Dom processors = newProcessors("processor.ErrorMessageProcessor");
File basedir = resources.getBasedir("compile-proc/proc");
try {
procCompile(basedir, Proc.only, processors);
Assert.fail();
} catch (MojoExecutionException e) {
// expected
}
mojos.assertBuildOutputs(new File(basedir, "target"), new String[0]);
ErrorMessage expected = new ErrorMessage(compilerId);
expected.setSnippets("jdt", "ERROR Source.java [6:14] test error message"); // TODO why 14?
expected.setSnippets("javac", "ERROR Source.java [6:8] test error message");
mojos.assertMessage(basedir, "src/main/java/proc/Source.java", expected);
Collection<String> pomMessages = mojos.getBuildContextLog().getMessages(new File(basedir, "pom.xml"));
Assert.assertEquals(3, pomMessages.size());
// TODO assert actual messages are as expected
}
@Test
public void testProc_messages() throws Exception {
ErrorMessage expected = new ErrorMessage(compilerId);
expected.setSnippets("javac", "ERROR BrokenSource.java [2:29]", "cannot find symbol");
expected.setSnippets("jdt", "ERROR BrokenSource.java [2:29]", "cannot be resolved to a type");
File processor = compileAnnotationProcessor();
File basedir = resources.getBasedir("compile-proc/proc");
String[] outputs;
if (CompilerJdt.ID.equals(compilerId)) {
outputs = new String[] {"classes/proc/Source.class" //
, "generated-sources/annotations/proc/BrokenSource.java"};
} else {
// TODO investigate why javac does not generate classes/proc/Source.class
outputs = new String[] {"generated-sources/annotations/proc/BrokenSource.java"};
}
Xpp3Dom processors = newProcessors("processor.BrokenProcessor");
try {
processAnnotations(basedir, Proc.proc, processor, processors);
Assert.fail();
} catch (MojoExecutionException e) {
// expected
}
mojos.assertBuildOutputs(new File(basedir, "target"), outputs);
assertProcMessage(basedir, "target/generated-sources/annotations/proc/BrokenSource.java", expected);
// no change rebuild should produce the same messages
try {
processAnnotations(basedir, Proc.proc, processor, processors);
Assert.fail();
} catch (MojoExecutionException e) {
// expected
}
mojos.assertCarriedOverOutputs(new File(basedir, "target"), outputs);
assertProcMessage(basedir, "target/generated-sources/annotations/proc/BrokenSource.java", expected);
}
private void assertProcMessage(File basedir, String path, ErrorMessage expected) throws Exception {
// javac reports the same compilation error twice when Proc.proc
Set<String> messages = new HashSet<String>(mojos.getBuildContextLog().getMessages(new File(basedir, path)));
Assert.assertEquals(messages.toString(), 1, messages.size());
String message = messages.iterator().next();
Assert.assertTrue(expected.isMatch(message));
}
@Test
public void testProc_processorOptions() throws Exception {
Xpp3Dom processors = newProcessors("processor.ProcessorWithOptions");
Xpp3Dom options = new Xpp3Dom("annotationProcessorOptions");
options.addChild(newParameter("optionA", "valueA"));
options.addChild(newParameter("optionB", "valueB"));
procCompile("compile-proc/proc", Proc.proc, processors, options);
}
@Test
public void testProc_staleGeneratedSourcesCleanup() throws Exception {
File processor = compileAnnotationProcessor();
File basedir = resources.getBasedir("compile-proc/proc");
processAnnotations(basedir, Proc.proc, processor);
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class", //
"generated-sources/annotations/proc/AnotherGeneratedSource.java", //
"classes/proc/AnotherGeneratedSource.class");
// remove annotation
cp(basedir, "src/main/java/proc/Source.java-remove-annotation", "src/main/java/proc/Source.java");
processAnnotations(basedir, Proc.proc, processor);
mojos.assertDeletedOutputs(new File(basedir, "target"), //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class", //
"generated-sources/annotations/proc/AnotherGeneratedSource.java", //
"classes/proc/AnotherGeneratedSource.class");
}
@Test
public void testProc_incrementalDeleteLastAnnotatedSource() throws Exception {
File processor = compileAnnotationProcessor();
File basedir = resources.getBasedir("compile-proc/proc");
Xpp3Dom processors = newProcessors("processor.Processor");
// initial compilation
processAnnotations(basedir, Proc.proc, processor, processors);
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class");
// no-change rebuild
processAnnotations(basedir, Proc.proc, processor, processors);
mojos.assertCarriedOverOutputs(new File(basedir, "target"), //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class");
// remove annotated class
rm(basedir, "src/main/java/proc/Source.java");
processAnnotations(basedir, Proc.proc, processor, processors);
mojos.assertDeletedOutputs(new File(basedir, "target"), //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/Source.class", //
"classes/proc/GeneratedSource.class");
}
@Test
public void testProc_nonIncrementalProcessor_onlyEX_deleteSource() throws Exception {
File processor = compileAnnotationProcessor();
File basedir = resources.getBasedir("compile-proc/proc");
File target = new File(basedir, "target");
Xpp3Dom processors = newProcessors("processor.NonIncrementalProcessor");
processAnnotations(basedir, Proc.onlyEX, processor, processors);
mojos.assertBuildOutputs(target, //
"generated-sources/annotations/proc/NonIncrementalSource.java");
rm(basedir, "src/main/java/proc/Source.java");
processAnnotations(basedir, Proc.onlyEX, processor, processors);
mojos.assertDeletedOutputs(target, //
"generated-sources/annotations/proc/NonIncrementalSource.java");
}
@Test
public void testProc_projectSourceRoots() throws Exception {
File processor = compileAnnotationProcessor();
File basedir = resources.getBasedir("compile-proc/proc");
MavenProject project = mojos.readMavenProject(basedir);
addDependency(project, "processor", new File(processor, "target/classes"));
mojos.compile(project, newParameter("proc", Proc.proc.name()), newProcessors("processor.Processor"));
Assert.assertTrue(project.getCompileSourceRoots().contains(new File(basedir, "target/generated-sources/annotations").getAbsolutePath()));
// TODO testCompile
}
@Test
public void testIncrementalDelete() throws Exception {
File processor = compileAnnotationProcessor();
File basedir = resources.getBasedir("compile-proc/proc-incremental-delete");
Xpp3Dom processors = newProcessors("processor.Processor");
// initial compilation
processAnnotations(basedir, Proc.proc, processor, processors);
mojos.assertBuildOutputs(new File(basedir, "target"), //
"classes/proc/Keep.class", //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class");
// no-change rebuild
processAnnotations(basedir, Proc.proc, processor, processors);
mojos.assertCarriedOverOutputs(new File(basedir, "target"), //
"classes/proc/Keep.class", //
"classes/proc/Source.class", //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/GeneratedSource.class");
// remove annotated source
rm(basedir, "src/main/java/proc/Source.java");
processAnnotations(basedir, Proc.proc, processor, processors);
mojos.assertDeletedOutputs(new File(basedir, "target"), //
"generated-sources/annotations/proc/GeneratedSource.java", //
"classes/proc/Source.class", //
"classes/proc/GeneratedSource.class");
}
  @Test
  public void testConvertGeneratedSourceToHandwritten() throws Exception {
    // this test demonstrates the following scenario
    // 1. annotation processor generates java source and the generated source is compiled by the compiler
    // 2. annotation is removed from original source and the generated source is moved to a dependency
    // assert original generatedSource.java and generatedSource.class are deleted
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-incremental-move");
    File moduleA = new File(basedir, "module-a");
    File moduleB = new File(basedir, "module-b");
    Xpp3Dom processors = newProcessors("processor.Processor");
    // initial build: compile module-b first, then module-a with annotation processing enabled
    mojos.compile(moduleB);
    MavenProject projectA = mojos.readMavenProject(moduleA);
    addDependency(projectA, "module-b", new File(moduleB, "target/classes"));
    processAnnotations(projectA, processor, Proc.proc, processors);
    mojos.assertBuildOutputs(new File(moduleA, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
    mojos.flushClasspathCaches();
    // move generated source to module-b/src/main/java
    cp(moduleB, "src/main/java/proc/GeneratedSource.java-moved", "src/main/java/proc/GeneratedSource.java");
    cp(moduleA, "src/main/java/modulea/ModuleA.java-new", "src/main/java/modulea/ModuleA.java");
    cp(moduleA, "src/main/java/proc/Source.java-remove-annotation", "src/main/java/proc/Source.java");
    mojos.compile(moduleB);
    mojos.assertBuildOutputs(moduleB, "target/classes/proc/GeneratedSource.class");
    // incremental rebuild of module-a: the previously generated source and its class
    // must be deleted now that the handwritten copy lives in module-b
    projectA = mojos.readMavenProject(moduleA);
    addDependency(projectA, "module-b", new File(moduleB, "target/classes"));
    processAnnotations(projectA, processor, Proc.proc, processors);
    mojos.assertBuildOutputs(new File(moduleA, "target"), //
        "classes/proc/Source.class", "classes/modulea/ModuleA.class");
    mojos.assertDeletedOutputs(new File(moduleA, "target"), //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
  }
private Xpp3Dom newProcessors(String... processors) {
Xpp3Dom annotationProcessors = new Xpp3Dom("annotationProcessors");
for (String processor : processors) {
annotationProcessors.addChild(newParameter("processor", processor));
}
return annotationProcessors;
}
  @Test
  public void testRequireProc() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/require-proc");
    try {
      // a processor on the classpath without an explicit 'proc' configuration must be rejected
      processAnnotations(basedir, null, processor);
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      // TODO assert message
    }
    // without the processor dependency the same unconfigured build succeeds
    processAnnotations(basedir, null, null);
  }
  @Test
  public void testRecompile() throws Exception {
    /**
     * <pre>
     * Source.java -> Source.class
     * \-> GeneratedSource.java -> GeneratedSource.class
     * ^
     * Client.java -> Client.class
     * </pre>
     */
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-incremental-recompile");
    Xpp3Dom processors = newProcessors("processor.ProcessorSiblingBody");
    Xpp3Dom options = new Xpp3Dom("annotationProcessorOptions");
    // the processor presumably reads the generated-source body from a sibling
    // GeneratedSource.body file resolved against this basedir option — see resource project
    options.addChild(newParameter("basedir", new File(basedir, "src/main/java").getCanonicalPath()));
    processAnnotations(basedir, Proc.proc, processor, processors, options);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "classes/proc/Client.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
    // change the generated-source body and touch the annotated source:
    // both the regenerated source and its client are expected to rebuild
    cp(basedir, "src/main/java/proc/GeneratedSource.body-changed", "src/main/java/proc/GeneratedSource.body");
    touch(basedir, "src/main/java/proc/Source.java");
    processAnnotations(basedir, Proc.proc, processor, processors, options);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "classes/proc/Client.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
  }
  @Test
  public void testProc_processorLastRound() throws Exception {
    // processor.ProcessorLastRound writes a types.lst resource during its last round
    Xpp3Dom processors = newProcessors("processor.ProcessorLastRound");
    File basedir = procCompile("compile-proc/proc", Proc.onlyEX, processors);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/types.lst");
    assertFileContents("proc.Source\n", basedir, "target/classes/types.lst");
  }
  @Test
  public void testIncremental_proc_only() throws Exception {
    // the point of this test is to assert that changes to annotations trigger affected sources reprocessing when proc=only
    // note sourcepath=disable, otherwise proc:only is all-or-nothing
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-incremental-proconly");
    File generatedSources = new File(basedir, "target/generated-sources/annotations");
    Xpp3Dom processors = newProcessors("processor.Processor");
    // compile first (proc=none under a separate execution id), then process annotations only
    compile(basedir, processor, "compile-only");
    processAnnotations(basedir, Proc.only, processor, processors, newParameter("sourcepath", "disable"));
    mojos.assertBuildOutputs(generatedSources, "proc/GeneratedConcrete.java", "proc/GeneratedAbstract.java", "proc/GeneratedAnother.java");
    // removing the annotation from Abstract must delete the sources generated from it
    cp(basedir, "src/main/java/proc/Abstract.java-remove-annotation", "src/main/java/proc/Abstract.java");
    compile(basedir, processor, "compile-only");
    processAnnotations(basedir, Proc.only, processor, processors, newParameter("sourcepath", "disable"));
    mojos.assertDeletedOutputs(generatedSources, "proc/GeneratedConcrete.java", "proc/GeneratedAbstract.java");
    if (CompilerJdt.ID.equals(compilerId)) {
      // jdt carries the unaffected generated source over; other compilers regenerate it
      mojos.assertCarriedOverOutputs(generatedSources, "proc/GeneratedAnother.java");
    } else {
      mojos.assertBuildOutputs(generatedSources, "proc/GeneratedAnother.java");
    }
  }
private void compile(File basedir, File processor, String executionId) throws Exception {
MavenProject project = mojos.readMavenProject(basedir);
MavenSession session = mojos.newMavenSession(project);
MojoExecution execution = mojos.newMojoExecution();
MojoExecution cloned = new MojoExecution(execution.getMojoDescriptor(), executionId, null);
cloned.setConfiguration(execution.getConfiguration());
execution.getConfiguration().addChild(newParameter("proc", Proc.none.name()));
addDependency(project, "processor", new File(processor, "target/classes"));
mojos.executeMojo(session, project, cloned);
}
  @Test
  public void testMutliround_procOnly() throws Exception {
    // NOTE(review): "Mutliround" typo in the method name is kept on purpose — renaming a
    // public test method would change reported test ids
    File basedir = procCompile("compile-proc/multiround", Proc.only);
    File generatedSources = new File(basedir, "target/generated-sources/annotations");
    // multi-round processing: the first generated source triggers another generation round
    mojos.assertMessages(basedir, "src/main/java/multiround/Source.java", new String[] {});
    mojos.assertBuildOutputs(generatedSources, "multiround/GeneratedSource.java", "multiround/AnotherGeneratedSource.java");
  }
  @Test
  public void testMultiround_processorLastRound() throws Exception {
    // processor.ProcessorLastRound creates well-known resource during last round
    // the point of this test is to assert this works during incremental build
    // when compiler may be invoked several times to compile indirectly affected sources
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/multiround-type-reference");
    processAnnotations(basedir, Proc.procEX, processor, newProcessors("processor.ProcessorLastRound"));
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "classes/proc/AnotherSource.class", //
        "classes/types.lst");
    // incremental rebuild after a source change must still produce the last-round resource
    cp(basedir, "src/main/java/proc/Source.java-changed", "src/main/java/proc/Source.java");
    processAnnotations(basedir, Proc.procEX, processor, newProcessors("processor.ProcessorLastRound"));
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "classes/proc/AnotherSource.class", //
        "classes/types.lst");
  }
  @Test
  public void testLastRound_typeIndex() throws Exception {
    // apparently javac can't resolve "forward" references to types generated during apt last round
    Assume.assumeTrue(CompilerJdt.ID.equals(compilerId));
    Xpp3Dom processors = newProcessors("processor.ProcessorLastRound_typeIndex");
    File basedir = procCompile("compile-proc/multiround-type-index", Proc.procEX, processors);
    File target = new File(basedir, "target");
    mojos.assertBuildOutputs(target, //
        "generated-sources/annotations/generated/TypeIndex.java", //
        "generated-sources/annotations/generated/TypeIndex2.java", //
        "classes/generated/TypeIndex.class", //
        "classes/generated/TypeIndex2.class", //
        "classes/typeindex/Annotated.class", //
        "classes/typeindex/Consumer.class" //
    );
  }
  @Test
  public void testReprocess() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/reprocess");
    File target = new File(basedir, "target");
    // NOTE(review): "ProessorValue" spelling matches the test processor's actual class
    // name in the resources project — do not "fix" the string here
    processAnnotations(basedir, Proc.proc, processor, newProcessors("processor.ProessorValue"));
    mojos.assertBuildOutputs(target, //
        "classes/reprocess/Annotated.class", //
        "classes/reprocess/Annotated.value", //
        "classes/reprocess/SimpleA.class", //
        "classes/reprocess/SimpleB.class");
    Assert.assertEquals("1", FileUtils.fileRead(new File(target, "classes/reprocess/Annotated.value")));
    // change a referenced type and touch the annotated type: reprocessing must write a new value
    cp(basedir, "src/main/java/reprocess/SimpleA.java-changed", "src/main/java/reprocess/SimpleA.java");
    touch(new File(basedir, "src/main/java/reprocess/Annotated.java"));
    processAnnotations(basedir, Proc.proc, processor, newProcessors("processor.ProessorValue"));
    mojos.assertBuildOutputs(target, //
        "classes/reprocess/Annotated.class", //
        "classes/reprocess/Annotated.value", //
        "classes/reprocess/SimpleA.class", //
        "classes/reprocess/SimpleB.class");
    Assert.assertEquals("10", FileUtils.fileRead(new File(target, "classes/reprocess/Annotated.value")));
  }
  @Test
  public void testProc_nonIncrementalProcessor() throws Exception {
    // this scenario is jdt-specific: the EX proc modes accept a non-incremental
    // processor while the incremental proc/only modes must reject it
    Assume.assumeTrue(CompilerJdt.ID.equals(compilerId));
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc");
    File target = new File(basedir, "target");
    processAnnotations(basedir, Proc.procEX, processor, newProcessors("processor.NonIncrementalProcessor"));
    mojos.assertBuildOutputs(target, //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/NonIncrementalSource.java", //
        "classes/proc/NonIncrementalSource.class");
    FileUtils.deleteDirectory(target);
    processAnnotations(basedir, Proc.onlyEX, processor, newProcessors("processor.NonIncrementalProcessor"));
    mojos.assertBuildOutputs(target, //
        "generated-sources/annotations/proc/NonIncrementalSource.java");
    FileUtils.deleteDirectory(target);
    try {
      processAnnotations(basedir, Proc.proc, processor, newProcessors("processor.NonIncrementalProcessor"));
      Assert.fail();
    } catch (MojoExecutionException expected) {
      // TODO validate the error message
    }
    FileUtils.deleteDirectory(target);
    try {
      processAnnotations(basedir, Proc.only, processor, newProcessors("processor.NonIncrementalProcessor"));
      Assert.fail();
    } catch (MojoExecutionException expected) {
      // TODO validate the error message
    }
  }
  @Test
  public void testSourcepathDependency() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
    File dependencyBasedir = new File(basedir, "dependency");
    File projectBasedir = new File(basedir, "project");
    Xpp3Dom processors = newProcessors("processor.Processor");
    Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
    MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
    MavenProject project = mojos.readMavenProject(projectBasedir);
    // register the sibling reactor project as a dependency so sourcepath=reactorDependencies
    // can resolve it to sources
    mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
        .setGroupId(dependency.getGroupId()) //
        .setArtifactId(dependency.getArtifactId()) //
        .setVersion(dependency.getVersion()) //
        .addTo(project);
    MavenSession session = mojos.newMavenSession(project);
    session.setProjects(Arrays.asList(project, dependency));
    processAnnotations(session, project, "compile", processor, Proc.only, processors, sourcepath);
    mojos.assertBuildOutputs(new File(projectBasedir, "target"), //
        "generated-sources/annotations/sourcepath/project/GeneratedSource.java" //
    );
  }
  @Test
  public void testSourcepathDependency_incremental() throws Exception {
    // the point of this test is assert that changes to sourcepath files are expected to trigger reprocessing of affected sources
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
    File dependencyBasedir = new File(basedir, "dependency");
    File projectBasedir = new File(basedir, "project");
    Xpp3Dom processors = newProcessors("processor.Processor");
    Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
    MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
    MavenProject project = mojos.readMavenProject(projectBasedir);
    mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
        .setGroupId(dependency.getGroupId()) //
        .setArtifactId(dependency.getArtifactId()) //
        .setVersion(dependency.getVersion()) //
        .addTo(project);
    MavenSession session = mojos.newMavenSession(project);
    session.setProjects(Arrays.asList(project, dependency));
    processAnnotations(session, project, "compile", processor, Proc.only, processors, sourcepath);
    mojos.assertBuildOutputs(new File(projectBasedir, "target"), //
        "generated-sources/annotations/sourcepath/project/GeneratedSource.java" //
    );
    // second, incremental, compilation with one of sourcepath files removed
    rm(dependencyBasedir, "src/main/java/sourcepath/dependency/SourcepathDependency.java");
    mojos.flushClasspathCaches();
    dependency = mojos.readMavenProject(dependencyBasedir);
    project = mojos.readMavenProject(projectBasedir);
    mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
        .setGroupId(dependency.getGroupId()) //
        .setArtifactId(dependency.getArtifactId()) //
        .setVersion(dependency.getVersion()) //
        .addTo(project);
    session = mojos.newMavenSession(project);
    session.setProjects(Arrays.asList(project, dependency));
    // NOTE(review): no Assert.fail() here — the build failure appears to be optional
    // (compilers may differ in whether the missing type is fatal); confirm intent
    try {
      processAnnotations(session, project, "compile", processor, Proc.only, processors, sourcepath);
    } catch (MojoExecutionException expected) {}
    ErrorMessage message = new ErrorMessage(compilerId);
    message.setSnippets(CompilerJdt.ID, "sourcepath.dependency.SourcepathDependency cannot be resolved to a type");
    message.setSnippets(CompilerJavac.ID, "package sourcepath.dependency does not exist");
    mojos.assertMessage(projectBasedir, "src/main/java/sourcepath/project/Source.java", message);
    // oddly enough, both jdt and javac generate GeneratedSource despite the error
  }
  @Test
  public void testSourcepathDependency_classifiedDependency() throws Exception {
    // a reactor dependency with a classifier is rejected when sourcepath=reactorDependencies
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
    File dependencyBasedir = new File(basedir, "dependency");
    File projectBasedir = new File(basedir, "project");
    Xpp3Dom processors = newProcessors("processor.Processor");
    Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
    MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
    MavenProject project = mojos.readMavenProject(projectBasedir);
    mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
        .setGroupId(dependency.getGroupId()) //
        .setArtifactId(dependency.getArtifactId()) //
        .setVersion(dependency.getVersion()) //
        .setClassifier("classifier") //
        .addTo(project);
    MavenSession session = mojos.newMavenSession(project);
    session.setProjects(Arrays.asList(project, dependency));
    try {
      processAnnotations(session, project, "compile", processor, Proc.only, processors, sourcepath);
      Assert.fail();
    } catch (MojoExecutionException expected) {
      // the error message must identify the offending dependency
      Assert.assertTrue(expected.getMessage().contains(dependency.getGroupId() + ":" + dependency.getArtifactId()));
    }
  }
  @Test
  public void testSourcepathIncludes() throws Exception {
    // only sources matching the includes pattern participate in processing
    Xpp3Dom includes = new Xpp3Dom("includes");
    includes.addChild(newParameter("include", "sourcepath/project/*.java"));
    Xpp3Dom processors = newProcessors("processor.Processor");
    Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
    File basedir = procCompile("compile-proc/proc-sourcepath-includes", Proc.only, includes, processors, sourcepath);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "generated-sources/annotations/sourcepath/project/GeneratedSource.java" //
    );
  }
  @Test
  public void testSourcepathDependency_testCompile() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
    File dependencyBasedir = new File(basedir, "dependency");
    File projectBasedir = new File(basedir, "project");
    Xpp3Dom processors = newProcessors("processor.Processor");
    Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
    MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
    MavenProject project = mojos.readMavenProject(projectBasedir);
    mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
        .setGroupId(dependency.getGroupId()) //
        .setArtifactId(dependency.getArtifactId()) //
        .setVersion(dependency.getVersion()) //
        .addTo(project);
    // additionally depend on the reactor project's test-jar so testCompile sees its test sources
    mojos.newDependency(new File(dependencyBasedir, "target/test-classes")) //
        .setGroupId(dependency.getGroupId()) //
        .setArtifactId(dependency.getArtifactId()) //
        .setType("test-jar") //
        .setVersion(dependency.getVersion()) //
        .addTo(project);
    MavenSession session = mojos.newMavenSession(project);
    session.setProjects(Arrays.asList(project, dependency));
    processAnnotations(session, project, "testCompile", processor, Proc.only, processors, sourcepath);
    mojos.assertBuildOutputs(new File(projectBasedir, "target"), //
        "generated-test-sources/test-annotations/sourcepath/project/test/GeneratedSourceTest.java" //
    );
  }
  @Test
  public void testSourcepath_classpathVisibility() throws Exception {
    // jdt-only: sourcepath=reactorDependencies combined with privatePackageReference=error
    // is rejected, and the error mentions the offending parameter
    Assume.assumeTrue(CompilerJdt.ID.equals(compilerId));
    File basedir = resources.getBasedir();
    Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
    Xpp3Dom classpathVisibility = newParameter("privatePackageReference", "error");
    try {
      procCompile(basedir, Proc.only, sourcepath, classpathVisibility);
      Assert.fail();
    } catch (MojoExecutionException expected) {
      Assert.assertTrue(expected.getMessage().contains("privatePackageReference"));
    }
  }
}
| takari-lifecycle-plugin/src/test/java/io/takari/maven/plugins/compile/AnnotationProcessingTest.java | package io.takari.maven.plugins.compile;
import static io.takari.maven.testing.TestResources.assertFileContents;
import static io.takari.maven.testing.TestResources.cp;
import static io.takari.maven.testing.TestResources.rm;
import static io.takari.maven.testing.TestResources.touch;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameters;
import io.takari.maven.plugins.compile.AbstractCompileMojo.Proc;
import io.takari.maven.plugins.compile.jdt.CompilerJdt;
public class AnnotationProcessingTest extends AbstractCompileTest {
  // compilerId is supplied by the @Parameters factory below ("javac", "forked-javac" or "jdt")
  public AnnotationProcessingTest(String compilerId) {
    super(compilerId);
  }
@Parameters(name = "{0}")
public static Iterable<Object[]> compilers() {
List<Object[]> compilers = new ArrayList<Object[]>();
compilers.add(new Object[] {"javac"});
compilers.add(new Object[] {"forked-javac"});
compilers.add(new Object[] {"jdt"});
return compilers;
}
private File procCompile(String projectName, Proc proc, Xpp3Dom... parameters) throws Exception, IOException {
File basedir = resources.getBasedir(projectName);
return procCompile(basedir, proc, parameters);
}
private File procCompile(File basedir, Proc proc, Xpp3Dom... parameters) throws Exception, IOException {
File processor = compileAnnotationProcessor();
return processAnnotations(basedir, proc, processor, parameters);
}
private File processAnnotations(File basedir, Proc proc, File processor, Xpp3Dom... parameters) throws Exception {
MavenProject project = mojos.readMavenProject(basedir);
processAnnotations(project, processor, proc, parameters);
return basedir;
}
protected void processAnnotations(MavenProject project, File processor, Proc proc, Xpp3Dom... parameters) throws Exception {
MavenSession session = mojos.newMavenSession(project);
processAnnotations(session, project, "compile", processor, proc, parameters);
}
protected void processAnnotations(MavenSession session, MavenProject project, String goal, File processor, Proc proc, Xpp3Dom... parameters) throws Exception {
MojoExecution execution = mojos.newMojoExecution(goal);
addDependency(project, "processor", new File(processor, "target/classes"));
Xpp3Dom configuration = execution.getConfiguration();
if (proc != null) {
configuration.addChild(newParameter("proc", proc.name()));
}
if (parameters != null) {
for (Xpp3Dom parameter : parameters) {
configuration.addChild(parameter);
}
}
mojos.executeMojo(session, project, execution);
}
private File compileAnnotationProcessor() throws Exception, IOException {
File processor = compile("compile-proc/processor");
cp(processor, "src/main/resources/META-INF/services/javax.annotation.processing.Processor", "target/classes/META-INF/services/javax.annotation.processing.Processor");
return processor;
}
  @Test
  public void testProc_only() throws Exception {
    // proc=only: annotation processing runs, but no .class outputs are asserted
    File basedir = procCompile("compile-proc/proc", Proc.only);
    mojos.assertBuildOutputs(new File(basedir, "target/generated-sources/annotations"), "proc/GeneratedSource.java", "proc/AnotherGeneratedSource.java");
  }
  @Test
  public void testProc_none() throws Exception {
    // proc=none: only the original source compiles; nothing is generated
    File basedir = procCompile("compile-proc/proc", Proc.none);
    mojos.assertBuildOutputs(new File(basedir, "target"), "classes/proc/Source.class");
  }
  @Test
  public void testProc_proc() throws Exception {
    // proc=proc: annotation processing runs and both original and generated sources compile
    File basedir = procCompile("compile-proc/proc", Proc.proc);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class", //
        "generated-sources/annotations/proc/AnotherGeneratedSource.java", //
        "classes/proc/AnotherGeneratedSource.class");
  }
  @Test
  public void testProc_incrementalProcessorChange() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc");
    processAnnotations(basedir, Proc.proc, processor);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class", //
        "generated-sources/annotations/proc/AnotherGeneratedSource.java", //
        "classes/proc/AnotherGeneratedSource.class");
    // remove the ServiceLoader registration: the processor is no longer discovered,
    // and previously generated outputs must be cleaned up by the incremental build
    rm(processor, "target/classes/META-INF/services/javax.annotation.processing.Processor");
    mojos.flushClasspathCaches();
    processAnnotations(basedir, Proc.proc, processor);
    mojos.assertBuildOutputs(new File(basedir, "target"), "classes/proc/Source.class");
    mojos.assertDeletedOutputs(new File(basedir, "target"), //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class", //
        "generated-sources/annotations/proc/AnotherGeneratedSource.java", //
        "classes/proc/AnotherGeneratedSource.class");
  }
  @Test
  public void testProc_dummyOutput() throws Exception {
    // presumably Processor_dummyOutput produces no persistent outputs — only the
    // original Source.class is asserted; verify against the test processor source
    File basedir = procCompile("compile-proc/proc", Proc.proc, newProcessors("processor.Processor_dummyOutput"));
    mojos.assertBuildOutputs(new File(basedir, "target"), "classes/proc/Source.class");
  }
  @Test
  public void testProcTypeReference() throws Exception {
    // a handwritten source referencing a generated type must compile together with
    // the generated sources (GeneratedSourceSubclass references GeneratedSource)
    File basedir = procCompile("compile-proc/proc-type-reference", Proc.proc);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "classes/proc/GeneratedSourceSubclass.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class", //
        "generated-sources/annotations/proc/AnotherGeneratedSource.java", //
        "classes/proc/AnotherGeneratedSource.class");
  }
  @Test
  public void testProc_createResource() throws Exception {
    // NOTE(review): despite the name, the created resource itself is not asserted,
    // only the source outputs — confirm the intended coverage
    File basedir = procCompile("compile-proc/proc", Proc.proc, newProcessors("processor.ProcessorCreateResource"));
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java");
  }
  @Test
  public void testProc_annotationProcessors() throws Exception {
    // explicit annotationProcessors configuration: only processor.Processor runs,
    // so AnotherGeneratedSource is not produced (contrast with testProc_proc)
    Xpp3Dom processors = newProcessors("processor.Processor");
    File basedir = procCompile("compile-proc/proc", Proc.proc, processors);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
  }
  @Test
  public void testProc_processorErrorMessage() throws Exception {
    Xpp3Dom processors = newProcessors("processor.ErrorMessageProcessor");
    File basedir = resources.getBasedir("compile-proc/proc");
    try {
      procCompile(basedir, Proc.only, processors);
      Assert.fail();
    } catch (MojoExecutionException e) {
      // expected
    }
    // the failed build must leave no outputs behind
    mojos.assertBuildOutputs(new File(basedir, "target"), new String[0]);
    // the processor-reported error is attributed to the annotated source, with
    // compiler-specific column positions
    ErrorMessage expected = new ErrorMessage(compilerId);
    expected.setSnippets("jdt", "ERROR Source.java [6:14] test error message"); // TODO why 14?
    expected.setSnippets("javac", "ERROR Source.java [6:8] test error message");
    mojos.assertMessage(basedir, "src/main/java/proc/Source.java", expected);
    Collection<String> pomMessages = mojos.getBuildContextLog().getMessages(new File(basedir, "pom.xml"));
    Assert.assertEquals(3, pomMessages.size());
    // TODO assert actual messages are as expected
  }
  @Test
  public void testProc_messages() throws Exception {
    ErrorMessage expected = new ErrorMessage(compilerId);
    expected.setSnippets("javac", "ERROR BrokenSource.java [2:29]", "cannot find symbol");
    expected.setSnippets("jdt", "ERROR BrokenSource.java [2:29]", "cannot be resolved to a type");
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc");
    String[] outputs;
    if (CompilerJdt.ID.equals(compilerId)) {
      outputs = new String[] {"classes/proc/Source.class" //
          , "generated-sources/annotations/proc/BrokenSource.java"};
    } else {
      // TODO investigate why javac does not generate classes/proc/Source.class
      outputs = new String[] {"generated-sources/annotations/proc/BrokenSource.java"};
    }
    // BrokenProcessor generates a source that does not compile: the build fails but
    // the generated source is kept so the error can be reported against it
    Xpp3Dom processors = newProcessors("processor.BrokenProcessor");
    try {
      processAnnotations(basedir, Proc.proc, processor, processors);
      Assert.fail();
    } catch (MojoExecutionException e) {
      // expected
    }
    mojos.assertBuildOutputs(new File(basedir, "target"), outputs);
    assertProcMessage(basedir, "target/generated-sources/annotations/proc/BrokenSource.java", expected);
    // no change rebuild should produce the same messages
    try {
      processAnnotations(basedir, Proc.proc, processor, processors);
      Assert.fail();
    } catch (MojoExecutionException e) {
      // expected
    }
    mojos.assertCarriedOverOutputs(new File(basedir, "target"), outputs);
    assertProcMessage(basedir, "target/generated-sources/annotations/proc/BrokenSource.java", expected);
  }
private void assertProcMessage(File basedir, String path, ErrorMessage expected) throws Exception {
// javac reports the same compilation error twice when Proc.proc
Set<String> messages = new HashSet<String>(mojos.getBuildContextLog().getMessages(new File(basedir, path)));
Assert.assertEquals(messages.toString(), 1, messages.size());
String message = messages.iterator().next();
Assert.assertTrue(expected.isMatch(message));
}
  @Test
  public void testProc_processorOptions() throws Exception {
    // processor options are passed through the annotationProcessorOptions configuration;
    // presumably ProcessorWithOptions validates them — only build success is asserted here
    Xpp3Dom processors = newProcessors("processor.ProcessorWithOptions");
    Xpp3Dom options = new Xpp3Dom("annotationProcessorOptions");
    options.addChild(newParameter("optionA", "valueA"));
    options.addChild(newParameter("optionB", "valueB"));
    procCompile("compile-proc/proc", Proc.proc, processors, options);
  }
  @Test
  public void testProc_staleGeneratedSourcesCleanup() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc");
    processAnnotations(basedir, Proc.proc, processor);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class", //
        "generated-sources/annotations/proc/AnotherGeneratedSource.java", //
        "classes/proc/AnotherGeneratedSource.class");
    // remove annotation
    cp(basedir, "src/main/java/proc/Source.java-remove-annotation", "src/main/java/proc/Source.java");
    processAnnotations(basedir, Proc.proc, processor);
    // everything generated from the no-longer-annotated source must be removed
    mojos.assertDeletedOutputs(new File(basedir, "target"), //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class", //
        "generated-sources/annotations/proc/AnotherGeneratedSource.java", //
        "classes/proc/AnotherGeneratedSource.class");
  }
  @Test
  public void testProc_incrementalDeleteLastAnnotatedSource() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc");
    Xpp3Dom processors = newProcessors("processor.Processor");
    // initial compilation
    processAnnotations(basedir, Proc.proc, processor, processors);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
    // no-change rebuild
    processAnnotations(basedir, Proc.proc, processor, processors);
    mojos.assertCarriedOverOutputs(new File(basedir, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
    // remove annotated class
    rm(basedir, "src/main/java/proc/Source.java");
    processAnnotations(basedir, Proc.proc, processor, processors);
    // deleting the last annotated source must delete all derived outputs
    mojos.assertDeletedOutputs(new File(basedir, "target"), //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/Source.class", //
        "classes/proc/GeneratedSource.class");
  }
  @Test
  public void testProc_nonIncrementalProcessor_onlyEX_deleteSource() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc");
    File target = new File(basedir, "target");
    Xpp3Dom processors = newProcessors("processor.NonIncrementalProcessor");
    processAnnotations(basedir, Proc.onlyEX, processor, processors);
    mojos.assertBuildOutputs(target, //
        "generated-sources/annotations/proc/NonIncrementalSource.java");
    // removing the source must also remove the non-incrementally generated output
    rm(basedir, "src/main/java/proc/Source.java");
    processAnnotations(basedir, Proc.onlyEX, processor, processors);
    mojos.assertDeletedOutputs(target, //
        "generated-sources/annotations/proc/NonIncrementalSource.java");
  }
  @Test
  public void testProc_projectSourceRoots() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc");
    MavenProject project = mojos.readMavenProject(basedir);
    addDependency(project, "processor", new File(processor, "target/classes"));
    mojos.compile(project, newParameter("proc", Proc.proc.name()), newProcessors("processor.Processor"));
    // the generated-sources directory must be registered as a compile source root
    Assert.assertTrue(project.getCompileSourceRoots().contains(new File(basedir, "target/generated-sources/annotations").getAbsolutePath()));
    // TODO testCompile
  }
  @Test
  public void testIncrementalDelete() throws Exception {
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-incremental-delete");
    Xpp3Dom processors = newProcessors("processor.Processor");
    // initial compilation
    processAnnotations(basedir, Proc.proc, processor, processors);
    mojos.assertBuildOutputs(new File(basedir, "target"), //
        "classes/proc/Keep.class", //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
    // no-change rebuild
    processAnnotations(basedir, Proc.proc, processor, processors);
    mojos.assertCarriedOverOutputs(new File(basedir, "target"), //
        "classes/proc/Keep.class", //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
    // remove annotated source
    rm(basedir, "src/main/java/proc/Source.java");
    processAnnotations(basedir, Proc.proc, processor, processors);
    // outputs derived from the removed source are deleted; Keep.class survives
    mojos.assertDeletedOutputs(new File(basedir, "target"), //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/Source.class", //
        "classes/proc/GeneratedSource.class");
  }
  @Test
  public void testConvertGeneratedSourceToHandwritten() throws Exception {
    // this test demonstrates the following scenario
    // 1. annotation processor generates java source and the generated source is compiled by the compiler
    // 2. annotation is removed from original source and the generated source is moved to a dependency
    // assert original generatedSource.java and generatedSource.class are deleted
    File processor = compileAnnotationProcessor();
    File basedir = resources.getBasedir("compile-proc/proc-incremental-move");
    File moduleA = new File(basedir, "module-a");
    File moduleB = new File(basedir, "module-b");
    Xpp3Dom processors = newProcessors("processor.Processor");
    // initial build: compile module-b first, then module-a with annotation processing enabled
    mojos.compile(moduleB);
    MavenProject projectA = mojos.readMavenProject(moduleA);
    addDependency(projectA, "module-b", new File(moduleB, "target/classes"));
    processAnnotations(projectA, processor, Proc.proc, processors);
    mojos.assertBuildOutputs(new File(moduleA, "target"), //
        "classes/proc/Source.class", //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
    mojos.flushClasspathCaches();
    // move generated source to module-b/src/main/java
    cp(moduleB, "src/main/java/proc/GeneratedSource.java-moved", "src/main/java/proc/GeneratedSource.java");
    cp(moduleA, "src/main/java/modulea/ModuleA.java-new", "src/main/java/modulea/ModuleA.java");
    cp(moduleA, "src/main/java/proc/Source.java-remove-annotation", "src/main/java/proc/Source.java");
    mojos.compile(moduleB);
    mojos.assertBuildOutputs(moduleB, "target/classes/proc/GeneratedSource.class");
    // incremental rebuild of module-a: the previously generated source and its class
    // must be deleted now that the handwritten copy lives in module-b
    projectA = mojos.readMavenProject(moduleA);
    addDependency(projectA, "module-b", new File(moduleB, "target/classes"));
    processAnnotations(projectA, processor, Proc.proc, processors);
    mojos.assertBuildOutputs(new File(moduleA, "target"), //
        "classes/proc/Source.class", "classes/modulea/ModuleA.class");
    mojos.assertDeletedOutputs(new File(moduleA, "target"), //
        "generated-sources/annotations/proc/GeneratedSource.java", //
        "classes/proc/GeneratedSource.class");
  }
/**
 * Builds an {@code <annotationProcessors>} mojo configuration element with one
 * {@code <processor>} child per fully-qualified processor class name.
 */
private Xpp3Dom newProcessors(String... processors) {
  Xpp3Dom config = new Xpp3Dom("annotationProcessors");
  for (String processorClass : processors) {
    config.addChild(newParameter("processor", processorClass));
  }
  return config;
}
@Test
public void testRequireProc() throws Exception {
  File processorDir = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/require-proc");

  // a processor on the classpath without an explicit proc mode is rejected
  try {
    processAnnotations(basedir, null, processorDir);
    Assert.fail();
  } catch (IllegalArgumentException expected) {
    // TODO assert message
  }

  // without any processor dependency the default proc mode is accepted
  processAnnotations(basedir, null, null);
}
@Test
public void testRecompile() throws Exception {
  /**
   * <pre>
   * Source.java -> Source.class
   *        \-> GeneratedSource.java -> GeneratedSource.class
   *                    ^
   * Client.java -> Client.class
   * </pre>
   */
  // The generated source's body appears to come from the sibling
  // GeneratedSource.body file (see ProcessorSiblingBody and the basedir
  // option below) — when the body changes and Source.java is touched, the
  // regenerated source and its client must both be recompiled.
  File processor = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/proc-incremental-recompile");
  Xpp3Dom processors = newProcessors("processor.ProcessorSiblingBody");
  // tell the processor where the source directory (and the *.body file) lives
  Xpp3Dom options = new Xpp3Dom("annotationProcessorOptions");
  options.addChild(newParameter("basedir", new File(basedir, "src/main/java").getCanonicalPath()));

  // initial full build
  processAnnotations(basedir, Proc.proc, processor, processors, options);
  mojos.assertBuildOutputs(new File(basedir, "target"), //
      "classes/proc/Source.class", //
      "classes/proc/Client.class", //
      "generated-sources/annotations/proc/GeneratedSource.java", //
      "classes/proc/GeneratedSource.class");

  // change the generated-source body and touch the annotated source
  cp(basedir, "src/main/java/proc/GeneratedSource.body-changed", "src/main/java/proc/GeneratedSource.body");
  touch(basedir, "src/main/java/proc/Source.java");
  processAnnotations(basedir, Proc.proc, processor, processors, options);
  // everything downstream of Source.java is rebuilt
  mojos.assertBuildOutputs(new File(basedir, "target"), //
      "classes/proc/Source.class", //
      "classes/proc/Client.class", //
      "generated-sources/annotations/proc/GeneratedSource.java", //
      "classes/proc/GeneratedSource.class");
}
@Test
public void testProc_processorLastRound() throws Exception {
  // processor.ProcessorLastRound writes target/classes/types.lst during its
  // last round; with proc=onlyEX that resource is the only expected output.
  Xpp3Dom lastRound = newProcessors("processor.ProcessorLastRound");
  File basedir = procCompile("compile-proc/proc", Proc.onlyEX, lastRound);

  mojos.assertBuildOutputs(new File(basedir, "target"), //
      "classes/types.lst");
  assertFileContents("proc.Source\n", basedir, "target/classes/types.lst");
}
@Test
public void testIncremental_proc_only() throws Exception {
  // the point of this test is to assert that changes to annotations trigger affected sources reprocessing when proc=only
  // note sourcepath=disable, otherwise proc:only is all-or-nothing
  File processor = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/proc-incremental-proconly");
  File generatedSources = new File(basedir, "target/generated-sources/annotations");
  Xpp3Dom processors = newProcessors("processor.Processor");

  // compile classes first (proc=none via the compile() helper), then run
  // annotation processing on its own
  compile(basedir, processor, "compile-only");
  processAnnotations(basedir, Proc.only, processor, processors, newParameter("sourcepath", "disable"));
  mojos.assertBuildOutputs(generatedSources, "proc/GeneratedConcrete.java", "proc/GeneratedAbstract.java", "proc/GeneratedAnother.java");

  // removing the annotation from Abstract must delete the generated sources
  // that depended on it, while the unrelated GeneratedAnother survives
  cp(basedir, "src/main/java/proc/Abstract.java-remove-annotation", "src/main/java/proc/Abstract.java");
  compile(basedir, processor, "compile-only");
  processAnnotations(basedir, Proc.only, processor, processors, newParameter("sourcepath", "disable"));
  mojos.assertDeletedOutputs(generatedSources, "proc/GeneratedConcrete.java", "proc/GeneratedAbstract.java");
  if (CompilerJdt.ID.equals(compilerId)) {
    // incremental JDT carries the unaffected output over from the prior build
    mojos.assertCarriedOverOutputs(generatedSources, "proc/GeneratedAnother.java");
  } else {
    // other compilers regenerate it instead of carrying it over
    mojos.assertBuildOutputs(generatedSources, "proc/GeneratedAnother.java");
  }
}
/**
 * Runs the compile mojo under an alternate execution id with annotation
 * processing forced off (proc=none), after adding the compiled processor
 * project as a dependency of the test project.
 */
private void compile(File basedir, File processor, String executionId) throws Exception {
  MavenProject project = mojos.readMavenProject(basedir);
  MavenSession session = mojos.newMavenSession(project);
  MojoExecution template = mojos.newMojoExecution();
  // disable annotation processing; the configuration object is shared by
  // reference with the execution created below
  template.getConfiguration().addChild(newParameter("proc", Proc.none.name()));
  MojoExecution execution = new MojoExecution(template.getMojoDescriptor(), executionId, null);
  execution.setConfiguration(template.getConfiguration());
  addDependency(project, "processor", new File(processor, "target/classes"));
  mojos.executeMojo(session, project, execution);
}
@Test
public void testMultiround_procOnly() throws Exception {
  // Renamed from testMutliround_procOnly: fixes the "Mutliround" typo for
  // consistency with testMultiround_processorLastRound. Test methods have no
  // callers (JUnit discovers them via @Test), so the rename is safe.
  File basedir = procCompile("compile-proc/multiround", Proc.only);
  File generatedSources = new File(basedir, "target/generated-sources/annotations");
  // no compiler messages are expected for the handwritten source
  mojos.assertMessages(basedir, "src/main/java/multiround/Source.java", new String[] {});
  // both rounds' generated sources are produced
  mojos.assertBuildOutputs(generatedSources, "multiround/GeneratedSource.java", "multiround/AnotherGeneratedSource.java");
}
@Test
public void testMultiround_processorLastRound() throws Exception {
  // processor.ProcessorLastRound creates well-known resource during last round
  // the point of this test is to assert this works during incremental build
  // when compiler may be invoked several times to compile indirectly affected sources
  File processor = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/multiround-type-reference");

  // initial build: both sources compile and the last-round types.lst is written
  processAnnotations(basedir, Proc.procEX, processor, newProcessors("processor.ProcessorLastRound"));
  mojos.assertBuildOutputs(new File(basedir, "target"), //
      "classes/proc/Source.class", //
      "classes/proc/AnotherSource.class", //
      "classes/types.lst");

  // incremental build: a change to Source also rebuilds AnotherSource (the
  // fixture is a type-reference scenario) and types.lst is produced again
  cp(basedir, "src/main/java/proc/Source.java-changed", "src/main/java/proc/Source.java");
  processAnnotations(basedir, Proc.procEX, processor, newProcessors("processor.ProcessorLastRound"));
  mojos.assertBuildOutputs(new File(basedir, "target"), //
      "classes/proc/Source.class", //
      "classes/proc/AnotherSource.class", //
      "classes/types.lst");
}
@Test
public void testLastRound_typeIndex() throws Exception {
  // JDT-only: apparently javac can't resolve "forward" references to types
  // generated during the apt last round
  Assume.assumeTrue(CompilerJdt.ID.equals(compilerId));

  Xpp3Dom typeIndexProcessor = newProcessors("processor.ProcessorLastRound_typeIndex");
  File basedir = procCompile("compile-proc/multiround-type-index", Proc.procEX, typeIndexProcessor);

  // both index types are generated and compiled alongside the fixture sources
  mojos.assertBuildOutputs(new File(basedir, "target"), //
      "generated-sources/annotations/generated/TypeIndex.java", //
      "generated-sources/annotations/generated/TypeIndex2.java", //
      "classes/generated/TypeIndex.class", //
      "classes/generated/TypeIndex2.class", //
      "classes/typeindex/Annotated.class", //
      "classes/typeindex/Consumer.class" //
  );
}
@Test
public void testReprocess() throws Exception {
  File processor = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/reprocess");
  File target = new File(basedir, "target");

  // NOTE(review): "ProessorValue" (sic) presumably matches the fixture's
  // actual processor class name — verify against the fixture before renaming
  // initial build: the processor records "1" into Annotated.value
  processAnnotations(basedir, Proc.proc, processor, newProcessors("processor.ProessorValue"));
  mojos.assertBuildOutputs(target, //
      "classes/reprocess/Annotated.class", //
      "classes/reprocess/Annotated.value", //
      "classes/reprocess/SimpleA.class", //
      "classes/reprocess/SimpleB.class");
  Assert.assertEquals("1", FileUtils.fileRead(new File(target, "classes/reprocess/Annotated.value")));

  // change SimpleA and touch the annotated source; on reprocessing the
  // recorded value changes from "1" to "10"
  cp(basedir, "src/main/java/reprocess/SimpleA.java-changed", "src/main/java/reprocess/SimpleA.java");
  touch(new File(basedir, "src/main/java/reprocess/Annotated.java"));
  processAnnotations(basedir, Proc.proc, processor, newProcessors("processor.ProessorValue"));
  mojos.assertBuildOutputs(target, //
      "classes/reprocess/Annotated.class", //
      "classes/reprocess/Annotated.value", //
      "classes/reprocess/SimpleA.class", //
      "classes/reprocess/SimpleB.class");
  Assert.assertEquals("10", FileUtils.fileRead(new File(target, "classes/reprocess/Annotated.value")));
}
@Test
public void testProc_nonIncrementalProcessor() throws Exception {
  // JDT-specific behavior for a processor that does not support incremental
  // processing: the EX proc modes succeed, the plain modes fail the build
  Assume.assumeTrue(CompilerJdt.ID.equals(compilerId));
  File processor = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/proc");
  File target = new File(basedir, "target");

  // procEX: processing and compilation of the generated source both succeed
  processAnnotations(basedir, Proc.procEX, processor, newProcessors("processor.NonIncrementalProcessor"));
  mojos.assertBuildOutputs(target, //
      "classes/proc/Source.class", //
      "generated-sources/annotations/proc/NonIncrementalSource.java", //
      "classes/proc/NonIncrementalSource.class");

  FileUtils.deleteDirectory(target);
  // onlyEX: only the generated source is produced, nothing is compiled
  processAnnotations(basedir, Proc.onlyEX, processor, newProcessors("processor.NonIncrementalProcessor"));
  mojos.assertBuildOutputs(target, //
      "generated-sources/annotations/proc/NonIncrementalSource.java");

  FileUtils.deleteDirectory(target);
  // plain proc: the build fails
  try {
    processAnnotations(basedir, Proc.proc, processor, newProcessors("processor.NonIncrementalProcessor"));
    Assert.fail();
  } catch (MojoExecutionException expected) {
    // TODO validate the error message
  }
  FileUtils.deleteDirectory(target);
  // plain only: the build fails as well
  try {
    processAnnotations(basedir, Proc.only, processor, newProcessors("processor.NonIncrementalProcessor"));
    Assert.fail();
  } catch (MojoExecutionException expected) {
    // TODO validate the error message
  }
}
@Test
public void testSourcepathDependency() throws Exception {
  File processorDir = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
  File dependencyBasedir = new File(basedir, "dependency");
  File projectBasedir = new File(basedir, "project");

  MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
  MavenProject project = mojos.readMavenProject(projectBasedir);
  // make the dependency project both a reactor member (below) and a regular
  // dependency of the main project
  mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
      .setGroupId(dependency.getGroupId()) //
      .setArtifactId(dependency.getArtifactId()) //
      .setVersion(dependency.getVersion()) //
      .addTo(project);

  MavenSession session = mojos.newMavenSession(project);
  session.setProjects(Arrays.asList(project, dependency));

  processAnnotations(session, project, "compile", processorDir, Proc.only, //
      newProcessors("processor.Processor"), newParameter("sourcepath", "reactorDependencies"));
  mojos.assertBuildOutputs(new File(projectBasedir, "target"), //
      "generated-sources/annotations/sourcepath/project/GeneratedSource.java" //
  );
}
@Test
public void testSourcepathDependency_classifiedDependency() throws Exception {
  // same setup as testSourcepathDependency, except the reactor dependency is
  // registered with a classifier; the build is expected to fail with a
  // message naming the dependency's groupId:artifactId
  File processor = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
  File dependencyBasedir = new File(basedir, "dependency");
  File projectBasedir = new File(basedir, "project");
  Xpp3Dom processors = newProcessors("processor.Processor");
  Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
  MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
  MavenProject project = mojos.readMavenProject(projectBasedir);
  // the classifier is the only difference from the passing sibling test
  mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
      .setGroupId(dependency.getGroupId()) //
      .setArtifactId(dependency.getArtifactId()) //
      .setVersion(dependency.getVersion()) //
      .setClassifier("classifier") //
      .addTo(project);
  MavenSession session = mojos.newMavenSession(project);
  session.setProjects(Arrays.asList(project, dependency));
  try {
    processAnnotations(session, project, "compile", processor, Proc.only, processors, sourcepath);
    Assert.fail();
  } catch (MojoExecutionException expected) {
    Assert.assertTrue(expected.getMessage().contains(dependency.getGroupId() + ":" + dependency.getArtifactId()));
  }
}
@Test
public void testSourcepathIncludes() throws Exception {
  // restrict compilation to sourcepath/project/*.java via an <includes> filter
  Xpp3Dom includes = new Xpp3Dom("includes");
  includes.addChild(newParameter("include", "sourcepath/project/*.java"));

  File basedir = procCompile("compile-proc/proc-sourcepath-includes", Proc.only, //
      includes, newProcessors("processor.Processor"), newParameter("sourcepath", "reactorDependencies"));
  mojos.assertBuildOutputs(new File(basedir, "target"), //
      "generated-sources/annotations/sourcepath/project/GeneratedSource.java" //
  );
}
@Test
public void testSourcepathDependency_testCompile() throws Exception {
  // sourcepath=reactorDependencies for the testCompile goal: the dependency
  // project contributes both its main classes and a test-jar
  File processor = compileAnnotationProcessor();
  File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
  File dependencyBasedir = new File(basedir, "dependency");
  File projectBasedir = new File(basedir, "project");
  Xpp3Dom processors = newProcessors("processor.Processor");
  Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
  MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
  MavenProject project = mojos.readMavenProject(projectBasedir);
  // main-classes dependency
  mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
      .setGroupId(dependency.getGroupId()) //
      .setArtifactId(dependency.getArtifactId()) //
      .setVersion(dependency.getVersion()) //
      .addTo(project);
  // test-jar dependency pointing at the dependency's test output directory
  mojos.newDependency(new File(dependencyBasedir, "target/test-classes")) //
      .setGroupId(dependency.getGroupId()) //
      .setArtifactId(dependency.getArtifactId()) //
      .setType("test-jar") //
      .setVersion(dependency.getVersion()) //
      .addTo(project);
  MavenSession session = mojos.newMavenSession(project);
  session.setProjects(Arrays.asList(project, dependency));
  processAnnotations(session, project, "testCompile", processor, Proc.only, processors, sourcepath);
  // generated test sources land under generated-test-sources/test-annotations
  mojos.assertBuildOutputs(new File(projectBasedir, "target"), //
      "generated-test-sources/test-annotations/sourcepath/project/test/GeneratedSourceTest.java" //
  );
}
@Test
public void testSourcepath_classpathVisibility() throws Exception {
  Assume.assumeTrue(CompilerJdt.ID.equals(compilerId));
  File basedir = resources.getBasedir();

  Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
  Xpp3Dom strictVisibility = newParameter("privatePackageReference", "error");
  try {
    procCompile(basedir, Proc.only, sourcepath, strictVisibility);
    Assert.fail();
  } catch (MojoExecutionException expected) {
    // the build fails with a message naming the privatePackageReference parameter
    Assert.assertTrue(expected.getMessage().contains("privatePackageReference"));
  }
}
}
| added compile sourcepath incremental test
Signed-off-by: Igor Fedorenko <[email protected]>
| takari-lifecycle-plugin/src/test/java/io/takari/maven/plugins/compile/AnnotationProcessingTest.java | added compile sourcepath incremental test | <ide><path>akari-lifecycle-plugin/src/test/java/io/takari/maven/plugins/compile/AnnotationProcessingTest.java
<ide> import org.junit.runners.Parameterized.Parameters;
<ide>
<ide> import io.takari.maven.plugins.compile.AbstractCompileMojo.Proc;
<add>import io.takari.maven.plugins.compile.javac.CompilerJavac;
<ide> import io.takari.maven.plugins.compile.jdt.CompilerJdt;
<ide>
<ide> public class AnnotationProcessingTest extends AbstractCompileTest {
<ide> }
<ide>
<ide> @Test
<add> public void testSourcepathDependency_incremental() throws Exception {
<add> // the point of this test is assert that changes to sourcepath files are expected to trigger reprocessing of affected sources
<add>
<add> File processor = compileAnnotationProcessor();
<add> File basedir = resources.getBasedir("compile-proc/proc-sourcepath");
<add>
<add> File dependencyBasedir = new File(basedir, "dependency");
<add> File projectBasedir = new File(basedir, "project");
<add>
<add> Xpp3Dom processors = newProcessors("processor.Processor");
<add> Xpp3Dom sourcepath = newParameter("sourcepath", "reactorDependencies");
<add>
<add> MavenProject dependency = mojos.readMavenProject(dependencyBasedir);
<add> MavenProject project = mojos.readMavenProject(projectBasedir);
<add>
<add> mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
<add> .setGroupId(dependency.getGroupId()) //
<add> .setArtifactId(dependency.getArtifactId()) //
<add> .setVersion(dependency.getVersion()) //
<add> .addTo(project);
<add>
<add> MavenSession session = mojos.newMavenSession(project);
<add> session.setProjects(Arrays.asList(project, dependency));
<add>
<add> processAnnotations(session, project, "compile", processor, Proc.only, processors, sourcepath);
<add> mojos.assertBuildOutputs(new File(projectBasedir, "target"), //
<add> "generated-sources/annotations/sourcepath/project/GeneratedSource.java" //
<add> );
<add>
<add> // second, incremental, compilation with one of sourcepath files removed
<add>
<add> rm(dependencyBasedir, "src/main/java/sourcepath/dependency/SourcepathDependency.java");
<add> mojos.flushClasspathCaches();
<add>
<add> dependency = mojos.readMavenProject(dependencyBasedir);
<add> project = mojos.readMavenProject(projectBasedir);
<add>
<add> mojos.newDependency(new File(dependencyBasedir, "target/classes")) //
<add> .setGroupId(dependency.getGroupId()) //
<add> .setArtifactId(dependency.getArtifactId()) //
<add> .setVersion(dependency.getVersion()) //
<add> .addTo(project);
<add>
<add> session = mojos.newMavenSession(project);
<add> session.setProjects(Arrays.asList(project, dependency));
<add> try {
<add> processAnnotations(session, project, "compile", processor, Proc.only, processors, sourcepath);
<add> } catch (MojoExecutionException expected) {}
<add> ErrorMessage message = new ErrorMessage(compilerId);
<add> message.setSnippets(CompilerJdt.ID, "sourcepath.dependency.SourcepathDependency cannot be resolved to a type");
<add> message.setSnippets(CompilerJavac.ID, "package sourcepath.dependency does not exist");
<add> mojos.assertMessage(projectBasedir, "src/main/java/sourcepath/project/Source.java", message);
<add> // oddly enough, both jdt and javac generate GeneratedSource despite the error
<add> }
<add>
<add> @Test
<ide> public void testSourcepathDependency_classifiedDependency() throws Exception {
<ide> File processor = compileAnnotationProcessor();
<ide> File basedir = resources.getBasedir("compile-proc/proc-sourcepath"); |
|
Java | apache-2.0 | d4218476af1c940837b88928dbc2047123e501c8 | 0 | gemmellr/qpid-jms,tabish121/qpid-jms,apache/qpid-jms,andrew-buckley/qpid-jms,andrew-buckley/qpid-jms,gemmellr/qpid-jms,tabish121/qpid-jms,apache/qpid-jms,avranju/qpid-jms,avranju/qpid-jms | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.jms;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import javax.jms.Connection;
import javax.jms.ConnectionConsumer;
import javax.jms.ConnectionMetaData;
import javax.jms.Destination;
import javax.jms.ExceptionListener;
import javax.jms.IllegalStateException;
import javax.jms.JMSException;
import javax.jms.Queue;
import javax.jms.QueueConnection;
import javax.jms.QueueSession;
import javax.jms.ServerSessionPool;
import javax.jms.Session;
import javax.jms.TemporaryQueue;
import javax.jms.TemporaryTopic;
import javax.jms.Topic;
import javax.jms.TopicConnection;
import javax.jms.TopicSession;
import javax.net.ssl.SSLContext;
import org.apache.qpid.jms.exceptions.JmsConnectionFailedException;
import org.apache.qpid.jms.exceptions.JmsExceptionSupport;
import org.apache.qpid.jms.message.JmsInboundMessageDispatch;
import org.apache.qpid.jms.message.JmsMessage;
import org.apache.qpid.jms.message.JmsMessageFactory;
import org.apache.qpid.jms.message.JmsOutboundMessageDispatch;
import org.apache.qpid.jms.meta.JmsConnectionId;
import org.apache.qpid.jms.meta.JmsConnectionInfo;
import org.apache.qpid.jms.meta.JmsConsumerId;
import org.apache.qpid.jms.meta.JmsResource;
import org.apache.qpid.jms.meta.JmsSessionId;
import org.apache.qpid.jms.meta.JmsTransactionId;
import org.apache.qpid.jms.provider.Provider;
import org.apache.qpid.jms.provider.ProviderClosedException;
import org.apache.qpid.jms.provider.ProviderConstants.ACK_TYPE;
import org.apache.qpid.jms.provider.ProviderFuture;
import org.apache.qpid.jms.provider.ProviderListener;
import org.apache.qpid.jms.util.IdGenerator;
import org.apache.qpid.jms.util.ThreadPoolUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implementation of a JMS Connection
*/
public class JmsConnection implements Connection, TopicConnection, QueueConnection, ProviderListener {
private static final Logger LOG = LoggerFactory.getLogger(JmsConnection.class);
private JmsConnectionInfo connectionInfo;
private final IdGenerator clientIdGenerator;
private boolean clientIdSet;
private boolean sendAcksAsync;
private ExceptionListener exceptionListener;
private final List<JmsSession> sessions = new CopyOnWriteArrayList<JmsSession>();
private final Map<JmsConsumerId, JmsMessageDispatcher> dispatchers =
new ConcurrentHashMap<JmsConsumerId, JmsMessageDispatcher>();
private final AtomicBoolean connected = new AtomicBoolean();
private final AtomicBoolean closed = new AtomicBoolean();
private final AtomicBoolean closing = new AtomicBoolean();
private final AtomicBoolean started = new AtomicBoolean();
private final AtomicBoolean failed = new AtomicBoolean();
private final Object connectLock = new Object();
private IOException firstFailureError;
private JmsPrefetchPolicy prefetchPolicy = new JmsPrefetchPolicy();
private boolean messagePrioritySupported;
private final ThreadPoolExecutor executor;
private URI brokerURI;
private URI localURI;
private SSLContext sslContext;
private Provider provider;
private final Set<JmsConnectionListener> connectionListeners =
new CopyOnWriteArraySet<JmsConnectionListener>();
private final Map<JmsDestination, JmsDestination> tempDestinations =
new ConcurrentHashMap<JmsDestination, JmsDestination>();
private final AtomicLong sessionIdGenerator = new AtomicLong();
private final AtomicLong tempDestIdGenerator = new AtomicLong();
private final AtomicLong transactionIdGenerator = new AtomicLong();
private JmsMessageFactory messageFactory;
protected JmsConnection(String connectionId, Provider provider, IdGenerator clientIdGenerator) throws JMSException {
// This executor can be used for dispatching asynchronous tasks that might block or result
// in reentrant calls to this Connection that could block. The thread in this executor
// will also serve as a means of preventing JVM shutdown should a client application
// not have it's own mechanism for doing so.
executor = new ThreadPoolExecutor(1, 1, 5, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread thread = new Thread(r, "QpidJMS Connection Executor: ");
return thread;
}
});
this.provider = provider;
this.provider.setProviderListener(this);
try {
this.provider.start();
} catch (Exception e) {
throw JmsExceptionSupport.create(e);
}
this.clientIdGenerator = clientIdGenerator;
this.connectionInfo = new JmsConnectionInfo(new JmsConnectionId(connectionId));
}
/**
* @throws JMSException
* @see javax.jms.Connection#close()
*/
@Override
public void close() throws JMSException {
boolean interrupted = Thread.interrupted();
try {
if (!closed.get() && !failed.get()) {
// do not fail if already closed as specified by the JMS specification.
doStop(false);
}
synchronized (this) {
if (closed.get()) {
return;
}
closing.set(true);
for (JmsSession session : this.sessions) {
session.shutdown();
}
this.sessions.clear();
this.tempDestinations.clear();
if (isConnected() && !failed.get()) {
ProviderFuture request = new ProviderFuture();
try {
provider.destroy(connectionInfo, request);
try {
request.sync();
} catch (Exception ex) {
// TODO - Spec is a bit vague here, we don't fail if already closed but
// in this case we really aren't closed yet so there could be an
// argument that at this point an exception is still valid.
if (ex.getCause() instanceof InterruptedException) {
throw (InterruptedException) ex.getCause();
}
LOG.debug("Failed destroying Connection resource: {}", ex.getMessage());
}
} catch(ProviderClosedException pce) {
LOG.debug("Ignoring provider closed exception during connection close");
}
}
connected.set(false);
started.set(false);
closing.set(false);
closed.set(true);
}
} catch (Exception e) {
throw JmsExceptionSupport.create(e);
} finally {
try {
ThreadPoolUtils.shutdown(executor);
} catch (Throwable e) {
LOG.warn("Error shutting down thread pool: " + executor + ". This exception will be ignored.", e);
}
if (provider != null) {
provider.close();
provider = null;
}
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
/**
* Called to free all Connection resources.
*/
protected void shutdown() throws JMSException {
// TODO - Once ConnectionConsumer is added we must shutdown those as well.
for (JmsSession session : this.sessions) {
session.shutdown();
}
if (isConnected() && !failed.get() && !closing.get()) {
destroyResource(connectionInfo);
}
synchronized (this) {
if (clientIdSet) {
connectionInfo.setClientId(null);
clientIdSet = false;
}
}
tempDestinations.clear();
started.set(false);
connected.set(false);
}
/**
* @param destination
* @param messageSelector
* @param sessionPool
* @param maxMessages
* @return ConnectionConsumer
* @throws JMSException
* @see javax.jms.Connection#createConnectionConsumer(javax.jms.Destination,
* java.lang.String, javax.jms.ServerSessionPool, int)
*/
@Override
public ConnectionConsumer createConnectionConsumer(Destination destination, String messageSelector,
ServerSessionPool sessionPool, int maxMessages) throws JMSException {
checkClosedOrFailed();
connect();
throw new JMSException("Not supported");
}
/**
* @param topic
* @param subscriptionName
* @param messageSelector
* @param sessionPool
* @param maxMessages
* @return ConnectionConsumer
* @throws JMSException
*
* @see javax.jms.Connection#createDurableConnectionConsumer(javax.jms.Topic,
* java.lang.String, java.lang.String, javax.jms.ServerSessionPool, int)
*/
@Override
public ConnectionConsumer createDurableConnectionConsumer(Topic topic, String subscriptionName,
String messageSelector, ServerSessionPool sessionPool, int maxMessages) throws JMSException {
checkClosedOrFailed();
connect();
throw new JMSException("Not supported");
}
/**
* @param transacted
* @param acknowledgeMode
* @return Session
* @throws JMSException
* @see javax.jms.Connection#createSession(boolean, int)
*/
@Override
public Session createSession(boolean transacted, int acknowledgeMode) throws JMSException {
checkClosedOrFailed();
connect();
int ackMode = getSessionAcknowledgeMode(transacted, acknowledgeMode);
JmsSession result = new JmsSession(this, getNextSessionId(), ackMode);
addSession(result);
if (started.get()) {
result.start();
}
return result;
}
/**
* @return clientId
* @see javax.jms.Connection#getClientID()
*/
@Override
public synchronized String getClientID() throws JMSException {
checkClosedOrFailed();
return this.connectionInfo.getClientId();
}
/**
* @return connectionInfoData
* @see javax.jms.Connection#getMetaData()
*/
@Override
public ConnectionMetaData getMetaData() throws JMSException {
checkClosedOrFailed();
return JmsConnectionMetaData.INSTANCE;
}
/**
* @param clientID
* @throws JMSException
* @see javax.jms.Connection#setClientID(java.lang.String)
*/
@Override
public synchronized void setClientID(String clientID) throws JMSException {
checkClosedOrFailed();
if (this.clientIdSet) {
throw new IllegalStateException("The clientID has already been set");
}
if (clientID == null) {
throw new IllegalStateException("Cannot have a null clientID");
}
if (connected.get()) {
throw new IllegalStateException("Cannot set the client id once connected.");
}
this.connectionInfo.setClientId(clientID);
this.clientIdSet = true;
//We weren't connected if we got this far, we should now connect now to ensure the clientID is valid.
//TODO: determine if any resulting failure is only the result of the ClientID value, or other reasons such as auth.
connect();
}
/**
* @throws JMSException
* @see javax.jms.Connection#start()
*/
@Override
public void start() throws JMSException {
checkClosedOrFailed();
connect();
if (this.started.compareAndSet(false, true)) {
try {
for (JmsSession s : this.sessions) {
s.start();
}
} catch (Exception e) {
throw JmsExceptionSupport.create(e);
}
}
}
/**
* @throws JMSException
* @see javax.jms.Connection#stop()
*/
@Override
public void stop() throws JMSException {
doStop(true);
}
/**
* @see #stop()
* @param checkClosed <tt>true</tt> to check for already closed and throw
* {@link java.lang.IllegalStateException} if already closed,
* <tt>false</tt> to skip this check
* @throws JMSException if the JMS provider fails to stop message delivery due to some internal error.
*/
void doStop(boolean checkClosed) throws JMSException {
if (checkClosed) {
checkClosedOrFailed();
}
if (started.compareAndSet(true, false)) {
synchronized(sessions) {
for (JmsSession s : this.sessions) {
s.stop();
}
}
}
}
/**
* @param topic
* @param messageSelector
* @param sessionPool
* @param maxMessages
* @return ConnectionConsumer
* @throws JMSException
* @see javax.jms.TopicConnection#createConnectionConsumer(javax.jms.Topic,
* java.lang.String, javax.jms.ServerSessionPool, int)
*/
@Override
public ConnectionConsumer createConnectionConsumer(Topic topic, String messageSelector,
ServerSessionPool sessionPool, int maxMessages) throws JMSException {
checkClosedOrFailed();
connect();
return null;
}
/**
* @param transacted
* @param acknowledgeMode
* @return TopicSession
* @throws JMSException
* @see javax.jms.TopicConnection#createTopicSession(boolean, int)
*/
@Override
public TopicSession createTopicSession(boolean transacted, int acknowledgeMode) throws JMSException {
checkClosedOrFailed();
connect();
int ackMode = getSessionAcknowledgeMode(transacted, acknowledgeMode);
JmsTopicSession result = new JmsTopicSession(this, getNextSessionId(), ackMode);
addSession(result);
if (started.get()) {
result.start();
}
return result;
}
/**
* @param queue
* @param messageSelector
* @param sessionPool
* @param maxMessages
* @return ConnectionConsumer
* @throws JMSException
* @see javax.jms.QueueConnection#createConnectionConsumer(javax.jms.Queue,
* java.lang.String, javax.jms.ServerSessionPool, int)
*/
@Override
public ConnectionConsumer createConnectionConsumer(Queue queue, String messageSelector,
ServerSessionPool sessionPool, int maxMessages) throws JMSException {
checkClosedOrFailed();
connect();
return null;
}
    /**
     * Creates a new QueueSession on this connection using the requested
     * transaction and acknowledgement settings.
     *
     * @param transacted true to create a transacted session.
     * @param acknowledgeMode the acknowledge mode for a non-transacted session.
     * @return a newly created QueueSession instance.
     * @throws JMSException if the connection is closed, failed or cannot connect.
     * @see javax.jms.QueueConnection#createQueueSession(boolean, int)
     */
    @Override
    public QueueSession createQueueSession(boolean transacted, int acknowledgeMode) throws JMSException {
        checkClosedOrFailed();
        connect();
        int ackMode = getSessionAcknowledgeMode(transacted, acknowledgeMode);
        JmsQueueSession result = new JmsQueueSession(this, getNextSessionId(), ackMode);
        addSession(result);
        // New sessions begin delivery immediately when the connection is already started.
        if (started.get()) {
            result.start();
        }
        return result;
    }
    /**
     * Converts the given exception to a JMSException and forwards it to the
     * registered ExceptionListener, if any.
     *
     * @param ex the exception to report.
     */
    public void onException(Exception ex) {
        onException(JmsExceptionSupport.create(ex));
    }
    /**
     * Forwards the given JMSException to the registered ExceptionListener, if any.
     *
     * @param ex the exception to report.
     */
    public void onException(JMSException ex) {
        // Snapshot the listener reference; it may be cleared concurrently.
        ExceptionListener l = this.exceptionListener;
        if (l != null) {
            // NOTE(review): ex is already a JMSException, so the create() call here
            // looks redundant — presumably create() returns JMSExceptions unchanged;
            // confirm against JmsExceptionSupport before simplifying.
            l.onException(JmsExceptionSupport.create(ex));
        }
    }
protected int getSessionAcknowledgeMode(boolean transacted, int acknowledgeMode) throws JMSException {
int result = acknowledgeMode;
if (!transacted && acknowledgeMode == Session.SESSION_TRANSACTED) {
throw new JMSException("acknowledgeMode SESSION_TRANSACTED cannot be used for an non-transacted Session");
}
if (transacted) {
result = Session.SESSION_TRANSACTED;
}
return result;
}
    /** Removes a session from this connection's active session list. */
    protected void removeSession(JmsSession session) throws JMSException {
        this.sessions.remove(session);
    }
    /** Registers a newly created session with this connection. */
    protected void addSession(JmsSession s) {
        this.sessions.add(s);
    }
    /** Registers the dispatcher that receives inbound messages for the given consumer. */
    protected void addDispatcher(JmsConsumerId consumerId, JmsMessageDispatcher dispatcher) {
        dispatchers.put(consumerId, dispatcher);
    }
    /** Removes the dispatcher registered for the given consumer, if any. */
    protected void removeDispatcher(JmsConsumerId consumerId) {
        dispatchers.remove(consumerId);
    }
    /**
     * Establishes the connection with the remote peer if not already connected.
     * A client id is generated if none was explicitly set, then the connection
     * resource is created on the provider.  No-op when already connected or closed.
     *
     * @throws JMSException if the provider fails to create the connection resource.
     */
    private void connect() throws JMSException {
        // connectLock serializes concurrent connect attempts from multiple API calls.
        synchronized(this.connectLock) {
            if (isConnected() || closed.get()) {
                return;
            }
            if (connectionInfo.getClientId() == null || connectionInfo.getClientId().trim().isEmpty()) {
                connectionInfo.setClientId(clientIdGenerator.generateId());
            }
            this.connectionInfo = createResource(connectionInfo);
            this.connected.set(true);
            // TODO - Advisory Support.
            //
            // Providers should have an interface for adding a listener for temporary
            // destination advisory messages for create / destroy so we can track them
            // and throw exceptions when producers try to send to deleted destinations.
        }
    }
/**
* @return a newly initialized TemporaryQueue instance.
*/
protected TemporaryQueue createTemporaryQueue() throws JMSException {
String destinationName = connectionInfo.getConnectionId() + ":" + tempDestIdGenerator.incrementAndGet();
JmsTemporaryQueue queue = new JmsTemporaryQueue(destinationName);
queue = createResource(queue);
tempDestinations.put(queue, queue);
return queue;
}
/**
* @return a newly initialized TemporaryTopic instance.
*/
protected TemporaryTopic createTemporaryTopic() throws JMSException {
String destinationName = connectionInfo.getConnectionId() + ":" + tempDestIdGenerator.incrementAndGet();
JmsTemporaryTopic topic = new JmsTemporaryTopic(destinationName);
topic = createResource(topic);
tempDestinations.put(topic, topic);
return topic;
}
protected void deleteDestination(JmsDestination destination) throws JMSException {
checkClosedOrFailed();
connect();
try {
for (JmsSession session : this.sessions) {
if (session.isDestinationInUse(destination)) {
throw new JMSException("A consumer is consuming from the temporary destination");
}
}
if (destination.isTemporary()) {
tempDestinations.remove(destination);
}
destroyResource(destination);
} catch (Exception e) {
throw JmsExceptionSupport.create(e);
}
}
    /**
     * Throws if this connection is closed, or has failed due to a provider error.
     *
     * @throws JMSException wrapping the first failure cause when the connection has failed.
     */
    protected void checkClosedOrFailed() throws JMSException {
        checkClosed();
        if (failed.get()) {
            throw new JmsConnectionFailedException(firstFailureError);
        }
    }
    /**
     * Throws if this connection has been closed.  Note the thrown type is
     * javax.jms.IllegalStateException (a JMSException), per the file's imports.
     *
     * @throws IllegalStateException if the connection is closed.
     */
    protected void checkClosed() throws IllegalStateException {
        if (this.closed.get()) {
            throw new IllegalStateException("The Connection is closed");
        }
    }
    /** @return the next unique session id scoped to this connection's id. */
    protected JmsSessionId getNextSessionId() {
        return new JmsSessionId(connectionInfo.getConnectionId(), sessionIdGenerator.incrementAndGet());
    }
    /** @return the next unique transaction id scoped to this connection's id. */
    protected JmsTransactionId getNextTransactionId() {
        return new JmsTransactionId(connectionInfo.getConnectionId(), transactionIdGenerator.incrementAndGet());
    }
    ////////////////////////////////////////////////////////////////////////////
    // Provider interface methods
    ////////////////////////////////////////////////////////////////////////////
    /**
     * Synchronously creates the given resource on the provider and returns it.
     *
     * @param resource the resource to create remotely.
     * @return the same resource instance, after the provider confirms creation.
     * @throws JMSException if the connection is closed/failed or creation fails.
     */
    <T extends JmsResource> T createResource(T resource) throws JMSException {
        checkClosedOrFailed();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.create(resource, request);
            // Block until the provider completes (or fails) the request.
            request.sync();
            return resource;
        } catch (Exception ex) {
            throw JmsExceptionSupport.create(ex);
        }
    }
    /**
     * Synchronously starts the given resource on the provider.
     *
     * @param resource the resource to start.
     * @throws JMSException if the connection cannot be made or the start fails.
     */
    void startResource(JmsResource resource) throws JMSException {
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.start(resource, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Synchronously destroys the given resource on the provider.
     *
     * @param resource the resource to destroy.
     * @throws JMSException if the connection cannot be made or the destroy fails.
     */
    void destroyResource(JmsResource resource) throws JMSException {
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.destroy(resource, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Sends the given outbound message envelope through the provider, blocking
     * until the provider acknowledges the request.
     *
     * @param envelope the outbound message dispatch to send.
     * @throws JMSException if the connection is closed/failed or the send fails.
     */
    void send(JmsOutboundMessageDispatch envelope) throws JMSException {
        checkClosedOrFailed();
        connect();
        // TODO - We don't currently have a way to say that an operation
        //        should be done asynchronously.  A send can be done async
        //        in many cases, such as non-persistent delivery.  We probably
        //        don't need to do anything here though just have a way to
        //        configure the provider for async sends which we do in the
        //        JmsConnectionInfo.  Here we just need to register a listener
        //        on the request to know when it completes if we want to do
        //        JMS 2.0 style async sends where we signal a callback, then
        //        we can manage order of callback events to async senders at
        //        this level.
        try {
            ProviderFuture request = new ProviderFuture();
            provider.send(envelope, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Acknowledges a single delivered message with the given acknowledgement type.
     *
     * @param envelope the inbound dispatch being acknowledged.
     * @param ackType the type of acknowledgement to perform.
     * @throws JMSException if the connection is closed/failed or the ack fails.
     */
    void acknowledge(JmsInboundMessageDispatch envelope, ACK_TYPE ackType) throws JMSException {
        checkClosedOrFailed();
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.acknowledge(envelope, ackType, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Acknowledges all delivered messages for the given session.
     *
     * @param sessionId the id of the session whose deliveries are acknowledged.
     * @throws JMSException if the connection is closed/failed or the ack fails.
     */
    void acknowledge(JmsSessionId sessionId) throws JMSException {
        checkClosedOrFailed();
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.acknowledge(sessionId, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Removes a durable subscription by name on the provider.
     *
     * @param name the durable subscription name to remove.
     * @throws JMSException if the connection is closed/failed or the request fails.
     */
    void unsubscribe(String name) throws JMSException {
        checkClosedOrFailed();
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.unsubscribe(name, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Commits the active transaction of the given session.
     *
     * @param sessionId the id of the session whose transaction is committed.
     * @throws JMSException if the connection is closed/failed or the commit fails.
     */
    void commit(JmsSessionId sessionId) throws JMSException {
        checkClosedOrFailed();
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.commit(sessionId, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Rolls back the active transaction of the given session.
     *
     * @param sessionId the id of the session whose transaction is rolled back.
     * @throws JMSException if the connection is closed/failed or the rollback fails.
     */
    void rollback(JmsSessionId sessionId) throws JMSException {
        checkClosedOrFailed();
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.rollback(sessionId, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Requests redelivery of unacknowledged messages for the given session.
     *
     * @param sessionId the id of the session to recover.
     * @throws JMSException if the connection is closed/failed or the recover fails.
     */
    void recover(JmsSessionId sessionId) throws JMSException {
        checkClosedOrFailed();
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.recover(sessionId, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    /**
     * Requests that the provider pull a message for the given consumer, waiting
     * up to the given timeout.
     *
     * @param consumerId the consumer requesting a message.
     * @param timeout maximum time to wait; semantics defined by the provider.
     * @throws JMSException if the connection is closed/failed or the pull fails.
     */
    void pull(JmsConsumerId consumerId, long timeout) throws JMSException {
        checkClosedOrFailed();
        connect();
        try {
            ProviderFuture request = new ProviderFuture();
            provider.pull(consumerId, timeout, request);
            request.sync();
        } catch (Exception ioe) {
            throw JmsExceptionSupport.create(ioe);
        }
    }
    ////////////////////////////////////////////////////////////////////////////
    // Property setters and getters
    ////////////////////////////////////////////////////////////////////////////
    /**
     * @return the currently registered ExceptionListener, or null when none is set.
     * @throws JMSException if the connection is closed or failed.
     * @see javax.jms.Connection#getExceptionListener()
     */
    @Override
    public ExceptionListener getExceptionListener() throws JMSException {
        checkClosedOrFailed();
        return this.exceptionListener;
    }
    /**
     * Registers a listener that is notified of asynchronous connection errors.
     *
     * @param listener the listener to notify, or null to clear it.
     * @throws JMSException if the connection is closed or failed.
     * @see javax.jms.Connection#setExceptionListener(javax.jms.ExceptionListener)
     */
    @Override
    public void setExceptionListener(ExceptionListener listener) throws JMSException {
        checkClosedOrFailed();
        this.exceptionListener = listener;
    }
/**
* Adds a JmsConnectionListener so that a client can be notified of events in
* the underlying protocol provider.
*
* @param listener
* the new listener to add to the collection.
*/
public void addConnectionListener(JmsConnectionListener listener) {
this.connectionListeners.add(listener);
}
/**
* Removes a JmsConnectionListener that was previously registered.
*
* @param listener
* the listener to remove from the collection.
*/
public void removeTransportListener(JmsConnectionListener listener) {
this.connectionListeners.remove(listener);
}
    /** @return true when all sends are forced to be asynchronous. */
    public boolean isForceAsyncSend() {
        return connectionInfo.isForceAsyncSend();
    }
    /** Configures whether all sends are forced to be asynchronous. */
    public void setForceAsyncSend(boolean forceAsyncSend) {
        connectionInfo.setForceAsyncSends(forceAsyncSend);
    }
    /** @return true when every send waits for a remote acknowledgement. */
    public boolean isAlwaysSyncSend() {
        return connectionInfo.isAlwaysSyncSend();
    }
    /** Configures whether every send waits for a remote acknowledgement. */
    public void setAlwaysSyncSend(boolean alwaysSyncSend) {
        this.connectionInfo.setAlwaysSyncSend(alwaysSyncSend);
    }
    /** @return the address prefix applied to Topic destinations. */
    public String getTopicPrefix() {
        return connectionInfo.getTopicPrefix();
    }
    /** Sets the address prefix applied to Topic destinations. */
    public void setTopicPrefix(String topicPrefix) {
        connectionInfo.setTopicPrefix(topicPrefix);
    }
    /** @return the address prefix applied to TemporaryTopic destinations. */
    public String getTempTopicPrefix() {
        return connectionInfo.getTempTopicPrefix();
    }
    /** Sets the address prefix applied to TemporaryTopic destinations. */
    public void setTempTopicPrefix(String tempTopicPrefix) {
        connectionInfo.setTempTopicPrefix(tempTopicPrefix);
    }
    /** @return the address prefix applied to TemporaryQueue destinations. */
    public String getTempQueuePrefix() {
        return connectionInfo.getTempQueuePrefix();
    }
    /** Sets the address prefix applied to TemporaryQueue destinations. */
    public void setTempQueuePrefix(String tempQueuePrefix) {
        connectionInfo.setTempQueuePrefix(tempQueuePrefix);
    }
    /** @return the address prefix applied to Queue destinations. */
    public String getQueuePrefix() {
        return connectionInfo.getQueuePrefix();
    }
    /** Sets the address prefix applied to Queue destinations. */
    public void setQueuePrefix(String queuePrefix) {
        connectionInfo.setQueuePrefix(queuePrefix);
    }
    /** @return true when the host value is omitted from the connection info. */
    public boolean isOmitHost() {
        return connectionInfo.isOmitHost();
    }
    /** Configures whether the host value is omitted from the connection info. */
    public void setOmitHost(boolean omitHost) {
        connectionInfo.setOmitHost(omitHost);
    }
    /** @return the prefetch policy applied to consumers created on this connection. */
    public JmsPrefetchPolicy getPrefetchPolicy() {
        return prefetchPolicy;
    }
    /** Sets the prefetch policy applied to consumers created on this connection. */
    public void setPrefetchPolicy(JmsPrefetchPolicy prefetchPolicy) {
        this.prefetchPolicy = prefetchPolicy;
    }
    /** @return true when message priority ordering is honored on delivery. */
    public boolean isMessagePrioritySupported() {
        return messagePrioritySupported;
    }
    /** Configures whether message priority ordering is honored on delivery. */
    public void setMessagePrioritySupported(boolean messagePrioritySupported) {
        this.messagePrioritySupported = messagePrioritySupported;
    }
    /** @return maximum time to wait for connection close to complete. */
    public long getCloseTimeout() {
        return connectionInfo.getCloseTimeout();
    }
    /** Sets the maximum time to wait for connection close to complete. */
    public void setCloseTimeout(long closeTimeout) {
        connectionInfo.setCloseTimeout(closeTimeout);
    }
    /** @return maximum time to wait while establishing the connection. */
    public long getConnectTimeout() {
        return this.connectionInfo.getConnectTimeout();
    }
    /** Sets the maximum time to wait while establishing the connection. */
    public void setConnectTimeout(long connectTimeout) {
        this.connectionInfo.setConnectTimeout(connectTimeout);
    }
    /** @return maximum time to wait for a send to complete. */
    public long getSendTimeout() {
        return connectionInfo.getSendTimeout();
    }
    /** Sets the maximum time to wait for a send to complete. */
    public void setSendTimeout(long sendTimeout) {
        connectionInfo.setSendTimeout(sendTimeout);
    }
    /** @return maximum time to wait for a provider request to complete. */
    public long getRequestTimeout() {
        return connectionInfo.getRequestTimeout();
    }
    /** Sets the maximum time to wait for a provider request to complete. */
    public void setRequestTimeout(long requestTimeout) {
        connectionInfo.setRequestTimeout(requestTimeout);
    }
    /** @return the URI of the remote broker this connection targets. */
    public URI getBrokerURI() {
        return brokerURI;
    }
    /** Sets the URI of the remote broker this connection targets. */
    public void setBrokerURI(URI brokerURI) {
        this.brokerURI = brokerURI;
    }
    /** @return the local URI used for binding, when configured. */
    public URI getLocalURI() {
        return localURI;
    }
    /** Sets the local URI used for binding. */
    public void setLocalURI(URI localURI) {
        this.localURI = localURI;
    }
    /** @return the SSLContext used for secure transports, when configured. */
    public SSLContext getSslContext() {
        return sslContext;
    }
    /** Sets the SSLContext used for secure transports. */
    public void setSslContext(SSLContext sslContext) {
        this.sslContext = sslContext;
    }
public String getUsername() {
return this.connectionInfo.getUsername();
}
public void setUsername(String username) {
this.connectionInfo.setUsername(username);;
}
public String getPassword() {
return this.connectionInfo.getPassword();
}
public void setPassword(String password) {
this.connectionInfo.setPassword(password);
}
    /** @return the protocol provider backing this connection. */
    public Provider getProvider() {
        return provider;
    }
    /** Replaces the protocol provider (package-private, for internal wiring). */
    void setProvider(Provider provider) {
        this.provider = provider;
    }
    /** @return true once the connection resource has been created on the provider. */
    public boolean isConnected() {
        return this.connected.get();
    }
    /** @return true while message delivery is started. */
    public boolean isStarted() {
        return this.started.get();
    }
    /** @return true once the connection has been closed. */
    public boolean isClosed() {
        return this.closed.get();
    }
    /** @return the unique id assigned to this connection. */
    JmsConnectionId getConnectionId() {
        return this.connectionInfo.getConnectionId();
    }
    /** @return true when remote destination watching is enabled. */
    public boolean isWatchRemoteDestinations() {
        return this.connectionInfo.isWatchRemoteDestinations();
    }
    /** Configures whether remote destinations are watched. */
    public void setWatchRemoteDestinations(boolean watchRemoteDestinations) {
        this.connectionInfo.setWatchRemoteDestinations(watchRemoteDestinations);
    }
    /**
     * @return the provider supplied message factory for this connection.
     * @throws RuntimeException if called before the provider has supplied a
     *         factory (i.e. before the connection is established).
     */
    public JmsMessageFactory getMessageFactory() {
        if (messageFactory == null) {
            throw new RuntimeException("Message factory should never be null");
        }
        return messageFactory;
    }
    /** @return true when message acknowledgements are sent asynchronously. */
    public boolean isSendAcksAsync() {
        return sendAcksAsync;
    }
    /** Configures whether message acknowledgements are sent asynchronously. */
    public void setSendAcksAsync(boolean sendAcksAsync) {
        this.sendAcksAsync = sendAcksAsync;
    }
@Override
public void onMessage(JmsInboundMessageDispatch envelope) {
JmsMessage incoming = envelope.getMessage();
// Ensure incoming Messages are in readonly mode.
if (incoming != null) {
incoming.setReadOnlyBody(true);
incoming.setReadOnlyProperties(true);
}
JmsMessageDispatcher dispatcher = dispatchers.get(envelope.getConsumerId());
if (dispatcher != null) {
dispatcher.onMessage(envelope);
}
for (JmsConnectionListener listener : connectionListeners) {
listener.onMessage(envelope);
}
}
    /**
     * Provider callback fired when the transport connection is interrupted.
     * Sessions are notified first, then registered connection listeners.
     *
     * @param remoteURI the URI of the peer whose connection was interrupted.
     */
    @Override
    public void onConnectionInterrupted(URI remoteURI) {
        for (JmsSession session : sessions) {
            session.onConnectionInterrupted();
        }
        for (JmsConnectionListener listener : connectionListeners) {
            listener.onConnectionInterrupted(remoteURI);
        }
    }
    /**
     * Provider callback that rebuilds connection state on the given (new)
     * provider after a transport failure: the connection resource is recreated
     * first, then tracked temporary destinations, then each session recovers
     * its own resources.
     *
     * @param provider the replacement provider to recover onto.
     * @throws Exception if any recovery step fails.
     */
    @Override
    public void onConnectionRecovery(Provider provider) throws Exception {
        // TODO - Recover Advisory Consumer once we can support it.
        LOG.debug("Connection {} is starting recovery.", connectionInfo.getConnectionId());
        ProviderFuture request = new ProviderFuture();
        provider.create(connectionInfo, request);
        request.sync();
        for (JmsDestination tempDestination : tempDestinations.values()) {
            createResource(tempDestination);
        }
        for (JmsSession session : sessions) {
            session.onConnectionRecovery(provider);
        }
    }
    /**
     * Provider callback fired once recovery completes; refreshes the message
     * factory from the new provider and notifies each session.
     *
     * @param provider the provider that completed recovery.
     * @throws Exception if a session fails its post-recovery step.
     */
    @Override
    public void onConnectionRecovered(Provider provider) throws Exception {
        LOG.debug("Connection {} is finalizing recovery.", connectionInfo.getConnectionId());
        this.messageFactory = provider.getMessageFactory();
        for (JmsSession session : sessions) {
            session.onConnectionRecovered(provider);
        }
    }
    /**
     * Provider callback fired when the transport connection is fully restored.
     * Sessions are notified first, then registered connection listeners.
     *
     * @param remoteURI the URI of the peer the connection was restored to.
     */
    @Override
    public void onConnectionRestored(URI remoteURI) {
        for (JmsSession session : sessions) {
            session.onConnectionRestored();
        }
        for (JmsConnectionListener listener : connectionListeners) {
            listener.onConnectionRestored(remoteURI);
        }
    }
    /**
     * Provider callback fired when the initial connection is established;
     * captures the provider's message factory and notifies listeners.
     *
     * @param remoteURI the URI of the remote peer that was connected to.
     */
    @Override
    public void onConnectionEstablished(URI remoteURI) {
        LOG.info("Connection {} connected to remote Broker: {}", connectionInfo.getConnectionId(), remoteURI);
        this.messageFactory = provider.getMessageFactory();
        // TODO - For events triggered from the Provider thread, we might want to consider always
        //        firing the client level events on the Connection executor to prevent the client
        //        from stalling the provider thread.
        for (JmsConnectionListener listener : connectionListeners) {
            listener.onConnectionEstablished(remoteURI);
        }
    }
    /**
     * Provider callback fired when the connection has failed irrecoverably.
     * Reports the error to the ExceptionListener, then (off the provider
     * thread, on the connection executor) marks the connection failed, closes
     * the provider, shuts down local state and notifies connection listeners.
     *
     * @param ex the I/O error that caused the failure.
     */
    @Override
    public void onConnectionFailure(final IOException ex) {
        onAsyncException(ex);
        if (!closing.get() && !closed.get()) {
            // Run cleanup on the executor so the provider thread is not blocked
            // by listener callbacks or provider close.
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    providerFailed(ex);
                    if (provider != null) {
                        try {
                            provider.close();
                        } catch (Throwable error) {
                            LOG.debug("Error while closing failed Provider: {}", error.getMessage());
                        }
                    }
                    try {
                        shutdown();
                    } catch (JMSException e) {
                        LOG.warn("Exception during connection cleanup, " + e, e);
                    }
                    for (JmsConnectionListener listener : connectionListeners) {
                        listener.onConnectionFailure(ex);
                    }
                }
            });
        }
    }
    /**
     * Handles any asynchronous errors that occur from the JMS framework classes.
     *
     * If any listeners are registered they will be notified of the error from a thread
     * in the Connection's Executor service.
     *
     * @param error
     *        The exception that triggered this error.
     */
    public void onAsyncException(Throwable error) {
        if (!closed.get() && !closing.get()) {
            if (this.exceptionListener != null) {
                // Normalize to a JMSException before handing to the JMS listener.
                if (!(error instanceof JMSException)) {
                    error = JmsExceptionSupport.create(error);
                }
                final JMSException jmsError = (JMSException)error;
                // Dispatch on the executor so the calling (provider) thread is not blocked.
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        JmsConnection.this.exceptionListener.onException(jmsError);
                    }
                });
            } else {
                LOG.debug("Async exception with no exception listener: " + error, error);
            }
        }
    }
    /**
     * Marks this connection as failed, recording the first failure cause.
     *
     * @param error the I/O error that caused the provider failure.
     */
    protected void providerFailed(IOException error) {
        failed.set(true);
        // NOTE(review): this check-then-set is not atomic; presumably only the
        // provider/executor thread calls this — confirm before relying on it.
        if (firstFailureError == null) {
            firstFailureError = error;
        }
    }
}
| qpid-jms-client/src/main/java/org/apache/qpid/jms/JmsConnection.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.jms;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import javax.jms.Connection;
import javax.jms.ConnectionConsumer;
import javax.jms.ConnectionMetaData;
import javax.jms.Destination;
import javax.jms.ExceptionListener;
import javax.jms.IllegalStateException;
import javax.jms.JMSException;
import javax.jms.Queue;
import javax.jms.QueueConnection;
import javax.jms.QueueSession;
import javax.jms.ServerSessionPool;
import javax.jms.Session;
import javax.jms.TemporaryQueue;
import javax.jms.TemporaryTopic;
import javax.jms.Topic;
import javax.jms.TopicConnection;
import javax.jms.TopicSession;
import javax.net.ssl.SSLContext;
import org.apache.qpid.jms.exceptions.JmsConnectionFailedException;
import org.apache.qpid.jms.exceptions.JmsExceptionSupport;
import org.apache.qpid.jms.message.JmsInboundMessageDispatch;
import org.apache.qpid.jms.message.JmsMessage;
import org.apache.qpid.jms.message.JmsMessageFactory;
import org.apache.qpid.jms.message.JmsOutboundMessageDispatch;
import org.apache.qpid.jms.meta.JmsConnectionId;
import org.apache.qpid.jms.meta.JmsConnectionInfo;
import org.apache.qpid.jms.meta.JmsConsumerId;
import org.apache.qpid.jms.meta.JmsResource;
import org.apache.qpid.jms.meta.JmsSessionId;
import org.apache.qpid.jms.meta.JmsTransactionId;
import org.apache.qpid.jms.provider.Provider;
import org.apache.qpid.jms.provider.ProviderClosedException;
import org.apache.qpid.jms.provider.ProviderConstants.ACK_TYPE;
import org.apache.qpid.jms.provider.ProviderFuture;
import org.apache.qpid.jms.provider.ProviderListener;
import org.apache.qpid.jms.util.IdGenerator;
import org.apache.qpid.jms.util.ThreadPoolUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Implementation of a JMS Connection, also acting as TopicConnection and
 * QueueConnection, and as the ProviderListener that receives events from the
 * underlying protocol provider.
 */
public class JmsConnection implements Connection, TopicConnection, QueueConnection, ProviderListener {
    private static final Logger LOG = LoggerFactory.getLogger(JmsConnection.class);
    // Mutable connection metadata; replaced by the provider-confirmed copy on connect.
    private JmsConnectionInfo connectionInfo;
    private final IdGenerator clientIdGenerator;
    // True once setClientID() has been called explicitly by the application.
    private boolean clientIdSet;
    private boolean sendAcksAsync;
    private ExceptionListener exceptionListener;
    // Sessions and dispatchers created on this connection.
    private final List<JmsSession> sessions = new CopyOnWriteArrayList<JmsSession>();
    private final Map<JmsConsumerId, JmsMessageDispatcher> dispatchers =
        new ConcurrentHashMap<JmsConsumerId, JmsMessageDispatcher>();
    // Lifecycle state flags.
    private final AtomicBoolean connected = new AtomicBoolean();
    private final AtomicBoolean closed = new AtomicBoolean();
    private final AtomicBoolean closing = new AtomicBoolean();
    private final AtomicBoolean started = new AtomicBoolean();
    private final AtomicBoolean failed = new AtomicBoolean();
    // Serializes concurrent connect() attempts.
    private final Object connectLock = new Object();
    // First provider failure; wrapped in JmsConnectionFailedException on later calls.
    private IOException firstFailureError;
    private JmsPrefetchPolicy prefetchPolicy = new JmsPrefetchPolicy();
    private boolean messagePrioritySupported;
    // Single-thread executor for async dispatch of listener callbacks and cleanup.
    private final ThreadPoolExecutor executor;
    private URI brokerURI;
    private URI localURI;
    private SSLContext sslContext;
    private Provider provider;
    private final Set<JmsConnectionListener> connectionListeners =
        new CopyOnWriteArraySet<JmsConnectionListener>();
    // Temporary destinations created by this connection, tracked for cleanup/recovery.
    private final Map<JmsDestination, JmsDestination> tempDestinations =
        new ConcurrentHashMap<JmsDestination, JmsDestination>();
    // Counters for generating unique session / temp destination / transaction ids.
    private final AtomicLong sessionIdGenerator = new AtomicLong();
    private final AtomicLong tempDestIdGenerator = new AtomicLong();
    private final AtomicLong transactionIdGenerator = new AtomicLong();
    // Supplied by the provider once connected; see getMessageFactory().
    private JmsMessageFactory messageFactory;
    /**
     * Creates a new JmsConnection over the given provider, starting the
     * provider immediately.  The connection itself is established lazily on
     * first use (see connect()).
     *
     * @param connectionId the unique id assigned to this connection.
     * @param provider the protocol provider that backs this connection.
     * @param clientIdGenerator generator used when no explicit client id is set.
     * @throws JMSException if the provider fails to start.
     */
    protected JmsConnection(String connectionId, Provider provider, IdGenerator clientIdGenerator) throws JMSException {
        // This executor can be used for dispatching asynchronous tasks that might block or result
        // in reentrant calls to this Connection that could block.  The thread in this executor
        // will also serve as a means of preventing JVM shutdown should a client application
        // not have it's own mechanism for doing so.
        executor = new ThreadPoolExecutor(1, 1, 5, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread thread = new Thread(r, "QpidJMS Connection Executor: ");
                return thread;
            }
        });
        this.provider = provider;
        this.provider.setProviderListener(this);
        try {
            this.provider.start();
        } catch (Exception e) {
            throw JmsExceptionSupport.create(e);
        }
        this.clientIdGenerator = clientIdGenerator;
        this.connectionInfo = new JmsConnectionInfo(new JmsConnectionId(connectionId));
    }
    /**
     * Closes this connection: stops delivery, shuts down all sessions, destroys
     * the remote connection resource and releases the executor and provider.
     * Safe to call more than once; per the JMS spec a repeat close is a no-op.
     *
     * @throws JMSException if cleanup of the connection resources fails.
     * @see javax.jms.Connection#close()
     */
    @Override
    public void close() throws JMSException {
        // Preserve and restore the caller's interrupt status across the close.
        boolean interrupted = Thread.interrupted();
        try {
            if (!closed.get() && !failed.get()) {
                // do not fail if already closed as specified by the JMS specification.
                doStop(false);
            }
            synchronized (this) {
                if (closed.get()) {
                    return;
                }
                closing.set(true);
                for (JmsSession session : this.sessions) {
                    session.shutdown();
                }
                this.sessions.clear();
                this.tempDestinations.clear();
                if (isConnected() && !failed.get()) {
                    ProviderFuture request = new ProviderFuture();
                    try {
                        provider.destroy(connectionInfo, request);
                        try {
                            request.sync();
                        } catch (Exception ex) {
                            // TODO - Spec is a bit vague here, we don't fail if already closed but
                            //        in this case we really aren't closed yet so there could be an
                            //        argument that at this point an exception is still valid.
                            if (ex.getCause() instanceof InterruptedException) {
                                throw (InterruptedException) ex.getCause();
                            }
                            LOG.debug("Failed destroying Connection resource: {}", ex.getMessage());
                        }
                    } catch(ProviderClosedException pce) {
                        LOG.debug("Ignoring provider closed exception during connection close");
                    }
                }
                connected.set(false);
                started.set(false);
                closing.set(false);
                closed.set(true);
            }
        } catch (Exception e) {
            throw JmsExceptionSupport.create(e);
        } finally {
            // Always release the executor and provider, even when close failed.
            try {
                ThreadPoolUtils.shutdown(executor);
            } catch (Throwable e) {
                LOG.warn("Error shutting down thread pool: " + executor + ". This exception will be ignored.", e);
            }
            if (provider != null) {
                provider.close();
                provider = null;
            }
            if (interrupted) {
                Thread.currentThread().interrupt();
            }
        }
    }
    /**
     * Called to free all Connection resources.
     *
     * Shuts down all sessions, destroys the remote connection resource when
     * still connected, clears an application-set client id and resets the
     * started / connected state.  Used during failure cleanup as well as close.
     *
     * @throws JMSException if destroying the remote connection resource fails.
     */
    protected void shutdown() throws JMSException {
        // TODO - Once ConnectionConsumer is added we must shutdown those as well.
        for (JmsSession session : this.sessions) {
            session.shutdown();
        }
        if (isConnected() && !failed.get() && !closing.get()) {
            destroyResource(connectionInfo);
        }
        if (clientIdSet) {
            connectionInfo.setClientId(null);
            clientIdSet = false;
        }
        tempDestinations.clear();
        started.set(false);
        connected.set(false);
    }
    /**
     * Connection consumers are not supported by this client.
     *
     * @param destination the destination that would be consumed from.
     * @param messageSelector the selector that would be applied to messages.
     * @param sessionPool the server session pool messages would be dispatched to.
     * @param maxMessages the maximum number of messages dispatched at once.
     * @return never returns normally.
     * @throws JMSException always, since this operation is not supported.
     * @see javax.jms.Connection#createConnectionConsumer(javax.jms.Destination,
     *      java.lang.String, javax.jms.ServerSessionPool, int)
     */
    @Override
    public ConnectionConsumer createConnectionConsumer(Destination destination, String messageSelector,
        ServerSessionPool sessionPool, int maxMessages) throws JMSException {
        checkClosedOrFailed();
        connect();
        throw new JMSException("Not supported");
    }
    /**
     * Durable connection consumers are not supported by this client.
     *
     * @param topic the topic that would be consumed from.
     * @param subscriptionName the durable subscription name that would be used.
     * @param messageSelector the selector that would be applied to messages.
     * @param sessionPool the server session pool messages would be dispatched to.
     * @param maxMessages the maximum number of messages dispatched at once.
     * @return never returns normally.
     * @throws JMSException always, since this operation is not supported.
     *
     * @see javax.jms.Connection#createDurableConnectionConsumer(javax.jms.Topic,
     *      java.lang.String, java.lang.String, javax.jms.ServerSessionPool, int)
     */
    @Override
    public ConnectionConsumer createDurableConnectionConsumer(Topic topic, String subscriptionName,
        String messageSelector, ServerSessionPool sessionPool, int maxMessages) throws JMSException {
        checkClosedOrFailed();
        connect();
        throw new JMSException("Not supported");
    }
    /**
     * Creates a new Session on this connection using the requested transaction
     * and acknowledgement settings, starting it if the connection is started.
     *
     * @param transacted true to create a transacted session.
     * @param acknowledgeMode the acknowledge mode for a non-transacted session.
     * @return a newly created Session instance.
     * @throws JMSException if the connection is closed, failed or cannot connect.
     * @see javax.jms.Connection#createSession(boolean, int)
     */
    @Override
    public Session createSession(boolean transacted, int acknowledgeMode) throws JMSException {
        checkClosedOrFailed();
        connect();
        int ackMode = getSessionAcknowledgeMode(transacted, acknowledgeMode);
        JmsSession result = new JmsSession(this, getNextSessionId(), ackMode);
        addSession(result);
        // New sessions begin delivery immediately when the connection is already started.
        if (started.get()) {
            result.start();
        }
        return result;
    }
    /**
     * @return the client id of this connection, explicit or generated.
     * @throws JMSException if the connection is closed or failed.
     * @see javax.jms.Connection#getClientID()
     */
    @Override
    public String getClientID() throws JMSException {
        checkClosedOrFailed();
        return this.connectionInfo.getClientId();
    }
    /**
     * @return the static metadata describing this JMS provider implementation.
     * @throws JMSException if the connection is closed or failed.
     * @see javax.jms.Connection#getMetaData()
     */
    @Override
    public ConnectionMetaData getMetaData() throws JMSException {
        checkClosedOrFailed();
        return JmsConnectionMetaData.INSTANCE;
    }
    /**
     * Sets the client id for this connection.  Per the JMS spec this must be
     * done before any other action on the connection; it fails if the id was
     * already set or the connection has already been established.  Connects
     * immediately afterwards so an invalid id is detected here.
     *
     * @param clientID the client id to assign; must be non-null.
     * @throws JMSException if closed/failed, already set, null, or already connected.
     * @see javax.jms.Connection#setClientID(java.lang.String)
     */
    @Override
    public synchronized void setClientID(String clientID) throws JMSException {
        checkClosedOrFailed();
        if (this.clientIdSet) {
            throw new IllegalStateException("The clientID has already been set");
        }
        if (clientID == null) {
            throw new IllegalStateException("Cannot have a null clientID");
        }
        if (connected.get()) {
            throw new IllegalStateException("Cannot set the client id once connected.");
        }
        this.connectionInfo.setClientId(clientID);
        this.clientIdSet = true;
        //We weren't connected if we got this far, we should now connect now to ensure the clientID is valid.
        //TODO: determine if any resulting failure is only the result of the ClientID value, or other reasons such as auth.
        connect();
    }
    /**
     * Starts (or restarts) message delivery on this connection; the first
     * transition to started also starts every existing session.
     *
     * @throws JMSException if closed/failed, cannot connect, or a session fails to start.
     * @see javax.jms.Connection#start()
     */
    @Override
    public void start() throws JMSException {
        checkClosedOrFailed();
        connect();
        // Only the thread that wins the stopped -> started transition starts sessions.
        if (this.started.compareAndSet(false, true)) {
            try {
                for (JmsSession s : this.sessions) {
                    s.start();
                }
            } catch (Exception e) {
                throw JmsExceptionSupport.create(e);
            }
        }
    }
    /**
     * Stops message delivery on this connection; delivery can later be resumed
     * via {@link #start()}.
     *
     * @throws JMSException if the connection is closed or delivery cannot be stopped.
     * @see javax.jms.Connection#stop()
     */
    @Override
    public void stop() throws JMSException {
        doStop(true);
    }
    /**
     * @see #stop()
     * @param checkClosed <tt>true</tt> to check for already closed and throw
     *                    {@link java.lang.IllegalStateException} if already closed,
     *                    <tt>false</tt> to skip this check
     * @throws JMSException if the JMS provider fails to stop message delivery due to some internal error.
     */
    void doStop(boolean checkClosed) throws JMSException {
        if (checkClosed) {
            checkClosedOrFailed();
        }
        // Only the thread that wins the started -> stopped transition stops the sessions.
        if (started.compareAndSet(true, false)) {
            synchronized(sessions) {
                for (JmsSession s : this.sessions) {
                    s.stop();
                }
            }
        }
    }
/**
* @param topic
* @param messageSelector
* @param sessionPool
* @param maxMessages
* @return ConnectionConsumer
* @throws JMSException
* @see javax.jms.TopicConnection#createConnectionConsumer(javax.jms.Topic,
* java.lang.String, javax.jms.ServerSessionPool, int)
*/
@Override
public ConnectionConsumer createConnectionConsumer(Topic topic, String messageSelector,
ServerSessionPool sessionPool, int maxMessages) throws JMSException {
checkClosedOrFailed();
connect();
return null;
}
    /**
     * Creates a new TopicSession on this connection using the requested
     * transaction and acknowledgement settings.
     *
     * @param transacted true to create a transacted session.
     * @param acknowledgeMode the acknowledge mode for a non-transacted session.
     * @return a newly created TopicSession instance.
     * @throws JMSException if the connection is closed, failed or cannot connect.
     * @see javax.jms.TopicConnection#createTopicSession(boolean, int)
     */
    @Override
    public TopicSession createTopicSession(boolean transacted, int acknowledgeMode) throws JMSException {
        checkClosedOrFailed();
        connect();
        int ackMode = getSessionAcknowledgeMode(transacted, acknowledgeMode);
        JmsTopicSession result = new JmsTopicSession(this, getNextSessionId(), ackMode);
        addSession(result);
        // New sessions begin delivery immediately when the connection is already started.
        if (started.get()) {
            result.start();
        }
        return result;
    }
/**
* @param queue
* @param messageSelector
* @param sessionPool
* @param maxMessages
* @return ConnectionConsumer
* @throws JMSException
* @see javax.jms.QueueConnection#createConnectionConsumer(javax.jms.Queue,
* java.lang.String, javax.jms.ServerSessionPool, int)
*/
@Override
public ConnectionConsumer createConnectionConsumer(Queue queue, String messageSelector,
ServerSessionPool sessionPool, int maxMessages) throws JMSException {
checkClosedOrFailed();
connect();
return null;
}
    /**
     * Creates a new QueueSession on this connection using the requested
     * transaction and acknowledgement settings.
     *
     * @param transacted true to create a transacted session.
     * @param acknowledgeMode the acknowledge mode for a non-transacted session.
     * @return a newly created QueueSession instance.
     * @throws JMSException if the connection is closed, failed or cannot connect.
     * @see javax.jms.QueueConnection#createQueueSession(boolean, int)
     */
    @Override
    public QueueSession createQueueSession(boolean transacted, int acknowledgeMode) throws JMSException {
        checkClosedOrFailed();
        connect();
        int ackMode = getSessionAcknowledgeMode(transacted, acknowledgeMode);
        JmsQueueSession result = new JmsQueueSession(this, getNextSessionId(), ackMode);
        addSession(result);
        // New sessions begin delivery immediately when the connection is already started.
        if (started.get()) {
            result.start();
        }
        return result;
    }
    /**
     * Converts the given exception to a JMSException and forwards it to the
     * registered ExceptionListener, if any.
     *
     * @param ex the exception to report.
     */
    public void onException(Exception ex) {
        onException(JmsExceptionSupport.create(ex));
    }
    /**
     * Forwards the given JMSException to the registered ExceptionListener, if any.
     *
     * @param ex the exception to report.
     */
    public void onException(JMSException ex) {
        // Snapshot the listener reference; it may be cleared concurrently.
        ExceptionListener l = this.exceptionListener;
        if (l != null) {
            // NOTE(review): ex is already a JMSException; this re-wrap looks redundant —
            // confirm JmsExceptionSupport.create() passes JMSExceptions through unchanged.
            l.onException(JmsExceptionSupport.create(ex));
        }
    }
    /**
     * Validates and normalizes the acknowledgement mode for a new session:
     * transacted sessions always use SESSION_TRANSACTED, and requesting
     * SESSION_TRANSACTED for a non-transacted session is rejected.
     *
     * @param transacted whether the new session will be transacted.
     * @param acknowledgeMode the acknowledge mode requested by the client.
     * @return the normalized acknowledge mode.
     * @throws JMSException if SESSION_TRANSACTED is requested while not transacted.
     */
    protected int getSessionAcknowledgeMode(boolean transacted, int acknowledgeMode) throws JMSException {
        int result = acknowledgeMode;
        if (!transacted && acknowledgeMode == Session.SESSION_TRANSACTED) {
            throw new JMSException("acknowledgeMode SESSION_TRANSACTED cannot be used for an non-transacted Session");
        }
        if (transacted) {
            result = Session.SESSION_TRANSACTED;
        }
        return result;
    }
    /** Removes a session from this connection's active session list. */
    protected void removeSession(JmsSession session) throws JMSException {
        this.sessions.remove(session);
    }
    /** Registers a newly created session with this connection. */
    protected void addSession(JmsSession s) {
        this.sessions.add(s);
    }
    /** Registers the dispatcher that receives inbound messages for the given consumer. */
    protected void addDispatcher(JmsConsumerId consumerId, JmsMessageDispatcher dispatcher) {
        dispatchers.put(consumerId, dispatcher);
    }
    /** Removes the dispatcher registered for the given consumer, if any. */
    protected void removeDispatcher(JmsConsumerId consumerId) {
        dispatchers.remove(consumerId);
    }
private void connect() throws JMSException {
synchronized(this.connectLock) {
if (isConnected() || closed.get()) {
return;
}
if (connectionInfo.getClientId() == null || connectionInfo.getClientId().trim().isEmpty()) {
connectionInfo.setClientId(clientIdGenerator.generateId());
}
this.connectionInfo = createResource(connectionInfo);
this.connected.set(true);
// TODO - Advisory Support.
//
// Providers should have an interface for adding a listener for temporary
// destination advisory messages for create / destroy so we can track them
// and throw exceptions when producers try to send to deleted destinations.
}
}
/**
* @return a newly initialized TemporaryQueue instance.
*/
protected TemporaryQueue createTemporaryQueue() throws JMSException {
String destinationName = connectionInfo.getConnectionId() + ":" + tempDestIdGenerator.incrementAndGet();
JmsTemporaryQueue queue = new JmsTemporaryQueue(destinationName);
queue = createResource(queue);
tempDestinations.put(queue, queue);
return queue;
}
/**
* @return a newly initialized TemporaryTopic instance.
*/
protected TemporaryTopic createTemporaryTopic() throws JMSException {
String destinationName = connectionInfo.getConnectionId() + ":" + tempDestIdGenerator.incrementAndGet();
JmsTemporaryTopic topic = new JmsTemporaryTopic(destinationName);
topic = createResource(topic);
tempDestinations.put(topic, topic);
return topic;
}
protected void deleteDestination(JmsDestination destination) throws JMSException {
checkClosedOrFailed();
connect();
try {
for (JmsSession session : this.sessions) {
if (session.isDestinationInUse(destination)) {
throw new JMSException("A consumer is consuming from the temporary destination");
}
}
if (destination.isTemporary()) {
tempDestinations.remove(destination);
}
destroyResource(destination);
} catch (Exception e) {
throw JmsExceptionSupport.create(e);
}
}
protected void checkClosedOrFailed() throws JMSException {
checkClosed();
if (failed.get()) {
throw new JmsConnectionFailedException(firstFailureError);
}
}
protected void checkClosed() throws IllegalStateException {
if (this.closed.get()) {
throw new IllegalStateException("The Connection is closed");
}
}
protected JmsSessionId getNextSessionId() {
return new JmsSessionId(connectionInfo.getConnectionId(), sessionIdGenerator.incrementAndGet());
}
protected JmsTransactionId getNextTransactionId() {
return new JmsTransactionId(connectionInfo.getConnectionId(), transactionIdGenerator.incrementAndGet());
}
////////////////////////////////////////////////////////////////////////////
// Provider interface methods
////////////////////////////////////////////////////////////////////////////
<T extends JmsResource> T createResource(T resource) throws JMSException {
checkClosedOrFailed();
try {
ProviderFuture request = new ProviderFuture();
provider.create(resource, request);
request.sync();
return resource;
} catch (Exception ex) {
throw JmsExceptionSupport.create(ex);
}
}
void startResource(JmsResource resource) throws JMSException {
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.start(resource, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void destroyResource(JmsResource resource) throws JMSException {
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.destroy(resource, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void send(JmsOutboundMessageDispatch envelope) throws JMSException {
checkClosedOrFailed();
connect();
// TODO - We don't currently have a way to say that an operation
// should be done asynchronously. A send can be done async
// in many cases, such as non-persistent delivery. We probably
// don't need to do anything here though just have a way to
// configure the provider for async sends which we do in the
// JmsConnectionInfo. Here we just need to register a listener
// on the request to know when it completes if we want to do
// JMS 2.0 style async sends where we signal a callback, then
// we can manage order of callback events to async senders at
// this level.
try {
ProviderFuture request = new ProviderFuture();
provider.send(envelope, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void acknowledge(JmsInboundMessageDispatch envelope, ACK_TYPE ackType) throws JMSException {
checkClosedOrFailed();
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.acknowledge(envelope, ackType, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void acknowledge(JmsSessionId sessionId) throws JMSException {
checkClosedOrFailed();
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.acknowledge(sessionId, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void unsubscribe(String name) throws JMSException {
checkClosedOrFailed();
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.unsubscribe(name, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void commit(JmsSessionId sessionId) throws JMSException {
checkClosedOrFailed();
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.commit(sessionId, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void rollback(JmsSessionId sessionId) throws JMSException {
checkClosedOrFailed();
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.rollback(sessionId, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void recover(JmsSessionId sessionId) throws JMSException {
checkClosedOrFailed();
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.recover(sessionId, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
void pull(JmsConsumerId consumerId, long timeout) throws JMSException {
checkClosedOrFailed();
connect();
try {
ProviderFuture request = new ProviderFuture();
provider.pull(consumerId, timeout, request);
request.sync();
} catch (Exception ioe) {
throw JmsExceptionSupport.create(ioe);
}
}
////////////////////////////////////////////////////////////////////////////
// Property setters and getters
////////////////////////////////////////////////////////////////////////////
/**
* @return ExceptionListener
* @see javax.jms.Connection#getExceptionListener()
*/
@Override
public ExceptionListener getExceptionListener() throws JMSException {
checkClosedOrFailed();
return this.exceptionListener;
}
/**
* @param listener
* @see javax.jms.Connection#setExceptionListener(javax.jms.ExceptionListener)
*/
@Override
public void setExceptionListener(ExceptionListener listener) throws JMSException {
checkClosedOrFailed();
this.exceptionListener = listener;
}
/**
* Adds a JmsConnectionListener so that a client can be notified of events in
* the underlying protocol provider.
*
* @param listener
* the new listener to add to the collection.
*/
public void addConnectionListener(JmsConnectionListener listener) {
this.connectionListeners.add(listener);
}
/**
* Removes a JmsConnectionListener that was previously registered.
*
* @param listener
* the listener to remove from the collection.
*/
public void removeTransportListener(JmsConnectionListener listener) {
this.connectionListeners.remove(listener);
}
public boolean isForceAsyncSend() {
return connectionInfo.isForceAsyncSend();
}
public void setForceAsyncSend(boolean forceAsyncSend) {
connectionInfo.setForceAsyncSends(forceAsyncSend);
}
public boolean isAlwaysSyncSend() {
return connectionInfo.isAlwaysSyncSend();
}
public void setAlwaysSyncSend(boolean alwaysSyncSend) {
this.connectionInfo.setAlwaysSyncSend(alwaysSyncSend);
}
public String getTopicPrefix() {
return connectionInfo.getTopicPrefix();
}
public void setTopicPrefix(String topicPrefix) {
connectionInfo.setTopicPrefix(topicPrefix);
}
public String getTempTopicPrefix() {
return connectionInfo.getTempTopicPrefix();
}
public void setTempTopicPrefix(String tempTopicPrefix) {
connectionInfo.setTempTopicPrefix(tempTopicPrefix);
}
public String getTempQueuePrefix() {
return connectionInfo.getTempQueuePrefix();
}
public void setTempQueuePrefix(String tempQueuePrefix) {
connectionInfo.setTempQueuePrefix(tempQueuePrefix);
}
public String getQueuePrefix() {
return connectionInfo.getQueuePrefix();
}
public void setQueuePrefix(String queuePrefix) {
connectionInfo.setQueuePrefix(queuePrefix);
}
public boolean isOmitHost() {
return connectionInfo.isOmitHost();
}
public void setOmitHost(boolean omitHost) {
connectionInfo.setOmitHost(omitHost);
}
public JmsPrefetchPolicy getPrefetchPolicy() {
return prefetchPolicy;
}
public void setPrefetchPolicy(JmsPrefetchPolicy prefetchPolicy) {
this.prefetchPolicy = prefetchPolicy;
}
public boolean isMessagePrioritySupported() {
return messagePrioritySupported;
}
public void setMessagePrioritySupported(boolean messagePrioritySupported) {
this.messagePrioritySupported = messagePrioritySupported;
}
public long getCloseTimeout() {
return connectionInfo.getCloseTimeout();
}
public void setCloseTimeout(long closeTimeout) {
connectionInfo.setCloseTimeout(closeTimeout);
}
public long getConnectTimeout() {
return this.connectionInfo.getConnectTimeout();
}
public void setConnectTimeout(long connectTimeout) {
this.connectionInfo.setConnectTimeout(connectTimeout);
}
public long getSendTimeout() {
return connectionInfo.getSendTimeout();
}
public void setSendTimeout(long sendTimeout) {
connectionInfo.setSendTimeout(sendTimeout);
}
public long getRequestTimeout() {
return connectionInfo.getRequestTimeout();
}
public void setRequestTimeout(long requestTimeout) {
connectionInfo.setRequestTimeout(requestTimeout);
}
public URI getBrokerURI() {
return brokerURI;
}
public void setBrokerURI(URI brokerURI) {
this.brokerURI = brokerURI;
}
public URI getLocalURI() {
return localURI;
}
public void setLocalURI(URI localURI) {
this.localURI = localURI;
}
public SSLContext getSslContext() {
return sslContext;
}
public void setSslContext(SSLContext sslContext) {
this.sslContext = sslContext;
}
public String getUsername() {
return this.connectionInfo.getUsername();
}
public void setUsername(String username) {
this.connectionInfo.setUsername(username);;
}
public String getPassword() {
return this.connectionInfo.getPassword();
}
public void setPassword(String password) {
this.connectionInfo.setPassword(password);
}
public Provider getProvider() {
return provider;
}
void setProvider(Provider provider) {
this.provider = provider;
}
public boolean isConnected() {
return this.connected.get();
}
public boolean isStarted() {
return this.started.get();
}
public boolean isClosed() {
return this.closed.get();
}
JmsConnectionId getConnectionId() {
return this.connectionInfo.getConnectionId();
}
public boolean isWatchRemoteDestinations() {
return this.connectionInfo.isWatchRemoteDestinations();
}
public void setWatchRemoteDestinations(boolean watchRemoteDestinations) {
this.connectionInfo.setWatchRemoteDestinations(watchRemoteDestinations);
}
public JmsMessageFactory getMessageFactory() {
if (messageFactory == null) {
throw new RuntimeException("Message factory should never be null");
}
return messageFactory;
}
public boolean isSendAcksAsync() {
return sendAcksAsync;
}
public void setSendAcksAsync(boolean sendAcksAsync) {
this.sendAcksAsync = sendAcksAsync;
}
@Override
public void onMessage(JmsInboundMessageDispatch envelope) {
JmsMessage incoming = envelope.getMessage();
// Ensure incoming Messages are in readonly mode.
if (incoming != null) {
incoming.setReadOnlyBody(true);
incoming.setReadOnlyProperties(true);
}
JmsMessageDispatcher dispatcher = dispatchers.get(envelope.getConsumerId());
if (dispatcher != null) {
dispatcher.onMessage(envelope);
}
for (JmsConnectionListener listener : connectionListeners) {
listener.onMessage(envelope);
}
}
@Override
public void onConnectionInterrupted(URI remoteURI) {
for (JmsSession session : sessions) {
session.onConnectionInterrupted();
}
for (JmsConnectionListener listener : connectionListeners) {
listener.onConnectionInterrupted(remoteURI);
}
}
@Override
public void onConnectionRecovery(Provider provider) throws Exception {
// TODO - Recover Advisory Consumer once we can support it.
LOG.debug("Connection {} is starting recovery.", connectionInfo.getConnectionId());
ProviderFuture request = new ProviderFuture();
provider.create(connectionInfo, request);
request.sync();
for (JmsDestination tempDestination : tempDestinations.values()) {
createResource(tempDestination);
}
for (JmsSession session : sessions) {
session.onConnectionRecovery(provider);
}
}
@Override
public void onConnectionRecovered(Provider provider) throws Exception {
LOG.debug("Connection {} is finalizing recovery.", connectionInfo.getConnectionId());
this.messageFactory = provider.getMessageFactory();
for (JmsSession session : sessions) {
session.onConnectionRecovered(provider);
}
}
@Override
public void onConnectionRestored(URI remoteURI) {
for (JmsSession session : sessions) {
session.onConnectionRestored();
}
for (JmsConnectionListener listener : connectionListeners) {
listener.onConnectionRestored(remoteURI);
}
}
@Override
public void onConnectionEstablished(URI remoteURI) {
LOG.info("Connection {} connected to remote Broker: {}", connectionInfo.getConnectionId(), remoteURI);
this.messageFactory = provider.getMessageFactory();
// TODO - For events triggered from the Provider thread, we might want to consider always
// firing the client level events on the Connection executor to prevent the client
// from stalling the provider thread.
for (JmsConnectionListener listener : connectionListeners) {
listener.onConnectionEstablished(remoteURI);
}
}
@Override
public void onConnectionFailure(final IOException ex) {
onAsyncException(ex);
if (!closing.get() && !closed.get()) {
executor.execute(new Runnable() {
@Override
public void run() {
providerFailed(ex);
if (provider != null) {
try {
provider.close();
} catch (Throwable error) {
LOG.debug("Error while closing failed Provider: {}", error.getMessage());
}
}
try {
shutdown();
} catch (JMSException e) {
LOG.warn("Exception during connection cleanup, " + e, e);
}
for (JmsConnectionListener listener : connectionListeners) {
listener.onConnectionFailure(ex);
}
}
});
}
}
/**
* Handles any asynchronous errors that occur from the JMS framework classes.
*
* If any listeners are registered they will be notified of the error from a thread
* in the Connection's Executor service.
*
* @param error
* The exception that triggered this error.
*/
public void onAsyncException(Throwable error) {
if (!closed.get() && !closing.get()) {
if (this.exceptionListener != null) {
if (!(error instanceof JMSException)) {
error = JmsExceptionSupport.create(error);
}
final JMSException jmsError = (JMSException)error;
executor.execute(new Runnable() {
@Override
public void run() {
JmsConnection.this.exceptionListener.onException(jmsError);
}
});
} else {
LOG.debug("Async exception with no exception listener: " + error, error);
}
}
}
protected void providerFailed(IOException error) {
failed.set(true);
if (firstFailureError == null) {
firstFailureError = error;
}
}
}
| Fix some issues found with Findbugs | qpid-jms-client/src/main/java/org/apache/qpid/jms/JmsConnection.java | Fix some issues found with Findbugs | <ide><path>pid-jms-client/src/main/java/org/apache/qpid/jms/JmsConnection.java
<ide> destroyResource(connectionInfo);
<ide> }
<ide>
<del> if (clientIdSet) {
<del> connectionInfo.setClientId(null);
<del> clientIdSet = false;
<add> synchronized (this) {
<add> if (clientIdSet) {
<add> connectionInfo.setClientId(null);
<add> clientIdSet = false;
<add> }
<ide> }
<ide>
<ide> tempDestinations.clear();
<ide> * @see javax.jms.Connection#getClientID()
<ide> */
<ide> @Override
<del> public String getClientID() throws JMSException {
<add> public synchronized String getClientID() throws JMSException {
<ide> checkClosedOrFailed();
<ide> return this.connectionInfo.getClientId();
<ide> } |
|
Java | mit | edb6c01c7212b66eaa337fc59863f8b7b46185ad | 0 | HasMuh/MontyHacksProject | ExampleOfJava.java | public class Example
{
public static void main(String[] args)
{
System.out.println("This is a java file");
}
}
| Delete ExampleOfJava.java | ExampleOfJava.java | Delete ExampleOfJava.java | <ide><path>xampleOfJava.java
<del>public class Example
<del>{
<del> public static void main(String[] args)
<del> {
<del> System.out.println("This is a java file");
<del> }
<del>} |
||
JavaScript | apache-2.0 | fac1a431ef8a779ca674236cd6366cc34b6d3380 | 0 | entrylabs/entryjs,entrylabs/entryjs,entrylabs/entryjs | Entry.Comment = class Comment {
dragMode = Entry.DRAG_MODE_NONE;
offsetX = 50;
offsetY = 10;
constructor(block, board) {
Entry.Model(this, false);
if (!board.svgCommentGroup) {
return;
}
const { comment, view } = block;
const { svgGroup } = view || {};
this._block = block;
this._board = board;
this._blockView = view;
if (svgGroup && !(board instanceof Entry.BlockMenu)) {
console.time('a');
this.createComment();
this.startRender();
this.addControl();
console.timeEnd('a');
}
this.observe(this, 'updateOpacity', ['visible'], false);
}
get block() {
return this._block;
}
get board() {
return this._board;
}
get blockView() {
return this._blockView;
}
createComment() {
// const { comment, view } = this.block;
let thread = this.block.getThread();
while (!(thread.parent instanceof Entry.Code)) {
if (thread instanceof Entry.FieldBlock) {
thread = thread.getParentThread();
} else {
thread = thread.parent.getThread();
}
}
const { svgGroup, pathGroup } = this.blockView || {};
this.pathGroup = pathGroup;
this.parentGroup = svgGroup;
this.svgGroup = this._blockView.commentShapeGroup;
this.mouseDown = this.mouseDown.bind(this);
this.mouseMove = this.mouseMove.bind(this);
this.mouseUp = this.mouseUp.bind(this);
}
startRender() {
if (this.svgGroup) {
this._line = this.svgGroup.elem('line');
this._comment = this.svgGroup.elem('rect');
const { width } = this.pathGroup.getBBox();
const { topFieldHeight, height } = this._blockView;
const startX = width;
const startY = 0;
const defaultLineLength = this.defaultLineLength;
const lineHeight = (topFieldHeight || height) / 2;
this._comment.attr({
width: this.commentWidth,
height: this.commentTitleHeight,
x: startX + defaultLineLength,
y: startY + lineHeight - this.commentTitleHeight / 2,
stroke: '#EDA913',
fill: '#FBB315',
rx: '4',
});
this._line.attr({
x1: startX,
y1: startY + lineHeight,
x2: startX + defaultLineLength,
y2: startY + lineHeight,
style: 'stroke:#eda913;stroke-width:2',
});
this.set({
startX,
startY: startY + lineHeight,
});
this.canRender = true;
}
}
updatePos() {
if (this.pathGroup) {
const { width } = this.pathGroup.getBBox();
const matrix = this.parentGroup.getCTM();
const { x: pathX, y: pathY } = Entry.GlobalSvg.getRelativePoint(matrix);
const startX = pathX + width + this.defaultLineLength;
const startY = pathY;
this._line.attr({
x2: startX,
y2: startY + this.commentTitleHeight / 2,
});
this.set({
startX,
startY,
});
}
}
moveTo(x, y) {
const thisX = this.x;
const thisY = this.y;
if (!this.display) {
x = -99999;
y = -99999;
}
if (thisX !== x || thisY !== y) {
this.set({ x, y });
}
if (this.visible && this.display) {
this.setPosition();
}
}
moveBy(x, y) {
return this.moveTo(this.x + x, this.y + y);
}
setPosition() {
this._comment.attr({
x: this.x + this.startX + this.defaultLineLength,
y: this.y + this.startY - this.commentTitleHeight / 2,
});
this._line.attr({
x2: this.x + this.startX + this.commentWidth / 2 + this.defaultLineLength,
y2: this.y + this.startY,
});
}
mouseDown(e) {
e.preventDefault();
e.stopPropagation();
const { scale = 1 } = this.board || {};
if (
(e.button === 0 || (e.originalEvent && e.originalEvent.touches)) &&
!this._board.readOnly
) {
const mouseEvent = Entry.Utils.convertMouseEvent(e);
const matrix = this.svgGroup.getCTM();
const { x, y } = Entry.GlobalSvg.getRelativePoint(matrix);
this.mouseDownCoordinate = {
x: mouseEvent.pageX,
y: mouseEvent.pageY,
parentX: x,
parentY: y,
};
document.onmousemove = this.mouseMove;
document.ontouchmove = this.mouseMove;
document.onmouseup = this.mouseUp;
document.ontouchend = this.mouseUp;
console.log(x, this.startX);
this.dragInstance = new Entry.DragInstance({
startX: x / scale + this.startX,
startY: y / scale + this.startY,
offsetX: mouseEvent.pageX,
offsetY: mouseEvent.pageY,
mode: true,
});
this.dragMode = Entry.DRAG_MODE_MOUSEDOWN;
}
}
mouseMove(e) {
e.preventDefault();
e.stopPropagation();
const mouseEvent = Entry.Utils.convertMouseEvent(e);
const diff = Math.sqrt(
Math.pow(mouseEvent.pageX - this.mouseDownCoordinate.x, 2) +
Math.pow(mouseEvent.pageY - this.mouseDownCoordinate.y, 2)
);
if (this.dragMode == Entry.DRAG_MODE_DRAG || diff > Entry.BlockView.DRAG_RADIUS) {
this.set({ visible: false });
const workspaceMode = this.board.workspace.getMode();
const dragInstance = this.dragInstance;
const { scale = 1 } = this.board || {};
if (this.dragMode != Entry.DRAG_MODE_DRAG) {
this.dragMode = Entry.DRAG_MODE_DRAG;
Entry.GlobalSvg.setComment(this, workspaceMode);
const offset = this.board.offset();
Entry.GlobalSvg._applyDomPos(offset.left / scale, offset.top / scale);
}
this.moveBy(
(mouseEvent.pageX - dragInstance.offsetX) / scale,
(mouseEvent.pageY - dragInstance.offsetY) / scale,
false,
true
);
dragInstance.set({
offsetX: mouseEvent.pageX,
offsetY: mouseEvent.pageY,
});
Entry.GlobalSvg.commentPosition(dragInstance);
}
}
mouseUp(e) {
e.preventDefault();
e.stopPropagation();
this.dragMode = Entry.DRAG_MODE_NONE;
document.onmousemove = undefined;
document.ontouchmove = undefined;
document.onmouseup = undefined;
document.ontouchend = undefined;
this.board.set({ dragBlock: null });
Entry.GlobalSvg.remove();
this.set({ visible: true });
this.setPosition();
delete this.mouseDownCoordinate;
delete this.dragInstance;
}
addControl() {
const dom = this.svgGroup;
dom.onmousedown = this.mouseDown;
dom.ontouchstart = this.mouseDown;
}
updateOpacity = function() {
if (this.visible === false) {
this.svgGroup.attr({ opacity: 0 });
} else {
this.svgGroup.removeAttr('opacity');
this.setPosition();
}
};
isReadOnly() {
return this.readOnly;
}
getBoard() {
return undefined;
}
getAbsoluteCoordinate(dragMode = this.dragMode) {
const { scale = 1 } = this.board || {};
let pos = null;
const { parentX, parentY } = this.mouseDownCoordinate;
const posX = this.x + this.startX + parentX + this.defaultLineLength;
const posY = this.y + this.startY + parentY;
if (dragMode === Entry.DRAG_MODE_DRAG) {
pos = {
x: posX,
y: posY,
scaleX: this.x + this.startX + parentX / scale + this.defaultLineLength,
scaleY: this.y + this.startY + parentY / scale,
};
} else {
pos = this.block.getThread().view.requestAbsoluteCoordinate(this);
pos.x += posX;
pos.y += posY;
pos.scaleX = pos.x / scale;
pos.scaleY = pos.y / scale;
}
return pos;
}
};
Entry.Comment.prototype.schema = {
x: 0,
y: 0,
moveX: 0,
moveY: 0,
startX: 0,
startY: 0,
readOnly: false,
visible: true,
display: true,
movable: true,
commentWidth: 160,
commentTitleHeight: 22,
defaultLineLength: 120,
};
| src/playground/comment.js | Entry.Comment = class Comment {
dragMode = Entry.DRAG_MODE_NONE;
offsetX = 50;
offsetY = 10;
constructor(block, board) {
Entry.Model(this, false);
if (!board.svgCommentGroup) {
return;
}
const { comment, view } = block;
const { svgGroup } = view || {};
this._block = block;
this._board = board;
this._blockView = view;
if (svgGroup && !(board instanceof Entry.BlockMenu)) {
console.time('a');
this.createComment();
this.startRender();
this.addControl();
console.timeEnd('a');
}
this.observe(this, 'updateOpacity', ['visible'], false);
}
get block() {
return this._block;
}
get board() {
return this._board;
}
get blockView() {
return this._blockView;
}
createComment() {
// const { comment, view } = this.block;
let thread = this.block.getThread();
while (!(thread.parent instanceof Entry.Code)) {
if (thread instanceof Entry.FieldBlock) {
thread = thread.getParentThread();
} else {
thread = thread.parent.getThread();
}
}
const { svgGroup, pathGroup } = this.blockView || {};
this.pathGroup = pathGroup;
this.parentGroup = svgGroup;
this.svgGroup = this._blockView.commentShapeGroup;
this.mouseDown = this.mouseDown.bind(this);
this.mouseMove = this.mouseMove.bind(this);
this.mouseUp = this.mouseUp.bind(this);
}
startRender() {
if (this.svgGroup) {
this._line = this.svgGroup.elem('line');
this._comment = this.svgGroup.elem('rect');
const { width } = this.pathGroup.getBBox();
const { topFieldHeight, height } = this._blockView;
const startX = width;
const startY = 0;
const defaultLineLength = this.defaultLineLength;
const lineHeight = (topFieldHeight || height) / 2;
this._comment.attr({
width: this.commentWidth,
height: this.commentTitleHeight,
x: startX + defaultLineLength,
y: startY + lineHeight - this.commentTitleHeight / 2,
stroke: '#EDA913',
fill: '#FBB315',
rx: '4',
});
this._line.attr({
x1: startX,
y1: startY + lineHeight,
x2: startX + defaultLineLength,
y2: startY + lineHeight,
style: 'stroke:#eda913;stroke-width:2',
});
this.set({
startX,
startY: startY + lineHeight,
});
this.canRender = true;
}
}
updatePos() {
if (this.pathGroup) {
const { width } = this.pathGroup.getBBox();
const matrix = this.parentGroup.getCTM();
const { x: pathX, y: pathY } = Entry.GlobalSvg.getRelativePoint(matrix);
const startX = pathX + width + this.defaultLineLength;
const startY = pathY;
this._line.attr({
x2: startX,
y2: startY + this.commentTitleHeight / 2,
});
this.set({
startX,
startY,
});
}
}
moveTo(x, y) {
const thisX = this.x;
const thisY = this.y;
if (!this.display) {
x = -99999;
y = -99999;
}
if (thisX !== x || thisY !== y) {
this.set({ x, y });
}
if (this.visible && this.display) {
this.setPosition();
}
}
moveBy(x, y) {
return this.moveTo(this.x + x, this.y + y);
}
setPosition() {
this._comment.attr({
x: this.x + this.startX + this.defaultLineLength,
y: this.y + this.startY - this.commentTitleHeight / 2,
});
this._line.attr({
x2: this.x + this.startX + this.commentWidth / 2 + this.defaultLineLength,
y2: this.y + this.startY,
});
}
mouseDown(e) {
e.preventDefault();
e.stopPropagation();
const { scale = 1 } = this.board || {};
if (
(e.button === 0 || (e.originalEvent && e.originalEvent.touches)) &&
!this._board.readOnly
) {
const mouseEvent = Entry.Utils.convertMouseEvent(e);
const matrix = this.svgGroup.getCTM();
const { x, y } = Entry.GlobalSvg.getRelativePoint(matrix);
this.mouseDownCoordinate = {
x: mouseEvent.pageX,
y: mouseEvent.pageY,
parentX: x,
parentY: y,
};
document.onmousemove = this.mouseMove;
document.ontouchmove = this.mouseMove;
document.onmouseup = this.mouseUp;
document.ontouchend = this.mouseUp;
this.dragInstance = new Entry.DragInstance({
startX: (x + this.startX) / scale,
startY: (y + this.startY) / scale,
offsetX: mouseEvent.pageX,
offsetY: mouseEvent.pageY,
mode: true,
});
this.dragMode = Entry.DRAG_MODE_MOUSEDOWN;
}
}
mouseMove(e) {
e.preventDefault();
e.stopPropagation();
const mouseEvent = Entry.Utils.convertMouseEvent(e);
const diff = Math.sqrt(
Math.pow(mouseEvent.pageX - this.mouseDownCoordinate.x, 2) +
Math.pow(mouseEvent.pageY - this.mouseDownCoordinate.y, 2)
);
if (this.dragMode == Entry.DRAG_MODE_DRAG || diff > Entry.BlockView.DRAG_RADIUS) {
this.set({ visible: false });
const workspaceMode = this.board.workspace.getMode();
const dragInstance = this.dragInstance;
const { scale = 1 } = this.board || {};
if (this.dragMode != Entry.DRAG_MODE_DRAG) {
this.dragMode = Entry.DRAG_MODE_DRAG;
Entry.GlobalSvg.setComment(this, workspaceMode);
const offset = this.board.offset();
Entry.GlobalSvg._applyDomPos(offset.left / scale, offset.top / scale);
}
this.moveBy(
(mouseEvent.pageX - dragInstance.offsetX) / scale,
(mouseEvent.pageY - dragInstance.offsetY) / scale,
false,
true
);
dragInstance.set({
offsetX: mouseEvent.pageX,
offsetY: mouseEvent.pageY,
});
Entry.GlobalSvg.commentPosition(dragInstance);
}
}
mouseUp(e) {
e.preventDefault();
e.stopPropagation();
this.dragMode = Entry.DRAG_MODE_NONE;
document.onmousemove = undefined;
document.ontouchmove = undefined;
document.onmouseup = undefined;
document.ontouchend = undefined;
this.board.set({ dragBlock: null });
Entry.GlobalSvg.remove();
this.set({ visible: true });
this.setPosition();
delete this.mouseDownCoordinate;
delete this.dragInstance;
}
addControl() {
const dom = this.svgGroup;
dom.onmousedown = this.mouseDown;
dom.ontouchstart = this.mouseDown;
}
updateOpacity = function() {
if (this.visible === false) {
this.svgGroup.attr({ opacity: 0 });
} else {
this.svgGroup.removeAttr('opacity');
this.setPosition();
}
};
isReadOnly() {
return this.readOnly;
}
getBoard() {
return undefined;
}
getAbsoluteCoordinate(dragMode = this.dragMode) {
const { scale = 1 } = this.board || {};
let pos = null;
const { parentX, parentY } = this.mouseDownCoordinate;
const posX = this.x + this.startX + parentX + this.defaultLineLength;
const posY = this.y + this.startY + parentY;
if (dragMode === Entry.DRAG_MODE_DRAG) {
pos = {
x: posX,
y: posY,
scaleX: posX / scale,
scaleY: posY / scale,
};
} else {
pos = this.block.getThread().view.requestAbsoluteCoordinate(this);
pos.x += posX;
pos.y += posY;
pos.scaleX = pos.x / scale;
pos.scaleY = pos.y / scale;
}
return pos;
}
};
Entry.Comment.prototype.schema = {
x: 0,
y: 0,
moveX: 0,
moveY: 0,
startX: 0,
startY: 0,
readOnly: false,
visible: true,
display: true,
movable: true,
commentWidth: 160,
commentTitleHeight: 22,
defaultLineLength: 120,
};
| edit comment line start position
| src/playground/comment.js | edit comment line start position | <ide><path>rc/playground/comment.js
<ide> document.ontouchmove = this.mouseMove;
<ide> document.onmouseup = this.mouseUp;
<ide> document.ontouchend = this.mouseUp;
<add> console.log(x, this.startX);
<ide> this.dragInstance = new Entry.DragInstance({
<del> startX: (x + this.startX) / scale,
<del> startY: (y + this.startY) / scale,
<add> startX: x / scale + this.startX,
<add> startY: y / scale + this.startY,
<ide> offsetX: mouseEvent.pageX,
<ide> offsetY: mouseEvent.pageY,
<ide> mode: true,
<ide> pos = {
<ide> x: posX,
<ide> y: posY,
<del> scaleX: posX / scale,
<del> scaleY: posY / scale,
<add> scaleX: this.x + this.startX + parentX / scale + this.defaultLineLength,
<add> scaleY: this.y + this.startY + parentY / scale,
<ide> };
<ide> } else {
<ide> pos = this.block.getThread().view.requestAbsoluteCoordinate(this); |
|
Java | epl-1.0 | 9e03eede8b00f447894bb5a3b96f6e1a46b1511b | 0 | jcryptool/core,jcryptool/core,jcryptool/core,jcryptool/core,jcryptool/core | // -----BEGIN DISCLAIMER-----
/*******************************************************************************
* Copyright (c) 2011, 2020 JCrypTool Team and Contributors
*
* All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse
* Public License v1.0 which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*******************************************************************************/
// -----END DISCLAIMER-----
/**
*
*/
package org.jcryptool.crypto.keystore.ui.views.nodes.keys;
import org.eclipse.jface.resource.ImageDescriptor;
import org.jcryptool.core.util.images.ImageService;
import org.jcryptool.crypto.keystore.KeyStorePlugin;
import org.jcryptool.crypto.keystore.keys.IKeyStoreAlias;
import org.jcryptool.crypto.keystore.ui.views.nodes.TreeNode;
/**
* @author tkern
*
*/
public class KeyPairNode extends TreeNode {
private int nameCounter = 0;
private AbstractKeyNode privateKeyNode;
private AbstractKeyNode publicKeyNode;
private IKeyStoreAlias privateAlias, publicAlias;
public KeyPairNode(IKeyStoreAlias privateAlias, IKeyStoreAlias publicAlias) {
super(
getOperation(privateAlias, publicAlias)
+ " (" + Messages.getString("Label.KeyStrength") + ": " //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ ((getKeyLength(privateAlias, publicAlias) > 0) ? getKeyLength(privateAlias, publicAlias)
: "n/a") + ")"/* + (addCounter==0?"":" ("+addCounter+")") */); //$NON-NLS-1$ //$NON-NLS-2$
this.privateAlias = privateAlias;
this.publicAlias = publicAlias;
// super(getOperation(privateAlias, publicAlias) + " " +
// KeyStorePlugin.getResourceBundle().getString("Label.KeyPair") + " ("+
// KeyStorePlugin.getResourceBundle().getString("Label.KeyStrength") +": " + getKeyLength(privateAlias,
// publicAlias) + ")");
if (privateAlias != null) {
privateKeyNode = new PrivateKeyNode(privateAlias);
this.addChild(privateKeyNode);
}
if (publicAlias != null) {
publicKeyNode = new CertificateNode(publicAlias);
this.addChild(publicKeyNode);
}
}
public void incNameCounter() {
nameCounter++;
}
@Override
public String getName() {
return getOperation(privateAlias, publicAlias)
+ Messages.getString("Label.KeyPair") + " (" + Messages.getString("Label.KeyStrength") + ": " //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ ((getKeyLength(privateAlias, publicAlias) > 0) ? getKeyLength(privateAlias, publicAlias) : "n/a") + ")" + (nameCounter == 0 ? "" : " (" + nameCounter + ")"); //$NON-NLS-1$ //$NON-NLS-2$
}
private static String getOperation(IKeyStoreAlias privateAlias, IKeyStoreAlias publicAlias) {
if (privateAlias != null) {
return privateAlias.getOperation();
} else if (publicAlias != null) {
return publicAlias.getOperation();
}
else {
return "<undefined>"; //$NON-NLS-1$
}
}
private static int getKeyLength(IKeyStoreAlias privateAlias, IKeyStoreAlias publicAlias) {
if (privateAlias != null) {
return privateAlias.getKeyLength();
} else if (publicAlias != null) {
return publicAlias.getKeyLength();
} else {
return -1;
}
}
public void addPrivateKey(IKeyStoreAlias privateAlias) {
if (this.privateKeyNode != null) {
return;
} else {
this.privateAlias = privateAlias;
privateKeyNode = new PrivateKeyNode(privateAlias);
this.addChild(privateKeyNode);
}
}
public void addPublicKey(IKeyStoreAlias publicAlias) {
if (this.publicKeyNode != null) {
return;
} else {
this.publicAlias = publicAlias;
publicKeyNode = new CertificateNode(publicAlias);
this.addChild(publicKeyNode);
}
}
public IKeyStoreAlias getPrivateKeyAlias() {
if (privateKeyNode != null) {
return privateKeyNode.getAlias();
} else {
return null;
}
}
public IKeyStoreAlias getPublicKeyAlias() {
if (publicKeyNode != null) {
return publicKeyNode.getAlias();
} else {
return null;
}
}
/**
* @see org.jcryptool.crypto.keystore.ui.views.nodes.TreeNode#getImageDescriptor()
*/
public ImageDescriptor getImageDescriptor() {
return ImageService.getImageDescriptor(KeyStorePlugin.PLUGIN_ID, "icons/16x16/kgpg_key2.png");
}
}
| org.jcryptool.crypto.keystore/src/org/jcryptool/crypto/keystore/ui/views/nodes/keys/KeyPairNode.java | // -----BEGIN DISCLAIMER-----
/*******************************************************************************
* Copyright (c) 2011, 2020 JCrypTool Team and Contributors
*
* All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse
* Public License v1.0 which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*******************************************************************************/
// -----END DISCLAIMER-----
/**
*
*/
package org.jcryptool.crypto.keystore.ui.views.nodes.keys;
import org.eclipse.jface.resource.ImageDescriptor;
import org.jcryptool.core.util.images.ImageService;
import org.jcryptool.crypto.keystore.KeyStorePlugin;
import org.jcryptool.crypto.keystore.keys.IKeyStoreAlias;
import org.jcryptool.crypto.keystore.ui.views.nodes.TreeNode;
/**
* @author tkern
*
*/
public class KeyPairNode extends TreeNode {
private int nameCounter = 0;
private AbstractKeyNode privateKeyNode;
private AbstractKeyNode publicKeyNode;
private IKeyStoreAlias privateAlias, publicAlias;
public KeyPairNode(IKeyStoreAlias privateAlias, IKeyStoreAlias publicAlias) {
super(
getOperation(privateAlias, publicAlias)
+ Messages.getString("Label.KeyPair") + " (" + Messages.getString("Label.KeyStrength") + ": " //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ ((getKeyLength(privateAlias, publicAlias) > 0) ? getKeyLength(privateAlias, publicAlias)
: "n/a") + ")"/* + (addCounter==0?"":" ("+addCounter+")") */); //$NON-NLS-1$ //$NON-NLS-2$
this.privateAlias = privateAlias;
this.publicAlias = publicAlias;
// super(getOperation(privateAlias, publicAlias) + " " +
// KeyStorePlugin.getResourceBundle().getString("Label.KeyPair") + " ("+
// KeyStorePlugin.getResourceBundle().getString("Label.KeyStrength") +": " + getKeyLength(privateAlias,
// publicAlias) + ")");
if (privateAlias != null) {
privateKeyNode = new PrivateKeyNode(privateAlias);
this.addChild(privateKeyNode);
}
if (publicAlias != null) {
publicKeyNode = new CertificateNode(publicAlias);
this.addChild(publicKeyNode);
}
}
public void incNameCounter() {
nameCounter++;
}
@Override
public String getName() {
return getOperation(privateAlias, publicAlias)
+ Messages.getString("Label.KeyPair") + " (" + Messages.getString("Label.KeyStrength") + ": " //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ ((getKeyLength(privateAlias, publicAlias) > 0) ? getKeyLength(privateAlias, publicAlias) : "n/a") + ")" + (nameCounter == 0 ? "" : " (" + nameCounter + ")"); //$NON-NLS-1$ //$NON-NLS-2$
}
private static String getOperation(IKeyStoreAlias privateAlias, IKeyStoreAlias publicAlias) {
if (privateAlias != null) {
return privateAlias.getOperation();
} else if (publicAlias != null) {
return publicAlias.getOperation();
}
else {
return "<undefined>"; //$NON-NLS-1$
}
}
private static int getKeyLength(IKeyStoreAlias privateAlias, IKeyStoreAlias publicAlias) {
if (privateAlias != null) {
return privateAlias.getKeyLength();
} else if (publicAlias != null) {
return publicAlias.getKeyLength();
} else {
return -1;
}
}
public void addPrivateKey(IKeyStoreAlias privateAlias) {
if (this.privateKeyNode != null) {
return;
} else {
this.privateAlias = privateAlias;
privateKeyNode = new PrivateKeyNode(privateAlias);
this.addChild(privateKeyNode);
}
}
public void addPublicKey(IKeyStoreAlias publicAlias) {
if (this.publicKeyNode != null) {
return;
} else {
this.publicAlias = publicAlias;
publicKeyNode = new CertificateNode(publicAlias);
this.addChild(publicKeyNode);
}
}
public IKeyStoreAlias getPrivateKeyAlias() {
if (privateKeyNode != null) {
return privateKeyNode.getAlias();
} else {
return null;
}
}
public IKeyStoreAlias getPublicKeyAlias() {
if (publicKeyNode != null) {
return publicKeyNode.getAlias();
} else {
return null;
}
}
/**
* @see org.jcryptool.crypto.keystore.ui.views.nodes.TreeNode#getImageDescriptor()
*/
public ImageDescriptor getImageDescriptor() {
return ImageService.getImageDescriptor(KeyStorePlugin.PLUGIN_ID, "icons/16x16/kgpg_key2.png");
}
}
| remove "Key Pair" qualification from keystore view entries | org.jcryptool.crypto.keystore/src/org/jcryptool/crypto/keystore/ui/views/nodes/keys/KeyPairNode.java | remove "Key Pair" qualification from keystore view entries | <ide><path>rg.jcryptool.crypto.keystore/src/org/jcryptool/crypto/keystore/ui/views/nodes/keys/KeyPairNode.java
<ide> public KeyPairNode(IKeyStoreAlias privateAlias, IKeyStoreAlias publicAlias) {
<ide> super(
<ide> getOperation(privateAlias, publicAlias)
<del> + Messages.getString("Label.KeyPair") + " (" + Messages.getString("Label.KeyStrength") + ": " //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
<add> + " (" + Messages.getString("Label.KeyStrength") + ": " //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
<ide> + ((getKeyLength(privateAlias, publicAlias) > 0) ? getKeyLength(privateAlias, publicAlias)
<ide> : "n/a") + ")"/* + (addCounter==0?"":" ("+addCounter+")") */); //$NON-NLS-1$ //$NON-NLS-2$
<ide> |
|
Java | apache-2.0 | 0cb69836f91e39cc7dff5c2f695687981fd869a9 | 0 | clibois/wss4j,clibois/wss4j,asoldano/wss4j,apache/wss4j,jimma/wss4j,asoldano/wss4j,jimma/wss4j,apache/wss4j | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.wss4j.dom.processor;
import java.io.IOException;
import java.io.InputStream;
import java.security.NoSuchAlgorithmException;
import java.security.Principal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.UnsupportedCallbackException;
import org.apache.wss4j.common.ext.Attachment;
import org.apache.wss4j.common.ext.AttachmentRequestCallback;
import org.apache.wss4j.common.ext.AttachmentResultCallback;
import org.apache.wss4j.common.util.AttachmentUtils;
import org.apache.xml.security.algorithms.JCEMapper;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.apache.wss4j.common.bsp.BSPRule;
import org.apache.wss4j.common.crypto.AlgorithmSuite;
import org.apache.wss4j.common.crypto.AlgorithmSuiteValidator;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.common.principal.WSDerivedKeyTokenPrincipal;
import org.apache.wss4j.common.util.KeyUtils;
import org.apache.wss4j.dom.WSConstants;
import org.apache.wss4j.dom.WSDataRef;
import org.apache.wss4j.dom.WSDocInfo;
import org.apache.wss4j.dom.WSSecurityEngineResult;
import org.apache.wss4j.dom.bsp.BSPEnforcer;
import org.apache.wss4j.dom.handler.RequestData;
import org.apache.wss4j.dom.message.CallbackLookup;
import org.apache.wss4j.dom.message.DOMCallbackLookup;
import org.apache.wss4j.dom.message.token.SecurityTokenReference;
import org.apache.wss4j.dom.str.STRParser;
import org.apache.wss4j.dom.str.SecurityTokenRefSTRParser;
import org.apache.wss4j.dom.util.WSSecurityUtil;
import org.apache.xml.security.encryption.XMLCipher;
import org.apache.xml.security.encryption.XMLEncryptionException;
public class ReferenceListProcessor implements Processor {
private static final org.slf4j.Logger LOG =
org.slf4j.LoggerFactory.getLogger(ReferenceListProcessor.class);
public List<WSSecurityEngineResult> handleToken(
Element elem,
RequestData data,
WSDocInfo wsDocInfo
) throws WSSecurityException {
if (LOG.isDebugEnabled()) {
LOG.debug("Found reference list element");
}
List<WSDataRef> dataRefs = handleReferenceList(elem, data, wsDocInfo);
WSSecurityEngineResult result =
new WSSecurityEngineResult(WSConstants.ENCR, dataRefs);
result.put(WSSecurityEngineResult.TAG_ID, elem.getAttributeNS(null, "Id"));
wsDocInfo.addTokenElement(elem);
wsDocInfo.addResult(result);
return java.util.Collections.singletonList(result);
}
/**
* Dereferences and decodes encrypted data elements.
*
* @param elem contains the <code>ReferenceList</code> to the encrypted
* data elements
*/
private List<WSDataRef> handleReferenceList(
Element elem,
RequestData data,
WSDocInfo wsDocInfo
) throws WSSecurityException {
List<WSDataRef> dataRefs = new ArrayList<WSDataRef>();
//find out if there's an EncryptedKey in the doc (AsymmetricBinding)
Element wsseHeaderElement = wsDocInfo.getSecurityHeader();
boolean asymBinding = WSSecurityUtil.getDirectChildElement(
wsseHeaderElement, WSConstants.ENC_KEY_LN, WSConstants.ENC_NS) != null;
for (Node node = elem.getFirstChild();
node != null;
node = node.getNextSibling()
) {
if (Node.ELEMENT_NODE == node.getNodeType()
&& WSConstants.ENC_NS.equals(node.getNamespaceURI())
&& "DataReference".equals(node.getLocalName())) {
String dataRefURI = ((Element) node).getAttributeNS(null, "URI");
if (dataRefURI.charAt(0) == '#') {
dataRefURI = dataRefURI.substring(1);
}
if (wsDocInfo.getResultByTag(WSConstants.ENCR, dataRefURI) == null) {
WSDataRef dataRef =
decryptDataRefEmbedded(
elem.getOwnerDocument(), dataRefURI, data, wsDocInfo, asymBinding);
dataRefs.add(dataRef);
}
}
}
return dataRefs;
}
/**
* Decrypt an (embedded) EncryptedData element referenced by dataRefURI.
*/
private WSDataRef decryptDataRefEmbedded(
Document doc,
String dataRefURI,
RequestData data,
WSDocInfo wsDocInfo,
boolean asymBinding
) throws WSSecurityException {
if (LOG.isDebugEnabled()) {
LOG.debug("Found data reference: " + dataRefURI);
}
//
// Find the encrypted data element referenced by dataRefURI
//
Element encryptedDataElement = findEncryptedDataElement(doc, wsDocInfo, dataRefURI);
if (encryptedDataElement != null && asymBinding && data.isRequireSignedEncryptedDataElements()) {
WSSecurityUtil.verifySignedElement(encryptedDataElement, doc, wsDocInfo.getSecurityHeader());
}
//
// Prepare the SecretKey object to decrypt EncryptedData
//
String symEncAlgo = X509Util.getEncAlgo(encryptedDataElement);
Element keyInfoElement =
WSSecurityUtil.getDirectChildElement(
encryptedDataElement, "KeyInfo", WSConstants.SIG_NS
);
// KeyInfo cannot be null
if (keyInfoElement == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.INVALID_SECURITY, "noKeyinfo");
}
// Check BSP compliance
checkBSPCompliance(keyInfoElement, symEncAlgo, data.getBSPEnforcer());
//
// Try to get a security reference token, if none found try to get a
// shared key using a KeyName.
//
Element secRefToken =
WSSecurityUtil.getDirectChildElement(
keyInfoElement, "SecurityTokenReference", WSConstants.WSSE_NS
);
SecretKey symmetricKey = null;
Principal principal = null;
if (secRefToken == null) {
symmetricKey = X509Util.getSharedKey(keyInfoElement, symEncAlgo, data.getCallbackHandler());
} else {
STRParser strParser = new SecurityTokenRefSTRParser();
Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put(SecurityTokenRefSTRParser.SIGNATURE_METHOD, symEncAlgo);
strParser.parseSecurityTokenReference(
secRefToken, data,
wsDocInfo, parameters
);
byte[] secretKey = strParser.getSecretKey();
principal = strParser.getPrincipal();
symmetricKey = KeyUtils.prepareSecretKey(symEncAlgo, secretKey);
}
// Check for compliance against the defined AlgorithmSuite
AlgorithmSuite algorithmSuite = data.getAlgorithmSuite();
if (algorithmSuite != null) {
AlgorithmSuiteValidator algorithmSuiteValidator = new
AlgorithmSuiteValidator(algorithmSuite);
if (principal instanceof WSDerivedKeyTokenPrincipal) {
algorithmSuiteValidator.checkDerivedKeyAlgorithm(
((WSDerivedKeyTokenPrincipal)principal).getAlgorithm()
);
algorithmSuiteValidator.checkEncryptionDerivedKeyLength(
((WSDerivedKeyTokenPrincipal)principal).getLength()
);
}
algorithmSuiteValidator.checkSymmetricKeyLength(symmetricKey.getEncoded().length);
algorithmSuiteValidator.checkSymmetricEncryptionAlgorithm(symEncAlgo);
}
return
decryptEncryptedData(
doc, dataRefURI, encryptedDataElement, symmetricKey, symEncAlgo, data
);
}
/**
* Check for BSP compliance
* @param keyInfoElement The KeyInfo element child
* @param encAlgo The encryption algorithm
* @throws WSSecurityException
*/
private static void checkBSPCompliance(
Element keyInfoElement,
String encAlgo,
BSPEnforcer bspEnforcer
) throws WSSecurityException {
// We can only have one token reference
int result = 0;
Node node = keyInfoElement.getFirstChild();
Element child = null;
while (node != null) {
if (Node.ELEMENT_NODE == node.getNodeType()) {
result++;
child = (Element)node;
}
node = node.getNextSibling();
}
if (result != 1) {
bspEnforcer.handleBSPRule(BSPRule.R5424);
}
if (child == null || !WSConstants.WSSE_NS.equals(child.getNamespaceURI()) ||
!SecurityTokenReference.SECURITY_TOKEN_REFERENCE.equals(child.getLocalName())) {
bspEnforcer.handleBSPRule(BSPRule.R5426);
}
// EncryptionAlgorithm cannot be null
if (encAlgo == null) {
bspEnforcer.handleBSPRule(BSPRule.R5601);
}
// EncryptionAlgorithm must be 3DES, or AES128, or AES256
if (!WSConstants.TRIPLE_DES.equals(encAlgo)
&& !WSConstants.AES_128.equals(encAlgo)
&& !WSConstants.AES_128_GCM.equals(encAlgo)
&& !WSConstants.AES_256.equals(encAlgo)
&& !WSConstants.AES_256_GCM.equals(encAlgo)) {
bspEnforcer.handleBSPRule(BSPRule.R5620);
}
}
/**
* Look up the encrypted data. First try Id="someURI". If no such Id then try
* wsu:Id="someURI".
*
* @param doc The document in which to find EncryptedData
* @param wsDocInfo The WSDocInfo object to use
* @param dataRefURI The URI of EncryptedData
* @return The EncryptedData element
* @throws WSSecurityException if the EncryptedData element referenced by dataRefURI is
* not found
*/
public static Element
findEncryptedDataElement(
Document doc,
WSDocInfo wsDocInfo,
String dataRefURI
) throws WSSecurityException {
CallbackLookup callbackLookup = wsDocInfo.getCallbackLookup();
if (callbackLookup == null) {
callbackLookup = new DOMCallbackLookup(doc);
}
Element encryptedDataElement =
callbackLookup.getElement(dataRefURI, null, true);
if (encryptedDataElement == null) {
throw new WSSecurityException(
WSSecurityException.ErrorCode.INVALID_SECURITY, "dataRef", dataRefURI);
}
if (encryptedDataElement.getLocalName().equals(WSConstants.ENCRYPTED_HEADER)
&& encryptedDataElement.getNamespaceURI().equals(WSConstants.WSSE11_NS)) {
Node child = encryptedDataElement.getFirstChild();
while (child != null && child.getNodeType() != Node.ELEMENT_NODE) {
child = child.getNextSibling();
}
return (Element)child;
}
return encryptedDataElement;
}
/**
* Decrypt the EncryptedData argument using a SecretKey.
* @param doc The (document) owner of EncryptedData
* @param dataRefURI The URI of EncryptedData
* @param encData The EncryptedData element
* @param symmetricKey The SecretKey with which to decrypt EncryptedData
* @param symEncAlgo The symmetric encryption algorithm to use
* @throws WSSecurityException
*/
public static WSDataRef
decryptEncryptedData(
Document doc,
String dataRefURI,
Element encData,
SecretKey symmetricKey,
String symEncAlgo,
RequestData requestData
) throws WSSecurityException {
WSDataRef dataRef = new WSDataRef();
dataRef.setWsuId(dataRefURI);
dataRef.setAlgorithm(symEncAlgo);
String typeStr = encData.getAttribute("Type");
if (typeStr != null &&
(WSConstants.SWA_ATTACHMENT_ENCRYPTED_DATA_TYPE_CONTENT_ONLY.equals(typeStr) ||
WSConstants.SWA_ATTACHMENT_ENCRYPTED_DATA_TYPE_COMPLETE.equals(typeStr))) {
try {
Element cipherData = WSSecurityUtil.getDirectChildElement(encData, "CipherData", WSConstants.ENC_NS);
if (cipherData == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
}
Element cipherReference = WSSecurityUtil.getDirectChildElement(cipherData, "CipherReference", WSConstants.ENC_NS);
if (cipherReference == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
}
String uri = cipherReference.getAttributeNS(null, "URI");
if (uri == null || uri.length() < 5) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
}
if (!uri.startsWith("cid:")) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
}
dataRef.setWsuId(uri);
dataRef.setAttachment(true);
CallbackHandler attachmentCallbackHandler = requestData.getAttachmentCallbackHandler();
if (attachmentCallbackHandler == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
}
final String attachmentId = uri.substring(4);
AttachmentRequestCallback attachmentRequestCallback = new AttachmentRequestCallback();
attachmentRequestCallback.setAttachmentId(attachmentId);
attachmentCallbackHandler.handle(new Callback[]{attachmentRequestCallback});
List<Attachment> attachments = attachmentRequestCallback.getAttachments();
if (attachments == null || attachments.isEmpty() || !attachmentId.equals(attachments.get(0).getId())) {
throw new WSSecurityException(
WSSecurityException.ErrorCode.INVALID_SECURITY,
"empty", "Attachment not found"
);
}
Attachment attachment = attachments.get(0);
final String encAlgo = X509Util.getEncAlgo(encData);
final String jceAlgorithm =
JCEMapper.translateURItoJCEID(encAlgo);
final Cipher cipher = Cipher.getInstance(jceAlgorithm);
InputStream attachmentInputStream =
AttachmentUtils.setupAttachmentDecryptionStream(
encAlgo, cipher, symmetricKey, attachment.getSourceStream());
Attachment resultAttachment = new Attachment();
resultAttachment.setId(attachment.getId());
resultAttachment.setMimeType(encData.getAttributeNS(null, "MimeType"));
resultAttachment.setSourceStream(attachmentInputStream);
resultAttachment.addHeaders(attachment.getHeaders());
if (WSConstants.SWA_ATTACHMENT_ENCRYPTED_DATA_TYPE_COMPLETE.equals(typeStr)) {
AttachmentUtils.readAndReplaceEncryptedAttachmentHeaders(
resultAttachment.getHeaders(), attachmentInputStream);
}
AttachmentResultCallback attachmentResultCallback = new AttachmentResultCallback();
attachmentResultCallback.setAttachment(resultAttachment);
attachmentResultCallback.setAttachmentId(resultAttachment.getId());
attachmentCallbackHandler.handle(new Callback[]{attachmentResultCallback});
} catch (UnsupportedCallbackException e) {
throw new WSSecurityException(
WSSecurityException.ErrorCode.FAILED_CHECK, e);
} catch (IOException e) {
throw new WSSecurityException(
WSSecurityException.ErrorCode.FAILED_CHECK, e);
} catch (NoSuchAlgorithmException e) {
throw new WSSecurityException(
WSSecurityException.ErrorCode.FAILED_CHECK, e);
} catch (NoSuchPaddingException e) {
throw new WSSecurityException(
WSSecurityException.ErrorCode.FAILED_CHECK, e);
}
dataRef.setContent(true);
// Remove this EncryptedData from the security header to avoid processing it again
encData.getParentNode().removeChild(encData);
return dataRef;
}
boolean content = X509Util.isContent(encData);
dataRef.setContent(content);
Node parent = encData.getParentNode();
Node previousSibling = encData.getPreviousSibling();
if (content) {
encData = (Element) encData.getParentNode();
parent = encData.getParentNode();
}
XMLCipher xmlCipher = null;
try {
xmlCipher = XMLCipher.getInstance(symEncAlgo);
xmlCipher.setSecureValidation(true);
xmlCipher.init(XMLCipher.DECRYPT_MODE, symmetricKey);
} catch (XMLEncryptionException ex) {
throw new WSSecurityException(
WSSecurityException.ErrorCode.UNSUPPORTED_ALGORITHM, ex
);
}
try {
xmlCipher.doFinal(doc, encData, content);
} catch (Exception ex) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK, ex);
}
if (parent.getLocalName().equals(WSConstants.ENCRYPTED_HEADER)
&& parent.getNamespaceURI().equals(WSConstants.WSSE11_NS)
|| parent.getLocalName().equals(WSConstants.ENCRYPED_ASSERTION_LN)
&& parent.getNamespaceURI().equals(WSConstants.SAML2_NS)) {
Node decryptedHeader = parent.getFirstChild();
Node soapHeader = parent.getParentNode();
soapHeader.replaceChild(decryptedHeader, parent);
dataRef.setProtectedElement((Element)decryptedHeader);
dataRef.setXpath(getXPath(decryptedHeader));
} else if (content) {
dataRef.setProtectedElement(encData);
dataRef.setXpath(getXPath(encData));
} else {
Node decryptedNode;
if (previousSibling == null) {
decryptedNode = parent.getFirstChild();
} else {
decryptedNode = previousSibling.getNextSibling();
}
if (decryptedNode != null && Node.ELEMENT_NODE == decryptedNode.getNodeType()) {
dataRef.setProtectedElement((Element)decryptedNode);
}
dataRef.setXpath(getXPath(decryptedNode));
}
return dataRef;
}
public String getId() {
return null;
}
/**
* @param decryptedNode the decrypted node
* @return a fully built xpath
* (eg. "/soapenv:Envelope/soapenv:Body/ns:decryptedElement")
* if the decryptedNode is an Element or an Attr node and is not detached
* from the document. <code>null</code> otherwise
*/
public static String getXPath(Node decryptedNode) {
if (decryptedNode == null) {
return null;
}
String result = "";
if (Node.ELEMENT_NODE == decryptedNode.getNodeType()) {
result = decryptedNode.getNodeName();
result = prependFullPath(result, decryptedNode.getParentNode());
} else if (Node.ATTRIBUTE_NODE == decryptedNode.getNodeType()) {
result = "@" + decryptedNode.getNodeName();
result = prependFullPath(result, ((Attr)decryptedNode).getOwnerElement());
} else {
return null;
}
return result;
}
/**
* Recursively build an absolute xpath (starting with the root "/")
*
* @param xpath the xpath expression built so far
* @param node the current node whose name is to be prepended
* @return a fully built xpath
*/
private static String prependFullPath(String xpath, Node node) {
if (node == null) {
// probably a detached node... not really useful
return null;
} else if (Node.ELEMENT_NODE == node.getNodeType()) {
xpath = node.getNodeName() + "/" + xpath;
return prependFullPath(xpath, node.getParentNode());
} else if (Node.DOCUMENT_NODE == node.getNodeType()) {
return "/" + xpath;
} else {
return prependFullPath(xpath, node.getParentNode());
}
}
}
| ws-security-dom/src/main/java/org/apache/wss4j/dom/processor/ReferenceListProcessor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.wss4j.dom.processor;
import java.io.IOException;
import java.io.InputStream;
import java.security.NoSuchAlgorithmException;
import java.security.Principal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.UnsupportedCallbackException;
import org.apache.wss4j.common.ext.Attachment;
import org.apache.wss4j.common.ext.AttachmentRequestCallback;
import org.apache.wss4j.common.ext.AttachmentResultCallback;
import org.apache.wss4j.common.util.AttachmentUtils;
import org.apache.xml.security.algorithms.JCEMapper;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.apache.wss4j.common.bsp.BSPRule;
import org.apache.wss4j.common.crypto.AlgorithmSuite;
import org.apache.wss4j.common.crypto.AlgorithmSuiteValidator;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.common.principal.WSDerivedKeyTokenPrincipal;
import org.apache.wss4j.common.util.KeyUtils;
import org.apache.wss4j.dom.WSConstants;
import org.apache.wss4j.dom.WSDataRef;
import org.apache.wss4j.dom.WSDocInfo;
import org.apache.wss4j.dom.WSSecurityEngineResult;
import org.apache.wss4j.dom.bsp.BSPEnforcer;
import org.apache.wss4j.dom.handler.RequestData;
import org.apache.wss4j.dom.message.CallbackLookup;
import org.apache.wss4j.dom.message.DOMCallbackLookup;
import org.apache.wss4j.dom.message.token.SecurityTokenReference;
import org.apache.wss4j.dom.str.STRParser;
import org.apache.wss4j.dom.str.SecurityTokenRefSTRParser;
import org.apache.wss4j.dom.util.WSSecurityUtil;
import org.apache.xml.security.encryption.XMLCipher;
import org.apache.xml.security.encryption.XMLEncryptionException;
public class ReferenceListProcessor implements Processor {
private static final org.slf4j.Logger LOG =
org.slf4j.LoggerFactory.getLogger(ReferenceListProcessor.class);
public List<WSSecurityEngineResult> handleToken(
Element elem,
RequestData data,
WSDocInfo wsDocInfo
) throws WSSecurityException {
if (LOG.isDebugEnabled()) {
LOG.debug("Found reference list element");
}
List<WSDataRef> dataRefs = handleReferenceList(elem, data, wsDocInfo);
WSSecurityEngineResult result =
new WSSecurityEngineResult(WSConstants.ENCR, dataRefs);
result.put(WSSecurityEngineResult.TAG_ID, elem.getAttributeNS(null, "Id"));
wsDocInfo.addTokenElement(elem);
wsDocInfo.addResult(result);
return java.util.Collections.singletonList(result);
}
/**
* Dereferences and decodes encrypted data elements.
*
* @param elem contains the <code>ReferenceList</code> to the encrypted
* data elements
*/
private List<WSDataRef> handleReferenceList(
Element elem,
RequestData data,
WSDocInfo wsDocInfo
) throws WSSecurityException {
List<WSDataRef> dataRefs = new ArrayList<WSDataRef>();
//find out if there's an EncryptedKey in the doc (AsymmetricBinding)
Element wsseHeaderElement = wsDocInfo.getSecurityHeader();
boolean asymBinding = WSSecurityUtil.getDirectChildElement(
wsseHeaderElement, WSConstants.ENC_KEY_LN, WSConstants.ENC_NS) != null;
for (Node node = elem.getFirstChild();
node != null;
node = node.getNextSibling()
) {
if (Node.ELEMENT_NODE == node.getNodeType()
&& WSConstants.ENC_NS.equals(node.getNamespaceURI())
&& "DataReference".equals(node.getLocalName())) {
String dataRefURI = ((Element) node).getAttributeNS(null, "URI");
if (dataRefURI.charAt(0) == '#') {
dataRefURI = dataRefURI.substring(1);
}
if (wsDocInfo.getResultByTag(WSConstants.ENCR, dataRefURI) == null) {
WSDataRef dataRef =
decryptDataRefEmbedded(
elem.getOwnerDocument(), dataRefURI, data, wsDocInfo, asymBinding);
dataRefs.add(dataRef);
}
}
}
return dataRefs;
}
/**
 * Decrypt an (embedded) EncryptedData element referenced by dataRefURI.
 *
 * Resolves the EncryptedData element in the document, recovers the symmetric
 * decryption key from its KeyInfo (via a wsse:SecurityTokenReference if
 * present, otherwise via a shared key obtained through the CallbackHandler),
 * validates key and algorithm against the configured AlgorithmSuite, and
 * finally decrypts the referenced data in place.
 *
 * @param doc the Document containing the EncryptedData element
 * @param dataRefURI the URI of the DataReference pointing at the EncryptedData
 * @param data the per-request configuration (handlers, BSP enforcer, policy)
 * @param wsDocInfo security-header processing state used for token lookups
 * @param asymBinding whether an asymmetric binding is in effect; combined with
 *        the requireSignedEncryptedDataElements policy it triggers a check
 *        that the EncryptedData element was also signed
 * @return a WSDataRef describing the decrypted element
 * @throws WSSecurityException if the reference cannot be resolved, the key
 *         cannot be obtained, or validation/decryption fails
 */
private WSDataRef decryptDataRefEmbedded(
    Document doc,
    String dataRefURI,
    RequestData data,
    WSDocInfo wsDocInfo,
    boolean asymBinding
) throws WSSecurityException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Found data reference: " + dataRefURI);
    }
    //
    // Find the encrypted data element referenced by dataRefURI
    //
    Element encryptedDataElement = findEncryptedDataElement(doc, wsDocInfo, dataRefURI);

    // Policy may require that anything encrypted was also covered by a signature
    if (encryptedDataElement != null && asymBinding && data.isRequireSignedEncryptedDataElements()) {
        WSSecurityUtil.verifySignedElement(encryptedDataElement, doc, wsDocInfo.getSecurityHeader());
    }
    //
    // Prepare the SecretKey object to decrypt EncryptedData
    //
    String symEncAlgo = X509Util.getEncAlgo(encryptedDataElement);

    Element keyInfoElement =
        WSSecurityUtil.getDirectChildElement(
            encryptedDataElement, "KeyInfo", WSConstants.SIG_NS
        );
    // KeyInfo cannot be null
    if (keyInfoElement == null) {
        throw new WSSecurityException(WSSecurityException.ErrorCode.INVALID_SECURITY, "noKeyinfo");
    }
    // Check BSP compliance
    checkBSPCompliance(keyInfoElement, symEncAlgo, data.getBSPEnforcer());

    //
    // Try to get a security reference token, if none found try to get a
    // shared key using a KeyName.
    //
    Element secRefToken =
        WSSecurityUtil.getDirectChildElement(
            keyInfoElement, "SecurityTokenReference", WSConstants.WSSE_NS
        );
    SecretKey symmetricKey = null;
    Principal principal = null;
    if (secRefToken == null) {
        // No STR: resolve a shared key (e.g. via KeyName) through the callback handler
        symmetricKey = X509Util.getSharedKey(keyInfoElement, symEncAlgo, data.getCallbackHandler());
    } else {
        // Parse the SecurityTokenReference to obtain the raw key bytes and principal
        STRParser strParser = new SecurityTokenRefSTRParser();
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put(SecurityTokenRefSTRParser.SIGNATURE_METHOD, symEncAlgo);
        strParser.parseSecurityTokenReference(
            secRefToken, data,
            wsDocInfo, parameters
        );
        byte[] secretKey = strParser.getSecretKey();
        principal = strParser.getPrincipal();
        symmetricKey = KeyUtils.prepareSecretKey(symEncAlgo, secretKey);
    }

    // Check for compliance against the defined AlgorithmSuite
    AlgorithmSuite algorithmSuite = data.getAlgorithmSuite();
    if (algorithmSuite != null) {
        AlgorithmSuiteValidator algorithmSuiteValidator = new
            AlgorithmSuiteValidator(algorithmSuite);

        // Derived keys carry their own algorithm/length constraints
        if (principal instanceof WSDerivedKeyTokenPrincipal) {
            algorithmSuiteValidator.checkDerivedKeyAlgorithm(
                ((WSDerivedKeyTokenPrincipal)principal).getAlgorithm()
            );
            algorithmSuiteValidator.checkEncryptionDerivedKeyLength(
                ((WSDerivedKeyTokenPrincipal)principal).getLength()
            );
        }

        algorithmSuiteValidator.checkSymmetricKeyLength(symmetricKey.getEncoded().length);
        algorithmSuiteValidator.checkSymmetricEncryptionAlgorithm(symEncAlgo);
    }

    return
        decryptEncryptedData(
            doc, dataRefURI, encryptedDataElement, symmetricKey, symEncAlgo, data
        );
}
/**
 * Enforce Basic Security Profile rules on the KeyInfo of an EncryptedData.
 *
 * Rules checked: R5424 (KeyInfo has exactly one child element), R5426 (that
 * child is a wsse:SecurityTokenReference), R5601 (an encryption algorithm is
 * present) and R5620 (the algorithm is 3DES, AES-128(-GCM) or AES-256(-GCM)).
 *
 * @param keyInfoElement the KeyInfo element to validate
 * @param encAlgo the encryption algorithm URI (may be null)
 * @param bspEnforcer receives any rule violations
 * @throws WSSecurityException if the enforcer treats a violation as fatal
 */
private static void checkBSPCompliance(
    Element keyInfoElement,
    String encAlgo,
    BSPEnforcer bspEnforcer
) throws WSSecurityException {
    // R5424: KeyInfo must contain exactly one child element.
    // Scan all children, remembering the last element seen.
    int elementCount = 0;
    Element lastElementChild = null;
    for (Node n = keyInfoElement.getFirstChild(); n != null; n = n.getNextSibling()) {
        if (n.getNodeType() == Node.ELEMENT_NODE) {
            elementCount++;
            lastElementChild = (Element) n;
        }
    }
    if (elementCount != 1) {
        bspEnforcer.handleBSPRule(BSPRule.R5424);
    }

    // R5426: the child element must be a wsse:SecurityTokenReference
    boolean childIsStr =
        lastElementChild != null
        && WSConstants.WSSE_NS.equals(lastElementChild.getNamespaceURI())
        && SecurityTokenReference.SECURITY_TOKEN_REFERENCE.equals(lastElementChild.getLocalName());
    if (!childIsStr) {
        bspEnforcer.handleBSPRule(BSPRule.R5426);
    }

    // R5601: EncryptionAlgorithm cannot be null
    if (encAlgo == null) {
        bspEnforcer.handleBSPRule(BSPRule.R5601);
    }
    // R5620: EncryptionAlgorithm must be 3DES, AES128(-GCM) or AES256(-GCM)
    if (!WSConstants.TRIPLE_DES.equals(encAlgo)
        && !WSConstants.AES_128.equals(encAlgo)
        && !WSConstants.AES_128_GCM.equals(encAlgo)
        && !WSConstants.AES_256.equals(encAlgo)
        && !WSConstants.AES_256_GCM.equals(encAlgo)) {
        bspEnforcer.handleBSPRule(BSPRule.R5620);
    }
}
/**
 * Look up the encrypted data. First try Id="someURI". If no such Id then try
 * wsu:Id="someURI". If the match is a wsse11:EncryptedHeader wrapper, its
 * first element child is returned instead.
 *
 * @param doc The document in which to find EncryptedData
 * @param wsDocInfo The WSDocInfo object to use
 * @param dataRefURI The URI of EncryptedData
 * @return The EncryptedData element
 * @throws WSSecurityException if the EncryptedData element referenced by
 *         dataRefURI is not found
 */
public static Element
findEncryptedDataElement(
    Document doc,
    WSDocInfo wsDocInfo,
    String dataRefURI
) throws WSSecurityException {
    // Prefer a caller-supplied lookup; otherwise fall back to plain DOM search
    CallbackLookup lookup = wsDocInfo.getCallbackLookup();
    if (lookup == null) {
        lookup = new DOMCallbackLookup(doc);
    }

    Element encryptedDataElement = lookup.getElement(dataRefURI, null, true);
    if (encryptedDataElement == null) {
        throw new WSSecurityException(
            WSSecurityException.ErrorCode.INVALID_SECURITY, "dataRef", dataRefURI);
    }

    boolean isEncryptedHeader =
        encryptedDataElement.getLocalName().equals(WSConstants.ENCRYPTED_HEADER)
        && encryptedDataElement.getNamespaceURI().equals(WSConstants.WSSE11_NS);
    if (!isEncryptedHeader) {
        return encryptedDataElement;
    }

    // Unwrap the EncryptedHeader: return its first element child (null if none)
    for (Node n = encryptedDataElement.getFirstChild(); n != null; n = n.getNextSibling()) {
        if (n.getNodeType() == Node.ELEMENT_NODE) {
            return (Element) n;
        }
    }
    return null;
}
/**
 * Decrypt the EncryptedData argument using a SecretKey.
 *
 * Handles two cases: (1) SwA attachment encryption, where the CipherReference
 * points at a "cid:" attachment that is decrypted via the attachment callback
 * handler; (2) ordinary XML encryption, where the element (or its content) is
 * decrypted in place with Apache Santuario's XMLCipher.
 *
 * @param doc The (document) owner of EncryptedData
 * @param dataRefURI The URI of EncryptedData
 * @param encData The EncryptedData element
 * @param symmetricKey The SecretKey with which to decrypt EncryptedData
 * @param symEncAlgo The symmetric encryption algorithm to use
 * @param requestData per-request configuration, supplies the attachment
 *        callback handler for the SwA case
 * @return a WSDataRef describing the decrypted element/attachment
 * @throws WSSecurityException on any resolution, cipher or callback failure
 */
public static WSDataRef
decryptEncryptedData(
    Document doc,
    String dataRefURI,
    Element encData,
    SecretKey symmetricKey,
    String symEncAlgo,
    RequestData requestData
) throws WSSecurityException {

    WSDataRef dataRef = new WSDataRef();
    dataRef.setWsuId(dataRefURI);
    dataRef.setAlgorithm(symEncAlgo);

    // SwA (SOAP with Attachments) case: the Type attribute marks the
    // EncryptedData as covering an attachment rather than XML content.
    String typeStr = encData.getAttribute("Type");
    if (typeStr != null &&
        (WSConstants.SWA_ATTACHMENT_ENCRYPTED_DATA_TYPE_CONTENT_ONLY.equals(typeStr) ||
         WSConstants.SWA_ATTACHMENT_ENCRYPTED_DATA_TYPE_COMPLETE.equals(typeStr))) {

        try {
            // Locate CipherData/CipherReference; its URI must be a cid: link
            Element cipherData = WSSecurityUtil.getDirectChildElement(encData, "CipherData", WSConstants.ENC_NS);
            if (cipherData == null) {
                throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
            }
            Element cipherReference = WSSecurityUtil.getDirectChildElement(cipherData, "CipherReference", WSConstants.ENC_NS);
            if (cipherReference == null) {
                throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
            }
            String uri = cipherReference.getAttributeNS(null, "URI");
            // Must at least hold "cid:" plus an id
            if (uri == null || uri.length() < 5) {
                throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
            }
            if (!uri.startsWith("cid:")) {
                throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
            }
            dataRef.setWsuId(uri);
            dataRef.setAttachment(true);

            CallbackHandler attachmentCallbackHandler = requestData.getAttachmentCallbackHandler();
            if (attachmentCallbackHandler == null) {
                throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK);
            }

            // Strip the "cid:" prefix and ask the handler for the attachment
            final String attachmentId = uri.substring(4);

            AttachmentRequestCallback attachmentRequestCallback = new AttachmentRequestCallback();
            attachmentRequestCallback.setAttachmentId(attachmentId);

            attachmentCallbackHandler.handle(new Callback[]{attachmentRequestCallback});
            List<Attachment> attachments = attachmentRequestCallback.getAttachments();
            if (attachments == null || attachments.isEmpty() || !attachmentId.equals(attachments.get(0).getId())) {
                throw new WSSecurityException(
                    WSSecurityException.ErrorCode.INVALID_SECURITY,
                    "empty", "Attachment not found"
                );
            }
            Attachment attachment = attachments.get(0);

            // Set up a streaming decryption wrapper around the attachment source
            final String encAlgo = X509Util.getEncAlgo(encData);
            final String jceAlgorithm =
                JCEMapper.translateURItoJCEID(encAlgo);
            final Cipher cipher = Cipher.getInstance(jceAlgorithm);

            InputStream attachmentInputStream =
                AttachmentUtils.setupAttachmentDecryptionStream(
                    encAlgo, cipher, symmetricKey, attachment.getSourceStream());

            Attachment resultAttachment = new Attachment();
            resultAttachment.setId(attachment.getId());
            resultAttachment.setMimeType(encData.getAttributeNS(null, "MimeType"));
            resultAttachment.setSourceStream(attachmentInputStream);
            resultAttachment.addHeaders(attachment.getHeaders());

            // "Complete" type means MIME headers were encrypted too and must
            // be read back out of the decrypted stream
            if (WSConstants.SWA_ATTACHMENT_ENCRYPTED_DATA_TYPE_COMPLETE.equals(typeStr)) {
                AttachmentUtils.readAndReplaceEncryptedAttachmentHeaders(
                    resultAttachment.getHeaders(), attachmentInputStream);
            }

            AttachmentResultCallback attachmentResultCallback = new AttachmentResultCallback();
            attachmentResultCallback.setAttachment(resultAttachment);
            attachmentResultCallback.setAttachmentId(resultAttachment.getId());
            attachmentCallbackHandler.handle(new Callback[]{attachmentResultCallback});

        } catch (UnsupportedCallbackException e) {
            throw new WSSecurityException(
                WSSecurityException.ErrorCode.FAILED_CHECK, e);
        } catch (IOException e) {
            throw new WSSecurityException(
                WSSecurityException.ErrorCode.FAILED_CHECK, e);
        } catch (NoSuchAlgorithmException e) {
            throw new WSSecurityException(
                WSSecurityException.ErrorCode.FAILED_CHECK, e);
        } catch (NoSuchPaddingException e) {
            throw new WSSecurityException(
                WSSecurityException.ErrorCode.FAILED_CHECK, e);
        }

        dataRef.setContent(true);
        // Remove this EncryptedData from the security header to avoid processing it again
        encData.getParentNode().removeChild(encData);

        return dataRef;
    }

    // Ordinary XML encryption case
    boolean content = X509Util.isContent(encData);
    dataRef.setContent(content);

    // Remember position before decryption mutates the tree; for content
    // encryption the parent element itself is what gets decrypted/replaced.
    Node parent = encData.getParentNode();
    Node previousSibling = encData.getPreviousSibling();
    if (content) {
        encData = (Element) encData.getParentNode();
        parent = encData.getParentNode();
    }
    XMLCipher xmlCipher = null;
    try {
        xmlCipher = XMLCipher.getInstance(symEncAlgo);
        xmlCipher.setSecureValidation(true);
        xmlCipher.init(XMLCipher.DECRYPT_MODE, symmetricKey);
    } catch (XMLEncryptionException ex) {
        throw new WSSecurityException(
            WSSecurityException.ErrorCode.UNSUPPORTED_ALGORITHM, ex
        );
    }

    try {
        // Decrypts in place, replacing the EncryptedData in the document
        xmlCipher.doFinal(doc, encData, content);
    } catch (Exception ex) {
        throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_CHECK, ex);
    }

    if (parent.getLocalName().equals(WSConstants.ENCRYPTED_HEADER)
        && parent.getNamespaceURI().equals(WSConstants.WSSE11_NS)) {

        // Replace the wsse11:EncryptedHeader wrapper with the decrypted header
        Node decryptedHeader = parent.getFirstChild();
        Node soapHeader = parent.getParentNode();
        soapHeader.replaceChild(decryptedHeader, parent);

        dataRef.setProtectedElement((Element)decryptedHeader);
        dataRef.setXpath(getXPath(decryptedHeader));
    } else if (content) {
        dataRef.setProtectedElement(encData);
        dataRef.setXpath(getXPath(encData));
    } else {
        // Element encryption: locate the decrypted node by its old position
        Node decryptedNode;
        if (previousSibling == null) {
            decryptedNode = parent.getFirstChild();
        } else {
            decryptedNode = previousSibling.getNextSibling();
        }
        if (decryptedNode != null && Node.ELEMENT_NODE == decryptedNode.getNodeType()) {
            dataRef.setProtectedElement((Element)decryptedNode);
        }
        dataRef.setXpath(getXPath(decryptedNode));
    }

    return dataRef;
}
/**
 * @return the id of the token processed by this processor; this processor
 *         does not track a token id, so this always returns null
 */
public String getId() {
    return null;
}
/**
 * @param decryptedNode the decrypted node
 * @return a fully built xpath
 * (eg. "/soapenv:Envelope/soapenv:Body/ns:decryptedElement")
 * if the decryptedNode is an Element or an Attr node and is not detached
 * from the document. <code>null</code> otherwise
 */
public static String getXPath(Node decryptedNode) {
    if (decryptedNode == null) {
        return null;
    }
    switch (decryptedNode.getNodeType()) {
        case Node.ELEMENT_NODE:
            // Element: start from its own qualified name and walk upwards
            return prependFullPath(decryptedNode.getNodeName(), decryptedNode.getParentNode());
        case Node.ATTRIBUTE_NODE:
            // Attribute: "@name", anchored at its owner element
            return prependFullPath(
                "@" + decryptedNode.getNodeName(),
                ((Attr) decryptedNode).getOwnerElement());
        default:
            // Only elements and attributes have a useful xpath here
            return null;
    }
}
/**
 * Build an absolute xpath (starting with the root "/") by walking up the
 * ancestor chain and prepending each element's name.
 *
 * @param xpath the xpath expression built so far
 * @param node the current node whose name is to be prepended
 * @return a fully built xpath, or null if the chain never reaches the
 *         document node (i.e. the node is detached)
 */
private static String prependFullPath(String xpath, Node node) {
    // Iterative form of the ancestor walk: elements contribute a path
    // segment, the document node terminates with the leading "/".
    for (Node current = node; current != null; current = current.getParentNode()) {
        short type = current.getNodeType();
        if (type == Node.DOCUMENT_NODE) {
            return "/" + xpath;
        }
        if (type == Node.ELEMENT_NODE) {
            xpath = current.getNodeName() + "/" + xpath;
        }
    }
    // probably a detached node... not really useful
    return null;
}
}
| [CXF-497] - Remove the EncryptedAssertion wrapper after decrypting
git-svn-id: 10bc45916fe30ae642aa5037c9a4b05727bba413@1589701 13f79535-47bb-0310-9956-ffa450edef68
| ws-security-dom/src/main/java/org/apache/wss4j/dom/processor/ReferenceListProcessor.java | [CXF-497] - Remove the EncryptedAssertion wrapper after decrypting | <ide><path>s-security-dom/src/main/java/org/apache/wss4j/dom/processor/ReferenceListProcessor.java
<ide> }
<ide>
<ide> if (parent.getLocalName().equals(WSConstants.ENCRYPTED_HEADER)
<del> && parent.getNamespaceURI().equals(WSConstants.WSSE11_NS)) {
<add> && parent.getNamespaceURI().equals(WSConstants.WSSE11_NS)
<add> || parent.getLocalName().equals(WSConstants.ENCRYPED_ASSERTION_LN)
<add> && parent.getNamespaceURI().equals(WSConstants.SAML2_NS)) {
<ide>
<ide> Node decryptedHeader = parent.getFirstChild();
<ide> Node soapHeader = parent.getParentNode(); |
|
Java | apache-2.0 | 84ac7bf322c61a768d0eda97e81a0a22fe0919af | 0 | kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.spark;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.apache.hadoop.mapred.JobConf;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.UDFContext;
import org.apache.spark.HashPartitioner;
import org.apache.spark.Partitioner;
import org.apache.spark.rdd.RDD;
import scala.Product2;
import scala.Tuple2;
import scala.collection.JavaConversions;
import scala.collection.Seq;
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
/**
 * Static helpers shared by the Spark execution engine: Scala ClassTag
 * construction, JobConf setup, predecessor sanity checks and resolution of
 * parallelism and partitioners.
 */
public class SparkUtil {

    /** Builds a Scala {@code ClassTag} for the given Java class. */
    public static <T> ClassTag<T> getManifest(Class<T> clazz) {
        return ClassTag$.MODULE$.apply(clazz);
    }

    /** ClassTag for {@code scala.Tuple2}; cast through Object to satisfy generics. */
    @SuppressWarnings("unchecked")
    public static <K, V> ClassTag<Tuple2<K, V>> getTuple2Manifest() {
        return (ClassTag<Tuple2<K, V>>) (Object) getManifest(Tuple2.class);
    }

    /** ClassTag for {@code scala.Product2}; cast through Object to satisfy generics. */
    @SuppressWarnings("unchecked")
    public static <K, V> ClassTag<Product2<K, V>> getProduct2Manifest() {
        return (ClassTag<Product2<K, V>>) (Object) getManifest(Product2.class);
    }

    /**
     * Creates a JobConf seeded with the Pig properties plus everything the
     * executor side needs to rebuild its context: the serialized PigContext,
     * the package import list and the UDF context.
     *
     * @param pigContext the context to serialize into the configuration
     * @return a fully populated JobConf
     * @throws IOException if serialization fails
     */
    public static JobConf newJobConf(PigContext pigContext) throws IOException {
        JobConf conf = new JobConf(
                ConfigurationUtil.toConfiguration(pigContext.getProperties()));
        // Serialize the PigContext so it's available in Executor thread.
        conf.set("pig.pigContext", ObjectSerializer.serialize(pigContext));
        // Serialize the thread local variable inside PigContext separately
        conf.set("udf.import.list",
                ObjectSerializer.serialize(PigContext.getPackageImportList()));
        UDFContext.getUDFContext().serialize(conf);
        // Supply a (random) application attempt id, which some components expect
        conf.set(MRConfiguration.JOB_APPLICATION_ATTEMPT_ID,
                Integer.toString(new Random().nextInt()));
        return conf;
    }

    /** Wraps a {@code java.util.List} as a Scala {@code Seq}. */
    public static <T> Seq<T> toScalaSeq(List<T> list) {
        return JavaConversions.asScalaBuffer(list);
    }

    /** Fails fast unless the operator has exactly {@code size} predecessors. */
    public static void assertPredecessorSize(List<RDD<Tuple>> predecessors,
            PhysicalOperator physicalOperator, int size) {
        if (predecessors.size() == size) {
            return;
        }
        throw new RuntimeException("Should have " + size
                + " predecessors for " + physicalOperator.getClass()
                + ". Got : " + predecessors.size());
    }

    /** Fails fast unless the operator has more than {@code size} predecessors. */
    public static void assertPredecessorSizeGreaterThan(
            List<RDD<Tuple>> predecessors, PhysicalOperator physicalOperator,
            int size) {
        if (predecessors.size() > size) {
            return;
        }
        throw new RuntimeException("Should have greater than" + size
                + " predecessors for " + physicalOperator.getClass()
                + ". Got : " + predecessors.size());
    }

    /**
     * Resolves the parallelism for an operator: the SPARK_REDUCERS
     * environment variable wins, then the operator's requested parallelism,
     * then Spark's default parallelism.
     */
    public static int getParallelism(List<RDD<Tuple>> predecessors,
            PhysicalOperator physicalOperator) {
        String envReducers = System.getenv("SPARK_REDUCERS");
        if (envReducers != null) {
            return Integer.parseInt(envReducers);
        }
        int requested = physicalOperator.getRequestedParallelism();
        // Parallelism wasn't set in Pig, so use whatever Spark thinks is reasonable.
        return requested > 0
                ? requested
                : predecessors.get(0).context().defaultParallelism();
    }

    /** Hash-partitions unless a custom MapReduce partitioner class is named. */
    public static Partitioner getPartitioner(String customPartitioner, int parallelism) {
        return customPartitioner == null
                ? new HashPartitioner(parallelism)
                : new MapReducePartitionerWrapper(customPartitioner, parallelism);
    }
}
| src/org/apache/pig/backend/hadoop/executionengine/spark/SparkUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.spark;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.mapred.JobConf;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.UDFContext;
import org.apache.spark.HashPartitioner;
import org.apache.spark.Partitioner;
import org.apache.spark.rdd.RDD;
import scala.Product2;
import scala.Tuple2;
import scala.collection.JavaConversions;
import scala.collection.Seq;
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
/**
 * Static helpers shared by the Spark execution engine: Scala ClassTag
 * construction, JobConf setup, predecessor sanity checks and resolution of
 * parallelism and partitioners.
 */
public class SparkUtil {

    /** Builds a Scala {@code ClassTag} for the given Java class. */
    public static <T> ClassTag<T> getManifest(Class<T> clazz) {
        return ClassTag$.MODULE$.apply(clazz);
    }

    /** ClassTag for {@code scala.Tuple2}; cast through Object to satisfy generics. */
    @SuppressWarnings("unchecked")
    public static <K, V> ClassTag<Tuple2<K, V>> getTuple2Manifest() {
        return (ClassTag<Tuple2<K, V>>) (Object) getManifest(Tuple2.class);
    }

    /** ClassTag for {@code scala.Product2}; cast through Object to satisfy generics. */
    @SuppressWarnings("unchecked")
    public static <K, V> ClassTag<Product2<K, V>> getProduct2Manifest() {
        return (ClassTag<Product2<K, V>>) (Object) getManifest(Product2.class);
    }

    /**
     * Creates a JobConf seeded with the Pig properties plus everything the
     * executor side needs to rebuild its context.
     *
     * @param pigContext the context to serialize into the configuration
     * @return a fully populated JobConf
     * @throws IOException if serialization fails
     */
    public static JobConf newJobConf(PigContext pigContext) throws IOException {
        JobConf jobConf = new JobConf(
                ConfigurationUtil.toConfiguration(pigContext.getProperties()));
        // Serialize the PigContext so it's available in Executor thread.
        jobConf.set("pig.pigContext", ObjectSerializer.serialize(pigContext));
        // Serialize the thread local variable inside PigContext separately
        jobConf.set("udf.import.list",
                ObjectSerializer.serialize(PigContext.getPackageImportList()));
        UDFContext.getUDFContext().serialize(jobConf);
        return jobConf;
    }

    /** Wraps a {@code java.util.List} as a Scala {@code Seq}. */
    public static <T> Seq<T> toScalaSeq(List<T> list) {
        return JavaConversions.asScalaBuffer(list);
    }

    /** Fails fast unless the operator has exactly {@code size} predecessors. */
    public static void assertPredecessorSize(List<RDD<Tuple>> predecessors,
            PhysicalOperator physicalOperator, int size) {
        if (predecessors.size() != size) {
            throw new RuntimeException("Should have " + size
                    + " predecessors for " + physicalOperator.getClass()
                    + ". Got : " + predecessors.size());
        }
    }

    /** Fails fast unless the operator has more than {@code size} predecessors. */
    public static void assertPredecessorSizeGreaterThan(
            List<RDD<Tuple>> predecessors, PhysicalOperator physicalOperator,
            int size) {
        if (predecessors.size() <= size) {
            throw new RuntimeException("Should have greater than" + size
                    + " predecessors for " + physicalOperator.getClass()
                    + ". Got : " + predecessors.size());
        }
    }

    /**
     * Resolves the parallelism for an operator: the SPARK_REDUCERS
     * environment variable wins, then the operator's requested parallelism,
     * then Spark's default parallelism.
     */
    public static int getParallelism(List<RDD<Tuple>> predecessors,
            PhysicalOperator physicalOperator) {

        String numReducers = System.getenv("SPARK_REDUCERS");
        if (numReducers != null) {
            return Integer.parseInt(numReducers);
        }

        int parallelism = physicalOperator.getRequestedParallelism();
        if (parallelism <= 0) {
            // Parallelism wasn't set in Pig, so set it to whatever Spark thinks
            // is reasonable.
            parallelism = predecessors.get(0).context().defaultParallelism();
        }

        return parallelism;
    }

    /** Hash-partitions unless a custom MapReduce partitioner class is named. */
    public static Partitioner getPartitioner(String customPartitioner, int parallelism) {
        if (customPartitioner == null) {
            return new HashPartitioner(parallelism);
        } else {
            return new MapReducePartitionerWrapper(customPartitioner, parallelism);
        }
    }
}
| PIG-4610: Enable TestOrcStorage unit test in spark mode (Liyun via Xuefu)
git-svn-id: d317905e1b1233abb7022f5914f79c3119e04b87@1686962 13f79535-47bb-0310-9956-ffa450edef68
| src/org/apache/pig/backend/hadoop/executionengine/spark/SparkUtil.java | PIG-4610: Enable TestOrcStorage unit test in spark mode (Liyun via Xuefu) | <ide><path>rc/org/apache/pig/backend/hadoop/executionengine/spark/SparkUtil.java
<ide>
<ide> import java.io.IOException;
<ide> import java.util.List;
<add>import java.util.Random;
<add>import java.util.UUID;
<ide>
<ide> import org.apache.hadoop.mapred.JobConf;
<ide> import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
<add>import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
<ide> import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
<ide> import org.apache.pig.data.Tuple;
<ide> import org.apache.pig.impl.PigContext;
<ide> jobConf.set("udf.import.list",
<ide> ObjectSerializer.serialize(PigContext.getPackageImportList()));
<ide> UDFContext.getUDFContext().serialize(jobConf);
<add> Random rand = new Random();
<add> jobConf.set(MRConfiguration.JOB_APPLICATION_ATTEMPT_ID, Integer.toString(rand.nextInt()));
<ide> return jobConf;
<ide> }
<ide> |
|
JavaScript | mit | 75196c75f27bbfa4d9d55aa0cffb1d3dd8ec4e84 | 0 | heyimalex/reselect-map,heyimalex/reselect-map | import Immutable from 'immutable'
import {
createArraySelector,
createObjectSelector,
createListSelector,
createMapSelector,
} from '../src'
describe('createArraySelector', () => {
  test('basic array selector', () => {
    // Element-wise memoization: the result function only re-runs for
    // elements that were not present in the previous input array.
    const sel = createArraySelector(
      state => state,
      (element) => element * 5
    )
    // First call computes all four elements.
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel.recomputations()).toBe(4)
    // Every value was cached on the previous call: no recomputation.
    expect(sel([2, 2, 2, 2])).toEqual([10, 10, 10, 10])
    expect(sel.recomputations()).toBe(4)
    // Only the single new value (1) is computed once.
    expect(sel([1, 1, 1, 1])).toEqual([5, 5, 5, 5])
    expect(sel.recomputations()).toBe(5)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(6)
    // The cache only covers the previous call, so 3 and 4 recompute.
    expect(sel([3, 4])).toEqual([15, 20])
    expect(sel.recomputations()).toBe(8)
    expect(sel([])).toEqual([])
    expect(sel.recomputations()).toBe(8)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(10)
  })
  test('another argument change', () => {
    // Non-array inputs (mul1, mul2) invalidate the per-element cache
    // whenever they change.
    const sel = createArraySelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2) => element * mul1 * mul2
    )
    let state = {
      numbers: [1, 2, 3, 4],
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    // A new but element-wise-equal array should not trigger recomputation.
    state = Object.assign({}, state, {
      numbers: [1, 2, 3, 4]
    });
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    // Changing a scalar input recomputes every element.
    state = Object.assign({}, state, {
      mul1: 2,
    });
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(8)
    state = Object.assign({}, state, {
      mul1: 1,
      mul2: 2,
    });
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(12)
    // With scalars unchanged, only the one new element (5) is computed.
    state = Object.assign({}, state, {
      numbers: [2, 3, 4, 5]
    });
    expect(sel(state)).toEqual([4, 6, 8, 10])
    expect(sel.recomputations()).toBe(13)
  })
})
describe('createObjectSelector', () => {
  test('basic object selector', () => {
    // Per-value memoization over the object's values; keys map through.
    const sel = createObjectSelector(
      state => state,
      (element) => element * 5
    )
    expect(sel({ a: 1, b: 2 })).toEqual({ a: 5, b: 10 })
    expect(sel.recomputations()).toBe(2)
    const state = { a: 1, b: 2 }
    expect(sel(state)).toEqual({ a: 5, b: 10 })
    expect(sel(state)).toEqual({ a: 5, b: 10 })
    expect(sel.recomputations()).toBe(2)
    // Same values under swapped keys still recompute per (key, value) pair.
    expect(sel({ a: 2, b: 1 })).toEqual({ a: 10, b: 5 })
    expect(sel.recomputations()).toBe(4)
    expect(sel({ a: 1, b: 1 })).toEqual({ a: 5, b: 5 })
    expect(sel.recomputations()).toBe(5)
    // Adding a key only computes the new entry.
    expect(sel({ a: 1, b: 1, c: 1 })).toEqual({ a: 5, b: 5, c: 5 })
    expect(sel.recomputations()).toBe(6)
    // Removing keys requires no recomputation.
    expect(sel({ a: 1 })).toEqual({ a: 5 })
    expect(sel.recomputations()).toBe(6)
  })
  test('another argument change', () => {
    // Scalar inputs invalidate the cached per-key results when they change.
    const sel = createObjectSelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2) => element * mul1 * mul2
    )
    let state = {
      numbers: { a: 1, b: 2 },
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual({ a: 1, b: 2 })
    expect(sel.recomputations()).toBe(2)
    expect(sel(state)).toEqual({ a: 1, b: 2 })
    expect(sel.recomputations()).toBe(2)
    // New but value-equal object: nothing recomputes.
    state = Object.assign({}, state, {
      numbers: { a: 1, b: 2 },
    });
    expect(sel(state)).toEqual({ a: 1, b: 2 })
    expect(sel.recomputations()).toBe(2)
    state = Object.assign({}, state, {
      mul1: 2,
    });
    expect(sel(state)).toEqual({ a: 2, b: 4 })
    expect(sel.recomputations()).toBe(4)
    state = Object.assign({}, state, {
      mul1: 1,
      mul2: 2,
    });
    expect(sel(state)).toEqual({ a: 2, b: 4 })
    expect(sel.recomputations()).toBe(6)
    // Only the changed value (b: 2 -> 1) recomputes.
    state = Object.assign({}, state, {
      numbers: { a: 1, b: 1 },
    });
    expect(sel(state)).toEqual({ a: 2, b: 2 })
    expect(sel.recomputations()).toBe(7)
  })
  test('with key argument', () => {
    // The result function also receives the entry's key as its last argument.
    const sel = createObjectSelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2, key) => `${key}:${element * mul1 * mul2}`
    )
    let state = {
      numbers: { a: 1, b: 2 },
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
    expect(sel.recomputations()).toBe(2)
    expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
    expect(sel.recomputations()).toBe(2)
    state = Object.assign({}, state, {
      numbers: { a: 1, b: 2 },
    });
    expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
    expect(sel.recomputations()).toBe(2)
    state = Object.assign({}, state, {
      mul1: 2,
    });
    expect(sel(state)).toEqual({ a: 'a:2', b: 'b:4' })
    expect(sel.recomputations()).toBe(4)
    state = Object.assign({}, state, {
      mul1: 1,
      mul2: 2,
    });
    expect(sel(state)).toEqual({ a: 'a:2', b: 'b:4' })
    expect(sel.recomputations()).toBe(6)
    state = Object.assign({}, state, {
      numbers: { a: 1, b: 1 }
    });
    expect(sel(state)).toEqual({ a: 'a:2', b: 'b:2' })
    expect(sel.recomputations()).toBe(7)
  })
})
describe('createListSelector', () => {
  test('basic list selector', () => {
    // Same per-element memoization contract as createArraySelector, but for
    // list-like (iterable) inputs.
    const sel = createListSelector(
      state => state,
      (element) => element * 5
    )
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel.recomputations()).toBe(4)
    // All values cached from the previous call: no recomputation.
    expect(sel([2, 2, 2, 2])).toEqual([10, 10, 10, 10])
    expect(sel.recomputations()).toBe(4)
    expect(sel([1, 1, 1, 1])).toEqual([5, 5, 5, 5])
    expect(sel.recomputations()).toBe(5)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(6)
    // Cache only covers the previous input, so 3 and 4 recompute.
    expect(sel([3, 4])).toEqual([15, 20])
    expect(sel.recomputations()).toBe(8)
    expect(sel([])).toEqual([])
    expect(sel.recomputations()).toBe(8)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(10)
  })
  test('another argument change', () => {
    // Scalar inputs invalidate the per-element cache when they change.
    const sel = createListSelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2) => element * mul1 * mul2
    )
    let state = {
      numbers: [1, 2, 3, 4],
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    // New but element-wise-equal list: nothing recomputes.
    state = Object.assign({}, state, {
      numbers: [1, 2, 3, 4]
    });
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    state = Object.assign({}, state, {
      mul1: 2
    });
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(8)
    state = Object.assign({}, state, {
      mul1: 1,
      mul2: 2,
    });
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(12)
    // Only the one new element (5) is computed.
    state = Object.assign({}, state, {
      numbers: [2, 3, 4, 5]
    });
    expect(sel(state)).toEqual([4, 6, 8, 10])
    expect(sel.recomputations()).toBe(13)
  })
})
describe('createMapSelector', () => {
  test('basic map selector', () => {
    // Per-value memoization over an Immutable.Map's values.
    const selector = createMapSelector(
      state => state,
      (element) => element * 5
    )
    // Assert value-equality between an Immutable result and a plain object.
    const expectMap = (actual, expected) => {
      expect(Immutable.is(actual, Immutable.Map(expected))).toBeTruthy()
    }
    // First call computes both entries.
    expectMap(selector(Immutable.Map({ a: 1, b: 2 })), { a: 5, b: 10 })
    expect(selector.recomputations()).toBe(2)
    // A value-equal map triggers no recomputation.
    expectMap(selector(Immutable.Map({ a: 1, b: 2 })), { a: 5, b: 10 })
    expect(selector.recomputations()).toBe(2)
    // Two changed values recompute once each.
    expectMap(selector(Immutable.Map({ a: 3, b: 4 })), { a: 15, b: 20 })
    expect(selector.recomputations()).toBe(4)
  })
})
| test/selectors.test.js | import Immutable from 'immutable'
import {
createArraySelector,
createObjectSelector,
createListSelector,
createMapSelector,
} from '../src'
describe('createArraySelector', () => {
  test('basic array selector', () => {
    // Element-wise memoization: only values unseen on the previous call
    // are recomputed.
    const sel = createArraySelector(
      state => state,
      (element) => element * 5
    )
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel.recomputations()).toBe(4)
    // All values cached from the previous call: no recomputation.
    expect(sel([2, 2, 2, 2])).toEqual([10, 10, 10, 10])
    expect(sel.recomputations()).toBe(4)
    expect(sel([1, 1, 1, 1])).toEqual([5, 5, 5, 5])
    expect(sel.recomputations()).toBe(5)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(6)
    // Cache only covers the previous input, so 3 and 4 recompute.
    expect(sel([3, 4])).toEqual([15, 20])
    expect(sel.recomputations()).toBe(8)
    expect(sel([])).toEqual([])
    expect(sel.recomputations()).toBe(8)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(10)
  })
  test('another argument change', () => {
    // Scalar inputs invalidate the per-element cache when they change.
    const sel = createArraySelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2) => element * mul1 * mul2
    )
    let state = {
      numbers: [1, 2, 3, 4],
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    // NOTE(review): the state object is mutated in place below; the input
    // selectors still observe the new values, but an immutable update
    // (copying state) would better mirror real Redux usage.
    state.numbers = [1, 2, 3, 4]
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    state.mul1 = 2;
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(8)
    state.mul1 = 1;
    state.mul2 = 2;
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(12)
    // Only the one new element (5) is computed.
    state.numbers = [2, 3, 4, 5]
    expect(sel(state)).toEqual([4, 6, 8, 10])
    expect(sel.recomputations()).toBe(13)
  })
})
describe('createObjectSelector', () => {
  test('basic object selector', () => {
    // Per-value memoization over the object's values; keys map through.
    const sel = createObjectSelector(
      state => state,
      (element) => element * 5
    )
    expect(sel({ a: 1, b: 2 })).toEqual({ a: 5, b: 10 })
    expect(sel.recomputations()).toBe(2)
    const state = { a: 1, b: 2 }
    expect(sel(state)).toEqual({ a: 5, b: 10 })
    expect(sel(state)).toEqual({ a: 5, b: 10 })
    expect(sel.recomputations()).toBe(2)
    // Same values under swapped keys still recompute per (key, value) pair.
    expect(sel({ a: 2, b: 1 })).toEqual({ a: 10, b: 5 })
    expect(sel.recomputations()).toBe(4)
    expect(sel({ a: 1, b: 1 })).toEqual({ a: 5, b: 5 })
    expect(sel.recomputations()).toBe(5)
    // Adding a key only computes the new entry.
    expect(sel({ a: 1, b: 1, c: 1 })).toEqual({ a: 5, b: 5, c: 5 })
    expect(sel.recomputations()).toBe(6)
    // Removing keys requires no recomputation.
    expect(sel({ a: 1 })).toEqual({ a: 5 })
    expect(sel.recomputations()).toBe(6)
  })
  test('another argument change', () => {
    // Scalar inputs invalidate the cached per-key results when they change.
    const sel = createObjectSelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2) => element * mul1 * mul2
    )
    let state = {
      numbers: { a: 1, b: 2 },
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual({ a: 1, b: 2 })
    expect(sel.recomputations()).toBe(2)
    expect(sel(state)).toEqual({ a: 1, b: 2 })
    expect(sel.recomputations()).toBe(2)
    // NOTE(review): state is mutated in place below; an immutable update
    // would better mirror real Redux usage.
    state.numbers = { a: 1, b: 2 }
    expect(sel(state)).toEqual({ a: 1, b: 2 })
    expect(sel.recomputations()).toBe(2)
    state.mul1 = 2;
    expect(sel(state)).toEqual({ a: 2, b: 4 })
    expect(sel.recomputations()).toBe(4)
    state.mul1 = 1;
    state.mul2 = 2;
    expect(sel(state)).toEqual({ a: 2, b: 4 })
    expect(sel.recomputations()).toBe(6)
    // Only the changed value (b: 2 -> 1) recomputes.
    state.numbers = { a: 1, b: 1 }
    expect(sel(state)).toEqual({ a: 2, b: 2 })
    expect(sel.recomputations()).toBe(7)
  })
  test('with key argument', () => {
    // The result function also receives the entry's key as its last argument.
    const sel = createObjectSelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2, key) => `${key}:${element * mul1 * mul2}`
    )
    let state = {
      numbers: { a: 1, b: 2 },
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
    expect(sel.recomputations()).toBe(2)
    expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
    expect(sel.recomputations()).toBe(2)
    state.numbers = { a: 1, b: 2 }
    expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
    expect(sel.recomputations()).toBe(2)
    state.mul1 = 2;
    expect(sel(state)).toEqual({ a: 'a:2', b: 'b:4' })
    expect(sel.recomputations()).toBe(4)
    state.mul1 = 1;
    state.mul2 = 2;
    expect(sel(state)).toEqual({ a: 'a:2', b: 'b:4' })
    expect(sel.recomputations()).toBe(6)
    state.numbers = { a: 1, b: 1 }
    expect(sel(state)).toEqual({ a: 'a:2', b: 'b:2' })
    expect(sel.recomputations()).toBe(7)
  })
})
describe('createListSelector', () => {
  test('basic list selector', () => {
    // Same per-element memoization contract as createArraySelector, but for
    // list-like (iterable) inputs.
    const sel = createListSelector(
      state => state,
      (element) => element * 5
    )
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel([1, 2, 3, 4])).toEqual([5, 10, 15, 20])
    expect(sel.recomputations()).toBe(4)
    // All values cached from the previous call: no recomputation.
    expect(sel([2, 2, 2, 2])).toEqual([10, 10, 10, 10])
    expect(sel.recomputations()).toBe(4)
    expect(sel([1, 1, 1, 1])).toEqual([5, 5, 5, 5])
    expect(sel.recomputations()).toBe(5)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(6)
    // Cache only covers the previous input, so 3 and 4 recompute.
    expect(sel([3, 4])).toEqual([15, 20])
    expect(sel.recomputations()).toBe(8)
    expect(sel([])).toEqual([])
    expect(sel.recomputations()).toBe(8)
    expect(sel([1, 2])).toEqual([5, 10])
    expect(sel.recomputations()).toBe(10)
  })
  test('another argument change', () => {
    // Scalar inputs invalidate the per-element cache when they change.
    const sel = createListSelector(
      state => state.numbers,
      state => state.mul1,
      state => state.mul2,
      (element, mul1, mul2) => element * mul1 * mul2
    )
    let state = {
      numbers: [1, 2, 3, 4],
      mul1: 1,
      mul2: 1,
    }
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    // NOTE(review): state is mutated in place below; an immutable update
    // would better mirror real Redux usage.
    state.numbers = [1, 2, 3, 4]
    expect(sel(state)).toEqual([1, 2, 3, 4])
    expect(sel.recomputations()).toBe(4)
    state.mul1 = 2;
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(8)
    state.mul1 = 1;
    state.mul2 = 2;
    expect(sel(state)).toEqual([2, 4, 6, 8])
    expect(sel.recomputations()).toBe(12)
    // Only the one new element (5) is computed.
    state.numbers = [2, 3, 4, 5]
    expect(sel(state)).toEqual([4, 6, 8, 10])
    expect(sel.recomputations()).toBe(13)
  })
})
describe('createMapSelector', () => {
test('basic map selector', () => {
const sel = createMapSelector(
state => state,
(element) => element * 5
)
expect(Immutable.is(
sel(Immutable.Map({ a: 1, b: 2 })),
Immutable.Map({ a: 5, b: 10 })
)).toBeTruthy()
expect(sel.recomputations()).toBe(2)
expect(Immutable.is(
sel(Immutable.Map({ a: 1, b: 2 })),
Immutable.Map({ a: 5, b: 10 })
)).toBeTruthy()
expect(sel.recomputations()).toBe(2)
expect(Immutable.is(
sel(Immutable.Map({ a: 3, b: 4 })),
Immutable.Map({ a: 15, b: 20 })
)).toBeTruthy()
expect(sel.recomputations()).toBe(4)
})
})
| Fix tests to work with 3.x
Version 3.x of reselect takes the input argument equality into account
when deciding whether to recompute. Unfortunately, some of the tests
mutated the value passed to the selector, so the selector wouldn't
update when it should have. This commit changes the updates to be
"immutable" and the test cases work once again.
| test/selectors.test.js | Fix tests to work with 3.x | <ide><path>est/selectors.test.js
<ide> expect(sel(state)).toEqual([1, 2, 3, 4])
<ide> expect(sel.recomputations()).toBe(4)
<ide>
<del> state.numbers = [1, 2, 3, 4]
<del> expect(sel(state)).toEqual([1, 2, 3, 4])
<del> expect(sel.recomputations()).toBe(4)
<del>
<del> state.mul1 = 2;
<add> state = Object.assign({}, state, {
<add> numbers: [1, 2, 3, 4]
<add> });
<add> expect(sel(state)).toEqual([1, 2, 3, 4])
<add> expect(sel.recomputations()).toBe(4)
<add>
<add> state = Object.assign({}, state, {
<add> mul1: 2,
<add> });
<ide> expect(sel(state)).toEqual([2, 4, 6, 8])
<ide> expect(sel.recomputations()).toBe(8)
<ide>
<del> state.mul1 = 1;
<del> state.mul2 = 2;
<add> state = Object.assign({}, state, {
<add> mul1: 1,
<add> mul2: 2,
<add> });
<ide> expect(sel(state)).toEqual([2, 4, 6, 8])
<ide> expect(sel.recomputations()).toBe(12)
<ide>
<del> state.numbers = [2, 3, 4, 5]
<add> state = Object.assign({}, state, {
<add> numbers: [2, 3, 4, 5]
<add> });
<ide> expect(sel(state)).toEqual([4, 6, 8, 10])
<ide> expect(sel.recomputations()).toBe(13)
<ide> })
<ide> expect(sel(state)).toEqual({ a: 1, b: 2 })
<ide> expect(sel.recomputations()).toBe(2)
<ide>
<del> state.numbers = { a: 1, b: 2 }
<add> state = Object.assign({}, state, {
<add> numbers: { a: 1, b: 2 },
<add> });
<ide> expect(sel(state)).toEqual({ a: 1, b: 2 })
<ide> expect(sel.recomputations()).toBe(2)
<ide>
<del> state.mul1 = 2;
<add> state = Object.assign({}, state, {
<add> mul1: 2,
<add> });
<ide> expect(sel(state)).toEqual({ a: 2, b: 4 })
<ide> expect(sel.recomputations()).toBe(4)
<ide>
<del> state.mul1 = 1;
<del> state.mul2 = 2;
<add> state = Object.assign({}, state, {
<add> mul1: 1,
<add> mul2: 2,
<add> });
<ide> expect(sel(state)).toEqual({ a: 2, b: 4 })
<ide> expect(sel.recomputations()).toBe(6)
<ide>
<del> state.numbers = { a: 1, b: 1 }
<add> state = Object.assign({}, state, {
<add> numbers: { a: 1, b: 1 },
<add> });
<ide> expect(sel(state)).toEqual({ a: 2, b: 2 })
<ide> expect(sel.recomputations()).toBe(7)
<ide> })
<ide> expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
<ide> expect(sel.recomputations()).toBe(2)
<ide>
<del> state.numbers = { a: 1, b: 2 }
<add> state = Object.assign({}, state, {
<add> numbers: { a: 1, b: 2 },
<add> });
<ide> expect(sel(state)).toEqual({ a: 'a:1', b: 'b:2' })
<ide> expect(sel.recomputations()).toBe(2)
<ide>
<del> state.mul1 = 2;
<add> state = Object.assign({}, state, {
<add> mul1: 2,
<add> });
<ide> expect(sel(state)).toEqual({ a: 'a:2', b: 'b:4' })
<ide> expect(sel.recomputations()).toBe(4)
<ide>
<del> state.mul1 = 1;
<del> state.mul2 = 2;
<add> state = Object.assign({}, state, {
<add> mul1: 1,
<add> mul2: 2,
<add> });
<ide> expect(sel(state)).toEqual({ a: 'a:2', b: 'b:4' })
<ide> expect(sel.recomputations()).toBe(6)
<ide>
<del> state.numbers = { a: 1, b: 1 }
<add> state = Object.assign({}, state, {
<add> numbers: { a: 1, b: 1 }
<add> });
<ide> expect(sel(state)).toEqual({ a: 'a:2', b: 'b:2' })
<ide> expect(sel.recomputations()).toBe(7)
<ide> })
<ide> expect(sel(state)).toEqual([1, 2, 3, 4])
<ide> expect(sel.recomputations()).toBe(4)
<ide>
<del> state.numbers = [1, 2, 3, 4]
<del> expect(sel(state)).toEqual([1, 2, 3, 4])
<del> expect(sel.recomputations()).toBe(4)
<del>
<del> state.mul1 = 2;
<add> state = Object.assign({}, state, {
<add> numbers: [1, 2, 3, 4]
<add> });
<add> expect(sel(state)).toEqual([1, 2, 3, 4])
<add> expect(sel.recomputations()).toBe(4)
<add>
<add> state = Object.assign({}, state, {
<add> mul1: 2
<add> });
<ide> expect(sel(state)).toEqual([2, 4, 6, 8])
<ide> expect(sel.recomputations()).toBe(8)
<ide>
<del> state.mul1 = 1;
<del> state.mul2 = 2;
<add> state = Object.assign({}, state, {
<add> mul1: 1,
<add> mul2: 2,
<add> });
<ide> expect(sel(state)).toEqual([2, 4, 6, 8])
<ide> expect(sel.recomputations()).toBe(12)
<ide>
<del> state.numbers = [2, 3, 4, 5]
<add> state = Object.assign({}, state, {
<add> numbers: [2, 3, 4, 5]
<add> });
<ide> expect(sel(state)).toEqual([4, 6, 8, 10])
<ide> expect(sel.recomputations()).toBe(13)
<ide> }) |
|
Java | apache-2.0 | 498f19b7d245ece3c657b10678d9a236be717ea3 | 0 | qtproject/qtqa-gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,WANdisco/gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,gerrit-review/gerrit,WANdisco/gerrit,gerrit-review/gerrit,WANdisco/gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit | // Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.git;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.lifecycle.LifecycleModule;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.SitePaths;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.internal.storage.file.LockFile;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryCache;
import org.eclipse.jgit.lib.RepositoryCache.FileKey;
import org.eclipse.jgit.lib.RepositoryCacheConfig;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.storage.file.WindowCacheConfig;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.EnumSet;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Manages Git repositories stored on the local filesystem.
 * <p>
 * All repositories live under a single configured root ({@code gerrit.basePath}).
 * The set of known project names is held in a volatile, immutable snapshot
 * ({@link #names}); writers replace the snapshot wholesale while holding
 * {@link #namesUpdateLock}, readers access it without locking.
 */
@Singleton
public class LocalDiskRepositoryManager implements GitRepositoryManager,
    LifecycleListener {
  private static final Logger log =
      LoggerFactory.getLogger(LocalDiskRepositoryManager.class);

  // Default description text in new repositories; treated as "no
  // description" when read back (see getProjectDescription).
  private static final String UNNAMED =
      "Unnamed repository; edit this file to name it for gitweb.";

  /** Guice module binding this manager and registering its lifecycle hooks. */
  public static class Module extends LifecycleModule {
    @Override
    protected void configure() {
      bind(GitRepositoryManager.class).to(LocalDiskRepositoryManager.class);
      listener().to(LocalDiskRepositoryManager.class);
      listener().to(LocalDiskRepositoryManager.Lifecycle.class);
    }
  }

  /** Installs JGit cache settings from gerrit.config at server startup. */
  public static class Lifecycle implements LifecycleListener {
    private final Config serverConfig;

    @Inject
    Lifecycle(@GerritServerConfig final Config cfg) {
      this.serverConfig = cfg;
    }

    @Override
    public void start() {
      // Configure JGit's repository cache and window cache from the
      // server configuration.
      RepositoryCacheConfig repoCacheCfg = new RepositoryCacheConfig();
      repoCacheCfg.fromConfig(serverConfig);
      repoCacheCfg.install();

      WindowCacheConfig cfg = new WindowCacheConfig();
      cfg.fromConfig(serverConfig);
      if (serverConfig.getString("core", null, "streamFileThreshold") == null) {
        // No explicit threshold configured: derive a default from the heap.
        long mx = Runtime.getRuntime().maxMemory();
        int limit = (int) Math.min(
            mx / 4, // don't use more than 1/4 of the heap.
            2047 << 20); // cannot exceed array length
        if ((5 << 20) < limit && limit % (1 << 20) != 0) {
          // If the limit is at least 5 MiB but is not a whole multiple
          // of MiB round up to the next one full megabyte. This is a very
          // tiny memory increase in exchange for nice round units.
          limit = ((limit / (1 << 20)) + 1) << 20;
        }

        // Human-readable form of the limit, for the log message only.
        String desc;
        if (limit % (1 << 20) == 0) {
          desc = String.format("%dm", limit / (1 << 20));
        } else if (limit % (1 << 10) == 0) {
          desc = String.format("%dk", limit / (1 << 10));
        } else {
          desc = String.format("%d", limit);
        }
        log.info(String.format(
            "Defaulting core.streamFileThreshold to %s",
            desc));
        cfg.setStreamFileThreshold(limit);
      }
      cfg.install();
    }

    @Override
    public void stop() {
    }
  }

  /** Root directory under which all repositories are stored. */
  private final Path basePath;
  /** Serializes writers of {@link #names}; fair, so creators queue in order. */
  private final Lock namesUpdateLock;
  // Copy-on-write snapshot of known project names. Volatile so readers
  // always observe the most recently published (immutable) set.
  private volatile SortedSet<Project.NameKey> names = new TreeSet<>();

  @Inject
  LocalDiskRepositoryManager(SitePaths site,
      @GerritServerConfig Config cfg) {
    basePath = site.resolve(cfg.getString("gerrit", null, "basePath"));
    if (basePath == null) {
      throw new IllegalStateException("gerrit.basePath must be configured");
    }
    namesUpdateLock = new ReentrantLock(true /* fair */);
  }

  /** Primes the name snapshot by scanning the filesystem at startup. */
  @Override
  public void start() {
    names = list();
  }

  @Override
  public void stop() {
  }

  /**
   * Return the basePath under which the specified project is stored.
   *
   * @param name the name of the project
   * @return base directory
   */
  public Path getBasePath(Project.NameKey name) {
    return basePath;
  }

  /**
   * Opens the existing repository for {@code name}.
   *
   * @throws RepositoryNotFoundException if the name is unreasonable or no
   *         repository exists on disk under the base path.
   */
  @Override
  public Repository openRepository(Project.NameKey name)
      throws RepositoryNotFoundException {
    return openRepository(getBasePath(name), name);
  }

  // Resolves and opens the repository, registering it in the names
  // snapshot if it exists on disk but was not yet known.
  private Repository openRepository(Path path, Project.NameKey name)
      throws RepositoryNotFoundException {
    if (isUnreasonableName(name)) {
      throw new RepositoryNotFoundException("Invalid name: " + name);
    }
    File gitDir = path.resolve(name.get()).toFile();
    if (!names.contains(name)) {
      // The this.names list does not hold the project-name but it can still exist
      // on disk; for instance when the project has been created directly on the
      // file-system through replication.
      //
      if (!name.get().endsWith(Constants.DOT_GIT_EXT)) {
        if (FileKey.resolve(gitDir, FS.DETECTED) != null) {
          onCreateProject(name);
        } else {
          throw new RepositoryNotFoundException(gitDir);
        }
      } else {
        // Name already carries the ".git" suffix: probe the two layouts
        // JGit recognizes before giving up.
        final File directory = gitDir;
        if (FileKey.isGitRepository(new File(directory, Constants.DOT_GIT),
            FS.DETECTED)) {
          onCreateProject(name);
        } else if (FileKey.isGitRepository(new File(directory.getParentFile(),
            directory.getName() + Constants.DOT_GIT_EXT), FS.DETECTED)) {
          onCreateProject(name);
        } else {
          throw new RepositoryNotFoundException(gitDir);
        }
      }
    }
    final FileKey loc = FileKey.lenient(gitDir, FS.DETECTED);
    try {
      return RepositoryCache.open(loc);
    } catch (IOException e1) {
      final RepositoryNotFoundException e2;
      e2 = new RepositoryNotFoundException("Cannot open repository " + name);
      e2.initCause(e1);
      throw e2;
    }
  }

  /**
   * Creates a new bare repository for {@code name}, enabling the reflog and
   * pre-creating the refs/meta/config log file.
   *
   * @throws RepositoryCaseMismatchException if a repository already exists on
   *         disk for this name but under a different character case.
   * @throws RepositoryNotFoundException if the name is unreasonable or the
   *         repository cannot be created.
   */
  @Override
  public Repository createRepository(Project.NameKey name)
      throws RepositoryNotFoundException, RepositoryCaseMismatchException {
    Path path = getBasePath(name);
    if (isUnreasonableName(name)) {
      throw new RepositoryNotFoundException("Invalid name: " + name);
    }

    File dir = FileKey.resolve(path.resolve(name.get()).toFile(), FS.DETECTED);
    FileKey loc;
    if (dir != null) {
      // Already exists on disk, use the repository we found.
      //
      loc = FileKey.exact(dir, FS.DETECTED);
      if (!names.contains(name)) {
        throw new RepositoryCaseMismatchException(name);
      }
    } else {
      // It doesn't exist under any of the standard permutations
      // of the repository name, so prefer the standard bare name.
      //
      String n = name.get() + Constants.DOT_GIT_EXT;
      loc = FileKey.exact(path.resolve(n).toFile(), FS.DETECTED);
    }

    try {
      Repository db = RepositoryCache.open(loc, false);
      db.create(true /* bare */);

      StoredConfig config = db.getConfig();
      config.setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
          null, ConfigConstants.CONFIG_KEY_LOGALLREFUPDATES, true);
      config.save();

      // JGit only writes to the reflog for refs/meta/config if the log file
      // already exists.
      //
      File metaConfigLog =
          new File(db.getDirectory(), "logs/" + RefNames.REFS_CONFIG);
      if (!metaConfigLog.getParentFile().mkdirs()
          || !metaConfigLog.createNewFile()) {
        log.error(String.format(
            "Failed to create ref log for %s in repository %s",
            RefNames.REFS_CONFIG, name));
      }

      onCreateProject(name);

      return db;
    } catch (IOException e1) {
      final RepositoryNotFoundException e2;
      e2 = new RepositoryNotFoundException("Cannot create repository " + name);
      e2.initCause(e1);
      throw e2;
    }
  }

  // Publishes a new immutable names snapshot that includes the given
  // project. Copy-on-write under the lock; readers never block.
  private void onCreateProject(final Project.NameKey newProjectName) {
    namesUpdateLock.lock();
    try {
      SortedSet<Project.NameKey> n = new TreeSet<>(names);
      n.add(newProjectName);
      names = Collections.unmodifiableSortedSet(n);
    } finally {
      namesUpdateLock.unlock();
    }
  }

  /**
   * Reads the project's gitweb description file.
   *
   * @return the trimmed description, or {@code null} if the file is missing,
   *         empty, or still contains the default "Unnamed repository" text.
   */
  @Override
  public String getProjectDescription(final Project.NameKey name)
      throws RepositoryNotFoundException, IOException {
    try (Repository e = openRepository(name)) {
      return getProjectDescription(e);
    }
  }

  private String getProjectDescription(final Repository e) throws IOException {
    final File d = new File(e.getDirectory(), "description");

    String description;
    try {
      description = RawParseUtils.decode(IO.readFully(d));
    } catch (FileNotFoundException err) {
      // No description file at all: same as no description.
      return null;
    }

    if (description != null) {
      description = description.trim();
      if (description.isEmpty()) {
        description = null;
      }
      if (UNNAMED.equals(description)) {
        description = null;
      }
    }
    return description;
  }

  /**
   * Writes the gitweb description file for the project. Best-effort: a
   * failure is only logged, not propagated to the caller.
   */
  @Override
  public void setProjectDescription(Project.NameKey name, String description) {
    // Update git's description file, in case gitweb is being used
    //
    try (Repository e = openRepository(name)) {
      String old = getProjectDescription(e);
      if ((old == null && description == null)
          || (old != null && old.equals(description))) {
        // Unchanged; avoid touching the file.
        return;
      }

      LockFile f = new LockFile(new File(e.getDirectory(), "description"));
      if (f.lock()) {
        String d = description;
        if (d != null) {
          d = d.trim();
          if (d.length() > 0) {
            d += "\n";
          }
        } else {
          d = "";
        }
        f.write(Constants.encode(d));
        f.commit();
      }
    } catch (IOException e) {
      log.error("Cannot update description for " + name, e);
    }
  }

  // Rejects names that could escape basePath or confuse shells/tools;
  // each clause documents the specific pattern it blocks.
  private boolean isUnreasonableName(final Project.NameKey nameKey) {
    final String name = nameKey.get();

    return name.length() == 0 // no empty paths
      || name.charAt(name.length() - 1) == '/' // no suffix
      || name.indexOf('\\') >= 0 // no windows/dos style paths
      || name.charAt(0) == '/' // no absolute paths
      || new File(name).isAbsolute() // no absolute paths
      || name.startsWith("../") // no "l../etc/passwd"
      || name.contains("/../") // no "foo/../etc/passwd"
      || name.contains("/./") // "foo/./foo" is insane to ask
      || name.contains("//") // windows UNC path can be "//..."
      || name.contains(".git/") // no path segments that end with '.git' as "foo.git/bar"
      || name.contains("?") // common unix wildcard
      || name.contains("%") // wildcard or string parameter
      || name.contains("*") // wildcard
      || name.contains(":") // Could be used for absolute paths in windows?
      || name.contains("<") // redirect input
      || name.contains(">") // redirect output
      || name.contains("|") // pipe
      || name.contains("$") // dollar sign
      || name.contains("\r"); // carriage return
  }

  /**
   * Scans the filesystem for all repositories under the base path.
   *
   * @return an unmodifiable sorted set of the project names found.
   */
  @Override
  public SortedSet<Project.NameKey> list() {
    // The results of this method are cached by ProjectCacheImpl. Control only
    // enters here if the cache was flushed by the administrator to force
    // scanning the filesystem. Don't rely on the cached names collection.
    ProjectVisitor visitor = new ProjectVisitor(basePath);
    scanProjects(visitor);
    return Collections.unmodifiableSortedSet(visitor.found);
  }

  // Walks the visitor's start folder (following symlinks); I/O failures
  // during the walk are logged and yield a partial result.
  protected void scanProjects(ProjectVisitor visitor) {
    try {
      Files.walkFileTree(visitor.startFolder,
          EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, visitor);
    } catch (IOException e) {
      log.error("Error walking repository tree "
          + visitor.startFolder.toAbsolutePath(), e);
    }
  }

  /**
   * File-tree visitor collecting project names: directories that look like
   * Git repositories are recorded and not descended into.
   */
  protected class ProjectVisitor extends SimpleFileVisitor<Path> {
    private final SortedSet<Project.NameKey> found = new TreeSet<>();
    private Path startFolder;

    public ProjectVisitor(Path startFolder) {
      setStartFolder(startFolder);
    }

    public void setStartFolder(Path startFolder) {
      this.startFolder = startFolder;
    }

    @Override
    public FileVisitResult preVisitDirectory(Path dir,
        BasicFileAttributes attrs) throws IOException {
      if (!dir.equals(startFolder) && isRepo(dir)) {
        addProject(dir);
        // Nested repositories are not scanned for.
        return FileVisitResult.SKIP_SUBTREE;
      }
      return FileVisitResult.CONTINUE;
    }

    // A directory counts as a repository if it ends in ".git" or JGit
    // recognizes it as one; bare ".git" directories are excluded.
    private boolean isRepo(Path p) {
      String name = p.getFileName().toString();
      return !name.equals(Constants.DOT_GIT)
          && (name.endsWith(Constants.DOT_GIT_EXT)
              || FileKey.isGitRepository(p.toFile(), FS.DETECTED));
    }

    private void addProject(Path p) {
      Project.NameKey nameKey = getProjectName(p);
      // Only record projects whose configured base path is the folder we
      // are scanning.
      if (getBasePath(nameKey).equals(startFolder)) {
        if (isUnreasonableName(nameKey)) {
          log.warn(
              "Ignoring unreasonably named repository " + p.toAbsolutePath());
        } else {
          found.add(nameKey);
        }
      }
    }

    // Derives the project name from the path: relative to the scan root,
    // '/'-separated, with any trailing ".git" suffix removed.
    private Project.NameKey getProjectName(Path p) {
      String projectName = startFolder.relativize(p).toString();
      if (File.separatorChar != '/') {
        projectName = projectName.replace(File.separatorChar, '/');
      }
      if (projectName.endsWith(Constants.DOT_GIT_EXT)) {
        int newLen = projectName.length() - Constants.DOT_GIT_EXT.length();
        projectName = projectName.substring(0, newLen);
      }
      return new Project.NameKey(projectName);
    }
  }
}
| gerrit-server/src/main/java/com/google/gerrit/server/git/LocalDiskRepositoryManager.java | // Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.git;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.lifecycle.LifecycleModule;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.SitePaths;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.internal.storage.file.LockFile;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryCache;
import org.eclipse.jgit.lib.RepositoryCache.FileKey;
import org.eclipse.jgit.lib.RepositoryCacheConfig;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.storage.file.WindowCacheConfig;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.EnumSet;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Manages Git repositories stored on the local filesystem.
 * <p>
 * All repositories live under a single configured root ({@code gerrit.basePath}).
 * The set of known project names is held in a volatile, immutable snapshot
 * ({@link #names}); writers replace the snapshot wholesale while holding
 * {@link #namesUpdateLock}, readers access it without locking.
 */
@Singleton
public class LocalDiskRepositoryManager implements GitRepositoryManager,
    LifecycleListener {
  private static final Logger log =
      LoggerFactory.getLogger(LocalDiskRepositoryManager.class);

  // Default description text in new repositories; treated as "no
  // description" when read back (see getProjectDescription).
  private static final String UNNAMED =
      "Unnamed repository; edit this file to name it for gitweb.";

  /** Guice module binding this manager and registering its lifecycle hooks. */
  public static class Module extends LifecycleModule {
    @Override
    protected void configure() {
      bind(GitRepositoryManager.class).to(LocalDiskRepositoryManager.class);
      listener().to(LocalDiskRepositoryManager.class);
      listener().to(LocalDiskRepositoryManager.Lifecycle.class);
    }
  }

  /** Installs JGit cache settings from gerrit.config at server startup. */
  public static class Lifecycle implements LifecycleListener {
    private final Config serverConfig;

    @Inject
    Lifecycle(@GerritServerConfig final Config cfg) {
      this.serverConfig = cfg;
    }

    @Override
    public void start() {
      // Configure JGit's repository cache and window cache from the
      // server configuration.
      RepositoryCacheConfig repoCacheCfg = new RepositoryCacheConfig();
      repoCacheCfg.fromConfig(serverConfig);
      repoCacheCfg.install();

      WindowCacheConfig cfg = new WindowCacheConfig();
      cfg.fromConfig(serverConfig);
      if (serverConfig.getString("core", null, "streamFileThreshold") == null) {
        // No explicit threshold configured: derive a default from the heap.
        long mx = Runtime.getRuntime().maxMemory();
        int limit = (int) Math.min(
            mx / 4, // don't use more than 1/4 of the heap.
            2047 << 20); // cannot exceed array length
        if ((5 << 20) < limit && limit % (1 << 20) != 0) {
          // If the limit is at least 5 MiB but is not a whole multiple
          // of MiB round up to the next one full megabyte. This is a very
          // tiny memory increase in exchange for nice round units.
          limit = ((limit / (1 << 20)) + 1) << 20;
        }

        // Human-readable form of the limit, for the log message only.
        String desc;
        if (limit % (1 << 20) == 0) {
          desc = String.format("%dm", limit / (1 << 20));
        } else if (limit % (1 << 10) == 0) {
          desc = String.format("%dk", limit / (1 << 10));
        } else {
          desc = String.format("%d", limit);
        }
        log.info(String.format(
            "Defaulting core.streamFileThreshold to %s",
            desc));
        cfg.setStreamFileThreshold(limit);
      }
      cfg.install();
    }

    @Override
    public void stop() {
    }
  }

  /** Root directory under which all repositories are stored. */
  private final Path basePath;
  /** Serializes writers of {@link #names}; fair, so creators queue in order. */
  private final Lock namesUpdateLock;
  // Copy-on-write snapshot of known project names. Volatile so readers
  // always observe the most recently published (immutable) set.
  private volatile SortedSet<Project.NameKey> names = new TreeSet<>();

  @Inject
  LocalDiskRepositoryManager(SitePaths site,
      @GerritServerConfig Config cfg) {
    basePath = site.resolve(cfg.getString("gerrit", null, "basePath"));
    if (basePath == null) {
      throw new IllegalStateException("gerrit.basePath must be configured");
    }
    namesUpdateLock = new ReentrantLock(true /* fair */);
  }

  /** Primes the name snapshot by scanning the filesystem at startup. */
  @Override
  public void start() {
    names = list();
  }

  @Override
  public void stop() {
  }

  /**
   * Return the basePath under which the specified project is stored.
   *
   * @param name the name of the project
   * @return base directory
   */
  public Path getBasePath(Project.NameKey name) {
    return basePath;
  }

  /**
   * Opens the existing repository for {@code name}.
   *
   * @throws RepositoryNotFoundException if the name is unreasonable or no
   *         repository exists on disk under the base path.
   */
  @Override
  public Repository openRepository(Project.NameKey name)
      throws RepositoryNotFoundException {
    return openRepository(getBasePath(name), name);
  }

  // Resolves and opens the repository, registering it in the names
  // snapshot if it exists on disk but was not yet known.
  private Repository openRepository(Path path, Project.NameKey name)
      throws RepositoryNotFoundException {
    if (isUnreasonableName(name)) {
      throw new RepositoryNotFoundException("Invalid name: " + name);
    }
    File gitDir = path.resolve(name.get()).toFile();
    if (!names.contains(name)) {
      // The this.names list does not hold the project-name but it can still exist
      // on disk; for instance when the project has been created directly on the
      // file-system through replication.
      //
      if (!name.get().endsWith(Constants.DOT_GIT_EXT)) {
        if (FileKey.resolve(gitDir, FS.DETECTED) != null) {
          onCreateProject(name);
        } else {
          throw new RepositoryNotFoundException(gitDir);
        }
      } else {
        // Name already carries the ".git" suffix: probe the two layouts
        // JGit recognizes before giving up.
        final File directory = gitDir;
        if (FileKey.isGitRepository(new File(directory, Constants.DOT_GIT),
            FS.DETECTED)) {
          onCreateProject(name);
        } else if (FileKey.isGitRepository(new File(directory.getParentFile(),
            directory.getName() + Constants.DOT_GIT_EXT), FS.DETECTED)) {
          onCreateProject(name);
        } else {
          throw new RepositoryNotFoundException(gitDir);
        }
      }
    }
    final FileKey loc = FileKey.lenient(gitDir, FS.DETECTED);
    try {
      return RepositoryCache.open(loc);
    } catch (IOException e1) {
      final RepositoryNotFoundException e2;
      e2 = new RepositoryNotFoundException("Cannot open repository " + name);
      e2.initCause(e1);
      throw e2;
    }
  }

  /**
   * Creates a new bare repository for {@code name}, enabling the reflog and
   * pre-creating the refs/meta/config log file.
   *
   * @throws RepositoryCaseMismatchException if a repository already exists on
   *         disk for this name but under a different character case.
   * @throws RepositoryNotFoundException if the name is unreasonable or the
   *         repository cannot be created.
   */
  @Override
  public Repository createRepository(Project.NameKey name)
      throws RepositoryNotFoundException, RepositoryCaseMismatchException {
    Path path = getBasePath(name);
    if (isUnreasonableName(name)) {
      throw new RepositoryNotFoundException("Invalid name: " + name);
    }

    File dir = FileKey.resolve(path.resolve(name.get()).toFile(), FS.DETECTED);
    FileKey loc;
    if (dir != null) {
      // Already exists on disk, use the repository we found.
      //
      loc = FileKey.exact(dir, FS.DETECTED);
      if (!names.contains(name)) {
        throw new RepositoryCaseMismatchException(name);
      }
    } else {
      // It doesn't exist under any of the standard permutations
      // of the repository name, so prefer the standard bare name.
      //
      String n = name.get() + Constants.DOT_GIT_EXT;
      loc = FileKey.exact(path.resolve(n).toFile(), FS.DETECTED);
    }

    try {
      Repository db = RepositoryCache.open(loc, false);
      db.create(true /* bare */);

      StoredConfig config = db.getConfig();
      config.setBoolean(ConfigConstants.CONFIG_CORE_SECTION,
          null, ConfigConstants.CONFIG_KEY_LOGALLREFUPDATES, true);
      config.save();

      // JGit only writes to the reflog for refs/meta/config if the log file
      // already exists.
      //
      File metaConfigLog =
          new File(db.getDirectory(), "logs/" + RefNames.REFS_CONFIG);
      if (!metaConfigLog.getParentFile().mkdirs()
          || !metaConfigLog.createNewFile()) {
        log.error(String.format(
            "Failed to create ref log for %s in repository %s",
            RefNames.REFS_CONFIG, name));
      }

      onCreateProject(name);

      return db;
    } catch (IOException e1) {
      final RepositoryNotFoundException e2;
      e2 = new RepositoryNotFoundException("Cannot create repository " + name);
      e2.initCause(e1);
      throw e2;
    }
  }

  // Publishes a new immutable names snapshot that includes the given
  // project. Copy-on-write under the lock; readers never block.
  private void onCreateProject(final Project.NameKey newProjectName) {
    namesUpdateLock.lock();
    try {
      SortedSet<Project.NameKey> n = new TreeSet<>(names);
      n.add(newProjectName);
      names = Collections.unmodifiableSortedSet(n);
    } finally {
      namesUpdateLock.unlock();
    }
  }

  /**
   * Reads the project's gitweb description file.
   *
   * @return the trimmed description, or {@code null} if the file is missing,
   *         empty, or still contains the default "Unnamed repository" text.
   */
  @Override
  public String getProjectDescription(final Project.NameKey name)
      throws RepositoryNotFoundException, IOException {
    try (Repository e = openRepository(name)) {
      return getProjectDescription(e);
    }
  }

  private String getProjectDescription(final Repository e) throws IOException {
    final File d = new File(e.getDirectory(), "description");

    String description;
    try {
      description = RawParseUtils.decode(IO.readFully(d));
    } catch (FileNotFoundException err) {
      // No description file at all: same as no description.
      return null;
    }

    if (description != null) {
      description = description.trim();
      if (description.isEmpty()) {
        description = null;
      }
      if (UNNAMED.equals(description)) {
        description = null;
      }
    }
    return description;
  }

  /**
   * Writes the gitweb description file for the project. Best-effort: a
   * failure is only logged, not propagated to the caller.
   */
  @Override
  public void setProjectDescription(Project.NameKey name, String description) {
    // Update git's description file, in case gitweb is being used
    //
    try (Repository e = openRepository(name)) {
      String old = getProjectDescription(e);
      if ((old == null && description == null)
          || (old != null && old.equals(description))) {
        // Unchanged; avoid touching the file.
        return;
      }

      LockFile f = new LockFile(new File(e.getDirectory(), "description"));
      if (f.lock()) {
        String d = description;
        if (d != null) {
          d = d.trim();
          if (d.length() > 0) {
            d += "\n";
          }
        } else {
          d = "";
        }
        f.write(Constants.encode(d));
        f.commit();
      }
    } catch (IOException e) {
      log.error("Cannot update description for " + name, e);
    }
  }

  // Rejects names that could escape basePath or confuse shells/tools;
  // each clause documents the specific pattern it blocks.
  private boolean isUnreasonableName(final Project.NameKey nameKey) {
    final String name = nameKey.get();

    return name.length() == 0 // no empty paths
      || name.charAt(name.length() - 1) == '/' // no suffix
      || name.indexOf('\\') >= 0 // no windows/dos style paths
      || name.charAt(0) == '/' // no absolute paths
      || new File(name).isAbsolute() // no absolute paths
      || name.startsWith("../") // no "l../etc/passwd"
      || name.contains("/../") // no "foo/../etc/passwd"
      || name.contains("/./") // "foo/./foo" is insane to ask
      || name.contains("//") // windows UNC path can be "//..."
      || name.contains(".git/") // no path segments that end with '.git' as "foo.git/bar"
      || name.contains("?") // common unix wildcard
      || name.contains("%") // wildcard or string parameter
      || name.contains("*") // wildcard
      || name.contains(":") // Could be used for absolute paths in windows?
      || name.contains("<") // redirect input
      || name.contains(">") // redirect output
      || name.contains("|") // pipe
      || name.contains("$") // dollar sign
      || name.contains("\r"); // carriage return
  }

  /**
   * Scans the filesystem for all repositories under the base path.
   *
   * @return an unmodifiable sorted set of the project names found.
   */
  @Override
  public SortedSet<Project.NameKey> list() {
    // The results of this method are cached by ProjectCacheImpl. Control only
    // enters here if the cache was flushed by the administrator to force
    // scanning the filesystem. Don't rely on the cached names collection.
    //
    // No locking is required here: this method never mutates this.names,
    // and holding the fair namesUpdateLock across a full filesystem walk
    // would needlessly block onCreateProject() -- i.e. repository creation
    // -- for the entire duration of the scan.
    ProjectVisitor visitor = new ProjectVisitor(basePath);
    scanProjects(visitor);
    return Collections.unmodifiableSortedSet(visitor.found);
  }

  // Walks the visitor's start folder (following symlinks); I/O failures
  // during the walk are logged and yield a partial result.
  protected void scanProjects(ProjectVisitor visitor) {
    try {
      Files.walkFileTree(visitor.startFolder,
          EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, visitor);
    } catch (IOException e) {
      log.error("Error walking repository tree "
          + visitor.startFolder.toAbsolutePath(), e);
    }
  }

  /**
   * File-tree visitor collecting project names: directories that look like
   * Git repositories are recorded and not descended into.
   */
  protected class ProjectVisitor extends SimpleFileVisitor<Path> {
    private final SortedSet<Project.NameKey> found = new TreeSet<>();
    private Path startFolder;

    public ProjectVisitor(Path startFolder) {
      setStartFolder(startFolder);
    }

    public void setStartFolder(Path startFolder) {
      this.startFolder = startFolder;
    }

    @Override
    public FileVisitResult preVisitDirectory(Path dir,
        BasicFileAttributes attrs) throws IOException {
      if (!dir.equals(startFolder) && isRepo(dir)) {
        addProject(dir);
        // Nested repositories are not scanned for.
        return FileVisitResult.SKIP_SUBTREE;
      }
      return FileVisitResult.CONTINUE;
    }

    // A directory counts as a repository if it ends in ".git" or JGit
    // recognizes it as one; bare ".git" directories are excluded.
    private boolean isRepo(Path p) {
      String name = p.getFileName().toString();
      return !name.equals(Constants.DOT_GIT)
          && (name.endsWith(Constants.DOT_GIT_EXT)
              || FileKey.isGitRepository(p.toFile(), FS.DETECTED));
    }

    private void addProject(Path p) {
      Project.NameKey nameKey = getProjectName(p);
      // Only record projects whose configured base path is the folder we
      // are scanning.
      if (getBasePath(nameKey).equals(startFolder)) {
        if (isUnreasonableName(nameKey)) {
          log.warn(
              "Ignoring unreasonably named repository " + p.toAbsolutePath());
        } else {
          found.add(nameKey);
        }
      }
    }

    // Derives the project name from the path: relative to the scan root,
    // '/'-separated, with any trailing ".git" suffix removed.
    private Project.NameKey getProjectName(Path p) {
      String projectName = startFolder.relativize(p).toString();
      if (File.separatorChar != '/') {
        projectName = projectName.replace(File.separatorChar, '/');
      }
      if (projectName.endsWith(Constants.DOT_GIT_EXT)) {
        int newLen = projectName.length() - Constants.DOT_GIT_EXT.length();
        projectName = projectName.substring(0, newLen);
      }
      return new Project.NameKey(projectName);
    }
  }
}
| Don't use namesUpdateLock when scanning for project names
The names collection is not updated from the list() method so we also
don't need to lock it. Further, the list() method is normally only
called from the loader of the project_list cache and Guava already takes
care that only one thread loads the cache entry.
Change-Id: I77d00051d3de0287c560d45b706d4fefcdd340f3
| gerrit-server/src/main/java/com/google/gerrit/server/git/LocalDiskRepositoryManager.java | Don't use namesUpdateLock when scanning for project names | <ide><path>errit-server/src/main/java/com/google/gerrit/server/git/LocalDiskRepositoryManager.java
<ide> // The results of this method are cached by ProjectCacheImpl. Control only
<ide> // enters here if the cache was flushed by the administrator to force
<ide> // scanning the filesystem. Don't rely on the cached names collection.
<del> namesUpdateLock.lock();
<del> try {
<del> ProjectVisitor visitor = new ProjectVisitor(basePath);
<del> scanProjects(visitor);
<del> return Collections.unmodifiableSortedSet(visitor.found);
<del> } finally {
<del> namesUpdateLock.unlock();
<del> }
<add> ProjectVisitor visitor = new ProjectVisitor(basePath);
<add> scanProjects(visitor);
<add> return Collections.unmodifiableSortedSet(visitor.found);
<ide> }
<ide>
<ide> protected void scanProjects(ProjectVisitor visitor) { |
|
Java | mit | 6452611298e047e6403f6a952f3f910802db6d63 | 0 | mitdbg/modeldb,mitdbg/modeldb,mitdbg/modeldb,mitdbg/modeldb,mitdbg/modeldb | package edu.mit.csail.db.ml.util;
import edu.mit.csail.db.ml.conf.ModelDbConfig;
import com.mongodb.MongoClient;
import com.mongodb.MongoException;
import com.mongodb.WriteConcern;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.DBCursor;
import com.mongodb.ServerAddress;
import java.util.Arrays;
/**
 * This class contains logic for connecting to the MongoDB database.
 */
public class MongoDBContext {
  /**
   * Create a MongoDB database context that reflects a connection to a database.
   *
   * <p>Not implemented yet: the parameters are currently ignored and
   * {@link #main} uses hardcoded values instead.
   *
   * @param username - The username to connect to the database.
   * @param password - The password to connect to the database.
   * @param jdbcUrl - The JDBC URL of the database.
   * @param dbType - The database type.
   */
  public static void create(String username, String password, String jdbcUrl, ModelDbConfig.DatabaseType dbType) {
    // TODO: use the arguments instead of the hardcoded ones
  }

  /**
   * Smoke-test entry point: connects to a local MongoDB instance on port
   * 27017 and inserts one document into the "mycol" collection of the "test"
   * database. Failures are printed to stderr instead of being rethrown.
   *
   * <p>NOTE(review): the client is never closed; acceptable for a throwaway
   * demo, but real code should close it (try-with-resources or finally).
   */
  public static void main( String args[] ) {
    try {
      // To connect to mongodb server
      MongoClient mongoClient = new MongoClient("localhost", 27017);

      // Now connect to your databases
      DB db = mongoClient.getDB("test");
      System.out.println("Connect to database successfully");

      // Authentication is intentionally disabled for the local test
      // instance. NOTE(review): a previous revision carried what looked like
      // real credentials here — never commit credentials, even commented out.
      // boolean auth = db.authenticate("<user>", "<password>");
      // System.out.println("Authentication: " + auth);
      DBCollection coll = db.getCollection("mycol");
      System.out.println("Collection mycol selected successfully");
      BasicDBObject doc = new BasicDBObject("title", "MongoDB").append("description", "database")
          .append("likes", 100).append("url", "test_url");
      coll.insert(doc);
      System.out.println("Document inserted successfully");
    } catch (Exception e) {
      System.err.println(e.getClass().getName() + ": " + e.getMessage());
    }
  }
}
import com.mongodb.MongoClient;
import com.mongodb.MongoException;
import com.mongodb.WriteConcern;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.DBCursor;
import com.mongodb.ServerAddress;
import java.util.Arrays;
/**
* This class contains logic for connecting to the MongoDB database.
*/
public class MongoDBContext {
/**
* Create a MongoDB database context that reflects a connection to a database.
* @param username - The username to connect to the database.
* @param password - The password to connect to the database.
* @param jdbcUrl - The JDBC URL of the database.
* @param dbType - The database type.
* @return The database context.
*/
public static void create(String username, String password, String jdbcUrl, ModelDbConfig.DatabaseType dbType) {
// TODO: use the arguments instead of the hardcoded ones
}
public static void main( String args[] ) {
try {
// To connect to mongodb server
MongoClient mongoClient = new MongoClient("localhost", 27017);
// Now connect to your databases
DB db = mongoClient.getDB("test");
System.out.println("Connect to database successfully");
boolean auth = db.authenticate(myUserName, myPassword);
System.out.println("Authentication: " + auth);
DBCollection coll = db.getCollection("mycol");
System.out.println("Collection mycol selected successfully");
BasicDBObject doc = new BasicDBObject("title", "MongoDB").append("description", "database")
.append("likes", 100).append("url", "test_url");
coll.insert(doc);
System.out.println("Document inserted successfully");
} catch (Exception e) {
System.err.println(e.getClass().getName() + ": " + e.getMessage());
}
}
} | Get MongoDB to connect
| server/src/main/java/edu/mit/csail/db/ml/util/MongoDBContext.java | Get MongoDB to connect | <ide><path>erver/src/main/java/edu/mit/csail/db/ml/util/MongoDBContext.java
<ide> package edu.mit.csail.db.ml.util;
<add>import edu.mit.csail.db.ml.conf.ModelDbConfig;
<ide>
<ide> import com.mongodb.MongoClient;
<ide> import com.mongodb.MongoException;
<ide> DB db = mongoClient.getDB("test");
<ide> System.out.println("Connect to database successfully");
<ide>
<del> boolean auth = db.authenticate(myUserName, myPassword);
<del> System.out.println("Authentication: " + auth);
<add>// boolean auth = db.authenticate("cmao18", "1111");
<add>// System.out.println("Authentication: " + auth);
<ide> DBCollection coll = db.getCollection("mycol");
<ide> System.out.println("Collection mycol selected successfully");
<ide> |
|
JavaScript | mit | 229a9826495665456ff847ffd6b88b43f9ce9be3 | 0 | tbai/resume,tbai/resume | /**
* This is my personal resume, check the link below if you prefer to read it in the
* website or if you need to print a copy.
*
* @link http://resume.tiagobai.com
*/
/**********************************************************************************/
// Full name, rendered as the page's <h2> heading.
let name = "TIAGO XAVIER BAI";
/**********************************************************************************/

// Current role, rendered as an <h5> under the name.
let grad = "Software Designer V at HP Inc.";

// Contact entries, joined with " | "; plain http(s) URLs are turned into
// anchor tags by the buildLinks() helper further down.
let contact = [
    "Porto Alegre Brazil", "+55 51 92184663", "[email protected]"
];

// Profile links, rendered one per line and also auto-linked.
let pages = [
    "https://br.linkedin.com/in/tiagobai"
];

// Free-text introduction, rendered verbatim inside a single <p>.
let summary = `
    I work for HP since 2006 on R&D Projects, writing and designing advanced web
    applications and cloud-based systems. I am a fast and competent Full-Stack
    developer with extensive experience on Web technologies and attention to user
    experience and design.

    I will be moving to Sydney around November with an 189 Visa and I am currently
    looking for new opportunities as Front-End or Full-Stack developer.
`;
/**********************************************************************************/
// WORK HISTORY
/**********************************************************************************/
// Work experience entries, newest first. Rendering notes (see buildHtml below):
//  - 'details' is split on blank lines ("\n\n") into separate <p> paragraphs,
//    so blank lines inside these template literals are significant;
//  - 'technologies' is joined with ", " into a single line.
let workHistory = [];

// HP Inc.
workHistory.push({
    position: "Software Designer V", date: { from: "Nov 2012", to: "Present" },
    company: {
        name: "HP Inc.",
        link: "http://www.hp.com/"
    },
    details: `
        Project Manager and Front End Developer of the Brazil sub-team (10 members)
        working at the HP SureSupply Auto-Delivery platform.

        I joined the project to assume the leadership of the Front End development.
        My first assignment was to travel to Seattle to receive training at a UX
        company that built the first prototype and mock-ups, after that the Front End
        development was transferred to my team at HP Brazil.

        Today I am the Project Manager of the Brazilian team and I also work as a lead
        Front End developer. We are now creating a set of new web applications using
        Angular 2 in order to provide solutions for external partners.
    `,
    technologies: [
        "Javascript", "Backbone", "Angular 2", "React.js", "Grunt/Gulp", "Karma",
        "Node.js", "Express", "C#", "TFS", "Git"
    ]
});

// T&T Engenheiros Associados | HP contractor
workHistory.push({
    position: "Senior Software Engineer", date: { from: "Jun 2009", to: "Oct 2012" },
    company: {
        name: "T&T Engenheiros Associados (HP contractor)",
        link: "http://www.tet.com.br/en/"
    },
    details: `
        Team leader and developer of a Web application designed to make it easy to
        access and print large-format documents using a tablet, smartphone, laptop, or
        printer touchscreen.

        I was the main Front End developer and the technical leader of the development
        team. The project was owned by HP Barcelona where I had the opportunity to
        visit to transfer the technology to a new business unit. My team had around 4
        developers and we built the entire system from scratch since the investigation
        phase until it went to production using agile and continuous deployment
        techniques.
    `,
    technologies: [
        "Grails(Java, Groovy)", "Javascript", "Jenkins", "AWS", "Linux", "Git", "SVN"
    ]
});

// Perto S.A.
workHistory.push({
    position: "Senior Software Engineer", date: { from: "Feb 2009", to: "May 2009" },
    company: {
        name: "Perto S.A.",
        link: "http://www.perto.com.br/en/"
    },
    details: `
        Linux system development on embedded device (POS) at an Electrical/Electronic
        Manufacturing industry.
    `,
    technologies: [
        "Linux", "C++", "C", "Git"
    ]
});

// T&T Engenheiros Associados | HP contractor
workHistory.push({
    position: "Software Engineer", date: { from: "May 2006", to: "Feb 2009" },
    company: {
        name: "T&T Engenheiros Associados (HP contractor)",
        link: "http://www.tet.com.br/en/"
    },
    details: `
        During this period I worked at two different R&D projects for HP Brazil.
        Both projects were sponsored by HP Bristol.

        * Development of single page web application for online document creation using
        Javascript and Java

        * Plugin development for InDesign
    `,
    technologies: [
        "Java", "JSP", "Javascript", "YUI", "MooTools", "XML", "XSLT",
        "InDesign SDK", "C++", "CVS", "SVN"
    ]
});

// PUCRS
workHistory.push({
    position: "Intern Software Engineer", date: { from: "Mar 2004", to: "May 2006" },
    company: {
        name: "PUCRS university (HP research project)",
        link: "http://www.pucrs.br/"
    },
    details: `
        Digital Documents Research in a partnership between HP Brazil and PUCRS university.
    `,
    technologies: [
        "Java", "Javascript"
    ]
});

/**********************************************************************************/
// EDUCATION
/**********************************************************************************/
// Degrees, rendered in a two-column row (title/university | date range).
let educationHistory = [];

educationHistory.push({
    title: "Computer Engineer", date: { from: "Jan 2003", to: "Jan 2008" },
    university: {
        name: "Pontifícia Universidade Católica do Rio Grande do Sul",
        link: "http://www.pucrs.br/"
    }
});

/**********************************************************************************/
// CERTIFICATIONS
/**********************************************************************************/
// Certifications, rendered in a two-column row (name/authority | year).
let certifications = [];

certifications.push({
    name: "Scrum Product Owner", date: "2015",
    authority: "SCRUM ALLIANCE"
});
/**********************************************************************************/
// You can stop here if you are only reading my resume from the code browser...
/**********************************************************************************/
/**********************************************************************************/
/**********************************************************************************/
(function buildHtml(){

    // Wrap every plain http(s) URL in an anchor tag.
    const linkify = (text) =>
        text.replace(/http(s)?\:[^\s]+/g,
            (match) => `<a href="${match}" target="_blank">${match}</a>`);

    // Append one grid row to <main>, optionally filled with markup.
    const appendRow = (html) => {
        const row = document.createElement("div");
        row.classList.add("row");
        if (html) {
            row.innerHTML = html;
        }
        document.querySelector("main").appendChild(row);
    };

    // Section heading followed by a horizontal rule.
    const appendSubtitle = (text, classStr) => {
        appendRow(`<br><h3 class='no-margin ${classStr || ''}'>${text}</h3><hr class='no-margin'><br>`);
    };

    // name and contact
    appendRow(`
        <h2 class="no-margin">${name}</h2>
        <h5>${grad}</h5>
        <p>
            ${contact.map(linkify).join(" | ")}<br>
            ${pages.map(linkify).join('<br>')}
        </p>
        <p>${summary}</p>
    `);

    // work history
    appendSubtitle("Work History");
    for (const [idx, job] of workHistory.entries()) {
        appendRow(`
            <div class="eight columns">
                <h5 class="no-margin">${job.position}</h5>
                <p><a target="_blank" href="${job.company.link}">${job.company.name}</a></p>
            </div>
            <div class="four columns text-right">
                <h5>${job.date.from} - ${job.date.to}</h5>
            </div>
        `);
        for (const paragraph of job.details.split("\n\n")) {
            appendRow(`<p>${paragraph}</p>`);
        }
        appendRow(`
            <p>Technologies: <em>${job.technologies.join(', ')}</em></p>
            ${idx < workHistory.length-1 ? '<br>':''}
        `);
    }

    // education
    appendSubtitle("Education", 'page-break');
    for (const education of educationHistory) {
        appendRow(`
            <div class="eight columns">
                <h5 class="no-margin">${education.title}</h5>
                <p><a target="_blank" href="${education.university.link}">${education.university.name}</a></p>
            </div>
            <div class="four columns text-right">
                <h5>${education.date.from} - ${education.date.to}</h5>
            </div>
        `);
    }

    // certifications
    appendSubtitle("Certifications");
    for (const cert of certifications) {
        appendRow(`
            <div class="eight columns">
                <h5 class="no-margin">${cert.name}</h5>
                <p>${cert.authority}</p>
            </div>
            <div class="four columns text-right">
                <h5>${cert.date}</h5>
            </div>
        `);
    }
})();
* This is my personal resume, check the link below if you prefer to read it in the
* website or if you need to print a copy.
*
* @link http://resume.tiagobai.com
*/
/**********************************************************************************/
// Full name, rendered as the page's <h2> heading.
let name = "TIAGO XAVIER BAI";
/**********************************************************************************/

// Current role, rendered as an <h5> under the name.
let grad = "Software Designer V at HP Inc.";

// Contact entries, joined with " | "; plain http(s) URLs are turned into
// anchor tags by the buildLinks() helper further down.
let contact = [
    "Porto Alegre Brazil", "+55 51 92184663", "[email protected]"
];

// Profile links, rendered one per line and also auto-linked.
let pages = [
    "https://br.linkedin.com/in/tiagobai"
];

// Free-text introduction, rendered verbatim inside a single <p>.
let summary = `
    Software Engineer specialized in Front End development.
    I work for HP since 2006 on R&D Projects, writing and designing advanced web
    applications and cloud-based systems. I am a fast and competent Full Stack
    developer with extensive experience on Web technologies and attention to user
    experience and design.
`;
/**********************************************************************************/
// WORK HISTORY
/**********************************************************************************/
// Work experience entries, newest first. Rendering notes (see buildHtml below):
//  - 'details' is split on blank lines ("\n\n") into separate <p> paragraphs,
//    so blank lines inside these template literals are significant;
//  - 'technologies' is joined with ", " into a single line.
let workHistory = [];

// HP Inc.
workHistory.push({
    position: "Software Designer V", date: { from: "Nov 2012", to: "Present" },
    company: {
        name: "HP Inc.",
        link: "http://www.hp.com/"
    },
    details: `
        Project Manager and Front End Developer of the Brazil sub-team (10 members)
        working at the HP SureSupply Auto-Delivery platform.

        I joined the project to assume the leadership of the Front End development.
        My first assignment was to travel to Seattle to receive training at a UX
        company that built the first prototype and mock-ups, after that the Front End
        development was transferred to my team at HP Brazil.

        Today I am the Project Manager of the Brazilian team and I also work as a lead
        Front End developer. We are now creating a set of new web applications using
        Angular 2 in order to provide solutions for external partners.
    `,
    technologies: [
        "Javascript", "Backbone", "Angular 2", "React.js", "Grunt/Gulp", "Karma",
        "Node.js", "Express", "C#", "TFS", "Git"
    ]
});

// T&T Engenheiros Associados | HP contractor
workHistory.push({
    position: "Senior Software Engineer", date: { from: "Jun 2009", to: "Oct 2012" },
    company: {
        name: "T&T Engenheiros Associados (HP contractor)",
        link: "http://www.tet.com.br/en/"
    },
    details: `
        Team leader and developer of a Web application designed to make it easy to
        access and print large-format documents using a tablet, smartphone, laptop, or
        printer touchscreen.

        I was the main Front End developer and the technical leader of the development
        team. The project was owned by HP Barcelona where I had the opportunity to
        visit to transfer the technology to a new business unit. My team had around 4
        developers and we built the entire system from scratch since the investigation
        phase until it went to production using agile and continuous deployment
        techniques.
    `,
    technologies: [
        "Grails(Java, Groovy)", "Javascript", "Jenkins", "AWS", "Linux", "Git", "SVN"
    ]
});

// Perto S.A.
workHistory.push({
    position: "Senior Software Engineer", date: { from: "Feb 2009", to: "May 2009" },
    company: {
        name: "Perto S.A.",
        link: "http://www.perto.com.br/en/"
    },
    details: `
        Linux system development on embedded device (POS) at an Electrical/Electronic
        Manufacturing industry.
    `,
    technologies: [
        "Linux", "C++", "C", "Git"
    ]
});

// T&T Engenheiros Associados | HP contractor
workHistory.push({
    position: "Software Engineer", date: { from: "May 2006", to: "Feb 2009" },
    company: {
        name: "T&T Engenheiros Associados (HP contractor)",
        link: "http://www.tet.com.br/en/"
    },
    details: `
        During this period I worked at two different R&D projects for HP Brazil.
        Both projects were sponsored by HP Bristol.

        * Development of single page web application for online document creation using
        Javascript and Java

        * Plugin development for InDesign
    `,
    technologies: [
        "Java", "JSP", "Javascript", "YUI", "MooTools", "XML", "XSLT",
        "InDesign SDK", "C++", "CVS", "SVN"
    ]
});

// PUCRS
workHistory.push({
    position: "Intern Software Engineer", date: { from: "Mar 2004", to: "May 2006" },
    company: {
        name: "PUCRS university (HP research project)",
        link: "http://www.pucrs.br/"
    },
    details: `
        Digital Documents Research in a partnership between HP Brazil and PUCRS university.
    `,
    technologies: [
        "Java", "Javascript"
    ]
});

/**********************************************************************************/
// EDUCATION
/**********************************************************************************/
// Degrees, rendered in a two-column row (title/university | date range).
let educationHistory = [];

educationHistory.push({
    title: "Computer Engineer", date: { from: "Jan 2003", to: "Jan 2008" },
    university: {
        name: "Pontifícia Universidade Católica do Rio Grande do Sul",
        link: "http://www.pucrs.br/"
    }
});

/**********************************************************************************/
// CERTIFICATIONS
/**********************************************************************************/
// Certifications, rendered in a two-column row (name/authority | year).
let certifications = [];

certifications.push({
    name: "Scrum Product Owner", date: "2015",
    authority: "SCRUM ALLIANCE"
});
/**********************************************************************************/
// You can stop here if you are only reading my resume from the code browser...
/**********************************************************************************/
/**********************************************************************************/
/**********************************************************************************/
(function buildHtml(){

    // Wrap every plain http(s) URL in an anchor tag.
    const linkify = (text) =>
        text.replace(/http(s)?\:[^\s]+/g,
            (match) => `<a href="${match}" target="_blank">${match}</a>`);

    // Append one grid row to <main>, optionally filled with markup.
    const appendRow = (html) => {
        const row = document.createElement("div");
        row.classList.add("row");
        if (html) {
            row.innerHTML = html;
        }
        document.querySelector("main").appendChild(row);
    };

    // Section heading followed by a horizontal rule.
    const appendSubtitle = (text, classStr) => {
        appendRow(`<br><h3 class='no-margin ${classStr || ''}'>${text}</h3><hr class='no-margin'><br>`);
    };

    // name and contact
    appendRow(`
        <h2 class="no-margin">${name}</h2>
        <h5>${grad}</h5>
        <p>
            ${contact.map(linkify).join(" | ")}<br>
            ${pages.map(linkify).join('<br>')}
        </p>
        <p>${summary}</p>
    `);

    // work history
    appendSubtitle("Work History");
    for (const [idx, job] of workHistory.entries()) {
        appendRow(`
            <div class="eight columns">
                <h5 class="no-margin">${job.position}</h5>
                <p><a target="_blank" href="${job.company.link}">${job.company.name}</a></p>
            </div>
            <div class="four columns text-right">
                <h5>${job.date.from} - ${job.date.to}</h5>
            </div>
        `);
        for (const paragraph of job.details.split("\n\n")) {
            appendRow(`<p>${paragraph}</p>`);
        }
        appendRow(`
            <p>Technologies: <em>${job.technologies.join(', ')}</em></p>
            ${idx < workHistory.length-1 ? '<br>':''}
        `);
    }

    // education
    appendSubtitle("Education", 'page-break');
    for (const education of educationHistory) {
        appendRow(`
            <div class="eight columns">
                <h5 class="no-margin">${education.title}</h5>
                <p><a target="_blank" href="${education.university.link}">${education.university.name}</a></p>
            </div>
            <div class="four columns text-right">
                <h5>${education.date.from} - ${education.date.to}</h5>
            </div>
        `);
    }

    // certifications
    appendSubtitle("Certifications");
    for (const cert of certifications) {
        appendRow(`
            <div class="eight columns">
                <h5 class="no-margin">${cert.name}</h5>
                <p>${cert.authority}</p>
            </div>
            <div class="four columns text-right">
                <h5>${cert.date}</h5>
            </div>
        `);
    }
})();
| js/resume.js | Updating summary | <ide><path>s/resume.js
<ide> ];
<ide>
<ide> let summary = `
<del> Software Engineer specialized in Front End development.
<ide> I work for HP since 2006 on R&D Projects, writing and designing advanced web
<del> applications and cloud-based systems. I am a fast and competent Full Stack
<add> applications and cloud-based systems. I am a fast and competent Full-Stack
<ide> developer with extensive experience on Web technologies and attention to user
<ide> experience and design.
<add>
<add> I will be moving to Sydney around November with an 189 Visa and I am currently
<add> looking for new opportunities as Front-End or Full-Stack developer.
<ide> `;
<ide>
<ide> |
|
Java | apache-2.0 | 8bb2aa89836f2f465c6cea7dad5aed32c676ee2a | 0 | dain/presto,losipiuk/presto,losipiuk/presto,11xor6/presto,Praveen2112/presto,ebyhr/presto,treasure-data/presto,smartnews/presto,losipiuk/presto,treasure-data/presto,Praveen2112/presto,11xor6/presto,martint/presto,electrum/presto,treasure-data/presto,erichwang/presto,hgschmie/presto,11xor6/presto,losipiuk/presto,martint/presto,Praveen2112/presto,electrum/presto,dain/presto,smartnews/presto,erichwang/presto,erichwang/presto,ebyhr/presto,losipiuk/presto,martint/presto,hgschmie/presto,ebyhr/presto,treasure-data/presto,Praveen2112/presto,dain/presto,electrum/presto,electrum/presto,erichwang/presto,dain/presto,hgschmie/presto,11xor6/presto,hgschmie/presto,erichwang/presto,Praveen2112/presto,hgschmie/presto,ebyhr/presto,dain/presto,ebyhr/presto,smartnews/presto,11xor6/presto,electrum/presto,martint/presto,smartnews/presto,martint/presto,treasure-data/presto,smartnews/presto,treasure-data/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.tests;
import com.google.common.collect.ImmutableMap;
import io.prestosql.Session;
import io.prestosql.connector.CatalogName;
import io.prestosql.metadata.SessionPropertyManager;
import io.prestosql.plugin.tpch.TpchConnectorFactory;
import io.prestosql.spi.type.Type;
import io.prestosql.testing.LocalQueryRunner;
import io.prestosql.testing.MaterializedResult;
import io.prestosql.testing.TestingAccessControlManager;
import org.intellij.lang.annotations.Language;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import static io.prestosql.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
import static io.prestosql.testing.TestingSession.TESTING_CATALOG;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
/**
 * Runs the shared {@link AbstractTestQueries} suite, but checks query
 * <em>planning</em> rather than query results: every SQL statement the suite
 * produces is handed to {@link PlanDeterminismChecker}, which plans it
 * repeatedly and asserts that the resulting plans are identical.
 *
 * <p>To that end the assertQuery/assertUpdate hooks from the base class are
 * overridden: most run only the determinism check (their expected results and
 * row counts are intentionally unused), the computeActual/computeExpected
 * hooks additionally execute the query because callers consume their results,
 * and the access-control assertions are no-ops since access control does not
 * influence planning.
 */
public class TestQueryPlanDeterminism
        extends AbstractTestQueries
{
    // Checker shared by all test methods; created in setUp.
    private PlanDeterminismChecker determinismChecker;

    protected TestQueryPlanDeterminism()
    {
        super(TestQueryPlanDeterminism::createLocalQueryRunner);
    }

    @BeforeClass
    public void setUp()
    {
        determinismChecker = new PlanDeterminismChecker((LocalQueryRunner) getQueryRunner());
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
    {
        // Release the checker (and its reference to the query runner).
        determinismChecker = null;
    }

    /**
     * Builds a single-node runner over the tiny TPC-H schema, registering the
     * custom functions and the test session/catalog properties that the
     * inherited test queries rely on.
     */
    private static LocalQueryRunner createLocalQueryRunner()
    {
        Session defaultSession = testSessionBuilder()
                .setCatalog("local")
                .setSchema(TINY_SCHEMA_NAME)
                .build();

        LocalQueryRunner localQueryRunner = new LocalQueryRunner(defaultSession);

        // add the tpch catalog
        // local queries run directly against the generator
        localQueryRunner.createCatalog(
                defaultSession.getCatalog().get(),
                new TpchConnectorFactory(1),
                ImmutableMap.of());

        localQueryRunner.getMetadata().addFunctions(CUSTOM_FUNCTIONS);
        SessionPropertyManager sessionPropertyManager = localQueryRunner.getMetadata().getSessionPropertyManager();
        sessionPropertyManager.addSystemSessionProperties(TEST_SYSTEM_PROPERTIES);
        sessionPropertyManager.addConnectorSessionProperties(new CatalogName(TESTING_CATALOG), TEST_CATALOG_PROPERTIES);

        return localQueryRunner;
    }

    // computeActual is used by callers that consume the result, so it checks
    // plan determinism first and then actually executes the query.
    @Override
    protected MaterializedResult computeActual(@Language("SQL") String sql)
    {
        determinismChecker.checkPlanIsDeterministic(sql);
        return super.computeActual(sql);
    }

    @Override
    protected MaterializedResult computeActual(Session session, @Language("SQL") String sql)
    {
        determinismChecker.checkPlanIsDeterministic(session, sql);
        return super.computeActual(session, sql);
    }

    // The overrides below only run the determinism check; the expected SQL
    // and expected row counts passed by the base class are intentionally
    // unused here.
    @Override
    protected void assertQuery(@Language("SQL") String sql)
    {
        determinismChecker.checkPlanIsDeterministic(sql);
    }

    @Override
    protected void assertQuery(Session session, @Language("SQL") String sql)
    {
        determinismChecker.checkPlanIsDeterministic(session, sql);
    }

    @Override
    protected void assertQueryOrdered(@Language("SQL") String sql)
    {
        determinismChecker.checkPlanIsDeterministic(sql);
    }

    @Override
    protected void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected)
    {
        determinismChecker.checkPlanIsDeterministic(actual);
    }

    @Override
    protected void assertQuery(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
    {
        determinismChecker.checkPlanIsDeterministic(session, actual);
    }

    @Override
    protected void assertQueryOrdered(@Language("SQL") String actual, @Language("SQL") String expected)
    {
        determinismChecker.checkPlanIsDeterministic(actual);
    }

    @Override
    protected void assertQueryOrdered(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
    {
        determinismChecker.checkPlanIsDeterministic(session, actual);
    }

    @Override
    protected void assertUpdate(@Language("SQL") String actual, @Language("SQL") String expected)
    {
        determinismChecker.checkPlanIsDeterministic(actual);
    }

    @Override
    protected void assertUpdate(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
    {
        determinismChecker.checkPlanIsDeterministic(session, actual);
    }

    @Override
    protected void assertUpdate(@Language("SQL") String sql)
    {
        determinismChecker.checkPlanIsDeterministic(sql);
    }

    @Override
    protected void assertUpdate(Session session, @Language("SQL") String sql)
    {
        determinismChecker.checkPlanIsDeterministic(session, sql);
    }

    @Override
    protected void assertUpdate(@Language("SQL") String sql, long count)
    {
        determinismChecker.checkPlanIsDeterministic(sql);
    }

    @Override
    protected void assertUpdate(Session session, @Language("SQL") String sql, long count)
    {
        determinismChecker.checkPlanIsDeterministic(session, sql);
    }

    // Failure assertions delegate to the base class unchanged; no determinism
    // check is performed for queries that are expected to fail.
    @Override
    protected void assertQueryFails(@Language("SQL") String sql, @Language("RegExp") String expectedMessageRegExp)
    {
        super.assertQueryFails(sql, expectedMessageRegExp);
    }

    @Override
    protected void assertQueryFails(Session session, @Language("SQL") String sql, @Language("RegExp") String expectedMessageRegExp)
    {
        super.assertQueryFails(session, sql, expectedMessageRegExp);
    }

    // Access-control assertions are deliberately no-ops: access control has no
    // bearing on plan determinism.
    @Override
    protected void assertAccessAllowed(@Language("SQL") String sql, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
    {
    }

    @Override
    protected void assertAccessAllowed(Session session, @Language("SQL") String sql, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
    {
    }

    @Override
    protected void assertAccessDenied(@Language("SQL") String sql, @Language("RegExp") String exceptionsMessageRegExp, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
    {
    }

    @Override
    protected void assertAccessDenied(Session session, @Language("SQL") String sql, @Language("RegExp") String exceptionsMessageRegExp, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
    {
    }

    // No query is planned here, so there is nothing to check.
    @Override
    protected void assertTableColumnNames(String tableName, String... columnNames)
    {
    }

    // The "expected" side of the base class's comparisons is also planned, so
    // check its determinism too before materializing the rows.
    @Override
    protected MaterializedResult computeExpected(@Language("SQL") String sql, List<? extends Type> resultTypes)
    {
        determinismChecker.checkPlanIsDeterministic(sql);
        return super.computeExpected(sql, resultTypes);
    }

    @Test
    public void testTpchQ9deterministic()
    {
        //This uses a modified version of TPC-H Q9, because the tpch connector uses non-standard column names
        determinismChecker.checkPlanIsDeterministic("SELECT\n" +
                " nation,\n" +
                " o_year,\n" +
                " sum(amount) AS sum_profit\n" +
                "FROM (\n" +
                " SELECT\n" +
                " n.name AS nation,\n" +
                " extract(YEAR FROM o.orderdate) AS o_year,\n" +
                " l.extendedprice * (1 - l.discount) - ps.supplycost * l.quantity AS amount\n" +
                " FROM\n" +
                " part p,\n" +
                " supplier s,\n" +
                " lineitem l,\n" +
                " partsupp ps,\n" +
                " orders o,\n" +
                " nation n\n" +
                " WHERE\n" +
                " s.suppkey = l.suppkey\n" +
                " AND ps.suppkey = l.suppkey\n" +
                " AND ps.partkey = l.partkey\n" +
                " AND p.partkey = l.partkey\n" +
                " AND o.orderkey = l.orderkey\n" +
                " AND s.nationkey = n.nationkey\n" +
                " AND p.name LIKE '%green%'\n" +
                " ) AS profit\n" +
                "GROUP BY\n" +
                " nation,\n" +
                " o_year\n" +
                "ORDER BY\n" +
                " nation,\n" +
                " o_year DESC\n");
    }

    @Test
    public void testTpcdsQ6deterministic()
    {
        //This is a query inspired on TPC-DS Q6 that reproduces its plan nondeterminism problems
        determinismChecker.checkPlanIsDeterministic("SELECT orderdate " +
                "FROM orders o,\n" +
                " lineitem i\n" +
                "WHERE o.orderdate =\n" +
                " (SELECT DISTINCT (orderdate)\n" +
                " FROM orders\n" +
                " WHERE totalprice > 2)\n" +
                " AND i.quantity > 1.2 *\n" +
                " (SELECT avg(j.quantity)\n" +
                " FROM lineitem j\n" +
                " )\n");
    }

    @Override
    public void testLargeIn()
    {
        // testLargeIn is expensive
        throw new SkipException("Skipping testLargeIn");
    }
}
| presto-tests/src/test/java/io/prestosql/tests/TestQueryPlanDeterminism.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.tests;
import com.google.common.collect.ImmutableMap;
import io.prestosql.Session;
import io.prestosql.connector.CatalogName;
import io.prestosql.metadata.SessionPropertyManager;
import io.prestosql.plugin.tpch.TpchConnectorFactory;
import io.prestosql.spi.type.Type;
import io.prestosql.testing.LocalQueryRunner;
import io.prestosql.testing.MaterializedResult;
import io.prestosql.testing.TestingAccessControlManager;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import static io.prestosql.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
import static io.prestosql.testing.TestingSession.TESTING_CATALOG;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
public class TestQueryPlanDeterminism
extends AbstractTestQueries
{
private PlanDeterminismChecker determinismChecker;
protected TestQueryPlanDeterminism()
{
super(TestQueryPlanDeterminism::createLocalQueryRunner);
}
@BeforeClass
public void setUp()
{
determinismChecker = new PlanDeterminismChecker((LocalQueryRunner) getQueryRunner());
}
@AfterClass(alwaysRun = true)
public void tearDown()
{
determinismChecker = null;
}
private static LocalQueryRunner createLocalQueryRunner()
{
Session defaultSession = testSessionBuilder()
.setCatalog("local")
.setSchema(TINY_SCHEMA_NAME)
.build();
LocalQueryRunner localQueryRunner = new LocalQueryRunner(defaultSession);
// add the tpch catalog
// local queries run directly against the generator
localQueryRunner.createCatalog(
defaultSession.getCatalog().get(),
new TpchConnectorFactory(1),
ImmutableMap.of());
localQueryRunner.getMetadata().addFunctions(CUSTOM_FUNCTIONS);
SessionPropertyManager sessionPropertyManager = localQueryRunner.getMetadata().getSessionPropertyManager();
sessionPropertyManager.addSystemSessionProperties(TEST_SYSTEM_PROPERTIES);
sessionPropertyManager.addConnectorSessionProperties(new CatalogName(TESTING_CATALOG), TEST_CATALOG_PROPERTIES);
return localQueryRunner;
}
@Override
protected MaterializedResult computeActual(@Language("SQL") String sql)
{
determinismChecker.checkPlanIsDeterministic(sql);
return super.computeActual(sql);
}
@Override
protected MaterializedResult computeActual(Session session, @Language("SQL") String sql)
{
determinismChecker.checkPlanIsDeterministic(session, sql);
return super.computeActual(session, sql);
}
@Override
protected void assertQuery(@Language("SQL") String sql)
{
determinismChecker.checkPlanIsDeterministic(sql);
}
@Override
protected void assertQuery(Session session, @Language("SQL") String sql)
{
determinismChecker.checkPlanIsDeterministic(session, sql);
}
@Override
protected void assertQueryOrdered(@Language("SQL") String sql)
{
determinismChecker.checkPlanIsDeterministic(sql);
}
@Override
protected void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected)
{
determinismChecker.checkPlanIsDeterministic(actual);
}
@Override
protected void assertQuery(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
{
determinismChecker.checkPlanIsDeterministic(session, actual);
}
@Override
protected void assertQueryOrdered(@Language("SQL") String actual, @Language("SQL") String expected)
{
determinismChecker.checkPlanIsDeterministic(actual);
}
@Override
protected void assertQueryOrdered(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
{
determinismChecker.checkPlanIsDeterministic(session, actual);
}
@Override
protected void assertUpdate(@Language("SQL") String actual, @Language("SQL") String expected)
{
determinismChecker.checkPlanIsDeterministic(actual);
}
@Override
protected void assertUpdate(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
{
determinismChecker.checkPlanIsDeterministic(session, actual);
}
@Override
protected void assertUpdate(@Language("SQL") String sql)
{
determinismChecker.checkPlanIsDeterministic(sql);
}
@Override
protected void assertUpdate(Session session, @Language("SQL") String sql)
{
determinismChecker.checkPlanIsDeterministic(session, sql);
}
@Override
protected void assertUpdate(@Language("SQL") String sql, long count)
{
determinismChecker.checkPlanIsDeterministic(sql);
}
@Override
protected void assertUpdate(Session session, @Language("SQL") String sql, long count)
{
determinismChecker.checkPlanIsDeterministic(session, sql);
}
@Override
protected void assertQueryFails(@Language("SQL") String sql, @Language("RegExp") String expectedMessageRegExp)
{
super.assertQueryFails(sql, expectedMessageRegExp);
}
@Override
protected void assertQueryFails(Session session, @Language("SQL") String sql, @Language("RegExp") String expectedMessageRegExp)
{
super.assertQueryFails(session, sql, expectedMessageRegExp);
}
@Override
protected void assertAccessAllowed(@Language("SQL") String sql, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
{
}
@Override
protected void assertAccessAllowed(Session session, @Language("SQL") String sql, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
{
}
@Override
protected void assertAccessDenied(@Language("SQL") String sql, @Language("RegExp") String exceptionsMessageRegExp, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
{
}
@Override
protected void assertAccessDenied(Session session, @Language("SQL") String sql, @Language("RegExp") String exceptionsMessageRegExp, TestingAccessControlManager.TestingPrivilege... deniedPrivileges)
{
}
@Override
protected void assertTableColumnNames(String tableName, String... columnNames)
{
}
@Override
protected MaterializedResult computeExpected(@Language("SQL") String sql, List<? extends Type> resultTypes)
{
determinismChecker.checkPlanIsDeterministic(sql);
return super.computeExpected(sql, resultTypes);
}
@Test
public void testTpchQ9deterministic()
{
//This uses a modified version of TPC-H Q9, because the tpch connector uses non-standard column names
determinismChecker.checkPlanIsDeterministic("SELECT\n" +
" nation,\n" +
" o_year,\n" +
" sum(amount) AS sum_profit\n" +
"FROM (\n" +
" SELECT\n" +
" n.name AS nation,\n" +
" extract(YEAR FROM o.orderdate) AS o_year,\n" +
" l.extendedprice * (1 - l.discount) - ps.supplycost * l.quantity AS amount\n" +
" FROM\n" +
" part p,\n" +
" supplier s,\n" +
" lineitem l,\n" +
" partsupp ps,\n" +
" orders o,\n" +
" nation n\n" +
" WHERE\n" +
" s.suppkey = l.suppkey\n" +
" AND ps.suppkey = l.suppkey\n" +
" AND ps.partkey = l.partkey\n" +
" AND p.partkey = l.partkey\n" +
" AND o.orderkey = l.orderkey\n" +
" AND s.nationkey = n.nationkey\n" +
" AND p.name LIKE '%green%'\n" +
" ) AS profit\n" +
"GROUP BY\n" +
" nation,\n" +
" o_year\n" +
"ORDER BY\n" +
" nation,\n" +
" o_year DESC\n");
}
@Test
public void testTpcdsQ6deterministic()
{
//This is a query inspired on TPC-DS Q6 that reproduces its plan nondeterminism problems
determinismChecker.checkPlanIsDeterministic("SELECT orderdate " +
"FROM orders o,\n" +
" lineitem i\n" +
"WHERE o.orderdate =\n" +
" (SELECT DISTINCT (orderdate)\n" +
" FROM orders\n" +
" WHERE totalprice > 2)\n" +
" AND i.quantity > 1.2 *\n" +
" (SELECT avg(j.quantity)\n" +
" FROM lineitem j\n" +
" )\n");
}
}
| Disable testLargeIn in plan determinism tester
This test is inherently expensive. Running it multiple times
causes build times to increased significantly.
| presto-tests/src/test/java/io/prestosql/tests/TestQueryPlanDeterminism.java | Disable testLargeIn in plan determinism tester | <ide><path>resto-tests/src/test/java/io/prestosql/tests/TestQueryPlanDeterminism.java
<ide> import io.prestosql.testing.MaterializedResult;
<ide> import io.prestosql.testing.TestingAccessControlManager;
<ide> import org.intellij.lang.annotations.Language;
<add>import org.testng.SkipException;
<ide> import org.testng.annotations.AfterClass;
<ide> import org.testng.annotations.BeforeClass;
<ide> import org.testng.annotations.Test;
<ide> " FROM lineitem j\n" +
<ide> " )\n");
<ide> }
<add>
<add> @Override
<add> public void testLargeIn()
<add> {
<add> // testLargeIn is expensive
<add> throw new SkipException("Skipping testLargeIn");
<add> }
<ide> } |
|
JavaScript | agpl-3.0 | 826adc3a88ea7a9d10c4a460e650abb69a7760ae | 0 | timelapseplus/VIEW,timelapseplus/VIEW,timelapseplus/VIEW,timelapseplus/VIEW,timelapseplus/VIEW | var EventEmitter = require("events").EventEmitter;
var exec = require('child_process').exec;
require('rootpath')();
var camera = require('camera/camera.js');
var db = require('system/db.js');
var motion = require('motion/motion.js');
var image = require('camera/image/image.js');
var exp = require('intervalometer/exposure.js');
var interpolate = require('intervalometer/interpolate.js');
var fs = require('fs');
var async = require('async');
var TLROOT = "/root/time-lapse";
var Button = require('gpio-button');
var gpio = require('linux-gpio');
var _ = require('underscore');
var suncalc = require('suncalc');
var AUXTIP_OUT = 111;
var AUXRING_OUT = 110;
var HOTSHOE_IN = 34;
gpio.setMode(gpio.MODE_RAW);
gpio.setup(AUXTIP_OUT, gpio.DIR_OUT, function(err){
if(err) console.log("GPIO error: ", err);
gpio.write(AUXTIP_OUT, 1);
});
gpio.setup(AUXRING_OUT, gpio.DIR_OUT, function(err){
if(err) console.log("GPIO error: ", err);
gpio.write(AUXRING_OUT, 1);
});
gpio.setup(HOTSHOE_IN, gpio.DIR_IN, function(err){
if(err) console.log("GPIO error: ", err);
});
var intervalometer = new EventEmitter();
intervalometer.db = db;
var timerHandle = null;
var delayHandle = null;
var rate = 0;
intervalometer.autoSettings = {
paddingTimeMs: 2000
}
intervalometer.timelapseFolder = false;
status = {
running: false,
frames: 0,
framesRemaining: 0,
rampRate: 0,
intervalMs: 0,
message: "",
rampEv: null,
autoSettings: {
paddingTimeMs: 2000
}
}
intervalometer.status = status;
var auxTrigger = new Button('input-aux2');
auxTrigger.on('press', function() {
console.log("AUX2 trigger!");
if (status.running && intervalometer.currentProgram.intervalMode == 'aux') timerHandle = setTimeout(runPhoto, 0);
});
auxTrigger.on('error', function(err) {
console.log("AUX2 error: ", err);
});
function motionSyncPulse() {
if (status.running && intervalometer.currentProgram.intervalMode != 'aux') {
gpio.read(HOTSHOE_IN, function(err, shutterClosed) {
console.log("hotshoe:", shutterClosed);
if(shutterClosed) {
console.log("=> AUX Pulse");
gpio.write(AUXTIP_OUT, 0, function() {
setTimeout(function(){
gpio.write(AUXTIP_OUT, 1);
}, 200);
});
} else {
setTimeout(motionSyncPulse, 100);
}
});
}
}
function fileInit() {
fs.writeFileSync(status.timelapseFolder + "/details.csv", "frame, error, target, setting, rate, interval, timestamp, file, p, i, d\n");
}
function writeFile() {
fs.appendFileSync(status.timelapseFolder + "/details.csv", status.frames + ", " + status.evDiff + "," + exp.status.targetEv + "," + status.rampEv + "," + exp.status.rate + "," + (status.intervalMs / 1000) + "," + status.lastPhotoTime + "," + status.path + "," + exp.status.pComponent + "," + exp.status.iComponent + "," + exp.status.dComponent + "\n");
//image.writeXMP(name, status.evDiff);
}
function getDetails(file) {
var d = {
frames: status.frames,
evCorrection: status.evDiff,
targetEv: exp.status.targetEv,
actualEv: status.rampEv,
cameraEv: status.cameraEv,
rampRate: exp.status.rate,
intervalMs: status.intervalMs,
timestamp: status.lastPhotoTime,
fileName: file || status.path,
p: exp.status.pComponent,
i: exp.status.iComponent,
d: exp.status.dComponent,
};
if(intervalometer.gpsData) {
d.latitude = intervalometer.gpsData.lat;
d.longitude = intervalometer.gpsData.lon;
d.sunPos = suncalc.getPosition(new Date(), d.latitude, d.longitude);
d.moonPos = suncalc.getMoonPosition(new Date(), d.latitude, d.longitude);
d.moonIllumination = suncalc.getMoonIllumination(new Date());
}
return d;
}
var startShutterEv = -1;
function calculateIntervalMs(interval, currentEv) {
var dayEv = 8;
var nightEv = -2;
if (intervalometer.currentProgram.intervalMode == 'fixed') {
return interval * 1000;
} else {
var newInterval = interpolate.linear([{
x: dayEv,
y: parseInt(intervalometer.currentProgram.dayInterval)
}, {
x: nightEv,
y: parseInt(intervalometer.currentProgram.nightInterval)
}], currentEv);
return newInterval * 1000;
}
}
function doKeyframeAxis(axisName, axisSubIndex, setupFirst, interpolationMethod, motionFunction) {
if(interpolationMethod != 'catmullRomSpline') interpolationMethod = 'linear';
var keyframes = intervalometer.currentProgram.keyframes;
if (status.running && keyframes && keyframes.length > 0 && keyframes[0][axisName] != null) {
var kfSet = null;
var kfCurrent = null;
if (setupFirst) {
keyframes[0].seconds = 0;
if(axisSubIndex != null) {
keyframes[0][axisName][axisSubIndex] = 0;
} else {
keyframes[0][axisName] = 0;
}
kfSet = 0;
} else {
var secondsSinceStart = status.lastPhotoTime + (status.intervalMs / 1000);
console.log("KF: Seconds since last: " + secondsSinceStart);
var totalSeconds = 0;
kfPoints = keyframes.map(function(kf) {
totalSeconds += kf.seconds;
if(axisSubIndex != null) {
return {
x: totalSeconds,
y: kf[axisName][axisSubIndex] || 0
}
} else {
return {
x: totalSeconds,
y: kf[axisName] || 0
}
}
});
kfSet = interpolate[interpolationMethod](kfPoints, secondsSinceStart);
console.log("FK: " + axisName + " target: " + kfSet);
}
var axisNameExtension = '';
if(axisSubIndex != null) axisNameExtension = '-' + axisSubIndex;
kfCurrent = intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'];
if (kfCurrent == null) {
motionFunction(kfSet); // absolute setting (like ev)
} else {
var precision = 10000; // limit precision to ensure we hit even values
var kfTarget = Math.round(kfSet * precision) / precision;
if (kfTarget != Math.round(intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'] * precision) / precision) {
var relativeMove = kfTarget - intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'];
motionFunction(relativeMove);
intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'] = kfTarget;
} else {
if (motionFunction) motionFunction();
}
}
} else {
if (motionFunction) motionFunction();
}
}
function processKeyframes(setupFirst, callback) {
var numAxes = 2;
var axesDone = 0;
if(intervalometer.currentProgram.keyframes && intervalometer.currentProgram.keyframes.length > 0 && intervalometer.currentProgram.keyframes[0].motor) {
for(motorId in intervalometer.currentProgram.keyframes[0].motor) numAxes++;
}
var checkDone = function() {
axesDone++;
console.log("KF: " + axesDone + " keyframe items complete");
if (axesDone >= numAxes && callback) {
console.log("KF: keyframes complete, running callback");
callback();
}
}
doKeyframeAxis('ev', null, setupFirst, 'linear', function(ev) {
//if (ev != null && camera.settings.ev != ev) camera.setEv(ev);
checkDone();
});
doKeyframeAxis('focus', null, setupFirst, 'linear', function(focus) {
if (focus) {
camera.ptp.preview(function() {
setTimeout(function() {
console.log("KF: Moving focus by " + focus + " steps");
var dir = focus > 0 ? 1 : -1;
var steps = Math.abs(focus);
camera.ptp.focus(dir, steps, function() {
setTimeout(function(){
camera.ptp.lvOff(function(){
setTimeout(checkDone, 500);
});
}, 500);
});
}, 1000);
});
} else {
checkDone();
}
});
if(intervalometer.currentProgram.keyframes && intervalometer.currentProgram.keyframes.length > 0 && intervalometer.currentProgram.keyframes[0].motor) for(motorId in intervalometer.currentProgram.keyframes[0].motor) {
doKeyframeAxis('motor', motorId, setupFirst, 'catmullRomSpline', function(move) {
var parts = motorId.split('-');
if (move && parts.length == 2) {
var driver = parts[0];
var motor = parts[1];
console.log("KF: Moving " + motorId + " by " + move + " steps");
if (motion.status.available) {
var connected = false;
for(var index = 0; index < motion.status.motors.length; index++) {
var motor = motion.status.motors[index];
if(motor.driver == driver && motor.motor == motor) {
connected = motor.connected;
break;
}
}
if(motor.connected) {
motion.move(driver, motor, move, function() {
checkDone();
});
} else {
console.log("KF: error moving", motorId, "-- motor not connected");
checkDone();
}
} else {
console.log("KF: error moving -- no motion system connected");
checkDone();
}
} else {
checkDone();
}
});
}
}
function getEvOptions() {
var maxShutterLengthMs = status.intervalMs;
if (maxShutterLengthMs > intervalometer.autoSettings.paddingTimeMs) maxShutterLengthMs = (status.intervalMs - intervalometer.autoSettings.paddingTimeMs);
return {
cameraSettings: camera.ptp.settings,
maxShutterLengthMs: maxShutterLengthMs,
isoMax: intervalometer.currentProgram.isoMax,
isoMin: intervalometer.currentProgram.isoMin,
shutterMax: intervalometer.currentProgram.shutterMax,
apertureMax: intervalometer.currentProgram.apertureMax,
apertureMin: intervalometer.currentProgram.apertureMin,
parameters: intervalometer.currentProgram.rampParameters || 'S+I',
blendParams: intervalometer.currentProgram.rampParameters && intervalometer.currentProgram.rampParameters.indexOf('=') !== -1
}
}
var busyExposure = false;
function setupExposure(cb) {
var expSetupStartTime = new Date() / 1000;
console.log("\n\nEXP: setupExposure");
busyExposure = true;
camera.ptp.getSettings(function() {
console.log("EXP: current interval: ", status.intervalMs, " (took ", (new Date() / 1000 - expSetupStartTime), "seconds from setup start");
camera.setEv(status.rampEv, getEvOptions(), function(err, res) {
if(res.ev != null) {
status.cameraEv = res.ev;
}
status.cameraSettings = camera.ptp.settings;
status.evDiff = status.cameraEv - status.rampEv;
console.log("EXP: program:", "capture", " (took ", (new Date() / 1000 - expSetupStartTime), "seconds from setup start");
busyExposure = false;
cb && cb(err);
});
});
}
var busyPhoto = false;
var retryHandle = null;
function runPhoto() {
if(!status.running) {
busyPhoto = false;
status.stopping = false;
return;
}
if ((busyPhoto || busyExposure) && intervalometer.currentProgram.rampMode == "auto") {
if (status.running) retryHandle = setTimeout(runPhoto, 100);
return;
}
if(!status.running) return;
busyPhoto = true;
if (camera.ptp.connected) {
if(status.useLiveview) camera.ptp.preview();
status.captureStartTime = new Date() / 1000;
intervalometer.emit("status", status);
var captureOptions = {
thumbnail: true,
index: status.frames
//saveTiff: "/mnt/sd/test" + status.frames + ".tiff",
//saveRaw: "/mnt/sd/test" + status.frames + ".cr2",
}
if (intervalometer.currentProgram.destination == 'sd' && camera.ptp.sdPresent && camera.ptp.sdMounted) {
console.log("CAPT: Saving timelapse to SD card");
captureOptions.thumbnail = false;
var framesPadded = status.frames.toString();
while (framesPadded.length < 4) framesPadded = '0' + framesPadded;
captureOptions.saveRaw = status.mediaFolder + "/img-" + framesPadded;
camera.ptp.saveToCameraCard(false);
} else {
camera.ptp.saveToCameraCard(true);
}
if (intervalometer.currentProgram.rampMode == "fixed") {
status.intervalMs = intervalometer.currentProgram.interval * 1000;
if (status.running) timerHandle = setTimeout(runPhoto, status.intervalMs);
setTimeout(motionSyncPulse, camera.lists.getSecondsFromEv(camera.ptp.settings.details.shutter.ev) * 1000 + 1500);
captureOptions.calculateEv = false;
status.lastPhotoTime = new Date() / 1000 - status.startTime;
camera.ptp.capture(captureOptions, function(err, photoRes) {
if (!err && photoRes) {
status.path = photoRes.file;
if(photoRes.cameraCount > 1) {
for(var i = 0; i < photoRes.cameraResults.length; i++) {
console.log("photoRes.cameraResults[" + i + "]:", photoRes.cameraResults[i].file, photoRes.cameraResults[i].cameraIndex, photoRes.cameraResults[i].thumbnailPath);
db.setTimelapseFrame(status.id, 0, getDetails(photoRes.cameraResults[i].file), photoRes.cameraResults[i].cameraIndex, photoRes.cameraResults[i].thumbnailPath);
}
} else {
db.setTimelapseFrame(status.id, 0, getDetails(), 1, photoRes.thumbnailPath);
}
status.message = "running";
if (status.framesRemaining > 0) status.framesRemaining--;
status.frames++;
//writeFile();
intervalometer.emit("status", status);
console.log("TL: program status:", status);
} else {
intervalometer.emit('error', "An error occurred during capture. This could mean that the camera body is not supported or possibly an issue with the cable disconnecting.\nThe time-lapse will attempt to continue anyway.\nSystem message: ", err);
}
if (status.framesRemaining < 1 || status.running == false || status.stopping == true) {
clearTimeout(timerHandle);
status.message = "done";
status.framesRemaining = 0;
intervalometer.cancel('done');
}
processKeyframes(false, function() {
busyPhoto = false;
});
});
} else {
if (status.rampEv === null) {
status.cameraEv = camera.lists.getEvFromSettings(camera.ptp.settings);
status.rampEv = status.cameraEv;
}
captureOptions.exposureCompensation = status.evDiff || 0;
captureOptions.calculateEv = true;
if(intervalometer.currentProgram.intervalMode == 'aux') {
if(status.intervalStartTime) status.intervalMs = ((new Date() / 1000) - status.intervalStartTime) * 1000;
status.intervalStartTime = new Date() / 1000;
} else {
status.intervalMs = calculateIntervalMs(intervalometer.currentProgram.interval, status.rampEv);
console.log("TL: Setting timer for fixed interval at ", status.intervalMs);
if (status.running) timerHandle = setTimeout(runPhoto, status.intervalMs);
}
intervalometer.emit("status", status);
var shutterEv;
if(camera.ptp.settings.details && camera.ptp.settings.details.shutter) shutterEv = camera.ptp.settings.details.shutter.ev; else shutterEv = 0;
var msDelayPulse = camera.lists.getSecondsFromEv(shutterEv) * 1000 + 1500;
setTimeout(motionSyncPulse, msDelayPulse);
status.lastPhotoTime = new Date() / 1000 - status.startTime;
camera.ptp.capture(captureOptions, function(err, photoRes) {
if (!err && photoRes) {
var bufferTime = (new Date() / 1000) - status.captureStartTime - camera.lists.getSecondsFromEv(camera.ptp.settings.details.shutter.ev);
if(!status.bufferSeconds) {
status.bufferSeconds = bufferTime;
} else if(bufferTime != status.bufferSeconds) {
status.bufferSeconds = (status.bufferSeconds + bufferTime) / 2;
}
status.path = photoRes.file;
if(photoRes.cameraCount > 1) {
for(var i = 0; i < photoRes.cameraResults.length; i++) {
db.setTimelapseFrame(status.id, status.evDiff, getDetails(photoRes.cameraResults[i].file), photoRes.cameraResults[i].cameraIndex, photoRes.cameraResults[i].thumbnailPath);
}
} else {
db.setTimelapseFrame(status.id, status.evDiff, getDetails(), 1, photoRes.thumbnailPath);
}
intervalometer.autoSettings.paddingTimeMs = status.bufferSeconds * 1000 + 250; // add a quarter second for setting exposure
status.rampEv = exp.calculate(intervalometer.currentProgram.rampAlgorithm, status.rampEv, photoRes.ev, photoRes.histogram, camera.minEv(camera.ptp.settings, getEvOptions()), camera.maxEv(camera.ptp.settings, getEvOptions()));
status.rampRate = exp.status.rate;
status.path = photoRes.file;
status.message = "running";
if(intervalometer.currentProgram.intervalMode == 'aux') status.message = "waiting for AUX2...";
setupExposure();
if (status.framesRemaining > 0) status.framesRemaining--;
status.frames++;
writeFile();
intervalometer.emit("status", status);
console.log("TL: program status:", status);
if(status.frames == 1 && photoRes.ev > 2.5) {
error("WARNING: the exposure is too high for reliable ramping. It will attempt to continue, but it's strongly recommended to stop the time-lapse, descrease the exposure to expose for the highlights and then start again.");
}
} else {
if(!err) err = "unknown";
error("An error occurred during capture. This could mean that the camera body is not supported or possibly an issue with the cable disconnecting.\nThe time-lapse will attempt to continue anyway.\nSystem message: " + err);
console.log("TL: error:", err);
}
if ((intervalometer.currentProgram.intervalMode == "fixed" && status.framesRemaining < 1) || status.running == false || status.stopping == true) {
clearTimeout(timerHandle);
status.stopping = false;
status.message = "done";
status.framesRemaining = 0;
intervalometer.cancel('done');
}
processKeyframes(false, function() {
busyPhoto = false;
});
});
}
}
}
function error(msg) {
setTimeout(function(){
intervalometer.emit("error", msg);
}, 100);
}
camera.ptp.on('saveError', function(msg) {
if (intervalometer.status.running) {
intervalometer.cancel('err');
error("Failed to save RAW image to SD card!\nTime-lapse has been stopped.\nPlease verify that the camera is set to RAW (not RAW+JPEG) and that the SD card is formatted and fully inserted into the VIEW.\nSystem message: " + msg);
}
});
camera.ptp.on('saveErrorCardFull', function(msg) {
if (intervalometer.status.running) {
intervalometer.cancel('err');
error("SD card full! Unabled to save RAW images.\nThe time-lapse has been stopped.");
}
});
intervalometer.validate = function(program) {
var results = {
errors: []
};
if(program.frames === null) program.frames = Infinity;
if (parseInt(program.delay) < 1) program.delay = 2;
if(program.rampMode == 'fixed') {
if (parseInt(program.frames) < 1) results.errors.push({param:'frames', reason: 'frame count not set'});
} else {
if(program.intervalMode == 'fixed' || program.rampMode == 'fixed') {
if (parseInt(program.interval) < 1) results.errors.push({param:'interval', reason: 'interval not set or too short'});
} else {
if (parseInt(program.dayInterval) < 2) results.errors.push({param:'dayInterval', reason: 'dayInterval must be at least 2 seconds'});
if (parseInt(program.nightInterval) < program.dayInterval) results.errors.push({param:'nightInterval', reason: 'nightInterval shorter than dayInterval'});
}
}
if(!camera.ptp.supports.destination && (program.destination != 'sd' || !camera.ptp.sdPresent)) {
console.log("VAL: Error: SD card required");
results.errors.push({param:false, reason: "SD card required. The connected camera (" + camera.ptp.model + ") does not support saving images to the camera. Please insert an SD card into the VIEW and set the Destination to 'SD Card' so images can be saved to the card."});
}
var settingsDetails = camera.ptp.settings.details;
if(!settingsDetails.iso || settingsDetails.iso.ev == null) {
console.log("VAL: Error: invalid ISO setting", settingsDetails.iso);
results.errors.push({param:false, reason: "invalid ISO setting on camera."});
}
if(!settingsDetails.shutter || settingsDetails.shutter.ev == null) {
console.log("VAL: Error: invalid shutter setting", settingsDetails.shutter);
results.errors.push({param:false, reason: "invalid shutter setting on camera."});
}
if(camera.ptp.settings && camera.ptp.settings.format != 'RAW' && program.destination == 'sd' && camera.ptp.sdPresent) {
console.log("VAL: Error: camera not set to save in RAW");
results.errors.push({param:false, reason: "camera must be set to save in RAW. The VIEW expects RAW files when processing images to the SD card (RAW+JPEG does not work)"});
}
console.log("VAL: validating program:", results);
return results;
}
intervalometer.cancel = function(reason) {
if(!reason) reason = 'stopped';
if (intervalometer.status.running) {
clearTimeout(timerHandle);
clearTimeout(delayHandle);
intervalometer.status.stopping = true;
if(reason == 'err') intervalometer.status.message = "stopped due to error";
else if(reason == 'done') intervalometer.status.message = "time-lapse complete";
else intervalometer.status.message = "time-lapse canceled";
intervalometer.status.framesRemaining = 0;
intervalometer.emit("status", status);
camera.ptp.completeWrites(function() {
busyPhoto = false;
intervalometer.status.running = false;
intervalometer.status.stopping = false;
intervalometer.timelapseFolder = false;
camera.ptp.saveThumbnails(intervalometer.timelapseFolder);
camera.ptp.unmountSd();
intervalometer.emit("status", status);
console.log("==========> END TIMELAPSE", status.tlName);
});
}
}
intervalometer.resume = function() {
camera.ptp.cancelCallbacks();
busyPhoto = false;
busyExposure = false;
clearTimeout(timerHandle);
clearTimeout(delayHandle);
clearTimeout(retryHandle);
var ms = status.intervalMs - ((new Date() / 1000) - (status.startTime + status.lastPhotoTime)) * 1000;
if(ms < 0) ms = 0;
setTimeout(runPhoto, ms);
}
intervalometer.run = function(program) {
if (intervalometer.status.running) return;
intervalometer.status.stopping = false;
console.log("loading time-lapse program:", program);
db.set('intervalometer.currentProgram', program);
if(program.manualAperture != null) camera.fixedApertureEv = program.manualAperture;
if (camera.ptp.connected) {
camera.ptp.getSettings(function(){
var validationResults = intervalometer.validate(program);
if (validationResults.errors.length == 0) {
var tlIndex = fs.readFileSync(TLROOT + '/index.txt');
if (!tlIndex) {
tlIndex = 1;
} else {
tlIndex = parseInt(tlIndex) + 1;
}
fs.writeFileSync(TLROOT + '/index.txt', tlIndex.toString());
status.tlName = "tl-" + tlIndex;
console.log("==========> TIMELAPSE START", status.tlName);
intervalometer.timelapseFolder = TLROOT + "/" + status.tlName;
fs.mkdirSync(intervalometer.timelapseFolder);
camera.ptp.saveThumbnails(intervalometer.timelapseFolder);
status.timelapseFolder = intervalometer.timelapseFolder;
fileInit();
busyPhoto = false;
intervalometer.currentProgram = program;
status.intervalMs = program.interval * 1000;
status.message = "starting";
status.frames = 0;
status.framesRemaining = (program.intervalMode == "auto" && program.rampMode == "auto") ? Infinity : program.frames;
status.startTime = new Date() / 1000;
status.rampEv = null;
status.bufferSeconds = 0;
status.cameraSettings = camera.ptp.settings;
if(intervalometer.gpsData) {
status.latitude = intervalometer.gpsData.lat;
status.longitude = intervalometer.gpsData.lon;
}
exp.init(camera.minEv(camera.ptp.settings, getEvOptions()), camera.maxEv(camera.ptp.settings, getEvOptions()), program.nightCompensation);
status.running = true;
intervalometer.emit("status", status);
console.log("program:", "starting", program);
//function start() {
// if(camera.ptp.settings.autofocus && camera.ptp.settings.autofocus == "on") {
// console.log("Intervalometer: disabling autofocus");
// camera.ptp.set("autofocus", "off", checkFocus2);
// } else {
// checkFocus2();
// }
//}
//function checkFocus2() {
// if(camera.ptp.settings.afmode && camera.ptp.settings.afmode != "manual") {
// console.log("Intervalometer: setting focus mode to manual");
// camera.ptp.set("afmode", "manual", start2);
// } else {
// start2();
// }
//}
function start() {
status.useLiveview = false;
var focusPosTest = null;
var focusChange = false;
if(camera.ptp.model.match(/nikon/i) && intervalometer.currentProgram.keyframes && intervalometer.currentProgram.keyframes.length > 0) {
for(var i = 0; i < intervalometer.currentProgram.keyframes.length; i++) {
if(focusPosTest != null && focusPosTest != intervalometer.currentProgram.keyframes[i].focus) {
focusChange = true;
break;
}
focusPosTest = intervalometer.currentProgram.keyframes[i].focus;
}
if(focusChange) status.useLiveview = true;
}
var cameras = 1, primary = 1;
if(camera.ptp.synchronized) {
cameras = camera.ptp.count;
primary = camera.ptp.getPrimaryCameraIndex();
}
db.setTimelapse(status.tlName, program, cameras, primary, status, function(err, timelapseId) {
status.id = timelapseId;
processKeyframes(true, function() {
setTimeout(function() {
busyPhoto = false;
if(intervalometer.currentProgram.intervalMode != 'aux' || intervalometer.currentProgram.rampMode == 'fixed') {
runPhoto();
}
if(intervalometer.currentProgram.intervalMode == 'aux') {
status.message = "waiting for AUX2...";
intervalometer.emit("status", status);
}
}, 3000);
});
});
//delayHandle = setTimeout(function() {
// runPhoto();
//}, program.delay * 1000);
}
if (program.destination && program.destination == 'sd' && camera.ptp.sdPresent) {
camera.ptp.mountSd(function(mountErr) {
if(mountErr) {
console.log("Error mounting SD card");
intervalometer.cancel('err');
error("Error mounting SD card. \nVerify the SD card is formatted and fully inserted in the VIEW, then try starting the time-lapse again.\nMessage from system: " + mountErr);
} else {
status.mediaFolder = "/media/" + status.tlName;
fs.mkdir(status.mediaFolder, function(folderErr) {
if(folderErr) {
console.log("Error creating folder", status.mediaFolder);
intervalometer.cancel('err');
error("Error creating folder on SD card: /" + status.tlName + ".\nVerify the card is present and not write-protected, then try starting the time-lapse again.\nAlternatively, set the Destination to Camera instead (if supported)");
} else {
start();
}
});
}
});
} else {
start();
}
} else {
var errorList = "";
var val = "";
for(var i = 0; i < validationResults.errors.length; i++) {
if(program.hasOwnProperty([validationResults.errors[i].param])) {
val = " (" + program[validationResults.errors[i].param] + ")";
} else {
val = "";
}
errorList += "- " + validationResults.errors[i].reason + val + "\n";
}
intervalometer.cancel('err');
error("Failed to start time-lapse: \n" + errorList + "Please correct and try again.");
}
});
} else {
intervalometer.cancel('err');
error("Camera not connected. Please verify camera connection via USB and try again.");
return;
}
}
intervalometer.addGpsData = function(gpsData, callback) {
intervalometer.gpsData = gpsData;
callback && callback();
}
module.exports = intervalometer; | intervalometer/intervalometer.js | var EventEmitter = require("events").EventEmitter;
var exec = require('child_process').exec;
require('rootpath')();
var camera = require('camera/camera.js');
var db = require('system/db.js');
var motion = require('motion/motion.js');
var image = require('camera/image/image.js');
var exp = require('intervalometer/exposure.js');
var interpolate = require('intervalometer/interpolate.js');
var fs = require('fs');
var async = require('async');
var TLROOT = "/root/time-lapse";
var Button = require('gpio-button');
var gpio = require('linux-gpio');
var _ = require('underscore');
var suncalc = require('suncalc');
var AUXTIP_OUT = 111;
var AUXRING_OUT = 110;
var HOTSHOE_IN = 34;
gpio.setMode(gpio.MODE_RAW);
gpio.setup(AUXTIP_OUT, gpio.DIR_OUT, function(err){
if(err) console.log("GPIO error: ", err);
gpio.write(AUXTIP_OUT, 1);
});
gpio.setup(AUXRING_OUT, gpio.DIR_OUT, function(err){
if(err) console.log("GPIO error: ", err);
gpio.write(AUXRING_OUT, 1);
});
gpio.setup(HOTSHOE_IN, gpio.DIR_IN, function(err){
if(err) console.log("GPIO error: ", err);
});
var intervalometer = new EventEmitter();
intervalometer.db = db;
var timerHandle = null;
var delayHandle = null;
var rate = 0;
intervalometer.autoSettings = {
paddingTimeMs: 2000
}
intervalometer.timelapseFolder = false;
status = {
running: false,
frames: 0,
framesRemaining: 0,
rampRate: 0,
intervalMs: 0,
message: "",
rampEv: null,
autoSettings: {
paddingTimeMs: 2000
}
}
intervalometer.status = status;
var auxTrigger = new Button('input-aux2');
auxTrigger.on('press', function() {
console.log("AUX2 trigger!");
if (status.running && intervalometer.currentProgram.intervalMode == 'aux') timerHandle = setTimeout(runPhoto, 0);
});
auxTrigger.on('error', function(err) {
console.log("AUX2 error: ", err);
});
function motionSyncPulse() {
if (status.running && intervalometer.currentProgram.intervalMode != 'aux') {
gpio.read(HOTSHOE_IN, function(err, shutterClosed) {
console.log("hotshoe:", shutterClosed);
if(shutterClosed) {
console.log("=> AUX Pulse");
gpio.write(AUXTIP_OUT, 0, function() {
setTimeout(function(){
gpio.write(AUXTIP_OUT, 1);
}, 200);
});
} else {
setTimeout(motionSyncPulse, 100);
}
});
}
}
function fileInit() {
fs.writeFileSync(status.timelapseFolder + "/details.csv", "frame, error, target, setting, rate, interval, timestamp, file, p, i, d\n");
}
function writeFile() {
fs.appendFileSync(status.timelapseFolder + "/details.csv", status.frames + ", " + status.evDiff + "," + exp.status.targetEv + "," + status.rampEv + "," + exp.status.rate + "," + (status.intervalMs / 1000) + "," + status.lastPhotoTime + "," + status.path + "," + exp.status.pComponent + "," + exp.status.iComponent + "," + exp.status.dComponent + "\n");
//image.writeXMP(name, status.evDiff);
}
function getDetails(file) {
var d = {
frames: status.frames,
evCorrection: status.evDiff,
targetEv: exp.status.targetEv,
actualEv: status.rampEv,
cameraEv: status.cameraEv,
rampRate: exp.status.rate,
intervalMs: status.intervalMs,
timestamp: status.lastPhotoTime,
fileName: file || status.path,
p: exp.status.pComponent,
i: exp.status.iComponent,
d: exp.status.dComponent,
};
if(intervalometer.gpsData) {
d.latitude = intervalometer.gpsData.lat;
d.longitude = intervalometer.gpsData.lon;
d.sunPos = suncalc.getPosition(new Date(), d.latitude, d.longitude);
d.moonPos = suncalc.getMoonPosition(new Date(), d.latitude, d.longitude);
d.moonIllumination = suncalc.getMoonIllumination(new Date());
}
return d;
}
var startShutterEv = -1;
function calculateIntervalMs(interval, currentEv) {
var dayEv = 8;
var nightEv = -2;
if (intervalometer.currentProgram.intervalMode == 'fixed') {
return interval * 1000;
} else {
var newInterval = interpolate.linear([{
x: dayEv,
y: parseInt(intervalometer.currentProgram.dayInterval)
}, {
x: nightEv,
y: parseInt(intervalometer.currentProgram.nightInterval)
}], currentEv);
return newInterval * 1000;
}
}
function doKeyframeAxis(axisName, axisSubIndex, setupFirst, interpolationMethod, motionFunction) {
if(interpolationMethod != 'catmullRomSpline') interpolationMethod = 'linear';
var keyframes = intervalometer.currentProgram.keyframes;
if (status.running && keyframes && keyframes.length > 0 && keyframes[0][axisName] != null) {
var kfSet = null;
var kfCurrent = null;
if (setupFirst) {
keyframes[0].seconds = 0;
if(axisSubIndex != null) {
keyframes[0][axisName][axisSubIndex] = 0;
} else {
keyframes[0][axisName] = 0;
}
kfSet = 0;
} else {
var secondsSinceStart = status.lastPhotoTime + (status.intervalMs / 1000);
console.log("KF: Seconds since last: " + secondsSinceStart);
var totalSeconds = 0;
kfPoints = keyframes.map(function(kf) {
totalSeconds += kf.seconds;
if(axisSubIndex != null) {
return {
x: totalSeconds,
y: kf[axisName][axisSubIndex] || 0
}
} else {
return {
x: totalSeconds,
y: kf[axisName] || 0
}
}
});
kfSet = interpolate[interpolationMethod](kfPoints, secondsSinceStart);
console.log("FK: " + axisName + " target: " + kfSet);
}
var axisNameExtension = '';
if(axisSubIndex != null) axisNameExtension = '-' + axisSubIndex;
kfCurrent = intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'];
if (kfCurrent == null) {
motionFunction(kfSet); // absolute setting (like ev)
} else {
var precision = 10000; // limit precision to ensure we hit even values
var kfTarget = Math.round(kfSet * precision) / precision;
if (kfTarget != Math.round(intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'] * precision) / precision) {
var relativeMove = kfTarget - intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'];
motionFunction(relativeMove);
intervalometer.currentProgram[axisName + axisNameExtension + 'Pos'] = kfTarget;
} else {
if (motionFunction) motionFunction();
}
}
} else {
if (motionFunction) motionFunction();
}
}
function processKeyframes(setupFirst, callback) {
var numAxes = 2;
var axesDone = 0;
if(intervalometer.currentProgram.keyframes && intervalometer.currentProgram.keyframes.length > 0 && intervalometer.currentProgram.keyframes[0].motor) {
for(motorId in intervalometer.currentProgram.keyframes[0].motor) numAxes++;
}
var checkDone = function() {
axesDone++;
console.log("KF: " + axesDone + " keyframe items complete");
if (axesDone >= numAxes && callback) {
console.log("KF: keyframes complete, running callback");
callback();
}
}
doKeyframeAxis('ev', null, setupFirst, 'linear', function(ev) {
//if (ev != null && camera.settings.ev != ev) camera.setEv(ev);
checkDone();
});
doKeyframeAxis('focus', null, setupFirst, 'linear', function(focus) {
if (focus) {
camera.ptp.preview(function() {
setTimeout(function() {
console.log("KF: Moving focus by " + focus + " steps");
var dir = focus > 0 ? 1 : -1;
var steps = Math.abs(focus);
camera.ptp.focus(dir, steps, function() {
setTimeout(function(){
camera.ptp.lvOff(function(){
setTimeout(checkDone, 500);
});
}, 500);
});
}, 1000);
});
} else {
checkDone();
}
});
if(intervalometer.currentProgram.keyframes && intervalometer.currentProgram.keyframes.length > 0 && intervalometer.currentProgram.keyframes[0].motor) for(motorId in intervalometer.currentProgram.keyframes[0].motor) {
doKeyframeAxis('motor', motorId, setupFirst, 'catmullRomSpline', function(move) {
var parts = motorId.split('-');
if (move && parts.length == 2) {
var driver = parts[0];
var motor = parts[1];
console.log("KF: Moving " + motorId + " by " + move + " steps");
if (motion.status.available) {
motion.move(driver, motor, move, function() {
checkDone();
});
} else {
console.log("KF: error moving -- motion not connected");
checkDone();
}
} else {
checkDone();
}
});
}
}
function getEvOptions() {
var maxShutterLengthMs = status.intervalMs;
if (maxShutterLengthMs > intervalometer.autoSettings.paddingTimeMs) maxShutterLengthMs = (status.intervalMs - intervalometer.autoSettings.paddingTimeMs);
return {
cameraSettings: camera.ptp.settings,
maxShutterLengthMs: maxShutterLengthMs,
isoMax: intervalometer.currentProgram.isoMax,
isoMin: intervalometer.currentProgram.isoMin,
shutterMax: intervalometer.currentProgram.shutterMax,
apertureMax: intervalometer.currentProgram.apertureMax,
apertureMin: intervalometer.currentProgram.apertureMin,
parameters: intervalometer.currentProgram.rampParameters || 'S+I',
blendParams: intervalometer.currentProgram.rampParameters && intervalometer.currentProgram.rampParameters.indexOf('=') !== -1
}
}
var busyExposure = false;
function setupExposure(cb) {
var expSetupStartTime = new Date() / 1000;
console.log("\n\nEXP: setupExposure");
busyExposure = true;
camera.ptp.getSettings(function() {
console.log("EXP: current interval: ", status.intervalMs, " (took ", (new Date() / 1000 - expSetupStartTime), "seconds from setup start");
camera.setEv(status.rampEv, getEvOptions(), function(err, res) {
if(res.ev != null) {
status.cameraEv = res.ev;
}
status.cameraSettings = camera.ptp.settings;
status.evDiff = status.cameraEv - status.rampEv;
console.log("EXP: program:", "capture", " (took ", (new Date() / 1000 - expSetupStartTime), "seconds from setup start");
busyExposure = false;
cb && cb(err);
});
});
}
var busyPhoto = false;
var retryHandle = null;
function runPhoto() {
if(!status.running) {
busyPhoto = false;
status.stopping = false;
return;
}
if ((busyPhoto || busyExposure) && intervalometer.currentProgram.rampMode == "auto") {
if (status.running) retryHandle = setTimeout(runPhoto, 100);
return;
}
if(!status.running) return;
busyPhoto = true;
if (camera.ptp.connected) {
if(status.useLiveview) camera.ptp.preview();
status.captureStartTime = new Date() / 1000;
intervalometer.emit("status", status);
var captureOptions = {
thumbnail: true,
index: status.frames
//saveTiff: "/mnt/sd/test" + status.frames + ".tiff",
//saveRaw: "/mnt/sd/test" + status.frames + ".cr2",
}
if (intervalometer.currentProgram.destination == 'sd' && camera.ptp.sdPresent && camera.ptp.sdMounted) {
console.log("CAPT: Saving timelapse to SD card");
captureOptions.thumbnail = false;
var framesPadded = status.frames.toString();
while (framesPadded.length < 4) framesPadded = '0' + framesPadded;
captureOptions.saveRaw = status.mediaFolder + "/img-" + framesPadded;
camera.ptp.saveToCameraCard(false);
} else {
camera.ptp.saveToCameraCard(true);
}
if (intervalometer.currentProgram.rampMode == "fixed") {
status.intervalMs = intervalometer.currentProgram.interval * 1000;
if (status.running) timerHandle = setTimeout(runPhoto, status.intervalMs);
setTimeout(motionSyncPulse, camera.lists.getSecondsFromEv(camera.ptp.settings.details.shutter.ev) * 1000 + 1500);
captureOptions.calculateEv = false;
status.lastPhotoTime = new Date() / 1000 - status.startTime;
camera.ptp.capture(captureOptions, function(err, photoRes) {
if (!err && photoRes) {
status.path = photoRes.file;
if(photoRes.cameraCount > 1) {
for(var i = 0; i < photoRes.cameraResults.length; i++) {
console.log("photoRes.cameraResults[" + i + "]:", photoRes.cameraResults[i].file, photoRes.cameraResults[i].cameraIndex, photoRes.cameraResults[i].thumbnailPath);
db.setTimelapseFrame(status.id, 0, getDetails(photoRes.cameraResults[i].file), photoRes.cameraResults[i].cameraIndex, photoRes.cameraResults[i].thumbnailPath);
}
} else {
db.setTimelapseFrame(status.id, 0, getDetails(), 1, photoRes.thumbnailPath);
}
status.message = "running";
if (status.framesRemaining > 0) status.framesRemaining--;
status.frames++;
//writeFile();
intervalometer.emit("status", status);
console.log("TL: program status:", status);
} else {
intervalometer.emit('error', "An error occurred during capture. This could mean that the camera body is not supported or possibly an issue with the cable disconnecting.\nThe time-lapse will attempt to continue anyway.\nSystem message: ", err);
}
if (status.framesRemaining < 1 || status.running == false || status.stopping == true) {
clearTimeout(timerHandle);
status.message = "done";
status.framesRemaining = 0;
intervalometer.cancel('done');
}
processKeyframes(false, function() {
busyPhoto = false;
});
});
} else {
if (status.rampEv === null) {
status.cameraEv = camera.lists.getEvFromSettings(camera.ptp.settings);
status.rampEv = status.cameraEv;
}
captureOptions.exposureCompensation = status.evDiff || 0;
captureOptions.calculateEv = true;
if(intervalometer.currentProgram.intervalMode == 'aux') {
if(status.intervalStartTime) status.intervalMs = ((new Date() / 1000) - status.intervalStartTime) * 1000;
status.intervalStartTime = new Date() / 1000;
} else {
status.intervalMs = calculateIntervalMs(intervalometer.currentProgram.interval, status.rampEv);
console.log("TL: Setting timer for fixed interval at ", status.intervalMs);
if (status.running) timerHandle = setTimeout(runPhoto, status.intervalMs);
}
intervalometer.emit("status", status);
var shutterEv;
if(camera.ptp.settings.details && camera.ptp.settings.details.shutter) shutterEv = camera.ptp.settings.details.shutter.ev; else shutterEv = 0;
var msDelayPulse = camera.lists.getSecondsFromEv(shutterEv) * 1000 + 1500;
setTimeout(motionSyncPulse, msDelayPulse);
status.lastPhotoTime = new Date() / 1000 - status.startTime;
camera.ptp.capture(captureOptions, function(err, photoRes) {
if (!err && photoRes) {
var bufferTime = (new Date() / 1000) - status.captureStartTime - camera.lists.getSecondsFromEv(camera.ptp.settings.details.shutter.ev);
if(!status.bufferSeconds) {
status.bufferSeconds = bufferTime;
} else if(bufferTime != status.bufferSeconds) {
status.bufferSeconds = (status.bufferSeconds + bufferTime) / 2;
}
status.path = photoRes.file;
if(photoRes.cameraCount > 1) {
for(var i = 0; i < photoRes.cameraResults.length; i++) {
db.setTimelapseFrame(status.id, status.evDiff, getDetails(photoRes.cameraResults[i].file), photoRes.cameraResults[i].cameraIndex, photoRes.cameraResults[i].thumbnailPath);
}
} else {
db.setTimelapseFrame(status.id, status.evDiff, getDetails(), 1, photoRes.thumbnailPath);
}
intervalometer.autoSettings.paddingTimeMs = status.bufferSeconds * 1000 + 250; // add a quarter second for setting exposure
status.rampEv = exp.calculate(intervalometer.currentProgram.rampAlgorithm, status.rampEv, photoRes.ev, photoRes.histogram, camera.minEv(camera.ptp.settings, getEvOptions()), camera.maxEv(camera.ptp.settings, getEvOptions()));
status.rampRate = exp.status.rate;
status.path = photoRes.file;
status.message = "running";
if(intervalometer.currentProgram.intervalMode == 'aux') status.message = "waiting for AUX2...";
setupExposure();
if (status.framesRemaining > 0) status.framesRemaining--;
status.frames++;
writeFile();
intervalometer.emit("status", status);
console.log("TL: program status:", status);
if(status.frames == 1 && photoRes.ev > 2.5) {
error("WARNING: the exposure is too high for reliable ramping. It will attempt to continue, but it's strongly recommended to stop the time-lapse, descrease the exposure to expose for the highlights and then start again.");
}
} else {
if(!err) err = "unknown";
error("An error occurred during capture. This could mean that the camera body is not supported or possibly an issue with the cable disconnecting.\nThe time-lapse will attempt to continue anyway.\nSystem message: " + err);
console.log("TL: error:", err);
}
if ((intervalometer.currentProgram.intervalMode == "fixed" && status.framesRemaining < 1) || status.running == false || status.stopping == true) {
clearTimeout(timerHandle);
status.stopping = false;
status.message = "done";
status.framesRemaining = 0;
intervalometer.cancel('done');
}
processKeyframes(false, function() {
busyPhoto = false;
});
});
}
}
}
function error(msg) {
setTimeout(function(){
intervalometer.emit("error", msg);
}, 100);
}
camera.ptp.on('saveError', function(msg) {
if (intervalometer.status.running) {
intervalometer.cancel('err');
error("Failed to save RAW image to SD card!\nTime-lapse has been stopped.\nPlease verify that the camera is set to RAW (not RAW+JPEG) and that the SD card is formatted and fully inserted into the VIEW.\nSystem message: " + msg);
}
});
camera.ptp.on('saveErrorCardFull', function(msg) {
if (intervalometer.status.running) {
intervalometer.cancel('err');
error("SD card full! Unabled to save RAW images.\nThe time-lapse has been stopped.");
}
});
intervalometer.validate = function(program) {
var results = {
errors: []
};
if(program.frames === null) program.frames = Infinity;
if (parseInt(program.delay) < 1) program.delay = 2;
if(program.rampMode == 'fixed') {
if (parseInt(program.frames) < 1) results.errors.push({param:'frames', reason: 'frame count not set'});
} else {
if(program.intervalMode == 'fixed' || program.rampMode == 'fixed') {
if (parseInt(program.interval) < 1) results.errors.push({param:'interval', reason: 'interval not set or too short'});
} else {
if (parseInt(program.dayInterval) < 2) results.errors.push({param:'dayInterval', reason: 'dayInterval must be at least 2 seconds'});
if (parseInt(program.nightInterval) < program.dayInterval) results.errors.push({param:'nightInterval', reason: 'nightInterval shorter than dayInterval'});
}
}
if(!camera.ptp.supports.destination && (program.destination != 'sd' || !camera.ptp.sdPresent)) {
console.log("VAL: Error: SD card required");
results.errors.push({param:false, reason: "SD card required. The connected camera (" + camera.ptp.model + ") does not support saving images to the camera. Please insert an SD card into the VIEW and set the Destination to 'SD Card' so images can be saved to the card."});
}
var settingsDetails = camera.ptp.settings.details;
if(!settingsDetails.iso || settingsDetails.iso.ev == null) {
console.log("VAL: Error: invalid ISO setting", settingsDetails.iso);
results.errors.push({param:false, reason: "invalid ISO setting on camera."});
}
if(!settingsDetails.shutter || settingsDetails.shutter.ev == null) {
console.log("VAL: Error: invalid shutter setting", settingsDetails.shutter);
results.errors.push({param:false, reason: "invalid shutter setting on camera."});
}
if(camera.ptp.settings && camera.ptp.settings.format != 'RAW' && program.destination == 'sd' && camera.ptp.sdPresent) {
console.log("VAL: Error: camera not set to save in RAW");
results.errors.push({param:false, reason: "camera must be set to save in RAW. The VIEW expects RAW files when processing images to the SD card (RAW+JPEG does not work)"});
}
console.log("VAL: validating program:", results);
return results;
}
intervalometer.cancel = function(reason) {
if(!reason) reason = 'stopped';
if (intervalometer.status.running) {
clearTimeout(timerHandle);
clearTimeout(delayHandle);
intervalometer.status.stopping = true;
if(reason == 'err') intervalometer.status.message = "stopped due to error";
else if(reason == 'done') intervalometer.status.message = "time-lapse complete";
else intervalometer.status.message = "time-lapse canceled";
intervalometer.status.framesRemaining = 0;
intervalometer.emit("status", status);
camera.ptp.completeWrites(function() {
busyPhoto = false;
intervalometer.status.running = false;
intervalometer.status.stopping = false;
intervalometer.timelapseFolder = false;
camera.ptp.saveThumbnails(intervalometer.timelapseFolder);
camera.ptp.unmountSd();
intervalometer.emit("status", status);
console.log("==========> END TIMELAPSE", status.tlName);
});
}
}
intervalometer.resume = function() {
camera.ptp.cancelCallbacks();
busyPhoto = false;
busyExposure = false;
clearTimeout(timerHandle);
clearTimeout(delayHandle);
clearTimeout(retryHandle);
var ms = status.intervalMs - ((new Date() / 1000) - (status.startTime + status.lastPhotoTime)) * 1000;
if(ms < 0) ms = 0;
setTimeout(runPhoto, ms);
}
intervalometer.run = function(program) {
if (intervalometer.status.running) return;
intervalometer.status.stopping = false;
console.log("loading time-lapse program:", program);
db.set('intervalometer.currentProgram', program);
if(program.manualAperture != null) camera.fixedApertureEv = program.manualAperture;
if (camera.ptp.connected) {
camera.ptp.getSettings(function(){
var validationResults = intervalometer.validate(program);
if (validationResults.errors.length == 0) {
var tlIndex = fs.readFileSync(TLROOT + '/index.txt');
if (!tlIndex) {
tlIndex = 1;
} else {
tlIndex = parseInt(tlIndex) + 1;
}
fs.writeFileSync(TLROOT + '/index.txt', tlIndex.toString());
status.tlName = "tl-" + tlIndex;
console.log("==========> TIMELAPSE START", status.tlName);
intervalometer.timelapseFolder = TLROOT + "/" + status.tlName;
fs.mkdirSync(intervalometer.timelapseFolder);
camera.ptp.saveThumbnails(intervalometer.timelapseFolder);
status.timelapseFolder = intervalometer.timelapseFolder;
fileInit();
busyPhoto = false;
intervalometer.currentProgram = program;
status.intervalMs = program.interval * 1000;
status.message = "starting";
status.frames = 0;
status.framesRemaining = (program.intervalMode == "auto" && program.rampMode == "auto") ? Infinity : program.frames;
status.startTime = new Date() / 1000;
status.rampEv = null;
status.bufferSeconds = 0;
status.cameraSettings = camera.ptp.settings;
if(intervalometer.gpsData) {
status.latitude = intervalometer.gpsData.lat;
status.longitude = intervalometer.gpsData.lon;
}
exp.init(camera.minEv(camera.ptp.settings, getEvOptions()), camera.maxEv(camera.ptp.settings, getEvOptions()), program.nightCompensation);
status.running = true;
intervalometer.emit("status", status);
console.log("program:", "starting", program);
//function start() {
// if(camera.ptp.settings.autofocus && camera.ptp.settings.autofocus == "on") {
// console.log("Intervalometer: disabling autofocus");
// camera.ptp.set("autofocus", "off", checkFocus2);
// } else {
// checkFocus2();
// }
//}
//function checkFocus2() {
// if(camera.ptp.settings.afmode && camera.ptp.settings.afmode != "manual") {
// console.log("Intervalometer: setting focus mode to manual");
// camera.ptp.set("afmode", "manual", start2);
// } else {
// start2();
// }
//}
function start() {
status.useLiveview = false;
var focusPosTest = null;
var focusChange = false;
if(camera.ptp.model.match(/nikon/i) && intervalometer.currentProgram.keyframes && intervalometer.currentProgram.keyframes.length > 0) {
for(var i = 0; i < intervalometer.currentProgram.keyframes.length; i++) {
if(focusPosTest != null && focusPosTest != intervalometer.currentProgram.keyframes[i].focus) {
focusChange = true;
break;
}
focusPosTest = intervalometer.currentProgram.keyframes[i].focus;
}
if(focusChange) status.useLiveview = true;
}
var cameras = 1, primary = 1;
if(camera.ptp.synchronized) {
cameras = camera.ptp.count;
primary = camera.ptp.getPrimaryCameraIndex();
}
db.setTimelapse(status.tlName, program, cameras, primary, status, function(err, timelapseId) {
status.id = timelapseId;
processKeyframes(true, function() {
setTimeout(function() {
busyPhoto = false;
if(intervalometer.currentProgram.intervalMode != 'aux' || intervalometer.currentProgram.rampMode == 'fixed') {
runPhoto();
}
if(intervalometer.currentProgram.intervalMode == 'aux') {
status.message = "waiting for AUX2...";
intervalometer.emit("status", status);
}
}, 3000);
});
});
//delayHandle = setTimeout(function() {
// runPhoto();
//}, program.delay * 1000);
}
if (program.destination && program.destination == 'sd' && camera.ptp.sdPresent) {
camera.ptp.mountSd(function(mountErr) {
if(mountErr) {
console.log("Error mounting SD card");
intervalometer.cancel('err');
error("Error mounting SD card. \nVerify the SD card is formatted and fully inserted in the VIEW, then try starting the time-lapse again.\nMessage from system: " + mountErr);
} else {
status.mediaFolder = "/media/" + status.tlName;
fs.mkdir(status.mediaFolder, function(folderErr) {
if(folderErr) {
console.log("Error creating folder", status.mediaFolder);
intervalometer.cancel('err');
error("Error creating folder on SD card: /" + status.tlName + ".\nVerify the card is present and not write-protected, then try starting the time-lapse again.\nAlternatively, set the Destination to Camera instead (if supported)");
} else {
start();
}
});
}
});
} else {
start();
}
} else {
var errorList = "";
var val = "";
for(var i = 0; i < validationResults.errors.length; i++) {
if(program.hasOwnProperty([validationResults.errors[i].param])) {
val = " (" + program[validationResults.errors[i].param] + ")";
} else {
val = "";
}
errorList += "- " + validationResults.errors[i].reason + val + "\n";
}
intervalometer.cancel('err');
error("Failed to start time-lapse: \n" + errorList + "Please correct and try again.");
}
});
} else {
intervalometer.cancel('err');
error("Camera not connected. Please verify camera connection via USB and try again.");
return;
}
}
intervalometer.addGpsData = function(gpsData, callback) {
intervalometer.gpsData = gpsData;
callback && callback();
}
module.exports = intervalometer; | bugfixes
| intervalometer/intervalometer.js | bugfixes | <ide><path>ntervalometer/intervalometer.js
<ide> var motor = parts[1];
<ide> console.log("KF: Moving " + motorId + " by " + move + " steps");
<ide> if (motion.status.available) {
<del> motion.move(driver, motor, move, function() {
<add> var connected = false;
<add> for(var index = 0; index < motion.status.motors.length; index++) {
<add> var motor = motion.status.motors[index];
<add> if(motor.driver == driver && motor.motor == motor) {
<add> connected = motor.connected;
<add> break;
<add> }
<add> }
<add> if(motor.connected) {
<add> motion.move(driver, motor, move, function() {
<add> checkDone();
<add> });
<add> } else {
<add> console.log("KF: error moving", motorId, "-- motor not connected");
<ide> checkDone();
<del> });
<add> }
<ide> } else {
<del> console.log("KF: error moving -- motion not connected");
<add> console.log("KF: error moving -- no motion system connected");
<ide> checkDone();
<ide> }
<ide> } else { |
|
Java | apache-2.0 | 50cce6c9c898517365cbd6cd0e4b3e197ed3233d | 0 | opencb/opencga,opencb/opencga,opencb/opencga,opencb/opencga,opencb/opencga,opencb/opencga | package org.opencb.opencga.app.migrations.v2_4_2.catalog;
import com.mongodb.client.model.Projections;
import com.mongodb.client.model.UpdateOneModel;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import org.opencb.biodata.models.clinical.ClinicalDiscussion;
import org.opencb.opencga.catalog.db.mongodb.MongoDBAdaptorFactory;
import org.opencb.opencga.catalog.migration.Migration;
import org.opencb.opencga.catalog.migration.MigrationTool;
import org.opencb.opencga.core.common.TimeUtils;
import java.util.Arrays;
import java.util.List;
import static com.mongodb.client.model.Filters.eq;
@Migration(id = "add_clinical_discussion_TASK-1472",
description = "Add ClinicalDiscussion #TASK-1472", version = "2.4.2",
language = Migration.MigrationLanguage.JAVA,
domain = Migration.MigrationDomain.CATALOG,
date = 20220727)
public class AddClinicalDiscussion extends MigrationTool {
@Override
protected void run() throws Exception {
// Replace discussion from ClinicalAnalysis report.discussion
migrateCollection(
Arrays.asList(MongoDBAdaptorFactory.CLINICAL_ANALYSIS_COLLECTION, MongoDBAdaptorFactory.DELETED_CLINICAL_ANALYSIS_COLLECTION),
new Document("report.discussion.author", new Document("$exists", false)),
Projections.include("report", "analyst"),
((document, bulk) -> {
Document report = document.get("report", Document.class);
if (report != null) {
String author = extractAuthor(document);
Document discussionDoc = migrateDiscussion(report, author);;
bulk.add(new UpdateOneModel<>(
eq("_id", document.get("_id")),
new Document("$set", new Document("report.discussion", discussionDoc))
)
);
}
})
);
// We will be replacing the findings array which may be really big so we change the batch size to 1
setBatchSize(1);
// Replace discussion from ClinicalVariants from Interpretations primaryFindings.discussion, secondaryFindings.discussion
migrateCollection(
Arrays.asList(MongoDBAdaptorFactory.INTERPRETATION_COLLECTION, MongoDBAdaptorFactory.INTERPRETATION_ARCHIVE_COLLECTION,
MongoDBAdaptorFactory.DELETED_INTERPRETATION_COLLECTION),
new Document("$or", Arrays.asList(
new Document("primaryFindings.discussion.author", new Document("$exists", false)),
new Document("secondaryFindings.discussion.author", new Document("$exists", false))
)),
Projections.include("primaryFindings", "secondaryFindings", "analyst"),
((document, bulk) -> {
String author = extractAuthor(document);
List<Document> primaryFindings = document.getList("primaryFindings", Document.class);
List<Document> secondaryFindings = document.getList("secondaryFindings", Document.class);
if (CollectionUtils.isNotEmpty(primaryFindings)) {
for (Document primaryFinding : primaryFindings) {
migrateClinicalVariant(primaryFinding, author);
}
}
if (CollectionUtils.isNotEmpty(secondaryFindings)) {
for (Document secondaryFinding : secondaryFindings) {
migrateClinicalVariant(secondaryFinding, author);
}
}
bulk.add(new UpdateOneModel<>(
eq("_id", document.get("_id")),
new Document("$set", new Document()
.append("primaryFindings", primaryFindings)
.append("secondaryFindings", secondaryFindings))
)
);
})
);
}
private void migrateClinicalVariant(Document finding, String author) {
// Migrate discussion in root
migrateDiscussion(finding, author);
// Iterate over evidences
List<Document> evidences = finding.getList("evidences", Document.class);
if (CollectionUtils.isNotEmpty(evidences)) {
for (Document evidence : evidences) {
Document review = evidence.get("review", Document.class);
if (review != null) {
migrateDiscussion(review, author);
}
}
}
}
private Document migrateDiscussion(Document document, String author) {
Object discussion = document.get("discussion");
if (discussion == null || discussion instanceof String) {
if (discussion != null) {
ClinicalDiscussion cDiscussion = new ClinicalDiscussion(author, TimeUtils.getTime(), String.valueOf(discussion));
discussion = convertToDocument(cDiscussion);
} else {
discussion = new Document();
}
}
// Replace discussion field
document.put("discussion", discussion);
return (Document) discussion;
}
private String extractAuthor(Document document) {
String author = "";
Document analyst = document.get("analyst", Document.class);
if (analyst != null) {
String id = analyst.getString("id");
if (StringUtils.isNotEmpty(id)) {
author = id;
}
}
return author;
}
}
| opencga-app/src/main/java/org/opencb/opencga/app/migrations/v2_4_2/catalog/AddClinicalDiscussion.java | package org.opencb.opencga.app.migrations.v2_4_2.catalog;
import com.mongodb.client.model.Projections;
import com.mongodb.client.model.UpdateOneModel;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import org.opencb.biodata.models.clinical.ClinicalDiscussion;
import org.opencb.opencga.catalog.db.mongodb.MongoDBAdaptorFactory;
import org.opencb.opencga.catalog.migration.Migration;
import org.opencb.opencga.catalog.migration.MigrationTool;
import org.opencb.opencga.core.common.TimeUtils;
import java.util.Arrays;
import java.util.List;
import static com.mongodb.client.model.Filters.eq;
@Migration(id = "add_clinical_discussion_TASK-1472",
description = "Add ClinicalDiscussion #TASK-1472", version = "2.4.2",
language = Migration.MigrationLanguage.JAVA,
domain = Migration.MigrationDomain.CATALOG,
date = 20220727)
public class AddClinicalDiscussion extends MigrationTool {
@Override
protected void run() throws Exception {
// Replace discussion from ClinicalAnalysis report.discussion
migrateCollection(
Arrays.asList(MongoDBAdaptorFactory.CLINICAL_ANALYSIS_COLLECTION, MongoDBAdaptorFactory.DELETED_CLINICAL_ANALYSIS_COLLECTION),
new Document("report.discussion.author", new Document("$exists", false)),
Projections.include("report", "analyst"),
((document, bulk) -> {
Document report = document.get("report", Document.class);
if (report != null) {
String author = extractAuthor(document);
Document discussionDoc = migrateDiscussion(report, author);;
bulk.add(new UpdateOneModel<>(
eq("_id", document.get("_id")),
new Document("$set", new Document("report.discussion", discussionDoc))
)
);
}
})
);
// We will be replacing the findings array which may be really big so we change the batch size to 1
setBatchSize(1);
// Replace discussion from ClinicalVariants from Interpretations primaryFindings.discussion, secondaryFindings.discussion
migrateCollection(
Arrays.asList(MongoDBAdaptorFactory.INTERPRETATION_COLLECTION, MongoDBAdaptorFactory.INTERPRETATION_ARCHIVE_COLLECTION,
MongoDBAdaptorFactory.DELETED_INTERPRETATION_COLLECTION),
new Document("$or", Arrays.asList(
new Document("primaryFindings.discussion.author", new Document("$exists", false)),
new Document("secondaryFindings.discussion.author", new Document("$exists", false))
)),
Projections.include("primaryFindings", "secondaryFindings", "analyst"),
((document, bulk) -> {
String author = extractAuthor(document);
List<Document> primaryFindings = document.getList("primaryFindings", Document.class);
List<Document> secondaryFindings = document.getList("secondaryFindings", Document.class);
if (CollectionUtils.isNotEmpty(primaryFindings)) {
for (Document primaryFinding : primaryFindings) {
migrateClinicalVariant(primaryFinding, author);
}
}
if (CollectionUtils.isNotEmpty(secondaryFindings)) {
for (Document secondaryFinding : secondaryFindings) {
migrateClinicalVariant(secondaryFinding, author);
}
}
bulk.add(new UpdateOneModel<>(
eq("_id", document.get("_id")),
new Document("$set", new Document()
.append("primaryFindings", primaryFindings)
.append("secondaryFindings", secondaryFindings))
)
);
})
);
}
private void migrateClinicalVariant(Document finding, String author) {
// Migrate discussion in root
migrateDiscussion(finding, author);
// Iterate over evidences
List<Document> evidences = finding.getList("evidences", Document.class);
if (CollectionUtils.isNotEmpty(evidences)) {
for (Document evidence : evidences) {
Document review = evidence.get("review", Document.class);
if (review != null) {
migrateDiscussion(review, author);
}
}
}
}
private Document migrateDiscussion(Document document, String author) {
Object discussion = document.get("discussion");
if (discussion == null || discussion instanceof String) {
ClinicalDiscussion cDiscussion;
if (discussion != null) {
cDiscussion = new ClinicalDiscussion(author, TimeUtils.getTime(), String.valueOf(discussion));
} else {
cDiscussion = new ClinicalDiscussion();
}
discussion = convertToDocument(cDiscussion);
}
// Replace discussion field
document.put("discussion", discussion);
return (Document) discussion;
}
private String extractAuthor(Document document) {
String author = "";
Document analyst = document.get("analyst", Document.class);
if (analyst != null) {
String id = analyst.getString("id");
if (StringUtils.isNotEmpty(id)) {
author = id;
}
}
return author;
}
}
| app: improve migration, #TASK-1472
| opencga-app/src/main/java/org/opencb/opencga/app/migrations/v2_4_2/catalog/AddClinicalDiscussion.java | app: improve migration, #TASK-1472 | <ide><path>pencga-app/src/main/java/org/opencb/opencga/app/migrations/v2_4_2/catalog/AddClinicalDiscussion.java
<ide> private Document migrateDiscussion(Document document, String author) {
<ide> Object discussion = document.get("discussion");
<ide> if (discussion == null || discussion instanceof String) {
<del> ClinicalDiscussion cDiscussion;
<ide> if (discussion != null) {
<del> cDiscussion = new ClinicalDiscussion(author, TimeUtils.getTime(), String.valueOf(discussion));
<add> ClinicalDiscussion cDiscussion = new ClinicalDiscussion(author, TimeUtils.getTime(), String.valueOf(discussion));
<add> discussion = convertToDocument(cDiscussion);
<ide> } else {
<del> cDiscussion = new ClinicalDiscussion();
<add> discussion = new Document();
<ide> }
<del> discussion = convertToDocument(cDiscussion);
<ide> }
<ide> // Replace discussion field
<ide> document.put("discussion", discussion); |
|
Java | bsd-2-clause | 805d5fe73c5328f62ec55eaa4189b4537e323872 | 0 | jason-p-pickering/chailmis-android,clintonhealthaccess/chailmis-android,clintonhealthaccess/chailmis-android,jason-p-pickering/chailmis-android,jason-p-pickering/chailmis-android,jason-p-pickering/chailmis-android,clintonhealthaccess/chailmis-android,jason-p-pickering/chailmis-android,clintonhealthaccess/chailmis-android,clintonhealthaccess/chailmis-android | package org.clintonhealthaccess.lmis.app;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ArrayAdapter;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import com.jjoe64.graphview.BarGraphView;
import com.jjoe64.graphview.GraphView;
import com.jjoe64.graphview.GraphViewSeries;
import java.util.ArrayList;
import java.util.Collections;
import roboguice.activity.RoboActionBarActivity;
import roboguice.inject.InjectView;
public class HomeActivity extends RoboActionBarActivity {
public static final String DATE_FORMAT = "dd/MM/yyyy";
private TextView textFacilityName;
@InjectView(R.id.layoutGraph)
private LinearLayout layout;
@InjectView(R.id.listViewAlerts)
private ListView listViewAlerts;
@InjectView(R.id.listViewNotifications)
private ListView listViewNotifications;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
getSupportActionBar().setDisplayOptions(ActionBar.DISPLAY_SHOW_CUSTOM);
getSupportActionBar().setCustomView(R.layout.action_bar);
setFacilityName("Kabira Health Center");
setupGraph();
setupAlerts();
}
private void setFacilityName(String text) {
textFacilityName = (TextView) getSupportActionBar().getCustomView().findViewById(R.id.textFacilityName);
textFacilityName.setText(text);
}
private void setupAlerts() {
String[] values = new String[]{"Low Stock for Coartem", "Low Stock for Panadol", "Low Stock for Hedex"};
ArrayList<String> list = new ArrayList<String>();
Collections.addAll(list, values);
ArrayAdapter adapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1, list);
listViewAlerts.setAdapter(adapter);
String[] notificationValues = new String[]{"You Have a new Allocation"};
ArrayList<String> notificationList = new ArrayList<String>();
Collections.addAll(notificationList, notificationValues);
ArrayAdapter notificationAdapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1, notificationList);
listViewNotifications.setAdapter(notificationAdapter);
}
private void setupGraph() {
GraphView graphView = new BarGraphView(this, "Commodity Consumption");
GraphViewSeries exampleSeries = new GraphViewSeries(new GraphView.GraphViewData[]{
new GraphView.GraphViewData(1, 3.0d),
new GraphView.GraphViewData(2, 12d),
new GraphView.GraphViewData(3, 4d),
new GraphView.GraphViewData(4, 10d),
new GraphView.GraphViewData(5, 6d)
});
graphView.addSeries(exampleSeries);
layout.addView(graphView);
textFacilityName = (TextView) getSupportActionBar().getCustomView().findViewById(R.id.textFacilityName);
textFacilityName.setText("Kabira Health Center");
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.home, menu);
menu.add(getDate()).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
return true;
}
private String getDate() {
return android.text.format.DateFormat.format(DATE_FORMAT, new java.util.Date()).toString();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
return super.onOptionsItemSelected(item);
}
}
| app/src/main/java/org/clintonhealthaccess/lmis/app/HomeActivity.java | package org.clintonhealthaccess.lmis.app;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ArrayAdapter;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import com.jjoe64.graphview.BarGraphView;
import com.jjoe64.graphview.GraphView;
import com.jjoe64.graphview.GraphViewSeries;
import java.util.ArrayList;
import roboguice.activity.RoboActionBarActivity;
import roboguice.inject.InjectView;
public class HomeActivity extends RoboActionBarActivity {
public static final String DATE_FORMAT = "dd/MM/yyyy";
private TextView textFacilityName;
@InjectView(R.id.layoutGraph)
private LinearLayout layout;
@InjectView(R.id.listViewAlerts)
private ListView listViewAlerts;
@InjectView(R.id.listViewNotifications)
private ListView listViewNotifications;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
getSupportActionBar().setDisplayOptions(ActionBar.DISPLAY_SHOW_CUSTOM);
getSupportActionBar().setCustomView(R.layout.action_bar);
setFacilityName("Kabira Health Center");
setupGraph();
setupAlerts();
}
private void setFacilityName(String text) {
textFacilityName = (TextView) getSupportActionBar().getCustomView().findViewById(R.id.textFacilityName);
textFacilityName.setText(text);
}
private void setupAlerts() {
String[] values = new String[]{"Low Stock for Coartem", "Low Stock for Panadol", "Low Stock for Hedex"};
ArrayList<String> list = new ArrayList<String>();
for (int i = 0; i < values.length; ++i) {
list.add(values[i]);
}
ArrayAdapter adapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1, list);
listViewAlerts.setAdapter(adapter);
String[] notificationValues = new String[]{"You Have a new Allocation"};
ArrayList<String> notificationList = new ArrayList<String>();
for (int j = 0; j < notificationValues.length; ++j) {
notificationList.add(notificationValues[j]);
}
ArrayAdapter notificationAdapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1, notificationList);
listViewNotifications.setAdapter(notificationAdapter);
}
private void setupGraph() {
GraphView graphView = new BarGraphView(
this
, "Commodity Consumption"
);
GraphViewSeries exampleSeries = new GraphViewSeries(new GraphView.GraphViewData[]{
new GraphView.GraphViewData(1, 3.0d)
, new GraphView.GraphViewData(2, 12d)
, new GraphView.GraphViewData(3, 4d)
, new GraphView.GraphViewData(4, 10d)
, new GraphView.GraphViewData(5, 6d)
});
graphView.addSeries(exampleSeries);
layout.addView(graphView);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.home, menu);
menu.add(getDate()).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
return true;
}
private String getDate() {
return android.text.format.DateFormat.format(DATE_FORMAT, new java.util.Date()).toString();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
return super.onOptionsItemSelected(item);
}
}
| Inlining variable
| app/src/main/java/org/clintonhealthaccess/lmis/app/HomeActivity.java | Inlining variable | <ide><path>pp/src/main/java/org/clintonhealthaccess/lmis/app/HomeActivity.java
<ide> import com.jjoe64.graphview.GraphViewSeries;
<ide>
<ide> import java.util.ArrayList;
<add>import java.util.Collections;
<ide>
<ide> import roboguice.activity.RoboActionBarActivity;
<ide> import roboguice.inject.InjectView;
<ide> public class HomeActivity extends RoboActionBarActivity {
<ide>
<ide> public static final String DATE_FORMAT = "dd/MM/yyyy";
<add> private TextView textFacilityName;
<ide>
<del> private TextView textFacilityName;
<ide>
<ide> @InjectView(R.id.layoutGraph)
<ide> private LinearLayout layout;
<ide> setContentView(R.layout.activity_home);
<ide>
<ide> getSupportActionBar().setDisplayOptions(ActionBar.DISPLAY_SHOW_CUSTOM);
<del>
<ide> getSupportActionBar().setCustomView(R.layout.action_bar);
<del>
<ide> setFacilityName("Kabira Health Center");
<del>
<ide> setupGraph();
<del>
<ide> setupAlerts();
<del>
<ide> }
<ide>
<ide> private void setFacilityName(String text) {
<ide> String[] values = new String[]{"Low Stock for Coartem", "Low Stock for Panadol", "Low Stock for Hedex"};
<ide>
<ide> ArrayList<String> list = new ArrayList<String>();
<del> for (int i = 0; i < values.length; ++i) {
<del> list.add(values[i]);
<del> }
<add> Collections.addAll(list, values);
<ide> ArrayAdapter adapter = new ArrayAdapter<String>(this,
<ide> android.R.layout.simple_list_item_1, list);
<ide> listViewAlerts.setAdapter(adapter);
<ide>
<del>
<ide> String[] notificationValues = new String[]{"You Have a new Allocation"};
<ide>
<ide> ArrayList<String> notificationList = new ArrayList<String>();
<del> for (int j = 0; j < notificationValues.length; ++j) {
<del> notificationList.add(notificationValues[j]);
<del> }
<add> Collections.addAll(notificationList, notificationValues);
<ide> ArrayAdapter notificationAdapter = new ArrayAdapter<String>(this,
<ide> android.R.layout.simple_list_item_1, notificationList);
<ide> listViewNotifications.setAdapter(notificationAdapter);
<ide> }
<ide>
<ide> private void setupGraph() {
<del> GraphView graphView = new BarGraphView(
<del> this
<del> , "Commodity Consumption"
<del> );
<add> GraphView graphView = new BarGraphView(this, "Commodity Consumption");
<ide> GraphViewSeries exampleSeries = new GraphViewSeries(new GraphView.GraphViewData[]{
<del> new GraphView.GraphViewData(1, 3.0d)
<del> , new GraphView.GraphViewData(2, 12d)
<del> , new GraphView.GraphViewData(3, 4d)
<del> , new GraphView.GraphViewData(4, 10d)
<del> , new GraphView.GraphViewData(5, 6d)
<del>
<add> new GraphView.GraphViewData(1, 3.0d),
<add> new GraphView.GraphViewData(2, 12d),
<add> new GraphView.GraphViewData(3, 4d),
<add> new GraphView.GraphViewData(4, 10d),
<add> new GraphView.GraphViewData(5, 6d)
<ide> });
<ide> graphView.addSeries(exampleSeries);
<ide>
<ide> layout.addView(graphView);
<add> textFacilityName = (TextView) getSupportActionBar().getCustomView().findViewById(R.id.textFacilityName);
<add> textFacilityName.setText("Kabira Health Center");
<ide> }
<del>
<ide>
<ide> @Override
<ide> public boolean onCreateOptionsMenu(Menu menu) { |
|
Java | apache-2.0 | 6cfb32a6a34439438673f6fa2354ca50e5717043 | 0 | NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra,NationalSecurityAgency/ghidra | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.dbg.testutil;
import java.io.*;
import ghidra.dbg.DebuggerModelListener;
import ghidra.dbg.DebuggerObjectModel;
import ghidra.dbg.target.TargetConsole.Channel;
import ghidra.dbg.target.TargetInterpreter;
import ghidra.dbg.target.TargetObject;
public class DebuggerConsole extends Thread implements DebuggerModelListener, AutoCloseable {
private final DebuggerObjectModel model;
private final BufferedReader reader;
private TargetInterpreter interpreter;
private boolean closed = false;
public DebuggerConsole(DebuggerObjectModel model) {
this.model = model;
this.reader = new BufferedReader(new InputStreamReader(System.in));
model.addModelListener(this);
setDaemon(true);
start();
}
@Override
public void consoleOutput(TargetObject console, Channel channel, byte[] data) {
if (console instanceof TargetInterpreter) {
if (interpreter == null) {
System.out.println("Found interpreter: " + console);
interpreter = (TargetInterpreter) console;
}
}
String text = new String(data);
System.out.println(text);
}
@Override
public void run() {
try {
while (!closed) {
String line = reader.readLine();
if (line == null) {
// NB. EOF happens immediately under Gradle
return;
}
if (interpreter == null) {
System.err.println("Have not found interpreter, yet");
continue;
}
interpreter.execute(line).whenComplete((__, ex) -> {
if (ex != null) {
System.err.println("Command error: " + ex.getMessage());
}
else {
System.out.println("Command finished");
}
});
}
}
catch (IOException e) {
System.err.println("IOException on console: " + e);
}
}
@Override
public void close() throws Exception {
model.removeModelListener(this);
closed = true;
interrupt();
}
}
| Ghidra/Debug/Framework-Debugging/src/test/java/ghidra/dbg/testutil/DebuggerConsole.java | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.dbg.testutil;
import java.io.*;
import ghidra.dbg.DebuggerModelListener;
import ghidra.dbg.DebuggerObjectModel;
import ghidra.dbg.target.TargetConsole.Channel;
import ghidra.dbg.target.TargetInterpreter;
import ghidra.dbg.target.TargetObject;
public class DebuggerConsole extends Thread implements DebuggerModelListener, AutoCloseable {
private final DebuggerObjectModel model;
private final BufferedReader reader;
private TargetInterpreter interpreter;
private boolean closed = false;
public DebuggerConsole(DebuggerObjectModel model) {
this.model = model;
this.reader = new BufferedReader(new InputStreamReader(System.in));
model.addModelListener(this);
setDaemon(true);
start();
}
@Override
public void consoleOutput(TargetObject console, Channel channel, byte[] data) {
if (console instanceof TargetInterpreter) {
if (interpreter == null) {
System.out.println("Found interpreter: " + console);
interpreter = (TargetInterpreter) console;
}
}
String text = new String(data);
System.out.println(text);
}
@Override
public void run() {
try {
while (!closed) {
String line = reader.readLine();
if (interpreter == null) {
System.err.println("Have not found interpreter, yet");
continue;
}
interpreter.execute(line).whenComplete((__, ex) -> {
if (ex != null) {
System.err.println("Command error: " + ex.getMessage());
}
else {
System.out.println("Command finished");
}
});
}
}
catch (IOException e) {
System.err.println("IOException on console: " + e);
}
}
@Override
public void close() throws Exception {
model.removeModelListener(this);
closed = true;
interrupt();
}
}
| GP-0: Fixed debugger model testing under gradle
| Ghidra/Debug/Framework-Debugging/src/test/java/ghidra/dbg/testutil/DebuggerConsole.java | GP-0: Fixed debugger model testing under gradle | <ide><path>hidra/Debug/Framework-Debugging/src/test/java/ghidra/dbg/testutil/DebuggerConsole.java
<ide> try {
<ide> while (!closed) {
<ide> String line = reader.readLine();
<add> if (line == null) {
<add> // NB. EOF happens immediately under Gradle
<add> return;
<add> }
<ide> if (interpreter == null) {
<ide> System.err.println("Have not found interpreter, yet");
<ide> continue; |
|
Java | apache-2.0 | 17eebad73642562593f2d887d23b8a5ff927c3d8 | 0 | huangll/elasticsearch-hadoop,jasontedor/elasticsearch-hadoop,nfouka/elasticsearch-hadoop,Gavin-Yang/elasticsearch-hadoop,puneetjaiswal/elasticsearch-hadoop,xjrk58/elasticsearch-hadoop,girirajsharma/elasticsearch-hadoop,samkohli/elasticsearch-hadoop,pranavraman/elasticsearch-hadoop,costin/elasticsearch-hadoop,lgscofield/elasticsearch-hadoop,sarwarbhuiyan/elasticsearch-hadoop,takezoe/elasticsearch-hadoop,cgvarela/elasticsearch-hadoop,elastic/elasticsearch-hadoop,holdenk/elasticsearch-hadoop,elastic/elasticsearch-hadoop,trifork/elasticsearch-hadoop,kai5263499/elasticsearch-hadoop,yonglehou/elasticsearch-hadoop,aie108/elasticsearch-hadoop | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elasticsearch.hadoop.integration.hive;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.elasticsearch.hadoop.integration.HdfsUtils;
import org.elasticsearch.hadoop.integration.HdpBootstrap;
import org.elasticsearch.hadoop.integration.LocalES;
import org.elasticsearch.hadoop.integration.Provisioner;
import org.elasticsearch.hadoop.util.StringUtils;
import org.elasticsearch.hadoop.util.TestSettings;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.rules.ChainedExternalResource;
import org.junit.rules.ExternalResource;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
@RunWith(Suite.class)
@Suite.SuiteClasses({ HiveSaveTest.class, HiveSearchTest.class })
//@Suite.SuiteClasses({ HiveSearchTest.class })
public class HiveSuite {
static HiveInstance server;
static boolean isLocal = true;
static String cleanDdl = "DROP DATABASE IF EXISTS test CASCADE";
static String createDB = "CREATE DATABASE test";
static String useDB = "USE test";
static String originalResource;
static String hdfsResource;
static String hdfsEsLib;
static Configuration hadoopConfig;
static {
try {
originalResource = HiveSuite.class.getClassLoader().getResource("hive-compound.dat").toURI().toString();
hdfsResource = originalResource;
} catch (URISyntaxException ex) {
throw new RuntimeException(ex);
}
}
public static ExternalResource hive = new ExternalResource() {
@Override
protected void before() throws Throwable {
Properties props = new TestSettings().getProperties();
String hive = props.getProperty("hive", "local");
isLocal = "local".equals(hive);
server = (isLocal ? new HiveEmbeddedServer(props) : new HiveJdbc(hive));
server.start();
server.execute(cleanDdl);
server.execute(createDB);
server.execute(useDB);
}
@Override
protected void after() {
try {
server.execute(cleanDdl);
server.stop();
} catch (Exception ex) {
}
}
};
@ClassRule
public static ExternalResource resource = new ChainedExternalResource(new LocalES(), hive);
@BeforeClass
public static void setup() throws Exception {
if (!isLocal) {
hadoopConfig = HdpBootstrap.hadoopConfig();
HdfsUtils.copyFromLocal(Provisioner.ESHADOOP_TESTING_JAR, Provisioner.HDFS_ES_HDP_LIB);
hdfsEsLib = HdfsUtils.qualify(Provisioner.HDFS_ES_HDP_LIB, hadoopConfig);
// copy jar to DistributedCache
try {
DistributedCache.addArchiveToClassPath(new Path(Provisioner.HDFS_ES_HDP_LIB), hadoopConfig);
} catch (IOException ex) {
throw new RuntimeException("Cannot provision Hive", ex);
}
hdfsResource = "/eshdp/hive/hive-compund.dat";
HdfsUtils.copyFromLocal(originalResource, hdfsResource);
hdfsResource = HdfsUtils.qualify(hdfsResource, hadoopConfig);
}
}
public static String tableProps(String resource, String... params) {
StringBuilder sb = new StringBuilder("STORED BY 'org.elasticsearch.hadoop.hive.ESStorageHandler' ");
sb.append("TBLPROPERTIES('es.resource' = '" + resource + "' ");
for (String string : params) {
sb.append(",");
sb.append(string);
}
if (!isLocal) {
String host = hadoopConfig.get("es.host");
if (StringUtils.hasText(host)) {
sb.append(",'es.host'='" + host + "'");
}
String port = hadoopConfig.get("es.port");
sb.append(",'es.port'='" + port + "'");
}
sb.append(")");
return sb.toString();
}
public static void provisionEsLib() throws Exception {
// provision on each test run since LOAD DATA _moves_ the file
if (!isLocal) {
hdfsResource = "/eshdp/hive/hive-compund.dat";
HdfsUtils.copyFromLocal(originalResource, hdfsResource);
}
String jar = "ADD JAR " + HiveSuite.hdfsEsLib;
if (!isLocal) {
System.out.println(server.execute(jar));
}
}
} | src/test/java/org/elasticsearch/hadoop/integration/hive/HiveSuite.java | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elasticsearch.hadoop.integration.hive;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.elasticsearch.hadoop.integration.HdfsUtils;
import org.elasticsearch.hadoop.integration.HdpBootstrap;
import org.elasticsearch.hadoop.integration.LocalES;
import org.elasticsearch.hadoop.integration.Provisioner;
import org.elasticsearch.hadoop.util.StringUtils;
import org.elasticsearch.hadoop.util.TestSettings;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.rules.ChainedExternalResource;
import org.junit.rules.ExternalResource;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import static org.elasticsearch.hadoop.integration.hive.HiveSuite.*;
@RunWith(Suite.class)
@Suite.SuiteClasses({ HiveSaveTest.class, HiveSearchTest.class })
//@Suite.SuiteClasses({ HiveSearchTest.class })
public class HiveSuite {
static HiveInstance server;
static boolean isLocal = true;
static String cleanDdl = "DROP DATABASE IF EXISTS test CASCADE";
static String createDB = "CREATE DATABASE test";
static String useDB = "USE test";
static String originalResource;
static String hdfsResource;
static String hdfsEsLib;
static Configuration hadoopConfig;
static {
try {
originalResource = HiveSuite.class.getClassLoader().getResource("hive-compound.dat").toURI().toString();
hdfsResource = originalResource;
} catch (URISyntaxException ex) {
throw new RuntimeException(ex);
}
}
public static ExternalResource hive = new ExternalResource() {
@Override
protected void before() throws Throwable {
Properties props = new TestSettings().getProperties();
String hive = props.getProperty("hive", "local");
isLocal = "local".equals(hive);
server = (isLocal ? new HiveEmbeddedServer(props) : new HiveJdbc(hive));
server.start();
server.execute(cleanDdl);
server.execute(createDB);
server.execute(useDB);
}
@Override
protected void after() {
try {
server.execute(cleanDdl);
server.stop();
} catch (Exception ex) {
}
}
};
@ClassRule
public static ExternalResource resource = new ChainedExternalResource(new LocalES(), hive);
@BeforeClass
public static void setup() throws Exception {
if (!isLocal) {
hadoopConfig = HdpBootstrap.hadoopConfig();
HdfsUtils.copyFromLocal(Provisioner.ESHADOOP_TESTING_JAR, Provisioner.HDFS_ES_HDP_LIB);
hdfsEsLib = HdfsUtils.qualify(Provisioner.HDFS_ES_HDP_LIB, hadoopConfig);
// copy jar to DistributedCache
try {
DistributedCache.addArchiveToClassPath(new Path(Provisioner.HDFS_ES_HDP_LIB), hadoopConfig);
} catch (IOException ex) {
throw new RuntimeException("Cannot provision Hive", ex);
}
hdfsResource = "/eshdp/hive/hive-compund.dat";
HdfsUtils.copyFromLocal(originalResource, hdfsResource);
hdfsResource = HdfsUtils.qualify(hdfsResource, hadoopConfig);
}
}
public static String tableProps(String resource, String... params) {
StringBuilder sb = new StringBuilder("STORED BY 'org.elasticsearch.hadoop.hive.ESStorageHandler' ");
sb.append("TBLPROPERTIES('es.resource' = '" + resource + "' ");
for (String string : params) {
sb.append(",");
sb.append(string);
}
if (!isLocal) {
String host = hadoopConfig.get("es.host");
if (StringUtils.hasText(host)) {
sb.append(",'es.host'='" + host + "'");
}
String port = hadoopConfig.get("es.port");
sb.append(",'es.port'='" + port + "'");
}
sb.append(")");
return sb.toString();
}
public static void provisionEsLib() throws Exception {
// provision on each test run since LOAD DATA _moves_ the file
if (!isLocal) {
hdfsResource = "/eshdp/hive/hive-compund.dat";
HdfsUtils.copyFromLocal(originalResource, hdfsResource);
}
String jar = "ADD JAR " + HiveSuite.hdfsEsLib;
if (!isLocal) {
System.out.println(server.execute(jar));
}
}
} | organize imports
| src/test/java/org/elasticsearch/hadoop/integration/hive/HiveSuite.java | organize imports | <ide><path>rc/test/java/org/elasticsearch/hadoop/integration/hive/HiveSuite.java
<ide> import org.elasticsearch.hadoop.integration.Provisioner;
<ide> import org.elasticsearch.hadoop.util.StringUtils;
<ide> import org.elasticsearch.hadoop.util.TestSettings;
<del>import org.junit.Before;
<ide> import org.junit.BeforeClass;
<ide> import org.junit.ClassRule;
<ide> import org.junit.rules.ChainedExternalResource;
<ide> import org.junit.rules.ExternalResource;
<ide> import org.junit.runner.RunWith;
<ide> import org.junit.runners.Suite;
<del>
<del>import static org.elasticsearch.hadoop.integration.hive.HiveSuite.*;
<ide>
<ide> @RunWith(Suite.class)
<ide> @Suite.SuiteClasses({ HiveSaveTest.class, HiveSearchTest.class }) |
|
Java | bsd-3-clause | 692c936dba3ea3af068d0d26bd241944fa59642e | 0 | erikhofer/GrowthLectures,erikhofer/GrowthLectures,erikhofer/GrowthLectures | package com.xinra.growthlectures.entity;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Custom JPQL-based search implementation for {@code Lecture} entities.
 *
 * <p>Builds a dynamic query that optionally filters by a free-text term
 * (matched case-insensitively against lecture name, lecturer name,
 * description and category name) and sorts by the requested criterion.
 */
public class LectureRepositoryImpl implements AbstractLectureRepositoryCustom<Lecture> {

  @Autowired
  EntityManager entityManager;

  /** Searches all lectures matching the given term. */
  @Override
  public List<Lecture> search(String term, OrderBy orderBy, boolean decending) {
    Map<String, Object> parameters = new HashMap<>();
    return searchInternal(parameters, "", term, orderBy, decending);
  }

  /** Searches lectures of a single category, identified by its slug. */
  @Override
  public List<Lecture> searchForCategory(String categorySlug, String term,
      OrderBy orderBy, boolean decending) {
    Map<String, Object> parameters = new HashMap<>();
    parameters.put("categorySlug", categorySlug);
    return searchInternal(parameters, " AND l.category.slug = :categorySlug", term, orderBy, decending);
  }

  /** Searches lectures held by a single lecturer, identified by its slug. */
  @Override
  public List<Lecture> searchForLecturer(String lecturerSlug, String term,
      OrderBy orderBy, boolean decending) {
    Map<String, Object> parameters = new HashMap<>();
    parameters.put("lecturerSlug", lecturerSlug);
    return searchInternal(parameters, " AND l.lecturer.slug = :lecturerSlug", term, orderBy, decending);
  }

  /**
   * Builds and runs the actual JPQL query.
   *
   * @param parameters named query parameters collected so far; the search
   *        term is added here when present
   * @param where      extra WHERE-clause fragment; must start with " AND "
   * @param term       free-text search term; may be null or empty (no filter)
   * @param orderBy    sort criterion (ADDED or RATING)
   * @param decending  true for descending sort order
   * @return the matching lectures
   */
  @SuppressWarnings("unchecked")
  private List<Lecture> searchInternal(Map<String, Object> parameters, String where, String term,
      OrderBy orderBy, boolean decending) {
    String queryString = "SELECT l FROM Lecture l WHERE 1=1" + where;
    // Null guard added: a null term previously caused a NullPointerException.
    if (term != null && !term.isEmpty()) {
      queryString += " AND (UPPER(l.name) LIKE UPPER(:term)"
          + " OR UPPER(l.lecturer.name) LIKE UPPER(:term)"
          + " OR UPPER(l.description) LIKE UPPER(:term)"
          + " OR UPPER(l.category.name) LIKE UPPER(:term))";
      parameters.put("term", "%" + term + "%");
    }
    switch (orderBy) {
      case ADDED:
        queryString += " ORDER BY l.added";
        break;
      case RATING:
        queryString += " ORDER BY l.ratingAverage";
        break;
      default:
        throw new IllegalArgumentException();
    }
    if (decending) {
      queryString += " DESC";
    }
    Query query = entityManager.createQuery(queryString);
    parameters.forEach((k, v) -> query.setParameter(k, v));
    return query.getResultList();
  }
}
| src/main/groovy/com/xinra/growthlectures/entity/LectureRepositoryImpl.java | package com.xinra.growthlectures.entity;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Custom JPQL-based search implementation for {@code Lecture} entities.
 *
 * <p>Fixes applied: the category/lecturer WHERE fragments were missing the
 * leading " AND " (yielding malformed JPQL such as "WHERE 1=1 l.category.slug
 * = ..."), and the lecturer parameter was registered under the misspelled key
 * "lectuerSlug" while the query binds ":lecturerSlug".
 */
public class LectureRepositoryImpl implements AbstractLectureRepositoryCustom<Lecture> {

  @Autowired
  EntityManager entityManager;

  /** Searches all lectures matching the given term. */
  @Override
  public List<Lecture> search(String term, OrderBy orderBy, boolean decending) {
    Map<String, Object> parameters = new HashMap<>();
    return searchInternal(parameters, "", term, orderBy, decending);
  }

  /** Searches lectures of a single category, identified by its slug. */
  @Override
  public List<Lecture> searchForCategory(String categorySlug, String term,
      OrderBy orderBy, boolean decending) {
    Map<String, Object> parameters = new HashMap<>();
    parameters.put("categorySlug", categorySlug);
    // " AND " prefix is required so the fragment appends to "WHERE 1=1".
    return searchInternal(parameters, " AND l.category.slug = :categorySlug", term, orderBy, decending);
  }

  /** Searches lectures held by a single lecturer, identified by its slug. */
  @Override
  public List<Lecture> searchForLecturer(String lecturerSlug, String term,
      OrderBy orderBy, boolean decending) {
    Map<String, Object> parameters = new HashMap<>();
    // Key must match the ":lecturerSlug" placeholder used in the fragment below.
    parameters.put("lecturerSlug", lecturerSlug);
    return searchInternal(parameters, " AND l.lecturer.slug = :lecturerSlug", term, orderBy, decending);
  }

  /**
   * Builds and runs the actual JPQL query.
   *
   * @param parameters named query parameters collected so far; the search
   *        term is added here when present
   * @param where      extra WHERE-clause fragment; must start with " AND "
   * @param term       free-text search term; may be null or empty (no filter)
   * @param orderBy    sort criterion (ADDED or RATING)
   * @param decending  true for descending sort order
   * @return the matching lectures
   */
  @SuppressWarnings("unchecked")
  private List<Lecture> searchInternal(Map<String, Object> parameters, String where, String term,
      OrderBy orderBy, boolean decending) {
    String queryString = "SELECT l FROM Lecture l WHERE 1=1" + where;
    // Null guard added: a null term previously caused a NullPointerException.
    if (term != null && !term.isEmpty()) {
      queryString += " AND (UPPER(l.name) LIKE UPPER(:term)"
          + " OR UPPER(l.lecturer.name) LIKE UPPER(:term)"
          + " OR UPPER(l.description) LIKE UPPER(:term)"
          + " OR UPPER(l.category.name) LIKE UPPER(:term))";
      parameters.put("term", "%" + term + "%");
    }
    switch (orderBy) {
      case ADDED:
        queryString += " ORDER BY l.added";
        break;
      case RATING:
        queryString += " ORDER BY l.ratingAverage";
        break;
      default:
        throw new IllegalArgumentException();
    }
    if (decending) {
      queryString += " DESC";
    }
    Query query = entityManager.createQuery(queryString);
    parameters.forEach((k, v) -> query.setParameter(k, v));
    return query.getResultList();
  }
}
| Fixed SQL queries. #19
| src/main/groovy/com/xinra/growthlectures/entity/LectureRepositoryImpl.java | FIxed SQL queries. #19 | <ide><path>rc/main/groovy/com/xinra/growthlectures/entity/LectureRepositoryImpl.java
<ide> OrderBy orderBy, boolean decending) {
<ide> Map<String, Object> parameters = new HashMap<>();
<ide> parameters.put("categorySlug", categorySlug);
<del> return searchInternal(parameters, " l.category.slug = :categorySlug", term, orderBy, decending);
<add> return searchInternal(parameters, " AND l.category.slug = :categorySlug", term, orderBy, decending);
<ide> }
<ide>
<ide>
<ide> public List<Lecture> searchForLecturer(String lecturerSlug, String term,
<ide> OrderBy orderBy, boolean decending) {
<ide> Map<String, Object> parameters = new HashMap<>();
<del> parameters.put("lectuerSlug", lecturerSlug);
<del> return searchInternal(parameters, " l.lecturer.slug = :lecturerSlug", term, orderBy, decending);
<add> parameters.put("lecturerSlug", lecturerSlug);
<add> return searchInternal(parameters, " AND l.lecturer.slug = :lecturerSlug", term, orderBy, decending);
<ide> }
<ide>
<ide> |
|
JavaScript | isc | 89961f822abc23fa57fae8515fcbb427f64613bc | 0 | Parkjeahwan/awegeeks,zealord/gatewayd,xdv/gatewayd,xdv/gatewayd,crazyquark/gatewayd,whotooktwarden/gatewayd,crazyquark/gatewayd,whotooktwarden/gatewayd,Parkjeahwan/awegeeks,zealord/gatewayd | var RippleRestClient = require('ripple-rest-client');
var uuid = require('node-uuid');
var config = require(__dirname+'/../../config/config.js');
var rippleRestClient = new RippleRestClient({
account: config.get('COLD_WALLET')
});
function fundHotWallet(payment, callback) {
var options = {
secret: payment.secret,
client_resource_id: uuid.v4(),
payment: {
destination_account: config.get('HOT_WALLET').address,
source_account: config.get('COLD_WALLET'),
destination_amount: {
value: payment.amount.toString(),
currency: payment.currency
}
}
};
rippleRestClient.sendAndConfirmPayment(options, function(error, response){
if (error || (response.result != 'tesSUCCESS')) {
logger.error('rippleRestClient.sendAndConfirmPayment', error);
return callback(error, null);
}
callback(null, response);
});
}
module.exports = fundHotWallet;
| lib/api/fund_hot_wallet.js | var RippleRestClient = require('ripple-rest-client');
var ripple = require(__dirname+'/../ripple/');
var uuid = require('node-uuid');
var async = require('async');
var config = require(__dirname+'/../../config/config.js');
var rippleRestClient = new RippleRestClient({
account: config.get('COLD_WALLET')
});
// Sends funds from the cold wallet to the hot wallet.
//
// payment  - { secret, amount, currency }: signing secret plus the amount and
//            currency to transfer (amount is stringified for the REST API).
// callback - node-style callback(error, response); called with the REST
//            response on success, or the error on failure.
function fundHotWallet(payment, callback) {
var options = {
secret: payment.secret,
// Fresh client resource id makes the submission idempotent on the REST API side.
client_resource_id: uuid.v4(),
payment: {
destination_account: config.get('HOT_WALLET').address,
source_account: config.get('COLD_WALLET'),
destination_amount: {
value: payment.amount.toString(),
currency: payment.currency
}
}
};
rippleRestClient.sendAndConfirmPayment(options, function(error, response){
// Treat both transport errors and non-"tesSUCCESS" engine results as failure.
if (error || (response.result != 'tesSUCCESS')) {
// NOTE(review): 'logger' is not required/declared anywhere in this module —
// this line will throw a ReferenceError when the error branch is hit unless
// a global logger is installed. Confirm and require the logger explicitly.
logger.error('rippleRestClient.sendAndConfirmPayment', error);
return callback(error, null);
}
callback(null, response);
});
}
module.exports = fundHotWallet;
| [TASK] remove unused modules
| lib/api/fund_hot_wallet.js | [TASK] remove unused modules | <ide><path>ib/api/fund_hot_wallet.js
<ide> var RippleRestClient = require('ripple-rest-client');
<del>var ripple = require(__dirname+'/../ripple/');
<ide> var uuid = require('node-uuid');
<del>var async = require('async');
<ide> var config = require(__dirname+'/../../config/config.js');
<ide>
<ide> var rippleRestClient = new RippleRestClient({ |
|
Java | apache-2.0 | 0da20a5c80a8313f2629134716cf985d1df24271 | 0 | orientechnologies/orientdb,sanyaade-g2g-repos/orientdb,tempbottle/orientdb,joansmith/orientdb,rprabhat/orientdb,orientechnologies/orientdb,wouterv/orientdb,tempbottle/orientdb,mmacfadden/orientdb,wouterv/orientdb,allanmoso/orientdb,wyzssw/orientdb,wouterv/orientdb,allanmoso/orientdb,wyzssw/orientdb,mmacfadden/orientdb,allanmoso/orientdb,rprabhat/orientdb,giastfader/orientdb,mmacfadden/orientdb,tempbottle/orientdb,sanyaade-g2g-repos/orientdb,wouterv/orientdb,joansmith/orientdb,allanmoso/orientdb,giastfader/orientdb,mmacfadden/orientdb,sanyaade-g2g-repos/orientdb,rprabhat/orientdb,wyzssw/orientdb,giastfader/orientdb,tempbottle/orientdb,wyzssw/orientdb,orientechnologies/orientdb,joansmith/orientdb,orientechnologies/orientdb,joansmith/orientdb,rprabhat/orientdb,sanyaade-g2g-repos/orientdb,giastfader/orientdb | /*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.command.script;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.common.concur.ONeedRetryException;
import com.orientechnologies.common.concur.resource.OPartitionedObjectPool;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.command.OCommandDistributedReplicateRequest;
import com.orientechnologies.orient.core.command.OCommandExecutorAbstract;
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.exception.OTransactionException;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.OCommandSQLParsingException;
import com.orientechnologies.orient.core.storage.ORecordDuplicatedException;
import com.orientechnologies.orient.core.tx.OTransaction;
import javax.script.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.*;
/**
* Executes Script Commands.
*
* @see OCommandScript
* @author Luca Garulli
*
*/
public class OCommandExecutorScript extends OCommandExecutorAbstract implements OCommandDistributedReplicateRequest {
private static final int MAX_DELAY = 100;
protected OCommandScript request;
public OCommandExecutorScript() {
}
/**
 * Stores the incoming request for later execution.
 *
 * @param iRequest the script command request; must be an {@link OCommandScript}
 * @return this executor, for chaining
 */
@SuppressWarnings("unchecked")
public OCommandExecutorScript parse(final OCommandRequest iRequest) {
request = (OCommandScript) iRequest;
return this;
}
/** Script commands are executed locally, not replicated across the cluster. */
public OCommandDistributedReplicateRequest.DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
return DISTRIBUTED_EXECUTION_MODE.LOCAL;
}
/** Executes the parsed script using this executor's own context. */
public Object execute(final Map<Object, Object> iArgs) {
return executeInContext(context, iArgs);
}
/**
 * Executes the parsed script in the given context.
 *
 * <p>SQL scripts are executed statement-by-statement by this class; any other
 * language is delegated to the JSR-223 scripting engine.
 *
 * @param iContext command context used during execution
 * @param iArgs    named arguments made available to the script
 * @return the script result
 */
public Object executeInContext(final OCommandContext iContext, final Map<Object, Object> iArgs) {
  final String language = request.getLanguage();
  parserText = request.getText();
  parameters = iArgs; // (duplicate assignment removed)
  if (language.equalsIgnoreCase("SQL")) {
    // SPECIAL CASE: EXECUTE THE COMMANDS IN SEQUENCE
    return executeSQL();
  }
  return executeJsr223Script(language, iContext, iArgs);
}
/** Scripts may modify data, so they are never considered idempotent. */
public boolean isIdempotent() {
return false;
}
/**
 * Executes the script through a JSR-223 scripting engine.
 *
 * <p>An engine for the requested language is borrowed from the per-database
 * engine pool, the script is compiled once and cached on the request, then
 * evaluated against bindings that expose the database, context and arguments.
 * The engine is always returned to the pool in the outer finally block.
 *
 * @param iContext command context bound into the script
 * @param iArgs    named arguments bound into the script
 * @return the script result, transformed by {@code OCommandExecutorUtility}
 */
protected Object executeJsr223Script(final String language, final OCommandContext iContext, final Map<Object, Object> iArgs) {
ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE.get();
final OScriptManager scriptManager = Orient.instance().getScriptManager();
CompiledScript compiledScript = request.getCompiledScript();
// Borrow an engine from the pool; released in the outer finally below.
final OPartitionedObjectPool.PoolEntry<ScriptEngine> entry = scriptManager.acquireDatabaseEngine(db.getName(), language);
final ScriptEngine scriptEngine = entry.object;
try {
// Compile lazily on first execution and cache on the request object.
if (compiledScript == null) {
if (!(scriptEngine instanceof Compilable))
throw new OCommandExecutionException("Language '" + language + "' does not support compilation");
final Compilable c = (Compilable) scriptEngine;
try {
compiledScript = c.compile(parserText);
} catch (ScriptException e) {
scriptManager.throwErrorMessage(e, parserText);
}
request.setCompiledScript(compiledScript);
}
final Bindings binding = scriptManager.bind(compiledScript.getEngine().getBindings(ScriptContext.ENGINE_SCOPE),
(ODatabaseDocumentTx) db, iContext, iArgs);
try {
final Object ob = compiledScript.eval(binding);
return OCommandExecutorUtility.transformResult(ob);
} catch (ScriptException e) {
throw new OCommandScriptException("Error on execution of the script", request.getText(), e.getColumnNumber(), e);
} finally {
// Always detach the bindings, even when evaluation failed.
scriptManager.unbind(binding, iContext, iArgs);
}
} finally {
scriptManager.releaseDatabaseEngine(language, db.getName(), entry);
}
}
// TODO: CREATE A REGULAR JSR223 SCRIPT IMPL
/**
 * Executes the parsed text as an SQL script against the thread's database.
 *
 * @return the result of the last statement of the script
 */
protected Object executeSQL() {
ODatabaseDocument db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
try {
return executeSQLScript(parserText, db);
} catch (IOException e) {
throw new OCommandExecutionException("Error on executing command: " + parserText, e);
}
}
/** Wraps a parser syntax error into an {@link OCommandScriptException}. */
@Override
protected void throwSyntaxErrorException(String iText) {
throw new OCommandScriptException("Error on execution of the script: " + iText, request.getText(), 0);
}
/**
 * Executes an SQL script line by line, with transaction and retry support.
 *
 * <p>Recognized directives: "let &lt;var&gt; = &lt;cmd&gt;", "begin [isolation
 * &lt;level&gt;]", "commit [retry &lt;n&gt;]", "rollback", "sleep &lt;ms&gt;"
 * and "return &lt;expr&gt;" (which ends the script). Any other non-empty part
 * is executed as an SQL command. On concurrent-modification style exceptions
 * the whole script is re-run from the line where the transaction began, up to
 * the retry count parsed from the commit directive.
 *
 * @param iText the full script text
 * @param db    the database to run against
 * @return the result of the last executed statement (or the "return" value)
 * @throws IOException if reading the script text fails
 */
protected Object executeSQLScript(final String iText, final ODatabaseDocument db) throws IOException {
Object lastResult = null;
int maxRetry = 1;
context.setVariable("transactionRetries", 0);
for (int retry = 1; retry <= maxRetry; retry++) {
try {
try {
// txBegunAt* remember where the transaction started, so that a retry
// can skip everything executed before "begin".
int txBegunAtLine = -1;
int txBegunAtPart = -1;
lastResult = null;
final BufferedReader reader = new BufferedReader(new StringReader(iText));
int line = 0;
int linePart = 0;
String lastLine;
boolean txBegun = false;
for (; line < txBegunAtLine; ++line)
// SKIP PREVIOUS COMMAND AND JUMP TO THE BEGIN IF ANY
reader.readLine();
for (; (lastLine = reader.readLine()) != null; ++line) {
lastLine = lastLine.trim();
// A physical line may contain several ';'-separated statements.
final List<String> lineParts = OStringSerializerHelper.smartSplit(lastLine, ';', true);
if (line == txBegunAtLine)
// SKIP PREVIOUS COMMAND PART AND JUMP TO THE BEGIN IF ANY
linePart = txBegunAtPart;
else
linePart = 0;
for (; linePart < lineParts.size(); ++linePart) {
final String lastCommand = lineParts.get(linePart);
if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "let ")) {
lastResult = executeLet(lastCommand, db);
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "begin")) {
if (txBegun)
throw new OCommandSQLParsingException("Transaction already begun");
if (db.getTransaction().isActive())
// COMMIT ANY ACTIVE TX
db.commit();
txBegun = true;
txBegunAtLine = line;
txBegunAtPart = linePart;
db.begin();
// Optional "begin isolation <level>" clause.
if (lastCommand.length() > "begin ".length()) {
String next = lastCommand.substring("begin ".length()).trim();
if (OStringSerializerHelper.startsWithIgnoreCase(next, "isolation ")) {
next = next.substring("isolation ".length()).trim();
db.getTransaction().setIsolationLevel(OTransaction.ISOLATION_LEVEL.valueOf(next.toUpperCase()));
}
}
} else if ("rollback".equalsIgnoreCase(lastCommand)) {
if (!txBegun)
throw new OCommandSQLParsingException("Transaction not begun");
db.rollback();
txBegun = false;
txBegunAtLine = -1;
txBegunAtPart = -1;
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "commit")) {
if (txBegunAtLine < 0)
throw new OCommandSQLParsingException("Transaction not begun");
if (retry == 1 && lastCommand.length() > "commit ".length()) {
// FIRST CYCLE: PARSE RETRY TIMES OVERWRITING DEFAULT = 1
String next = lastCommand.substring("commit ".length()).trim();
if (OStringSerializerHelper.startsWithIgnoreCase(next, "retry ")) {
next = next.substring("retry ".length()).trim();
maxRetry = Integer.parseInt(next);
}
}
db.commit();
txBegun = false;
txBegunAtLine = -1;
txBegunAtPart = -1;
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "sleep ")) {
executeSleep(lastCommand);
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "return ")) {
lastResult = executeReturn(lastCommand, lastResult);
// END OF SCRIPT
break;
} else if (lastCommand != null && lastCommand.length() > 0)
lastResult = executeCommand(lastCommand, db);
}
}
} catch (RuntimeException ex) {
// Roll back a dangling transaction before propagating the failure.
if (db.getTransaction().isActive())
db.rollback();
throw ex;
}
// COMPLETED
break;
} catch (OTransactionException e) {
// THIS CASE IS ON UPSERT
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
waitForNextRetry();
} catch (ORecordDuplicatedException e) {
// THIS CASE IS ON UPSERT
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
waitForNextRetry();
} catch (ORecordNotFoundException e) {
// THIS CASE IS ON UPSERT
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
} catch (ONeedRetryException e) {
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
waitForNextRetry();
}
}
return lastResult;
}
/**
 * Sleeps a small random delay (1 to {@code MAX_DELAY - 1} ms) before the next
 * retry of the script, to reduce contention between concurrent retries.
 */
protected void waitForNextRetry() {
  try {
    Thread.sleep(new Random().nextInt(MAX_DELAY - 1) + 1);
  } catch (InterruptedException e) {
    // Restore the interrupt status instead of silently swallowing it,
    // so callers can still observe the interruption.
    Thread.currentThread().interrupt();
  }
}
/**
 * Runs a single SQL statement against the database, passing this executor's
 * context and its parameters (converted from script bindings if necessary).
 */
private Object executeCommand(final String lastCommand, final ODatabaseDocument db) {
return db.command(new OCommandSQL(lastCommand).setContext(getContext())).execute(toMap(parameters));
}
/**
 * Converts script-engine {@code SimpleBindings} into a plain map so the SQL
 * layer can consume the parameters; any other value is passed through as-is.
 * Insertion order is preserved by copying into a {@link LinkedHashMap}.
 */
private Object toMap(Object parameters) {
  if (!(parameters instanceof SimpleBindings)) {
    return parameters;
  }
  final Map<Object, Object> copy = new LinkedHashMap<Object, Object>();
  copy.putAll((SimpleBindings) parameters);
  return copy;
}
/**
 * Handles a "return &lt;expr&gt;" directive and computes the script result.
 *
 * <p>Supported forms: {@code NULL}; a {@code $variable} resolved from the
 * context; a {@code [...]} collection literal whose items may themselves be
 * context variables; a {@code {...}} map literal whose keys and values may be
 * context variables; anything else is returned as the literal string.
 * Single-element multi-values are unwrapped to their only element.
 *
 * @param lastCommand the full "return ..." statement
 * @param lastResult  the result computed so far (overwritten by this method)
 * @return the value the script should return
 */
private Object executeReturn(String lastCommand, Object lastResult) {
final String variable = lastCommand.substring("return ".length()).trim();
if (variable.equalsIgnoreCase("NULL"))
lastResult = null;
else if (variable.startsWith("$"))
lastResult = getContext().getVariable(variable);
else if (variable.startsWith("[") && variable.endsWith("]")) {
// ARRAY - COLLECTION
final List<String> items = new ArrayList<String>();
OStringSerializerHelper.getCollection(variable, 0, items);
final List<Object> result = new ArrayList<Object>(items.size());
for (int i = 0; i < items.size(); ++i) {
String item = items.get(i);
Object res;
if (item.startsWith("$"))
res = getContext().getVariable(item);
else
res = item;
// Unwrap single-element multi-values to the bare element.
if (OMultiValue.isMultiValue(res) && OMultiValue.getSize(res) == 1)
res = OMultiValue.getFirstValue(res);
result.add(res);
}
lastResult = result;
} else if (variable.startsWith("{") && variable.endsWith("}")) {
// MAP
final Map<String, String> map = OStringSerializerHelper.getMap(variable);
final Map<Object, Object> result = new HashMap<Object, Object>(map.size());
for (Map.Entry<String, String> entry : map.entrySet()) {
// KEY
String stringKey = entry.getKey();
if (stringKey == null)
continue;
stringKey = stringKey.trim();
Object key;
if (stringKey.startsWith("$"))
key = getContext().getVariable(stringKey);
else
key = stringKey;
if (OMultiValue.isMultiValue(key) && OMultiValue.getSize(key) == 1)
key = OMultiValue.getFirstValue(key);
// VALUE
String stringValue = entry.getValue();
if (stringValue == null)
continue;
stringValue = stringValue.trim();
Object value;
if (stringValue.toString().startsWith("$"))
value = getContext().getVariable(stringValue);
else
value = stringValue;
result.put(key, value);
}
lastResult = result;
} else
lastResult = variable;
// END OF THE SCRIPT
return lastResult;
}
/**
 * Handles the "sleep &lt;ms&gt;" script directive by pausing the current
 * thread for the requested number of milliseconds.
 *
 * @param lastCommand the full "sleep ..." statement
 */
private void executeSleep(String lastCommand) {
  final String sleepTimeInMs = lastCommand.substring("sleep ".length()).trim();
  try {
    Thread.sleep(Integer.parseInt(sleepTimeInMs));
  } catch (InterruptedException e) {
    // Restore the interrupt status instead of silently swallowing it.
    Thread.currentThread().interrupt();
  }
}
/**
 * Handles a "let &lt;name&gt; = &lt;command&gt;" line: runs the command and
 * stores its result in the context under the given variable name.
 *
 * @param lastCommand the full "let ..." statement
 * @param db          the database to run the command against
 * @return the command's result
 */
private Object executeLet(final String lastCommand, final ODatabaseDocument db) {
  final int assignmentPos = lastCommand.indexOf('=');
  final String varName = lastCommand.substring("let ".length(), assignmentPos).trim();
  final String commandText = lastCommand.substring(assignmentPos + 1).trim();
  final Object result = executeCommand(commandText, db);
  // Expose the result to the following statements of the script.
  getContext().setVariable(varName, result);
  return result;
}
/** Scripts may write data, so a write quorum is required when distributed. */
@Override
public QUORUM_TYPE getQuorumType() {
return QUORUM_TYPE.WRITE;
}
}
| core/src/main/java/com/orientechnologies/orient/core/command/script/OCommandExecutorScript.java | /*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.command.script;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.common.concur.ONeedRetryException;
import com.orientechnologies.common.concur.resource.OPartitionedObjectPool;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.command.OCommandDistributedReplicateRequest;
import com.orientechnologies.orient.core.command.OCommandExecutorAbstract;
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.exception.OTransactionException;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.OCommandSQLParsingException;
import com.orientechnologies.orient.core.storage.ORecordDuplicatedException;
import com.orientechnologies.orient.core.tx.OTransaction;
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
/**
* Executes Script Commands.
*
* @see OCommandScript
* @author Luca Garulli
*
*/
public class OCommandExecutorScript extends OCommandExecutorAbstract implements OCommandDistributedReplicateRequest {
private static final int MAX_DELAY = 100;
protected OCommandScript request;
public OCommandExecutorScript() {
}
@SuppressWarnings("unchecked")
public OCommandExecutorScript parse(final OCommandRequest iRequest) {
request = (OCommandScript) iRequest;
return this;
}
public OCommandDistributedReplicateRequest.DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
return DISTRIBUTED_EXECUTION_MODE.LOCAL;
}
public Object execute(final Map<Object, Object> iArgs) {
return executeInContext(context, iArgs);
}
public Object executeInContext(final OCommandContext iContext, final Map<Object, Object> iArgs) {
final String language = request.getLanguage();
parserText = request.getText();
parameters = iArgs;
parameters = iArgs;
if (language.equalsIgnoreCase("SQL"))
// SPECIAL CASE: EXECUTE THE COMMANDS IN SEQUENCE
return executeSQL();
else
return executeJsr223Script(language, iContext, iArgs);
}
public boolean isIdempotent() {
return false;
}
protected Object executeJsr223Script(final String language, final OCommandContext iContext, final Map<Object, Object> iArgs) {
ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE.get();
final OScriptManager scriptManager = Orient.instance().getScriptManager();
CompiledScript compiledScript = request.getCompiledScript();
final OPartitionedObjectPool.PoolEntry<ScriptEngine> entry = scriptManager.acquireDatabaseEngine(db.getName(), language);
final ScriptEngine scriptEngine = entry.object;
try {
if (compiledScript == null) {
if (!(scriptEngine instanceof Compilable))
throw new OCommandExecutionException("Language '" + language + "' does not support compilation");
final Compilable c = (Compilable) scriptEngine;
try {
compiledScript = c.compile(parserText);
} catch (ScriptException e) {
scriptManager.throwErrorMessage(e, parserText);
}
request.setCompiledScript(compiledScript);
}
final Bindings binding = scriptManager.bind(compiledScript.getEngine().getBindings(ScriptContext.ENGINE_SCOPE),
(ODatabaseDocumentTx) db, iContext, iArgs);
try {
final Object ob = compiledScript.eval(binding);
return OCommandExecutorUtility.transformResult(ob);
} catch (ScriptException e) {
throw new OCommandScriptException("Error on execution of the script", request.getText(), e.getColumnNumber(), e);
} finally {
scriptManager.unbind(binding, iContext, iArgs);
}
} finally {
scriptManager.releaseDatabaseEngine(language, db.getName(), entry);
}
}
// TODO: CREATE A REGULAR JSR223 SCRIPT IMPL
protected Object executeSQL() {
ODatabaseDocument db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
try {
return executeSQLScript(parserText, db);
} catch (IOException e) {
throw new OCommandExecutionException("Error on executing command: " + parserText, e);
}
}
@Override
protected void throwSyntaxErrorException(String iText) {
throw new OCommandScriptException("Error on execution of the script: " + iText, request.getText(), 0);
}
protected Object executeSQLScript(final String iText, final ODatabaseDocument db) throws IOException {
Object lastResult = null;
int maxRetry = 1;
context.setVariable("transactionRetries", 0);
for (int retry = 1; retry <= maxRetry; retry++) {
try {
try {
int txBegunAtLine = -1;
int txBegunAtPart = -1;
lastResult = null;
final BufferedReader reader = new BufferedReader(new StringReader(iText));
int line = 0;
int linePart = 0;
String lastLine;
boolean txBegun = false;
for (; line < txBegunAtLine; ++line)
// SKIP PREVIOUS COMMAND AND JUMP TO THE BEGIN IF ANY
reader.readLine();
for (; (lastLine = reader.readLine()) != null; ++line) {
lastLine = lastLine.trim();
final List<String> lineParts = OStringSerializerHelper.smartSplit(lastLine, ';', true);
if (line == txBegunAtLine)
// SKIP PREVIOUS COMMAND PART AND JUMP TO THE BEGIN IF ANY
linePart = txBegunAtPart;
else
linePart = 0;
for (; linePart < lineParts.size(); ++linePart) {
final String lastCommand = lineParts.get(linePart);
if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "let ")) {
lastResult = executeLet(lastCommand, db);
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "begin")) {
if (txBegun)
throw new OCommandSQLParsingException("Transaction already begun");
if (db.getTransaction().isActive())
// COMMIT ANY ACTIVE TX
db.commit();
txBegun = true;
txBegunAtLine = line;
txBegunAtPart = linePart;
db.begin();
if (lastCommand.length() > "begin ".length()) {
String next = lastCommand.substring("begin ".length()).trim();
if (OStringSerializerHelper.startsWithIgnoreCase(next, "isolation ")) {
next = next.substring("isolation ".length()).trim();
db.getTransaction().setIsolationLevel(OTransaction.ISOLATION_LEVEL.valueOf(next.toUpperCase()));
}
}
} else if ("rollback".equalsIgnoreCase(lastCommand)) {
if (!txBegun)
throw new OCommandSQLParsingException("Transaction not begun");
db.rollback();
txBegun = false;
txBegunAtLine = -1;
txBegunAtPart = -1;
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "commit")) {
if (txBegunAtLine < 0)
throw new OCommandSQLParsingException("Transaction not begun");
if (retry == 1 && lastCommand.length() > "commit ".length()) {
// FIRST CYCLE: PARSE RETRY TIMES OVERWRITING DEFAULT = 1
String next = lastCommand.substring("commit ".length()).trim();
if (OStringSerializerHelper.startsWithIgnoreCase(next, "retry ")) {
next = next.substring("retry ".length()).trim();
maxRetry = Integer.parseInt(next);
}
}
db.commit();
txBegun = false;
txBegunAtLine = -1;
txBegunAtPart = -1;
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "sleep ")) {
executeSleep(lastCommand);
} else if (OStringSerializerHelper.startsWithIgnoreCase(lastCommand, "return ")) {
lastResult = executeReturn(lastCommand, lastResult);
// END OF SCRIPT
break;
} else if (lastCommand != null && lastCommand.length() > 0)
lastResult = executeCommand(lastCommand, db);
}
}
} catch (RuntimeException ex) {
if (db.getTransaction().isActive())
db.rollback();
throw ex;
}
// COMPLETED
break;
} catch (OTransactionException e) {
// THIS CASE IS ON UPSERT
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
waitForNextRetry();
} catch (ORecordDuplicatedException e) {
// THIS CASE IS ON UPSERT
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
waitForNextRetry();
} catch (ORecordNotFoundException e) {
// THIS CASE IS ON UPSERT
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
} catch (ONeedRetryException e) {
context.setVariable("retries", retry);
getDatabase().getLocalCache().clear();
if (retry >= maxRetry)
throw e;
waitForNextRetry();
}
}
return lastResult;
}
/**
* Wait before to retry
*/
protected void waitForNextRetry() {
try {
Thread.sleep(new Random().nextInt(MAX_DELAY - 1) + 1);
} catch (InterruptedException e) {
}
}
private Object executeCommand(final String lastCommand, final ODatabaseDocument db) {
return db.command(new OCommandSQL(lastCommand).setContext(getContext())).execute(parameters);
}
private Object executeReturn(String lastCommand, Object lastResult) {
final String variable = lastCommand.substring("return ".length()).trim();
if (variable.equalsIgnoreCase("NULL"))
lastResult = null;
else if (variable.startsWith("$"))
lastResult = getContext().getVariable(variable);
else if (variable.startsWith("[") && variable.endsWith("]")) {
// ARRAY - COLLECTION
final List<String> items = new ArrayList<String>();
OStringSerializerHelper.getCollection(variable, 0, items);
final List<Object> result = new ArrayList<Object>(items.size());
for (int i = 0; i < items.size(); ++i) {
String item = items.get(i);
Object res;
if (item.startsWith("$"))
res = getContext().getVariable(item);
else
res = item;
if (OMultiValue.isMultiValue(res) && OMultiValue.getSize(res) == 1)
res = OMultiValue.getFirstValue(res);
result.add(res);
}
lastResult = result;
} else if (variable.startsWith("{") && variable.endsWith("}")) {
// MAP
final Map<String, String> map = OStringSerializerHelper.getMap(variable);
final Map<Object, Object> result = new HashMap<Object, Object>(map.size());
for (Map.Entry<String, String> entry : map.entrySet()) {
// KEY
String stringKey = entry.getKey();
if (stringKey == null)
continue;
stringKey = stringKey.trim();
Object key;
if (stringKey.startsWith("$"))
key = getContext().getVariable(stringKey);
else
key = stringKey;
if (OMultiValue.isMultiValue(key) && OMultiValue.getSize(key) == 1)
key = OMultiValue.getFirstValue(key);
// VALUE
String stringValue = entry.getValue();
if (stringValue == null)
continue;
stringValue = stringValue.trim();
Object value;
if (stringValue.toString().startsWith("$"))
value = getContext().getVariable(stringValue);
else
value = stringValue;
result.put(key, value);
}
lastResult = result;
} else
lastResult = variable;
// END OF THE SCRIPT
return lastResult;
}
private void executeSleep(String lastCommand) {
final String sleepTimeInMs = lastCommand.substring("sleep ".length()).trim();
try {
Thread.sleep(Integer.parseInt(sleepTimeInMs));
} catch (InterruptedException e) {
}
}
private Object executeLet(final String lastCommand, final ODatabaseDocument db) {
final int equalsPos = lastCommand.indexOf('=');
final String variable = lastCommand.substring("let ".length(), equalsPos).trim();
String cmd = lastCommand.substring(equalsPos + 1).trim();
final Object lastResult = executeCommand(cmd, db);
// PUT THE RESULT INTO THE CONTEXT
getContext().setVariable(variable, lastResult);
return lastResult;
}
  /**
   * Reports the quorum class of this command for distributed execution: WRITE, presumably because
   * a script may contain data-modifying statements (it executes arbitrary SQL via
   * executeCommand) — NOTE(review): confirm against the distributed-execution contract.
   */
  @Override
  public QUORUM_TYPE getQuorumType() {
    return QUORUM_TYPE.WRITE;
  }
}
| fixed issue #4207 - Exception on executing UPDATE statement in Function in SQL language
| core/src/main/java/com/orientechnologies/orient/core/command/script/OCommandExecutorScript.java | fixed issue #4207 - Exception on executing UPDATE statement in Function in SQL language | <ide><path>ore/src/main/java/com/orientechnologies/orient/core/command/script/OCommandExecutorScript.java
<ide> import com.orientechnologies.orient.core.storage.ORecordDuplicatedException;
<ide> import com.orientechnologies.orient.core.tx.OTransaction;
<ide>
<del>import javax.script.Bindings;
<del>import javax.script.Compilable;
<del>import javax.script.CompiledScript;
<del>import javax.script.ScriptContext;
<del>import javax.script.ScriptEngine;
<del>import javax.script.ScriptException;
<add>import javax.script.*;
<ide> import java.io.BufferedReader;
<ide> import java.io.IOException;
<ide> import java.io.StringReader;
<del>import java.util.ArrayList;
<del>import java.util.HashMap;
<del>import java.util.List;
<del>import java.util.Map;
<del>import java.util.Random;
<add>import java.util.*;
<ide>
<ide> /**
<ide> * Executes Script Commands.
<ide> }
<ide>
<ide> private Object executeCommand(final String lastCommand, final ODatabaseDocument db) {
<del> return db.command(new OCommandSQL(lastCommand).setContext(getContext())).execute(parameters);
<add> return db.command(new OCommandSQL(lastCommand).setContext(getContext())).execute(toMap(parameters));
<add> }
<add>
<add> private Object toMap(Object parameters) {
<add> if (parameters instanceof SimpleBindings) {
<add> HashMap<Object, Object> result = new LinkedHashMap<Object, Object>();
<add> result.putAll((SimpleBindings) parameters);
<add> return result;
<add> }
<add> return parameters;
<ide> }
<ide>
<ide> private Object executeReturn(String lastCommand, Object lastResult) { |
|
Java | bsd-3-clause | 965cc6213fb6b5934c231a9296584891929de182 | 0 | NCIP/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers | package gov.nih.nci.cabig.caaers.accesscontrol.query.impl;
import gov.nih.nci.cabig.caaers.accesscontrol.BaseSecurityFilterer;
import gov.nih.nci.cabig.caaers.dao.OrganizationDao;
import gov.nih.nci.cabig.caaers.dao.StudySiteDao;
import gov.nih.nci.cabig.caaers.dao.UserDao;
import gov.nih.nci.cabig.caaers.dao.query.HQLQuery;
import gov.nih.nci.cabig.caaers.domain.Organization;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.semanticbits.security.contentfilter.IdFetcher;
/**
* Will find the organizations that can be accessed by the user.
*
* Rules :
* Investigator - Study Assignment + all study organizations belong to his organization for those studies
* AE Coordinator - Study Assignment + all study organizations belong to his organization for those studies
* Subject Coordinator - Study Assignment + all study organizations belong to his organization for those studies
* Data Coordinator - Study Assignment + all study organizations belong to his organization for those studies
* Report Reviewer - Study Assignment + all study organizations belong to his organization for those studies
*
* Study Coordinator - No filtering needed
* Site Coordinator - No filtering needed
*
* @author Biju Joseph
*
*/
public class CaaersOrganizationIdFetcherImpl extends AbstractIdFetcher implements IdFetcher{
/**
* Based on the fact that for Study Coordinators and Site Coordinators, the fetchers will not be called, all users
* at all roles can go with "Study Assignment + all study organizations belong to his organization for those studies"
* filtering.
* @param loginId - username
* @return
*/
public List fetch(String loginId) {
StringBuilder hql = new StringBuilder("select distinct so.organization.id from StudyOrganization so ,StudyPersonnel sp ")
.append(" join sp.studyOrganization ss " )
.append(" join sp.siteResearchStaff srs " )
.append(" join srs.researchStaff rs " )
.append(" where ss.study = so.study ")
.append(" and rs.loginId = :loginId ")
.append(" and sp.startDate<= :stDate ")
.append(" and (sp.endDate is null or sp.endDate >= :enDate ) " )
.append(" and sp.retiredIndicator <> true");
Date d = new Date();
HQLQuery query = new HQLQuery(hql.toString());
query.getParameterMap().put("loginId", "sponsor");
query.getParameterMap().put("stDate", d);
query.getParameterMap().put("enDate", d);
List<Integer> resultList = (List<Integer>) search(query);
if(log.isDebugEnabled()){
log.debug("Organization IDs accessible for [ " + loginId + " ] are : " + String.valueOf(resultList));
}
return resultList;
}
}
| caAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/accesscontrol/query/impl/CaaersOrganizationIdFetcherImpl.java | package gov.nih.nci.cabig.caaers.accesscontrol.query.impl;
import gov.nih.nci.cabig.caaers.accesscontrol.BaseSecurityFilterer;
import gov.nih.nci.cabig.caaers.dao.OrganizationDao;
import gov.nih.nci.cabig.caaers.dao.StudySiteDao;
import gov.nih.nci.cabig.caaers.dao.UserDao;
import gov.nih.nci.cabig.caaers.domain.Organization;
import java.util.ArrayList;
import java.util.List;
import com.semanticbits.security.contentfilter.IdFetcher;
/**
* Will find the organizations that can be accessed by the user.
*
* Rules :
* Investigator - Study Assignment + all study organizations belong to his organization for those studies
* AE Coordinator - Study Assignment + all study organizations belong to his organization for those studies
* Subject Coordinator - Study Assignment + all study organizations belong to his organization for those studies
* Data Coordinator - Study Assignment + all study organizations belong to his organization for those studies
* Report Reviewer - Study Assignment + all study organizations belong to his organization for those studies
*
* Study Coordinator - No filtering needed
* Site Coordinator - No filtering needed
*
* @author Biju Joseph
*
*/
public class CaaersOrganizationIdFetcherImpl extends AbstractIdFetcher implements IdFetcher{
public List fetch(String s) {
return null;
}
}
| CAAERS-3879 updated the organization id fetcher
SVN-Revision: 12577
| caAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/accesscontrol/query/impl/CaaersOrganizationIdFetcherImpl.java | CAAERS-3879 updated the organization id fetcher | <ide><path>aAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/accesscontrol/query/impl/CaaersOrganizationIdFetcherImpl.java
<ide> import gov.nih.nci.cabig.caaers.dao.OrganizationDao;
<ide> import gov.nih.nci.cabig.caaers.dao.StudySiteDao;
<ide> import gov.nih.nci.cabig.caaers.dao.UserDao;
<add>import gov.nih.nci.cabig.caaers.dao.query.HQLQuery;
<ide> import gov.nih.nci.cabig.caaers.domain.Organization;
<ide>
<ide> import java.util.ArrayList;
<add>import java.util.Date;
<ide> import java.util.List;
<ide>
<ide> import com.semanticbits.security.contentfilter.IdFetcher;
<ide> */
<ide> public class CaaersOrganizationIdFetcherImpl extends AbstractIdFetcher implements IdFetcher{
<ide>
<del> public List fetch(String s) {
<del> return null;
<add>
<add> /**
<add> * Based on the fact that for Study Coordinators and Site Coordinators, the fetchers will not be called, all users
<add> * at all roles can go with "Study Assignment + all study organizations belong to his organization for those studies"
<add> * filtering.
<add> * @param loginId - username
<add> * @return
<add> */
<add> public List fetch(String loginId) {
<add>
<add> StringBuilder hql = new StringBuilder("select distinct so.organization.id from StudyOrganization so ,StudyPersonnel sp ")
<add> .append(" join sp.studyOrganization ss " )
<add> .append(" join sp.siteResearchStaff srs " )
<add> .append(" join srs.researchStaff rs " )
<add> .append(" where ss.study = so.study ")
<add> .append(" and rs.loginId = :loginId ")
<add> .append(" and sp.startDate<= :stDate ")
<add> .append(" and (sp.endDate is null or sp.endDate >= :enDate ) " )
<add> .append(" and sp.retiredIndicator <> true");
<add>
<add> Date d = new Date();
<add> HQLQuery query = new HQLQuery(hql.toString());
<add> query.getParameterMap().put("loginId", "sponsor");
<add> query.getParameterMap().put("stDate", d);
<add> query.getParameterMap().put("enDate", d);
<add>
<add> List<Integer> resultList = (List<Integer>) search(query);
<add>
<add> if(log.isDebugEnabled()){
<add> log.debug("Organization IDs accessible for [ " + loginId + " ] are : " + String.valueOf(resultList));
<add> }
<add> return resultList;
<ide> }
<ide>
<ide> |
|
Java | agpl-3.0 | 5084e518a82834e9fc7a5b25aeeb06b952f6de4a | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 3db96cc6-2e61-11e5-9284-b827eb9e62be | hello.java | 3db4025e-2e61-11e5-9284-b827eb9e62be | 3db96cc6-2e61-11e5-9284-b827eb9e62be | hello.java | 3db96cc6-2e61-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>3db4025e-2e61-11e5-9284-b827eb9e62be
<add>3db96cc6-2e61-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | 69204842741a860b42aa81488c4133400ab1318a | 0 | marcbux/Hi-WAY | /*******************************************************************************
* In the Hi-WAY project we propose a novel approach of executing scientific
* workflows processing Big Data, as found in NGS applications, on distributed
* computational infrastructures. The Hi-WAY software stack comprises the func-
* tional workflow language Cuneiform as well as the Hi-WAY ApplicationMaster
* for Apache Hadoop 2.x (YARN).
*
* List of Contributors:
*
* Marc Bux (HU Berlin)
* Jörgen Brandt (HU Berlin)
* Hannes Schuh (HU Berlin)
* Ulf Leser (HU Berlin)
*
* Jörgen Brandt is funded by the European Commission through the BiobankCloud
* project. Marc Bux is funded by the Deutsche Forschungsgemeinschaft through
* research training group SOAMED (GRK 1651).
*
* Copyright 2014 Humboldt-Universität zu Berlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.huberlin.wbi.hiway.am.galaxy;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import de.huberlin.wbi.cuneiform.core.semanticmodel.ForeignLambdaExpr;
import de.huberlin.wbi.hiway.common.Data;
import de.huberlin.wbi.hiway.common.TaskInstance;
/**
* An object that extends the Hi-WAY task instance object with its Galaxy tool and tool state, along with methods and structures to utilize and populate them.
*
* @author Marc Bux
*
*/
public class GalaxyTaskInstance extends TaskInstance {
// the Galaxy tool invoked by this task instance
private GalaxyTool galaxyTool;
// input data, which the task instance has to know about for determining the metadata using the Python classes provided by Galaxy
private Set<GalaxyData> inputs;
// the Python script run before the actual task instance execution that is responsible for populating the tool state with metadata
private StringBuilder paramScript;
// the post script that is to be run subsequent to the actual task invocation and that may involve the moving of files from temporary fodlers to their
// destination
private String postScript;
// the tool state (i.e., the parameter settings in JSON format) of this task instance
private JSONObject toolState;
public GalaxyTaskInstance(long id, UUID workflowId, String taskName, GalaxyTool galaxyTool, String galaxyPath) {
super(id, workflowId, taskName, Math.abs(taskName.hashCode() + 1), ForeignLambdaExpr.LANGID_BASH);
this.galaxyTool = galaxyTool;
toolState = new JSONObject();
paramScript = new StringBuilder("import os, ast\nimport cPickle as pickle\nimport galaxy.app\n");
this.postScript = "";
inputs = new HashSet<>();
// the task instance's invocScript variable is set here for it to be passed to the Worker thread, which writes the script's content as JsonReportEntry
// to the log
setInvocScript("script.sh");
// As opposed to other Hi-WAY applciation masters, the Galaxy AM ha a fairly static command that can be build at task instance creation time
StringBuilder commandSb = new StringBuilder();
commandSb.append("PYTHONPATH=" + galaxyPath + "/lib:$PYTHONPATH; export PYTHONPATH\n");
commandSb.append("PYTHON_EGG_CACHE=.; export PYTHON_EGG_CACHE\n");
commandSb.append("python params.py\n");
commandSb.append("cat pre.sh > script.sh\n");
commandSb.append("echo `cheetah fill template.tmpl --pickle params.p -p` >> script.sh\n");
commandSb.append("cat post.sh >> script.sh\n");
commandSb.append("bash ").append(getInvocScript()).append("\n");
setCommand(commandSb.toString());
}
/**
* A method that is called when a file name is encountered in the workflow description and the tool state has to be adjusted accordingly
*
* @param name
* the parameter name for this file, as specified in the workflow description
* @param computeMetadata
* a parameter that determines whether metadata is to be computed for this file, which will only be the case for input data
* @param data
* the Hi-WAY data object for this file
*/
public void addFile(String name, boolean computeMetadata, GalaxyData data) {
addFile(name, computeMetadata, data, toolState);
}
/**
* A (recursive) method that is passed a file along with the JSON object in this task instance's tool state that corresponds to this file. This method is
* used to populate the tool state with information on the file.
*
* @param name
* the parameter name for this file, as specified in the workflow description
* @param computeMetadata
* a parameter that determines whether metadata is to be computed for this file, which will only be the case for input data
* @param data
* the Hi-WAY data object for this file
* @param jo
* the JSON object in the tool state that corresponds to this file parameter
*/
private void addFile(String name, boolean computeMetadata, GalaxyData data, JSONObject jo) {
try {
Pattern p = Pattern.compile("(_[0-9]*)?\\|");
Matcher m = p.matcher(name);
// (1) if the to-be-added file is part of a repeat (and thus can be identified by an underscore and index number in its name), compute its prefix
// (the repeat name) as well as its suffix (the actual parameter name) and index; use the prefix and index to obtain its JSON object from the tool
// state and (recursively) call this method to proceed to (2)
if (m.find()) {
String prefix = name.substring(0, m.start());
String suffix = name.substring(m.end());
if (m.end() - m.start() > 2) {
int index = Integer.parseInt(name.substring(m.start() + 1, m.end() - 1));
JSONArray repeatJa = jo.getJSONArray(prefix);
for (int i = 0; i < repeatJa.length(); i++) {
JSONObject repeatJo = repeatJa.getJSONObject(i);
if (repeatJo.getInt("__index__") == index) {
addFile(suffix, computeMetadata, data, repeatJo);
break;
}
}
} else {
addFile(suffix, computeMetadata, data, jo.getJSONObject(prefix));
}
// (2) fix both the template of this tool and the tool state of the task instance calling this method, such that they are in compliance with one
// another when the tool state is used to set the parameters of the task instance at execution time
} else {
// (a) add several properties for this parameter to the tool state
String fileName = data.getName();
JSONObject fileJo = new JSONObject();
fileJo.putOpt("path", fileName);
fileJo.putOpt("name", fileName.split("\\.(?=[^\\.]+$)")[0]);
fileJo.putOpt("files_path", data.getLocalDirectory());
if (data.hasDataType()) {
GalaxyDataType dataType = data.getDataType();
String fileExt = dataType.getExtension();
fileJo.putOpt("extension", fileExt);
fileJo.putOpt("ext", fileExt);
}
// note that this metadata is an empty dictionary that will only be be filled for input data of the task instance calling this method; this is
// done by executing a python script designed to populate the tool state with metadata prior to execution
if (data.hasDataType())
fileJo.putOpt("metadata", new JSONObject());
jo.putOpt(name, fileJo);
// (b) adjust the Python script for setting parameters at runtime to compute metadata for this file, given the computeMetadata parameter is set
if (computeMetadata && data.hasDataType()) {
GalaxyDataType dataType = data.getDataType();
paramScript.append("from ");
paramScript.append(dataType.getFile());
paramScript.append(" import ");
paramScript.append(dataType.getName());
paramScript.append("\n");
inputs.add(data);
}
}
} catch (JSONException e) {
e.printStackTrace();
System.exit(-1);
}
}
/**
* A method for setting the tool state of this task instance from the raw string of the workflow description file to the JSON object provided in the Python
* parameter script. There are several string operations that have to be performed before the tool state can be interpreted as a JSON object.
*
* @param tool_state
* the tool state string, as extracted from the workflow description file
*
*/
public void addToolState(String tool_state) {
String tool_state_json = tool_state;
tool_state_json = tool_state_json.replaceAll("\"null\"", "\"\"");
// replace "{ }" "[ ]" with { } [ ]
tool_state_json = tool_state_json.replaceAll("\"\\{", "\\{");
tool_state_json = tool_state_json.replaceAll("\\}\"", "\\}");
tool_state_json = tool_state_json.replaceAll("\"\\[", "\\[");
tool_state_json = tool_state_json.replaceAll("\\]\"", "\\]");
// remove \
tool_state_json = tool_state_json.replaceAll("\\\\", "");
// replace "" with "
tool_state_json = tool_state_json.replaceAll("\"\"", "\"");
// replace : ", with : "",
tool_state_json = tool_state_json.replaceAll(": ?\",", ": \"\",");
// replace UnvalidatedValue with their actual value
tool_state_json = tool_state_json.replaceAll("\\{\"__class__\":\\s?\"UnvalidatedValue\",\\s?\"value\":\\s?([^\\}]*)\\}", "$1");
// replace "null" with {"path": ""}
// this is done such that otpional files will still be accessible by Cheetah as id.path
tool_state_json = tool_state_json.replaceAll("\"null\"", "{\"path\": \"\"}");
try {
this.toolState = new JSONObject(tool_state_json);
} catch (JSONException e) {
e.printStackTrace();
System.exit(-1);
}
}
/**
* A method for adding commands to be executed after the task instance (e.g., for moving files from a temporary folder to their destination, as specified in
* the tool description)
*
* @param post
* the shell command to be executed after the task instance
*/
public void addToPostScript(String post) {
postScript = postScript + (post.endsWith("\n") ? post : post + "\n");
}
@Override
public Map<String, LocalResource> buildScriptsAndSetResources(Container container) {
Map<String, LocalResource> localResources = super.buildScriptsAndSetResources(container);
String containerId = container.getId().toString();
// The task isntance's bash script is built by appending the pre script, the template compiled by Cheetah using the parameters set in the params Python
// script, and the post script
Data preSriptData = new Data("pre.sh", containerId);
Data paramScriptData = new Data("params.py", containerId);
Data templateData = new Data("template.tmpl", containerId);
Data postSriptData = new Data("post.sh", containerId);
try (BufferedWriter preScriptWriter = new BufferedWriter(new FileWriter(preSriptData.getLocalPath().toString()));
BufferedWriter paramScriptWriter = new BufferedWriter(new FileWriter(paramScriptData.getLocalPath().toString()));
BufferedWriter templateWriter = new BufferedWriter(new FileWriter(templateData.getLocalPath().toString()));
BufferedWriter postScriptWriter = new BufferedWriter(new FileWriter(postSriptData.getLocalPath().toString()))) {
preScriptWriter.write(galaxyTool.getEnv());
paramScriptWriter.write(paramScript.toString());
templateWriter.write(galaxyTool.getTemplate());
postScriptWriter.write(getPostScript());
} catch (IOException e) {
e.printStackTrace();
System.exit(-1);
}
try {
preSriptData.stageOut();
paramScriptData.stageOut();
templateData.stageOut();
postSriptData.stageOut();
preSriptData.addToLocalResourceMap(localResources);
paramScriptData.addToLocalResourceMap(localResources);
templateData.addToLocalResourceMap(localResources);
postSriptData.addToLocalResourceMap(localResources);
} catch (IOException e) {
e.printStackTrace();
System.exit(-1);
}
return localResources;
}
public GalaxyTool getGalaxyTool() {
return galaxyTool;
}
public String getPostScript() {
return postScript;
}
/**
* A method for preparing the Python script that is responsible for populating the tool state with metadata and making it accessible for Cheetah to compile
* the template.
*
* @throws JSONException JSONException
*/
public void prepareParamScript() throws JSONException {
// (1) populate the tool state by mapping parameters and adding additional parameters
galaxyTool.mapParams(toolState);
toolState.put("__new_file_path__", ".");
// (2) Set the tool state to the tool state as parsed from the workflow description and populated in step 1
paramScript.append("\ntool_state = ");
paramScript.append(toolState.toString());
// (3) Define the Dict class, which is a Python dict whose members are accessible like functions of a class (via a "dot")
paramScript.append("\n\nclass Dict(dict):");
paramScript.append("\n def __init__(self, *args, **kwargs):");
paramScript.append("\n super(Dict, self).__init__(*args, **kwargs)");
paramScript.append("\n self.__dict__ = self");
// (4) The Dataset class inherits from the Dict class and provides some functions; in order to utilize the Python scripts describing the Galaxy data
// types, a Dataset object has to be created; unfortunately, the Dataset object provided by Galaxy requires Galaxy to run, hence a custom Dataset class
// had to be created
paramScript.append("\n\nclass Dataset(Dict):");
paramScript.append("\n def has_data(self):");
paramScript.append("\n return True");
paramScript.append("\n def get_size(self):");
paramScript.append("\n return os.path.getsize(self.file_name)");
// (5) The expandToolState method traverses the tool state recursively and, when encountering an input data item, determines and adds its metadata
paramScript.append("\n\ndef expandToolState(src, dest):");
paramScript.append("\n for k, v in src.iteritems():");
// (6) recusrively parse dicts of parameters (conditionals and the root parameter list)
paramScript.append("\n if isinstance (v, dict):");
paramScript.append("\n dest[k] = Dataset() if 'path' in v else Dict()");
paramScript.append("\n expandToolState(v, dest[k])");
// for each input data item
for (GalaxyData input : inputs) {
// (a) instantiate the Python class corresponding to this data item's data type
paramScript.append("\n if 'path' in v and v['path'] == '");
paramScript.append(input.getName());
paramScript.append("':");
paramScript.append("\n dest[k]['file_name'] = v['path']");
paramScript.append("\n datatype = ");
paramScript.append(input.getDataType().getName());
paramScript.append("()");
// (b) set the default values of this data item, as defined in its Python class
paramScript.append("\n for key in datatype.metadata_spec.keys():");
paramScript.append("\n value = datatype.metadata_spec[key]");
paramScript.append("\n default = value.get(\"default\")");
paramScript.append("\n dest[k].metadata[key] = default");
paramScript.append("\n try:");
// (c) attempt to determine additional metadata, as defined in the set_meta method of the Galaxy data type's Python classes
paramScript.append("\n datatype.set_meta(dataset=dest[k])");
paramScript.append("\n except:");
paramScript.append("\n pass");
// (d) metadata information in Python list format has to converted to a string and formatted before it can be added to the tool state
paramScript.append("\n for key in dest[k].metadata.keys():");
paramScript.append("\n value = dest[k].metadata[key]");
paramScript.append("\n if isinstance (value, list):");
paramScript.append("\n dest[k].metadata[key] = ', '.join(str(item) for item in value)");
}
// (7) recursively parse lists of parameters (repeats)
paramScript.append("\n elif isinstance (v, list):");
paramScript.append("\n dest[k] = list()");
paramScript.append("\n for i in v:");
paramScript.append("\n j = Dict()");
paramScript.append("\n dest[k].append(j)");
paramScript.append("\n expandToolState(i, j)");
// (8) if the encountered item is neither of type dict nor of type list (and hence atomic), just add it to the expanded dest dict
paramScript.append("\n else:");
paramScript.append("\n dest[k] = v");
// (9) invoke the expandToolState method and write its result to a pickle file, which Cheetah can interpret to compile the template
paramScript.append("\n\nexpanded_tool_state = Dict()");
paramScript.append("\nexpandToolState(tool_state, expanded_tool_state)");
paramScript.append("\nwith open(\"params.p\", \"wb\") as picklefile:");
paramScript.append("\n pickle.dump(ast.literal_eval(str(expanded_tool_state)), picklefile)\n");
}
}
| hiway-core/src/main/java/de/huberlin/wbi/hiway/am/galaxy/GalaxyTaskInstance.java | /*******************************************************************************
* In the Hi-WAY project we propose a novel approach of executing scientific
* workflows processing Big Data, as found in NGS applications, on distributed
* computational infrastructures. The Hi-WAY software stack comprises the func-
* tional workflow language Cuneiform as well as the Hi-WAY ApplicationMaster
* for Apache Hadoop 2.x (YARN).
*
* List of Contributors:
*
* Marc Bux (HU Berlin)
* Jörgen Brandt (HU Berlin)
* Hannes Schuh (HU Berlin)
* Ulf Leser (HU Berlin)
*
* Jörgen Brandt is funded by the European Commission through the BiobankCloud
* project. Marc Bux is funded by the Deutsche Forschungsgemeinschaft through
* research training group SOAMED (GRK 1651).
*
* Copyright 2014 Humboldt-Universität zu Berlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.huberlin.wbi.hiway.am.galaxy;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import de.huberlin.wbi.cuneiform.core.semanticmodel.ForeignLambdaExpr;
import de.huberlin.wbi.hiway.common.Data;
import de.huberlin.wbi.hiway.common.TaskInstance;
/**
* An object that extends the Hi-WAY task instance object with its Galaxy tool and tool state, along with methods and structures to utilize and populate them.
*
* @author Marc Bux
*
*/
public class GalaxyTaskInstance extends TaskInstance {
// the Galaxy tool invoked by this task instance
private GalaxyTool galaxyTool;
// input data, which the task instance has to know about for determining the metadata using the Python classes provided by Galaxy
private Set<GalaxyData> inputs;
// the Python script run before the actual task instance execution that is responsible for populating the tool state with metadata
private StringBuilder paramScript;
// the post script that is to be run subsequent to the actual task invocation and that may involve the moving of files from temporary fodlers to their
// destination
private String postScript;
// the tool state (i.e., the parameter settings in JSON format) of this task instance
private JSONObject toolState;
public GalaxyTaskInstance(long id, UUID workflowId, String taskName, GalaxyTool galaxyTool, String galaxyPath) {
super(id, workflowId, taskName, Math.abs(taskName.hashCode() + 1), ForeignLambdaExpr.LANGID_BASH);
this.galaxyTool = galaxyTool;
toolState = new JSONObject();
paramScript = new StringBuilder("import os, ast\nimport cPickle as pickle\nimport galaxy.app\n");
this.postScript = "";
inputs = new HashSet<>();
// the task instance's invocScript variable is set here for it to be passed to the Worker thread, which writes the script's content as JsonReportEntry
// to the log
setInvocScript("script.sh");
// As opposed to other Hi-WAY applciation masters, the Galaxy AM ha a fairly static command that can be build at task instance creation time
StringBuilder commandSb = new StringBuilder();
commandSb.append("PYTHONPATH=" + galaxyPath + "/lib:$PYTHONPATH; export PYTHONPATH\n");
commandSb.append("PYTHON_EGG_CACHE=.; export PYTHON_EGG_CACHE\n");
commandSb.append("python params.py\n");
commandSb.append("cat pre.sh > script.sh\n");
commandSb.append("echo `cheetah fill template.tmpl --pickle params.p -p` >> script.sh\n");
commandSb.append("cat post.sh >> script.sh\n");
commandSb.append("bash ").append(getInvocScript()).append("\n");
setCommand(commandSb.toString());
}
/**
* A method that is called when a file name is encountered in the workflow description and the tool state has to be adjusted accordingly
*
* @param name
* the parameter name for this file, as specified in the workflow description
* @param computeMetadata
* a parameter that determines whether metadata is to be computed for this file, which will only be the case for input data
* @param data
* the Hi-WAY data object for this file
*/
public void addFile(String name, boolean computeMetadata, GalaxyData data) {
addFile(name, computeMetadata, data, toolState);
}
/**
* A (recursive) method that is passed a file along with the JSON object in this task instance's tool state that corresponds to this file. This method is
* used to populate the tool state with information on the file.
*
* @param name
* the parameter name for this file, as specified in the workflow description
* @param computeMetadata
* a parameter that determines whether metadata is to be computed for this file, which will only be the case for input data
* @param data
* the Hi-WAY data object for this file
* @param jo
* the JSON object in the tool state that corresponds to this file parameter
*/
private void addFile(String name, boolean computeMetadata, GalaxyData data, JSONObject jo) {
try {
Pattern p = Pattern.compile("(_[0-9]*)?\\|");
Matcher m = p.matcher(name);
// (1) if the to-be-added file is part of a repeat (and thus can be identified by an underscore and index number in its name), compute its prefix
// (the repeat name) as well as its suffix (the actual parameter name) and index; use the prefix and index to obtain its JSON object from the tool
// state and (recursively) call this method to proceed to (2)
if (m.find()) {
String prefix = name.substring(0, m.start());
String suffix = name.substring(m.end());
if (m.end() - m.start() > 2) {
int index = Integer.parseInt(name.substring(m.start() + 1, m.end() - 1));
JSONArray repeatJa = jo.getJSONArray(prefix);
for (int i = 0; i < repeatJa.length(); i++) {
JSONObject repeatJo = repeatJa.getJSONObject(i);
if (repeatJo.getInt("__index__") == index) {
addFile(suffix, computeMetadata, data, repeatJo);
break;
}
}
} else {
addFile(suffix, computeMetadata, data, jo.getJSONObject(prefix));
}
// (2) fix both the template of this tool and the tool state of the task instance calling this method, such that they are in compliance with one
// another when the tool state is used to set the parameters of the task instance at execution time
} else {
// (a) add several properties for this parameter to the tool state
String fileName = data.getName();
JSONObject fileJo = new JSONObject();
fileJo.putOpt("path", fileName);
fileJo.putOpt("name", fileName.split("\\.(?=[^\\.]+$)")[0]);
fileJo.putOpt("files_path", data.getLocalDirectory());
if (data.hasDataType()) {
GalaxyDataType dataType = data.getDataType();
String fileExt = dataType.getExtension();
fileJo.putOpt("extension", fileExt);
fileJo.putOpt("ext", fileExt);
}
// note that this metadata is an empty dictionary that will only be be filled for input data of the task instance calling this method; this is
// done by executing a python script designed to populate the tool state with metadata prior to execution
if (data.hasDataType())
fileJo.putOpt("metadata", new JSONObject());
jo.putOpt(name, fileJo);
// (b) adjust the Python script for setting parameters at runtime to compute metadata for this file, given the computeMetadata parameter is set
if (computeMetadata && data.hasDataType()) {
GalaxyDataType dataType = data.getDataType();
paramScript.append("from ");
paramScript.append(dataType.getFile());
paramScript.append(" import ");
paramScript.append(dataType.getName());
paramScript.append("\n");
inputs.add(data);
}
}
} catch (JSONException e) {
e.printStackTrace();
System.exit(-1);
}
}
/**
* A method for setting the tool state of this task instance from the raw string of the workflow description file to the JSON object provided in the Python
* parameter script. There are several string operations that have to be performed before the tool state can be interpreted as a JSON object.
*
* @param tool_state
* the tool state string, as extracted from the workflow description file
*
*/
public void addToolState(String tool_state) {
String tool_state_json = tool_state;
// replace "{ }" "[ ]" with { } [ ]
tool_state_json = tool_state_json.replaceAll("\"\\{", "\\{");
tool_state_json = tool_state_json.replaceAll("\\}\"", "\\}");
tool_state_json = tool_state_json.replaceAll("\"\\[", "\\[");
tool_state_json = tool_state_json.replaceAll("\\]\"", "\\]");
// remove \
tool_state_json = tool_state_json.replaceAll("\\\\", "");
// replace "" with "
tool_state_json = tool_state_json.replaceAll("\"\"", "\"");
// replace : ", with : "",
tool_state_json = tool_state_json.replaceAll(": ?\",", ": \"\",");
// replace UnvalidatedValue with their actual value
tool_state_json = tool_state_json.replaceAll("\\{\"__class__\":\\s?\"UnvalidatedValue\",\\s?\"value\":\\s?([^\\}]*)\\}", "$1");
// replace "null" with ""
tool_state_json = tool_state_json.replaceAll("\"null\"", "\"\"");
try {
this.toolState = new JSONObject(tool_state_json);
} catch (JSONException e) {
e.printStackTrace();
System.exit(-1);
}
}
/**
* A method for adding commands to be executed after the task instance (e.g., for moving files from a temporary folder to their destination, as specified in
* the tool description)
*
* @param post
* the shell command to be executed after the task instance
*/
public void addToPostScript(String post) {
postScript = postScript + (post.endsWith("\n") ? post : post + "\n");
}
@Override
public Map<String, LocalResource> buildScriptsAndSetResources(Container container) {
Map<String, LocalResource> localResources = super.buildScriptsAndSetResources(container);
String containerId = container.getId().toString();
// The task isntance's bash script is built by appending the pre script, the template compiled by Cheetah using the parameters set in the params Python
// script, and the post script
Data preSriptData = new Data("pre.sh", containerId);
Data paramScriptData = new Data("params.py", containerId);
Data templateData = new Data("template.tmpl", containerId);
Data postSriptData = new Data("post.sh", containerId);
try (BufferedWriter preScriptWriter = new BufferedWriter(new FileWriter(preSriptData.getLocalPath().toString()));
BufferedWriter paramScriptWriter = new BufferedWriter(new FileWriter(paramScriptData.getLocalPath().toString()));
BufferedWriter templateWriter = new BufferedWriter(new FileWriter(templateData.getLocalPath().toString()));
BufferedWriter postScriptWriter = new BufferedWriter(new FileWriter(postSriptData.getLocalPath().toString()))) {
preScriptWriter.write(galaxyTool.getEnv());
paramScriptWriter.write(paramScript.toString());
templateWriter.write(galaxyTool.getTemplate());
postScriptWriter.write(getPostScript());
} catch (IOException e) {
e.printStackTrace();
System.exit(-1);
}
try {
preSriptData.stageOut();
paramScriptData.stageOut();
templateData.stageOut();
postSriptData.stageOut();
preSriptData.addToLocalResourceMap(localResources);
paramScriptData.addToLocalResourceMap(localResources);
templateData.addToLocalResourceMap(localResources);
postSriptData.addToLocalResourceMap(localResources);
} catch (IOException e) {
e.printStackTrace();
System.exit(-1);
}
return localResources;
}
public GalaxyTool getGalaxyTool() {
return galaxyTool;
}
public String getPostScript() {
return postScript;
}
/**
* A method for preparing the Python script that is responsible for populating the tool state with metadata and making it accessible for Cheetah to compile
* the template.
*
* @throws JSONException JSONException
*/
public void prepareParamScript() throws JSONException {
// (1) populate the tool state by mapping parameters and adding additional parameters
galaxyTool.mapParams(toolState);
toolState.put("__new_file_path__", ".");
// (2) Set the tool state to the tool state as parsed from the workflow description and populated in step 1
paramScript.append("\ntool_state = ");
paramScript.append(toolState.toString());
// (3) Define the Dict class, which is a Python dict whose members are accessible like functions of a class (via a "dot")
paramScript.append("\n\nclass Dict(dict):");
paramScript.append("\n def __init__(self, *args, **kwargs):");
paramScript.append("\n super(Dict, self).__init__(*args, **kwargs)");
paramScript.append("\n self.__dict__ = self");
// (4) The Dataset class inherits from the Dict class and provides some functions; in order to utilize the Python scripts describing the Galaxy data
// types, a Dataset object has to be created; unfortunately, the Dataset object provided by Galaxy requires Galaxy to run, hence a custom Dataset class
// had to be created
paramScript.append("\n\nclass Dataset(Dict):");
paramScript.append("\n def has_data(self):");
paramScript.append("\n return True");
paramScript.append("\n def get_size(self):");
paramScript.append("\n return os.path.getsize(self.file_name)");
// (5) The expandToolState method traverses the tool state recursively and, when encountering an input data item, determines and adds its metadata
paramScript.append("\n\ndef expandToolState(src, dest):");
paramScript.append("\n for k, v in src.iteritems():");
// (6) recusrively parse dicts of parameters (conditionals and the root parameter list)
paramScript.append("\n if isinstance (v, dict):");
paramScript.append("\n dest[k] = Dataset() if 'path' in v else Dict()");
paramScript.append("\n expandToolState(v, dest[k])");
// for each input data item
for (GalaxyData input : inputs) {
// (a) instantiate the Python class corresponding to this data item's data type
paramScript.append("\n if 'path' in v and v['path'] == '");
paramScript.append(input.getName());
paramScript.append("':");
paramScript.append("\n dest[k]['file_name'] = v['path']");
paramScript.append("\n datatype = ");
paramScript.append(input.getDataType().getName());
paramScript.append("()");
// (b) set the default values of this data item, as defined in its Python class
paramScript.append("\n for key in datatype.metadata_spec.keys():");
paramScript.append("\n value = datatype.metadata_spec[key]");
paramScript.append("\n default = value.get(\"default\")");
paramScript.append("\n dest[k].metadata[key] = default");
paramScript.append("\n try:");
// (c) attempt to determine additional metadata, as defined in the set_meta method of the Galaxy data type's Python classes
paramScript.append("\n datatype.set_meta(dataset=dest[k])");
paramScript.append("\n except:");
paramScript.append("\n pass");
// (d) metadata information in Python list format has to converted to a string and formatted before it can be added to the tool state
paramScript.append("\n for key in dest[k].metadata.keys():");
paramScript.append("\n value = dest[k].metadata[key]");
paramScript.append("\n if isinstance (value, list):");
paramScript.append("\n dest[k].metadata[key] = ', '.join(str(item) for item in value)");
}
// (7) recursively parse lists of parameters (repeats)
paramScript.append("\n elif isinstance (v, list):");
paramScript.append("\n dest[k] = list()");
paramScript.append("\n for i in v:");
paramScript.append("\n j = Dict()");
paramScript.append("\n dest[k].append(j)");
paramScript.append("\n expandToolState(i, j)");
// (8) if the encountered item is neither of type dict nor of type list (and hence atomic), just add it to the expanded dest dict
paramScript.append("\n else:");
paramScript.append("\n dest[k] = v");
// (9) invoke the expandToolState method and write its result to a pickle file, which Cheetah can interpret to compile the template
paramScript.append("\n\nexpanded_tool_state = Dict()");
paramScript.append("\nexpandToolState(tool_state, expanded_tool_state)");
paramScript.append("\nwith open(\"params.p\", \"wb\") as picklefile:");
paramScript.append("\n pickle.dump(ast.literal_eval(str(expanded_tool_state)), picklefile)\n");
}
}
| GAM Bugfix with optional (and unused) files | hiway-core/src/main/java/de/huberlin/wbi/hiway/am/galaxy/GalaxyTaskInstance.java | GAM Bugfix with optional (and unused) files | <ide><path>iway-core/src/main/java/de/huberlin/wbi/hiway/am/galaxy/GalaxyTaskInstance.java
<ide> */
<ide> public void addToolState(String tool_state) {
<ide> String tool_state_json = tool_state;
<add> tool_state_json = tool_state_json.replaceAll("\"null\"", "\"\"");
<ide> // replace "{ }" "[ ]" with { } [ ]
<ide> tool_state_json = tool_state_json.replaceAll("\"\\{", "\\{");
<ide> tool_state_json = tool_state_json.replaceAll("\\}\"", "\\}");
<ide> tool_state_json = tool_state_json.replaceAll(": ?\",", ": \"\",");
<ide> // replace UnvalidatedValue with their actual value
<ide> tool_state_json = tool_state_json.replaceAll("\\{\"__class__\":\\s?\"UnvalidatedValue\",\\s?\"value\":\\s?([^\\}]*)\\}", "$1");
<del> // replace "null" with ""
<del> tool_state_json = tool_state_json.replaceAll("\"null\"", "\"\"");
<add> // replace "null" with {"path": ""}
<add> // this is done such that otpional files will still be accessible by Cheetah as id.path
<add> tool_state_json = tool_state_json.replaceAll("\"null\"", "{\"path\": \"\"}");
<ide> try {
<ide> this.toolState = new JSONObject(tool_state_json);
<ide> } catch (JSONException e) { |
|
JavaScript | mit | 25f432ffab4166fe777900ca307eaf1bbbcf7d7a | 0 | BrianSipple/ember-cli,buschtoens/ember-cli,runspired/ember-cli,fpauser/ember-cli,Turbo87/ember-cli,ember-cli/ember-cli,akatov/ember-cli,BrianSipple/ember-cli,pixelhandler/ember-cli,patocallaghan/ember-cli,josemarluedke/ember-cli,mike-north/ember-cli,twokul/ember-cli,xtian/ember-cli,pixelhandler/ember-cli,rtablada/ember-cli,kellyselden/ember-cli,sivakumar-kailasam/ember-cli,Turbo87/ember-cli,kanongil/ember-cli,trabus/ember-cli,romulomachado/ember-cli,thoov/ember-cli,buschtoens/ember-cli,thoov/ember-cli,ember-cli/ember-cli,xtian/ember-cli,runspired/ember-cli,mike-north/ember-cli,calderas/ember-cli,thoov/ember-cli,trentmwillis/ember-cli,raycohen/ember-cli,jgwhite/ember-cli,kriswill/ember-cli,ember-cli/ember-cli,pzuraq/ember-cli,kanongil/ember-cli,xtian/ember-cli,runspired/ember-cli,balinterdi/ember-cli,cibernox/ember-cli,sivakumar-kailasam/ember-cli,twokul/ember-cli,elwayman02/ember-cli,raycohen/ember-cli,jrjohnson/ember-cli,scalus/ember-cli,jasonmit/ember-cli,scalus/ember-cli,johanneswuerbach/ember-cli,HeroicEric/ember-cli,patocallaghan/ember-cli,nathanhammond/ember-cli,calderas/ember-cli,thoov/ember-cli,givanse/ember-cli,calderas/ember-cli,givanse/ember-cli,trentmwillis/ember-cli,cibernox/ember-cli,josemarluedke/ember-cli,ef4/ember-cli,jgwhite/ember-cli,BrianSipple/ember-cli,trabus/ember-cli,twokul/ember-cli,johanneswuerbach/ember-cli,kriswill/ember-cli,kriswill/ember-cli,pixelhandler/ember-cli,jasonmit/ember-cli,scalus/ember-cli,runspired/ember-cli,rtablada/ember-cli,mohlek/ember-cli,nathanhammond/ember-cli,mohlek/ember-cli,Turbo87/ember-cli,jgwhite/ember-cli,williamsbdev/ember-cli,asakusuma/ember-cli,kategengler/ember-cli,kanongil/ember-cli,kanongil/ember-cli,romulomachado/ember-cli,romulomachado/ember-cli,kriswill/ember-cli,rtablada/ember-cli,kellyselden/ember-cli,fpauser/ember-cli,HeroicEric/ember-cli,HeroicEric/ember-cli,gfvcastro/ember-cli,johanneswuerbach/ember-cli,kategengler/ember-cli,gfvcastr
o/ember-cli,gfvcastro/ember-cli,scalus/ember-cli,elwayman02/ember-cli,jgwhite/ember-cli,sivakumar-kailasam/ember-cli,fpauser/ember-cli,pzuraq/ember-cli,mohlek/ember-cli,seawatts/ember-cli,jasonmit/ember-cli,pzuraq/ember-cli,sivakumar-kailasam/ember-cli,nathanhammond/ember-cli,HeroicEric/ember-cli,akatov/ember-cli,akatov/ember-cli,rtablada/ember-cli,kellyselden/ember-cli,patocallaghan/ember-cli,patocallaghan/ember-cli,kellyselden/ember-cli,josemarluedke/ember-cli,givanse/ember-cli,nathanhammond/ember-cli,josemarluedke/ember-cli,trabus/ember-cli,trabus/ember-cli,cibernox/ember-cli,seawatts/ember-cli,pzuraq/ember-cli,givanse/ember-cli,ef4/ember-cli,romulomachado/ember-cli,seawatts/ember-cli,asakusuma/ember-cli,akatov/ember-cli,jrjohnson/ember-cli,xtian/ember-cli,ef4/ember-cli,mike-north/ember-cli,calderas/ember-cli,cibernox/ember-cli,ef4/ember-cli,johanneswuerbach/ember-cli,jasonmit/ember-cli,pixelhandler/ember-cli,williamsbdev/ember-cli,fpauser/ember-cli,Turbo87/ember-cli,gfvcastro/ember-cli,twokul/ember-cli,mike-north/ember-cli,mohlek/ember-cli,balinterdi/ember-cli,seawatts/ember-cli,trentmwillis/ember-cli,williamsbdev/ember-cli,williamsbdev/ember-cli,trentmwillis/ember-cli,BrianSipple/ember-cli | 'use strict';
var isDarwin = /darwin/i.test(require('os').type());
var debug = require('debug')('ember-cli:utilities/attempt-metadata-index-file');
/*
* Writes a `.metadata_never_index` file to the specified folder if running on OS X.
*
* This hints to spotlight that this folder should not be indexed.
*
* @param {String} dir path to the folder that should not be indexed
*/
module.exports = function(dir) {
var path = dir + '/.metadata_never_index';
if (!isDarwin) {
debug('not darwin, skipping %s (which hints to spotlight to prevent indexing)', path);
return;
}
debug('creating: %s (to prevent spotlight indexing)', path);
var fs = require('fs-extra');
fs.mkdirsSync(dir);
fs.writeFileSync(path);
};
| lib/utilities/attempt-never-index.js | 'use strict';
var isDarwin = /darwin/i.test(require('os').type());
var debug = require('debug')('ember-cli:utilities/attempt-metadata-index-file');
module.exports = function(dir) {
var path = dir + '/.metadata_never_index';
if (!isDarwin) {
debug('not darwin, skipping %s (which hints to spotlight to prevent indexing)', path);
return;
}
debug('creating: %s (to prevent spotlight indexing)', path);
var fs = require('fs-extra');
fs.mkdirsSync(dir);
fs.writeFileSync(path);
};
| utilities/attempt-never-index: Add doc comment
| lib/utilities/attempt-never-index.js | utilities/attempt-never-index: Add doc comment | <ide><path>ib/utilities/attempt-never-index.js
<ide> var isDarwin = /darwin/i.test(require('os').type());
<ide> var debug = require('debug')('ember-cli:utilities/attempt-metadata-index-file');
<ide>
<add>/*
<add> * Writes a `.metadata_never_index` file to the specified folder if running on OS X.
<add> *
<add> * This hints to spotlight that this folder should not be indexed.
<add> *
<add> * @param {String} dir path to the folder that should not be indexed
<add> */
<ide> module.exports = function(dir) {
<ide> var path = dir + '/.metadata_never_index';
<ide> |
|
Java | apache-2.0 | ac83291942650a4bd329c73d2b302e4dde5313e1 | 0 | khituras/elasticsearch-mapper-preanalyzed | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.plugin.mapper.preanalyzed;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.junit.internal.ArrayComparisonFailure;
/**
*
*/
public class PreAnalyzedFieldMapper extends AbstractFieldMapper<String> implements AllFieldMapper.IncludeInAll {
public static final String CONTENT_TYPE = "preanalyzed";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.freeze();
}
}
public static class Builder extends AbstractFieldMapper.Builder<Builder, PreAnalyzedFieldMapper> {
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
builder = this;
}
public PreAnalyzedFieldMapper build(BuilderContext context) {
PreAnalyzedFieldMapper fieldMapper = new PreAnalyzedFieldMapper(buildNames(context), boost, fieldType,
docValues, indexAnalyzer, searchAnalyzer, postingsProvider, docValuesProvider, similarity,
normsLoading, fieldDataSettings, context.indexSettings());
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
}
public static class TypeParser implements Mapper.TypeParser {
// The parser used to parse the mapping; thus, setting whether the field
// is tokenized, stored etc.
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext)
throws MapperParsingException {
PreAnalyzedFieldMapper.Builder builder = new PreAnalyzedFieldMapper.Builder(name);
parseField(builder, name, node, parserContext);
return builder;
}
}
private Boolean includeInAll;
private FieldType fieldTypeText;
private FieldType fieldTypeTokenStream;
protected PreAnalyzedFieldMapper(Names names, Float boost, FieldType fieldType, Boolean docValues,
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat,
DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity, Loading normsLoading,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, postingsFormat, docValuesFormat,
similarity, normsLoading, fieldDataSettings, indexSettings);
fieldTypeTokenStream = new FieldType(fieldType);
// TokenStream fields cannot be stored. But the option can be set anyway
// because the plain text value should be stored.
fieldTypeTokenStream.setStored(false);
// The text field will be stored but not analyzed, tokenized or anything
// except of being stored.
fieldTypeText = new FieldType(fieldType);
fieldTypeText.setIndexed(false);
fieldTypeText.setTokenized(false);
fieldTypeText.setStored(true);
fieldTypeText.setStoreTermVectors(false);
fieldTypeText.setStoreTermVectorPositions(false);
fieldTypeText.setStoreTermVectorOffsets(false);
fieldTypeText.setStoreTermVectorPayloads(false);
}
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
public FieldDataType defaultFieldDataType() {
// Set the default field data type to string: This way the contents are
// interpreted as if the field type would have been "string" which is
// important for facets, for instance.
return new FieldDataType("string");
}
public void includeInAll(Boolean includeInAll) {
if (includeInAll != null) {
this.includeInAll = includeInAll;
}
}
public void includeInAllIfNotSet(Boolean includeInAll) {
if (includeInAll != null && this.includeInAll == null) {
this.includeInAll = includeInAll;
}
}
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
protected boolean customBoost() {
return true;
}
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
BytesRef value = null;
float boost = this.boost;
if (context.externalValueSet()) {
value = (BytesRef) context.externalValue();
}
if (null == value) {
// No value given externally. Thus we expect a simple value like
// {"text":"{\"v\":\"1\",\"str\":\"This is a text\",\"tokens\":[...]}"}
// in the original document. We will parse out this string and then
// give
// it to a new parser in the following that will "see" the actual
// JSON
// format. Thus, there should be one value string and we are going
// to
// fetch it now.
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
// We expect a string value encoding a JSON object which contains
// the
// pre-analyzed data.
if (token == XContentParser.Token.VALUE_STRING) {
value = parser.bytes();
}
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), new String(value.bytes), boost);
}
if (!fieldType().indexed() && !fieldType().stored()) {
context.ignoredValue(names.indexName(), new String(value.bytes));
return;
}
Tuple<PreAnalyzedStoredValue, TokenStream> valueAndTokenStream = parsePreAnalyzedFieldContents(value);
if (fieldTypeTokenStream.indexed() && fieldTypeTokenStream.tokenized()) {
TokenStream ts = valueAndTokenStream.v2();
if (null == ts)
throw new MapperParsingException("The preanalyzed field \"" + names.fullName()
+ "\" is tokenized and indexed, but no preanalyzed TokenStream could be found.");
Field field = new Field(names.indexName(), ts, fieldTypeTokenStream);
field.setBoost(boost);
// context.doc().add(field);
fields.add(field);
}
PreAnalyzedStoredValue storedValue = valueAndTokenStream.v1();
if (fieldTypeText.stored() && null != storedValue.value) {
Field field;
if (PreAnalyzedStoredValue.VALUE_TYPE.STRING == storedValue.type) {
field = new Field(names.indexName(), (String) storedValue.value, fieldTypeText);
} else {
field = new Field(names.indexName(), (BytesRef) storedValue.value, fieldTypeText);
}
// context.doc().add(field);
fields.add(field);
}
}
/**
* Parses the contents of <tt>preAnalyzedData</tt> according to the format specified by the Solr JSON PreAnalyzed
* field type. The format specification can be found at the link below.
*
* @param preAnalyzedData
* @return A tuple, containing the plain text value and a TokenStream with the pre-analyzed tokens.
* @see <a
* href="http://wiki.apache.org/solr/JsonPreAnalyzedParser">http://wiki.apache.org/solr/JsonPreAnalyzedParser</a>
*/
private Tuple<PreAnalyzedStoredValue, TokenStream> parsePreAnalyzedFieldContents(BytesRef preAnalyzedData) {
try {
XContentParser parser = XContentHelper.createParser(preAnalyzedData.bytes, 0, preAnalyzedData.length);
Token currentToken = parser.currentToken();
String currentFieldName = "";
String version = null;
PreAnalyzedStoredValue storedValue = new PreAnalyzedStoredValue();
while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.text();
} else if (currentToken == XContentParser.Token.VALUE_STRING) {
if ("v".equals(currentFieldName)) {
version = parser.text();
if (!"1".equals(version)) {
throw new MapperParsingException("Version of pre-analyzed field format is \"" + version
+ "\" which is not supported.");
}
} else if ("str".equals(currentFieldName)) {
storedValue.value = parser.text();
storedValue.type = PreAnalyzedStoredValue.VALUE_TYPE.STRING;
} else if ("bin".equals(currentFieldName)) {
storedValue.value = parser.binaryValue();
storedValue.type = PreAnalyzedStoredValue.VALUE_TYPE.STRING;
}
}
}
if (null == version) {
throw new MapperParsingException("No version of pre-analyzed field format has been specified.");
}
return new Tuple<PreAnalyzedStoredValue, TokenStream>(storedValue, new PreAnalyzedTokenStream(
preAnalyzedData));
} catch (IOException e) {
throw new MapperParsingException("The input document could not be parsed as a preanalyzed field value.", e);
}
}
protected String contentType() {
return CONTENT_TYPE;
}
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
this.includeInAll = ((PreAnalyzedFieldMapper) mergeWith).includeInAll;
}
}
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
}
}
static class PreAnalyzedTokenStream extends TokenStream {
private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
private final FlagsAttribute flagsAtt = addAttribute(FlagsAttribute.class);
private XContentParser parser;
private boolean termsFieldFound = false;
private final BytesRef input;
/**
* <p>
* Creates a <tt>PreAnalyzedTokenStream</tt> which converts a JSON-serialization of a TokenStream to an actual
* TokenStream.
* </p>
* <p>
* The accepted JSON format is that of the Solr JsonPreAnalyzed format (see reference below).
* </p>
*
* @param input
* - The whole serialized field data, including version, the data to store and, of course, the list
* of tokens.
* @throws IOException
* @see <a
* href="http://wiki.apache.org/solr/JsonPreAnalyzedParser">http://wiki.apache.org/solr/JsonPreAnalyzedParser</a>
*/
PreAnalyzedTokenStream(BytesRef input) throws IOException {
this.input = input;
reset();
}
@Override
public final boolean incrementToken() throws IOException {
Token currentToken = parser.nextToken();
if (termsFieldFound && currentToken != null && currentToken != XContentParser.Token.END_ARRAY) {
// First clear all attributes for the case that some attributes
// are sometimes but not always specified.
clearAttributes();
boolean termFound = false;
int start = -1;
int end = -1;
String currentFieldName = null;
while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.text();
} else if (currentToken == XContentParser.Token.VALUE_STRING) {
if ("t".equals(currentFieldName)) {
char[] tokenBuffer = parser.textCharacters();
termAtt.copyBuffer(tokenBuffer, parser.textOffset(), parser.textLength());
termFound = true;
} else if ("p".equals(currentFieldName)) {
// since ES 1.x - at least 1.3 - we have to make a copy of the incoming BytesRef because the
// byte[] referenced by the input is longer than the actual information, just containing
// zeros, which can cause problems with Base64 encoding. All we do is trim the byte array to
// its actual length.
BytesRef inputBytes = parser.bytes();
byte[] byteArray = new byte[inputBytes.length];
System.arraycopy(inputBytes.bytes, 0, byteArray, 0, inputBytes.length);
BytesRef bytesRef = new BytesRef(byteArray);
payloadAtt.setPayload(bytesRef);
} else if ("f".equals(currentFieldName)) {
flagsAtt.setFlags(Integer.decode(parser.text()));
} else if ("y".equals(currentFieldName)) {
typeAtt.setType(parser.text());
}
} else if (currentToken == XContentParser.Token.VALUE_NUMBER) {
if ("s".equals(currentFieldName)) {
start = parser.intValue();
} else if ("e".equals(currentFieldName)) {
end = parser.intValue();
} else if ("i".equals(currentFieldName)) {
posIncrAtt.setPositionIncrement(parser.intValue());
}
}
}
if (-1 != start && -1 != end)
offsetAtt.setOffset(start, end);
if (!termFound) {
throw new IllegalArgumentException(
"There is at least one token object in the pre-analyzed field value where no actual term string is specified.");
}
return true;
}
return false;
}
/**
* Creates a new parser reading the input data and sets the parser state right to the beginning of the actual
* token list.
*/
@Override
public void reset() throws IOException {
parser = XContentHelper.createParser(input.bytes, 0, input.length);
// Go to the beginning of the token array to be ready when the
// tokenstream is read.
Token token;
String currentField;
do {
token = parser.nextToken();
if (token == XContentParser.Token.FIELD_NAME) {
currentField = parser.text();
if ("tokens".equals(currentField))
termsFieldFound = true;
}
} while (!termsFieldFound && token != null);
}
}
static private class PreAnalyzedStoredValue {
Object value;
VALUE_TYPE type;
enum VALUE_TYPE {
STRING, BINARY
}
}
public void unsetIncludeInAll() {
includeInAll = false;
}
}
| src/main/java/org/elasticsearch/index/plugin/mapper/preanalyzed/PreAnalyzedFieldMapper.java | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.plugin.mapper.preanalyzed;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.junit.internal.ArrayComparisonFailure;
/**
*
*/
public class PreAnalyzedFieldMapper extends AbstractFieldMapper<String> implements AllFieldMapper.IncludeInAll {
public static final String CONTENT_TYPE = "preanalyzed";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.freeze();
}
}
public static class Builder extends AbstractFieldMapper.Builder<Builder, PreAnalyzedFieldMapper> {
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
builder = this;
}
public PreAnalyzedFieldMapper build(BuilderContext context) {
PreAnalyzedFieldMapper fieldMapper = new PreAnalyzedFieldMapper(buildNames(context), boost, fieldType,
docValues, indexAnalyzer, searchAnalyzer, postingsProvider, docValuesProvider, similarity,
normsLoading, fieldDataSettings, context.indexSettings());
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
}
public static class TypeParser implements Mapper.TypeParser {
// The parser used to parse the mapping; thus, setting whether the field
// is tokenized, stored etc.
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext)
throws MapperParsingException {
PreAnalyzedFieldMapper.Builder builder = new PreAnalyzedFieldMapper.Builder(name);
parseField(builder, name, node, parserContext);
return builder;
}
}
private Boolean includeInAll;
private FieldType fieldTypeText;
private FieldType fieldTypeTokenStream;
protected PreAnalyzedFieldMapper(Names names, float boost, FieldType fieldType, boolean docValues,
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat,
DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity, Loading normsLoading,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, postingsFormat, docValuesFormat,
similarity, normsLoading, fieldDataSettings, indexSettings);
fieldTypeTokenStream = new FieldType(fieldType);
// TokenStream fields cannot be stored. But the option can be set anyway
// because the plain text value should be stored.
fieldTypeTokenStream.setStored(false);
// The text field will be stored but not analyzed, tokenized or anything
// except of being stored.
fieldTypeText = new FieldType(fieldType);
fieldTypeText.setIndexed(false);
fieldTypeText.setTokenized(false);
fieldTypeText.setStored(true);
fieldTypeText.setStoreTermVectors(false);
fieldTypeText.setStoreTermVectorPositions(false);
fieldTypeText.setStoreTermVectorOffsets(false);
fieldTypeText.setStoreTermVectorPayloads(false);
}
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
public FieldDataType defaultFieldDataType() {
// Set the default field data type to string: This way the contents are
// interpreted as if the field type would have been "string" which is
// important for facets, for instance.
return new FieldDataType("string");
}
public void includeInAll(Boolean includeInAll) {
if (includeInAll != null) {
this.includeInAll = includeInAll;
}
}
public void includeInAllIfNotSet(Boolean includeInAll) {
if (includeInAll != null && this.includeInAll == null) {
this.includeInAll = includeInAll;
}
}
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
protected boolean customBoost() {
return true;
}
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
BytesRef value = null;
float boost = this.boost;
if (context.externalValueSet()) {
value = (BytesRef) context.externalValue();
}
if (null == value) {
// No value given externally. Thus we expect a simple value like
// {"text":"{\"v\":\"1\",\"str\":\"This is a text\",\"tokens\":[...]}"}
// in the original document. We will parse out this string and then
// give
// it to a new parser in the following that will "see" the actual
// JSON
// format. Thus, there should be one value string and we are going
// to
// fetch it now.
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
// We expect a string value encoding a JSON object which contains
// the
// pre-analyzed data.
if (token == XContentParser.Token.VALUE_STRING) {
value = parser.bytes();
}
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), new String(value.bytes), boost);
}
if (!fieldType().indexed() && !fieldType().stored()) {
context.ignoredValue(names.indexName(), new String(value.bytes));
return;
}
Tuple<PreAnalyzedStoredValue, TokenStream> valueAndTokenStream = parsePreAnalyzedFieldContents(value);
if (fieldTypeTokenStream.indexed() && fieldTypeTokenStream.tokenized()) {
TokenStream ts = valueAndTokenStream.v2();
if (null == ts)
throw new MapperParsingException("The preanalyzed field \"" + names.fullName()
+ "\" is tokenized and indexed, but no preanalyzed TokenStream could be found.");
Field field = new Field(names.indexName(), ts, fieldTypeTokenStream);
field.setBoost(boost);
// context.doc().add(field);
fields.add(field);
}
PreAnalyzedStoredValue storedValue = valueAndTokenStream.v1();
if (fieldTypeText.stored() && null != storedValue.value) {
Field field;
if (PreAnalyzedStoredValue.VALUE_TYPE.STRING == storedValue.type) {
field = new Field(names.indexName(), (String) storedValue.value, fieldTypeText);
} else {
field = new Field(names.indexName(), (BytesRef) storedValue.value, fieldTypeText);
}
// context.doc().add(field);
fields.add(field);
}
}
/**
* Parses the contents of <tt>preAnalyzedData</tt> according to the format specified by the Solr JSON PreAnalyzed
* field type. The format specification can be found at the link below.
*
* @param preAnalyzedData
* @return A tuple, containing the plain text value and a TokenStream with the pre-analyzed tokens.
* @see <a
* href="http://wiki.apache.org/solr/JsonPreAnalyzedParser">http://wiki.apache.org/solr/JsonPreAnalyzedParser</a>
*/
private Tuple<PreAnalyzedStoredValue, TokenStream> parsePreAnalyzedFieldContents(BytesRef preAnalyzedData) {
try {
XContentParser parser = XContentHelper.createParser(preAnalyzedData.bytes, 0, preAnalyzedData.length);
Token currentToken = parser.currentToken();
String currentFieldName = "";
String version = null;
PreAnalyzedStoredValue storedValue = new PreAnalyzedStoredValue();
while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.text();
} else if (currentToken == XContentParser.Token.VALUE_STRING) {
if ("v".equals(currentFieldName)) {
version = parser.text();
if (!"1".equals(version)) {
throw new MapperParsingException("Version of pre-analyzed field format is \"" + version
+ "\" which is not supported.");
}
} else if ("str".equals(currentFieldName)) {
storedValue.value = parser.text();
storedValue.type = PreAnalyzedStoredValue.VALUE_TYPE.STRING;
} else if ("bin".equals(currentFieldName)) {
storedValue.value = parser.binaryValue();
storedValue.type = PreAnalyzedStoredValue.VALUE_TYPE.STRING;
}
}
}
if (null == version) {
throw new MapperParsingException("No version of pre-analyzed field format has been specified.");
}
return new Tuple<PreAnalyzedStoredValue, TokenStream>(storedValue, new PreAnalyzedTokenStream(
preAnalyzedData));
} catch (IOException e) {
throw new MapperParsingException("The input document could not be parsed as a preanalyzed field value.", e);
}
}
protected String contentType() {
return CONTENT_TYPE;
}
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
this.includeInAll = ((PreAnalyzedFieldMapper) mergeWith).includeInAll;
}
}
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
}
}
static class PreAnalyzedTokenStream extends TokenStream {
private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
private final FlagsAttribute flagsAtt = addAttribute(FlagsAttribute.class);
private XContentParser parser;
private boolean termsFieldFound = false;
private final BytesRef input;
/**
* <p>
* Creates a <tt>PreAnalyzedTokenStream</tt> which converts a JSON-serialization of a TokenStream to an actual
* TokenStream.
* </p>
* <p>
* The accepted JSON format is that of the Solr JsonPreAnalyzed format (see reference below).
* </p>
*
* @param input
* - The whole serialized field data, including version, the data to store and, of course, the list
* of tokens.
* @throws IOException
* @see <a
* href="http://wiki.apache.org/solr/JsonPreAnalyzedParser">http://wiki.apache.org/solr/JsonPreAnalyzedParser</a>
*/
PreAnalyzedTokenStream(BytesRef input) throws IOException {
this.input = input;
reset();
}
@Override
public final boolean incrementToken() throws IOException {
Token currentToken = parser.nextToken();
if (termsFieldFound && currentToken != null && currentToken != XContentParser.Token.END_ARRAY) {
// First clear all attributes for the case that some attributes
// are sometimes but not always specified.
clearAttributes();
boolean termFound = false;
int start = -1;
int end = -1;
String currentFieldName = null;
while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.text();
} else if (currentToken == XContentParser.Token.VALUE_STRING) {
if ("t".equals(currentFieldName)) {
char[] tokenBuffer = parser.textCharacters();
termAtt.copyBuffer(tokenBuffer, parser.textOffset(), parser.textLength());
termFound = true;
} else if ("p".equals(currentFieldName)) {
// since ES 1.x - at least 1.3 - we have to make a copy of the incoming BytesRef because the
// byte[] referenced by the input is longer than the actual information, just containing
// zeros, which can cause problems with Base64 encoding. All we do is trim the byte array to
// its actual length.
BytesRef inputBytes = parser.bytes();
byte[] byteArray = new byte[inputBytes.length];
System.arraycopy(inputBytes.bytes, 0, byteArray, 0, inputBytes.length);
BytesRef bytesRef = new BytesRef(byteArray);
payloadAtt.setPayload(bytesRef);
} else if ("f".equals(currentFieldName)) {
flagsAtt.setFlags(Integer.decode(parser.text()));
} else if ("y".equals(currentFieldName)) {
typeAtt.setType(parser.text());
}
} else if (currentToken == XContentParser.Token.VALUE_NUMBER) {
if ("s".equals(currentFieldName)) {
start = parser.intValue();
} else if ("e".equals(currentFieldName)) {
end = parser.intValue();
} else if ("i".equals(currentFieldName)) {
posIncrAtt.setPositionIncrement(parser.intValue());
}
}
}
if (-1 != start && -1 != end)
offsetAtt.setOffset(start, end);
if (!termFound) {
throw new IllegalArgumentException(
"There is at least one token object in the pre-analyzed field value where no actual term string is specified.");
}
return true;
}
return false;
}
/**
* Creates a new parser reading the input data and sets the parser state right to the beginning of the actual
* token list.
*/
@Override
public void reset() throws IOException {
parser = XContentHelper.createParser(input.bytes, 0, input.length);
// Go to the beginning of the token array to be ready when the
// tokenstream is read.
Token token;
String currentField;
do {
token = parser.nextToken();
if (token == XContentParser.Token.FIELD_NAME) {
currentField = parser.text();
if ("tokens".equals(currentField))
termsFieldFound = true;
}
} while (!termsFieldFound && token != null);
}
}
static private class PreAnalyzedStoredValue {
Object value;
VALUE_TYPE type;
enum VALUE_TYPE {
STRING, BINARY
}
}
public void unsetIncludeInAll() {
includeInAll = false;
}
}
| Fixed an NPE due to autoboxing | src/main/java/org/elasticsearch/index/plugin/mapper/preanalyzed/PreAnalyzedFieldMapper.java | Fixed an NPE due to autoboxing | <ide><path>rc/main/java/org/elasticsearch/index/plugin/mapper/preanalyzed/PreAnalyzedFieldMapper.java
<ide> private FieldType fieldTypeText;
<ide> private FieldType fieldTypeTokenStream;
<ide>
<del> protected PreAnalyzedFieldMapper(Names names, float boost, FieldType fieldType, boolean docValues,
<add> protected PreAnalyzedFieldMapper(Names names, Float boost, FieldType fieldType, Boolean docValues,
<ide> NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat,
<ide> DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity, Loading normsLoading,
<ide> @Nullable Settings fieldDataSettings, Settings indexSettings) { |
|
JavaScript | mit | 8d11f2a6c0d367d8eb20987f54c18f98ff6f08b3 | 0 | rolemos/angular-timer,lvpeng/angular-timer,siddii/angular-timer,Jeremy017/angular-timer,justicart/angular-timer,RobinHerzog/angular-timer,DarthVanger/angular-timer,hloni2major/angular-timer,DarthVanger/angular-timer,WillianPaiva/angular-timer,maccode/angular-timer,WillianPaiva/angular-timer,staff0rd/angular-timer,suryasingh/angular-timer,Jeremy017/angular-timer,GeroLauvergnat/angular-timer,maccode/angular-timer,benfes/angular-timer,hloni2major/angular-timer,andresmatasuarez/angular-timer,akiokio/angular-timer,RobinHerzog/angular-timer,jayvi/angular-timer,andresmatasuarez/angular-timer,justicart/angular-timer,Verarxar/angular-timer,kasoprecede47/angular-timer,jayvi/angular-timer,leonuh/angular-timer,staff0rd/angular-timer,kasoprecede47/angular-timer,idomaz00/angular-timer,seawenzhu/angular-timer,seawenzhu/angular-timer,lvpeng/angular-timer,leonuh/angular-timer,benfes/angular-timer,suryasingh/angular-timer,webconnex/angular-timer,rolemos/angular-timer,akiokio/angular-timer,siddii/angular-timer,GeroLauvergnat/angular-timer,webconnex/angular-timer,idomaz00/angular-timer,Verarxar/angular-timer | module.exports = function (grunt) {
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-karma');
grunt.loadNpmTasks('grunt-contrib-connect');
var userConfig = {
dist_dir: 'dist',
app_files: {
js: [ 'app/**/*.js', '!app/**/*.spec.js' ]
}
};
var taskConfig = {
pkg: grunt.file.readJSON("package.json"),
meta: {
banner: '/**\n' +
' * <%= pkg.name %> - v<%= pkg.version %> - <%= grunt.template.today("yyyy-mm-dd h:MM TT") %>\n' +
' * <%= pkg.homepage %>\n' +
' *\n' +
' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= pkg.author %>\n' +
' * Licensed <%= pkg.licenses.type %> <<%= pkg.licenses.url %>>\n' +
' */\n'
},
concat: {
compile_js: {
options: {
banner: '<%= meta.banner %>'
},
src: [
'bower_components/jquery/jquery.min.js',
'bower_components/angular/angular.min.js',
'bower_components/bootstrap/docs/assets/js/bootstrap.min.js',
'bower_components/momentjs/min/moment.min.js',
'bower_components/momentjs/min/locales.min.js',
'bower_components/humanize-duration/humanize-duration.js',
'app/**/*.js'
],
dest: '<%= dist_dir %>/<%= pkg.name %>.js'
}
},
uglify: {
options: {
banner: '<%= meta.banner %>'
},
files: {
src: ['<%= concat.compile_js.dest %>'],
dest: '<%= dist_dir %>/<%= pkg.name %>.min.js'
}
},
jshint: {
src: [
'<%= app_files.js %>'
],
gruntfile: [
'Gruntfile.js'
],
options: {
curly: true,
immed: true,
newcap: true,
noarg: true,
sub: true,
boss: true,
eqnull: true
}
},
connect: {
server: {
options: {
port: 3030,
base: '.',
keepalive: false,
livereload:true,
open: true
}
},
testserver: {
options: {
port: 3030,
base: '.'
}
}
},
karma: {
unit: {
configFile: 'config/karma.conf.js',
singleRun: true,
browsers: ['PhantomJS']
},
e2e: {
configFile: 'config/karma-e2e.conf.js',
singleRun: true,
browsers: ['PhantomJS']
}
},
watch: {
scripts: {
files: ['Gruntfile.js', '*.json', 'app/**/*.js','*.html'],
tasks: ['build'],
options: {
livereload: true
}
}
}
};
grunt.initConfig(grunt.util._.extend(taskConfig, userConfig));
grunt.registerTask('default', [ 'connect:server', 'watch']);
grunt.registerTask('tests', [ 'connect:testserver', 'build', 'karma:unit', 'karma:e2e']);
grunt.registerTask('build', [
'jshint', 'concat', 'uglify'
]);
};
| Gruntfile.js | module.exports = function (grunt) {
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-karma');
grunt.loadNpmTasks('grunt-contrib-connect');
var userConfig = {
dist_dir: 'dist',
app_files: {
js: [ 'app/**/*.js', '!app/**/*.spec.js' ]
}
};
var taskConfig = {
pkg: grunt.file.readJSON("package.json"),
meta: {
banner: '/**\n' +
' * <%= pkg.name %> - v<%= pkg.version %> - <%= grunt.template.today("yyyy-mm-dd h:MM TT") %>\n' +
' * <%= pkg.homepage %>\n' +
' *\n' +
' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= pkg.author %>\n' +
' * Licensed <%= pkg.licenses.type %> <<%= pkg.licenses.url %>>\n' +
' */\n'
},
concat: {
compile_js: {
options: {
banner: '<%= meta.banner %>'
},
src: [
'app/**/*.js'
],
dest: '<%= dist_dir %>/<%= pkg.name %>.js'
}
},
uglify: {
options: {
banner: '<%= meta.banner %>'
},
files: {
src: ['<%= concat.compile_js.dest %>'],
dest: '<%= dist_dir %>/<%= pkg.name %>.min.js'
}
},
jshint: {
src: [
'<%= app_files.js %>'
],
gruntfile: [
'Gruntfile.js'
],
options: {
curly: true,
immed: true,
newcap: true,
noarg: true,
sub: true,
boss: true,
eqnull: true
}
},
connect: {
server: {
options: {
port: 3030,
base: '.',
keepalive: false,
livereload:true,
open: true
}
},
testserver: {
options: {
port: 3030,
base: '.'
}
}
},
karma: {
unit: {
configFile: 'config/karma.conf.js',
singleRun: true,
browsers: ['PhantomJS']
},
e2e: {
configFile: 'config/karma-e2e.conf.js',
singleRun: true,
browsers: ['PhantomJS']
}
},
watch: {
scripts: {
files: ['Gruntfile.js', '*.json', 'app/**/*.js','*.html'],
tasks: ['build'],
options: {
livereload: true
}
}
}
};
grunt.initConfig(grunt.util._.extend(taskConfig, userConfig));
grunt.registerTask('default', [ 'connect:server', 'watch']);
grunt.registerTask('tests', [ 'connect:testserver', 'build', 'karma:unit', 'karma:e2e']);
grunt.registerTask('build', [
'jshint', 'concat', 'uglify'
]);
};
| add bower components inside gruntFile
| Gruntfile.js | add bower components inside gruntFile | <ide><path>runtfile.js
<ide> banner: '<%= meta.banner %>'
<ide> },
<ide> src: [
<del> 'app/**/*.js'
<add> 'bower_components/jquery/jquery.min.js',
<add> 'bower_components/angular/angular.min.js',
<add> 'bower_components/bootstrap/docs/assets/js/bootstrap.min.js',
<add> 'bower_components/momentjs/min/moment.min.js',
<add> 'bower_components/momentjs/min/locales.min.js',
<add> 'bower_components/humanize-duration/humanize-duration.js',
<add> 'app/**/*.js'
<ide> ],
<ide> dest: '<%= dist_dir %>/<%= pkg.name %>.js'
<ide> } |
|
Java | agpl-3.0 | error: pathspec 'desktop/src/main/java/nl/mpi/kinnate/kindata/VisiblePanelSetting.java' did not match any file(s) known to git
| 736236afc887fa7dfb59570b48d10b1734389184 | 1 | PeterWithers/temp-to-delete1,PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver,KinshipSoftware/KinOathKinshipArchiver | package nl.mpi.kinnate.kindata;
import javax.xml.bind.annotation.XmlAttribute;
/**
* Document : VisiblePanelSetting
* Created on : Sept 25, 2011, 12:02:44 PM
* Author : Peter Withers
*/
public class VisiblePanelSetting {
public enum PanelType {
KinTypeStrings,
KinTerms,
ArchiveLinker,
MetaData,
IndexerSettings,
DiagramTree,
EntitySearch
}
@XmlAttribute(name = "type", namespace = "http://mpi.nl/tla/kin")
PanelType panelType;
@XmlAttribute(name = "show", namespace = "http://mpi.nl/tla/kin")
boolean panelShown;
@XmlAttribute(name = "width", namespace = "http://mpi.nl/tla/kin")
int panelWidth;
public VisiblePanelSetting() {
}
public VisiblePanelSetting(PanelType panelType, boolean panelShown, int panelWidth) {
this.panelType = panelType;
this.panelShown = panelShown;
this.panelWidth = panelWidth;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final VisiblePanelSetting other = (VisiblePanelSetting) obj;
if (this.panelType != other.panelType) {
return false;
}
return true;
}
@Override
public int hashCode() {
int hash = 7;
hash = 67 * hash + (this.panelType != null ? this.panelType.hashCode() : 0);
return hash;
}
}
| desktop/src/main/java/nl/mpi/kinnate/kindata/VisiblePanelSetting.java | Added stored data structure so that the shown panels can be saved in the drawing; eg if only the kin type strings panel is visible on save then only that panel will be visible on open.
| desktop/src/main/java/nl/mpi/kinnate/kindata/VisiblePanelSetting.java | Added stored data structure so that the shown panels can be saved in the drawing; eg if only the kin type strings panel is visible on save then only that panel will be visible on open. | <ide><path>esktop/src/main/java/nl/mpi/kinnate/kindata/VisiblePanelSetting.java
<add>package nl.mpi.kinnate.kindata;
<add>
<add>import javax.xml.bind.annotation.XmlAttribute;
<add>
<add>/**
<add> * Document : VisiblePanelSetting
<add> * Created on : Sept 25, 2011, 12:02:44 PM
<add> * Author : Peter Withers
<add> */
<add>public class VisiblePanelSetting {
<add>
<add> public enum PanelType {
<add>
<add> KinTypeStrings,
<add> KinTerms,
<add> ArchiveLinker,
<add> MetaData,
<add> IndexerSettings,
<add> DiagramTree,
<add> EntitySearch
<add> }
<add> @XmlAttribute(name = "type", namespace = "http://mpi.nl/tla/kin")
<add> PanelType panelType;
<add> @XmlAttribute(name = "show", namespace = "http://mpi.nl/tla/kin")
<add> boolean panelShown;
<add> @XmlAttribute(name = "width", namespace = "http://mpi.nl/tla/kin")
<add> int panelWidth;
<add>
<add> public VisiblePanelSetting() {
<add> }
<add>
<add> public VisiblePanelSetting(PanelType panelType, boolean panelShown, int panelWidth) {
<add> this.panelType = panelType;
<add> this.panelShown = panelShown;
<add> this.panelWidth = panelWidth;
<add> }
<add>
<add> @Override
<add> public boolean equals(Object obj) {
<add> if (obj == null) {
<add> return false;
<add> }
<add> if (getClass() != obj.getClass()) {
<add> return false;
<add> }
<add> final VisiblePanelSetting other = (VisiblePanelSetting) obj;
<add> if (this.panelType != other.panelType) {
<add> return false;
<add> }
<add> return true;
<add> }
<add>
<add> @Override
<add> public int hashCode() {
<add> int hash = 7;
<add> hash = 67 * hash + (this.panelType != null ? this.panelType.hashCode() : 0);
<add> return hash;
<add> }
<add>} |
|
Java | mit | 12df7030106c30488d94268f654f380753c241c4 | 0 | vnadgir-ef/vertx-jersey,christianbellinaef/vertx-jersey,englishtown/vertx-jersey,ef-labs/vertx-jersey,vnadgir-ef/vertx-jersey,christianbellinaef/vertx-jersey,englishtown/vertx-jersey,ef-labs/vertx-jersey | /*
* The MIT License (MIT)
* Copyright © 2013 Englishtown <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the “Software”), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.englishtown.vertx.jersey.impl;
import com.englishtown.vertx.jersey.ApplicationHandlerDelegate;
import com.englishtown.vertx.jersey.JerseyHandlerConfigurator;
import org.glassfish.jersey.server.ApplicationHandler;
import org.glassfish.jersey.server.ResourceConfig;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.json.JsonArray;
import org.vertx.java.core.json.JsonObject;
import org.vertx.java.platform.Container;
import java.net.URI;
/**
* Default {@link JerseyHandlerConfigurator} implementation
*/
public class DefaultJerseyHandlerConfigurator implements JerseyHandlerConfigurator {
public static final String CONFIG_BASE_PATH = "base_path";
public static final String CONFIG_MAX_BODY_SIZE = "max_body_size";
public static final String CONFIG_RESOURCES = "resources";
public static final String CONFIG_FEATURES = "features";
public static final String CONFIG_BINDERS = "binders";
public static final int DEFAULT_MAX_BODY_SIZE = 1024 * 1000; // Default max body size to 1MB
private JsonObject config;
@Override
public void init(Vertx vertx, Container container) {
config = container.config();
if (config == null) {
throw new IllegalStateException("The vert.x container configuration is null");
}
}
/**
* Returns the base URI used by Jersey
*
* @return base URI
*/
@Override
public URI getBaseUri() {
checkState();
String basePath = config.getString(CONFIG_BASE_PATH, "/");
// TODO: Does basePath need the trailing "/"?
// if (!basePath.endsWith("/")) {
// basePath += "/";
// }
return URI.create(basePath);
}
/**
* Returns the Jersey {@link org.glassfish.jersey.server.ApplicationHandler} instance
*
* @return the application handler instance
*/
@Override
public ApplicationHandlerDelegate getApplicationHandler() {
ApplicationHandler handler = new ApplicationHandler(getResourceConfig());
return new DefaultApplicationHandlerDelegate(handler);
}
/**
* The max body size in bytes when reading the vert.x input stream
*
* @return the max body size bytes
*/
@Override
public int getMaxBodySize() {
checkState();
return config.getNumber(CONFIG_MAX_BODY_SIZE, DEFAULT_MAX_BODY_SIZE).intValue();
}
protected ResourceConfig getResourceConfig() {
checkState();
JsonArray resources = config.getArray(CONFIG_RESOURCES, null);
if (resources == null || resources.size() == 0) {
throw new RuntimeException("At least one resource package name must be specified in the config " +
CONFIG_RESOURCES);
}
String[] resourceArr = new String[resources.size()];
for (int i = 0; i < resources.size(); i++) {
resourceArr[i] = String.valueOf(resources.get(i));
}
ResourceConfig rc = new ResourceConfig();
rc.packages(resourceArr);
ClassLoader cl = Thread.currentThread().getContextClassLoader();
JsonArray features = config.getArray(CONFIG_FEATURES, null);
if (features != null && features.size() > 0) {
for (int i = 0; i < features.size(); i++) {
try {
Class<?> clazz = cl.loadClass(String.valueOf(features.get(i)));
rc.register(clazz);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
}
JsonArray binders = config.getArray(CONFIG_BINDERS, null);
if (binders != null && binders.size() > 0) {
for (int i = 0; i < binders.size(); i++) {
try {
Class<?> clazz = cl.loadClass(String.valueOf(binders.get(i)));
rc.register(clazz.newInstance());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
return rc;
}
private void checkState() {
if (config == null) {
throw new IllegalStateException("The configurator has not been initialized.");
}
}
}
| src/main/java/com/englishtown/vertx/jersey/impl/DefaultJerseyHandlerConfigurator.java | /*
* The MIT License (MIT)
* Copyright © 2013 Englishtown <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the “Software”), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.englishtown.vertx.jersey.impl;
import com.englishtown.vertx.jersey.ApplicationHandlerDelegate;
import com.englishtown.vertx.jersey.JerseyHandlerConfigurator;
import org.glassfish.jersey.server.ApplicationHandler;
import org.glassfish.jersey.server.ResourceConfig;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.json.JsonArray;
import org.vertx.java.core.json.JsonObject;
import org.vertx.java.platform.Container;
import java.net.URI;
/**
* Default {@link JerseyHandlerConfigurator} implementation
*/
public class DefaultJerseyHandlerConfigurator implements JerseyHandlerConfigurator {
public static final String CONFIG_BASE_PATH = "base_path";
public static final String CONFIG_MAX_BODY_SIZE = "max_body_size";
public static final String CONFIG_RESOURCES = "resources";
public static final String CONFIG_FEATURES = "features";
public static final String CONFIG_BINDERS = "binders";
public static final int DEFAULT_MAX_BODY_SIZE = 1024 * 1000; // Default max body size to 1MB
private JsonObject config;
@Override
public void init(Vertx vertx, Container container) {
config = container.config();
if (config == null) {
throw new IllegalStateException("The vert.x container configuration is null");
}
}
/**
* Returns the base URI used by Jersey
*
* @return base URI
*/
@Override
public URI getBaseUri() {
checkState();
String basePath = config.getString(CONFIG_BASE_PATH, "/");
// TODO: Does basePath need the trailing "/"?
// if (!basePath.endsWith("/")) {
// basePath += "/";
// }
return URI.create(basePath);
}
/**
* Returns the Jersey {@link org.glassfish.jersey.server.ApplicationHandler} instance
*
* @return the application handler instance
*/
@Override
public ApplicationHandlerDelegate getApplicationHandler() {
ApplicationHandler handler = new ApplicationHandler(getResourceConfig());
return new DefaultApplicationHandlerDelegate(handler);
}
/**
* The max body size in bytes when reading the vert.x input stream
*
* @return the max body size bytes
*/
@Override
public int getMaxBodySize() {
checkState();
return config.getNumber(CONFIG_MAX_BODY_SIZE, DEFAULT_MAX_BODY_SIZE).intValue();
}
protected ResourceConfig getResourceConfig() {
checkState();
JsonArray resources = config.getArray(CONFIG_RESOURCES, null);
if (resources == null || resources.size() == 0) {
throw new RuntimeException("At lease one resource package name must be specified in the config " +
CONFIG_RESOURCES);
}
String[] resourceArr = new String[resources.size()];
for (int i = 0; i < resources.size(); i++) {
resourceArr[i] = String.valueOf(resources.get(i));
}
ResourceConfig rc = new ResourceConfig();
rc.packages(resourceArr);
ClassLoader cl = Thread.currentThread().getContextClassLoader();
JsonArray features = config.getArray(CONFIG_FEATURES, null);
if (features != null && features.size() > 0) {
for (int i = 0; i < features.size(); i++) {
try {
Class<?> clazz = cl.loadClass(String.valueOf(features.get(i)));
rc.register(clazz);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
}
JsonArray binders = config.getArray(CONFIG_BINDERS, null);
if (binders != null && binders.size() > 0) {
for (int i = 0; i < binders.size(); i++) {
try {
Class<?> clazz = cl.loadClass(String.valueOf(binders.get(i)));
rc.register(clazz.newInstance());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
return rc;
}
private void checkState() {
if (config == null) {
throw new IllegalStateException("The configurator has not been initialized.");
}
}
}
| Fixed typo in message
| src/main/java/com/englishtown/vertx/jersey/impl/DefaultJerseyHandlerConfigurator.java | Fixed typo in message | <ide><path>rc/main/java/com/englishtown/vertx/jersey/impl/DefaultJerseyHandlerConfigurator.java
<ide> JsonArray resources = config.getArray(CONFIG_RESOURCES, null);
<ide>
<ide> if (resources == null || resources.size() == 0) {
<del> throw new RuntimeException("At lease one resource package name must be specified in the config " +
<add> throw new RuntimeException("At least one resource package name must be specified in the config " +
<ide> CONFIG_RESOURCES);
<ide> }
<ide> |
|
JavaScript | mit | bb2df2abc06dbe8f1408995051b1a3eaedf2ea37 | 0 | konfirm/konflux,konfirm/konflux,daanvanham/konflux,daanvanham/konflux | /*
* __ Konflux (version 0.2.5, rev 331) - a javascript helper library
* /\_\
* /\/ / / Copyright 2012-2013, Konfirm (Rogier Spieker)
* \ / / Releases under the MIT license
* \/_/ More information: http://konfirm.net/konflux
*/
;(function(window, undefined){
"use strict";
var document = window.document,
// Private functions
/**
* Obtain a reference to a specific buffer object, creates one if it does not exist
* @name buffer
* @type function
* @access internal
* @param string object name
* @return object
*/
buffer = function(key)
{
if (typeof _buffer[key] === 'undefined')
_buffer[key] = {};
return _buffer[key];
},
/**
* Obtain the milliseconds since the UNIX Epoch (Jan 1, 1970 00:00:00)
* @name time
* @type function
* @access internal
* @return int milliseconds
*/
time = function()
{
return Date.now ? Date.now() : (new Date()).getTime();
},
/**
* Shorthand method for creating a combined version of several objects
* @name combine
* @type function
* @access internal
* @param object 1
* @param object ...
* @param object N
* @return function constructor
*/
combine = function()
{
var obj = {},
i, p;
for (i = 0; i < arguments.length; ++i)
if (typeof arguments[i] === 'object')
for (p in arguments[i])
obj[p] = arguments[i][p];
return obj;
},
/**
* Shorthand method creating object prototypes
* @name proto
* @type function
* @access internal
* @param function prototype
* @param object extension
* @return function constructor
*/
proto = function(construct, prototype)
{
var obj = construct || function(){};
if (prototype)
{
obj.prototype = typeof prototype === 'function' ? new prototype : prototype;
obj.prototype.constructor = obj;
}
return obj;
},
/**
* Obtain the elapsed time since Konflux started (roughly), using the format: [Nd ] hh:mm:ss.ms
* @name elapsed
* @type function
* @access internal
* @return string formatted time
*/
elapsed = function()
{
var delta = Math.abs((new Date()).getTime() - _timestamp),
days = Math.floor(delta / 86400000),
hours = Math.floor((delta -= days * 86400000) / 3600000),
minutes = Math.floor((delta -= hours * 3600000) / 60000),
seconds = Math.floor((delta -= minutes * 60000) / 1000),
ms = Math.floor(delta -= seconds * 1000);
return (days > 0 ? days + 'd ' : '') +
('00' + hours).substr(-2) + ':' +
('00' + minutes).substr(-2) + ':' +
('00' + seconds).substr(-2) + '.' +
('000' + ms).substr(-3);
},
/**
* Obtain an unique key, the key is guaranteed to be unique within the browser runtime
* @name unique
* @type function
* @access internal
* @return string key
*/
unique = function()
{
return (++_count + time() % 86400000).toString(36);
},
/**
* Verify whether given argument is empty
* @name empty
* @type function
* @access internal
* @param mixed variable to check
` * @note The function follows PHP's empty function; null, undefined, 0, '', '0' and false are all considered empty
*/
empty = function(p)
{
var types = {
'object': function(o){if (o instanceof Array)return o.length > 0; for (o in o)return true;return false},
'boolean': function(b){return b},
'number': function(n){return n !== 0},
'string': function(s){return !/^0?$/.test(p)}
};
if (typeof types[typeof p] === 'function' && types[typeof p](p))
return false;
return true;
},
/**
* Determine the type of given variable
* @name type
* @type function
* @access internal
* @param mixed variable
* @return string type
*/
type = function(variable)
{
return variable instanceof Array ? 'array' : typeof variable;
},
/**
* Does given object have given property
* @name hasProperty
* @type function
* @access internal
* @param object haystack
* @param string property
* @return bool available
*/
hasProperty = function(haystack, needle)
{
return !!(needle in haystack);
},
// Private properties
_buffer = {}, // singleton-like container, providing 'static' objects
_timestamp = time(), // rough execution start time
_count = 0,
konflux
; // end var
/**
* The Konflux object itself
* @name Konflux
* @type constructor function
* @access internal
* @return Konflux instance
* @note konflux is available both as (window.)konflux and (window.)kx
*/
function Konflux()
{
var kx = this;
/**
* Return konflux itself
* @name master
* @type method
* @access public
* @return object konflux
*/
kx.master = function()
{
return kx
};
/**
* Obtain the milliseconds since the UNIX Epoch (Jan 1, 1970 00:00:00)
* @name time
* @type method
* @access public
* @return int milliseconds
*/
kx.time = time;
/**
* Obtain the elapsed time since Konflux started (roughly), using the format: [Nd ] hh:mm:ss.ms
* @name elapsed
* @type method
* @access public
* @return string formatted time
*/
kx.elapsed = elapsed;
/**
* Obtain an unique key, the key is guaranteed to be unique within the browser runtime
* @name unique
* @type method
* @access public
* @return string key
*/
kx.unique = unique;
/**
* Shorthand method for creating a combined version of several objects
* @name combine
* @type function
* @access internal
* @param object 1
* @param object ...
* @param object N
* @return function constructor
*/
kx.combine = combine;
/**
* Verify whether given arguments are empty
* @name empty
* @type method
* @access public
* @param mixed variable1
* @param mixed variableN, ...
* @return bool variable is empty
*/
kx.empty = function()
{
var arg = Array.prototype.slice.call(arguments);
while (arg.length)
if (!empty(arg.shift()))
return false;
return true;
};
/**
* Determine the type of given variable
* @name type
* @type method
* @access public
* @param mixed variable
* @param bool object types
* @return string type
*/
kx.type = function(variable, objectTypes)
{
var result = type(variable),
name;
if (result === 'object' && objectTypes)
{
name = /(?:function\s+)?(.{1,})\(/i.exec(variable.constructor.toString());
if (name && name.length > 1)
result = name[1];
}
return result;
};
return this;
}
konflux = new Konflux();
/**
* Browser/feature detection
* @module browser
* @note available as konflux.browser / kx.browser
*/
function kxBrowser()
{
var browser = this,
support = {
touch: hasProperty(window, 'ontouchstart') || hasProperty(window.navigator, 'msMaxTouchPoints')
},
prefix,
ieVersion;
/**
* Determine whether or not the browser is Internet Explorer (4+)
* @name detectIE
* @type function
* @access internal
* @return mixed (boolean false if not IE, version number if IE)
*/
function detectIE()
{
// https://gist.github.com/527683 (Conditional comments only work for IE 5 - 9)
var node = document.createElement('div'),
check = node.getElementsByTagName('i'),
version = 3;
// Starting with IE 4 (as version is incremented before first use), an <i> element is added to
// the 'node' element surrounded by conditional comments. The 'check' variable is automatically updated
// to contain all <i> elements. These elements are not there if the browser does not support conditional
// comments or does not match the IE version
// Note that there are two conditions for the while loop; the innerHTML filling and the check, the while
// loop itself has no body (as it is closed off by a semi-colon right after declaration)
while (node.innerHTML = '<!--[if gt IE ' + (++version) + ']><i></i><![endif]-->', check.length && version < 10);
// Added IE's @cc_on trickery for browser which do not support conditional comments (such as IE10)
return version > 4 ? version : Function('/*@cc_on return document.documentMode@*/return false')();
}
/**
* Determine whether or not the browser has given feature in either the window or document scope
* @name hasFeature
* @type function
* @access internal
* @param string feature
* @return boolean has feature
*/
function hasFeature(feature)
{
return typeof support[feature] !== 'undefined' ? support[feature] : hasProperty(window, feature) || hasProperty(document, feature);
}
/**
* Obtain the vendor prefix for the current browser
* @name vendorPrefix
* @type function
* @access internal
* @return string prefix
*/
function vendorPrefix()
{
var vendor = ['O', 'ms', 'Moz', 'Icab', 'Khtml', 'Webkit'],
regex = new RegExp('^(' + vendor.join('|') + ')(?=[A-Z])'),
script = document.createElement('script'),
p;
for (p in script.style)
if (regex.test(p))
{
prefix = p.match(regex).shift();
break;
}
while (!prefix && vendor.length)
{
p = vendor.pop();
if (hasProperty(script.style, p + 'Opacity'))
prefix = p;
}
script = null;
return prefix;
};
/**
* Verify if the browser at hand is any version of Internet Explorer (4+)
* @name ie
* @type method
* @access public
* @return mixed (boolean false if not IE, version number if IE)
* @see detectIE
* @note this public implementation caches the result
*/
browser.ie = function()
{
if (typeof ieVersion === 'undefined')
ieVersion = detectIE();
return ieVersion;
};
/**
* Obtain the vendor prefix for the current browser
* @name prefix
* @type method
* @access public
* @return string prefix
* @note this public implementation caches the result
*/
browser.prefix = function()
{
if (!prefix)
prefix = vendorPrefix();
return prefix;
};
/**
* Test whether or not the browser at hand is aware of given feature(s) exist in either the window or document scope
* @name supports
* @type method
* @access public
* @param string feature
* @param string ...
* @return boolean support
* @note multiple features can be provided, in which case the return value indicates the support of all given features
*/
browser.supports = function()
{
var r = true,
i = arguments.length;
// test all the features given
while (r && --i >= 0)
r = r && hasFeature(arguments[i]);
return r;
};
/**
* Enable the HTML5 fullscreen mode for given element
* @name fullscreen
* @type method
* @access public
* @param DOMNode target
* @return bool success
* @note this method is highly experimental
*/
browser.fullscreen = function(target)
{
var check = ['fullScreen', 'isFullScreen'],
vendor = konflux.browser.prefix().toLowerCase(),
method, i;
if (!target)
target = document.documentElement;
for (i = 0, method = null; i < check.length, method === null; ++i)
{
method = hasProperty(document, check[i]) ? check[i] : vendor + konflux.string.ucFirst(check[i]);
if (!hasProperty(document, method))
method = null;
}
vendor = method.match(new RegExp('^' + vendor)) ? vendor : null;
vendor = (vendor || (document[method] ? 'cancel' : 'request')) + konflux.string.ucFirst((vendor ? (document[method] ? 'cancel' : 'request') : '') + konflux.string.ucFirst(check[0]));
(document[method] ? document : target)[vendor](Element.ALLOW_KEYBOARD_INPUT || null);
};
}
/**
* Handle URL's/URI's
* @module url
* @note available as konflux.url / kx.url
*/
function kxURL()
{
var url = this;
/**
* Parse given URL into its URI components
* @name parse
* @type function
* @access internal
* @param string location
* @return object result
*/
function parse(location)
{
// URL regex + key processing based on the work of Derek Watson's jsUri (http://code.google.com/p/jsuri/)
var match = /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/.exec(location),
prop = ['source', 'protocol', 'domain', 'userInfo', 'user', 'password', 'host', 'port', 'relative', 'path', 'directory', 'file', 'query', 'anchor'],
result = {};
while (prop.length)
result[prop.shift()] = match.length ? match.shift() : '';
if (result.query)
result.query.replace(/(?:^|&)([^&=]*)=?([^&]*)/g, function(a, b, c){
if (typeof result.query !== 'object')
result.query = {};
if (b)
result.query[b] = c;
});
return result;
}
/**
* The parsed url for the URL of the current page
* @name current
* @type object
* @access public
*/
url.current = window && window.location ? parse(window.location.href) : false;
/**
* Parse given URL into its URI components
* @name parse
* @type method
* @access public
* @param string url
* @return object result
*/
url.parse = parse;
/**
* Determine whether given URL is on the same domain as the page itself
* @name isLocal
* @type method
* @access public
* @param string location
* @return bool local
*/
url.isLocal = function(location)
{
return url.current.domain === url.parse(location).domain;
};
}
/**
* Style(sheet) manipulation
* @module style
* @note available as konflux.style / kx.style
*/
function kxStyle()
{
var style = this;
/**
* Obtain the script property notation for given property
* @name scriptProperty
* @type function
* @access internal
* @param string property
* @return string script property
* @note 'background-color' => 'backgroundColor'
*/
function scriptProperty(property)
{
var n = 0;
while ((n = property.indexOf('-', n)) >= 0)
property = property.substr(0, n) + property.charAt(++n).toUpperCase() + property.substring(n + 1);
return property;
}
/**
* Obtain the CSS property notation for given property
* @name cssProperty
* @type function
* @access internal
* @param string property
* @return string CSS property
* @note 'backgroundColor' => 'background-color'
*/
function cssProperty(property)
{
return property.replace(/([A-Z])/g, '-$1').toLowerCase();
}
/**
* Obtain all local stylesheets, where local is determined on a match of the domain
* @name getLocalStylesheets
* @type function
* @access internal
* @return array stylesheets
*/
function getLocalStylesheets()
{
var all = document.styleSheets,
list = [],
i;
for (i = 0; i < all.length; ++i)
if (konflux.url.isLocal(all[i].href))
list.push(all[i]);
return list;
}
/**
* Obtain specific stylesheets
* @name getStylesheet
* @type function
* @access internal
* @param string name [optional, default 'all'. Possible values 'first', 'last', 'all' or string filename]
* @param bool includeOffset [optional, default false, local stylesheets only]
* @return array stylesheets
*/
function getStylesheet(name, includeOffsite)
{
var list = includeOffsite ? document.styleSheets : getLocalStylesheets(),
match = [],
i;
switch (name)
{
// get the first stylesheet from the list of selected stylesheets
case 'first':
if (list.length > 0)
match = [list[0]];
break;
// get the last stylesheet from the list of selected stylesheets
case 'last':
if (list.length > 0)
match = [list[list.length - 1]];
break;
default:
// if no name was provided, return the entire list of (editable) stylesheets
if (name === 'all')
match = list;
else if (!name)
match = false;
// search for the stylesheet(s) whose href matches the given name
else if (list.length > 0)
for (i = 0; i < list.length; ++i)
{
if (list[i].href && list[i].href.substr(-name.length) === name)
match.push(list[i]);
else if (list[i].title && list[i].title === name)
match.push(list[i]);
}
break;
}
return match;
}
/**
* Obtain a stylesheet by its url or title
* @name findStylesheet
* @type function
* @access internal
* @param string url
* @param string name
* @return StyleSheet (bool false if not found)
*/
function findStylesheet(url, name)
{
var match = getStylesheet(url, true);
if (name && match.length === 0)
match = getStylesheet(name, true);
return match.length > 0 ? match[0] : false;
}
/**
* Create a new stylesheet
* @name createStylesheet
* @type function
* @access internal
* @param string url
* @param bool before (effectively true for being the first stylesheet, anything else for last)
* @param string name
* @return style node
*/
function createStylesheet(url, before, name)
{
var element = findStylesheet(url, name),
head = document.head || document.getElementsByTagName('head')[0],
i;
if (!element)
{
element = document.createElement(url ? 'link' : 'style');
element.setAttribute('type', 'text/css');
element.setAttribute('title', name || 'konflux.style.' + unique());
if (/link/i.test(element.nodeName))
{
element.setAttribute('rel', 'stylesheet');
element.setAttribute('href', url);
}
if (before && document.head.firstChild)
{
head.insertBefore(element, head.firstChild);
}
else
{
head.appendChild(element);
}
}
return element;
}
/**
* Parse the style declarations' cssText into key/value pairs
* @name getStyleProperties
* @type function
* @access internal
* @param CSS Rule
* @return Object key value pairs
*/
function getStyleProperties(declaration)
{
var list = declaration.split(/\s*;\s*/),
rules = {},
i, part;
for (i = 0; i < list.length; ++i)
{
part = list[i].split(/\s*:\s*/);
if (part[0] !== '')
rules[scriptProperty(part.shift())] = normalizeValue(part.join(':'));
}
return rules;
}
/**
* Normalize given selector string
* @name normalizeSelector
* @type function
* @access internal
* @param string selector
* @return string normalized selector
*/
function normalizeSelector(selector)
{
return selector.split(/\s+/).join(' ').toLowerCase();
}
/**
* Normalize given CSS value
* @name normalizeValue
* @type function
* @access internal
* @param string value
* @return string normalized value
*/
function normalizeValue(value)
{
var pattern = {
' ': /\s+/g, // minimize whitespace
'"': /["']/g, // unify quotes
',': /\s*,\s*/g, // unify whitespace around separators
'.': /\b0+\./g, // remove leading 0 from decimals
'0': /0(?:px|em|%|pt)\b/g // remove units from 0 value
},
p;
for (p in pattern)
value = value.replace(pattern[p], p);
// most browsers will recalculate hex color notation to rgb, so we do the same
if (pattern = value.match(/#([0-9a-f]+)/))
{
pattern = pattern[1];
if (pattern.length % 3 !== 0)
pattern = konflux.string.pad(pattern, 6, '0');
else if (pattern.length === 3)
pattern = pattern[0] + pattern[0] + pattern[1] + pattern[1] + pattern[2] + pattern[2];
value = 'rgb(' + [
parseInt(pattern[0] + pattern[1], 16),
parseInt(pattern[2] + pattern[3], 16),
parseInt(pattern[4] + pattern[5], 16)
].join(',') + ')';
}
return value;
}
/**
* Apply style rules to target DOMElement
* @name inline
* @type method
* @access public
* @param DOMElement target
* @param object style rules
* @return void
*/
style.inline = function(target, rules)
{
var p;
for (p in rules)
target.style[scriptProperty(p)] = rules[p];
};
/**
* Obtain a CSS selector for given element
* @name selector
* @type method
* @access public
* @param DOMElement target
* @return string selector
*/
style.selector = function(target)
{
var node = target.nodeName.toLowerCase(),
id = target.hasAttribute('id') ? '#' + target.getAttribute('id') : null,
classes = target.hasAttribute('class') ? '.' + target.getAttribute('class').split(' ').join('.') : null,
select = '';
if (arguments.length === 1 || id || classes)
select = node + (id || classes || '');
return kx.string.trim((!id && target.parentNode && target !== document.body ? style.selector(target.parentNode, true) + ' ' : '') + select);
};
/**
* Obtain a stylesheet by its name or by a mnemonic (first, last, all)
* @name sheet
* @type method
* @access public
* @param string target [optional, default 'all'. Possible values 'first', 'last', 'all' or string filename]
* @param bool editable [optional, default true]
* @return array stylesheets
*/
style.sheet = function(target, editable)
{
var list = getStylesheet(typeof target === 'string' ? target : null, editable === false ? true : false),
i;
if (typeof target.nodeName !== 'undefined')
for (i = 0; i < list.length; ++i)
if (list[i].ownerNode === target)
return [list[i]];
return list;
};
/**
* Create a new stylesheet, either as first or last
* @name create
* @type method
* @access public
* @param bool before all other stylesheets
* @return styleSheet
*/
style.create = function(name, before)
{
var element = createStylesheet(false, before, name);
return element.sheet || false;
};
/**
* Load an external stylesheet, either as first or last
* @name load
* @type method
* @access public
* @param string url the url of the stylesheet to load
* @param function callback
* @param bool before all other style sheets
* @return style node (<link...> element
*/
style.load = function(url, callback, before)
{
var style = createStylesheet(url, before);
// if style is a StyleSheet object, it has the ownerNode property containing the actual DOMElement in which it resides
if (typeof style.ownerNode !== 'undefined')
{
style = style.ownerNode;
// it is safe to assume here that the stylesheet was loaded, hence we need to apply the callback (with a slight delay, so the order of returning and execution of the callback is the same for both load scenario's)
if (callback)
setTimeout(function(){
callback.apply(style, [style]);
}, 1);
}
else if (callback)
{
konflux.event.listen(style, 'load', function(e){
callback.apply(style, [style]);
});
}
return style;
};
/**
* Determine whether or not the given style (node) is editable
* @name isEditable
* @type method
* @access public
* @param Stylesheet object or DOMelement style/link
* @return bool editable
*/
style.isEditable = function(stylesheet)
{
var list = getLocalStylesheets(),
node = typeof stylesheet.ownerNode !== 'undefined' ? stylesheet.ownerNode : stylesheet,
i;
for (i = 0; i < list.length; ++i)
if (list[i].ownerNode === node)
return true;
return false;
};
/**
* Create and add a new style rule
* @name add
* @type method
* @access public
* @param string selector
* @param mixed rules (one of; object {property: value} or string 'property: value')
* @param mixed sheet (either a sheet object or named reference, like 'first', 'last' or file name)
* @return int index at which the rule was added
*/
style.add = function(selector, rules, sheet)
{
var rule = '',
find, p;
// make the rules into an object
if (typeof rules === 'string')
rules = getStyleProperties(rules);
// if rules isn't an object, we exit right here
if (typeof rules !== 'object')
return false;
// if no sheet was provided, or a string reference to a sheet was provided, resolve it
if (!sheet || typeof sheet === 'string')
sheet = getStylesheet(sheet || 'last');
// in case we now have a list of stylesheets, we either want one (if there's just one) or we add the style to all
if (sheet instanceof Array)
{
if (sheet.length === 1)
{
sheet = sheet[0];
}
else
{
rule = true;
for (p = 0; p < sheet.length; ++p)
rule = rule && style.add(selector, rules, sheet[p]);
return rule;
}
}
// populate the find buffer, so we can determine which style rules we actually need
find = style.find(selector, sheet);
for (p in rules)
if (!(p in find) || normalizeValue(find[p]) !== normalizeValue(rules[p]))
rule += (rule !== '' ? ';' : '') + cssProperty(p) + ':' + rules[p];
// finally, add the rules to the stylesheet
if (sheet.addRule)
return sheet.addRule(selector, rule);
else if (sheet.insertRule)
return sheet.insertRule(selector + '{' + rule + '}', sheet.cssRules.length);
return false;
};
/**
* Find all style rules for given selector (in optionally given sheet)
* @name find
* @type method
* @access public
* @param string selector
* @param mixed sheet [optional, either a sheet object or named reference, like 'first', 'last' or file name]
* @return object style rules
*/
style.find = function(selector, sheet)
{
var match = {},
rules, i, j;
if (selector)
selector = normalizeSelector(selector);
if (!sheet)
sheet = getStylesheet();
else if (!(sheet instanceof Array))
sheet = [sheet];
for (i = 0; i < sheet.length; ++i)
{
rules = typeof sheet[i].cssRules ? sheet[i].cssRules : sheet[i].rules;
if (rules)
for (j = 0; j < rules.length; ++j)
if (!selector || normalizeSelector(rules[j].selectorText) === selector)
match = combine(match, getStyleProperties(rules[j].style.cssText));
}
return match;
};
}
/**
* Number utils
* @module number
* @note available as konflux.number / kx.number
*/
function kxNumber()
{
var number = this;
/**
* Test wheter given input is an even number
* @name even
* @type method
* @access public
* @param number input
* @return bool even
*/
number.even = function(input)
{
return input % 2 === 0;
};
/**
* Test wheter given input is an odd number
* @name odd
* @type method
* @access public
* @param number input
* @return bool odd
*/
number.odd = function(input)
{
return !number.even(input);
};
/**
* Test wheter given input between the low and high values
* @name between
* @type method
* @access public
* @param number input
* @param number low
* @param number hight
* @return bool between
*/
number.between = function(input, low, high)
{
return input >= low && input <= high;
};
}
/**
* String utils
* @module string
* @note available as konflux.string / kx.string
*/
function kxString()
{
var string = this,
/**
* Javascript port of Java’s String.hashCode()
* (Based on http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/)
* @name hashCode
* @type function
* @access internal
* @param string input
* @return number hash (32bit integer)
*/
hashCode = function(s)
{
for (var r = 0, i = 0, l = s.length; i < l; ++i)
r = (r = r * 31 + s.charCodeAt(i)) & r;
return r;
},
/**
* Create a hash from a string
* @name hash
* @type function
* @access internal
* @param string source
* @return string hash
*/
hash = function(s)
{
var p = 8,
pad = ('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + s).substr(-(Math.ceil((s.length || 1) / p) * p)),
r = 0;
while (pad.length)
{
r += hashCode(pad.substr(0, p));
pad = pad.substr(p);
}
return Math.abs(r).toString(36);
},
/**
* Return the ASCII value of given character
* @name ord
* @type function
* @access internal
* @param string character
* @return number character code
*/
ord = function(s)
{
return s.charCodeAt(0);
},
/**
* Return the character corresponding with given ASCII value
* @name chr
* @type function
* @access internal
* @param number character code
* @return string character
*/
chr = function(n)
{
return String.fromCharCode(n);
},
/**
* Pad a string
* @name pad
* @type function
* @access internal
* @param string to pad
* @param number length
* @param string pad string [optional, default ' ']
* @param int pad type [optional, default PAD_RIGHT]
* @return padded string
*/
pad = function(s, n, c, t)
{
c = Array(n).join(c);
return (n -= s.length) > 0 && (t = t === string.PAD_LEFT ? n : (t === string.PAD_BOTH ? Math.ceil(n / 2): 0)) !== false
? (t > 0 ? c.substr(0, 1 + t) : '') + s + c.substr(0, 1 + n - t)
: s;
},
/**
* Generate a checksum for given string
* @name checksum
* @type function
* @access internal
* @param string source
* @return string checksum
*/
checksum = function(s)
{
for (var n = s.length, r = 0; n > 0; r += n * ord(s[--n]));
return Math.abs((r + '' + s.length) | 0).toString(36);
},
/**
* Generate a UUID
* @name uuid
* @type function
* @access internal
* @return string uuid
*/
uuid = function()
{
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c){
var r = Math.random() * 16 | 0;
return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16);
});
};
// 'constants'
string.PAD_LEFT = 1;
string.PAD_BOTH = 2;
string.PAD_RIGHT = 3;
/**
* Trim string from leading/trailing whitespace
* @name trim
* @type method
* @access public
* @param string to trim
* @return trimmed string
*/
string.trim = function(s)
{
var r = s.replace(/^\s\s*/, ''),
x = /\s/,
i = r.length;
while (x.test(r.charAt(--i)));
return r.slice(0, i + 1);
};
/**
* Reverse given string
* @name reverse
* @type method
* @access public
* @param string to reverse
* @return reversed string
*/
string.reverse = function(s)
{
for (var n = s.length, r = ''; n > 0; r += s[--n]);
return r;
};
/**
* Pad a string
* @name pad
* @type method
* @access public
* @param string to pad
* @param number length
* @param string pad string [optional, default ' ']
* @param int pad type [optional, default PAD_RIGHT]
* @return padded string
*/
string.pad = function(s, n, c, t)
{
return pad(s, n, c || ' ', t || string.PAD_RIGHT);
};
/**
* Uppercase the first character of given string
* @name ucFirst
* @type method
* @access public
* @param string of which to uppercase the first char
* @return string
*/
string.ucFirst = function(input)
{
return input.charAt(0).toUpperCase() + input.substr(1);
};
/**
* Create a hash from a string
* @name hash
* @type method
* @access public
* @param string source
* @return string hash
*/
string.hash = function(s)
{
return hash(s);
};
/**
* Generate a checksum for given string
* @name checksum
* @type method
* @access public
* @param string source
* @return string checksum
*/
string.checksum = checksum;
/**
* Generate a UUID
* @name uuid
* @type method
* @access public
* @return string uuid
*/
string.uuid = uuid;
}
/**
* Array utils
* @module array
* @note available as konflux.array / kx.array
*/
function kxArray()
{
	var array = this,
		/**
		 *  Verify whether given array contains given value (strict === comparison)
		 *  @name    contains
		 *  @type    function
		 *  @access  internal
		 *  @param   array haystack
		 *  @param   mixed value
		 *  @return  boolean contains
		 */
		contains = function(a, v)
		{
			for (var i = 0; i < a.length; ++i)
				if (a[i] === v)
					return true;
			return false;
		},
		/**
		 *  Return the difference between two arrays (items of array1 not present in array2)
		 *  @name    diff
		 *  @type    function
		 *  @access  internal
		 *  @param   array array1
		 *  @param   array array2
		 *  @return  array difference
		 */
		diff = function(a, b)
		{
			var ret = [],
				i;
			for (i = 0; i < a.length; ++i)
				if (!contains(b, a[i]))
					ret.push(a[i]);
			return ret;
		},
		/**
		 *  Create an array with values between (including) given start and end
		 *  @name    range
		 *  @type    function
		 *  @access  internal
		 *  @param   number start
		 *  @param   number end
		 *  @return  array range
		 */
		range = function(a, b)
		{
			var r = [];
			b -= a;
			while (r.length <= b)
				r.push(a + r.length);
			return r;
		},
		/**
		 *  Shuffle given array in place (Fisher-Yates style swap loop)
		 *  @name    shuffle
		 *  @type    function
		 *  @access  internal
		 *  @param   array source
		 *  @return  array shuffled
		 */
		shuffle = function(a)
		{
			//  Math.floor is the correct way to truncate the random index;
			//  the previous parseInt(<number>) coerced through a string, which
			//  is slower and unsafe for exponent-notation numbers
			for (var j, x, i = a.length; i; j = Math.floor(Math.random() * i), x = a[--i], a[i] = a[j], a[j] = x);
			return a;
		};
	//  expose
	/**
	 *  Does the array contain given value
	 *  @name    contains
	 *  @type    method
	 *  @access  public
	 *  @param   array haystack
	 *  @param   mixed value
	 *  @return  boolean contains
	 */
	array.contains = contains;
	/**
	 *  Return the difference between two arrays
	 *  @name    diff
	 *  @type    method
	 *  @access  public
	 *  @param   array array1
	 *  @param   array array2
	 *  @return  array difference
	 */
	array.diff = diff;
	/**
	 *  Create an array with values between (including) given start and end
	 *  @name    range
	 *  @type    method
	 *  @access  public
	 *  @param   number start
	 *  @param   number end
	 *  @return  array range
	 */
	array.range = range;
	/**
	 *  Shuffle given array
	 *  @name    shuffle
	 *  @type    method
	 *  @access  public
	 *  @param   array source
	 *  @return  array shuffled
	 */
	array.shuffle = shuffle;
}
/**
* Event attachment handler
* @module event
* @note available as konflux.event / kx.event
*/
function kxEvent()
{
	var event = this,
		queue = buffer('event.queue'),
		touch = konflux.browser.supports('touch'),
		/**
		 *  Ready state handler, removes all relevant triggers and executes any handler that is set
		 *  @name    ready
		 *  @type    function
		 *  @access  internal
		 *  @param   event (when fired via the legacy IE path this may be undefined)
		 *  @return  void
		 */
		ready = function(e){
			var run = false,
				p;
			if (document.removeEventListener)
			{
				document.removeEventListener('DOMContentLoaded', ready, false);
				window.removeEventListener('load', ready, false);
				run = true;
			}
			else if (document.readyState === 'complete')
			{
				//  detach the exact event name that event.ready attached
				//  ('onreadystatechange'); the previous code detached the
				//  non-existent 'onreadystate', leaving the handler attached
				//  and firing on every subsequent readystate change
				document.detachEvent('onreadystatechange', ready);
				window.detachEvent('onload', ready);
				run = true;
			}
			if (run && queue.ready)
				for (p in queue.ready)
					queue.ready[p].call(e);
		},
		/**
		 *  Unify the event object, which makes event more consistent across browsers
		 *  @name    unifyEvent
		 *  @type    function
		 *  @access  internal
		 *  @return  Event object
		 *  @note    for mouse/drag/drop events a kxPoint is attached as evt.mouse,
		 *           falling back from pageX/Y to clientX/Y + document scroll offsets
		 */
		unifyEvent = function(e)
		{
			var evt = e || window.event;
			if (typeof evt.target === 'undefined')
				evt.target = typeof evt.srcElement !== 'undefined' ? evt.srcElement : null;
			if (/^mouse[a-z]+|drag[a-z]+|drop$/i.test(evt.type))
			{
				evt.mouse = new kxPoint(
					evt.pageX ? evt.pageX : (evt.clientX ? evt.clientX + document.body.scrollLeft + document.documentElement.scrollLeft : 0),
					evt.pageY ? evt.pageY : (evt.clientY ? evt.clientY + document.body.scrollTop + document.documentElement.scrollTop : 0)
				);
			}
			return evt;
		};
	/**
	 *  Is the browser capable of touch events
	 *  @name    hasTouch
	 *  @type    method
	 *  @access  public
	 *  @return  bool is touch device
	 */
	event.hasTouch = function()
	{
		return touch;
	};
	/**
	 *  A custom DOMReady handler
	 *  @name    ready
	 *  @type    method
	 *  @access  public
	 *  @param   function handler
	 *  @return  void
	 */
	event.ready = function(handler)
	{
		//  the document is ready already
		if (document.readyState === 'complete')
			return setTimeout(handler, 1); //  make sure we run the 'event' asynchronously
		//  we cannot use the event.listen method, as we need very different event listeners
		if (typeof queue.ready === 'undefined')
		{
			queue.ready = [];
			if (document.addEventListener)
			{
				//  prefer the 'DOM ready' event
				document.addEventListener('DOMContentLoaded', ready, false);
				//  failsafe to window.onload
				window.addEventListener('load', ready, false);
			}
			else
			{
				//  the closest we can get to 'DOMContentLoaded' in IE, this is still prior to onload
				document.attachEvent('onreadystatechange', ready);
				//  again the failsafe, now IE style
				window.attachEvent('onload', ready);
			}
		}
		queue.ready.push(handler);
	};
	/**
	 *  Add event listeners to target
	 *  @name    listen
	 *  @type    method
	 *  @access  public
	 *  @param   DOMElement target
	 *  @param   string event type (or comma separated list / array of types)
	 *  @param   function handler
	 *  @return  object event (chainable)
	 */
	event.listen = function(target, type, handler)
	{
		var delegate = function(e){handler.apply(target, [unifyEvent(e)]);},
			list = typeof type === 'string' ? type.split(',') : type,
			i;
		for (i = 0; i < list.length; ++i)
		{
			if (target.addEventListener)
				target.addEventListener(list[i], delegate, false);
			else if (target.attachEvent)
				target.attachEvent('on' + list[i], delegate);
			else
				target['on' + list[i]] = delegate;
		}
		return event;
	};
}
/**
* Timing utils
* @module timing
* @note available as konflux.timing / kx.timing
* @TODO documentation (honestly... what DOES this do??)
*/
function kxTiming()
{
	/**
	 *  Single delayed call; fires the handler once after timeout ms unless cancelled
	 *  @name    kxDelay
	 *  @type    class
	 *  @access  internal
	 *  @param   function handler
	 *  @param   number   timeout (milliseconds)
	 *  @param   string   reference
	 *  @return  kxDelay object
	 */
	function kxDelay(handler, timeout, reference)
	{
		var delay = this,
			timer = null;
		//  stop the pending timeout (a no-op once it has fired)
		function halt()
		{
			clearTimeout(timer);
		}
		/**
		 *  Cancel the delayed call
		 *  @name    cancel
		 *  @type    method
		 *  @access  public
		 *  @return  void
		 */
		delay.cancel = function()
		{
			halt();
		};
		//  arm the timer immediately; it clears itself right before invoking the handler
		timer = setTimeout(function(){
			halt();
			handler.call();
		}, timeout);
	}

	var timing = this,
		stack = buffer('timing.delay');

	/**
	 *  Cancel and forget the delay stored under given reference
	 *  @name    remove
	 *  @type    function
	 *  @access  internal
	 *  @param   string reference
	 *  @return  void
	 */
	function discard(reference)
	{
		if (typeof stack[reference] !== 'undefined')
		{
			//  cancel the pending delay, then drop it from the stack
			stack[reference].cancel();
			delete stack[reference];
		}
	}
	/**
	 *  (Re)schedule a delayed handler call; an existing delay with the same
	 *  reference is cancelled first. Without a reference, the handler source
	 *  (or a unique key) identifies the delay.
	 *  @name    schedule
	 *  @type    function
	 *  @access  internal
	 *  @param   function handler
	 *  @param   number   delay (milliseconds)
	 *  @param   string   reference [optional]
	 *  @return  kxDelay object
	 */
	function schedule(handler, delay, reference)
	{
		if (reference)
			discard(reference);
		else
			reference = handler.toString() || unique();
		return stack[reference] = new kxDelay(handler, delay, reference);
	}

	timing.remove = discard;
	timing.create = schedule;
}
/**
* Observer object, handles subscriptions to messages
* @module observer
* @note available as konflux.observer / kx.observer
*/
function kxObserver()
{
	var observer = this,
		subscription = buffer('observer.subscriptions'),
		active = buffer('observer.active'),
		/**
		 *  Create the subscription stack if it does not exist
		 *  @name    ensureSubscriptionStack
		 *  @type    function
		 *  @access  internal
		 *  @param   string stack name
		 *  @return  void
		 */
		ensureSubscriptionStack = function(s)
		{
			if (typeof subscription[s] === 'undefined') subscription[s] = [];
		},
		/**
		 *  Add handler to specified stack
		 *  @name    add
		 *  @type    function
		 *  @access  internal
		 *  @param   string stack name
		 *  @param   function handler
		 *  @return  int total number of subscriptions in this stack
		 */
		add = function(s, f)
		{
			ensureSubscriptionStack(s);
			return subscription[s].push(f);
		},
		/**
		 *  Disable a handler for specified stack
		 *  @name    disable
		 *  @type    function
		 *  @access  internal
		 *  @param   string stack name
		 *  @param   function handler
		 *  @return  void
		 *  @note    this method is used from the Observation object, which would influence the number of
		 *           subscriptions if the subscription itself was removed immediately
		 *           (disabled slots are set to false and swept later by remove())
		 */
		disable = function(s, f)
		{
			for (var i = 0; i < subscription[s].length; ++i)
				if (subscription[s][i] === f)
					subscription[s][i] = false;
		},
		/**
		 *  Remove specified handler (and all disabled handlers) from specified stack
		 *  @name    remove
		 *  @type    function
		 *  @access  internal
		 *  @param   string stack name
		 *  @param   function handler [optional]
		 *  @return  array removed handlers
		 */
		remove = function(s, f)
		{
			//  r collects removed (falsy/matching) handlers, n collects the survivors
			var r = [], n = [], i;
			ensureSubscriptionStack(s);
			for (i = 0; i < subscription[s].length; ++i)
				(!subscription[s][i] || subscription[s][i] === f ? r : n).push(subscription[s][i]);
			subscription[s] = n;
			return r;
		},
		/**
		 *  Flush specified stack
		 *  @name    flush
		 *  @type    function
		 *  @access  internal
		 *  @param   string stack name
		 *  @return  array removed handlers (false if the stack did not exist);
		 */
		flush = function(s)
		{
			var r = false;
			if (typeof subscription[s] !== 'undefined')
			{
				r = subscription[s];
				delete subscription[s];
			}
			return r;
		},
		/**
		 *  Trigger the handlers in specified stack
		 *  @name    trigger
		 *  @type    function
		 *  @access  internal
		 *  @param   string stack name
		 *  @param   mixed arg1 ... argN
		 *  @return  void
		 *  @note    notifies the exact stack first, then progressively broader
		 *           wildcard stacks ('a.b.c' -> 'a.b.*' -> 'a.*' -> '*');
		 *           a handler may stop the cascade via Observation.stop(),
		 *           which clears active[ref]
		 */
		trigger = function(s)
		{
			var arg = Array.prototype.slice.call(arguments),
				ref = unique(),
				part = s.split('.'),
				wildcard = false,
				name, i;
			//  the while-condition is always true; the loop is exited by the
			//  part.pop() break below, once all name segments are consumed
			while (part.length >= 0)
			{
				active[ref] = true;
				name = part.join('.') + (wildcard ? (part.length ? '.' : '') + '*' : '');
				wildcard = true;
				if (typeof subscription[name] !== 'undefined')
					for (i = 0; i < subscription[name].length; ++i)
					{
						if (!active[ref])
							break;
						if (subscription[name][i])
						{
							//  the first argument (the stack name) is replaced
							//  by an Observation object describing this notification
							arg[0] = new kxObservation(s, subscription[name][i], ref);
							subscription[name][i].apply(subscription[name][i], arg);
						}
					}
				if (!part.pop())
					break;
			}
			delete active[ref];
		};
	/**
	 *  Observation object, instances of this are be provided to all observer notification subscribers
	 *  @name    kxObservation
	 *  @type    class
	 *  @access  internal
	 *  @param   string type
	 *  @param   function handle
	 *  @param   string reference
	 *  @return  kxObservation object
	 */
	function kxObservation(type, handle, reference)
	{
		var observation = this;
		observation.type = type;
		observation.reference = reference;
		observation.timeStamp = time();
		observation.timeDelta = elapsed();
		/**
		 *  Unsubscribe from the current observer stack
		 *  @name    unsubscribe
		 *  @type    method
		 *  @access  public
		 *  @return  void
		 */
		observation.unsubscribe = function()
		{
			return disable(type, handle);
		};
		/**
		 *  Stop the execution of this Observation
		 *  @name    stop
		 *  @type    method
		 *  @access  public
		 *  @return  void
		 */
		observation.stop = function()
		{
			active[reference] = false;
		};
	};
	/**
	 *  Subscribe a handler to an observer stack
	 *  @name    subscribe
	 *  @type    method
	 *  @access  public
	 *  @param   string stack name (comma separated list allowed)
	 *  @param   function handle
	 *  @return  bool success
	 */
	observer.subscribe = function(stack, handle)
	{
		var list = stack.split(','),
			result = true,
			i;
		for (i = 0; i < list.length; ++i)
			result = (add(list[i], handle) ? true : false) && result;
		return result;
	};
	/**
	 *  Unsubscribe a handler from an observer stack
	 *  @name    unsubscribe
	 *  @type    method
	 *  @access  public
	 *  @param   string stack name (comma separated list allowed)
	 *  @param   function handle [optional, omit to flush the whole stack]
	 *  @return  array removed handlers
	 */
	observer.unsubscribe = function(stack, handle)
	{
		var list = stack.split(','),
			result = [],
			i;
		for (i = 0; i < list.length; ++i)
			result = result.concat(handle ? remove(list[i], handle) : flush(list[i]));
		return result;
	};
	/**
	 *  Notify all subscribers to a stack
	 *  @name    notify
	 *  @type    method
	 *  @access  public
	 *  @param   string stack name
	 *  @param   mixed arg1 ... argN
	 *  @return  void
	 */
	observer.notify = function()
	{
		return trigger.apply(observer, arguments);
	};
}
/**
* Breakpoint object, add/remove classes on specified object (or body) when specific browser dimensions are met
* (triggers observations when viewport dimensions change)
* @module breakpoint
* @note available as konflux.breakpoint / kx.breakpoint
*/
function kxBreakpoint()
{
	var breakpoint = this,
		dimensionStack = buffer('breakpoint.dimension'),
		ratioStack = buffer('breakpoint.ratio'),
		current = null,
		timer = null,
		ratioTimer = null,
		/**
		 *  Handle browser window resize events, matching the most appropriate size
		 *  @name    resize
		 *  @type    function
		 *  @access  internal
		 *  @param   event
		 *  @return  void
		 */
		resize = function(e)
		{
			var dimension = match(window.innerWidth || document.documentElement.clientWidth || document.body.clientWidth);
			//  if we don't have any valid dimension or the dimension is equal to the current one, stop
			if (!dimension || current === dimension)
				return false;
			//  is there a current set, remove it
			if (current)
				current.element.className = current.element.className.replace(current.expression, '');
			//  do we have an element to manipulate
			if (!dimension.element)
				dimension.element = document.body;
			//  set the given class on the element
			dimension.element.className = konflux.string.trim(dimension.element.className + ' ' + dimension.className);
			konflux.observer.notify('breakpoint.change', dimension.className);
			current = dimension;
		},
		/**
		 *  Determine the best matching dimension and return the settings
		 *  @name    match
		 *  @type    function
		 *  @access  internal
		 *  @param   int browser width
		 *  @return  object config (largest breakpoint below width, else smallest defined)
		 */
		match = function(width){
			var found, delta, min, p;
			for (p in dimensionStack)
			{
				min = !min ? p : Math.min(min, p);
				if (p < width && (!delta || width - p < delta))
				{
					found = p;
					delta = width - p;
				}
			}
			return dimensionStack[found] || dimensionStack[min] || false;
		},
		/**
		 *  Determine the best matching pixel ratio and set the defined classes
		 *  @name    pixelRatio
		 *  @type    function
		 *  @access  internal
		 *  @return  void
		 */
		pixelRatio = function(){
			var ratio = typeof window.devicePixelRatio !== 'undefined' ? window.devicePixelRatio : 1;
			if (typeof ratioStack[ratio] !== 'undefined')
				ratioStack[ratio].element.className = konflux.string.trim(ratioStack[ratio].element.className) + ' ' + ratioStack[ratio].className;
		};
	/**
	 *  Add breakpoint configuration
	 *  @name    add
	 *  @type    method
	 *  @access  public
	 *  @param   int width
	 *  @param   string classname
	 *  @param   DOMElement target (defaults to 'body')
	 *  @return  object breakpoint
	 *  @note    when a breakpoint is added, the internal resize handler will be triggered with a slight delay,
	 *           so if a suitable breakpoint is added it will be used immediately but _resize will occur only once.
	 *           This ought to prevent FOUC
	 */
	breakpoint.add = function(width, className, target)
	{
		clearTimeout(timer);
		dimensionStack[width] = {
			//  '\\s*' creates the regex \s* (whitespace); the previous '\s*'
			//  collapsed to a literal 's*' inside the string literal, so the
			//  class name was never stripped correctly on breakpoint change
			expression: new RegExp('\\s*' + className + '\\s*', 'g'),
			className: className,
			element: target
		};
		timer = setTimeout(function(){resize()}, 1);
		return breakpoint;
	};
	/**
	 *  Add pixel ratio configuration
	 *  @name    ratio
	 *  @type    method
	 *  @access  public
	 *  @param   int ratio
	 *  @param   string classname
	 *  @param   DOMElement target (defaults to 'body')
	 *  @return  object breakpoint
	 *  @note    as the ratio does not change, the best matching ratio will be added once
	 */
	breakpoint.ratio = function(ratio, className, target)
	{
		clearTimeout(ratioTimer);
		ratioStack[ratio] = {
			//  same escaping fix as in breakpoint.add: match whitespace, not 's*'
			expression: new RegExp('\\s*' + className + '\\s*', 'g'),
			className: className,
			element: target || document.body
		};
		ratioTimer = setTimeout(function(){pixelRatio()}, 1);
		return breakpoint;
	};
	//  listen to the resize event
	konflux.event.listen(window, 'resize', resize);
}
/**
* Point object, handling the (heavy) lifting of working with points
* @module point
* @note available as konflux.point / kx.point
*/
function kxPoint(x, y)
{
	var point = this;
	point.x = x || 0;
	point.y = y || 0;
	/**
	 *  Move the point object by given x and y
	 *  @name    move
	 *  @type    method
	 *  @access  public
	 *  @param   number x
	 *  @param   number y
	 *  @return  void
	 */
	point.move = function(x, y)
	{
		point.x += x;
		point.y += y;
	};
	/**
	 *  Scale the points coordinates by given factor
	 *  @name    scale
	 *  @type    method
	 *  @access  public
	 *  @param   number factor
	 *  @return  void
	 */
	point.scale = function(factor)
	{
		point.x *= factor;
		point.y *= factor;
	};
	/**
	 *  Subtract a point for the current point
	 *  @name    subtract
	 *  @type    method
	 *  @access  public
	 *  @param   object point
	 *  @return  kxPoint (new instance, current point unchanged)
	 */
	point.subtract = function(p)
	{
		return new kxPoint(point.x - p.x, point.y - p.y);
	};
	/**
	 *  Add a point to the current point
	 *  @name    add
	 *  @type    method
	 *  @access  public
	 *  @param   object point
	 *  @return  kxPoint (new instance, current point unchanged)
	 */
	point.add = function(p)
	{
		//  fixed: previously referenced the undeclared 'pint', which threw a
		//  ReferenceError on every call
		return new kxPoint(point.x + p.x, point.y + p.y);
	};
	/**
	 *  Get the distance between given and current point
	 *  @name    distance
	 *  @type    method
	 *  @access  public
	 *  @param   object point
	 *  @return  number distance
	 */
	point.distance = function(p)
	{
		return Math.sqrt(Math.pow(Math.abs(point.x - p.x), 2) + Math.pow(Math.abs(point.y - p.y), 2));
	};
	/**
	 *  Get the angle in radians between given and current point
	 *  @name    angle
	 *  @type    method
	 *  @access  public
	 *  @param   object point
	 *  @return  number angle
	 */
	point.angle = function(p)
	{
		return Math.atan2(point.x - p.x, point.y - p.y);
	};
}
/**
* Cookie object, making working with cookies a wee bit easier
* @module cookie
* @note available as konflux.cookie / kx.cookie
*/
function kxCookie()
{
	var cookie = this,
		//  in-memory mirror of document.cookie, key => string value
		jar = {},
		/**
		 *  Read the available cookie information and populate the jar variable
		 *  @name    init
		 *  @type    function
		 *  @access  internal
		 *  @return  void
		 */
		init = function()
		{
			var part = document.cookie.split(';'),
				data;
			while (part.length)
			{
				data = part.shift().split('=');
				//  values may themselves contain '=', hence the rejoin
				jar[konflux.string.trim(data.shift())] = konflux.string.trim(data.join('='));
			}
		},
		/**
		 *  Set a cookie
		 *  @name    setCookie
		 *  @type    function
		 *  @access  internal
		 *  @param   string key
		 *  @param   string value
		 *  @param   int expire [optional, default expire at the end of the session]
		 *  @param   string path [optional, default the current path]
		 *  @param   string domain [optional, default the current domain]
		 *  @return  void
		 *  @note    the syntax of setCookie is compatible with that of PHP's setCookie
		 *           this means that setting an empty value (string '' | null | false) or
		 *           an expiry time in the past, the cookie will be removed
		 */
		setCookie = function(key, value, expire, path, domain)
		{
			var pairs = [key + '=' + (typeof value === 'number' ? value : value || '')],
				date;
			//  an empty value (trailing '=') forces removal via a past expiry
			if (pairs[0].substr(-1) === '=')
				expire = -1;
			if (typeof expire !== 'undefined' && expire)
				date = new Date(expire);
			if (date)
			{
				//  NOTE(review): Date < number compares via Date.valueOf() coercion,
				//  i.e. "expiry lies in the past" — confirm this is the intent
				if (date < (new Date()).getTime() && typeof jar[key] !== 'undefined')
					delete jar[key];
				pairs.push('expires=' + date);
			}
			if (typeof path !== 'undefined' && path)
				pairs.push('path=' + path);
			if (typeof domain !== 'undefined' && domain)
				pairs.push('domain=' + domain);
			document.cookie = pairs.join(';');
			//  only mirror the value into the jar when the browser accepted the cookie
			if (document.cookie.indexOf(pairs.shift()) >= 0)
				jar[key] = value + '';
		},
		/**
		 *  Obtain a cookie value
		 *  @name    getCookie
		 *  @type    function
		 *  @access  internal
		 *  @param   string key
		 *  @return  string value (null if the cookie does not exist)
		 */
		getCookie = function(key)
		{
			return typeof jar[key] !== 'undefined' ? jar[key] : null;
		};
	/**
	 *  Get and/or set cookies
	 *  @name    value
	 *  @type    method
	 *  @access  public
	 *  @param   string key [optional, an object containing all cookies is returned if omitted]
	 *  @param   string value [optional, if no value is given the current value will be returned]
	 *  @param   int expire [optional, default expire at the end of the session]
	 *  @param   string path [optional, default the current path]
	 *  @param   string domain [optional, default the current domain]
	 *  @return  mixed value (the whole jar, or the value stored under key)
	 */
	cookie.value = function(key, value, expire, path, domain)
	{
		if (typeof key === 'undefined')
			return jar;
		//  if a second argument (value) was given, we update the cookie
		if (arguments.length >= 2)
			setCookie(key, value, expire, path, domain);
		return getCookie(key);
	}
	init();
}
/**
* Storage object, a simple wrapper for localStorage
* @module storage
* @note available as konflux.storage / kx.storage
*/
function kxStorage()
{
	var ls = this,
		//  values longer than maxSize characters are split into fragments
		maxSize = 2048,
		storage = typeof window.localStorage !== 'undefined' ? window.localStorage : false;
	/**
	 *  Combine stored fragments together into the original data string
	 *  @name    combineFragments
	 *  @type    function
	 *  @access  internal
	 *  @param   string data index
	 *  @return  string data combined (false when a fragment is missing or the length mismatches)
	 */
	function combineFragments(data)
	{
		var match, part, fragment, length, variable, i;
		if (data && (match = data.match(/^\[fragment:([0-9]+),([0-9]+),([a-z_]+)\]$/)))
		{
			fragment = parseInt(match[1]);
			length = parseInt(match[2]);
			variable = match[3];
			data = '';
			for (i = 0; i < fragment; ++i)
			{
				part = storage.getItem(variable + i);
				if (part !== null)
					data += part;
				else
					return false;
			}
			if (!data || data.length != length)
				return false;
		}
		return data;
	}
	/**
	 *  Split a large data string into several smaller fragments
	 *  @name    createFragments
	 *  @type    function
	 *  @access  internal
	 *  @param   string name
	 *  @param   string data
	 *  @return  bool success
	 */
	function createFragments(name, data)
	{
		var variable = '__' + name,
			fragment = Math.ceil(data.length / maxSize),
			i;
		//  localStorage.setItem returns undefined, so its return value cannot
		//  signal success; the previous 'success = success && setItem(...)'
		//  chain therefore short-circuited after the first call and never
		//  wrote any fragment. Write the pointer entry and every fragment
		//  unconditionally (a quota violation would throw).
		storage.setItem(name, '[fragment:' + fragment + ',' + data.length + ',' + variable + ']');
		for (i = 0; i < fragment; ++i)
			storage.setItem(variable + i, data.substring(i * maxSize, Math.min(i * maxSize + maxSize, data.length)));
		return true;
	}
	/**
	 *  Remove all fragmented keys
	 *  @name    dropFragments
	 *  @type    function
	 *  @access  internal
	 *  @param   array match (regex match of the fragment pointer entry)
	 *  @return  void
	 */
	function dropFragments(match)
	{
		var fragment = parseInt(match[1]),
			variable = match[3],
			i;
		for (i = 0; i < fragment; ++i)
			remove(variable + i);
	}
	/**
	 *  Obtain all data from localStorage
	 *  @name    getAll
	 *  @type    function
	 *  @access  internal
	 *  @return  mixed data (object map, or null when localStorage is unavailable)
	 */
	function getAll()
	{
		var result = null,
			i, key;
		if (storage)
		{
			result = {};
			for (i = 0; i < storage.length; ++i)
			{
				key = storage.key(i);
				result[key] = getItem(key);
			}
		}
		return result;
	}
	/**
	 *  Obtain the data for given name
	 *  @name    getItem
	 *  @type    function
	 *  @access  internal
	 *  @param   string name
	 *  @return  mixed data (false when missing or the checksum does not verify)
	 */
	function getItem(name)
	{
		var data = storage ? storage.getItem(name) : false;
		//  resolve fragment pointers to the combined payload first
		if (data && data.match(/^\[fragment:([0-9]+),([0-9]+),([a-z_]+)\]$/))
			data = combineFragments(data);
		//  payload format is '<checksum>:<json>'; only parse when the checksum verifies
		if (data && data.match(/^[a-z0-9]+:.*$/i))
		{
			data = /([a-z0-9]+):(.*)/i.exec(data);
			if (data.length > 2 && data[1] === konflux.string.checksum(data[2]))
				return JSON.parse(data[2]);
		}
		return data ? data : false;
	}
	/**
	 *  Set the data for given name
	 *  @name    setItem
	 *  @type    function
	 *  @access  internal
	 *  @param   string name
	 *  @param   mixed data
	 *  @return  mixed result (false when localStorage is unavailable)
	 */
	function setItem(name, data)
	{
		data = JSON.stringify(data);
		data = konflux.string.checksum(data) + ':' + data;
		if (storage)
			return data.length > maxSize ? createFragments(name, data) : storage.setItem(name, data);
		return false;
	}
	/**
	 *  Remove the data for given name
	 *  @name    remove
	 *  @type    function
	 *  @access  internal
	 *  @param   string name
	 *  @return  bool success
	 */
	function remove(name)
	{
		var data, match;
		if (storage)
		{
			data = storage.getItem(name);
			//  fragmented entries also drop their fragment keys
			if (data && (match = data.match(/^\[fragment:([0-9]+),([0-9]+),([a-z_]+)\]$/)))
				dropFragments(match);
			return storage.removeItem(name);
		}
		return false;
	}
	/**
	 *  Get the data for given name
	 *  @name    get
	 *  @type    method
	 *  @access  public
	 *  @param   string name [optional, omit to get all stored entries]
	 *  @return  mixed data
	 */
	ls.get = function(name)
	{
		return name ? getItem(name) : getAll();
	};
	/**
	 *  Set the data for given name
	 *  @name    set
	 *  @type    method
	 *  @access  public
	 *  @param   string name
	 *  @param   mixed data
	 *  @return  void
	 */
	ls.set = setItem;
	/**
	 *  Remove the data for given name
	 *  @name    remove
	 *  @type    method
	 *  @access  public
	 *  @param   string name
	 *  @return  bool success
	 */
	ls.remove = remove;
	/**
	 *  Get the amount of stored keys
	 *  @name    length
	 *  @type    method
	 *  @access  public
	 *  @return  number stored keys (false when localStorage is unavailable)
	 */
	ls.length = function()
	{
		return storage ? storage.length : false;
	};
	/**
	 *  Obtain all the keys
	 *  @name    keys
	 *  @type    method
	 *  @access  public
	 *  @return  Array keys
	 */
	ls.keys = function()
	{
		var key = getAll(),
			list = [],
			p;
		for (p in key)
			list.push(p);
		return list;
	};
	/**
	 *  Flush all stored items
	 *  @name    flush
	 *  @type    method
	 *  @access  public
	 *  @return  void
	 */
	ls.flush = function()
	{
		var list = ls.keys(),
			i;
		for (i = 0; i < list.length; ++i)
			remove(list[i]);
	};
}
/**
* Canvas object, allowing for chainable access to canvas methods
* @module canvas
* @note available as konflux.canvas / kx.canvas
* @TODO documentation
*/
function kxCanvas()
{
	var canvas = this;
	/**
	 *  Chainable wrapper around a 2d canvas context; all native methods and a
	 *  curated set of properties are relayed so every call returns the wrapper
	 *  @name    kxCanvasContext
	 *  @type    class
	 *  @access  internal
	 *  @param   DOMElement canvas
	 *  @return  kxCanvasContext object
	 */
	function kxCanvasContext(canvas)
	{
		var context = this;
		//  wire up the relayed methods/properties onto this wrapper
		function init()
		{
			var property = {
					globalAlpha: 1,
					globalCompositeOperation: 'source-over', //  source-over, source-in, source-out, source-atop, destination-over, destination-in, destination-out, destination-atop, lighter, copy, xor
					height: null, //  readonly
					lineWidth: 1,
					lineCap: 'butt', //  butt, round, square
					lineJoin: 'miter', //  round, bevel, miter
					miterLimit: 10,
					strokeStyle: '#000',
					fillStyle: '#000',
					shadowOffsetX: 0,
					shadowOffsetY: 0,
					shadowBlur: 0,
					shadowColor: 'transparent black',
					font: '10px sans-serif',
					textAlign: 'start', //  start, end, left, right, center
					textBaseLine: 'alphabetic', //  top, hanging, middle, alphabetic, ideographic, bottom
					width: null //  readonly
				},
				p;
			context.ctx2d = canvas.getContext('2d');
			//  relay all methods
			for (p in context.ctx2d)
				if (typeof context.ctx2d[p] === 'function')
					context[p] = relayMethod(context.ctx2d[p]);
			//  relay all properties (as we want chainability)
			for (p in property)
			{
				context[p] = relayProperty(p);
				context[p](property[p]);
			}
		}
		//  wrap a native context method so it returns the chainable wrapper
		function relayMethod(f)
		{
			return function(){
				f.apply(context.ctx2d, arguments);
				return context;
			};
		}
		//  wrap a native context property as getter (no argument) / chainable setter
		function relayProperty(key)
		{
			return function(value){
				if (typeof value === 'undefined')
					return context.ctx2d[key];
				context.ctx2d[key] = value;
				return context;
			};
		}
		//  apply all color stops of given gradient and fill with it
		function gradientFill(gradient, color)
		{
			var p;
			for (p in color)
				gradient.addColorStop(p, color[p]);
			context.fillStyle(gradient);
			context.fill();
			return context;
		}
		/**
		 *  Get the canvas contents as data URL, or draw a data URL onto the canvas
		 *  @name    data
		 *  @type    method
		 *  @access  public
		 *  @param   string data [optional, omit to read]
		 *  @return  mixed (kxCanvasContext when writing, data URL string when reading)
		 */
		context.data = function(data)
		{
			var image;
			if (data)
			{
				image = new Image();
				image.src = data;
				context.ctx2d.clearRect(0, 0, canvas.width, canvas.height);
				context.drawImage(image, 0, 0);
				return context;
			}
			return canvas.toDataURL();
		};
		/**
		 *  Append the canvas element to given target (element or element id)
		 *  @name    append
		 *  @type    method
		 *  @access  public
		 *  @param   mixed target
		 *  @return  mixed (kxCanvasContext on success, false otherwise)
		 */
		context.append = function(target)
		{
			if (typeof target === 'string')
				target = document.getElementById(target);
			if (typeof target === 'object')
				return target.appendChild(canvas) ? context : false;
			return false;
		};
		/**
		 *  Configure the shadow in one call
		 *  @name    shadow
		 *  @type    method
		 *  @access  public
		 *  @param   number x, number y, number blur, mixed color [all optional]
		 *  @return  kxCanvasContext
		 */
		context.shadow = function(x, y, blur, color)
		{
			if (typeof x === 'number')
				context.shadowOffsetX(x);
			if (typeof y === 'number')
				context.shadowOffsetY(y);
			if (typeof blur === 'number')
				context.shadowBlur(blur);
			if (typeof color !== 'undefined')
				context.shadowColor(color);
			return context;
		};
		/**
		 *  Fill the current path with given (optional) color
		 *  @name    colorFill
		 *  @type    method
		 *  @access  public
		 *  @param   mixed color [optional]
		 *  @return  kxCanvasContext
		 */
		context.colorFill = function(color)
		{
			if (color)
				context.fillStyle(color);
			context.fill();
			return context;
		};
		/**
		 *  Configure the stroke style and stroke the current path
		 *  @name    strokeStyle
		 *  @type    method
		 *  @access  public
		 *  @param   mixed color [optional], number width [optional], string cap [optional]
		 *  @return  kxCanvasContext
		 */
		context.strokeStyle = function(color, width, cap)
		{
			//  write straight to the underlying 2d context; this method has
			//  replaced the relayed 'strokeStyle' property accessor, so calling
			//  context.strokeStyle(color) here (as the original did) recursed
			//  into this very function and overflowed the stack
			if (color)
				context.ctx2d.strokeStyle = color;
			if (width)
				context.lineWidth(width);
			if (cap)
				context.lineCap(cap);
			context.stroke();
			return context;
		};
		/**
		 *  Fill the current path with a radial gradient
		 *  @name    radialGradientFill
		 *  @type    method
		 *  @access  public
		 *  @param   kxPoint a, number ar, kxPoint b, number br, object color (offset => color)
		 *  @return  kxCanvasContext
		 */
		context.radialGradientFill = function(a, ar, b, br, color)
		{
			return gradientFill(context.ctx2d.createRadialGradient(a.x, a.y, ar, b.x, b.y, br), color);
		};
		/**
		 *  Fill the current path with a linear gradient
		 *  @name    linearGradientFill
		 *  @type    method
		 *  @access  public
		 *  @param   kxPoint a, kxPoint b, object color (offset => color)
		 *  @return  kxCanvasContext
		 */
		context.linearGradientFill = function(a, b, color)
		{
			return gradientFill(context.ctx2d.createLinearGradient(a.x, a.y, b.x, b.y), color);
		};
		/**
		 *  Add a full circle to the path
		 *  @name    circle
		 *  @type    method
		 *  @access  public
		 *  @param   number x, number y, number radius
		 *  @return  kxCanvasContext
		 */
		context.circle = function(x, y, radius)
		{
			context.beginPath();
			context.arc(x, y, radius, 0, Math.PI * 2, 1);
			context.closePath();
			return context;
		};
		/**
		 *  Stroke a line along given points; a final point equal to the first
		 *  closes the path instead of drawing a segment
		 *  @name    line
		 *  @type    method
		 *  @access  public
		 *  @param   kxPoint p1 ... pN
		 *  @return  kxCanvasContext
		 */
		context.line = function()
		{
			var len = arguments.length,
				i;
			context.beginPath();
			for (i = 0; i < len; ++i)
				if (i === len - 1 && arguments[i].x === arguments[0].x && arguments[i].y === arguments[0].y)
					context.closePath();
				else
					context[i === 0 ? 'moveTo' : 'lineTo'](arguments[i].x, arguments[i].y);
			context.stroke();
			return context;
		};
		init();
	}
	/**
	 *  Create a new canvas element of given size and wrap it
	 *  @name    create
	 *  @type    method
	 *  @access  public
	 *  @param   number width
	 *  @param   number height
	 *  @return  kxCanvasContext
	 */
	canvas.create = function(width, height)
	{
		var object = document.createElement('canvas');
		object.setAttribute('width', width);
		object.setAttribute('height', height);
		return canvas.init(object);
	};
	/**
	 *  Wrap an existing canvas element
	 *  @name    init
	 *  @type    method
	 *  @access  public
	 *  @param   DOMElement object
	 *  @return  kxCanvasContext
	 */
	canvas.init = function(object)
	{
		return new kxCanvasContext(object);
	};
	/**
	 *  Append a (new) canvas to given target
	 *  @name    append
	 *  @type    method
	 *  @access  public
	 *  @param   mixed target
	 *  @param   mixed mixed (kxCanvasContext, or width [+ optional height] to create one)
	 *  @return  mixed (kxCanvasContext on success, false otherwise)
	 */
	canvas.append = function(target, mixed)
	{
		if (typeof mixed === 'number')
			mixed = canvas.create(mixed, arguments.length > 2 ? arguments[2] : mixed);
		if (mixed instanceof kxCanvasContext)
			return mixed.append(target);
		return false;
	};
}
/**
* Logo object, creates the konflux logo on canvas
* @module logo
* @note available as konflux.logo / kx.logo
* @TODO documentation
*/
function kxLogo()
{
	var logo = this;
	//  shorthand creating a konflux.point instance
	function pt(x, y)
	{
		return new konflux.point(x, y);
	}
	//  named designs; each entry is a list of context-method => arguments maps
	var design = {
		konfirm: [
			{line:[pt(3, 44), pt(2, 35), pt(41, 66), pt(96, 22), pt(94, 31), pt(41, 75), pt(3, 44)],fillStyle:['rgb(25,25,25)'],fill:[]},
			{line:[pt(77, 0), pt(41, 25), pt(21, 12), pt(0, 25), pt(2, 35), pt(41, 66), pt(96, 22), pt(99, 12), pt(77, 0)],fillStyle:['rgb(7,221,246)'],fill:[]},
			{globalAlpha:[.2],line:[pt(0, 25), pt(2, 35), pt(41, 66), pt(96, 22), pt(99, 12), pt(41, 56), pt(0, 25)],fillStyle:['rgb(0, 0, 0)'],fill:[]}
		]
	};
	/**
	 *  Render given design (default 'konfirm') onto a fresh 100x75 canvas
	 *  @name    render
	 *  @type    function
	 *  @access  internal
	 *  @param   string design name [optional]
	 *  @return  mixed (kxCanvasContext on success, false for unknown designs)
	 */
	function render(name)
	{
		var sketch, step, method;
		name = name || 'konfirm';
		if (typeof design[name] === 'undefined')
			return false;
		sketch = konflux.canvas.create(100, 75);
		for (step = 0; step < design[name].length; ++step)
			for (method in design[name][step])
				sketch[method].apply(null, design[name][step][method]);
		return sketch;
	}
	/**
	 *  Render the logo and append it to given target
	 *  @name    append
	 *  @type    method
	 *  @access  public
	 *  @param   mixed target
	 *  @return  mixed append result
	 */
	logo.append = function(target)
	{
		return render().append(target);
	};
	/**
	 *  Obtain the logo as data URL
	 *  @name    data
	 *  @type    method
	 *  @access  public
	 *  @return  string data URL
	 */
	logo.data = function()
	{
		return render().data();
	};
	/**
	 *  Obtain the logo as image element
	 *  @name    image
	 *  @type    method
	 *  @access  public
	 *  @return  DOMElement img
	 */
	logo.image = function()
	{
		var img = document.createElement('img');
		img.src = logo.data();
		return img;
	};
}
// expose object references
// (constructors, exposed as-is so consumers can create their own instances)
konflux.point = kxPoint;
konflux.logo = kxLogo;
// expose object instances
// (shared singletons; all konflux modules use these same instances)
konflux.browser = new kxBrowser();
konflux.url = new kxURL();
konflux.style = new kxStyle();
konflux.number = new kxNumber();
konflux.string = new kxString();
konflux.array = new kxArray();
konflux.event = new kxEvent();
konflux.timing = new kxTiming();
konflux.observer = new kxObserver();
konflux.breakpoint = new kxBreakpoint();
konflux.cookie = new kxCookie();
konflux.storage = new kxStorage();
// make konflux available on the global (window) scope both as 'konflux' and 'kx'
window.konflux = window.kx = konflux;
})(window);
* __ Konflux (version 0.2.4, rev 317) - a javascript helper library
* /\_\
* /\/ / / Copyright 2012-2013, Konfirm (Rogier Spieker)
* \ / / Releases under the MIT license
* \/_/ More information: http://konfirm.net/konflux
*/
;(function(window, undefined){
"use strict";
var document = window.document,
	// Private functions
	/**
	 *  Obtain a reference to a specific buffer object, creates one if it does not exist
	 *  @name    buffer
	 *  @type    function
	 *  @access  internal
	 *  @param   string object name
	 *  @return  object
	 */
	buffer = function(key)
	{
		if (typeof _buffer[key] === 'undefined')
			_buffer[key] = {};
		return _buffer[key];
	},
	/**
	 *  Obtain the milliseconds since the UNIX Epoch (Jan 1, 1970 00:00:00)
	 *  @name    time
	 *  @type    function
	 *  @access  internal
	 *  @return  int milliseconds
	 */
	time = function()
	{
		return Date.now ? Date.now() : (new Date()).getTime();
	},
	/**
	 *  Shorthand method for creating a combined version of several objects
	 *  @name    combine
	 *  @type    function
	 *  @access  internal
	 *  @param   object 1
	 *  @param   object ...
	 *  @param   object N
	 *  @return  function constructor
	 *  @note    later arguments overwrite identically named properties of earlier ones
	 */
	combine = function()
	{
		var obj = {},
			i, p;
		for (i = 0; i < arguments.length; ++i)
			if (typeof arguments[i] === 'object')
				for (p in arguments[i])
					obj[p] = arguments[i][p];
		return obj;
	},
	/**
	 *  Shorthand method creating object prototypes
	 *  @name    proto
	 *  @type    function
	 *  @access  internal
	 *  @param   function prototype
	 *  @param   object extension
	 *  @return  function constructor
	 */
	proto = function(construct, prototype)
	{
		var obj = construct || function(){};
		if (prototype)
		{
			obj.prototype = typeof prototype === 'function' ? new prototype : prototype;
			obj.prototype.constructor = obj;
		}
		return obj;
	},
	/**
	 *  Obtain the elapsed time since Konflux started (roughly), using the format: [Nd ] hh:mm:ss.ms
	 *  @name    elapsed
	 *  @type    function
	 *  @access  internal
	 *  @return  string formatted time
	 */
	elapsed = function()
	{
		var delta = Math.abs((new Date()).getTime() - _timestamp),
			days = Math.floor(delta / 86400000),
			hours = Math.floor((delta -= days * 86400000) / 3600000),
			minutes = Math.floor((delta -= hours * 3600000) / 60000),
			seconds = Math.floor((delta -= minutes * 60000) / 1000),
			ms = Math.floor(delta -= seconds * 1000);
		return (days > 0 ? days + 'd ' : '') +
			('00' + hours).substr(-2) + ':' +
			('00' + minutes).substr(-2) + ':' +
			('00' + seconds).substr(-2) + '.' +
			('000' + ms).substr(-3);
	},
	/**
	 *  Obtain an unique key, the key is guaranteed to be unique within the browser runtime
	 *  @name    unique
	 *  @type    function
	 *  @access  internal
	 *  @return  string key
	 */
	unique = function()
	{
		return (++_count + time() % 86400000).toString(36);
	},
	/**
	 *  Verify whether given argument is empty
	 *  @name    empty
	 *  @type    function
	 *  @access  internal
	 *  @param   mixed variable to check
	 *  @note    The function follows PHP's empty function; null, undefined, 0, '', '0' and false are all considered empty
	 */
	empty = function(p)
	{
		//  each handler answers 'is NOT empty' for its type
		var types = {
			'object': function(o){if (o instanceof Array)return o.length > 0; for (o in o)return true;return false},
			'boolean': function(b){return b},
			'number': function(n){return n !== 0},
			//  NOTE(review): tests the outer p rather than its own s argument;
			//  identical in practice since it is only invoked as types[typeof p](p),
			//  but using s would be clearer — confirm before relying on s
			'string': function(s){return !/^0?$/.test(p)}
		};
		if (typeof types[typeof p] === 'function' && types[typeof p](p))
			return false;
		return true;
	},
	/**
	 *  Determine the type of given variable
	 *  @name    type
	 *  @type    function
	 *  @access  internal
	 *  @param   mixed variable
	 *  @return  string type ('array' for Array instances, else the typeof result)
	 */
	type = function(variable)
	{
		return variable instanceof Array ? 'array' : typeof variable;
	},
	// Private properties
	_buffer = {}, // singleton-like container, providing 'static' objects
	_timestamp = time(), // rough execution start time
	_count = 0,
	konflux
	; // end var
function Konflux()
{
	var kx = this;
	/**
	 * Return konflux itself
	 * @name master
	 * @type method
	 * @access public
	 * @return object konflux
	 */
	kx.master = function()
	{
		return kx;
	};
	/**
	 * Obtain the milliseconds since the UNIX Epoch (Jan 1, 1970 00:00:00)
	 * @name time
	 * @type method
	 * @access public
	 * @return int milliseconds
	 */
	kx.time = time;
	/**
	 * Obtain the elapsed time since Konflux started (roughly), using the format: [Nd ] hh:mm:ss.ms
	 * @name elapsed
	 * @type method
	 * @access public
	 * @return string formatted time
	 */
	kx.elapsed = elapsed;
	/**
	 * Obtain an unique key, the key is guaranteed to be unique within the browser runtime
	 * @name unique
	 * @type method
	 * @access public
	 * @return string key
	 */
	kx.unique = unique;
	/**
	 * Verify whether given arguments are empty
	 * @name empty
	 * @type method
	 * @access public
	 * @param mixed variable1
	 * @param mixed variableN, ...
	 * @return bool all variables are empty
	 */
	kx.empty = function()
	{
		var arg = Array.prototype.slice.call(arguments);
		//  a single non-empty argument makes the result false
		while (arg.length)
			if (!empty(arg.shift()))
				return false;
		return true;
	};
	/**
	 * Determine the type of given variable
	 * @name type
	 * @type method
	 * @access public
	 * @param mixed variable
	 * @param bool objectTypes (resolve the constructor name for objects)
	 * @return string type
	 */
	kx.type = function(variable, objectTypes)
	{
		var result = type(variable),
			name;
		if (result === 'object' && objectTypes)
		{
			//  extract the constructor name from its source representation
			name = /(?:function\s+)?(.{1,})\(/i.exec(variable.constructor.toString());
			if (name && name.length > 1)
				result = name[1];
		}
		return result;
	};
}
konflux = new Konflux();
/**
* Browser/feature detection
* @note available as konflux.browser / kx.browser
*/
function kxBrowser()
{
	var browser = this,
		ieVersion;
	/**
	 * Verify if the browser at hand is any version of Internet Explorer (4+)
	 * @name detectIE
	 * @type function
	 * @access internal
	 * @return mixed (boolean false if not IE, version number if IE)
	 */
	function detectIE()
	{
		// https://gist.github.com/527683 (Conditional comments only work for IE 5 - 9)
		var node = document.createElement('div'),
			check = node.getElementsByTagName('i'),
			version = 0;
		// Starting with IE 4 (as version is incremented before first use), an <i> element is added to
		// the 'node' element surrounded by conditional comments. The 'check' variable is automatically updated
		// to contain all <i> elements. These elements are not there if the browser does not support conditional
		// comments or does not match the IE version.
		// Note that there are two conditions for the while loop; the innerHTML filling and the check, the while
		// loop itself has no body (as it is closed off by a semi-colon right after declaration)
		while (
			node.innerHTML = '<!--[if gt IE ' + (++version) + ']><i></i><![endif]-->',
			check.length && version < 10
		);
		// Added IE's @cc_on trickery for browsers which do not support conditional comments (such as IE10)
		return version > 4 ? version : Function('/*@cc_on return document.documentMode@*/return false')();
	}
	/**
	 * Verify if the browser at hand is any version of Internet Explorer (4+)
	 * @name ie
	 * @type method
	 * @access public
	 * @return mixed (boolean false if not IE, version number if IE)
	 * @see detectIE
	 * @note this public implementation caches the result
	 */
	browser.ie = function()
	{
		if (typeof ieVersion === 'undefined')
			ieVersion = detectIE();
		return ieVersion;
	};
	/**
	 * Test whether or not the browser at hand is aware of given feature(s) exist in either the window or document scope
	 * @name supports
	 * @type method
	 * @access public
	 * @param string feature
	 * @param string ...
	 * @return boolean support
	 * @note multiple features can be provided, in which case the return value indicates the support of all given features
	 */
	browser.supports = function()
	{
		var r = true,
			i = arguments.length;
		while (r && --i >= 0)
			r = r && (typeof window[arguments[i]] !== 'undefined' || typeof document[arguments[i]] !== 'undefined');
		return r;
	};
}
/**
* Handle URL's/URI's
* @note available as konflux.url / kx.url
*/
function kxURL()
{
	var url = this;
	/**
	 * Split an URL into its components
	 * @name parse
	 * @type function
	 * @access internal
	 * @param string location
	 * @return object components (source, protocol, domain, userInfo, user, password, host, port, relative, path, directory, file, query, anchor)
	 * @note NOTE(review): if the pattern does not match at all, exec returns null and the while
	 *       loop below would throw - presumably any string matches this pattern; verify
	 */
	function parse(loc)
	{
		// URL regex + key processing based on the work of Derek Watson's jsUri (http://code.google.com/p/jsuri/)
		var match = /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/.exec(loc),
			prop = ['source', 'protocol', 'domain', 'userInfo', 'user', 'password', 'host', 'port', 'relative', 'path', 'directory', 'file', 'query', 'anchor'],
			result = {};
		//  consume the capture groups in lockstep with the property names
		while (prop.length)
			result[prop.shift()] = match.length ? match.shift() : '';
		//  turn the raw query string into a key/value object; replace() is used purely
		//  for its callback side effect, its return value is discarded
		if (result.query)
			result.query.replace(/(?:^|&)([^&=]*)=?([^&]*)/g, function(a, b, c){
				if (typeof result.query !== 'object')
					result.query = {};
				if (b)
					result.query[b] = c;
			});
		return result;
	}
	//  the parsed current location (boolean false outside a browser window context)
	url.current = window && window.location ? parse(window.location.href) : false;
	url.parse = parse;
	//  whether given location shares the domain of the current document
	url.isLocal = function(loc)
	{
		return url.current.domain === url.parse(loc).domain;
	};
}
/**
* Style(sheet) manipulation
* @note available as konflux.style / kx.style
*/
function kxStyle()
{
var style = this;
/**
* Obtain the script property notation for given property
* @name scriptProperty
* @type function
* @access internal
* @param string property
* @return string script property
* @note 'background-color' => 'backgroundColor'
*/
function scriptProperty(property)
{
	//  walk through every dash, removing it and uppercasing the character that follows
	//  ('background-color' becomes 'backgroundColor')
	var dash = 0;
	while ((dash = property.indexOf('-', dash)) >= 0)
		property = property.substr(0, dash) + property.charAt(++dash).toUpperCase() + property.substring(dash + 1);
	return property;
}
/**
* Obtain the CSS property notation for given property
* @name cssProperty
* @type function
* @access internal
* @param string property
* @return string CSS property
* @note 'backgroundColor' => 'background-color'
*/
function cssProperty(property)
{
	//  prefix every capital with a dash, then lowercase the whole result
	//  ('backgroundColor' becomes 'background-color')
	return property
		.replace(/([A-Z])/g, '-$1')
		.toLowerCase();
}
/**
* Obtain all local stylesheets, where local is determined on a match of the domain
* @name getLocalStylesheets
* @type function
* @access internal
* @return Array stylesheets
*/
function getLocalStylesheets()
{
	//  filter document.styleSheets down to the sheets whose href matches the current domain
	var sheets = document.styleSheets,
		local = [],
		n;
	for (n = 0; n < sheets.length; ++n)
		if (konflux.url.isLocal(sheets[n].href))
			local.push(sheets[n]);
	return local;
}
/**
* Obtain specific stylesheets
* @name getStylesheet
* @type function
* @access internal
* @param string name (optional, default 'all'. Possible values 'first', 'last', 'all' or string filename)
 * @param bool includeOffsite (optional, default false: local stylesheets only)
* @return Array stylesheets
*/
function getStylesheet(name, includeOffsite)
{
	//  offsite sheets are read-only in most browsers, hence the default of local-only
	var list = includeOffsite ? document.styleSheets : getLocalStylesheets(),
		match = [],
		i;
	switch (name)
	{
		// get the first stylesheet from the list of selected stylesheets
		case 'first':
			if (list.length > 0)
				match = [list[0]];
			break;
		// get the last stylesheet from the list of selected stylesheets
		case 'last':
			if (list.length > 0)
				match = [list[list.length - 1]];
			break;
		default:
			// if no name was provided, return the entire list of (editable) stylesheets
			if (!name || name === 'all')
				match = list;
			// search for the stylesheet(s) whose href ends with the given name, or whose title equals it
			else if (list.length > 0)
				for (i = 0; i < list.length; ++i)
				{
					if (list[i].href && list[i].href.substr(-name.length) === name)
						match.push(list[i]);
					else if (list[i].title && list[i].title === name)
						match.push(list[i]);
				}
			break;
	}
	return match;
}
/**
* Obtain a stylesheet by its url or title
* @name findStylesheet
* @type function
* @access internal
* @param string url
* @param string name
* @return StyleSheet (bool false if not found)
*/
function findStylesheet(url, name)
{
	//  look the sheet up by url first, falling back to the title when nothing matched
	var found = getStylesheet(url, true);
	if (found.length === 0 && name)
		found = getStylesheet(name, true);
	return found.length > 0 ? found[0] : false;
}
/**
* Create a new stylesheet
* @name createStylesheet
* @type function
* @access internal
* @param string url
* @param bool before (effectively true for being the first stylesheet, anything else for last)
* @param string name
* @return style node
*/
function createStylesheet(url, before, name)
{
	var element = findStylesheet(url, name),
		head = document.head || document.getElementsByTagName('head')[0];
	if (!element)
	{
		//  a url results in a <link>, otherwise an inline <style> element is created
		element = document.createElement(url ? 'link' : 'style');
		element.setAttribute('type', 'text/css');
		element.setAttribute('title', name || 'konflux.style.' + unique());
		if (/link/i.test(element.nodeName))
		{
			element.setAttribute('rel', 'stylesheet');
			element.setAttribute('href', url);
		}
		//  fix: use the resolved head reference; the original accessed document.head.firstChild
		//  directly, which throws in browsers without document.head (the very case the
		//  getElementsByTagName fallback above exists for)
		if (before && head.firstChild)
		{
			head.insertBefore(element, head.firstChild);
		}
		else
		{
			head.appendChild(element);
		}
	}
	return element;
}
/**
* Parse the style declarations' cssText into key/value pairs
* @name getStyleProperties
* @type function
* @access internal
* @param CSS Rule
* @return Object key value pairs
*/
function getStyleProperties(declaration)
{
	//  split 'prop: value; prop: value' declarations into an object of
	//  {scriptProperty: normalizedValue} pairs
	var rules = {},
		chunks = declaration.split(/\s*;\s*/),
		pair, n;
	for (n = 0; n < chunks.length; ++n)
	{
		pair = chunks[n].split(/\s*:\s*/);
		//  re-join the remainder so values containing ':' (e.g. urls) survive
		if (pair[0] !== '')
			rules[scriptProperty(pair.shift())] = normalizeValue(pair.join(':'));
	}
	return rules;
}
/**
* Normalize given selector string
* @name normalizeSelector
* @type function
* @access internal
* @param string selector
* @return string normalized selector
*/
function normalizeSelector(selector)
{
	//  collapse every whitespace run into a single space and lowercase the result
	return selector.replace(/\s+/g, ' ').toLowerCase();
}
/**
* Normalize given CSS value
* @name normalizeValue
* @type function
* @access internal
* @param string value
* @return string normalized value
*/
function normalizeValue(value)
{
	//  each pattern is replaced by its key, unifying equivalent CSS notations
	var pattern = {
			' ': /\s+/g, // minimize whitespace
			'"': /["']/g, // unify quotes
			',': /\s*,\s*/g, // unify whitespace around separators
			'.': /\b0+\./g, // remove leading 0 from decimals
			'0': /0(?:px|em|%|pt)\b/g // remove units from 0 value
		},
		p;
	for (p in pattern)
		value = value.replace(pattern[p], p);
	// most browsers will recalculate hex color notation to rgb, so we do the same
	// (note: 'pattern' is reused here as the hex match result)
	if (pattern = value.match(/#([0-9a-f]+)/))
	{
		pattern = pattern[1];
		// NOTE(review): hex strings whose length is not a multiple of 3 are padded with '0'
		// to 6 digits via konflux.string.pad - confirm this is the intended handling for
		// e.g. 4-digit hex values
		if (pattern.length % 3 !== 0)
			pattern = konflux.string.pad(pattern, 6, '0');
		// a 3-digit hex color is expanded by doubling each digit (#abc -> #aabbcc)
		else if (pattern.length === 3)
			pattern = pattern[0] + pattern[0] + pattern[1] + pattern[1] + pattern[2] + pattern[2];
		// rebuild as rgb(r,g,b) from the three hex digit pairs
		value = 'rgb(' + [
			parseInt(pattern[0] + pattern[1], 16),
			parseInt(pattern[2] + pattern[3], 16),
			parseInt(pattern[4] + pattern[5], 16)
		].join(',') + ')';
	}
	return value;
}
/**
* Obtain a stylesheet by its name or by a mnemonic (first, last, all)
* @name sheet
* @type method
* @access public
* @param string target (optional, default 'all'. Possible values 'first', 'last', 'all' or string filename)
* @param bool editable (optional, default true)
* @return Array stylesheets
*/
style.sheet = function(target, editable)
{
	var list = getStylesheet(typeof target === 'string' ? target : null, editable === false ? true : false),
		i;
	//  fix: guard against absent targets; the original dereferenced target.nodeName
	//  unconditionally, which throws a TypeError when style.sheet() is called without arguments
	if (target && typeof target.nodeName !== 'undefined')
		for (i = 0; i < list.length; ++i)
			if (list[i].ownerNode === target)
				return [list[i]];
	return list;
};
/**
* Create a new stylesheet, either as first or last
* @name create
* @type method
* @access public
* @param bool before all other stylesheets
* @return styleSheet
*/
style.create = function(name, before)
{
	//  delegate to the internal factory without an url, which yields an inline <style> element
	return createStylesheet(false, before, name);
};
/**
* Load an external stylesheet, either as first or last
* @name load
* @type method
* @access public
* @param string url the url of the stylesheet to load
* @param function callback
* @param bool before all other style sheets
* @return style node (<link...> element
*/
style.load = function(url, callback, before)
{
	//  note: this local 'style' shadows the kxStyle instance within this method
	var style = createStylesheet(url, before);
	// if style is a StyleSheet object, it has the ownerNode property containing the actual DOMElement in which it resides
	if (typeof style.ownerNode !== 'undefined')
	{
		style = style.ownerNode;
		// it is safe to assume here that the stylesheet was loaded, hence we need to apply the callback (with a slight delay, so the order of returning and execution of the callback is the same for both load scenario's)
		if (callback)
			setTimeout(function(){
				callback.apply(style, [style]);
			}, 1);
	}
	else if (callback)
	{
		//  a freshly created <link> element: wait for its load event before calling back
		konflux.event.listen(style, 'load', function(e){
			callback.apply(style, [style]);
		});
	}
	return style;
};
/**
* Determine whether or not the given style (node) is editable
* @name isEditable
* @type method
* @access public
* @param Stylesheet object or DOMelement style/link
* @return bool editable
*/
style.isEditable = function(stylesheet)
{
	//  accept both StyleSheet objects (which carry ownerNode) and style/link DOM elements,
	//  then check whether the node backs one of the local (and therefore editable) sheets
	var editable = getLocalStylesheets(),
		node = typeof stylesheet.ownerNode !== 'undefined' ? stylesheet.ownerNode : stylesheet,
		n;
	for (n = 0; n < editable.length; ++n)
		if (editable[n].ownerNode === node)
			return true;
	return false;
};
/**
* Create and add a new style rule
* @name add
* @type method
* @access public
* @param string selector
* @param mixed rules (one of; object {property: value} or string 'property: value')
* @param mixed sheet (either a sheet object or named reference, like 'first', 'last' or file name)
* @return int index at which the rule was added
*/
style.add = function(selector, rules, sheet)
{
	//  note: 'rule' is reused - first as the CSS text accumulator, and in the
	//  multi-sheet branch below as a boolean success accumulator
	var rule = '',
		find, p;
	// make the rules into an object
	if (typeof rules === 'string')
		rules = getStyleProperties(rules);
	// if rules isn't an object, we exit right here
	if (typeof rules !== 'object')
		return false;
	// if no sheet was provided, or a string reference to a sheet was provided, resolve it
	if (!sheet || typeof sheet === 'string')
		sheet = getStylesheet(sheet || 'last');
	// in case we now have a list of stylesheets, we either want one (if there's just one) or we add the style to all
	if (sheet instanceof Array)
	{
		if (sheet.length === 1)
		{
			sheet = sheet[0];
		}
		else
		{
			//  recurse per sheet, AND-ing the results together
			rule = true;
			for (p = 0; p < sheet.length; ++p)
				rule = rule && style.add(selector, rules, sheet[p]);
			return rule;
		}
	}
	// populate the find buffer, so we can determine which style rules we actually need
	find = style.find(selector, sheet);
	for (p in rules)
		if (typeof find[p] === 'undefined' || normalizeValue(find[p]) !== normalizeValue(rules[p]))
			rule += (rule !== '' ? ';' : '') + cssProperty(p) + ':' + rules[p];
	// finally, add the rules to the stylesheet (addRule is the legacy IE API, insertRule the standard one)
	if (sheet.addRule)
		return sheet.addRule(selector, rule);
	else if (sheet.insertRule)
		return sheet.insertRule(selector + '{' + rule + '}', sheet.cssRules.length);
	return false;
};
/**
* Find all style rules for given selector (in optionally given sheet)
* @name find
* @type method
* @access public
* @param string selector
* @param mixed sheet (optional, either a sheet object or named reference, like 'first', 'last' or file name)
* @return object style rules
*/
style.find = function(selector, sheet)
{
	var match = {},
		rules, i, j;
	if (selector)
		selector = normalizeSelector(selector);
	//  default to all local stylesheets, and accept a single sheet as well as an array
	if (!sheet)
		sheet = getStylesheet();
	else if (!(sheet instanceof Array))
		sheet = [sheet];
	for (i = 0; i < sheet.length; ++i)
	{
		//  fix: the original tested 'typeof sheet[i].cssRules ?', which is always truthy
		//  (typeof yields a non-empty string), so the legacy .rules fallback was unreachable
		rules = typeof sheet[i].cssRules !== 'undefined' ? sheet[i].cssRules : sheet[i].rules;
		if (rules)
			for (j = 0; j < rules.length; ++j)
				if (!selector || normalizeSelector(rules[j].selectorText) === selector)
					match = combine(match, getStyleProperties(rules[j].style.cssText));
	}
	return match;
};
}
/**
* String utils
* @note available as konflux.string / kx.string
*/
function kxString()
{
var string = this,
/**
* Javascript port of Java’s String.hashCode()
* (Based on http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/)
* @name hashCode
* @type function
* @access internal
* @param string input
* @return number hash (32bit integer)
*/
hashCode = function(s)
{
for (var r = 0, i = 0, l = s.length; i < l; ++i)
r = (r = r * 31 + s.charCodeAt(i)) & r;
return r;
},
/**
* Create a hash from a string
* @name hash
* @type function
* @access internal
* @param string source
* @return string hash
*/
hash = function(s)
{
	//  chunk size; the input is padded (with alphabet characters) to a multiple of p
	var p = 8,
		pad = ('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + s).substr(-(Math.ceil((s.length || 1) / p) * p)),
		r = 0;
	//  sum the hashCode of each p-sized chunk
	while (pad.length)
	{
		r += hashCode(pad.substr(0, p));
		pad = pad.substr(p);
	}
	//  express the absolute sum in base 36 for a compact result
	return Math.abs(r).toString(36);
},
/**
* Return the ASCII value of given character
* @name ord
* @type function
* @access internal
* @param string character
* @return number character code
*/
ord = function(s)
{
	//  only the first character of the given string is considered
	return s.charCodeAt(0);
},
/**
* Return the character corresponding with given ASCII value
* @name chr
* @type function
* @access internal
* @param number character code
* @return string character
*/
chr = function(n)
{
	//  inverse of ord; turn a character code back into a single-character string
	return String.fromCharCode(n);
},
/**
* Pad a string
* @name pad
* @type function
* @access internal
* @param string to pad
* @param number length
* @param string pad string (optional, default ' ')
* @param int pad type (optional, default PAD_RIGHT)
* @return padded string
*/
pad = function(s, n, c, t)
{
c = Array(n).join(c);
return (n -= s.length) > 0 && (t = t === string.PAD_LEFT ? n : (t === string.PAD_BOTH ? Math.ceil(n / 2): 0)) !== false
? (t > 0 ? c.substr(0, 1 + t) : '') + s + c.substr(0, 1 + n - t)
: s;
},
/**
* Generate a checksum for given string
* @name checksum
* @type function
* @access internal
* @param string source
* @return string checksum
*/
checksum = function(s)
{
	//  weighted sum: each character code multiplied by its 1-based position from the end
	for (var n = s.length, r = 0; n > 0; r += n * ord(s[--n]));
	//  append the length, coerce the string to a 32bit integer ('| 0') and express in base 36
	return Math.abs((r + '' + s.length) | 0).toString(36);
},
/**
* Generate a UUID
* @name uuid
* @type function
* @access internal
* @return string uuid
*/
uuid = function()
{
	//  RFC 4122 version 4 layout; every 'x' becomes a random hex digit, while the 'y'
	//  nibble is constrained to 8, 9, a or b
	var template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx';
	return template.replace(/[xy]/g, function(marker){
		var nibble = Math.random() * 16 | 0;
		if (marker !== 'x')
			nibble = (nibble & 0x3) | 0x8;
		return nibble.toString(16);
	});
};
// 'constants' used by pad() to select which side receives the padding
string.PAD_LEFT = 1;
string.PAD_BOTH = 2;
string.PAD_RIGHT = 3;
/**
 * Trim string from leading/trailing whitespace
 * @name trim
 * @type method
 * @access public
 * @param string to trim
 * @return trimmed string
 */
string.trim = function(s)
{
	//  strip leading whitespace in a single replace, then walk backwards past trailing whitespace
	var r = s.replace(/^\s\s*/, ''),
		x = /\s/,
		i = r.length;
	while (x.test(r.charAt(--i)));
	return r.slice(0, i + 1);
};
/**
 * Reverse given string
 * @name reverse
 * @type method
 * @access public
 * @param string to reverse
 * @return reversed string
 */
string.reverse = function(s)
{
	//  walk the string back to front, appending character by character
	for (var n = s.length, r = ''; n > 0; r += s[--n]);
	return r;
};
/**
 * Pad a string
 * @name pad
 * @type method
 * @access public
 * @param string to pad
 * @param number length
 * @param string pad string (optional, default ' ')
 * @param int pad type (optional, default PAD_RIGHT)
 * @return padded string
 */
string.pad = function(s, n, c, t)
{
	//  default pad character is a single space, default side is PAD_RIGHT
	return pad(s, n, c || ' ', t || string.PAD_RIGHT);
};
/**
 * Create a hash from a string
 * @name hash
 * @type method
 * @access public
 * @param string source
 * @return string hash
 */
string.hash = function(s)
{
	return hash(s);
};
/**
 * Generate a checksum for given string
 * @name checksum
 * @type method
 * @access public
 * @param string source
 * @return string checksum
 */
string.checksum = checksum;
/**
 * Generate a UUID
 * @name uuid
 * @type method
 * @access public
 * @return string uuid
 */
string.uuid = uuid;
}
/**
* Array utils
* @note available as konflux.array / kx.array
*/
function kxArray()
{
var array = this,
/**
* Create a hash from a string
* @name contains
* @type function
* @access internal
* @param array haystack
* @param mixed value
* @return boolean contains
*/
contains = function(a, v)
{
for (var i = 0; i < a.length; ++i)
if (a[i] === v)
return true;
return false;
},
/**
* Return the difference between two arrays
* @name diff
* @type function
* @access internal
* @param array array1
* @param array array2
* @return array difference
*/
diff = function(a, b)
{
var ret = [],
i;
for (i = 0; i < a.length; ++i)
if (!contains(b, a[i]))
ret.push(a[i]);
return ret;
},
/**
* Create an array with values between (including) given start and end
* @name range
* @type function
* @access internal
* @param number start
* @param number end
* @return array range
*/
range = function(a, b)
{
var r = [];
b -= a;
while (r.length <= b)
r.push(a + r.length);
return r;
},
/**
* Shuffle given array
* @name shuffle
* @type function
* @access internal
* @param array source
* @return array shuffled
*/
shuffle = function(a)
{
	//  in-place Fisher-Yates shuffle; Math.floor replaces the original's parseInt,
	//  which is meant for strings and only incidentally truncates numbers
	var i, j, tmp;
	for (i = a.length - 1; i > 0; --i)
	{
		//  pick a random index in [0, i]
		j = Math.floor(Math.random() * (i + 1));
		tmp = a[i];
		a[i] = a[j];
		a[j] = tmp;
	}
	return a;
};
// expose
/**
 * Verify whether given array contains given value
 * @name contains
 * @type method
 * @access public
 * @param array haystack
 * @param mixed value
 * @return boolean contains
 */
array.contains = contains;
/**
 * Return the difference between two arrays
 * @name diff
 * @type method
 * @access public
 * @param array array1
 * @param array array2
 * @return array difference
 */
array.diff = diff;
/**
 * Create an array with values between (including) given start and end
 * @name range
 * @type method
 * @access public
 * @param number start
 * @param number end
 * @return array range
 */
array.range = range;
/**
 * Shuffle given array
 * @name shuffle
 * @type method
 * @access public
 * @param array source
 * @return array shuffled
 */
array.shuffle = shuffle;
}
/**
* Event attachment handler
* @note available as konflux.event / kx.event
*/
function kxEvent()
{
var event = this,
queue = buffer('event.queue'),
/**
* Ready state handler, removes all relevant triggers and executes any handler that is set
* @name ready
* @type function
* @access internal
* @return void
*/
ready = function(e){
var run = false,
p;
if (document.removeEventListener)
{
document.removeEventListener('DOMContentLoaded', ready, false);
window.removeEventListener('load', ready, false);
run = true;
}
else if (document.readyState === 'complete')
{
document.detachEvent('onreadystate', ready);
window.detachEvent('onload', ready);
run = true;
}
if (run && queue.ready)
for (p in queue.ready)
queue.ready[p].call(e);
},
/**
* Unify the event object, which makes event more consistent across browsers
* @name unifyEvent
* @type function
* @access internal
* @return Event object
*/
unifyEvent = function(e)
{
	//  old IE exposes the event on window.event instead of passing it
	var evt = e || window.event;
	//  normalize target (old IE uses srcElement)
	if (typeof evt.target === 'undefined')
		evt.target = typeof evt.srcElement !== 'undefined' ? evt.srcElement : null;
	//  for pointer-ish events, attach a kxPoint with the document-relative coordinates
	//  (falling back from pageX/Y to clientX/Y plus the scroll offset)
	if (/^mouse[a-z]+|drag[a-z]+|drop$/i.test(evt.type))
	{
		evt.mouse = new kxPoint(
			evt.pageX ? evt.pageX : (evt.clientX ? evt.clientX + document.body.scrollLeft + document.documentElement.scrollLeft : 0),
			evt.pageY ? evt.pageY : (evt.clientY ? evt.clientY + document.body.scrollTop + document.documentElement.scrollTop : 0)
		);
	}
	return evt;
};
/**
* A custom DOMReady handler
* @name add
* @type method
* @access public
* @param function handler
* @return void
*/
event.ready = function(handler){
	// the document is ready already
	if (document.readyState === 'complete')
		return setTimeout(handler, 1); // make sure we run the 'event' asynchronously
	// we cannot use the event.listen method, as we need very different event listeners
	//  first handler registration installs the (shared) listeners exactly once
	if (typeof queue.ready === 'undefined')
	{
		queue.ready = [];
		if (document.addEventListener)
		{
			// prefer the 'DOM ready' event
			document.addEventListener('DOMContentLoaded', ready, false);
			// failsafe to window.onload
			window.addEventListener('load', ready, false);
		}
		else
		{
			// the closest we can get to 'DOMContentLoaded' in IE, this is still prior to onload
			document.attachEvent('onreadystatechange', ready);
			// again the failsafe, now IE style
			window.attachEvent('onload', ready);
		}
	}
	queue.ready.push(handler);
};
/**
 * Add event listeners to target
 * @name listen
 * @type method
 * @access public
 * @param DOMElement target
 * @param string event type (multiple types may be given, comma separated)
 * @param function handler
 * @return object kxEvent (chainable)
 */
event.listen = function(target, type, handler){
	//  the delegate guarantees the handler receives a unified event and runs with target as 'this'
	var delegate = function(e){handler.apply(target, [unifyEvent(e)])},
		list = typeof type === 'string' ? type.split(',') : type,
		i;
	for (i = 0; i < list.length; ++i)
	{
		//  standards first, then legacy IE attachEvent, finally the on<type> property
		if (target.addEventListener)
			target.addEventListener(list[i], delegate, false);
		else if (target.attachEvent)
			target.attachEvent('on' + list[i], delegate);
		else
			target['on' + list[i]] = delegate;
	}
	return event;
};
}
/**
* Timing utils
* @note available as konflux.timing / kx.timing
* @TODO documentation (honestly... what DOES this do??)
*/
function kxTiming()
{
	/**
	 * Wrapper around a single setTimeout, exposing a cancel method
	 * @name kxDelay
	 * @type class
	 * @access internal
	 * @param function handler
	 * @param number timeout (milliseconds)
	 * @param string reference
	 * @note NOTE(review): the 'reference' argument is accepted but never used inside kxDelay
	 */
	function kxDelay(handler, timeout, reference){
		var delay = this,
			timer = null,
			cancel = function(){
				clearTimeout(timer);
			},
			start = function(){
				timer = setTimeout(function(){cancel();handler.call();}, timeout);
			};
		//  public: cancel the pending invocation
		delay.cancel = function()
		{
			cancel();
		};
		//  the timer starts immediately upon construction
		start();
	}
	var timing = this,
		stack = buffer('timing.delay'),
		//  cancel and discard a managed delay by its reference
		remove = function(reference){
			if (typeof stack[reference] !== 'undefined')
			{
				// cancel the stack reference
				stack[reference].cancel();
				// delete it
				delete stack[reference];
			}
		},
		//  (re)create a managed delay; an existing delay under the same reference is cancelled first
		//  NOTE(review): the fallback key handler.toString() may collide for identical function bodies
		create = function(handler, delay, reference){
			if (reference)
				remove(reference);
			else
				reference = handler.toString() || unique();
			return stack[reference] = new kxDelay(handler, delay, reference);
		};
	timing.remove = remove;
	timing.create = create;
}
/**
* Observer object, handles subscriptions to messages
* @note available as konflux.observer / kx.observer
*/
function kxObserver()
{
var observer = this,
subscription = buffer('observer.subscriptions'),
active = buffer('observer.active'),
/**
* Create the subscription stack if it does not exist
* @name ensureSubscriptionStack
* @type function
* @access internal
* @param string stack name
* @return void
*/
ensureSubscriptionStack = function(s)
{
	//  lazily initialize the named stack as an empty array
	if (typeof subscription[s] === 'undefined') subscription[s] = [];
},
/**
* Add handler to specified stack
* @name add
* @type function
* @access internal
* @param string stack name
* @param function handler
* @return int total number of subscriptions in this stack
*/
add = function(s, f)
{
	ensureSubscriptionStack(s);
	//  Array.push returns the new length, i.e. the subscription count
	return subscription[s].push(f);
},
/**
* Disable a handler for specified stack
* @name disable
* @type function
* @access internal
* @param string stack name
* @param function handler
* @return void
* @note this method is used from the Observation object, which would influence the number of
* subscriptions if the subscription itself was removed immediately
*/
disable = function(s, f)
{
for (var i = 0; i < subscription[s].length; ++i)
if (subscription[s][i] === f)
subscription[s][i] = false;
},
/**
* Remove specified handler (and all disabled handlers) from specified stack
* @name remove
* @type function
* @access internal
* @param string stack name
* @param function handler (optional)
* @return array removed handlers
*/
remove = function(s, f)
{
var r = [], n = [], i;
ensureSubscriptionStack(s);
for (i = 0; i < subscription[s].length; ++i)
(!subscription[s][i] || subscription[s][i] === f ? r : n).push(subscription[s][i]);
subscription[s] = n;
return r;
},
/**
* Flush specified stack
* @name flush
* @type function
* @access internal
* @param string stack name
* @return array removed handlers (false if the stack did not exist);
*/
flush = function(s)
{
var r = false;
if (typeof subscription[s] !== 'undefined')
{
r = subscription[s];
delete subscription[s];
}
return r;
},
/**
* Trigger the handlers in specified stack
* @name trigger
* @type function
* @access internal
* @param string stack name
* @param mixed arg1 ... argN
* @return void
*/
trigger = function(s)
{
	var arg = Array.prototype.slice.call(arguments),
		ref = unique(),
		part = s.split('.'),
		wildcard = false,
		name, i;
	//  walk from the most specific stack name down to the wildcard variants,
	//  e.g. 'a.b.c', then 'a.b.*', 'a.*', '*'
	while (part.length >= 0)
	{
		//  'active' allows an Observation's stop() to abort the remaining handlers
		active[ref] = true;
		name = part.join('.') + (wildcard ? (part.length ? '.' : '') + '*' : '');
		wildcard = true;
		if (typeof subscription[name] !== 'undefined')
			for (i = 0; i < subscription[name].length; ++i)
			{
				if (!active[ref])
					break;
				//  skip disabled (false) entries; the first argument (the stack name)
				//  is replaced by a fresh Observation before each handler call
				if (subscription[name][i])
				{
					arg[0] = new kxObservation(s, subscription[name][i], ref);
					subscription[name][i].apply(subscription[name][i], arg);
				}
			}
		//  pop() returns the removed segment; an empty segment ends the walk
		if (!part.pop())
			break;
	}
	delete active[ref];
};
/**
* Observation object, instances of this are be provided to all observer notification subscribers
* @name kxObservation
* @type class
* @access internal
* @param string type
* @param function handle
* @param string reference
* @return kxObservation object
*/
function kxObservation(type, handle, reference)
{
	var observation = this;
	//  the stack name the notification was sent to
	observation.type = type;
	//  the unique reference of the notification run this observation belongs to
	observation.reference = reference;
	observation.timeStamp = time();
	observation.timeDelta = elapsed();
	/**
	 * Unsubscribe from the current observer stack
	 * @name unsubscribe
	 * @type function
	 * @access public
	 * @return void
	 * @note the handler is disabled (not removed) so the in-flight notification stays consistent
	 */
	observation.unsubscribe = function()
	{
		return disable(type, handle);
	};
	/**
	 * Stop the execution of this Observation
	 * @name stop
	 * @type function
	 * @access public
	 * @return void
	 */
	observation.stop = function()
	{
		active[reference] = false;
	};
};
/**
* Subscribe a handler to an observer stack
* @name subscribe
* @type method
* @access public
* @param string stack name
* @param function handle
* @return bool success
*/
observer.subscribe = function subscribe(stack, handle)
{
	//  a comma separated stack name subscribes the handler to each stack;
	//  the overall result is the logical AND of the individual registrations
	var names = stack.split(','),
		success = true,
		n;
	for (n = 0; n < names.length; ++n)
		success = (add(names[n], handle) ? true : false) && success;
	return success;
};
/**
* Unsubscribe a handler from an observer stack
* @name unsubscribe
* @type method
* @access public
* @param string stack name
* @param function handle
* @return array removed handlers
*/
observer.unsubscribe = function unsubscribe(stack, handle)
{
	var list = stack.split(','),
		result = [],
		removed, i;
	for (i = 0; i < list.length; ++i)
	{
		//  fix: flush returns boolean false for stacks that never existed; the original
		//  concatenated that false straight into the result array
		removed = handle ? remove(list[i], handle) : flush(list[i]);
		if (removed)
			result = result.concat(removed);
	}
	return result;
};
/**
* Notify all subscribers to a stack
* @name subscribe
* @type method
* @access public
* @param string stack name
* @param mixed arg1 ... argN
* @return void
*/
observer.notify = function notify()
{
	//  forward all arguments (stack name first) to the internal trigger
	return trigger.apply(observer, arguments);
};
}
/**
* Breakpoint object, add/remove classes on specified object (or body) when specific browser dimensions are met
* (triggers observations when viewport dimensions change)
* @note available as konflux.breakpoint / kx.breakpoint
*/
function kxBreakpoint()
{
var breakpoint = this,
dimensionStack = buffer('breakpoint.dimension'),
ratioStack = buffer('breakpoint.ratio'),
current = null,
timer = null,
ratioTimer = null,
/**
* Handle browser window resize events, matching the most appropriate size
* @name _resize
* @type function
* @access internal
* @param event
* @return void
*/
resize = function(e)
{
	//  resolve the viewport width with fallbacks for older browsers
	var dimension = match(window.innerWidth || document.documentElement.clientWidth || document.body.clientWidth);
	// if we don't have any valid dimension or the dimension is equal to the current one, stop
	if (!dimension || current === dimension)
		return false;
	// is there a current set, remove it
	if (current)
		current.element.className = current.element.className.replace(current.expression, '');
	// do we have an element to manipulate
	if (!dimension.element)
		dimension.element = document.body;
	// set the given class on the element
	dimension.element.className = konflux.string.trim(dimension.element.className + ' ' + dimension.className);
	//  let subscribers know which breakpoint class is now active
	konflux.observer.notify('breakpoint.change', dimension.className);
	current = dimension;
},
/**
* Determine the best matching dimension and return the settings
* @name match
* @type function
* @access internal
* @param int browser width
* @return object config
*/
match = function(width){
	//  find the widest configured breakpoint that is still below the viewport width;
	//  'min' tracks the smallest configured breakpoint as a fallback
	//  NOTE(review): dimensionStack keys are object keys and therefore strings - the
	//  comparisons below rely on numeric coercion of 'p'; confirm for multi-digit widths
	var found, delta, min, p;
	for (p in dimensionStack)
	{
		min = !min ? p : Math.min(min, p);
		if (p < width && (!delta || width - p < delta))
		{
			found = p;
			delta = width - p;
		}
	}
	return dimensionStack[found] || dimensionStack[min] || false;
},
/**
* Determine the best matching pixel ratio and set the defined classes
* @name pixelRatio
* @type function
* @access internal
* @return void
*/
pixelRatio = function(){
	//  default to ratio 1 on browsers that do not expose devicePixelRatio
	var ratio = typeof window.devicePixelRatio !== 'undefined' ? window.devicePixelRatio : 1;
	//  only an exact ratio match applies its configured class
	if (typeof ratioStack[ratio] !== 'undefined')
		ratioStack[ratio].element.className = konflux.string.trim(ratioStack[ratio].element.className) + ' ' + ratioStack[ratio].className;
};
/**
* Add breakpoint configuration
* @name add
* @type function
* @access public
* @param int width
* @param string classname
* @param DOMElement target (defaults to 'body')
* @return breakpoint object
* @note when a breakpoint is added, the _resize handler will be triggered with a slight delay,
* so if a suitable breakpoint is added it will be used immediately but _resize will occur only once.
* This ought to prevent FOUC
*/
breakpoint.add = function(width, className, target)
{
clearTimeout(timer);
dimensionStack[width] = {
expression: new RegExp('\s*' + className + '\s*', 'g'),
className: className,
element: target
};
timer = setTimeout(function(){resize()}, 1);
return breakpoint;
};
/**
* Add pixel ratio configuration
* @name ratio
* @type function
* @access public
* @param int ratio
* @param string classname
* @param DOMElement target (defaults to 'body')
* @return breakpoint object
* @note as the ratio does not change, the best matching ratio will be added once
*/
breakpoint.ratio = function(ratio, className, target)
{
clearTimeout(ratioTimer);
ratioStack[ratio] = {
expression: new RegExp('\s*' + className + '\s*', 'g'),
className: className,
element: target || document.body
};
ratioTimer = setTimeout(function(){pixelRatio()}, 1);
return breakpoint;
};
// listen to the resize event
konflux.event.listen(window, 'resize', resize);
}
/**
 * Point object, handling the (heavy) lifting of working with points
 * @note available as konflux.point / kx.point
 */
function kxPoint(x, y)
{
	var point = this;
	point.x = x || 0;
	point.y = y || 0;
	/**
	 * Move the point object by given x and y (mutates this point)
	 * @name    move
	 * @type    method
	 * @access  public
	 * @param   number x
	 * @param   number y
	 * @return  void
	 */
	point.move = function(x, y)
	{
		point.x += x;
		point.y += y;
	};
	/**
	 * Scale the points coordinates by given factor (mutates this point)
	 * @name    scale
	 * @type    method
	 * @access  public
	 * @param   number factor
	 * @return  void
	 */
	point.scale = function(factor)
	{
		point.x *= factor;
		point.y *= factor;
	};
	/**
	 * Subtract a point from the current point, returning a new point
	 * @name    subtract
	 * @type    method
	 * @access  public
	 * @param   object point
	 * @return  kxPoint
	 */
	point.subtract = function(p)
	{
		return new kxPoint(point.x - p.x, point.y - p.y);
	};
	/**
	 * Add a point to the current point, returning a new point
	 * @name    add
	 * @type    method
	 * @access  public
	 * @param   object point
	 * @return  kxPoint
	 */
	point.add = function(p)
	{
		//  fixed typo: 'pint.x' referenced an undefined variable and threw a ReferenceError
		return new kxPoint(point.x + p.x, point.y + p.y);
	};
	/**
	 * Get the distance between given and current point (Euclidean distance)
	 * @name    distance
	 * @type    method
	 * @access  public
	 * @param   object point
	 * @return  number distance
	 */
	point.distance = function(p)
	{
		return Math.sqrt(Math.pow(Math.abs(point.x - p.x), 2) + Math.pow(Math.abs(point.y - p.y), 2));
	};
	/**
	 * Get the angle in radians between given and current point
	 * @name    angle
	 * @type    method
	 * @access  public
	 * @param   object point
	 * @return  number angle
	 */
	point.angle = function(p)
	{
		return Math.atan2(point.x - p.x, point.y - p.y);
	};
}
/**
 * Cookie object, making working with cookies a wee bit easier
 * @note available as konflux.cookie / kx.cookie
 */
function kxCookie()
{
	var cookie = this,
		//  in-memory mirror of document.cookie, key => value (both trimmed strings)
		jar = {},
		/**
		 * Read the available cookie information and populate the jar variable
		 * @name    init
		 * @type    function
		 * @access  internal
		 * @return  void
		 */
		init = function()
		{
			var part = document.cookie.split(';'),
				data;
			while (part.length)
			{
				data = part.shift().split('=');
				//  first '=' separates key from value; any further '=' belong to the value itself
				jar[konflux.string.trim(data.shift())] = konflux.string.trim(data.join('='));
			}
		},
		/**
		 * Set a cookie
		 * @name    setCookie
		 * @type    function
		 * @access  internal
		 * @param   string key
		 * @param   string value
		 * @param   int expire [optional, default expire at the end of the session]
		 * @param   string path [optional, default the current path]
		 * @param   string domain [optional, default the current domain]
		 * @return  void
		 * @note    the syntax of setCookie is compatible with that of PHP's setCookie
		 *          this means that setting an empty value (string '' | null | false) or
		 *          an expiry time in the past, the cookie will be removed
		 */
		setCookie = function(key, value, expire, path, domain)
		{
			var pairs = [key + '=' + (typeof value === 'number' ? value : value || '')],
				date;
			//  an empty value leaves 'key=' — force removal by expiring in the past (PHP-compatible)
			if (pairs[0].substr(-1) === '=')
				expire = -1;
			if (typeof expire !== 'undefined' && expire)
				date = new Date(expire);
			if (date)
			{
				//  an expiry in the past removes the cookie, so drop it from the jar as well
				if (date < (new Date()).getTime() && typeof jar[key] !== 'undefined')
					delete jar[key];
				pairs.push('expires=' + date);
			}
			if (typeof path !== 'undefined' && path)
				pairs.push('path=' + path);
			if (typeof domain !== 'undefined' && domain)
				pairs.push('domain=' + domain);
			document.cookie = pairs.join(';');
			//  only update the jar when the browser actually accepted the cookie
			//  (pairs.shift() is the original 'key=value' pair)
			if (document.cookie.indexOf(pairs.shift()) >= 0)
				jar[key] = value + '';
		},
		/**
		 * Obtain a cookie value
		 * @name    getCookie
		 * @type    function
		 * @access  internal
		 * @param   string key
		 * @return  mixed value (string, or null when the key is unknown)
		 */
		getCookie = function(key)
		{
			return typeof jar[key] !== 'undefined' ? jar[key] : null;
		};
	/**
	 * Get and/or set cookies
	 * @name    value
	 * @type    method
	 * @access  public
	 * @param   string key [optional, an object containing all cookies is returned if omitted]
	 * @param   string value [optional, if no value is given the current value will be returned]
	 * @param   int expire [optional, default expire at the end of the session]
	 * @param   string path [optional, default the current path]
	 * @param   string domain [optional, default the current domain]
	 * @return  mixed value (the full jar, a single value, or null)
	 */
	cookie.value = function(key, value, expire, path, domain)
	{
		if (typeof key === 'undefined')
			return jar;
		// if a second argument (value) was given, we update the cookie
		if (arguments.length >= 2)
			setCookie(key, value, expire, path, domain);
		return getCookie(key);
	}
	init();
}
/**
 * Storage object, a simple wrapper for localStorage
 * @note available as konflux.storage / kx.storage
 */
function kxStorage()
{
	var ls = this,
		//  maximum length of a single stored value; longer payloads are split into fragments
		maxSize = 2048,
		storage = typeof window.localStorage !== 'undefined' ? window.localStorage : false;
	/**
	 * Combine stored fragments together into the original data string
	 * @name    combineFragments
	 * @type    function
	 * @access  internal
	 * @param   string data index
	 * @return  mixed data combined (string, or bool false when a fragment is missing/corrupt)
	 */
	function combineFragments(data)
	{
		var match, part, fragment, length, variable, i;
		if (data && (match = data.match(/^\[fragment:([0-9]+),([0-9]+),([a-z_]+)\]$/)))
		{
			fragment = parseInt(match[1], 10);
			length = parseInt(match[2], 10);
			variable = match[3];
			data = '';
			for (i = 0; i < fragment; ++i)
			{
				part = storage.getItem(variable + i);
				if (part !== null)
					data += part;
				else
					return false;
			}
			//  verify the reassembled payload against the recorded length
			if (!data || data.length !== length)
				return false;
		}
		return data;
	}
	/**
	 * Split a large data string into several smaller fragments
	 * @name    createFragments
	 * @type    function
	 * @access  internal
	 * @param   string name
	 * @param   string data
	 * @return  bool success
	 * @note    browsers' Storage.setItem returns undefined, so 'success' may be falsy even
	 *          when the writes succeeded — preserved original behavior, TODO confirm intent
	 */
	function createFragments(name, data)
	{
		var variable = '__' + name,
			fragment = Math.ceil(data.length / maxSize),
			success = storage.setItem(name, '[fragment:' + fragment + ',' + data.length + ',' + variable + ']'),
			i;
		for (i = 0; i < fragment; ++i)
			success = success && storage.setItem(variable + i, data.substring(i * maxSize, Math.min(i * maxSize + maxSize, data.length)));
		return success;
	}
	/**
	 * Remove all fragmented keys
	 * @name    dropFragments
	 * @type    function
	 * @access  internal
	 * @param   array match (result of the fragment-header regex)
	 * @return  void
	 */
	function dropFragments(match)
	{
		var fragment = parseInt(match[1], 10),
			variable = match[3],
			i;
		for (i = 0; i < fragment; ++i)
			drop(variable + i);
	}
	/**
	 * Obtain the data for given name
	 * @name    getItem
	 * @type    function
	 * @access  internal
	 * @param   string name
	 * @return  mixed data (parsed JSON, or bool false when absent or checksum mismatch)
	 */
	function getItem(name)
	{
		var data = storage ? storage.getItem(name) : false;
		if (data && data.match(/^\[fragment:([0-9]+),([0-9]+),([a-z_]+)\]$/))
			data = combineFragments(data);
		//  stored format is '<checksum>:<json>'; exec returns null when the value is
		//  absent or malformed, so guard before dereferencing (previously threw a TypeError)
		data = /([a-z0-9]+):(.*)/i.exec(data);
		if (data && data.length > 2 && data[1] === konflux.string.checksum(data[2]))
			return JSON.parse(data[2]);
		return false;
	}
	/**
	 * Set the data for given name
	 * @name    setItem
	 * @type    function
	 * @access  internal
	 * @param   string name
	 * @param   mixed data
	 * @return  mixed result (see createFragments note; bool false when storage is unavailable)
	 */
	function setItem(name, data)
	{
		data = JSON.stringify(data);
		//  prefix the payload with its checksum so reads can detect corruption
		data = konflux.string.checksum(data) + ':' + data;
		if (storage)
			return data.length > maxSize ? createFragments(name, data) : storage.setItem(name, data);
		return false;
	}
	/**
	 * Drop the data for given name
	 * @name    drop
	 * @type    function
	 * @access  internal
	 * @param   string name
	 * @return  bool success
	 */
	function drop(name)
	{
		var data, match;
		if (storage)
		{
			data = storage.getItem(name);
			//  fragmented values keep their pieces under derived keys; remove those too
			if (data && (match = data.match(/^\[fragment:([0-9]+),([0-9]+),([a-z_]+)\]$/)))
				dropFragments(match);
			return storage.removeItem(name);
		}
		return false;
	}
	/**
	 * Get the data for given name
	 * @name    get
	 * @type    method
	 * @access  public
	 * @param   string name
	 * @return  mixed data
	 */
	ls.get = getItem;
	/**
	 * Set the data for given name
	 * @name    set
	 * @type    method
	 * @access  public
	 * @param   string name
	 * @param   mixed data
	 * @return  void
	 */
	ls.set = setItem;
	/**
	 * Remove the data for given name
	 * @name    remove
	 * @type    method
	 * @access  public
	 * @param   string name
	 * @return  bool success
	 */
	ls.remove = drop;
}
/**
 * Canvas object, allowing for chainable access to canvas methods
 * @note available as konflux.canvas / kx.canvas
 */
function kxCanvas()
{
	var canvas = this;

	/**
	 * Wrapper around a canvas element's 2d context; every native context method and
	 * (writable) property is exposed as a chainable method on this object
	 * @param DOMElement canvas
	 */
	function kxCanvasContext(canvas)
	{
		var context = this;
		/**
		 * Obtain the 2d context and relay its methods and known properties as chainable methods
		 * @access internal
		 */
		function init()
		{
			var property = {
				globalAlpha: 1,
				globalCompositeOperation: 'source-over', // source-over, source-in, source-out, source-atop, destination-over, destination-in, destination-out, destination-atop, lighter, copy, xor
				height: null, // readonly
				lineWidth: 1,
				lineCap: 'butt', // butt, round, square
				lineJoin: 'miter', // round, bevel, miter
				miterLimit: 10,
				strokeStyle: '#000',
				fillStyle: '#000',
				shadowOffsetX: 0,
				shadowOffsetY: 0,
				shadowBlur: 0,
				shadowColor: 'transparent black',
				font: '10px sans-serif',
				textAlign: 'start', // start, end, left, right, center
				textBaseLine: 'alphabetic', // top, hanging, middle, alphabetic, ideographic, bottom
				width: null // readonly
			},
			p;
			context.ctx2d = canvas.getContext('2d');
			// relay all methods
			for (p in context.ctx2d)
				if (typeof context.ctx2d[p] === 'function')
					context[p] = relayMethod(context.ctx2d[p]);
			// relay all properties (as we want chainability)
			for (p in property)
			{
				context[p] = relayProperty(p);
				context[p](property[p]);
			}
		}
		//  wrap a native context method so it returns the wrapper (chainable)
		function relayMethod(f)
		{
			return function(){
				f.apply(context.ctx2d, arguments);
				return context;
			};
		}
		//  wrap a native context property as getter/setter method (no argument = get)
		function relayProperty(key)
		{
			return function(value){
				if (typeof value === 'undefined')
					return context.ctx2d[key];
				context.ctx2d[key] = value;
				return context;
			};
		}
		//  apply the given color stops to a gradient and fill with it
		function gradientFill(gradient, color)
		{
			var p;
			for (p in color)
				gradient.addColorStop(p, color[p]);
			context.fillStyle(gradient);
			context.fill();
			return context;
		}

		//  NOTE: init() must run BEFORE the custom methods below are assigned; it installs
		//  the generic relays, and the custom methods (e.g. strokeStyle) intentionally
		//  override relays of the same name. Previously init() ran last, silently clobbering
		//  the custom strokeStyle method with the plain property relay.
		init();

		/**
		 * Get the canvas content as data URL, or (re)draw the canvas from a data URL
		 * @param  string data [optional]
		 * @return mixed (context when drawing, data URL string when reading)
		 */
		context.data = function(data)
		{
			var image;
			if (data)
			{
				image = new Image();
				image.src = data;
				context.ctx2d.clearRect(0, 0, canvas.width, canvas.height);
				context.drawImage(image, 0, 0);
				return context;
			}
			return canvas.toDataURL();
		};
		/**
		 * Append the canvas element to given target (id or DOMElement)
		 * @param  mixed target
		 * @return mixed (context on success, bool false otherwise)
		 */
		context.append = function(target)
		{
			if (typeof target === 'string')
				target = document.getElementById(target);
			if (typeof target === 'object')
				return target.appendChild(canvas) ? context : false;
			return false;
		};
		/**
		 * Configure the shadow in one call; omitted arguments leave the current value
		 * @return context
		 */
		context.shadow = function(x, y, blur, color)
		{
			if (typeof x === 'number')
				context.shadowOffsetX(x);
			if (typeof y === 'number')
				context.shadowOffsetY(y);
			if (typeof blur === 'number')
				context.shadowBlur(blur);
			if (typeof color !== 'undefined')
				context.shadowColor(color);
			return context;
		};
		/**
		 * Fill the current path, optionally setting the fill color first
		 * @return context
		 */
		context.colorFill = function(color)
		{
			if (color)
				context.fillStyle(color);
			context.fill();
			return context;
		};
		/**
		 * Stroke the current path, optionally setting color/width/cap first
		 * @return context
		 * @note   replaces the relayed strokeStyle accessor; the underlying context
		 *         property is assigned directly to avoid this method calling itself
		 */
		context.strokeStyle = function(color, width, cap)
		{
			if (color)
				context.ctx2d.strokeStyle = color;
			if (width)
				context.lineWidth(width);
			if (cap)
				context.lineCap(cap);
			context.stroke();
			return context;
		};
		/**
		 * Fill the current path with a radial gradient between points a (radius ar) and b (radius br)
		 * @return context
		 */
		context.radialGradientFill = function(a, ar, b, br, color)
		{
			return gradientFill(context.ctx2d.createRadialGradient(a.x, a.y, ar, b.x, b.y, br), color);
		};
		/**
		 * Fill the current path with a linear gradient from point a to point b
		 * @return context
		 */
		context.linearGradientFill = function(a, b, color)
		{
			return gradientFill(context.ctx2d.createLinearGradient(a.x, a.y, b.x, b.y), color);
		};
		/**
		 * Add a full circle path at x, y with given radius
		 * @return context
		 */
		context.circle = function(x, y, radius)
		{
			context.beginPath();
			context.arc(x, y, radius, 0, Math.PI * 2, 1);
			context.closePath();
			return context;
		};
		/**
		 * Stroke a line through all given points; when the last point equals the first
		 * the path is closed instead of drawn
		 * @return context
		 */
		context.line = function()
		{
			var len = arguments.length,
				i;
			context.beginPath();
			for (i = 0; i < len; ++i)
				if (i === len - 1 && arguments[i].x === arguments[0].x && arguments[i].y === arguments[0].y)
					context.closePath();
				else
					context[i === 0 ? 'moveTo' : 'lineTo'](arguments[i].x, arguments[i].y);
			context.stroke();
			return context;
		};
	}

	/**
	 * Create a new canvas element of given dimensions and return its wrapped context
	 * @return kxCanvasContext
	 */
	canvas.create = function(width, height)
	{
		var object = document.createElement('canvas');
		object.setAttribute('width', width);
		object.setAttribute('height', height);
		return canvas.init(object);
	};
	/**
	 * Wrap an existing canvas element
	 * @return kxCanvasContext
	 */
	canvas.init = function(object)
	{
		return new kxCanvasContext(object);
	};
	/**
	 * Append a (possibly freshly created) canvas to given target
	 * @return mixed (context on success, bool false otherwise)
	 */
	canvas.append = function(target, mixed)
	{
		if (typeof mixed === 'number')
			mixed = canvas.create(arguments[1], arguments[2]);
		if (mixed instanceof kxCanvasContext)
			return mixed.append(target);
		return false;
	};
}
/**
 * Logo object, creates the konflux logo on canvas
 * @note available as konflux.logo / kx.logo
 */
function kxLogo()
{
	var logo = this,
		//  shorthand point factory
		pt = function(x, y)
		{
			return new konflux.point(x, y);
		},
		//  named designs: each design is a list of draw steps, each step maps a
		//  canvas-context method name onto its argument list
		design = {
			konfirm: [
				{line:[pt(3, 44), pt(2, 35), pt(41, 66), pt(96, 22), pt(94, 31), pt(41, 75), pt(3, 44)],fillStyle:['rgb(25,25,25)'],fill:[]},
				{line:[pt(77, 0), pt(41, 25), pt(21, 12), pt(0, 25), pt(2, 35), pt(41, 66), pt(96, 22), pt(99, 12), pt(77, 0)],fillStyle:['rgb(7,221,246)'],fill:[]},
				{globalAlpha:[.2],line:[pt(0, 25), pt(2, 35), pt(41, 66), pt(96, 22), pt(99, 12), pt(41, 56), pt(0, 25)],fillStyle:['rgb(0, 0, 0)'],fill:[]}
			]
		},
		//  render the named design (default 'konfirm') onto a fresh canvas context,
		//  returns bool false for unknown design names
		render = function(name)
		{
			var ctx, step, method;
			name = name || 'konfirm';
			if (typeof design[name] === 'undefined')
				return false;
			ctx = konflux.canvas.create(100, 75);
			for (step = 0; step < design[name].length; ++step)
				for (method in design[name][step])
					ctx[method].apply(null, design[name][step][method]);
			return ctx;
		};
	/**
	 * Render the logo and append it to given target
	 * @param  mixed target (id or DOMElement)
	 * @return mixed (context on success, bool false otherwise)
	 */
	logo.append = function(target)
	{
		return render().append(target);
	};
	/**
	 * Render the logo and return it as data URL
	 * @return string data URL
	 */
	logo.data = function()
	{
		return render().data();
	};
	/**
	 * Render the logo and return it as an image element
	 * @return DOMElement img
	 */
	logo.image = function()
	{
		var element = document.createElement('img');
		element.src = logo.data();
		return element;
	};
}
// expose object references
konflux.point = kxPoint;
konflux.logo = kxLogo;
// expose object instances
konflux.browser = new kxBrowser();
konflux.url = new kxURL();
konflux.style = new kxStyle();
konflux.string = new kxString();
konflux.array = new kxArray();
konflux.event = new kxEvent();
konflux.timing = new kxTiming();
konflux.observer = new kxObserver();
konflux.breakpoint = new kxBreakpoint();
konflux.cookie = new kxCookie();
konflux.storage = new kxStorage();
konflux.canvas = new kxCanvas();
// make konflux available on the global (window) scope both as 'konflux' and 'kx'
window.konflux = window.kx = konflux;
})(window); | General
* bumped version to 0.2.5
* aligned docblocks
+ added hasProperty function
kxBrowser
+ added 'feature' method (including its internal 'hasFeature' function), to test whether or not a certain feature is available
+ added 'prefix' method (including its internal 'vendorPrefix' function), to obtain the proper vendor prefix for the current browser
* changed 'supports' method to use the 'hasFeature' function
+ added 'fullscreen' method, which allows for setting a DOMElement to fullscreen (if the browsers supports this)
kxURL
* Added API documentation
kxStyle
+ added 'inline' method, allowing for adding inline style to a DOMElement
+ added 'selector' method, allowing for creating a style selector from any DOMElement
* minor optimisation in 'add' method
kxNumber
* added kxNumber module, with 'even', 'odd' and 'between' method to determine if given input matches it
kxString
* added 'ucFirst' method, uppercasing the first letter of given string (compatible with PHP's ucFirst function)
kxEvent
* added 'hasTouch' method, which indicates if Konflux is running on a touch capable device
kxStorage
* changed 'get' method to return all stored items if no name is provided (this uses the newly introduced 'getAll' function)
+ added 'length' method, which returns the amount of stored items
+ added 'keys' method, which returns the name of all available keys
* renamed 'remove' method to 'flush', which describes the purpose much better
| konflux.js | General * bumped version to 0.2.5 * aligned docblocks + added hasProperty function | <ide><path>onflux.js
<del>/**
<del> * __ Konflux (version 0.2.4, rev 317) - a javascript helper library
<add>/*
<add> * __ Konflux (version 0.2.5, rev 331) - a javascript helper library
<ide> * /\_\
<ide> * /\/ / / Copyright 2012-2013, Konfirm (Rogier Spieker)
<ide> * \ / / Releases under the MIT license
<ide>
<ide> /**
<ide> * Obtain a reference to a specific buffer object, creates one if it does not exist
<del> * @name buffer
<del> * @type function
<del> * @access internal
<del> * @param string object name
<del> * @return object
<add> * @name buffer
<add> * @type function
<add> * @access internal
<add> * @param string object name
<add> * @return object
<ide> */
<ide> buffer = function(key)
<ide> {
<ide> },
<ide> /**
<ide> * Obtain the milliseconds since the UNIX Epoch (Jan 1, 1970 00:00:00)
<del> * @name time
<del> * @type function
<del> * @access internal
<del> * @return int milliseconds
<add> * @name time
<add> * @type function
<add> * @access internal
<add> * @return int milliseconds
<ide> */
<ide> time = function()
<ide> {
<ide> },
<ide> /**
<ide> * Shorthand method for creating a combined version of several objects
<del> * @name combine
<del> * @type function
<del> * @access internal
<del> * @param object 1
<del> * @param object ...
<del> * @param object N
<del> * @return function constructor
<add> * @name combine
<add> * @type function
<add> * @access internal
<add> * @param object 1
<add> * @param object ...
<add> * @param object N
<add> * @return function constructor
<ide> */
<ide> combine = function()
<ide> {
<ide> },
<ide> /**
<ide> * Shorthand method creating object prototypes
<del> * @name proto
<del> * @type function
<del> * @access internal
<del> * @param function prototype
<del> * @param object extension
<del> * @return function constructor
<add> * @name proto
<add> * @type function
<add> * @access internal
<add> * @param function prototype
<add> * @param object extension
<add> * @return function constructor
<ide> */
<ide> proto = function(construct, prototype)
<ide> {
<ide> },
<ide> /**
<ide> * Obtain the elapsed time since Konflux started (roughly), using the format: [Nd ] hh:mm:ss.ms
<del> * @name elapsed
<del> * @type function
<del> * @access internal
<del> * @return string formatted time
<add> * @name elapsed
<add> * @type function
<add> * @access internal
<add> * @return string formatted time
<ide> */
<ide> elapsed = function()
<ide> {
<ide> },
<ide> /**
<ide> * Obtain an unique key, the key is guaranteed to be unique within the browser runtime
<del> * @name unique
<del> * @type function
<del> * @access internal
<del> * @return string key
<add> * @name unique
<add> * @type function
<add> * @access internal
<add> * @return string key
<ide> */
<ide> unique = function()
<ide> {
<ide> },
<ide> /**
<ide> * Verify whether given argument is empty
<del> * @name empty
<del> * @type function
<del> * @access internal
<del> * @param mixed variable to check
<del>` * @note The function follows PHP's empty function; null, undefined, 0, '', '0' and false are all considered empty
<add> * @name empty
<add> * @type function
<add> * @access internal
<add> * @param mixed variable to check
<add>` * @note The function follows PHP's empty function; null, undefined, 0, '', '0' and false are all considered empty
<ide> */
<ide> empty = function(p)
<ide> {
<ide> },
<ide> /**
<ide> * Determine the type of given variable
<del> * @name type
<del> * @type function
<del> * @access internal
<del> * @param mixed variable
<del> * @return string type
<add> * @name type
<add> * @type function
<add> * @access internal
<add> * @param mixed variable
<add> * @return string type
<ide> */
<ide> type = function(variable)
<ide> {
<ide> return variable instanceof Array ? 'array' : typeof variable;
<add> },
<add> /**
<add> * Does given object have given property
<add> * @name hasProperty
<add> * @type function
<add> * @access internal
<add> * @param object haystack
<add> * @param string property
<add> * @return bool available
<add> */
<add> hasProperty = function(haystack, needle)
<add> {
<add> return !!(needle in haystack);
<ide> },
<ide>
<ide> // Private properties
<ide> konflux
<ide> ; // end var
<ide>
<add> /**
<add> * The Konflux object itself
<add> * @name Konflux
<add> * @type constructor function
<add> * @access internal
<add> * @return Konflux instance
<add> * @note konflux is available both as (window.)konflux and (window.)kx
<add> */
<ide> function Konflux()
<ide> {
<ide> var kx = this;
<ide>
<ide> /**
<ide> * Return konflux itself
<del> * @name master
<del> * @type method
<del> * @access public
<del> * @return object konflux
<add> * @name master
<add> * @type method
<add> * @access public
<add> * @return object konflux
<ide> */
<ide> kx.master = function()
<ide> {
<ide>
<ide> /**
<ide> * Obtain the milliseconds since the UNIX Epoch (Jan 1, 1970 00:00:00)
<del> * @name time
<del> * @type method
<del> * @access public
<del> * @return int milliseconds
<add> * @name time
<add> * @type method
<add> * @access public
<add> * @return int milliseconds
<ide> */
<ide> kx.time = time;
<ide>
<ide> /**
<ide> * Obtain the elapsed time since Konflux started (roughly), using the format: [Nd ] hh:mm:ss.ms
<del> * @name elapsed
<del> * @type method
<del> * @access public
<del> * @return string formatted time
<add> * @name elapsed
<add> * @type method
<add> * @access public
<add> * @return string formatted time
<ide> */
<ide> kx.elapsed = elapsed;
<ide>
<ide> /**
<ide> * Obtain an unique key, the key is guaranteed to be unique within the browser runtime
<del> * @name unique
<del> * @type method
<del> * @access public
<del> * @return string key
<add> * @name unique
<add> * @type method
<add> * @access public
<add> * @return string key
<ide> */
<ide> kx.unique = unique;
<ide>
<ide> /**
<add> * Shorthand method for creating a combined version of several objects
<add> * @name combine
<add> * @type function
<add> * @access internal
<add> * @param object 1
<add> * @param object ...
<add> * @param object N
<add> * @return function constructor
<add> */
<add> kx.combine = combine;
<add>
<add> /**
<ide> * Verify whether given arguments are empty
<del> * @name empty
<del> * @type method
<del> * @access public
<del> * @param mixed variable1
<del> * @param mixed variableN, ...
<del> * @return bool variable is empty
<add> * @name empty
<add> * @type method
<add> * @access public
<add> * @param mixed variable1
<add> * @param mixed variableN, ...
<add> * @return bool variable is empty
<ide> */
<ide> kx.empty = function()
<ide> {
<ide>
<ide> /**
<ide> * Determine the type of given variable
<del> * @name type
<del> * @type method
<del> * @access public
<del> * @param mixed variable
<del> * @param bool object types
<del> * @return string type
<add> * @name type
<add> * @type method
<add> * @access public
<add> * @param mixed variable
<add> * @param bool object types
<add> * @return string type
<ide> */
<ide> kx.type = function(variable, objectTypes)
<ide> {
<ide>
<ide> return result;
<ide> };
<add>
<add> return this;
<ide> }
<ide> konflux = new Konflux();
<ide>
<ide>
<ide> /**
<ide> * Browser/feature detection
<del> * @note available as konflux.browser / kx.browser
<add> * @module browser
<add> * @note available as konflux.browser / kx.browser
<ide> */
<ide> function kxBrowser()
<ide> {
<ide> var browser = this,
<add> support = {
<add> touch: hasProperty(window, 'ontouchstart') || hasProperty(window.navigator, 'msMaxTouchPoints')
<add> },
<add> prefix,
<ide> ieVersion;
<ide>
<ide> /**
<del> * Verify if the browser at hand is any version of Internet Explorer (4+)
<del> * @name detectIE
<del> * @type function
<del> * @access internal
<del> * @return mixed (boolean false if not IE, version number if IE)
<add> * Determine whether or not the browser is Internet Explorer (4+)
<add> * @name detectIE
<add> * @type function
<add> * @access internal
<add> * @return mixed (boolean false if not IE, version number if IE)
<ide> */
<ide> function detectIE()
<ide> {
<ide> // https://gist.github.com/527683 (Conditional comments only work for IE 5 - 9)
<ide> var node = document.createElement('div'),
<ide> check = node.getElementsByTagName('i'),
<del> version = 0;
<add> version = 3;
<ide>
<ide> // Starting with IE 4 (as version is incremented before first use), an <i> element is added to
<ide> // the 'node' element surrounded by conditional comments. The 'check' variable is automatically updated
<ide> // to contain all <i> elements. These elements are not there if the browser does not support conditional
<del> // comments or does not match the IE version.
<add> // comments or does not match the IE version
<ide> // Note that there are two conditions for the while loop; the innerHTML filling and the check, the while
<ide> // loop itself has no body (as it is closed off by a semi-colon right after declaration)
<del> while (
<del> node.innerHTML = '<!--[if gt IE ' + (++version) + ']><i></i><![endif]-->',
<del> check.length && version < 10
<del> );
<add> while (node.innerHTML = '<!--[if gt IE ' + (++version) + ']><i></i><![endif]-->', check.length && version < 10);
<ide> // Added IE's @cc_on trickery for browser which do not support conditional comments (such as IE10)
<ide> return version > 4 ? version : Function('/*@cc_on return document.documentMode@*/return false')();
<ide> }
<ide>
<ide> /**
<add> * Determine whether or not the browser has given feature in either the window or document scope
<add> * @name hasFeature
<add> * @type function
<add> * @access internal
<add> * @param string feature
<add> * @return boolean has feature
<add> */
<add> function hasFeature(feature)
<add> {
<add> return typeof support[feature] !== 'undefined' ? support[feature] : hasProperty(window, feature) || hasProperty(document, feature);
<add> }
<add>
<add> /**
<add> * Obtain the vendor prefix for the current browser
<add> * @name vendorPrefix
<add> * @type function
<add> * @access internal
<add> * @return string prefix
<add> */
<add> function vendorPrefix()
<add> {
<add> var vendor = ['O', 'ms', 'Moz', 'Icab', 'Khtml', 'Webkit'],
<add> regex = new RegExp('^(' + vendor.join('|') + ')(?=[A-Z])'),
<add> script = document.createElement('script'),
<add> p;
<add>
<add> for (p in script.style)
<add> if (regex.test(p))
<add> {
<add> prefix = p.match(regex).shift();
<add> break;
<add> }
<add>
<add> while (!prefix && vendor.length)
<add> {
<add> p = vendor.pop();
<add> if (hasProperty(script.style, p + 'Opacity'))
<add> prefix = p;
<add> }
<add>
<add> script = null;
<add> return prefix;
<add> };
<add>
<add> /**
<ide> * Verify if the browser at hand is any version of Internet Explorer (4+)
<del> * @name ie
<del> * @type method
<del> * @access public
<del> * @return mixed (boolean false if not IE, version number if IE)
<del> * @see detectIE
<del> * @note this public implementation caches the result
<add> * @name ie
<add> * @type method
<add> * @access public
<add> * @return mixed (boolean false if not IE, version number if IE)
<add> * @see detectIE
<add> * @note this public implementation caches the result
<ide> */
<ide> browser.ie = function()
<ide> {
<ide> if (typeof ieVersion === 'undefined')
<ide> ieVersion = detectIE();
<ide> return ieVersion;
<del> }
<add> };
<add> /**
<add> * Obtain the vendor prefix for the current browser
<add> * @name prefix
<add> * @type method
<add> * @access public
<add> * @return string prefix
<add> * @note this public implementation caches the result
<add> */
<add> browser.prefix = function()
<add> {
<add> if (!prefix)
<add> prefix = vendorPrefix();
<add>
<add> return prefix;
<add> };
<ide> /**
<ide> * Test whether or not the browser at hand is aware of given feature(s) exist in either the window or document scope
<del> * @name supports
<del> * @type method
<del> * @access public
<del> * @param string feature
<del> * @param string ...
<del> * @return boolean support
<del> * @note multi features can be provided, in which case the return value indicates the support of all given features
<add> * @name supports
<add> * @type method
<add> * @access public
<add> * @param string feature
<add> * @param string ...
<add> * @return boolean support
<add> * @note multiple features can be provided, in which case the return value indicates the support of all given features
<ide> */
<ide> browser.supports = function()
<ide> {
<ide> var r = true,
<ide> i = arguments.length;
<add>
<add> // test all the features given
<ide> while (r && --i >= 0)
<del> r = r && (typeof window[arguments[i]] !== 'undefined' || typeof document[arguments[i]] !== 'undefined');
<add> r = r && hasFeature(arguments[i]);
<add>
<ide> return r;
<del> }
<add> };
<add> /**
<add> * Enable the HTML5 fullscreen mode for given element
<add> * @name fullscreen
<add> * @type method
<add> * @access public
<add> * @param DOMNode target
<add> * @return bool success
<add> * @note this method is highly experimental
<add> */
<add> browser.fullscreen = function(target)
<add> {
<add> var check = ['fullScreen', 'isFullScreen'],
<add> vendor = konflux.browser.prefix().toLowerCase(),
<add> method, i;
<add>
<add> if (!target)
<add> target = document.documentElement;
<add>
<add> for (i = 0, method = null; i < check.length, method === null; ++i)
<add> {
<add> method = hasProperty(document, check[i]) ? check[i] : vendor + konflux.string.ucFirst(check[i]);
<add> if (!hasProperty(document, method))
<add> method = null;
<add> }
<add>
<add> vendor = method.match(new RegExp('^' + vendor)) ? vendor : null;
<add> vendor = (vendor || (document[method] ? 'cancel' : 'request')) + konflux.string.ucFirst((vendor ? (document[method] ? 'cancel' : 'request') : '') + konflux.string.ucFirst(check[0]));
<add>
<add> (document[method] ? document : target)[vendor](Element.ALLOW_KEYBOARD_INPUT || null);
<add> };
<ide> }
<ide>
<ide>
<ide> /**
<ide> * Handle URL's/URI's
<del> * @note available as konflux.url / kx.url
<add> * @module url
<add> * @note available as konflux.url / kx.url
<ide> */
<ide> function kxURL()
<ide> {
<ide> var url = this;
<ide>
<del> function parse(loc)
<add> /**
<add> * Parse given URL into its URI components
<add> * @name parse
<add> * @type function
<add> * @access internal
<add> * @param string location
<add> * @return object result
<add> */
<add> function parse(location)
<ide> {
<ide> // URL regex + key processing based on the work of Derek Watson's jsUri (http://code.google.com/p/jsuri/)
<del> var match = /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/.exec(loc),
<add> var match = /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/.exec(location),
<ide> prop = ['source', 'protocol', 'domain', 'userInfo', 'user', 'password', 'host', 'port', 'relative', 'path', 'directory', 'file', 'query', 'anchor'],
<ide> result = {};
<ide> while (prop.length)
<ide> return result;
<ide> }
<ide>
<add> /**
<add> * The parsed url for the URL of the current page
<add> * @name current
<add> * @type object
<add> * @access public
<add> */
<ide> url.current = window && window.location ? parse(window.location.href) : false;
<add> /**
<add> * Parse given URL into its URI components
<add> * @name parse
<add> * @type method
<add> * @access public
<add> * @param string url
<add> * @return object result
<add> */
<ide> url.parse = parse;
<del> url.isLocal = function(loc)
<del> {
<del> return url.current.domain === url.parse(loc).domain;
<add> /**
<add> * Determine whether given URL is on the same domain as the page itself
<add> * @name isLocal
<add> * @type method
<add> * @access public
<add> * @param string location
<add> * @return bool local
<add> */
<add> url.isLocal = function(location)
<add> {
<add> return url.current.domain === url.parse(location).domain;
<ide> };
<ide> }
<ide>
<ide>
<ide> /**
<ide> * Style(sheet) manipulation
<del> * @note available as konflux.style / kx.style
<add> * @module style
<add> * @note available as konflux.style / kx.style
<ide> */
<ide> function kxStyle()
<ide> {
<ide>
<ide> /**
<ide> * Obtain the script property notation for given property
<del> * @name scriptProperty
<del> * @type function
<del> * @access internal
<del> * @param string property
<del> * @return string script property
<del> * @note 'background-color' => 'backgroundColor'
<add> * @name scriptProperty
<add> * @type function
<add> * @access internal
<add> * @param string property
<add> * @return string script property
<add> * @note 'background-color' => 'backgroundColor'
<ide> */
<ide> function scriptProperty(property)
<ide> {
<ide>
<ide> /**
<ide> * Obtain the CSS property notation for given property
<del> * @name cssProperty
<del> * @type function
<del> * @access internal
<del> * @param string property
<del> * @return string CSS property
<del> * @note 'backgroundColor' => 'background-color'
<add> * @name cssProperty
<add> * @type function
<add> * @access internal
<add> * @param string property
<add> * @return string CSS property
<add> * @note 'backgroundColor' => 'background-color'
<ide> */
<ide> function cssProperty(property)
<ide> {
<ide>
<ide> /**
<ide> * Obtain all local stylesheets, where local is determined on a match of the domain
<del> * @name getLocalStylesheets
<del> * @type function
<del> * @access internal
<del> * @return Array stylesheets
<add> * @name getLocalStylesheets
<add> * @type function
<add> * @access internal
<add> * @return array stylesheets
<ide> */
<ide> function getLocalStylesheets()
<ide> {
<ide>
<ide> /**
<ide> * Obtain specific stylesheets
<del> * @name getStylesheet
<del> * @type function
<del> * @access internal
<del> * @param string name (optional, default 'all'. Possible values 'first', 'last', 'all' or string filename)
<del> * @param bool includeOffset (optional default false, local stylesheets only)
<del> * @return Array stylesheets
<add> * @name getStylesheet
<add> * @type function
<add> * @access internal
<add> * @param string name [optional, default 'all'. Possible values 'first', 'last', 'all' or string filename]
<add> * @param bool includeOffset [optional, default false, local stylesheets only]
<add> * @return array stylesheets
<ide> */
<ide> function getStylesheet(name, includeOffsite)
<ide> {
<ide>
<ide> default:
<ide> // if no name was provided, return the entire list of (editable) stylesheets
<del> if (!name || name === 'all')
<add> if (name === 'all')
<ide> match = list;
<add> else if (!name)
<add> match = false;
<ide> // search for the stylesheet(s) whose href matches the given name
<ide> else if (list.length > 0)
<ide> for (i = 0; i < list.length; ++i)
<ide>
<ide> /**
<ide> * Obtain a stylesheet by its url or title
<del> * @name findStylesheet
<del> * @type function
<del> * @access internal
<del> * @param string url
<del> * @param string name
<del> * @return StyleSheet (bool false if not found)
<add> * @name findStylesheet
<add> * @type function
<add> * @access internal
<add> * @param string url
<add> * @param string name
<add> * @return StyleSheet (bool false if not found)
<ide> */
<ide> function findStylesheet(url, name)
<ide> {
<ide>
<ide> /**
<ide> * Create a new stylesheet
<del> * @name createStylesheet
<del> * @type function
<del> * @access internal
<del> * @param string url
<del> * @param bool before (effectively true for being the first stylesheet, anything else for last)
<del> * @param string name
<del> * @return style node
<add> * @name createStylesheet
<add> * @type function
<add> * @access internal
<add> * @param string url
<add> * @param bool before (effectively true for being the first stylesheet, anything else for last)
<add> * @param string name
<add> * @return style node
<ide> */
<ide> function createStylesheet(url, before, name)
<ide> {
<ide> var element = findStylesheet(url, name),
<del> head = document.head || document.getElementsByTagName('head')[0];
<add> head = document.head || document.getElementsByTagName('head')[0],
<add> i;
<ide>
<ide> if (!element)
<ide> {
<ide>
<ide> /**
<ide> * Parse the style declarations' cssText into key/value pairs
<del> * @name getStyleProperties
<del> * @type function
<del> * @access internal
<del> * @param CSS Rule
<del> * @return Object key value pairs
<add> * @name getStyleProperties
<add> * @type function
<add> * @access internal
<add> * @param CSS Rule
<add> * @return Object key value pairs
<ide> */
<ide> function getStyleProperties(declaration)
<ide> {
<ide>
<ide> /**
<ide> * Normalize given selector string
<del> * @name normalizeSelector
<del> * @type function
<del> * @access internal
<del> * @param string selector
<del> * @return string normalized selector
<add> * @name normalizeSelector
<add> * @type function
<add> * @access internal
<add> * @param string selector
<add> * @return string normalized selector
<ide> */
<ide> function normalizeSelector(selector)
<ide> {
<ide>
<ide> /**
<ide> * Normalize given CSS value
<del> * @name normalizeValue
<del> * @type function
<del> * @access internal
<del> * @param string value
<del> * @return string normalized value
<add> * @name normalizeValue
<add> * @type function
<add> * @access internal
<add> * @param string value
<add> * @return string normalized value
<ide> */
<ide> function normalizeValue(value)
<ide> {
<ide>
<ide>
<ide> /**
<add> * Apply style rules to target DOMElement
<add> * @name inline
<add> * @type method
<add> * @access public
<add> * @param DOMElement target
<add> * @param object style rules
<add> * @return void
<add> */
<add> style.inline = function(target, rules)
<add> {
<add> var p;
<add>
<add> for (p in rules)
<add> target.style[scriptProperty(p)] = rules[p];
<add> };
<add>
<add> /**
<add> * Obtain a CSS selector for given element
<add> * @name selector
<add> * @type method
<add> * @access public
<add> * @param DOMElement target
<add> * @return string selector
<add> */
<add> style.selector = function(target)
<add> {
<add> var node = target.nodeName.toLowerCase(),
<add> id = target.hasAttribute('id') ? '#' + target.getAttribute('id') : null,
<add> classes = target.hasAttribute('class') ? '.' + target.getAttribute('class').split(' ').join('.') : null,
<add> select = '';
<add>
<add> if (arguments.length === 1 || id || classes)
<add> select = node + (id || classes || '');
<add>
<add> return kx.string.trim((!id && target.parentNode && target !== document.body ? style.selector(target.parentNode, true) + ' ' : '') + select);
<add> };
<add>
<add> /**
<ide> * Obtain a stylesheet by its name or by a mnemonic (first, last, all)
<del> * @name sheet
<del> * @type method
<del> * @access public
<del> * @param string target (optional, default 'all'. Possible values 'first', 'last', 'all' or string filename)
<del> * @param bool editable (optional, default true)
<del> * @return Array stylesheets
<add> * @name sheet
<add> * @type method
<add> * @access public
<add> * @param string target [optional, default 'all'. Possible values 'first', 'last', 'all' or string filename]
<add> * @param bool editable [optional, default true]
<add> * @return array stylesheets
<ide> */
<ide> style.sheet = function(target, editable)
<ide> {
<ide>
<ide> /**
<ide> * Create a new stylesheet, either as first or last
<del> * @name create
<del> * @type method
<del> * @access public
<del> * @param bool before all other stylesheets
<del> * @return styleSheet
<add> * @name create
<add> * @type method
<add> * @access public
<add> * @param bool before all other stylesheets
<add> * @return styleSheet
<ide> */
<ide> style.create = function(name, before)
<ide> {
<del> return createStylesheet(false, before, name);
<add> var element = createStylesheet(false, before, name);
<add> return element.sheet || false;
<ide> };
<ide>
<ide> /**
<ide> * Load an external stylesheet, either as first or last
<del> * @name load
<del> * @type method
<del> * @access public
<del> * @param string url the url of the stylesheet to load
<del> * @param function callback
<del> * @param bool before all other style sheets
<del> * @return style node (<link...> element
<add> * @name load
<add> * @type method
<add> * @access public
<add> * @param string url the url of the stylesheet to load
<add> * @param function callback
<add> * @param bool before all other style sheets
<add>	 * @return style node (<link...> element)
<ide> */
<ide> style.load = function(url, callback, before)
<ide> {
<ide>
<ide> /**
<ide> * Determine whether or not the given style (node) is editable
<del> * @name isEditable
<del> * @type method
<del> * @access public
<del> * @param Stylesheet object or DOMelement style/link
<del> * @return bool editable
<add> * @name isEditable
<add> * @type method
<add> * @access public
<add> * @param Stylesheet object or DOMelement style/link
<add> * @return bool editable
<ide> */
<ide> style.isEditable = function(stylesheet)
<ide> {
<ide>
<ide> /**
<ide> * Create and add a new style rule
<del> * @name add
<del> * @type method
<del> * @access public
<del> * @param string selector
<del> * @param mixed rules (one of; object {property: value} or string 'property: value')
<del> * @param mixed sheet (either a sheet object or named reference, like 'first', 'last' or file name)
<del> * @return int index at which the rule was added
<add> * @name add
<add> * @type method
<add> * @access public
<add> * @param string selector
<add> * @param mixed rules (one of; object {property: value} or string 'property: value')
<add> * @param mixed sheet (either a sheet object or named reference, like 'first', 'last' or file name)
<add> * @return int index at which the rule was added
<ide> */
<ide> style.add = function(selector, rules, sheet)
<ide> {
<ide> // populate the find buffer, so we can determine which style rules we actually need
<ide> find = style.find(selector, sheet);
<ide> for (p in rules)
<del> if (typeof find[p] === 'undefined' || normalizeValue(find[p]) !== normalizeValue(rules[p]))
<add> if (!(p in find) || normalizeValue(find[p]) !== normalizeValue(rules[p]))
<ide> rule += (rule !== '' ? ';' : '') + cssProperty(p) + ':' + rules[p];
<ide>
<ide> // finally, add the rules to the stylesheet
<ide>
<ide> /**
<ide> * Find all style rules for given selector (in optionally given sheet)
<del> * @name find
<del> * @type method
<del> * @access public
<del> * @param string selector
<del> * @param mixed sheet (optional, either a sheet object or named reference, like 'first', 'last' or file name)
<del> * @return object style rules
<add> * @name find
<add> * @type method
<add> * @access public
<add> * @param string selector
<add> * @param mixed sheet [optional, either a sheet object or named reference, like 'first', 'last' or file name]
<add> * @return object style rules
<ide> */
<ide> style.find = function(selector, sheet)
<ide> {
<ide>
<ide>
<ide> /**
<add> * Number utils
<add> * @module number
<add> * @note available as konflux.number / kx.number
<add> */
<add> function kxNumber()
<add> {
<add> var number = this;
<add>
<add>
<add> /**
<add>	 * Test whether given input is an even number
<add> * @name even
<add> * @type method
<add> * @access public
<add> * @param number input
<add> * @return bool even
<add> */
<add> number.even = function(input)
<add> {
<add> return input % 2 === 0;
<add> };
<add>
<add> /**
<add>	 * Test whether given input is an odd number
<add> * @name odd
<add> * @type method
<add> * @access public
<add> * @param number input
<add> * @return bool odd
<add> */
<add> number.odd = function(input)
<add> {
<add> return !number.even(input);
<add> };
<add>
<add> /**
<add>	 * Test whether given input is between the low and high values
<add> * @name between
<add> * @type method
<add> * @access public
<add> * @param number input
<add> * @param number low
<add>	 * @param number high
<add> * @return bool between
<add> */
<add> number.between = function(input, low, high)
<add> {
<add> return input >= low && input <= high;
<add> };
<add> }
<add>
<add>
<add> /**
<ide> * String utils
<del> * @note available as konflux.string / kx.string
<add> * @module string
<add> * @note available as konflux.string / kx.string
<ide> */
<ide> function kxString()
<ide> {
<ide> /**
<ide> * Javascript port of Java’s String.hashCode()
<ide> * (Based on http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/)
<del> * @name hashCode
<del> * @type function
<del> * @access internal
<del> * @param string input
<del> * @return number hash (32bit integer)
<add> * @name hashCode
<add> * @type function
<add> * @access internal
<add> * @param string input
<add> * @return number hash (32bit integer)
<ide> */
<ide> hashCode = function(s)
<ide> {
<ide> },
<ide> /**
<ide> * Create a hash from a string
<del> * @name hash
<del> * @type function
<del> * @access internal
<del> * @param string source
<del> * @return string hash
<add> * @name hash
<add> * @type function
<add> * @access internal
<add> * @param string source
<add> * @return string hash
<ide> */
<ide> hash = function(s)
<ide> {
<ide> },
<ide> /**
<ide> * Return the ASCII value of given character
<del> * @name ord
<del> * @type function
<del> * @access internal
<del> * @param string character
<del> * @return number character code
<add> * @name ord
<add> * @type function
<add> * @access internal
<add> * @param string character
<add> * @return number character code
<ide> */
<ide> ord = function(s)
<ide> {
<ide> },
<ide> /**
<ide> * Return the character corresponding with given ASCII value
<del> * @name chr
<del> * @type function
<del> * @access internal
<del> * @param number character code
<del> * @return string character
<add> * @name chr
<add> * @type function
<add> * @access internal
<add> * @param number character code
<add> * @return string character
<ide> */
<ide> chr = function(n)
<ide> {
<ide> },
<ide> /**
<ide> * Pad a string
<del> * @name pad
<del> * @type function
<del> * @access internal
<del> * @param string to pad
<del> * @param number length
<del> * @param string pad string (optional, default ' ')
<del> * @param int pad type (optional, default PAD_RIGHT)
<del> * @return padded string
<add> * @name pad
<add> * @type function
<add> * @access internal
<add> * @param string to pad
<add> * @param number length
<add> * @param string pad string [optional, default ' ']
<add> * @param int pad type [optional, default PAD_RIGHT]
<add> * @return padded string
<ide> */
<ide> pad = function(s, n, c, t)
<ide> {
<ide> },
<ide> /**
<ide> * Generate a checksum for given string
<del> * @name checksum
<del> * @type function
<del> * @access internal
<del> * @param string source
<del> * @return string checksum
<add> * @name checksum
<add> * @type function
<add> * @access internal
<add> * @param string source
<add> * @return string checksum
<ide> */
<ide> checksum = function(s)
<ide> {
<ide> },
<ide> /**
<ide> * Generate a UUID
<del> * @name uuid
<del> * @type function
<del> * @access internal
<del> * @return string uuid
<add> * @name uuid
<add> * @type function
<add> * @access internal
<add> * @return string uuid
<ide> */
<ide> uuid = function()
<ide> {
<ide>
<ide> /**
<ide> * Trim string from leading/trailing whitespace
<del> * @name trim
<del> * @type method
<del> * @access public
<del> * @param string to trim
<del> * @return trimmed string
<add> * @name trim
<add> * @type method
<add> * @access public
<add> * @param string to trim
<add> * @return trimmed string
<ide> */
<ide> string.trim = function(s)
<ide> {
<ide> };
<ide> /**
<ide> * Reverse given string
<del> * @name reverse
<del> * @type method
<del> * @access public
<del> * @param string to reverse
<del> * @return reversed string
<add> * @name reverse
<add> * @type method
<add> * @access public
<add> * @param string to reverse
<add> * @return reversed string
<ide> */
<ide> string.reverse = function(s)
<ide> {
<ide> };
<ide> /**
<ide> * Pad a string
<del> * @name pad
<del> * @type method
<del> * @access public
<del> * @param string to pad
<del> * @param number length
<del> * @param string pad string (optional, default ' ')
<del> * @param int pad type (optional, default PAD_RIGHT)
<del> * @return padded string
<add> * @name pad
<add> * @type method
<add> * @access public
<add> * @param string to pad
<add> * @param number length
<add> * @param string pad string [optional, default ' ']
<add> * @param int pad type [optional, default PAD_RIGHT]
<add> * @return padded string
<ide> */
<ide> string.pad = function(s, n, c, t)
<ide> {
<ide> return pad(s, n, c || ' ', t || string.PAD_RIGHT);
<ide> };
<ide> /**
<add> * Uppercase the first character of given string
<add> * @name ucFirst
<add> * @type method
<add> * @access public
<add> * @param string of which to uppercase the first char
<add> * @return string
<add> */
<add> string.ucFirst = function(input)
<add> {
<add> return input.charAt(0).toUpperCase() + input.substr(1);
<add> };
<add> /**
<ide> * Create a hash from a string
<del> * @name hash
<del> * @type method
<del> * @access public
<del> * @param string source
<del> * @return string hash
<add> * @name hash
<add> * @type method
<add> * @access public
<add> * @param string source
<add> * @return string hash
<ide> */
<ide> string.hash = function(s)
<ide> {
<ide> };
<ide> /**
<ide> * Generate a checksum for given string
<del> * @name checksum
<del> * @type method
<del> * @access public
<del> * @param string source
<del> * @return string checksum
<add> * @name checksum
<add> * @type method
<add> * @access public
<add> * @param string source
<add> * @return string checksum
<ide> */
<ide> string.checksum = checksum;
<ide> /**
<ide> * Generate a UUID
<del> * @name uuid
<del> * @type method
<del> * @access public
<del> * @return string uuid
<add> * @name uuid
<add> * @type method
<add> * @access public
<add> * @return string uuid
<ide> */
<ide> string.uuid = uuid;
<ide> }
<ide>
<ide> /**
<ide> * Array utils
<del> * @note available as konflux.array / kx.array
<add> * @module array
<add> * @note available as konflux.array / kx.array
<ide> */
<ide> function kxArray()
<ide> {
<ide> var array = this,
<ide> /**
<ide> * Create a hash from a string
<del> * @name contains
<del> * @type function
<del> * @access internal
<del> * @param array haystack
<del> * @param mixed value
<del> * @return boolean contains
<add> * @name contains
<add> * @type function
<add> * @access internal
<add> * @param array haystack
<add> * @param mixed value
<add> * @return boolean contains
<ide> */
<ide> contains = function(a, v)
<ide> {
<ide> },
<ide> /**
<ide> * Return the difference between two arrays
<del> * @name diff
<del> * @type function
<del> * @access internal
<del> * @param array array1
<del> * @param array array2
<del> * @return array difference
<add> * @name diff
<add> * @type function
<add> * @access internal
<add> * @param array array1
<add> * @param array array2
<add> * @return array difference
<ide> */
<ide> diff = function(a, b)
<ide> {
<ide> },
<ide> /**
<ide> * Create an array with values between (including) given start and end
<del> * @name range
<del> * @type function
<del> * @access internal
<del> * @param number start
<del> * @param number end
<del> * @return array range
<add> * @name range
<add> * @type function
<add> * @access internal
<add> * @param number start
<add> * @param number end
<add> * @return array range
<ide> */
<ide> range = function(a, b)
<ide> {
<ide> },
<ide> /**
<ide> * Shuffle given array
<del> * @name shuffle
<del> * @type function
<del> * @access internal
<del> * @param array source
<del> * @return array shuffled
<add> * @name shuffle
<add> * @type function
<add> * @access internal
<add> * @param array source
<add> * @return array shuffled
<ide> */
<ide> shuffle = function(a)
<ide> {
<ide>
<ide> // expose
<ide> /**
<del> * Create a hash from a string
<del> * @name hash
<del> * @type method
<del> * @access public
<del> * @param array haystack
<del> * @param mixed value
<del> * @return boolean contains
<add> * Does the array contain given value
<add> * @name contains
<add> * @type method
<add> * @access public
<add> * @param array haystack
<add> * @param mixed value
<add> * @return boolean contains
<ide> */
<ide> array.contains = contains;
<ide> /**
<ide> * Return the difference between two arrays
<del> * @name diff
<del> * @type function
<del> * @access internal
<del> * @param array array1
<del> * @param array array2
<del> * @return array difference
<add> * @name diff
<add> * @type method
<add> * @access public
<add> * @param array array1
<add> * @param array array2
<add> * @return array difference
<ide> */
<ide> array.diff = diff;
<ide> /**
<ide> * Create an array with values between (including) given start and end
<del> * @name range
<del> * @type method
<del> * @access public
<del> * @param number start
<del> * @param number end
<del> * @return array range
<add> * @name range
<add> * @type method
<add> * @access public
<add> * @param number start
<add> * @param number end
<add> * @return array range
<ide> */
<ide> array.range = range;
<ide> /**
<ide> * Shuffle given array
<del> * @type method
<del> * @access public
<del> * @access internal
<del> * @param array source
<del> * @return array shuffled
<add> * @name shuffle
<add> * @type method
<add> * @access public
<add> * @param array source
<add> * @return array shuffled
<ide> */
<ide> array.shuffle = shuffle;
<ide> }
<ide>
<ide> /**
<ide> * Event attachment handler
<del> * @note available as konflux.event / kx.event
<add> * @module event
<add> * @note available as konflux.event / kx.event
<ide> */
<ide> function kxEvent()
<ide> {
<ide> var event = this,
<ide> queue = buffer('event.queue'),
<add> touch = konflux.browser.supports('touch'),
<ide>
<ide> /**
<ide> * Ready state handler, removes all relevant triggers and executes any handler that is set
<del> * @name ready
<del> * @type function
<del> * @access internal
<del> * @return void
<add> * @name ready
<add> * @type function
<add> * @access internal
<add> * @return void
<ide> */
<ide> ready = function(e){
<ide> var run = false,
<ide> },
<ide> /**
<ide> * Unify the event object, which makes event more consistent across browsers
<del> * @name unifyEvent
<del> * @type function
<del> * @access internal
<del> * @return Event object
<add> * @name unifyEvent
<add> * @type function
<add> * @access internal
<add> * @return Event object
<ide> */
<ide> unifyEvent = function(e)
<ide> {
<ide> };
<ide>
<ide> /**
<add> * Is the browser capable of touch events
<add> * @name hasTouch
<add> * @type method
<add> * @access public
<add> * @return bool is touch device
<add> */
<add> event.hasTouch = function()
<add> {
<add> return touch;
<add> };
<add>
<add> /**
<ide> * A custom DOMReady handler
<del> * @name add
<del> * @type method
<del> * @access public
<del> * @param function handler
<del> * @return void
<del> */
<del> event.ready = function(handler){
<add> * @name add
<add> * @type method
<add> * @access public
<add> * @param function handler
<add> * @return void
<add> */
<add> event.ready = function(handler)
<add> {
<ide> // the document is ready already
<ide> if (document.readyState === 'complete')
<ide> return setTimeout(handler, 1); // make sure we run the 'event' asynchronously
<ide>
<ide> /**
<ide> * Add event listeners to target
<del> * @name listen
<del> * @type method
<del> * @access public
<del> * @param DOMElement target
<del> * @param string event type
<del> * @param function handler
<del> * @return bool success
<del> */
<del> event.listen = function(target, type, handler){
<add> * @name listen
<add> * @type method
<add> * @access public
<add> * @param DOMElement target
<add> * @param string event type
<add> * @param function handler
<add> * @return bool success
<add> */
<add> event.listen = function(target, type, handler)
<add> {
<ide> var delegate = function(e){handler.apply(target, [unifyEvent(e)])},
<ide> list = typeof type === 'string' ? type.split(',') : type,
<ide> i;
<ide>
<ide> /**
<ide> * Timing utils
<del> * @note available as konflux.timing / kx.timing
<del> * @TODO documentation (honestly... what DOES this do??)
<add> * @module timing
<add> * @note available as konflux.timing / kx.timing
<add> * @TODO documentation (honestly... what DOES this do??)
<ide> */
<ide> function kxTiming()
<ide> {
<ide>
<ide> /**
<ide> * Observer object, handles subscriptions to messages
<del> * @note available as konflux.observer / kx.observer
<add> * @module observer
<add> * @note available as konflux.observer / kx.observer
<ide> */
<ide> function kxObserver()
<ide> {
<ide>
<ide> /**
<ide> * Create the subscription stack if it does not exist
<del> * @name ensureSubscriptionStack
<del> * @type function
<del> * @access internal
<del> * @param string stack name
<del> * @return void
<add> * @name ensureSubscriptionStack
<add> * @type function
<add> * @access internal
<add> * @param string stack name
<add> * @return void
<ide> */
<ide> ensureSubscriptionStack = function(s)
<ide> {
<ide> },
<ide> /**
<ide> * Add handler to specified stack
<del> * @name add
<del> * @type function
<del> * @access internal
<del> * @param string stack name
<del> * @param function handler
<del> * @return int total number of subscriptions in this stack
<add> * @name add
<add> * @type function
<add> * @access internal
<add> * @param string stack name
<add> * @param function handler
<add> * @return int total number of subscriptions in this stack
<ide> */
<ide> add = function(s, f)
<ide> {
<ide> },
<ide> /**
<ide> * Disable a handler for specified stack
<del> * @name disable
<del> * @type function
<del> * @access internal
<del> * @param string stack name
<del> * @param function handler
<del> * @return void
<del> * @note this method is used from the Observation object, which would influence the number of
<add> * @name disable
<add> * @type function
<add> * @access internal
<add> * @param string stack name
<add> * @param function handler
<add> * @return void
<add> * @note this method is used from the Observation object, which would influence the number of
<ide> * subscriptions if the subscription itself was removed immediately
<ide> */
<ide> disable = function(s, f)
<ide> },
<ide> /**
<ide> * Remove specified handler (and all disabled handlers) from specified stack
<del> * @name remove
<del> * @type function
<del> * @access internal
<del> * @param string stack name
<del> * @param function handler (optional)
<del> * @return array removed handlers
<add> * @name remove
<add> * @type function
<add> * @access internal
<add> * @param string stack name
<add> * @param function handler [optional]
<add> * @return array removed handlers
<ide> */
<ide> remove = function(s, f)
<ide> {
<ide> },
<ide> /**
<ide> * Flush specified stack
<del> * @name flush
<del> * @type function
<del> * @access internal
<del> * @param string stack name
<del> * @return array removed handlers (false if the stack did not exist);
<add> * @name flush
<add> * @type function
<add> * @access internal
<add> * @param string stack name
<add> * @return array removed handlers (false if the stack did not exist);
<ide> */
<ide> flush = function(s)
<ide> {
<ide> },
<ide> /**
<ide> * Trigger the handlers in specified stack
<del> * @name trigger
<del> * @type function
<del> * @access internal
<del> * @param string stack name
<del> * @param mixed arg1 ... argN
<del> * @return void
<add> * @name trigger
<add> * @type function
<add> * @access internal
<add> * @param string stack name
<add> * @param mixed arg1 ... argN
<add> * @return void
<ide> */
<ide> trigger = function(s)
<ide> {
<ide>
<ide> /**
<ide> * Observation object, instances of this are be provided to all observer notification subscribers
<del> * @name kxObservation
<del> * @type class
<del> * @access internal
<del> * @param string type
<del> * @param function handle
<del> * @param string reference
<del> * @return kxObservation object
<add> * @name kxObservation
<add> * @type class
<add> * @access internal
<add> * @param string type
<add> * @param function handle
<add> * @param string reference
<add> * @return kxObservation object
<ide> */
<ide> function kxObservation(type, handle, reference)
<ide> {
<ide>
<ide> /**
<ide> * Unsubscribe from the current observer stack
<del> * @name unsubscribe
<del> * @type function
<del> * @access public
<del> * @return void
<add> * @name unsubscribe
<add> * @type method
<add> * @access public
<add> * @return void
<ide> */
<ide> observation.unsubscribe = function()
<ide> {
<ide> };
<ide> /**
<ide> * Stop the execution of this Observation
<del> * @name stop
<del> * @type function
<del> * @access public
<del> * @return void
<add> * @name stop
<add> * @type method
<add> * @access public
<add> * @return void
<ide> */
<ide> observation.stop = function()
<ide> {
<ide>
<ide> /**
<ide> * Subscribe a handler to an observer stack
<del> * @name subscribe
<del> * @type method
<del> * @access public
<del> * @param string stack name
<del> * @param function handle
<del> * @return bool success
<del> */
<del> observer.subscribe = function subscribe(stack, handle)
<add> * @name subscribe
<add> * @type method
<add> * @access public
<add> * @param string stack name
<add> * @param function handle
<add> * @return bool success
<add> */
<add> observer.subscribe = function(stack, handle)
<ide> {
<ide> var list = stack.split(','),
<ide> result = true,
<ide>
<ide> /**
<ide> * Unsubscribe a handler from an observer stack
<del> * @name unsubscribe
<del> * @type method
<del> * @access public
<del> * @param string stack name
<del> * @param function handle
<del> * @return array removed handlers
<del> */
<del> observer.unsubscribe = function unsubscribe(stack, handle)
<add> * @name unsubscribe
<add> * @type method
<add> * @access public
<add> * @param string stack name
<add> * @param function handle
<add> * @return array removed handlers
<add> */
<add> observer.unsubscribe = function(stack, handle)
<ide> {
<ide> var list = stack.split(','),
<ide> result = [],
<ide>
<ide> /**
<ide> * Notify all subscribers to a stack
<del> * @name subscribe
<del> * @type method
<del> * @access public
<del> * @param string stack name
<del> * @param mixed arg1 ... argN
<del> * @return void
<del> */
<del> observer.notify = function notify()
<add> * @name notify
<add> * @type method
<add> * @access public
<add> * @param string stack name
<add> * @param mixed arg1 ... argN
<add> * @return void
<add> */
<add> observer.notify = function()
<ide> {
<ide> return trigger.apply(observer, arguments);
<ide> };
<ide> /**
<ide> * Breakpoint object, add/remove classes on specified object (or body) when specific browser dimensions are met
<ide> * (triggers observations when viewport dimensions change)
<del> * @note available as konflux.breakpoint / kx.breakpoint
<add> * @module breakpoint
<add> * @note available as konflux.breakpoint / kx.breakpoint
<ide> */
<ide> function kxBreakpoint()
<ide> {
<ide>
<ide> /**
<ide> * Handle browser window resize events, matching the most appropriate size
<del> * @name _resize
<del> * @type function
<del> * @access internal
<del> * @param event
<del> * @return void
<add> * @name resize
<add> * @type function
<add> * @access internal
<add> * @param event
<add> * @return void
<ide> */
<ide> resize = function(e)
<ide> {
<ide> },
<ide> /**
<ide> * Determine the best matching dimension and return the settings
<del> * @name match
<del> * @type function
<del> * @access internal
<del> * @param int browser width
<del> * @return object config
<add> * @name match
<add> * @type function
<add> * @access internal
<add> * @param int browser width
<add> * @return object config
<ide> */
<ide> match = function(width){
<ide> var found, delta, min, p;
<ide> },
<ide> /**
<ide> * Determine the best matching pixel ratio and set the defined classes
<del> * @name pixelRatio
<del> * @type function
<del> * @access internal
<del> * @return void
<add> * @name pixelRatio
<add> * @type function
<add> * @access internal
<add> * @return void
<ide> */
<ide> pixelRatio = function(){
<ide> var ratio = typeof window.devicePixelRatio !== 'undefined' ? window.devicePixelRatio : 1;
<ide>
<ide> /**
<ide> * Add breakpoint configuration
<del> * @name add
<del> * @type function
<del> * @access public
<del> * @param int width
<del> * @param string classname
<del> * @param DOMElement target (defaults to 'body')
<del> * @return breakpoint object
<del> * @note when a breakpoint is added, the _resize handler will be triggered with a slight delay,
<del> * so if a suitable breakpoint is added it will be used immediately but _resize will occur only once.
<del> * This ought to prevent FOUC
<add> * @name add
<add> * @type method
<add> * @access public
<add> * @param int width
<add> * @param string classname
<add> * @param DOMElement target (defaults to 'body')
<add> * @return object breakpoint
<add> * @note when a breakpoint is added, the internal resize handler will be triggered with a slight delay,
<add> * so if a suitable breakpoint is added it will be used immediately but _resize will occur only once.
<add> * This ought to prevent FOUC
<ide> */
<ide> breakpoint.add = function(width, className, target)
<ide> {
<ide>
<ide> /**
<ide> * Add pixel ratio configuration
<del> * @name ratio
<del> * @type function
<del> * @access public
<del> * @param int ratio
<del> * @param string classname
<del> * @param DOMElement target (defaults to 'body')
<del> * @return breakpoint object
<del> * @note as the ratio does not change, the best matching ratio will be added once
<add> * @name ratio
<add> * @type method
<add> * @access public
<add> * @param int ratio
<add> * @param string classname
<add> * @param DOMElement target (defaults to 'body')
<add> * @return object breakpoint
<add> * @note as the ratio does not change, the best matching ratio will be added once
<ide> */
<ide> breakpoint.ratio = function(ratio, className, target)
<ide> {
<ide>
<ide> /**
<ide> * Point object, handling the (heavy) lifting of working with points
<del> * @note available as konflux.point / kx.point
<del> * @TODO documentation
<add> * @module point
<add> * @note available as konflux.point / kx.point
<ide> */
<ide> function kxPoint(x, y)
<ide> {
<ide>
<ide> /**
<ide> * Move the point object by given x and y
<del> * @name move
<del> * @type method
<del> * @access public
<del> * @param number x
<del> * @param number y
<del> * @return void
<add> * @name move
<add> * @type method
<add> * @access public
<add> * @param number x
<add> * @param number y
<add> * @return void
<ide> */
<ide> point.move = function(x, y)
<ide> {
<ide>
<ide> /**
<ide> * Scale the points coordinates by given factor
<del> * @name scale
<del> * @type method
<del> * @access public
<del> * @param number factor
<del> * @return void
<add> * @name scale
<add> * @type method
<add> * @access public
<add> * @param number factor
<add> * @return void
<ide> */
<ide> point.scale = function(factor)
<ide> {
<ide>
<ide> /**
<ide> * Subtract a point for the current point
<del> * @name subtract
<del> * @type method
<del> * @access public
<del> * @param object point
<del> * @return kxPoint
<add> * @name subtract
<add> * @type method
<add> * @access public
<add> * @param object point
<add> * @return kxPoint
<ide> */
<ide> point.subtract = function(p)
<ide> {
<ide>
<ide> /**
<ide> * Add a point to the current point
<del> * @name add
<del> * @type method
<del> * @access public
<del> * @param object point
<del> * @return kxPoint
<add> * @name add
<add> * @type method
<add> * @access public
<add> * @param object point
<add> * @return kxPoint
<ide> */
<ide> point.add = function(p)
<ide> {
<ide>
<ide> /**
<ide> * Get the distance between given and current point
<del> * @name distance
<del> * @type method
<del> * @access public
<del> * @param object point
<del> * @return number distance
<add> * @name distance
<add> * @type method
<add> * @access public
<add> * @param object point
<add> * @return number distance
<ide> */
<ide> point.distance = function(p)
<ide> {
<ide>
<ide> /**
<ide> * Get the angle in radians between given and current point
<del> * @name angle
<del> * @type method
<del> * @access public
<del> * @param object point
<del> * @return number angle
<add> * @name angle
<add> * @type method
<add> * @access public
<add> * @param object point
<add> * @return number angle
<ide> */
<ide> point.angle = function(p)
<ide> {
<ide>
<ide> /**
<ide> * Cookie object, making working with cookies a wee bit easier
<del> * @note available as konflux.cookie / kx.cookie
<add> * @module cookie
<add> * @note available as konflux.cookie / kx.cookie
<ide> */
<ide> function kxCookie()
<ide> {
<ide> jar = {},
<ide> /**
<ide> * Read the available cookie information and populate the jar variable
<del> * @name init
<del> * @type function
<del> * @access internal
<del> * @return void
<add> * @name init
<add> * @type function
<add> * @access internal
<add> * @return void
<ide> */
<ide> init = function()
<ide> {
<ide> },
<ide> /**
<ide> * Set a cookie
<del> * @name setCookie
<del> * @type function
<del> * @access internal
<del> * @param string key
<del> * @param string value
<del> * @param int expire [optional, default expire at the end of the session]
<del> * @param string path [optional, default the current path]
<del> * @param string domain [optional, default the current domain]
<del> * @return void
<del> * @note the syntax of setCookie is compatible with that of PHP's setCookie
<add> * @name setCookie
<add> * @type function
<add> * @access internal
<add> * @param string key
<add> * @param string value
<add> * @param int expire [optional, default expire at the end of the session]
<add> * @param string path [optional, default the current path]
<add> * @param string domain [optional, default the current domain]
<add> * @return void
<add> * @note the syntax of setCookie is compatible with that of PHP's setCookie
<ide> * this means that setting an empty value (string '' | null | false) or
<ide> * an expiry time in the past, the cookie will be removed
<ide> */
<ide> },
<ide> /**
<ide> * Obtain a cookie value
<del> * @name getCookie
<del> * @type function
<del> * @access internal
<del> * @param string key
<del> * @return void
<add> * @name getCookie
<add> * @type function
<add> * @access internal
<add> * @param string key
<add> * @return void
<ide> */
<ide> getCookie = function(key)
<ide> {
<ide>
<ide> /**
<ide> * Get and/or set cookies
<del> * @name value
<del> * @type function
<del> * @access internal
<del> * @param string key [optional, an object containing all cookies is returned id omitted]
<del> * @param string value [optional, if no value is given the current value will be returned]
<del> * @param int expire [optional, default expire at the end of the session]
<del> * @param string path [optional, default the current path]
<del> * @param string domain [optional, default the current domain]
<del> * @return void
<add> * @name value
<add> * @type method
<add> * @access public
<add> * @param string key [optional, an object containing all cookies is returned if omitted]
<add> * @param string value [optional, if no value is given the current value will be returned]
<add> * @param int expire [optional, default expire at the end of the session]
<add> * @param string path [optional, default the current path]
<add> * @param string domain [optional, default the current domain]
<add> * @return void
<ide> */
<ide> cookie.value = function(key, value, expire, path, domain)
<ide> {
<ide>
<ide> /**
<ide> * Storage object, a simple wrapper for localStorage
<del> * @note available as konflux.storage / kx.storage
<add> * @module storage
<add> * @note available as konflux.storage / kx.storage
<ide> */
<ide> function kxStorage()
<ide> {
<ide>
<ide> /**
<ide> * Combine stored fragments together into the original data string
<del> * @name combineFragments
<del> * @type function
<del> * @access internal
<del> * @param string data index
<del> * @return string data combined
<add> * @name combineFragments
<add> * @type function
<add> * @access internal
<add> * @param string data index
<add> * @return string data combined
<ide> */
<ide> function combineFragments(data)
<ide> {
<ide>
<ide> /**
<ide> * Split a large data string into several smaller fragments
<del> * @name createFragments
<del> * @type function
<del> * @access internal
<del> * @param string name
<del> * @param string data
<del> * @return bool success
<add> * @name createFragments
<add> * @type function
<add> * @access internal
<add> * @param string name
<add> * @param string data
<add> * @return bool success
<ide> */
<ide> function createFragments(name, data)
<ide> {
<ide>
<ide> /**
<ide> * Remove all fragmented keys
<del> * @name dropFragments
<del> * @type function
<del> * @access internal
<del> * @param array match
<del> * @return void
<add> * @name dropFragments
<add> * @type function
<add> * @access internal
<add> * @param array match
<add> * @return void
<ide> */
<ide> function dropFragments(match)
<ide> {
<ide> }
<ide>
<ide> /**
<add> * Obtain all data from localStorage
<add> * @name getAll
<add> * @type function
<add> * @access internal
<add> * @return mixed data
<add> */
<add> function getAll()
<add> {
<add> var result = null,
<add> i, key;
<add>
<add> if (storage)
<add> {
<add> result = {};
<add> for (i = 0; i < storage.length; ++i)
<add> {
<add> key = storage.key(i);
<add> result[key] = getItem(key);
<add> }
<add> }
<add>
<add> return result;
<add> }
<add>
<add> /**
<ide> * Obtain the data for given name
<del> * @name getItem
<del> * @type function
<del> * @access internal
<del> * @param string name
<del> * @return mixed data
<add> * @name getItem
<add> * @type function
<add> * @access internal
<add> * @param string name
<add> * @return mixed data
<ide> */
<ide> function getItem(name)
<ide> {
<del> var data = storage ? storage.getItem(name) : false,
<del> checksum;
<add> var data = storage ? storage.getItem(name) : false;
<ide>
<ide> if (data && data.match(/^\[fragment:([0-9]+),([0-9]+),([a-z_]+)\]$/))
<ide> data = combineFragments(data);
<ide>
<del> data = /([a-z0-9]+):(.*)/i.exec(data);
<del> if (data.length > 2 && data[1] === konflux.string.checksum(data[2]))
<del> return JSON.parse(data[2]);
<del>
<del> return false;
<add> if (data && data.match(/^[a-z0-9]+:.*$/i))
<add> {
<add> data = /([a-z0-9]+):(.*)/i.exec(data);
<add> if (data.length > 2 && data[1] === konflux.string.checksum(data[2]))
<add> return JSON.parse(data[2]);
<add> }
<add>
<add> return data ? data : false;
<ide> }
<ide>
<ide> /**
<ide> * Set the data for given name
<del> * @name setItem
<del> * @type function
<del> * @access internal
<del> * @param string name
<del> * @param mixed data
<del> * @return string data
<add> * @name setItem
<add> * @type function
<add> * @access internal
<add> * @param string name
<add> * @param mixed data
<add> * @return string data
<ide> */
<ide> function setItem(name, data)
<ide> {
<ide> }
<ide>
<ide> /**
<del> * Drop the data for given name
<del> * @name drop
<del> * @type function
<del> * @access internal
<del> * @param string name
<del> * @return bool success
<del> */
<del> function drop(name)
<add> * Remove the data for given name
<add> * @name remove
<add> * @type function
<add> * @access internal
<add> * @param string name
<add> * @return bool success
<add> */
<add> function remove(name)
<ide> {
<ide> var data, match;
<ide>
<ide>
<ide> /**
<ide> * Get the data for given name
<del> * @name get
<del> * @type method
<del> * @access public
<del> * @param string name
<del> * @return mixed data
<del> */
<del> ls.get = getItem;
<add> * @name get
<add> * @type method
<add> * @access public
<add> * @param string name [optional, omit to get all stored entries]
<add> * @return mixed data
<add> */
<add> ls.get = function(name)
<add> {
<add> return name ? getItem(name) : getAll();
<add> };
<ide>
<ide> /**
<ide> * Set the data for given name
<del> * @name set
<del> * @type method
<del> * @access public
<del> * @param string name
<del> * @param mixed data
<del> * @return void
<add> * @name set
<add> * @type method
<add> * @access public
<add> * @param string name
<add> * @param mixed data
<add> * @return void
<ide> */
<ide> ls.set = setItem;
<ide>
<ide> /**
<ide> * Remove the data for given name
<del> * @name remove
<del> * @type method
<del> * @access public
<del> * @param string name
<del> * @return bool success
<del> */
<del> ls.remove = drop;
<add> * @name remove
<add> * @type method
<add> * @access public
<add> * @param string name
<add> * @return bool success
<add> */
<add> ls.remove = remove;
<add>
<add> /**
<add> * Get the amount of stored keys
<add> * @name length
<add> * @type method
<add> * @access public
<add> * @return number stored keys
<add> */
<add> ls.length = function()
<add> {
<add> return storage ? storage.length : false;
<add> };
<add>
<add> /**
<add> * Obtain all the keys
<add> * @name keys
<add> * @type method
<add> * @access public
<add> * @return Array keys
<add> */
<add> ls.keys = function()
<add> {
<add> var key = getAll(),
<add> list = [],
<add> p;
<add>
<add> for (p in key)
<add> list.push(p);
<add>
<add> return list;
<add> };
<add>
<add> /**
<add> * Flush all stored items
<add> * @name flush
<add> * @type method
<add> * @access public
<add> * @return void
<add> */
<add> ls.flush = function()
<add> {
<add> var list = ls.keys(),
<add> i;
<add> for (i = 0; i < list.length; ++i)
<add> remove(list[i]);
<add> };
<ide> }
<ide>
<ide>
<ide> /**
<ide> * Canvas object, allowing for chainable access to canvas methods
<del> * @note available as konflux.canvas / kx.canvas
<del> * @TODO documentation
<add> * @module canvas
<add> * @note available as konflux.canvas / kx.canvas
<add> * @TODO documentation
<ide> */
<ide> function kxCanvas()
<ide> {
<ide> canvas.append = function(target, mixed)
<ide> {
<ide> if (typeof mixed === 'number')
<del> mixed = canvas.create(arguments[1], arguments[2]);
<add> mixed = canvas.create(mixed, arguments.length > 2 ? arguments[2] : mixed);
<ide>
<ide> if (mixed instanceof kxCanvasContext)
<ide> return mixed.append(target);
<ide>
<ide> /**
<ide> * Logo object, creates the konflux logo on canvas
<del> * @note available as konflux.logo / kx.logo
<del> * @TODO documentation
<add> * @module logo
<add> * @note available as konflux.logo / kx.logo
<add> * @TODO documentation
<ide> */
<ide> function kxLogo()
<ide> {
<ide> konflux.browser = new kxBrowser();
<ide> konflux.url = new kxURL();
<ide> konflux.style = new kxStyle();
<add> konflux.number = new kxNumber();
<ide> konflux.string = new kxString();
<ide> konflux.array = new kxArray();
<ide> konflux.event = new kxEvent(); |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.