lang
stringclasses 2
values | license
stringclasses 13
values | stderr
stringlengths 0
343
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 6
87.7k
| new_contents
stringlengths 0
6.23M
| new_file
stringlengths 3
311
| old_contents
stringlengths 0
6.23M
| message
stringlengths 6
9.1k
| old_file
stringlengths 3
311
| subject
stringlengths 0
4k
| git_diff
stringlengths 0
6.31M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | 66ea5b16a48e58d5cc14d99c7e7311a412432216 | 0 | spinnaker/clouddriver,ajordens/clouddriver,ajordens/clouddriver,ajordens/clouddriver,ajordens/clouddriver,spinnaker/clouddriver,spinnaker/clouddriver | /*
* Copyright 2020 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.spinnaker.clouddriver.aws.provider.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spectator.api.Registry;
import com.netflix.spinnaker.cats.agent.Agent;
import com.netflix.spinnaker.clouddriver.aws.AmazonCloudProvider;
import com.netflix.spinnaker.clouddriver.aws.agent.ReconcileClassicLinkSecurityGroupsAgent;
import com.netflix.spinnaker.clouddriver.aws.edda.EddaApiFactory;
import com.netflix.spinnaker.clouddriver.aws.provider.AwsCleanupProvider;
import com.netflix.spinnaker.clouddriver.aws.provider.AwsInfrastructureProvider;
import com.netflix.spinnaker.clouddriver.aws.provider.AwsProvider;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonApplicationLoadBalancerCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonCertificateCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonCloudFormationCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonElasticIpCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonInstanceTypeCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonKeyPairCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonLaunchTemplateCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonLoadBalancerCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonLoadBalancerInstanceStateCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonSecurityGroupCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonSubnetCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonVpcCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ClusterCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.EddaLoadBalancerCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ImageCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.InstanceCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.LaunchConfigCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ReservationReportCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ReservedInstancesCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.security.AmazonClientProvider;
import com.netflix.spinnaker.clouddriver.aws.security.EddaTimeoutConfig;
import com.netflix.spinnaker.clouddriver.aws.security.NetflixAmazonCredentials;
import com.netflix.spinnaker.clouddriver.security.AccountCredentialsRepository;
import com.netflix.spinnaker.clouddriver.security.ProviderUtils;
import com.netflix.spinnaker.config.AwsConfiguration;
import com.netflix.spinnaker.kork.dynamicconfig.DynamicConfigService;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.springframework.context.ApplicationContext;
/**
 * Helpers that build the caching {@link Agent}s for the AWS providers.
 *
 * <p>Each build* method consults {@code ProviderUtils.getScheduledAccounts(...)} and creates
 * agents only for accounts that are not already scheduled on the target provider, returning
 * just the newly created agents.
 */
public class ProviderHelpers {
  /**
   * Result of a build* call: the agents created by this invocation plus the region
   * bookkeeping set (the same mutable {@code Set} instance the caller passed in).
   */
  @Getter
  @RequiredArgsConstructor
  public static class BuildResult {
    // Only the agents created by this call; already-scheduled accounts contribute none.
    private final List<Agent> agents;
    // Caller-supplied set, mutated in place as regions are processed.
    private final Set<String> regionsToAdd;
  }

  /**
   * Builds the AWS infrastructure caching agents (elastic IPs, key pairs, security groups,
   * subnets, VPCs) for every region of the given account.
   *
   * <p>Side effect: {@code regions} is mutated in place. The instance-type agent is created
   * only when a region name is newly added to that set, i.e. once per region across all
   * accounts, while the remaining agents are created per account/region.
   */
  public static BuildResult buildAwsInfrastructureAgents(
      NetflixAmazonCredentials credentials,
      AwsInfrastructureProvider awsInfrastructureProvider,
      AccountCredentialsRepository accountCredentialsRepository,
      AmazonClientProvider amazonClientProvider,
      ObjectMapper amazonObjectMapper,
      Registry registry,
      EddaTimeoutConfig eddaTimeoutConfig,
      Set<String> regions) {
    Set<String> scheduledAccounts = ProviderUtils.getScheduledAccounts(awsInfrastructureProvider);
    List<Agent> newlyAddedAgents = new ArrayList<>();

    for (NetflixAmazonCredentials.AWSRegion region : credentials.getRegions()) {
      // Skip accounts whose agents are already scheduled on the provider.
      if (!scheduledAccounts.contains(credentials.getName())) {
        // Set.add returns true only on first insertion -> one instance-type agent per region.
        if (regions.add(region.getName())) {
          newlyAddedAgents.add(
              new AmazonInstanceTypeCachingAgent(region.getName(), accountCredentialsRepository));
        }

        newlyAddedAgents.add(
            new AmazonElasticIpCachingAgent(amazonClientProvider, credentials, region.getName()));
        newlyAddedAgents.add(
            new AmazonKeyPairCachingAgent(amazonClientProvider, credentials, region.getName()));
        newlyAddedAgents.add(
            new AmazonSecurityGroupCachingAgent(
                amazonClientProvider,
                credentials,
                region.getName(),
                amazonObjectMapper,
                registry,
                eddaTimeoutConfig));
        newlyAddedAgents.add(
            new AmazonSubnetCachingAgent(
                amazonClientProvider, credentials, region.getName(), amazonObjectMapper));
        newlyAddedAgents.add(
            new AmazonVpcCachingAgent(
                amazonClientProvider, credentials, region.getName(), amazonObjectMapper));
      }
    }

    return new BuildResult(newlyAddedAgents, regions);
  }

  /**
   * Builds the core AWS provider caching agents (clusters, launch configs, images, instances,
   * load balancers, reserved instances, certificates, plus feature-flagged agents) for every
   * region of the given account.
   *
   * <p>Private images are always indexed per account/region; public images are indexed only
   * once per region regardless of account. Side effect: {@code publicRegions} is mutated in
   * place and returned inside the {@link BuildResult}.
   */
  public static BuildResult buildAwsProviderAgents(
      NetflixAmazonCredentials credentials,
      AmazonClientProvider amazonClientProvider,
      ObjectMapper objectMapper,
      Registry registry,
      EddaTimeoutConfig eddaTimeoutConfig,
      AwsProvider awsProvider,
      AmazonCloudProvider amazonCloudProvider,
      DynamicConfigService dynamicConfigService,
      EddaApiFactory eddaApiFactory,
      ApplicationContext ctx,
      Set<String> publicRegions) {
    Set<String> scheduledAccounts = ProviderUtils.getScheduledAccounts(awsProvider);
    List<Agent> newlyAddedAgents = new ArrayList<>();

    for (NetflixAmazonCredentials.AWSRegion region : credentials.getRegions()) {
      if (!scheduledAccounts.contains(credentials.getName())) {
        newlyAddedAgents.add(
            new ClusterCachingAgent(
                amazonCloudProvider,
                amazonClientProvider,
                credentials,
                region.getName(),
                objectMapper,
                registry,
                eddaTimeoutConfig));
        newlyAddedAgents.add(
            new LaunchConfigCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));

        // always index private images per account/region
        newlyAddedAgents.add(
            new ImageCachingAgent(
                amazonClientProvider,
                credentials,
                region.getName(),
                objectMapper,
                registry,
                false,
                dynamicConfigService));

        if (!publicRegions.contains(region.getName())) {
          // only index public images once per region (regardless of account)
          publicRegions.add(region.getName());
          newlyAddedAgents.add(
              new ImageCachingAgent(
                  amazonClientProvider,
                  credentials,
                  region.getName(),
                  objectMapper,
                  registry,
                  true,
                  dynamicConfigService));
        }

        newlyAddedAgents.add(
            new InstanceCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));
        newlyAddedAgents.add(
            new AmazonLoadBalancerCachingAgent(
                amazonCloudProvider,
                amazonClientProvider,
                credentials,
                region.getName(),
                eddaApiFactory.createApi(credentials.getEdda(), region.getName()),
                objectMapper,
                registry));
        newlyAddedAgents.add(
            new AmazonApplicationLoadBalancerCachingAgent(
                amazonCloudProvider,
                amazonClientProvider,
                credentials,
                region.getName(),
                eddaApiFactory.createApi(credentials.getEdda(), region.getName()),
                objectMapper,
                registry,
                eddaTimeoutConfig));
        newlyAddedAgents.add(
            new ReservedInstancesCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));
        newlyAddedAgents.add(
            new AmazonCertificateCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));

        // CloudFormation caching is behind a dynamic config flag (off by default).
        if (dynamicConfigService.isEnabled("aws.features.cloud-formation", false)) {
          newlyAddedAgents.add(
              new AmazonCloudFormationCachingAgent(
                  amazonClientProvider, credentials, region.getName(), registry));
        }

        // Prefer the Edda-backed load balancer agent when Edda is enabled for the account
        // and the region is not explicitly disabled; otherwise poll AWS directly.
        if (credentials.getEddaEnabled()
            && !eddaTimeoutConfig.getDisabledRegions().contains(region.getName())) {
          newlyAddedAgents.add(
              new EddaLoadBalancerCachingAgent(
                  eddaApiFactory.createApi(credentials.getEdda(), region.getName()),
                  credentials,
                  region.getName(),
                  objectMapper));
        } else {
          newlyAddedAgents.add(
              new AmazonLoadBalancerInstanceStateCachingAgent(
                  amazonClientProvider, credentials, region.getName(), objectMapper, ctx));
        }

        // Launch template caching is also behind a dynamic config flag (off by default).
        if (dynamicConfigService.isEnabled("aws.features.launch-templates", false)) {
          newlyAddedAgents.add(
              new AmazonLaunchTemplateCachingAgent(
                  amazonClientProvider, credentials, region.getName(), objectMapper, registry));
        }
      }
    }

    return new BuildResult(newlyAddedAgents, publicRegions);
  }

  /**
   * Builds cleanup agents for the account: one classic-link reconciliation agent per region,
   * but only when the deploy defaults mark this account for classic-link reconciliation.
   */
  public static List<Agent> buildAwsCleanupAgents(
      NetflixAmazonCredentials credentials,
      AmazonClientProvider amazonClientProvider,
      AwsCleanupProvider awsCleanupProvider,
      AwsConfiguration.DeployDefaults deployDefaults) {
    Set<String> scheduledAccounts = ProviderUtils.getScheduledAccounts(awsCleanupProvider);
    List<Agent> newlyAddedAgents = new ArrayList<>();

    if (!scheduledAccounts.contains(credentials.getName())) {
      for (NetflixAmazonCredentials.AWSRegion region : credentials.getRegions()) {
        if (deployDefaults.isReconcileClassicLinkAccount(credentials)) {
          newlyAddedAgents.add(
              new ReconcileClassicLinkSecurityGroupsAgent(
                  amazonClientProvider, credentials, region.getName(), deployDefaults));
        }
      }
    }

    return newlyAddedAgents;
  }

  /**
   * Reconciles the account list held by the (single) {@link ReservationReportCachingAgent}
   * with {@code allAccounts}: accounts missing from {@code allAccounts} are removed and new
   * ones are added, mutating the agent's accounts collection in place. No-op when the
   * provider has no such agent.
   */
  public static void synchronizeReservationReportCachingAgentAccounts(
      AwsProvider awsProvider, Collection<NetflixAmazonCredentials> allAccounts) {
    ReservationReportCachingAgent reservationReportCachingAgent =
        awsProvider.getAgents().stream()
            .filter(agent -> agent instanceof ReservationReportCachingAgent)
            .map(ReservationReportCachingAgent.class::cast)
            .findFirst()
            .orElse(null);

    if (reservationReportCachingAgent != null) {
      Collection<NetflixAmazonCredentials> reservationReportAccounts =
          reservationReportCachingAgent.getAccounts();
      // Diff the current agent accounts against the desired set by account name.
      List<String> oldAccountNames =
          reservationReportAccounts.stream()
              .map(NetflixAmazonCredentials::getName)
              .collect(Collectors.toList());
      List<String> newAccountNames =
          allAccounts.stream().map(NetflixAmazonCredentials::getName).collect(Collectors.toList());
      List<String> accountNamesToDelete =
          oldAccountNames.stream()
              .filter(it -> !newAccountNames.contains(it))
              .collect(Collectors.toList());
      List<String> accountNamesToAdd =
          newAccountNames.stream()
              .filter(it -> !oldAccountNames.contains(it))
              .collect(Collectors.toList());

      for (String name : accountNamesToDelete) {
        reservationReportCachingAgent.getAccounts().removeIf(it -> it.getName().equals(name));
      }

      for (String name : accountNamesToAdd) {
        Optional<NetflixAmazonCredentials> accountToAdd =
            allAccounts.stream().filter(it -> it.getName().equals(name)).findFirst();
        accountToAdd.ifPresent(account -> reservationReportCachingAgent.getAccounts().add(account));
      }
    }
  }
}
| clouddriver-aws/src/main/groovy/com/netflix/spinnaker/clouddriver/aws/provider/config/ProviderHelpers.java | /*
* Copyright 2020 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.spinnaker.clouddriver.aws.provider.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spectator.api.Registry;
import com.netflix.spinnaker.cats.agent.Agent;
import com.netflix.spinnaker.clouddriver.aws.AmazonCloudProvider;
import com.netflix.spinnaker.clouddriver.aws.agent.ReconcileClassicLinkSecurityGroupsAgent;
import com.netflix.spinnaker.clouddriver.aws.edda.EddaApiFactory;
import com.netflix.spinnaker.clouddriver.aws.provider.AwsCleanupProvider;
import com.netflix.spinnaker.clouddriver.aws.provider.AwsInfrastructureProvider;
import com.netflix.spinnaker.clouddriver.aws.provider.AwsProvider;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonApplicationLoadBalancerCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonCertificateCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonCloudFormationCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonElasticIpCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonInstanceTypeCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonKeyPairCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonLaunchTemplateCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonLoadBalancerCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonLoadBalancerInstanceStateCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonSecurityGroupCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonSubnetCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.AmazonVpcCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ClusterCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.EddaLoadBalancerCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ImageCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.InstanceCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.LaunchConfigCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ReservationReportCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.provider.agent.ReservedInstancesCachingAgent;
import com.netflix.spinnaker.clouddriver.aws.security.AmazonClientProvider;
import com.netflix.spinnaker.clouddriver.aws.security.EddaTimeoutConfig;
import com.netflix.spinnaker.clouddriver.aws.security.NetflixAmazonCredentials;
import com.netflix.spinnaker.clouddriver.security.AccountCredentialsRepository;
import com.netflix.spinnaker.clouddriver.security.ProviderUtils;
import com.netflix.spinnaker.config.AwsConfiguration;
import com.netflix.spinnaker.kork.dynamicconfig.DynamicConfigService;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.springframework.context.ApplicationContext;
/**
 * Helpers that build the caching {@link Agent}s for the AWS providers.
 *
 * <p>Each build* method consults {@code ProviderUtils.getScheduledAccounts(...)} and creates
 * agents only for accounts that are not already scheduled on the target provider, returning
 * just the newly created agents.
 */
public class ProviderHelpers {
  /**
   * Result of a build* call: the agents created by this invocation plus the region
   * bookkeeping set (the same mutable {@code Set} instance the caller passed in).
   */
  @Getter
  @RequiredArgsConstructor
  public static class BuildResult {
    private final List<Agent> agents;
    private final Set<String> regionsToAdd;
  }

  /**
   * Builds the AWS infrastructure caching agents (elastic IPs, key pairs, security groups,
   * subnets, VPCs) for every region of the given account.
   *
   * <p>Side effect: {@code regions} is mutated in place; the instance-type agent is created
   * only once per region across accounts.
   */
  public static BuildResult buildAwsInfrastructureAgents(
      NetflixAmazonCredentials credentials,
      AwsInfrastructureProvider awsInfrastructureProvider,
      AccountCredentialsRepository accountCredentialsRepository,
      AmazonClientProvider amazonClientProvider,
      ObjectMapper amazonObjectMapper,
      Registry registry,
      EddaTimeoutConfig eddaTimeoutConfig,
      Set<String> regions) {
    Set<String> scheduledAccounts = ProviderUtils.getScheduledAccounts(awsInfrastructureProvider);
    List<Agent> newlyAddedAgents = new ArrayList<>();

    for (NetflixAmazonCredentials.AWSRegion region : credentials.getRegions()) {
      if (!scheduledAccounts.contains(credentials.getName())) {
        // Set.add returns true only on first insertion -> one instance-type agent per region.
        if (regions.add(region.getName())) {
          newlyAddedAgents.add(
              new AmazonInstanceTypeCachingAgent(region.getName(), accountCredentialsRepository));
        }

        newlyAddedAgents.add(
            new AmazonElasticIpCachingAgent(amazonClientProvider, credentials, region.getName()));
        newlyAddedAgents.add(
            new AmazonKeyPairCachingAgent(amazonClientProvider, credentials, region.getName()));
        newlyAddedAgents.add(
            new AmazonSecurityGroupCachingAgent(
                amazonClientProvider,
                credentials,
                region.getName(),
                amazonObjectMapper,
                registry,
                eddaTimeoutConfig));
        newlyAddedAgents.add(
            new AmazonSubnetCachingAgent(
                amazonClientProvider, credentials, region.getName(), amazonObjectMapper));
        newlyAddedAgents.add(
            new AmazonVpcCachingAgent(
                amazonClientProvider, credentials, region.getName(), amazonObjectMapper));
      }
    }

    return new BuildResult(newlyAddedAgents, regions);
  }

  /**
   * Builds the core AWS provider caching agents for every region of the given account.
   *
   * <p>FIX: private images must be indexed for EVERY account/region. The previous
   * implementation created a single ImageCachingAgent per region whose "public" flag was
   * true only for the first account to touch the region — so every subsequent account in a
   * shared region got no private-image agent at all. Now a private-image agent is always
   * created per account/region, and a public-image agent is created once per region.
   *
   * <p>Side effect: {@code publicRegions} is mutated in place and returned inside the
   * {@link BuildResult}.
   */
  public static BuildResult buildAwsProviderAgents(
      NetflixAmazonCredentials credentials,
      AmazonClientProvider amazonClientProvider,
      ObjectMapper objectMapper,
      Registry registry,
      EddaTimeoutConfig eddaTimeoutConfig,
      AwsProvider awsProvider,
      AmazonCloudProvider amazonCloudProvider,
      DynamicConfigService dynamicConfigService,
      EddaApiFactory eddaApiFactory,
      ApplicationContext ctx,
      Set<String> publicRegions) {
    Set<String> scheduledAccounts = ProviderUtils.getScheduledAccounts(awsProvider);
    List<Agent> newlyAddedAgents = new ArrayList<>();

    for (NetflixAmazonCredentials.AWSRegion region : credentials.getRegions()) {
      if (!scheduledAccounts.contains(credentials.getName())) {
        newlyAddedAgents.add(
            new ClusterCachingAgent(
                amazonCloudProvider,
                amazonClientProvider,
                credentials,
                region.getName(),
                objectMapper,
                registry,
                eddaTimeoutConfig));
        newlyAddedAgents.add(
            new LaunchConfigCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));

        // always index private images per account/region
        newlyAddedAgents.add(
            new ImageCachingAgent(
                amazonClientProvider,
                credentials,
                region.getName(),
                objectMapper,
                registry,
                false,
                dynamicConfigService));

        if (!publicRegions.contains(region.getName())) {
          // only index public images once per region (regardless of account)
          publicRegions.add(region.getName());
          newlyAddedAgents.add(
              new ImageCachingAgent(
                  amazonClientProvider,
                  credentials,
                  region.getName(),
                  objectMapper,
                  registry,
                  true,
                  dynamicConfigService));
        }

        newlyAddedAgents.add(
            new InstanceCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));
        newlyAddedAgents.add(
            new AmazonLoadBalancerCachingAgent(
                amazonCloudProvider,
                amazonClientProvider,
                credentials,
                region.getName(),
                eddaApiFactory.createApi(credentials.getEdda(), region.getName()),
                objectMapper,
                registry));
        newlyAddedAgents.add(
            new AmazonApplicationLoadBalancerCachingAgent(
                amazonCloudProvider,
                amazonClientProvider,
                credentials,
                region.getName(),
                eddaApiFactory.createApi(credentials.getEdda(), region.getName()),
                objectMapper,
                registry,
                eddaTimeoutConfig));
        newlyAddedAgents.add(
            new ReservedInstancesCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));
        newlyAddedAgents.add(
            new AmazonCertificateCachingAgent(
                amazonClientProvider, credentials, region.getName(), objectMapper, registry));

        // CloudFormation caching is behind a dynamic config flag (off by default).
        if (dynamicConfigService.isEnabled("aws.features.cloud-formation", false)) {
          newlyAddedAgents.add(
              new AmazonCloudFormationCachingAgent(
                  amazonClientProvider, credentials, region.getName(), registry));
        }

        // Prefer the Edda-backed load balancer agent when Edda is enabled for the account
        // and the region is not explicitly disabled; otherwise poll AWS directly.
        if (credentials.getEddaEnabled()
            && !eddaTimeoutConfig.getDisabledRegions().contains(region.getName())) {
          newlyAddedAgents.add(
              new EddaLoadBalancerCachingAgent(
                  eddaApiFactory.createApi(credentials.getEdda(), region.getName()),
                  credentials,
                  region.getName(),
                  objectMapper));
        } else {
          newlyAddedAgents.add(
              new AmazonLoadBalancerInstanceStateCachingAgent(
                  amazonClientProvider, credentials, region.getName(), objectMapper, ctx));
        }

        // Launch template caching is also behind a dynamic config flag (off by default).
        if (dynamicConfigService.isEnabled("aws.features.launch-templates", false)) {
          newlyAddedAgents.add(
              new AmazonLaunchTemplateCachingAgent(
                  amazonClientProvider, credentials, region.getName(), objectMapper, registry));
        }
      }
    }

    return new BuildResult(newlyAddedAgents, publicRegions);
  }

  /**
   * Builds cleanup agents for the account: one classic-link reconciliation agent per region,
   * but only when the deploy defaults mark this account for classic-link reconciliation.
   */
  public static List<Agent> buildAwsCleanupAgents(
      NetflixAmazonCredentials credentials,
      AmazonClientProvider amazonClientProvider,
      AwsCleanupProvider awsCleanupProvider,
      AwsConfiguration.DeployDefaults deployDefaults) {
    Set<String> scheduledAccounts = ProviderUtils.getScheduledAccounts(awsCleanupProvider);
    List<Agent> newlyAddedAgents = new ArrayList<>();

    if (!scheduledAccounts.contains(credentials.getName())) {
      for (NetflixAmazonCredentials.AWSRegion region : credentials.getRegions()) {
        if (deployDefaults.isReconcileClassicLinkAccount(credentials)) {
          newlyAddedAgents.add(
              new ReconcileClassicLinkSecurityGroupsAgent(
                  amazonClientProvider, credentials, region.getName(), deployDefaults));
        }
      }
    }

    return newlyAddedAgents;
  }

  /**
   * Reconciles the account list held by the (single) {@link ReservationReportCachingAgent}
   * with {@code allAccounts}: stale accounts are removed and new ones added, mutating the
   * agent's accounts collection in place. No-op when the provider has no such agent.
   */
  public static void synchronizeReservationReportCachingAgentAccounts(
      AwsProvider awsProvider, Collection<NetflixAmazonCredentials> allAccounts) {
    ReservationReportCachingAgent reservationReportCachingAgent =
        awsProvider.getAgents().stream()
            .filter(agent -> agent instanceof ReservationReportCachingAgent)
            .map(ReservationReportCachingAgent.class::cast)
            .findFirst()
            .orElse(null);

    if (reservationReportCachingAgent != null) {
      Collection<NetflixAmazonCredentials> reservationReportAccounts =
          reservationReportCachingAgent.getAccounts();
      // Diff the current agent accounts against the desired set by account name.
      List<String> oldAccountNames =
          reservationReportAccounts.stream()
              .map(NetflixAmazonCredentials::getName)
              .collect(Collectors.toList());
      List<String> newAccountNames =
          allAccounts.stream().map(NetflixAmazonCredentials::getName).collect(Collectors.toList());
      List<String> accountNamesToDelete =
          oldAccountNames.stream()
              .filter(it -> !newAccountNames.contains(it))
              .collect(Collectors.toList());
      List<String> accountNamesToAdd =
          newAccountNames.stream()
              .filter(it -> !oldAccountNames.contains(it))
              .collect(Collectors.toList());

      for (String name : accountNamesToDelete) {
        reservationReportCachingAgent.getAccounts().removeIf(it -> it.getName().equals(name));
      }

      for (String name : accountNamesToAdd) {
        Optional<NetflixAmazonCredentials> accountToAdd =
            allAccounts.stream().filter(it -> it.getName().equals(name)).findFirst();
        accountToAdd.ifPresent(account -> reservationReportCachingAgent.getAccounts().add(account));
      }
    }
  }
}
| fix(aws): Always index private images for an account (#5036)
Corrects a regression from spinnaker/clouddriver#5004 where we did not consistently
index private images across all accounts. | clouddriver-aws/src/main/groovy/com/netflix/spinnaker/clouddriver/aws/provider/config/ProviderHelpers.java | fix(aws): Always index private images for an account (#5036) | <ide><path>louddriver-aws/src/main/groovy/com/netflix/spinnaker/clouddriver/aws/provider/config/ProviderHelpers.java
<ide> newlyAddedAgents.add(
<ide> new LaunchConfigCachingAgent(
<ide> amazonClientProvider, credentials, region.getName(), objectMapper, registry));
<del> boolean publicImages = false;
<add>
<add> // always index private images per account/region
<add> newlyAddedAgents.add(
<add> new ImageCachingAgent(
<add> amazonClientProvider,
<add> credentials,
<add> region.getName(),
<add> objectMapper,
<add> registry,
<add> false,
<add> dynamicConfigService));
<add>
<ide> if (!publicRegions.contains(region.getName())) {
<del> publicImages = true;
<add> // only index public images once per region (regardless of account)
<ide> publicRegions.add(region.getName());
<del> }
<del> newlyAddedAgents.add(
<del> new ImageCachingAgent(
<del> amazonClientProvider,
<del> credentials,
<del> region.getName(),
<del> objectMapper,
<del> registry,
<del> publicImages,
<del> dynamicConfigService));
<add> newlyAddedAgents.add(
<add> new ImageCachingAgent(
<add> amazonClientProvider,
<add> credentials,
<add> region.getName(),
<add> objectMapper,
<add> registry,
<add> true,
<add> dynamicConfigService));
<add> }
<add>
<ide> newlyAddedAgents.add(
<ide> new InstanceCachingAgent(
<ide> amazonClientProvider, credentials, region.getName(), objectMapper, registry)); |
|
Java | apache-2.0 | 0b976a226ecd617e023a38b63f03c072303f3ab3 | 0 | oxmcvusd/lemon,mosoft521/lemon,izerui/lemon,yinhe402/lemon,lilitao/lemon,NichenLg/lemon,izerui/lemon,dazhi2010/lemon,AWendy/lemon,zp994188707/lemon-OA,hutea/lemon,langhsu/lemon,mosoft521/lemon,07033320a/lemon,xingstarx/lemon,glpenghui/lemon,wzguo/lemon,yinhe402/lemon,glpenghui/lemon,mlc0202/lemon,zp994188707/lemon-OA,xuhuisheng/lemon,wzguo/lemon,vigosser/lemon,hutea/lemon,yujingu828/lemon,xingstarx/lemon,yujingu828/lemon,vigosser/lemon,AWendy/lemon,izerui/lemon,zeisschoice/lemon,JakFlew/lemon,langhsu/lemon,JeffLi1993/lemon,callmeyan/lemon,zeisschoice/lemon,lilitao/lemon,xuhuisheng/lemon,xingstarx/lemon,07033320a/lemon,JakFlew/lemon,FuYung/lemon,lilitao/lemon,mosoft521/lemon,oxmcvusd/lemon,yujingu828/lemon,dazhi2010/lemon,JeffLi1993/lemon,dazhi2010/lemon,callmeyan/lemon,FuYung/lemon,hesling/lemon,yinhe402/lemon,mlc0202/lemon,JeffLi1993/lemon,glpenghui/lemon,NichenLg/lemon,wzguo/lemon,JakFlew/lemon,oxmcvusd/lemon,xuhuisheng/lemon,callmeyan/lemon,hutea/lemon,NichenLg/lemon,FuYung/lemon,vigosser/lemon,wufabeishang/lemon,wufabeishang/lemon,07033320a/lemon,zp994188707/lemon-OA,wufabeishang/lemon,mlc0202/lemon,hesling/lemon,AWendy/lemon,zeisschoice/lemon,hesling/lemon,langhsu/lemon | package com.mossle.form.web.form;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.mossle.bpm.FormInfo;
import com.mossle.bpm.cmd.CompleteTaskWithCommentCmd;
import com.mossle.bpm.cmd.FindStartFormCmd;
import com.mossle.bpm.cmd.FindTaskDefinitionsCmd;
import com.mossle.bpm.persistence.domain.BpmProcess;
import com.mossle.bpm.persistence.domain.BpmTaskConf;
import com.mossle.bpm.persistence.manager.BpmProcessManager;
import com.mossle.bpm.persistence.manager.BpmTaskConfManager;
import com.mossle.core.mapper.JsonMapper;
import com.mossle.core.struts2.BaseAction;
import com.mossle.form.domain.FormTemplate;
import com.mossle.form.keyvalue.KeyValue;
import com.mossle.form.keyvalue.Prop;
import com.mossle.form.keyvalue.Record;
import com.mossle.form.keyvalue.RecordBuilder;
import com.mossle.form.manager.FormTemplateManager;
import com.mossle.security.util.SpringSecurityUtils;
import org.activiti.engine.FormService;
import org.activiti.engine.IdentityService;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.TaskService;
import org.activiti.engine.impl.task.TaskDefinition;
import org.activiti.engine.repository.ProcessDefinition;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.apache.struts2.ServletActionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* 电子表单与流程集成的地方.
*
* @author Lingo
*/
public class FormAction extends BaseAction {
    private static Logger logger = LoggerFactory.getLogger(FormAction.class);

    // Draft/record status values stored via RecordBuilder (see saveDraft/startProcessInstance).
    public static final int STATUS_DRAFT_PROCESS = 0;
    public static final int STATUS_DRAFT_TASK = 1;
    public static final int STATUS_RUNNING = 2;

    // Injected collaborators.
    private ProcessEngine processEngine;
    private BpmProcessManager bpmProcessManager;
    private BpmTaskConfManager bpmTaskConfManager;

    // Request parameters bound by Struts2.
    private String businessKey;
    private String processDefinitionId;
    private String processDefinitionKey;
    private int processDefinitionVersion;
    private String taskId;
    private Long id;

    private FormTemplateManager formTemplateManager;
    private JsonMapper jsonMapper = new JsonMapper();

    // JSON payload rendered to the page; defaults to an empty object.
    private String json = "{}";

    // Results exposed to the view.
    private FormInfo formInfo;
    private FormTemplate formTemplate;
    private List<TaskDefinition> taskDefinitions;

    // Task assignee configuration submitted from the taskConf page (parallel lists).
    private List<String> taskDefinitionKeys;
    private List<String> taskAssignees;

    private KeyValue keyValue;
    private List<Record> records;
    private String status;
/**
 * Render the form template identified by {@code id} as JSON for the page.
 *
 * @todo move this into a rest endpoint?
 */
public String loadForm() throws Exception {
    // Fetch the template and serialize it directly into the view payload.
    json = jsonMapper.toJson(formTemplateManager.get(id));

    return "loadForm";
}
/**
 * Save the submitted request parameters as a draft record.
 *
 * <p>Three cases: (1) a taskId is present -> update the task draft found via the task's
 * process instance id; (2) a businessKey is present -> update the existing process draft;
 * (3) neither -> create a brand-new process draft and remember its generated businessKey.
 */
public String saveDraft() throws Exception {
    Map<String, String[]> parameterMap = ServletActionContext.getRequest()
            .getParameterMap();

    if ((taskId != null) && (!"".equals(taskId))) {
        // Task draft: look the record up through the task's processInstanceId and update it.
        // TODO: this branch is definitely problematic (original author's note).
        Task task = processEngine.getTaskService().createTaskQuery()
                .taskId(taskId).singleResult();
        String processInstanceId = task.getProcessInstanceId();
        Record record = keyValue.findByRef(processInstanceId);
        record = new RecordBuilder().build(record, STATUS_DRAFT_TASK,
                parameterMap);
        keyValue.save(record);
        businessKey = record.getCode();
    } else if ((businessKey != null) && (!"".equals(businessKey))) {
        // Process draft: look the record up by businessKey and update it.
        Record record = keyValue.findByCode(businessKey);
        record = new RecordBuilder().build(record, STATUS_DRAFT_PROCESS,
                parameterMap);
        keyValue.save(record);
    } else {
        // First save: must be a process draft; initialize the record, then persist it
        // and remember the generated businessKey for subsequent saves.
        Record record = new RecordBuilder().build(processDefinitionId,
                STATUS_DRAFT_PROCESS, parameterMap);
        keyValue.save(record);
        businessKey = record.getCode();
    }

    return "saveDraft";
}
/**
 * List all process drafts (records with status {@link #STATUS_DRAFT_PROCESS}).
 */
public String listDrafts() throws Exception {
    records = keyValue.findByStatus(STATUS_DRAFT_PROCESS);

    return "listDrafts";
}
/**
 * Display the start form of a process.
 *
 * <p>Resolves the process definition id from key/version when not given, then looks up the
 * start form. When a form exists, any previously saved draft (by {@code businessKey}) is
 * loaded and its prop code/value pairs are serialized into {@code json} to pre-fill the
 * form; otherwise control is handed to {@link #taskConf()}.
 */
public String viewStartForm() throws Exception {
    if (processDefinitionId == null) {
        // Resolve the concrete definition id from key + version.
        this.processDefinitionId = processEngine.getRepositoryService()
                .createProcessDefinitionQuery()
                .processDefinitionKey(this.processDefinitionKey)
                .processDefinitionVersion(this.processDefinitionVersion)
                .singleResult().getId();
    }

    formInfo = processEngine.getManagementService().executeCommand(
            new FindStartFormCmd(processDefinitionId));

    if (formInfo.isFormExists()) {
        this.formTemplate = formTemplateManager.findUniqueBy("name",
                formInfo.getFormKey());

        Record record = keyValue.findByCode(businessKey);

        if (record != null) {
            // Pre-fill the form with the saved draft values.
            // (typed Map instead of the previous raw Map/HashMap; prop code and value
            // are used as Strings elsewhere in this class)
            Map<String, String> map = new HashMap<String, String>();

            for (Prop prop : record.getProps().values()) {
                map.put(prop.getCode(), prop.getValue());
            }

            json = jsonMapper.toJson(map);
        }

        return "viewStartForm";
    } else {
        // No start form configured: go straight to task assignee configuration.
        return taskConf();
    }
}
/**
 * Configure the assignee of each task.
 *
 * <p>When the BpmProcess for this definition has useTaskConf == 1, loads the task
 * definitions so the page can collect per-task assignees; otherwise skips straight to the
 * start-process confirmation view.
 */
public String taskConf() {
    ProcessDefinition processDefinition = processEngine
            .getRepositoryService().getProcessDefinition(
                    processDefinitionId);
    BpmProcess bpmProcess = bpmProcessManager
            .findUnique(
                    "from BpmProcess where processDefinitionKey=? and processDefinitionVersion=?",
                    processDefinition.getKey(),
                    processDefinition.getVersion());

    // Integer.valueOf(1).equals(...) is null-safe: useTaskConf may be null.
    if ((bpmProcess != null)
            && Integer.valueOf(1).equals(bpmProcess.getUseTaskConf())) {
        FindTaskDefinitionsCmd cmd = new FindTaskDefinitionsCmd(
                processDefinitionId);
        taskDefinitions = processEngine.getManagementService()
                .executeCommand(cmd);

        return "taskConf";
    } else {
        return "confirmStartProcessInstance";
    }
}
/**
 * Start a process instance.
 *
 * <p>Flow: save the draft first; detour to task-assignee configuration when required;
 * authenticate the current user with Activiti; convert saved draft values according to the
 * form field types; persist per-task assignee configuration when submitted; then start the
 * process and mark the record as running.
 */
public String startProcessInstance() throws Exception {
    // Save the draft first so the record exists and businessKey is populated.
    this.saveDraft();

    // If task configuration is required and we did not come from that page, show it first.
    if ((!"taskConf".equals(status)) && "taskConf".equals(taskConf())) {
        return "taskConf";
    }

    // Set the authenticated user so Activiti records the initiator.
    IdentityService identityService = processEngine.getIdentityService();
    identityService.setAuthenticatedUserId(SpringSecurityUtils
            .getCurrentUsername());

    // Fetch the start form information.
    formInfo = processEngine.getManagementService().executeCommand(
            new FindStartFormCmd(processDefinitionId));

    // Field name -> form field type, used to convert draft values below.
    Map<String, String> formTypeMap = new HashMap<String, String>();

    if (formInfo.isFormExists()) {
        this.formTemplate = formTemplateManager.findUniqueBy("name",
                formInfo.getFormKey());

        String content = formTemplate.getContent();
        logger.info("content : {}", content);

        // Template content is JSON with a "fields" array of {name, type} entries.
        Map map = jsonMapper.fromJson(content, Map.class);
        logger.info("map : {}", map);

        List<Map> list = (List<Map>) map.get("fields");
        logger.info("list : {}", list);

        for (Map item : list) {
            formTypeMap.put((String) item.get("name"),
                    (String) item.get("type"));
        }
    }

    Record record = keyValue.findByCode(businessKey);
    Map<String, Object> processParameters = new HashMap<String, Object>();

    // Load the draft values from the database and convert them by form type:
    // userPicker values are comma-separated and become a List; unknown fields are skipped.
    for (Prop prop : record.getProps().values()) {
        String key = prop.getCode();
        String value = prop.getValue();
        String formType = this.getFormType(formTypeMap, key);

        if ("userPicker".equals(formType)) {
            processParameters.put(key,
                    new ArrayList(Arrays.asList(value.split(","))));
        } else if (formType != null) {
            processParameters.put(key, value);
        }
    }

    if (taskDefinitionKeys != null) {
        // Came from the task-assignee configuration page: persist one BpmTaskConf per
        // task definition (taskDefinitionKeys and taskAssignees are parallel lists).
        int index = 0;

        for (String taskDefinitionKey : taskDefinitionKeys) {
            String taskAssignee = taskAssignees.get(index++);
            BpmTaskConf bpmTaskConf = new BpmTaskConf();
            bpmTaskConf.setBusinessKey(businessKey);
            bpmTaskConf.setTaskDefinitionKey(taskDefinitionKey);
            bpmTaskConf.setAssignee(taskAssignee);
            bpmTaskConfManager.save(bpmTaskConf);
        }
    }

    // Start the instance and flip the record from draft to running,
    // linking it to the new process instance id.
    ProcessInstance processInstance = processEngine.getRuntimeService()
            .startProcessInstanceById(processDefinitionId, businessKey,
                    processParameters);
    record = new RecordBuilder().build(record, STATUS_RUNNING,
            processInstance.getId());
    keyValue.save(record);

    return "startProcessInstance";
}
/**
* 工具方法,获取表单的类型.
*/
private String getFormType(Map<String, String> formTypeMap, String name) {
if (formTypeMap.containsKey(name)) {
return formTypeMap.get(name);
} else {
return null;
}
}
/**
* 显示任务表单.
*/
public String viewTaskForm() throws Exception {
TaskService taskService = processEngine.getTaskService();
Task task = taskService.createTaskQuery().taskId(taskId).singleResult();
FormService formService = processEngine.getFormService();
String taskFormKey = formService.getTaskFormKey(
task.getProcessDefinitionId(), task.getTaskDefinitionKey());
formTemplate = formTemplateManager.findUniqueBy("name", taskFormKey);
formInfo = new FormInfo();
formInfo.setTaskId(taskId);
if ((taskId != null) && (!"".equals(taskId))) {
// 如果是任务草稿,直接通过processInstanceId获得record,更新数据
// TODO: 分支肯定有问题
String processInstanceId = task.getProcessInstanceId();
Record record = keyValue.findByRef(processInstanceId);
if (record != null) {
Map map = new HashMap();
for (Prop prop : record.getProps().values()) {
map.put(prop.getCode(), prop.getValue());
}
json = jsonMapper.toJson(map);
}
}
return "viewTaskForm";
}
/**
* 完成任务.
*/
public String completeTask() throws Exception {
IdentityService identityService = processEngine.getIdentityService();
identityService.setAuthenticatedUserId(SpringSecurityUtils
.getCurrentUsername());
this.saveDraft();
TaskService taskService = processEngine.getTaskService();
Task task = taskService.createTaskQuery().taskId(taskId).singleResult();
FormService formService = processEngine.getFormService();
String taskFormKey = formService.getTaskFormKey(
task.getProcessDefinitionId(), task.getTaskDefinitionKey());
formInfo = new FormInfo();
formInfo.setTaskId(taskId);
formInfo.setFormKey(taskFormKey);
// 尝试根据表单里字段的类型,进行转换
Map<String, String> formTypeMap = new HashMap<String, String>();
if (formInfo.isFormExists()) {
this.formTemplate = formTemplateManager.findUniqueBy("name",
formInfo.getFormKey());
String content = formTemplate.getContent();
logger.info("content : {}", content);
Map map = jsonMapper.fromJson(content, Map.class);
logger.info("map : {}", map);
if (map != null) {
List<Map> list = (List<Map>) map.get("fields");
logger.info("list : {}", list);
for (Map item : list) {
formTypeMap.put((String) item.get("name"),
(String) item.get("type"));
}
}
}
String processInstanceId = task.getProcessInstanceId();
Record record = keyValue.findByRef(processInstanceId);
Map<String, Object> processParameters = new HashMap<String, Object>();
// 如果有表单,就从数据库获取数据
for (Prop prop : record.getProps().values()) {
String key = prop.getCode();
String value = prop.getValue();
String formType = this.getFormType(formTypeMap, key);
if ("userPicker".equals(formType)) {
processParameters.put(key,
new ArrayList(Arrays.asList(value.split(","))));
} else if (formType != null) {
processParameters.put(key, value);
}
}
processEngine.getManagementService()
.executeCommand(
new CompleteTaskWithCommentCmd(taskId,
processParameters, "完成"));
record = new RecordBuilder().build(record, STATUS_RUNNING,
processInstanceId);
keyValue.save(record);
return "completeTask";
}
// ~ ======================================================================
public void setProcessEngine(ProcessEngine processEngine) {
this.processEngine = processEngine;
}
public void setBpmProcessManager(BpmProcessManager bpmProcessManager) {
this.bpmProcessManager = bpmProcessManager;
}
public void setBpmTaskConfManager(BpmTaskConfManager bpmTaskConfManager) {
this.bpmTaskConfManager = bpmTaskConfManager;
}
// ~ ======================================================================
public String getBusinessKey() {
return businessKey;
}
public void setBusinessKey(String businessKey) {
this.businessKey = businessKey;
}
public String getProcessDefinitionId() {
return processDefinitionId;
}
public void setProcessDefinitionId(String processDefinitionId) {
this.processDefinitionId = processDefinitionId;
}
public void setProcessDefinitionKey(String processDefinitionKey) {
this.processDefinitionKey = processDefinitionKey;
}
public void setProcessDefinitionVersion(int processDefinitionVersion) {
this.processDefinitionVersion = processDefinitionVersion;
}
public void setTaskId(String taskId) {
this.taskId = taskId;
}
public void setId(Long id) {
this.id = id;
}
public void setFormTemplateManager(FormTemplateManager formTemplateManager) {
this.formTemplateManager = formTemplateManager;
}
public String getJson() {
return json;
}
public FormInfo getFormInfo() {
return formInfo;
}
public FormTemplate getFormTemplate() {
return formTemplate;
}
public List<TaskDefinition> getTaskDefinitions() {
return taskDefinitions;
}
public void setTaskDefinitionKeys(List<String> taskDefinitionKeys) {
this.taskDefinitionKeys = taskDefinitionKeys;
}
public void setTaskAssignees(List<String> taskAssignees) {
this.taskAssignees = taskAssignees;
}
public void setKeyValue(KeyValue keyValue) {
this.keyValue = keyValue;
}
public List<Record> getRecords() {
return records;
}
public void setStatus(String status) {
this.status = status;
}
}
| src/main/java/com/mossle/form/web/form/FormAction.java | package com.mossle.form.web.form;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.mossle.bpm.FormInfo;
import com.mossle.bpm.cmd.CompleteTaskWithCommentCmd;
import com.mossle.bpm.cmd.FindStartFormCmd;
import com.mossle.bpm.cmd.FindTaskDefinitionsCmd;
import com.mossle.bpm.persistence.domain.BpmProcess;
import com.mossle.bpm.persistence.domain.BpmTaskConf;
import com.mossle.bpm.persistence.manager.BpmProcessManager;
import com.mossle.bpm.persistence.manager.BpmTaskConfManager;
import com.mossle.core.mapper.JsonMapper;
import com.mossle.core.struts2.BaseAction;
import com.mossle.form.domain.FormTemplate;
import com.mossle.form.keyvalue.KeyValue;
import com.mossle.form.keyvalue.Prop;
import com.mossle.form.keyvalue.Record;
import com.mossle.form.keyvalue.RecordBuilder;
import com.mossle.form.manager.FormTemplateManager;
import com.mossle.security.util.SpringSecurityUtils;
import org.activiti.engine.FormService;
import org.activiti.engine.IdentityService;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.TaskService;
import org.activiti.engine.impl.task.TaskDefinition;
import org.activiti.engine.repository.ProcessDefinition;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.apache.struts2.ServletActionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* 电子表单与流程集成的地方.
*
* @author Lingo
*/
public class FormAction extends BaseAction {
private static Logger logger = LoggerFactory.getLogger(FormAction.class);
public static final int STATUS_DRAFT_PROCESS = 0;
public static final int STATUS_DRAFT_TASK = 1;
public static final int STATUS_RUNNING = 2;
private ProcessEngine processEngine;
private BpmProcessManager bpmProcessManager;
private BpmTaskConfManager bpmTaskConfManager;
private String businessKey;
private String processDefinitionId;
private String processDefinitionKey;
private int processDefinitionVersion;
private String taskId;
private Long id;
private FormTemplateManager formTemplateManager;
private JsonMapper jsonMapper = new JsonMapper();
private String json = "{}";
private FormInfo formInfo;
private FormTemplate formTemplate;
private List<TaskDefinition> taskDefinitions;
private List<String> taskDefinitionKeys;
private List<String> taskAssignees;
private KeyValue keyValue;
private List<Record> records;
private String status;
/**
* 根据id显示表单模板,把表单模板生成json,返回到页面显示.
*
* @todo: 放到rest里?
*/
public String loadForm() throws Exception {
FormTemplate theFormTemplate = formTemplateManager.get(id);
json = jsonMapper.toJson(theFormTemplate);
return "loadForm";
}
/**
* 保存草稿.
*/
public String saveDraft() throws Exception {
Map<String, String[]> parameterMap = ServletActionContext.getRequest()
.getParameterMap();
if ((taskId != null) && (!"".equals(taskId))) {
// 如果是任务草稿,直接通过processInstanceId获得record,更新数据
// TODO: 分支肯定有问题
Task task = processEngine.getTaskService().createTaskQuery()
.taskId(taskId).singleResult();
String processInstanceId = task.getProcessInstanceId();
Record record = keyValue.findByRef(processInstanceId);
record = new RecordBuilder().build(record, STATUS_DRAFT_TASK,
parameterMap);
keyValue.save(record);
businessKey = record.getCode();
} else if ((businessKey != null) && (!"".equals(businessKey))) {
// 如果是流程草稿,直接通过businessKey获得record,更新数据
Record record = keyValue.findByCode(businessKey);
record = new RecordBuilder().build(record, STATUS_DRAFT_PROCESS,
parameterMap);
keyValue.save(record);
} else {
// 如果是第一次保存草稿,肯定是流程草稿,先初始化record,再保存数据
Record record = new RecordBuilder().build(processDefinitionId,
STATUS_DRAFT_PROCESS, parameterMap);
keyValue.save(record);
businessKey = record.getCode();
}
return "saveDraft";
}
/**
* 列出所有草稿.
*/
public String listDrafts() throws Exception {
records = keyValue.findByStatus(STATUS_DRAFT_PROCESS);
return "listDrafts";
}
/**
* 显示启动流程的表单.
*/
public String viewStartForm() throws Exception {
if (processDefinitionId == null) {
this.processDefinitionId = processEngine.getRepositoryService()
.createProcessDefinitionQuery()
.processDefinitionKey(this.processDefinitionKey)
.processDefinitionVersion(this.processDefinitionVersion)
.singleResult().getId();
}
formInfo = processEngine.getManagementService().executeCommand(
new FindStartFormCmd(processDefinitionId));
if (formInfo.isFormExists()) {
this.formTemplate = formTemplateManager.findUniqueBy("name",
formInfo.getFormKey());
Record record = keyValue.findByCode(businessKey);
if (record != null) {
Map map = new HashMap();
for (Prop prop : record.getProps().values()) {
map.put(prop.getCode(), prop.getValue());
}
json = jsonMapper.toJson(map);
}
return "viewStartForm";
} else {
return taskConf();
}
}
/**
* 配置每个任务的参与人.
*/
public String taskConf() {
ProcessDefinition processDefinition = processEngine
.getRepositoryService().getProcessDefinition(
processDefinitionId);
BpmProcess bpmProcess = bpmProcessManager
.findUnique(
"from BpmProcess where processDefinitionKey=? and processDefinitionVersion=?",
processDefinition.getKey(),
processDefinition.getVersion());
if ((bpmProcess != null)
&& Integer.valueOf(1).equals(bpmProcess.getUseTaskConf())) {
FindTaskDefinitionsCmd cmd = new FindTaskDefinitionsCmd(
processDefinitionId);
taskDefinitions = processEngine.getManagementService()
.executeCommand(cmd);
return "taskConf";
} else {
return "confirmStartProcessInstance";
}
}
/**
* 发起流程.
*/
public String startProcessInstance() throws Exception {
// 先保存草稿
this.saveDraft();
if ((!"taskConf".equals(status)) && "taskConf".equals(taskConf())) {
return "taskConf";
}
// 先设置登录用户
IdentityService identityService = processEngine.getIdentityService();
identityService.setAuthenticatedUserId(SpringSecurityUtils
.getCurrentUsername());
// 获得form的信息
formInfo = processEngine.getManagementService().executeCommand(
new FindStartFormCmd(processDefinitionId));
// 尝试根据表单里字段的类型,进行转换
Map<String, String> formTypeMap = new HashMap<String, String>();
if (formInfo.isFormExists()) {
this.formTemplate = formTemplateManager.findUniqueBy("name",
formInfo.getFormKey());
String content = formTemplate.getContent();
logger.info("content : {}", content);
Map map = jsonMapper.fromJson(content, Map.class);
logger.info("map : {}", map);
List<Map> list = (List<Map>) map.get("fields");
logger.info("list : {}", list);
for (Map item : list) {
formTypeMap.put((String) item.get("name"),
(String) item.get("type"));
}
}
Record record = keyValue.findByCode(businessKey);
Map<String, Object> processParameters = new HashMap<String, Object>();
// 如果有表单,就从数据库获取数据
for (Prop prop : record.getProps().values()) {
String key = prop.getCode();
String value = prop.getValue();
String formType = this.getFormType(formTypeMap, key);
if ("userPicker".equals(formType)) {
processParameters.put(key,
new ArrayList(Arrays.asList(value.split(","))));
} else {
processParameters.put(key, value);
}
}
if (taskDefinitionKeys != null) {
// 如果是从配置任务负责人的页面过来,就保存TaskConf,再从草稿中得到数据启动流程
int index = 0;
for (String taskDefinitionKey : taskDefinitionKeys) {
String taskAssignee = taskAssignees.get(index++);
BpmTaskConf bpmTaskConf = new BpmTaskConf();
bpmTaskConf.setBusinessKey(businessKey);
bpmTaskConf.setTaskDefinitionKey(taskDefinitionKey);
bpmTaskConf.setAssignee(taskAssignee);
bpmTaskConfManager.save(bpmTaskConf);
}
}
ProcessInstance processInstance = processEngine.getRuntimeService()
.startProcessInstanceById(processDefinitionId, businessKey,
processParameters);
record = new RecordBuilder().build(record, STATUS_RUNNING,
processInstance.getId());
keyValue.save(record);
return "startProcessInstance";
}
/**
* 工具方法,获取表单的类型.
*/
private String getFormType(Map<String, String> formTypeMap, String name) {
if (formTypeMap.containsKey(name)) {
return formTypeMap.get(name);
} else {
return "textfield";
}
}
/**
* 显示任务表单.
*/
public String viewTaskForm() throws Exception {
TaskService taskService = processEngine.getTaskService();
Task task = taskService.createTaskQuery().taskId(taskId).singleResult();
FormService formService = processEngine.getFormService();
String taskFormKey = formService.getTaskFormKey(
task.getProcessDefinitionId(), task.getTaskDefinitionKey());
formTemplate = formTemplateManager.findUniqueBy("name", taskFormKey);
formInfo = new FormInfo();
formInfo.setTaskId(taskId);
if ((taskId != null) && (!"".equals(taskId))) {
// 如果是任务草稿,直接通过processInstanceId获得record,更新数据
// TODO: 分支肯定有问题
String processInstanceId = task.getProcessInstanceId();
Record record = keyValue.findByRef(processInstanceId);
if (record != null) {
Map map = new HashMap();
for (Prop prop : record.getProps().values()) {
map.put(prop.getCode(), prop.getValue());
}
json = jsonMapper.toJson(map);
}
}
return "viewTaskForm";
}
/**
* 完成任务.
*/
public String completeTask() throws Exception {
IdentityService identityService = processEngine.getIdentityService();
identityService.setAuthenticatedUserId(SpringSecurityUtils
.getCurrentUsername());
this.saveDraft();
TaskService taskService = processEngine.getTaskService();
Task task = taskService.createTaskQuery().taskId(taskId).singleResult();
FormService formService = processEngine.getFormService();
String taskFormKey = formService.getTaskFormKey(
task.getProcessDefinitionId(), task.getTaskDefinitionKey());
formInfo = new FormInfo();
formInfo.setTaskId(taskId);
formInfo.setFormKey(taskFormKey);
// 尝试根据表单里字段的类型,进行转换
Map<String, String> formTypeMap = new HashMap<String, String>();
if (formInfo.isFormExists()) {
this.formTemplate = formTemplateManager.findUniqueBy("name",
formInfo.getFormKey());
String content = formTemplate.getContent();
logger.info("content : {}", content);
Map map = jsonMapper.fromJson(content, Map.class);
logger.info("map : {}", map);
List<Map> list = (List<Map>) map.get("fields");
logger.info("list : {}", list);
for (Map item : list) {
formTypeMap.put((String) item.get("name"),
(String) item.get("type"));
}
}
String processInstanceId = task.getProcessInstanceId();
Record record = keyValue.findByRef(processInstanceId);
Map<String, Object> processParameters = new HashMap<String, Object>();
// 如果有表单,就从数据库获取数据
for (Prop prop : record.getProps().values()) {
String key = prop.getCode();
String value = prop.getValue();
String formType = this.getFormType(formTypeMap, key);
if ("userPicker".equals(formType)) {
processParameters.put(key,
new ArrayList(Arrays.asList(value.split(","))));
} else {
processParameters.put(key, value);
}
}
processEngine.getManagementService()
.executeCommand(
new CompleteTaskWithCommentCmd(taskId,
processParameters, "完成"));
record = new RecordBuilder().build(record, STATUS_RUNNING,
processInstanceId);
keyValue.save(record);
return "completeTask";
}
// ~ ======================================================================
public void setProcessEngine(ProcessEngine processEngine) {
this.processEngine = processEngine;
}
public void setBpmProcessManager(BpmProcessManager bpmProcessManager) {
this.bpmProcessManager = bpmProcessManager;
}
public void setBpmTaskConfManager(BpmTaskConfManager bpmTaskConfManager) {
this.bpmTaskConfManager = bpmTaskConfManager;
}
// ~ ======================================================================
public String getBusinessKey() {
return businessKey;
}
public void setBusinessKey(String businessKey) {
this.businessKey = businessKey;
}
public String getProcessDefinitionId() {
return processDefinitionId;
}
public void setProcessDefinitionId(String processDefinitionId) {
this.processDefinitionId = processDefinitionId;
}
public void setProcessDefinitionKey(String processDefinitionKey) {
this.processDefinitionKey = processDefinitionKey;
}
public void setProcessDefinitionVersion(int processDefinitionVersion) {
this.processDefinitionVersion = processDefinitionVersion;
}
public void setTaskId(String taskId) {
this.taskId = taskId;
}
public void setId(Long id) {
this.id = id;
}
public void setFormTemplateManager(FormTemplateManager formTemplateManager) {
this.formTemplateManager = formTemplateManager;
}
public String getJson() {
return json;
}
public FormInfo getFormInfo() {
return formInfo;
}
public FormTemplate getFormTemplate() {
return formTemplate;
}
public List<TaskDefinition> getTaskDefinitions() {
return taskDefinitions;
}
public void setTaskDefinitionKeys(List<String> taskDefinitionKeys) {
this.taskDefinitionKeys = taskDefinitionKeys;
}
public void setTaskAssignees(List<String> taskAssignees) {
this.taskAssignees = taskAssignees;
}
public void setKeyValue(KeyValue keyValue) {
this.keyValue = keyValue;
}
public List<Record> getRecords() {
return records;
}
public void setStatus(String status) {
this.status = status;
}
}
| #34 限制只把当前表单填写的数据设置到流程变量里,这样可以避免之前表单设置的变量被覆盖,覆盖时因为读取不到类型,导致转换失败
| src/main/java/com/mossle/form/web/form/FormAction.java | #34 限制只把当前表单填写的数据设置到流程变量里,这样可以避免之前表单设置的变量被覆盖,覆盖时因为读取不到类型,导致转换失败 | <ide><path>rc/main/java/com/mossle/form/web/form/FormAction.java
<ide> if ("userPicker".equals(formType)) {
<ide> processParameters.put(key,
<ide> new ArrayList(Arrays.asList(value.split(","))));
<del> } else {
<add> } else if (formType != null) {
<ide> processParameters.put(key, value);
<ide> }
<ide> }
<ide> if (formTypeMap.containsKey(name)) {
<ide> return formTypeMap.get(name);
<ide> } else {
<del> return "textfield";
<add> return null;
<ide> }
<ide> }
<ide>
<ide> Map map = jsonMapper.fromJson(content, Map.class);
<ide> logger.info("map : {}", map);
<ide>
<del> List<Map> list = (List<Map>) map.get("fields");
<del> logger.info("list : {}", list);
<del>
<del> for (Map item : list) {
<del> formTypeMap.put((String) item.get("name"),
<del> (String) item.get("type"));
<add> if (map != null) {
<add> List<Map> list = (List<Map>) map.get("fields");
<add> logger.info("list : {}", list);
<add>
<add> for (Map item : list) {
<add> formTypeMap.put((String) item.get("name"),
<add> (String) item.get("type"));
<add> }
<ide> }
<ide> }
<ide>
<ide> if ("userPicker".equals(formType)) {
<ide> processParameters.put(key,
<ide> new ArrayList(Arrays.asList(value.split(","))));
<del> } else {
<add> } else if (formType != null) {
<ide> processParameters.put(key, value);
<ide> }
<ide> } |
|
Java | apache-2.0 | b6e158bafea3f1ddecebeb44f8a9c36f85eb4938 | 0 | bayofmany/peapod,mohataher/peapod | /*
* Copyright 2015 Bay of Many
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This project is derived from code in the Tinkerpop project under the following license:
*
* Tinkerpop3
* http://www.apache.org/licenses/LICENSE-2.0
*/
package peapod;
import com.tinkerpop.gremlin.process.T;
import com.tinkerpop.gremlin.structure.Vertex;
import org.junit.Before;
import org.junit.Test;
import peapod.model.Person;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class FramedElementTest extends GraphTest {
private FramedGraph graph;
private Person person;
private Vertex v;
@Before
public void init() {
v = g.addVertex(T.label, "Person", "name", "alice");
graph = new FramedGraph(g, Person.class.getPackage());
person = graph.v(v.id());
}
@Test
public void testElement() throws Exception {
assertEquals(v, person.element());
}
@Test
public void testGraph() throws Exception {
assertEquals(graph, person.graph());
}
@Test
public void testId() throws Exception {
assertEquals(v.id(), person.id());
}
@Test
public void testRemove() throws Exception {
person.remove();
assertTrue(!g.V(Person.class).hasNext());
}
@Test
public void testEquals() {
assertEquals(graph.<Person>v(v.id()), graph.<Person>v(v.id()));
}
@Test
public void testHashCode() {
assertEquals(v.hashCode(), person.hashCode());
}
} | core/src/test/java/peapod/FramedElementTest.java | /*
* Copyright 2015 Bay of Many
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This project is derived from code in the Tinkerpop project under the following license:
*
* Tinkerpop3
* http://www.apache.org/licenses/LICENSE-2.0
*/
package peapod;
import com.tinkerpop.gremlin.process.T;
import com.tinkerpop.gremlin.structure.Vertex;
import org.junit.Before;
import org.junit.Test;
import peapod.model.Person;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class FramedElementTest extends GraphTest {
private FramedGraph graph;
private Person person;
private Vertex v;
@Before
public void init() {
v = g.addVertex(T.label, "Person", "name", "alice");
graph = new FramedGraph(g);
person = graph.v(v.id(), Person.class);
}
@Test
public void testElement() throws Exception {
assertEquals(v, person.element());
}
@Test
public void testGraph() throws Exception {
assertEquals(graph, person.graph());
}
@Test
public void testId() throws Exception {
assertEquals(v.id(), person.id());
}
@Test
public void testRemove() throws Exception {
person.remove();
assertTrue(!g.V(Person.class).hasNext());
}
@Test
public void testEquals() {
assertEquals(graph.v(v.id(), Person.class), graph.v(v.id(), Person.class));
}
@Test
public void testHashCode() {
assertEquals(v.hashCode(), person.hashCode());
}
} | refactored inheritance
| core/src/test/java/peapod/FramedElementTest.java | refactored inheritance | <ide><path>ore/src/test/java/peapod/FramedElementTest.java
<ide> @Before
<ide> public void init() {
<ide> v = g.addVertex(T.label, "Person", "name", "alice");
<del> graph = new FramedGraph(g);
<del> person = graph.v(v.id(), Person.class);
<add> graph = new FramedGraph(g, Person.class.getPackage());
<add> person = graph.v(v.id());
<ide> }
<ide>
<ide> @Test
<ide>
<ide> @Test
<ide> public void testEquals() {
<del> assertEquals(graph.v(v.id(), Person.class), graph.v(v.id(), Person.class));
<add> assertEquals(graph.<Person>v(v.id()), graph.<Person>v(v.id()));
<ide> }
<ide>
<ide> @Test |
|
Java | apache-2.0 | b02e82c0da8de9bd6571d41f44208ff695de30cc | 0 | subutai-io/base,subutai-io/Subutai,subutai-io/base,subutai-io/Subutai,subutai-io/Subutai,subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/Subutai,subutai-io/base | package org.safehaus.kiskis.mgmt.cli.commands;
import org.apache.felix.gogo.commands.Command;
import org.apache.karaf.shell.console.OsgiCommandSupport;
import org.safehaus.kiskis.mgmt.api.cassandra.Cassandra;
import org.safehaus.kiskis.mgmt.api.cassandra.Config;
import org.safehaus.kiskis.mgmt.api.tracker.Tracker;
import java.util.List;
/**
* Displays the last log entries
*/
@Command(scope = "cassandra", name = "list-clusters", description = "Gets the list of Cassandra clusters")
public class ListClustersCommand extends OsgiCommandSupport {
private static Cassandra cassandraManager;
private static Tracker tracker;
public Tracker getTracker() {
return tracker;
}
public void setTracker(Tracker tracker) {
ListClustersCommand.tracker = tracker;
}
public void setCassandraManager(Cassandra cassandraManager) {
ListClustersCommand.cassandraManager = cassandraManager;
}
public static Cassandra getCassandraManager() {
return cassandraManager;
}
protected Object doExecute() {
List<Config> list = cassandraManager.getClusters();
if (list.size() > 0) {
StringBuilder sb = new StringBuilder();
for (Config config : list) {
sb.append(config.getClusterName()).append("\n");
}
System.out.println(sb.toString());
} else System.out.println("No clusters found...");
return null;
}
}
| management/server/products/cassandra/cassandra-cli/src/main/java/org/safehaus/kiskis/mgmt/cli/commands/ListClustersCommand.java | package org.safehaus.kiskis.mgmt.cli.commands;
import org.apache.felix.gogo.commands.Command;
import org.apache.karaf.shell.console.OsgiCommandSupport;
import org.safehaus.kiskis.mgmt.api.cassandra.Cassandra;
import org.safehaus.kiskis.mgmt.api.cassandra.Config;
import org.safehaus.kiskis.mgmt.api.tracker.Tracker;
import java.util.List;
/**
* Displays the last log entries
*/
@Command(scope = "cassandra", name = "list-clusters", description = "mydescription")
public class ListClustersCommand extends OsgiCommandSupport {
private static Cassandra cassandraManager;
private static Tracker tracker;
public Tracker getTracker() {
return tracker;
}
public void setTracker(Tracker tracker) {
ListClustersCommand.tracker = tracker;
}
public void setCassandraManager(Cassandra cassandraManager) {
ListClustersCommand.cassandraManager = cassandraManager;
}
public static Cassandra getCassandraManager() {
return cassandraManager;
}
protected Object doExecute() {
List<Config> list = cassandraManager.getClusters();
if (list.size() > 0) {
StringBuilder sb = new StringBuilder();
for (Config config : list) {
sb.append(config.getClusterName()).append("\n");
}
System.out.println(sb.toString());
} else System.out.println("No clusters found...");
return null;
}
}
| Code refactor.
Former-commit-id: b630b26edb5d1f6c8e048f764d26f4201e7c163b | management/server/products/cassandra/cassandra-cli/src/main/java/org/safehaus/kiskis/mgmt/cli/commands/ListClustersCommand.java | Code refactor. | <ide><path>anagement/server/products/cassandra/cassandra-cli/src/main/java/org/safehaus/kiskis/mgmt/cli/commands/ListClustersCommand.java
<ide> /**
<ide> * Displays the last log entries
<ide> */
<del>@Command(scope = "cassandra", name = "list-clusters", description = "mydescription")
<add>@Command(scope = "cassandra", name = "list-clusters", description = "Gets the list of Cassandra clusters")
<ide> public class ListClustersCommand extends OsgiCommandSupport {
<ide>
<ide> private static Cassandra cassandraManager; |
|
Java | epl-1.0 | 6d30211514432cabdbcb616836a75918eecc747b | 0 | rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,Charling-Huang/birt,rrimmana/birt-1,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt | /***********************************************************************
* Copyright (c) 2004,2007 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
***********************************************************************/
package org.eclipse.birt.report.engine.layout.html;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Stack;
import java.util.logging.Logger;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.report.engine.api.IEngineTask;
import org.eclipse.birt.report.engine.api.impl.EngineTask;
import org.eclipse.birt.report.engine.content.IBandContent;
import org.eclipse.birt.report.engine.content.ICellContent;
import org.eclipse.birt.report.engine.content.IContent;
import org.eclipse.birt.report.engine.content.IReportContent;
import org.eclipse.birt.report.engine.content.IRowContent;
import org.eclipse.birt.report.engine.content.ITableBandContent;
import org.eclipse.birt.report.engine.content.ITableContent;
import org.eclipse.birt.report.engine.content.ITableGroupContent;
import org.eclipse.birt.report.engine.emitter.BufferedReportEmitter;
import org.eclipse.birt.report.engine.emitter.ContentEmitterAdapter;
import org.eclipse.birt.report.engine.emitter.ContentEmitterUtil;
import org.eclipse.birt.report.engine.emitter.IContentEmitter;
import org.eclipse.birt.report.engine.emitter.IEmitterServices;
import org.eclipse.birt.report.engine.executor.buffermgr.Cell;
import org.eclipse.birt.report.engine.executor.buffermgr.Row;
import org.eclipse.birt.report.engine.executor.buffermgr.TableContentLayout;
import org.eclipse.birt.report.engine.internal.content.wrap.CellContentWrapper;
import org.eclipse.birt.report.engine.ir.CellDesign;
import org.eclipse.birt.report.engine.ir.DimensionType;
import org.eclipse.birt.report.engine.ir.EngineIRConstants;
import org.eclipse.birt.report.engine.layout.LayoutUtil;
import org.eclipse.birt.report.engine.presentation.UnresolvedRowHint;
public class HTMLTableLayoutEmitter extends ContentEmitterAdapter
{
final static Logger logger = Logger.getLogger( HTMLTableLayoutEmitter.class
.getName( ) );
/**
* the emitter used to output the table content
*/
protected IContentEmitter emitter;
/**
* the current table layout
*/
protected TableContentLayout layout;
/**
* the cached start/end content events
*/
protected Stack layoutEvents;
/**
* emitter used to cache the content in current cell.
*/
protected IContentEmitter cellEmitter;
protected HTMLLayoutContext context;
/**
* the group level information used to resovle the drop cells.
*/
protected Stack groupStack = new Stack( );
protected HashMap<String, UnresolvedRowHint> hintMap = new HashMap<String, UnresolvedRowHint>();
protected boolean isFirst = true;
int nestTableCount = 0;
protected int lastRowId = -1;
public HTMLTableLayoutEmitter( IContentEmitter emitter,HTMLLayoutContext context )
{
this.emitter = emitter;
this.context = context;
}
/** Delegates the report-end event straight to the wrapped output emitter. */
public void end( IReportContent report ) throws BirtException
{
	emitter.end( report );
}

/** Output format is whatever the wrapped emitter produces. */
public String getOutputFormat( )
{
	return emitter.getOutputFormat( );
}

/** Delegates emitter initialization to the wrapped output emitter. */
public void initialize( IEmitterServices service ) throws BirtException
{
	emitter.initialize( service );
}

/** Delegates the report-start event straight to the wrapped output emitter. */
public void start( IReportContent report ) throws BirtException
{
	emitter.start( report );
}
/**
 * Returns the group level currently on top of the group stack, or -1 when
 * no table group is open.
 */
protected int getGroupLevel( )
{
	return groupStack.isEmpty( )
			? -1
			: ( (Integer) groupStack.peek( ) ).intValue( );
}
/**
 * Asks the page hint manager whether the given content has been fully laid
 * out; without a layout context the content is treated as finished.
 */
protected boolean isContentFinished( IContent content )
{
	return context == null
			|| context.getPageHintManager( ).getLayoutHint( content );
}
/**
 * A page break is only allowed when a layout context exists and permits it.
 */
protected boolean allowPageBreak( )
{
	return context != null && context.allowPageBreak( );
}
/**
 * Routes a content-start event either into the cell buffer (while a
 * drop/spanned cell is being cached) or to the real output emitter.
 */
public void startContent( IContent content ) throws BirtException
{
	IContentEmitter target = ( cellEmitter != null ) ? cellEmitter : emitter;
	ContentEmitterUtil.startContent( content, target );
}
/**
 * Routes a content-end event either into the cell buffer (while a
 * drop/spanned cell is being cached) or to the real output emitter.
 */
public void endContent( IContent content ) throws BirtException
{
	IContentEmitter target = ( cellEmitter != null ) ? cellEmitter : emitter;
	ContentEmitterUtil.endContent( content, target );
}
boolean hasDropCell = false;
public void resetLayout( )
{
layout.reset( );
layoutEvents.clear( );
hasDropCell = false;
}
public void initLayout( ITableContent table )
{
String keyString = context.getPageHintManager( ).getHintMapKey(table.getInstanceID( ).toUniqueString( ));
this.layout = new TableContentLayout( table,
getOutputFormat( ), context, keyString );
this.layoutEvents = new Stack( );
UnresolvedRowHint hint = null;
if(isFirst)
{
if(context!=null)
{
hint = context.getPageHintManager( ).getUnresolvedRowHint( keyString );
isFirst = false;
}
}
if(hint == null )
{
hint = hintMap.get( keyString );
}
layout.setUnresolvedRowHint( hint );
}
public boolean isLayoutStarted()
{
return layout!=null;
}
protected boolean hasDropCell( )
{
return hasDropCell;
}
/**
 * Encodes a group index and a drop type into the negative pseudo row span
 * used to mark drop cells: "detail" drops map to -10*(groupIndex+1) and
 * "all" drops to -10*(groupIndex+1)-1.
 */
protected int createDropID( int groupIndex, String dropType )
{
	int base = -10 * ( groupIndex + 1 );
	return "all".equals( dropType ) ? base - 1 : base; //$NON-NLS-1$
}
public void resolveCellsOfDrop( int groupLevel, boolean dropAll, boolean finished )
{
if ( hasDropCell )
{
if ( dropAll )
{
layout.resolveDropCells( createDropID( groupLevel, "all" ), finished ); //$NON-NLS-1$
}
else
{
layout.resolveDropCells( createDropID( groupLevel, "detail" ), finished ); //$NON-NLS-1$
}
hasDropCell = layout.hasDropCell( );
}
}
protected static class LayoutEvent
{
final static int START_GROUP = 0;
final static int START_BAND = 1;
final static int END_GROUP = 2;
final static int END_BAND = 3;
final static int ON_ROW = 4;
final static int ON_FIRST_DROP_CELL = 5;
LayoutEvent(int type, Object value )
{
this.eventType= type;
this.value = value;
}
int eventType;
Object value;
}
protected static class StartInfo
{
StartInfo(int rowId, int cellId)
{
this.rowId = rowId;
this.cellId = cellId;
}
int rowId;
int cellId;
}
/**
 * Pairs a cell content object with the output buffered while the cell was
 * emitted, so the cell can be replayed once its final row/column span is
 * resolved.
 */
public static class CellContent implements Cell.Content
{

	// the wrapped cell content
	protected ICellContent cell;

	// buffered output captured for this cell; null when nothing was buffered
	protected BufferedReportEmitter buffer;

	public CellContent( ICellContent cell, BufferedReportEmitter buffer )
	{
		this.cell = cell;
		this.buffer = buffer;
	}

	/** Returns the wrapped cell content. */
	public ICellContent getContent()
	{
		return cell;
	}

	/** A cell is empty when no output was buffered for it. */
	public boolean isEmpty( )
	{
		return buffer == null || buffer.isEmpty( );
	}

	/** Drops the cached output. */
	public void reset( )
	{
		buffer = null;
	}
}
/**
 * Replays all cached layout events (group/band boundaries and rows) to the
 * wrapped emitter, then resets the cached layout. Does nothing while an
 * unresolved drop cell remains, since row spans are not final yet.
 */
public void flush( ) throws BirtException
{
	if ( hasDropCell( ) )
	{
		// spans still unresolved; keep caching
		return;
	}
	Iterator iter = layoutEvents.iterator( );
	while ( iter.hasNext( ) )
	{
		LayoutEvent event = (LayoutEvent) iter.next( );
		switch ( event.eventType )
		{
			case LayoutEvent.START_GROUP :
			case LayoutEvent.START_BAND :
				ContentEmitterUtil.startContent( (IContent) event.value,
						emitter );
				break;
			case LayoutEvent.END_GROUP :
			case LayoutEvent.END_BAND :
				ContentEmitterUtil.endContent( (IContent) event.value,
						emitter );
				break;
			case LayoutEvent.ON_ROW :
				// replay the whole cached row, including its start event
				flushRow( ( (StartInfo) event.value ).rowId , 0, true );
				break;
			case LayoutEvent.ON_FIRST_DROP_CELL:
				// row start was already emitted; replay from the drop cell on
				flushRow( ( (StartInfo) event.value ).rowId,
						( (StartInfo) event.value ).cellId, false );
				break;
		}
	}
	resetLayout();
}
/**
 * Flushes one cached layout row to the wrapped emitter, replaying used
 * cells from their buffers and filling layout holes with empty cells.
 *
 * @param rowId index of the row in the cached layout
 * @param colId first layout column to flush
 * @param withStart true to also emit the row-start event
 * @throws BirtException forwarded from the wrapped emitter
 */
protected void flushRow( int rowId, int colId, boolean withStart )
		throws BirtException
{
	int colCount = layout.getColCount( );
	int columnId = layout.getColumnId( colId );
	Row row = layout.getRow( rowId );
	IRowContent rowContent = (IRowContent) row.getContent( );
	if ( withStart )
	{
		emitter.startRow( rowContent );
	}
	for ( int j = columnId; j < colCount; j++ )
	{
		Cell cell = row.getCell( j );
		if ( cell.getStatus( ) == Cell.CELL_USED )
		{
			// replay a cached cell with its resolved column and spans
			CellContent content = (CellContent) cell.getContent( );
			CellContentWrapper tempCell = new CellContentWrapper(
					content.cell );
			tempCell.setColumn( cell.getColId( ) );
			tempCell.setRowSpan( cell.getRowSpan( ) );
			tempCell.setColSpan( cell.getColSpan( ) );
			emitter.startCell( tempCell );
			if ( content.buffer != null )
			{
				content.buffer.flush( );
			}
			emitter.endCell( tempCell );
		}
		if ( cell.getStatus( ) == Cell.CELL_EMPTY )
		{
			// synthesize an empty placeholder cell at the current column j
			IReportContent report = rowContent.getReportContent( );
			ICellContent cellContent = report.createCellContent( );
			cellContent.setParent( rowContent );
			cellContent.setColumn( j );
			cellContent.setRowSpan( cell.getRowSpan( ) );
			cellContent.setColSpan( cell.getColSpan( ) );
			emitter.startCell( cellContent );
			emitter.endCell( cellContent );
		}
	}
	emitter.endRow( rowContent );
}
private boolean isNestTable( )
{
return nestTableCount > 1;
}
/**
 * Starts a table. For the outermost table a fresh content layout is
 * initialized and the wrapped table content is emitted; nested tables and
 * tables inside a buffered cell are passed through unchanged.
 * (Removed the dead local {@code UnresolvedRowHint hint}, which was
 * assigned null and never read.)
 */
public void startTable( ITableContent table ) throws BirtException
{
	nestTableCount++;
	if ( cellEmitter != null )
	{
		// inside a buffered cell: cache the whole nested table
		cellEmitter.startTable( table );
	}
	else
	{
		if ( !isNestTable( ) )
		{
			initLayout( table );
			emitter.startTable( layout.getWrappedTableContent( ) );
			this.lastRowId = -1;
		}
		else
		{
			emitter.startTable( table );
		}
	}
}
public void resolveAll( boolean finished )
{
layout.resolveDropCells( finished );
UnresolvedRowHint hint = layout.getUnresolvedRow( );
if ( hint != null )
{
hintMap.put( layout.getKeyString( ), hint );
if(context!=null )
{
context.getPageHintManager( ).addUnresolvedRowHint(layout.getKeyString( ), hint );
}
}
hasDropCell = layout.hasDropCell( );
}
public void createCell( int colId, int rowSpan, int colSpan,
Cell.Content cellContent )
{
layout.createCell( colId, rowSpan, colSpan, cellContent );
if ( rowSpan < 0 || rowSpan > 1)
{
hasDropCell = true;
}
}
public void endTable( ITableContent table ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endTable( table );
}
else
{
if ( !isNestTable( ) )
{
resolveAll( isContentFinished(table) );
flush( );
emitter.endTable( layout.getWrappedTableContent( ) );
}
else
{
emitter.endTable( table );
}
}
nestTableCount--;
}
public void startTableGroup( ITableGroupContent group )
throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startTableGroup( group );
}
else
{
if ( !isNestTable( ) )
{
int groupLevel = group.getGroupLevel( );
groupStack.push( Integer.valueOf( groupLevel ) );
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent(
LayoutEvent.START_GROUP, group ) );
return;
}
}
emitter.startTableGroup( group );
}
}
public void endTableGroup( ITableGroupContent group ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endTableGroup( group );
}
else
{
if ( !isNestTable( ) )
{
// if there is no group footer, we still need to do with the
// drop.
int groupLevel = getGroupLevel( );
resolveCellsOfDrop( groupLevel, false, isContentFinished( group ) );
resolveCellsOfDrop( groupLevel, true, isContentFinished( group ) );
assert !groupStack.isEmpty( );
groupStack.pop( );
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.END_GROUP,
group ) );
return;
}
flush( );
}
emitter.endTableGroup( group );
}
}
public void startTableBand( ITableBandContent band ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startTableBand( band );
}
else
{
if ( !isNestTable( ) )
{
if ( band.getBandType( ) == IBandContent.BAND_GROUP_FOOTER )
{
int groupLevel = getGroupLevel( );
resolveCellsOfDrop( groupLevel, false, true );
}
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.START_BAND,
band ) );
return;
}
flush( );
}
emitter.startTableBand( band );
}
}
public void endTableBand( ITableBandContent band ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endTableBand( band );
}
else
{
if ( !isNestTable( ) )
{
if(LayoutUtil.isRepeatableBand( band ))
{
lastRowId = -1;
}
if ( band.getBandType( ) == IBandContent.BAND_GROUP_FOOTER )
{
int groupLevel = getGroupLevel( );
resolveCellsOfDrop( groupLevel, true, isContentFinished(band) );
}
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.END_BAND,
band ) );
return;
}
flush( );
}
emitter.endTableBand( band );
}
}
public void startRow( IRowContent row ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startRow( row );
}
else
{
// For fixed layout reports and in run task, we need to emit the
// invisible content to PDF layout engine.
boolean hiddenMask = context.isFixedLayout( )
&& (Integer) context.getLayoutEngine( ).getOption(
EngineTask.TASK_TYPE ) == IEngineTask.TASK_RUN;
boolean isHidden = LayoutUtil.isHidden( row, emitter
.getOutputFormat( ), context.getOutputDisplayNone( ), hiddenMask );
if ( !isNestTable( ) )
{
int rowId = row.getRowID( );
if(lastRowId>=0 && rowId>lastRowId+1)
{
for(int i=lastRowId+1; i<rowId; i++)
{
IRowContent newRow = (IRowContent) row
.cloneContent( false );
newRow.setHeight( new DimensionType( 0,
EngineIRConstants.UNITS_IN ) );
newRow.setParent( row.getParent( ) );
newRow.setRowID( i );
startRow( newRow );
layout.setNeedFormalize( true );
endRow( newRow );
}
}
layout.createRow( row, isHidden );
if(!isHidden)
{
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.ON_ROW,
new StartInfo( layout.getRowCount( ) - 1, 0 ) ) );
return;
}
else if(layout.hasUnResolvedRow( ) && !LayoutUtil.isRepeatableRow( row ))
{
layoutEvents.push( new LayoutEvent( LayoutEvent.ON_ROW,
new StartInfo( layout.getRowCount( ) - 1, 0) ) );
hasDropCell = true;
return;
}
}
// TODO: here we need handle the hidden row and change the row
// id.
}
if(!isHidden)
{
emitter.startRow( row );
}
}
}
public void endRow( IRowContent row ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endRow( row );
}
else
{
if ( !isNestTable( ) )
{
layout.endRow(row);
lastRowId = row.getRowID( );
hasDropCell = layout.hasDropCell( );
if ( hasDropCell( ) )
{
if ( layout.exceedMaxCache( ) )
{
context.softRowBreak = true;
}
return;
}
if(layoutEvents.size( )>0)
{
flush( );
return;
}
}
// For fixed layout reports and in run task, we need to emit the
// invisible content to PDF layout engine.
boolean hiddenMask = context.isFixedLayout( )
&& (Integer) context.getLayoutEngine( ).getOption(
EngineTask.TASK_TYPE ) == IEngineTask.TASK_RUN;
boolean isHidden = LayoutUtil.isHidden( row, emitter
.getOutputFormat( ), context.getOutputDisplayNone( ), hiddenMask );
if(!isHidden)
{
emitter.endRow( row );
}
}
}
public void startCell( ICellContent cell ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startCell( cell );
}
else
{
if ( !isNestTable( ) )
{
BufferedReportEmitter buffer = null;
int colId = cell.getColumn( );
int colSpan = cell.getColSpan( );
int rowSpan = cell.getRowSpan( );
// the current executed cell is rowIndex, columnIndex
// get the span value of that cell.
if ( cell.getGenerateBy( ) instanceof CellDesign )
{
CellDesign cellDesign = (CellDesign) cell.getGenerateBy( );
if ( cellDesign != null )
{
String dropType = cellDesign.getDrop( );
if ( dropType != null && !"none".equals( dropType ) ) //$NON-NLS-1$
{
rowSpan = createDropID( getGroupLevel( ), dropType );
}
}
}
// the table has no cache, the cell is the first drop or spanned cell
if ( !hasDropCell( ) && (rowSpan < 0 || rowSpan > 1) )
{
layoutEvents.push( new LayoutEvent(
LayoutEvent.ON_FIRST_DROP_CELL, new StartInfo( layout
.getRowCount( ) - 1, colId ) ) );
}
if ( hasDropCell( ) || rowSpan < 0 || rowSpan > 1)
{
buffer = new BufferedReportEmitter( emitter );
cellEmitter = buffer;
}
// we need cache the cell
createCell( colId, rowSpan, colSpan, new CellContent( cell,
buffer ) );
if ( hasDropCell( ) )
{
return;
}
// TODO: changes the column id and output it.
emitter.startCell( layout.getWrappedCellContent( cell ) );
}
else
{
emitter.startCell( cell );
}
}
}
/**
 * Ends a cell. For nested tables the event is simply forwarded; for the
 * outermost table, closing a buffered cell just detaches the buffer
 * (replay happens later in flushRow), while an unbuffered cell is emitted
 * through the layout's wrapped cell content.
 */
public void endCell( ICellContent cell ) throws BirtException
{
	if ( isNestTable( ) )
	{
		if ( cellEmitter != null )
		{
			cellEmitter.endCell( cell );
		}
		else
		{
			emitter.endCell( cell );
		}
		return;
	}
	if ( cellEmitter != null )
	{
		// stop buffering; the cached content is replayed during flush
		cellEmitter = null;
		return;
	}
	emitter.endCell( layout.getWrappedCellContent( cell ) );
}
/** Exposes the wrapped output emitter. */
public IContentEmitter getInternalEmitter( )
{
	return emitter;
}
}
| engine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/html/HTMLTableLayoutEmitter.java | /***********************************************************************
* Copyright (c) 2004,2007 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
***********************************************************************/
package org.eclipse.birt.report.engine.layout.html;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Stack;
import java.util.logging.Logger;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.report.engine.api.IEngineTask;
import org.eclipse.birt.report.engine.api.impl.EngineTask;
import org.eclipse.birt.report.engine.content.IBandContent;
import org.eclipse.birt.report.engine.content.ICellContent;
import org.eclipse.birt.report.engine.content.IContent;
import org.eclipse.birt.report.engine.content.IReportContent;
import org.eclipse.birt.report.engine.content.IRowContent;
import org.eclipse.birt.report.engine.content.ITableBandContent;
import org.eclipse.birt.report.engine.content.ITableContent;
import org.eclipse.birt.report.engine.content.ITableGroupContent;
import org.eclipse.birt.report.engine.emitter.BufferedReportEmitter;
import org.eclipse.birt.report.engine.emitter.ContentEmitterAdapter;
import org.eclipse.birt.report.engine.emitter.ContentEmitterUtil;
import org.eclipse.birt.report.engine.emitter.IContentEmitter;
import org.eclipse.birt.report.engine.emitter.IEmitterServices;
import org.eclipse.birt.report.engine.executor.buffermgr.Cell;
import org.eclipse.birt.report.engine.executor.buffermgr.Row;
import org.eclipse.birt.report.engine.executor.buffermgr.TableContentLayout;
import org.eclipse.birt.report.engine.internal.content.wrap.CellContentWrapper;
import org.eclipse.birt.report.engine.ir.CellDesign;
import org.eclipse.birt.report.engine.ir.DimensionType;
import org.eclipse.birt.report.engine.ir.EngineIRConstants;
import org.eclipse.birt.report.engine.layout.LayoutUtil;
import org.eclipse.birt.report.engine.presentation.UnresolvedRowHint;
public class HTMLTableLayoutEmitter extends ContentEmitterAdapter
{
final static Logger logger = Logger.getLogger( HTMLTableLayoutEmitter.class
.getName( ) );
/**
* the emitter used to output the table content
*/
protected IContentEmitter emitter;
/**
* the current table layout
*/
protected TableContentLayout layout;
/**
* the cached start/end content events
*/
protected Stack layoutEvents;
/**
* emitter used to cache the content in current cell.
*/
protected IContentEmitter cellEmitter;
protected HTMLLayoutContext context;
/**
* the group level information used to resovle the drop cells.
*/
protected Stack groupStack = new Stack( );
protected HashMap<String, UnresolvedRowHint> hintMap = new HashMap<String, UnresolvedRowHint>();
protected boolean isFirst = true;
int nestTableCount = 0;
protected int lastRowId = -1;
public HTMLTableLayoutEmitter( IContentEmitter emitter,HTMLLayoutContext context )
{
this.emitter = emitter;
this.context = context;
}
public void end( IReportContent report ) throws BirtException
{
emitter.end( report );
}
public String getOutputFormat( )
{
return emitter.getOutputFormat( );
}
public void initialize( IEmitterServices service ) throws BirtException
{
emitter.initialize( service );
}
public void start( IReportContent report ) throws BirtException
{
emitter.start( report );
}
protected int getGroupLevel( )
{
if ( !groupStack.isEmpty( ) )
{
return ( (Integer) groupStack.peek( ) ).intValue( );
}
return -1;
}
protected boolean isContentFinished(IContent content)
{
if(context!=null)
{
return context.getPageHintManager( ).getLayoutHint( content );
}
return true;
}
protected boolean allowPageBreak()
{
if(context!=null)
{
return context.allowPageBreak( );
}
return false;
}
public void startContent( IContent content ) throws BirtException
{
if ( cellEmitter != null )
{
ContentEmitterUtil.startContent( content, cellEmitter );
}
else
{
ContentEmitterUtil.startContent( content, emitter );
}
}
public void endContent( IContent content ) throws BirtException
{
if ( cellEmitter != null )
{
ContentEmitterUtil.endContent( content, cellEmitter );
}
else
{
ContentEmitterUtil.endContent( content, emitter );
}
}
boolean hasDropCell = false;
public void resetLayout( )
{
layout.reset( );
layoutEvents.clear( );
hasDropCell = false;
}
public void initLayout( ITableContent table )
{
String keyString = context.getPageHintManager( ).getHintMapKey(table.getInstanceID( ).toUniqueString( ));
this.layout = new TableContentLayout( table,
getOutputFormat( ), context, keyString );
this.layoutEvents = new Stack( );
UnresolvedRowHint hint = null;
if(isFirst)
{
if(context!=null)
{
hint = context.getPageHintManager( ).getUnresolvedRowHint( keyString );
isFirst = false;
}
}
if(hint == null )
{
hint = hintMap.get( keyString );
}
layout.setUnresolvedRowHint( hint );
}
public boolean isLayoutStarted()
{
return layout!=null;
}
protected boolean hasDropCell( )
{
return hasDropCell;
}
protected int createDropID( int groupIndex, String dropType )
{
int dropId = -10 * ( groupIndex + 1 );
if ( "all".equals( dropType ) ) //$NON-NLS-1$
{
dropId--;
}
return dropId;
}
public void resolveCellsOfDrop( int groupLevel, boolean dropAll, boolean finished )
{
if ( hasDropCell )
{
if ( dropAll )
{
layout.resolveDropCells( createDropID( groupLevel, "all" ), finished ); //$NON-NLS-1$
}
else
{
layout.resolveDropCells( createDropID( groupLevel, "detail" ), finished ); //$NON-NLS-1$
}
hasDropCell = layout.hasDropCell( );
}
}
protected static class LayoutEvent
{
final static int START_GROUP = 0;
final static int START_BAND = 1;
final static int END_GROUP = 2;
final static int END_BAND = 3;
final static int ON_ROW = 4;
final static int ON_FIRST_DROP_CELL = 5;
LayoutEvent(int type, Object value )
{
this.eventType= type;
this.value = value;
}
int eventType;
Object value;
}
protected static class StartInfo
{
StartInfo(int rowId, int cellId)
{
this.rowId = rowId;
this.cellId = cellId;
}
int rowId;
int cellId;
}
public static class CellContent implements Cell.Content
{
protected ICellContent cell;
protected BufferedReportEmitter buffer;
public CellContent( ICellContent cell, BufferedReportEmitter buffer )
{
this.cell = cell;
this.buffer = buffer;
}
public ICellContent getContent()
{
return cell;
}
public boolean isEmpty( )
{
return buffer == null || buffer.isEmpty( );
}
public void reset( )
{
buffer = null;
}
}
public void flush( ) throws BirtException
{
if ( hasDropCell( ) )
{
return;
}
Iterator iter = layoutEvents.iterator( );
while ( iter.hasNext( ) )
{
LayoutEvent event = (LayoutEvent) iter.next( );
switch ( event.eventType )
{
case LayoutEvent.START_GROUP :
case LayoutEvent.START_BAND :
ContentEmitterUtil.startContent( (IContent) event.value,
emitter );
break;
case LayoutEvent.END_GROUP :
case LayoutEvent.END_BAND :
ContentEmitterUtil.endContent( (IContent) event.value,
emitter );
break;
case LayoutEvent.ON_ROW :
flushRow( ( (StartInfo) event.value ).rowId , 0, true );
break;
case LayoutEvent.ON_FIRST_DROP_CELL:
flushRow( ( (StartInfo) event.value ).rowId,
( (StartInfo) event.value ).cellId, false );
break;
}
}
resetLayout();
}
/**
 * Flushes one cached layout row to the wrapped emitter, replaying used
 * cells from their buffers and filling layout holes with empty cells.
 *
 * @param rowId index of the row in the cached layout
 * @param colId first layout column to flush
 * @param withStart true to also emit the row-start event
 * @throws BirtException forwarded from the wrapped emitter
 */
protected void flushRow( int rowId, int colId, boolean withStart )
		throws BirtException
{
	int colCount = layout.getColCount( );
	int columnId = layout.getColumnId( colId );
	Row row = layout.getRow( rowId );
	IRowContent rowContent = (IRowContent) row.getContent( );
	if ( withStart )
	{
		emitter.startRow( rowContent );
	}
	for ( int j = columnId; j < colCount; j++ )
	{
		Cell cell = row.getCell( j );
		if ( cell.getStatus( ) == Cell.CELL_USED )
		{
			// replay a cached cell with its resolved column and spans
			CellContent content = (CellContent) cell.getContent( );
			CellContentWrapper tempCell = new CellContentWrapper(
					content.cell );
			tempCell.setColumn( cell.getColId( ) );
			tempCell.setRowSpan( cell.getRowSpan( ) );
			tempCell.setColSpan( cell.getColSpan( ) );
			emitter.startCell( tempCell );
			if ( content.buffer != null )
			{
				content.buffer.flush( );
			}
			emitter.endCell( tempCell );
		}
		if ( cell.getStatus( ) == Cell.CELL_EMPTY )
		{
			// BUGFIX: the empty placeholder must sit at the current layout
			// column j; the previous cell.getColId()+1 shifted empty cells
			// one column right, making PDF output disagree with the viewer
			// (bug 56654 — matches the fix recorded in the commit diff).
			IReportContent report = rowContent.getReportContent( );
			ICellContent cellContent = report.createCellContent( );
			cellContent.setParent( rowContent );
			cellContent.setColumn( j );
			cellContent.setRowSpan( cell.getRowSpan( ) );
			cellContent.setColSpan( cell.getColSpan( ) );
			emitter.startCell( cellContent );
			emitter.endCell( cellContent );
		}
	}
	emitter.endRow( rowContent );
}
private boolean isNestTable( )
{
return nestTableCount > 1;
}
/**
 * Starts a table. For the outermost table a fresh content layout is
 * initialized and the wrapped table content is emitted; nested tables and
 * tables inside a buffered cell are passed through unchanged.
 * (Removed the dead local {@code UnresolvedRowHint hint}, which was
 * assigned null and never read.)
 */
public void startTable( ITableContent table ) throws BirtException
{
	nestTableCount++;
	if ( cellEmitter != null )
	{
		// inside a buffered cell: cache the whole nested table
		cellEmitter.startTable( table );
	}
	else
	{
		if ( !isNestTable( ) )
		{
			initLayout( table );
			emitter.startTable( layout.getWrappedTableContent( ) );
			this.lastRowId = -1;
		}
		else
		{
			emitter.startTable( table );
		}
	}
}
public void resolveAll( boolean finished )
{
layout.resolveDropCells( finished );
UnresolvedRowHint hint = layout.getUnresolvedRow( );
if ( hint != null )
{
hintMap.put( layout.getKeyString( ), hint );
if(context!=null )
{
context.getPageHintManager( ).addUnresolvedRowHint(layout.getKeyString( ), hint );
}
}
hasDropCell = layout.hasDropCell( );
}
public void createCell( int colId, int rowSpan, int colSpan,
Cell.Content cellContent )
{
layout.createCell( colId, rowSpan, colSpan, cellContent );
if ( rowSpan < 0 || rowSpan > 1)
{
hasDropCell = true;
}
}
public void endTable( ITableContent table ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endTable( table );
}
else
{
if ( !isNestTable( ) )
{
resolveAll( isContentFinished(table) );
flush( );
emitter.endTable( layout.getWrappedTableContent( ) );
}
else
{
emitter.endTable( table );
}
}
nestTableCount--;
}
public void startTableGroup( ITableGroupContent group )
throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startTableGroup( group );
}
else
{
if ( !isNestTable( ) )
{
int groupLevel = group.getGroupLevel( );
groupStack.push( Integer.valueOf( groupLevel ) );
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent(
LayoutEvent.START_GROUP, group ) );
return;
}
}
emitter.startTableGroup( group );
}
}
public void endTableGroup( ITableGroupContent group ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endTableGroup( group );
}
else
{
if ( !isNestTable( ) )
{
// if there is no group footer, we still need to do with the
// drop.
int groupLevel = getGroupLevel( );
resolveCellsOfDrop( groupLevel, false, isContentFinished( group ) );
resolveCellsOfDrop( groupLevel, true, isContentFinished( group ) );
assert !groupStack.isEmpty( );
groupStack.pop( );
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.END_GROUP,
group ) );
return;
}
flush( );
}
emitter.endTableGroup( group );
}
}
public void startTableBand( ITableBandContent band ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startTableBand( band );
}
else
{
if ( !isNestTable( ) )
{
if ( band.getBandType( ) == IBandContent.BAND_GROUP_FOOTER )
{
int groupLevel = getGroupLevel( );
resolveCellsOfDrop( groupLevel, false, true );
}
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.START_BAND,
band ) );
return;
}
flush( );
}
emitter.startTableBand( band );
}
}
public void endTableBand( ITableBandContent band ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endTableBand( band );
}
else
{
if ( !isNestTable( ) )
{
if(LayoutUtil.isRepeatableBand( band ))
{
lastRowId = -1;
}
if ( band.getBandType( ) == IBandContent.BAND_GROUP_FOOTER )
{
int groupLevel = getGroupLevel( );
resolveCellsOfDrop( groupLevel, true, isContentFinished(band) );
}
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.END_BAND,
band ) );
return;
}
flush( );
}
emitter.endTableBand( band );
}
}
public void startRow( IRowContent row ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startRow( row );
}
else
{
// For fixed layout reports and in run task, we need to emit the
// invisible content to PDF layout engine.
boolean hiddenMask = context.isFixedLayout( )
&& (Integer) context.getLayoutEngine( ).getOption(
EngineTask.TASK_TYPE ) == IEngineTask.TASK_RUN;
boolean isHidden = LayoutUtil.isHidden( row, emitter
.getOutputFormat( ), context.getOutputDisplayNone( ), hiddenMask );
if ( !isNestTable( ) )
{
int rowId = row.getRowID( );
if(lastRowId>=0 && rowId>lastRowId+1)
{
for(int i=lastRowId+1; i<rowId; i++)
{
IRowContent newRow = (IRowContent) row
.cloneContent( false );
newRow.setHeight( new DimensionType( 0,
EngineIRConstants.UNITS_IN ) );
newRow.setParent( row.getParent( ) );
newRow.setRowID( i );
startRow( newRow );
layout.setNeedFormalize( true );
endRow( newRow );
}
}
layout.createRow( row, isHidden );
if(!isHidden)
{
if ( hasDropCell( ) )
{
layoutEvents.push( new LayoutEvent( LayoutEvent.ON_ROW,
new StartInfo( layout.getRowCount( ) - 1, 0 ) ) );
return;
}
else if(layout.hasUnResolvedRow( ) && !LayoutUtil.isRepeatableRow( row ))
{
layoutEvents.push( new LayoutEvent( LayoutEvent.ON_ROW,
new StartInfo( layout.getRowCount( ) - 1, 0) ) );
hasDropCell = true;
return;
}
}
// TODO: here we need handle the hidden row and change the row
// id.
}
if(!isHidden)
{
emitter.startRow( row );
}
}
}
public void endRow( IRowContent row ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.endRow( row );
}
else
{
if ( !isNestTable( ) )
{
layout.endRow(row);
lastRowId = row.getRowID( );
hasDropCell = layout.hasDropCell( );
if ( hasDropCell( ) )
{
if ( layout.exceedMaxCache( ) )
{
context.softRowBreak = true;
}
return;
}
if(layoutEvents.size( )>0)
{
flush( );
return;
}
}
// For fixed layout reports and in run task, we need to emit the
// invisible content to PDF layout engine.
boolean hiddenMask = context.isFixedLayout( )
&& (Integer) context.getLayoutEngine( ).getOption(
EngineTask.TASK_TYPE ) == IEngineTask.TASK_RUN;
boolean isHidden = LayoutUtil.isHidden( row, emitter
.getOutputFormat( ), context.getOutputDisplayNone( ), hiddenMask );
if(!isHidden)
{
emitter.endRow( row );
}
}
}
public void startCell( ICellContent cell ) throws BirtException
{
if ( cellEmitter != null )
{
cellEmitter.startCell( cell );
}
else
{
if ( !isNestTable( ) )
{
BufferedReportEmitter buffer = null;
int colId = cell.getColumn( );
int colSpan = cell.getColSpan( );
int rowSpan = cell.getRowSpan( );
// the current executed cell is rowIndex, columnIndex
// get the span value of that cell.
if ( cell.getGenerateBy( ) instanceof CellDesign )
{
CellDesign cellDesign = (CellDesign) cell.getGenerateBy( );
if ( cellDesign != null )
{
String dropType = cellDesign.getDrop( );
if ( dropType != null && !"none".equals( dropType ) ) //$NON-NLS-1$
{
rowSpan = createDropID( getGroupLevel( ), dropType );
}
}
}
// the table has no cache, the cell is the first drop or spanned cell
if ( !hasDropCell( ) && (rowSpan < 0 || rowSpan > 1) )
{
layoutEvents.push( new LayoutEvent(
LayoutEvent.ON_FIRST_DROP_CELL, new StartInfo( layout
.getRowCount( ) - 1, colId ) ) );
}
if ( hasDropCell( ) || rowSpan < 0 || rowSpan > 1)
{
buffer = new BufferedReportEmitter( emitter );
cellEmitter = buffer;
}
// we need cache the cell
createCell( colId, rowSpan, colSpan, new CellContent( cell,
buffer ) );
if ( hasDropCell( ) )
{
return;
}
// TODO: changes the column id and output it.
emitter.startCell( layout.getWrappedCellContent( cell ) );
}
else
{
emitter.startCell( cell );
}
}
}
public void endCell( ICellContent cell ) throws BirtException
{
if ( !isNestTable( ) )
{
if ( cellEmitter != null )
{
cellEmitter = null;
return;
}
else
{
emitter.endCell( layout.getWrappedCellContent( cell ) );
}
}
else
{
if ( cellEmitter != null )
{
cellEmitter.endCell( cell );
}
else
{
emitter.endCell( cell );
}
}
}
public IContentEmitter getInternalEmitter( )
{
return emitter;
}
}
| Fixed Drop to detail makes PDF output not match Web Viewer 56654 | engine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/html/HTMLTableLayoutEmitter.java | Fixed Drop to detail makes PDF output not match Web Viewer 56654 | <ide><path>ngine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/html/HTMLTableLayoutEmitter.java
<ide> IReportContent report = rowContent.getReportContent( );
<ide> ICellContent cellContent = report.createCellContent( );
<ide> cellContent.setParent( rowContent );
<del> cellContent.setColumn( cell.getColId( ) + 1 );
<add> cellContent.setColumn( j );
<ide> cellContent.setRowSpan( cell.getRowSpan( ) );
<ide> cellContent.setColSpan( cell.getColSpan( ) );
<ide> emitter.startCell( cellContent ); |
|
Java | apache-2.0 | b85e0d6348e9bddc11839fc9e4614bda153a9311 | 0 | iservport/helianto,iservport/helianto | /* Copyright 2005 I Serv Consultoria Empresarial Ltda.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.helianto.inventory;
import java.math.BigDecimal;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.MappedSuperclass;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import org.helianto.core.Entity;
import org.helianto.core.TopLevelNumberedEntity;
import org.helianto.core.Unit;
import org.helianto.document.AbstractControl;
import org.helianto.process.DerivedProcessDocument;
import org.helianto.process.ProcessDocument;
/**
* Common properties to customer requirements, stock, purchase and production
* orders, deliveries and agreements.
*
* @author Mauricio Fernandes de Castro
*/
@MappedSuperclass
public abstract class AbstractRequirement extends AbstractControl {
/**
 * Internal factory method.
 *
 * <p>Instantiates the given requirement subclass through its public no-arg
 * constructor and initializes it with the owning entity, the requirement
 * date and the default FORECAST resolution.</p>
 *
 * @param clazz concrete requirement class to instantiate
 * @param entity owning entity
 * @param requirementDate requirement date
 * @return the initialized requirement instance
 * @throws IllegalArgumentException if the class cannot be instantiated
 */
public static <T extends AbstractRequirement> T internalRequirementFactory(Class<T> clazz, Entity entity, Date requirementDate) {
	T requirement;
	try {
		requirement = clazz.newInstance();
		requirement.setEntity(entity);
		requirement.setRequirementDate(requirementDate);
		requirement.setResolution(RequirementState.FORECAST.getValue());
		return requirement;
	} catch (Exception e) {
		// chain the original exception instead of discarding it, so the
		// real instantiation failure is preserved in the stack trace
		throw new IllegalArgumentException("Unable to instantiate "+clazz, e);
	}
}
private static final long serialVersionUID = 1L;
protected ProcessDocument document;
protected Date requirementDate;
protected BigDecimal requirementAmount = BigDecimal.ZERO;
protected int requirementSign;
/**
 * Default constructor: stamps the requirement with the current date and the
 * FORECAST resolution.
 *
 * NOTE(review): setRequirementDate/setResolution are overridable and are
 * invoked from the constructor, so an overriding subclass would see a
 * partially constructed instance — confirm this is intended.
 */
public AbstractRequirement() {
	super();
	setRequirementDate(new Date());
	setResolution(RequirementState.FORECAST.getValue());
}
/**
 * Owning process document.
 *
 * <p>
 * Usually a part or a process operation. An externally resolved document
 * (see {@link #resolveExternalProcessDocument()}) takes precedence over
 * the mapped association.
 * </p>
 */
@ManyToOne(cascade = {CascadeType.ALL})
@JoinColumn(name="documentId", nullable=true)
public ProcessDocument getProcessDocument() {
	ProcessDocument externalProcessDocument = resolveExternalProcessDocument();
	if (externalProcessDocument!=null) {
		return externalProcessDocument;
	}
	return this.document;
}
@Transient
protected ProcessDocument resolveExternalProcessDocument() {
return null;
}
@Transient
public String getDocCode() {
if (getProcessDocument()!=null) {
return getProcessDocument().getDocCode();
}
return "";
}
@Transient
public String getDocName() {
if (getProcessDocument()!=null) {
return getProcessDocument().getDocName();
}
return "";
}
@Transient
public String[] getColorChain() {
if (getProcessDocument()!=null && getProcessDocument() instanceof DerivedProcessDocument) {
return ((DerivedProcessDocument) getProcessDocument()).getProcessColorChain();
}
return new String[] {""};
}
public void setProcessDocument(ProcessDocument document) {
this.document = document;
}
/**
* Requirement date.
*/
@Temporal(TemporalType.TIMESTAMP)
public Date getRequirementDate() {
return this.requirementDate;
}
@Transient
public String getRequirementDateTimeAsString() {
return SimpleDateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, getLocale()).format(getRequirementDate());
}
@Transient
public String getRequirementDateAsString() {
return SimpleDateFormat.getDateInstance(DateFormat.SHORT, getLocale()).format(getRequirementDate());
}
@Transient
public String getRequirementTimeAsString() {
return SimpleDateFormat.getTimeInstance(DateFormat.SHORT, getLocale()).format(getRequirementDate());
}
public void setRequirementDate(Date requirementDate) {
this.requirementDate = requirementDate;
}
/**
* Requirement amount.
*/
@Column(precision=10, scale=4)
public BigDecimal getRequirementAmount() {
return this.requirementAmount;
}
public void setRequirementAmount(BigDecimal requirementAmount) {
this.requirementAmount = requirementAmount;
}
/**
* <<Transient>> Unit.
*/
@Transient
public Unit getUnit() {
return getProcessDocument().getUnit();
}
/**
* Resolution.
*/
public void setResolution(char resolution) {
super.setResolution(validateResolutionChange(resolution));
}
public void setResolution(RequirementState resolution) {
setResolution(resolution.getValue());
}
public char validateResolutionChange(char newResolution) {
return newResolution;
}
/**
* Signal input or output.
*/
@Column(precision=1, scale=0)
public int getRequirementSign() {
return this.requirementSign;
}
public void setRequirementSign(int requirementSign) {
this.requirementSign = requirementSign;
}
public void setRequirementSign(RequirementSign requirementSign) {
this.requirementSign = requirementSign.getValue();
}
public TopLevelNumberedEntity setKey(Entity entity, long internalNumber) {
this.setEntity(entity);
this.setInternalNumber(internalNumber);
return this;
}
/**
* equals
*/
@Override
public boolean equals(Object other) {
if ( !(other instanceof AbstractRequirement) ) return false;
return super.equals(other);
}
}
| helianto-inventory/src/main/java/org/helianto/inventory/AbstractRequirement.java | /* Copyright 2005 I Serv Consultoria Empresarial Ltda.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.helianto.inventory;
import java.math.BigDecimal;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.MappedSuperclass;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import org.helianto.core.Entity;
import org.helianto.core.TopLevelNumberedEntity;
import org.helianto.core.Unit;
import org.helianto.document.AbstractControl;
import org.helianto.process.DerivedProcessDocument;
import org.helianto.process.ProcessDocument;
/**
 * Common properties to customer requirements, stock, purchase and production
 * orders, deliveries and agreements.
 *
 * @author Mauricio Fernandes de Castro
 */
@MappedSuperclass
public abstract class AbstractRequirement extends AbstractControl {

    /**
     * Internal factory method.
     *
     * <p>
     * Creates a requirement of the given concrete type, owned by the given
     * entity, dated as requested and initially in the FORECAST state.
     * </p>
     *
     * @param clazz concrete requirement class; must have an accessible no-arg constructor
     * @param entity owning entity
     * @param requirementDate the requirement date
     */
    public static <T extends AbstractRequirement> T internalRequirementFactory(Class<T> clazz, Entity entity, Date requirementDate) {
        T requirement;
        try {
            // NOTE(review): Class#newInstance() is deprecated on newer JDKs, and the
            // caught exception below is rethrown without its cause — consider
            // clazz.getDeclaredConstructor().newInstance() and passing 'e' as cause.
            requirement = clazz.newInstance();
            requirement.setEntity(entity);
            requirement.setRequirementDate(requirementDate);
            requirement.setResolution(RequirementState.FORECAST.getValue());
            return requirement;
        } catch (Exception e) {
            throw new IllegalArgumentException("Unable to instantiate "+clazz);
        }
    }

    private static final long serialVersionUID = 1L;

    protected ProcessDocument document;
    protected Date requirementDate;
    protected BigDecimal requirementAmount = BigDecimal.ZERO;
    protected int requirementSign;

    /**
     * Constructor; defaults the requirement date to "now" and the resolution
     * to FORECAST.
     */
    public AbstractRequirement() {
        super();
        setRequirementDate(new Date());
        setResolution(RequirementState.FORECAST.getValue());
    }

    /**
     * Owning process document.
     *
     * <p>
     * Usually a part or a process operation. An externally resolved document
     * (see resolveExternalProcessDocument) takes precedence over the local one.
     * </p>
     */
    // only persist and merge cascade to the document; REMOVE is not cascaded here
    @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE})
    @JoinColumn(name="documentId", nullable=true)
    public ProcessDocument getProcessDocument() {
        ProcessDocument externalProcessDocument = resolveExternalProcessDocument();
        if (externalProcessDocument!=null) {
            return externalProcessDocument;
        }
        return this.document;
    }

    // subclass hook; returns null by default so the local document is used
    @Transient
    protected ProcessDocument resolveExternalProcessDocument() {
        return null;
    }

    // document code of the associated process document, or "" when none is set
    @Transient
    public String getDocCode() {
        if (getProcessDocument()!=null) {
            return getProcessDocument().getDocCode();
        }
        return "";
    }

    // document name of the associated process document, or "" when none is set
    @Transient
    public String getDocName() {
        if (getProcessDocument()!=null) {
            return getProcessDocument().getDocName();
        }
        return "";
    }

    // color chain of a DerivedProcessDocument, or a single empty string otherwise
    @Transient
    public String[] getColorChain() {
        if (getProcessDocument()!=null && getProcessDocument() instanceof DerivedProcessDocument) {
            return ((DerivedProcessDocument) getProcessDocument()).getProcessColorChain();
        }
        return new String[] {""};
    }

    public void setProcessDocument(ProcessDocument document) {
        this.document = document;
    }

    /**
     * Requirement date.
     */
    @Temporal(TemporalType.TIMESTAMP)
    public Date getRequirementDate() {
        return this.requirementDate;
    }

    // locale-aware short date-and-time rendering of the requirement date
    @Transient
    public String getRequirementDateTimeAsString() {
        return SimpleDateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, getLocale()).format(getRequirementDate());
    }

    // locale-aware short date rendering of the requirement date
    @Transient
    public String getRequirementDateAsString() {
        return SimpleDateFormat.getDateInstance(DateFormat.SHORT, getLocale()).format(getRequirementDate());
    }

    // locale-aware short time rendering of the requirement date
    @Transient
    public String getRequirementTimeAsString() {
        return SimpleDateFormat.getTimeInstance(DateFormat.SHORT, getLocale()).format(getRequirementDate());
    }

    public void setRequirementDate(Date requirementDate) {
        this.requirementDate = requirementDate;
    }

    /**
     * Requirement amount.
     */
    @Column(precision=10, scale=4)
    public BigDecimal getRequirementAmount() {
        return this.requirementAmount;
    }
    public void setRequirementAmount(BigDecimal requirementAmount) {
        this.requirementAmount = requirementAmount;
    }

    /**
     * <<Transient>> Unit.
     */
    // NOTE(review): throws NullPointerException when no process document is
    // set — TODO confirm callers always provide one.
    @Transient
    public Unit getUnit() {
        return getProcessDocument().getUnit();
    }

    /**
     * Resolution; new values pass through validateResolutionChange.
     */
    public void setResolution(char resolution) {
        super.setResolution(validateResolutionChange(resolution));
    }
    public void setResolution(RequirementState resolution) {
        setResolution(resolution.getValue());
    }

    // validation hook; accepts any value by default
    public char validateResolutionChange(char newResolution) {
        return newResolution;
    }

    /**
     * Signal input or output.
     */
    @Column(precision=1, scale=0)
    public int getRequirementSign() {
        return this.requirementSign;
    }
    public void setRequirementSign(int requirementSign) {
        this.requirementSign = requirementSign;
    }
    public void setRequirementSign(RequirementSign requirementSign) {
        this.requirementSign = requirementSign.getValue();
    }

    // assigns the natural key (entity plus internal number); returns this for chaining
    public TopLevelNumberedEntity setKey(Entity entity, long internalNumber) {
        this.setEntity(entity);
        this.setInternalNumber(internalNumber);
        return this;
    }

    /**
     * equals; delegates to the superclass after a type check, so the
     * inherited hashCode stays consistent with it.
     */
    @Override
    public boolean equals(Object other) {
        if ( !(other instanceof AbstractRequirement) ) return false;
        return super.equals(other);
    }

}
| Fixed cascading.
git-svn-id: 8e38db1cf16c4a277275c2a31413431bbf4974e4@1472 d46e4f78-7810-0410-b419-9f95c2a9a517
| helianto-inventory/src/main/java/org/helianto/inventory/AbstractRequirement.java | Fixed cascading. | <ide><path>elianto-inventory/src/main/java/org/helianto/inventory/AbstractRequirement.java
<ide> * Usually a part or a process operation.
<ide> * </p>
<ide> */
<del> @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE})
<add> @ManyToOne(cascade = {CascadeType.ALL})
<ide> @JoinColumn(name="documentId", nullable=true)
<ide> public ProcessDocument getProcessDocument() {
<ide> ProcessDocument externalProcessDocument = resolveExternalProcessDocument(); |
|
Java | mit | e18a412e19a39e3e092f59c0ca1ade3a202b2737 | 0 | typemeta/funcj,jon-hanson/funcj,jon-hanson/parsec4j | package org.typemeta.funcj.control;
import org.typemeta.funcj.algebra.Monoid;
import org.typemeta.funcj.functions.Functions;
public interface Writer<T, W> {
class Base<T, W> implements Writer<T, W> {
final T value;
final Monoid<W> monoid;
final W written;
public Base(T value, Monoid<W> monoid, W written) {
this.value = value;
this.monoid = monoid;
this.written = written;
}
@Override
public T value() {
return value;
}
@Override
public Monoid<W> monoid() {
return monoid;
}
@Override
public W written() {
return written;
}
}
static <T, W> Writer<T, W> pure(T value, Monoid<W> monoid) {
return new Base<T, W>(value, monoid, monoid.zero());
}
Monoid<W> monoid();
T value();
W written();
default <U> Writer<U, W> flatMap(Functions.F<T, Writer<U, W>> fw) {
final Writer<U, W> wu = fw.apply(value());
return new Base<U, W>(wu.value(), monoid(), monoid().combine(written(), wu.written()));
}
}
| core/src/main/java/org/typemeta/funcj/control/Writer.java | package org.typemeta.funcj.control;
import org.typemeta.funcj.algebra.Monoid;
import org.typemeta.funcj.functions.Functions;
/**
 * Couples a computed value with output accumulated through a {@link Monoid};
 * a minimal writer-monad implementation.
 *
 * @param <T> value type
 * @param <W> accumulated output type
 */
public abstract class Writer<T, W> {
    /**
     * Lift a plain value into a writer with an empty ({@code zero}) output.
     */
    static <T, W> Writer<T, W> pure(T value, Monoid<W> monoid) {
        // anonymous subclass captures the monoid argument to satisfy the abstract accessor
        return new Writer<T, W>(value, monoid.zero()) {
            @Override
            Monoid<W> monoid() {
                return monoid;
            }
        };
    }

    // supplies the monoid used to combine outputs in flatMap
    abstract Monoid<W> monoid();

    public final T value;
    public final W written;

    protected Writer(T value, W written) {
        this.value = value;
        this.written = written;
    }

    /**
     * Monadic bind: apply {@code fw} to the current value and combine the
     * outputs of both computations via the monoid.
     */
    public <U> Writer<U, W> flatMap(Functions.F<T, Writer<U, W>> fw) {
        final Writer<U, W> wu = fw.apply(value);
        // the resulting writer delegates monoid() back to this writer's monoid
        return new Writer<U, W>(wu.value, monoid().combine(written, wu.written)) {
            @Override
            Monoid<W> monoid() {
                return Writer.this.monoid();
            }
        };
    }
}
| Minor tweaks.
| core/src/main/java/org/typemeta/funcj/control/Writer.java | Minor tweaks. | <ide><path>ore/src/main/java/org/typemeta/funcj/control/Writer.java
<ide> import org.typemeta.funcj.algebra.Monoid;
<ide> import org.typemeta.funcj.functions.Functions;
<ide>
<del>public abstract class Writer<T, W> {
<del> static <T, W> Writer<T, W> pure(T value, Monoid<W> monoid) {
<del> return new Writer<T, W>(value, monoid.zero()) {
<del> @Override
<del> Monoid<W> monoid() {
<del> return monoid;
<del> }
<del> };
<add>public interface Writer<T, W> {
<add> class Base<T, W> implements Writer<T, W> {
<add> final T value;
<add> final Monoid<W> monoid;
<add> final W written;
<add>
<add> public Base(T value, Monoid<W> monoid, W written) {
<add> this.value = value;
<add> this.monoid = monoid;
<add> this.written = written;
<add> }
<add>
<add> @Override
<add> public T value() {
<add> return value;
<add> }
<add>
<add> @Override
<add> public Monoid<W> monoid() {
<add> return monoid;
<add> }
<add>
<add> @Override
<add> public W written() {
<add> return written;
<add> }
<ide> }
<ide>
<del> abstract Monoid<W> monoid();
<del>
<del> public final T value;
<del> public final W written;
<del>
<del> protected Writer(T value, W written) {
<del> this.value = value;
<del> this.written = written;
<add> static <T, W> Writer<T, W> pure(T value, Monoid<W> monoid) {
<add> return new Base<T, W>(value, monoid, monoid.zero());
<ide> }
<ide>
<del> public <U> Writer<U, W> flatMap(Functions.F<T, Writer<U, W>> fw) {
<del> final Writer<U, W> wu = fw.apply(value);
<del> return new Writer<U, W>(wu.value, monoid().combine(written, wu.written)) {
<del> @Override
<del> Monoid<W> monoid() {
<del> return Writer.this.monoid();
<del> }
<del> };
<add> Monoid<W> monoid();
<add>
<add> T value();
<add>
<add> W written();
<add>
<add> default <U> Writer<U, W> flatMap(Functions.F<T, Writer<U, W>> fw) {
<add> final Writer<U, W> wu = fw.apply(value());
<add> return new Base<U, W>(wu.value(), monoid(), monoid().combine(written(), wu.written()));
<ide> }
<ide> } |
|
Java | apache-2.0 | aa2742dab3300d7b15c53c7174c2f6dc04605cce | 0 | frank-rahn/microservices,frank-rahn/microservices,frank-rahn/microservices | /*
* Copyright © 2015 by Frank W. Rahn. Alle Rechte vorbehalten. All rights reserved.
*/
package de.rahn.finances.commons.metrics;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.slf4j.LoggerFactory.getLogger;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
/**
 * Dieser Service exportiert die Dropwizard Metriken.<br>
* <br>
* In diesem Fall loggt der Service die Metriken periodisch.
* @author Frank W. Rahn
*/
@Service
public class MetricsExporterService {
private static final Logger LOGGER = getLogger("reporting-metrics");
@Autowired
private MetricRegistry registry;
private double rateFactor;
private String rateUnit;
private double durationFactor;
private String durationUnit;
	/**
	 * Initialise the reporter's conversion factors: rates are reported per
	 * second, durations in milliseconds.
	 */
	@PostConstruct
	public void initialize() {
		// rates: events per second (factor is 1)
		rateFactor = SECONDS.toSeconds(1);
		rateUnit = "second";
		// durations: captured in nanoseconds, reported in milliseconds
		durationFactor = 1.0 / MILLISECONDS.toNanos(1);
		durationUnit = "milliseconds";
	}
	/**
	 * Every full hour, log the current metrics and reset them.
	 *
	 * <p>
	 * Each metric is removed from the registry before being reported, so a
	 * fresh accumulation starts for the next interval. Nothing is logged when
	 * no metrics were registered.
	 * </p>
	 */
	@Scheduled(cron = "0 0 * * * ?")
	public void exportMetrics() {
		StringBuilder builder = new StringBuilder();

		// gauges: current value only
		registry.getGauges().forEach((s, m) -> {
			registry.remove(s);
			builder.append("metric=GAUGE, name=").append(s).append(", value=").append(m.getValue()).append('\n');
		});

		// counters: running count
		registry.getCounters().forEach((s, m) -> {
			registry.remove(s);
			builder.append("metric=COUNTER, name=").append(s).append(", count=").append(m.getCount()).append('\n');
		});

		// histograms: distribution snapshot (min/max/mean/stddev/percentiles)
		registry.getHistograms().forEach((s, m) -> {
			registry.remove(s);
			Snapshot snapshot = m.getSnapshot();
			builder.append("metric=HISTOGRAM, name=").append(s).append(", count=").append(m.getCount()).append(", min=")
				.append(snapshot.getMin()).append(", max=").append(snapshot.getMax()).append(", mean=")
				.append(snapshot.getMean()).append(", stddev=").append(snapshot.getStdDev()).append(", median=")
				.append(snapshot.getMedian()).append(", 75%=").append(snapshot.get75thPercentile()).append(", 95%=")
				.append(snapshot.get95thPercentile()).append(", 98%=").append(snapshot.get98thPercentile()).append(", 99%=")
				.append(snapshot.get99thPercentile()).append(", 99.9%=").append(snapshot.get999thPercentile()).append('\n');
		});

		// meters: event rates converted via rateFactor
		registry.getMeters().forEach((s, m) -> {
			registry.remove(s);
			builder.append("metric=METER, name=").append(s).append(", count=").append(m.getCount()).append(", mean-rate=")
				.append(convertRate(m.getMeanRate())).append(", 1-minute-rate=").append(convertRate(m.getOneMinuteRate()))
				.append(", 5-minute-rate=").append(convertRate(m.getFiveMinuteRate())).append(", 15-minute-rate=")
				.append(convertRate(m.getFifteenMinuteRate())).append(", rate-unit=events/").append(rateUnit).append('\n');
		});

		// timers: call rates plus duration distribution, both unit-converted
		registry.getTimers().forEach((s, m) -> {
			registry.remove(s);
			Snapshot snapshot = m.getSnapshot();
			builder.append("metric=TIMER, name=").append(s).append(", count=").append(m.getCount()).append(", mean-rate=")
				.append(convertRate(m.getMeanRate())).append(", 1-minute-rate=").append(convertRate(m.getOneMinuteRate()))
				.append(", 5-minute-rate=").append(convertRate(m.getFiveMinuteRate())).append(", 15-minute-rate=")
				.append(convertRate(m.getFifteenMinuteRate())).append(", rate-unit=calls/").append(rateUnit).append(", min=")
				.append(convertDuration(snapshot.getMin())).append(", max=").append(convertDuration(snapshot.getMax()))
				.append(", mean=").append(convertDuration(snapshot.getMean())).append(", stddev=")
				.append(convertDuration(snapshot.getStdDev())).append(", median=").append(convertDuration(snapshot.getMedian()))
				.append(", 75%=").append(convertDuration(snapshot.get75thPercentile())).append(", 95%=")
				.append(convertDuration(snapshot.get95thPercentile())).append(", 98%=")
				.append(convertDuration(snapshot.get98thPercentile())).append(", 99%=")
				.append(convertDuration(snapshot.get99thPercentile())).append(", 99.9%=")
				.append(convertDuration(snapshot.get999thPercentile())).append(", duration-unit=").append(durationUnit)
				.append('\n');
		});

		// only emit a report when at least one metric was collected
		if (builder.length() > 0) {
			LOGGER.info(builder.insert(0, "\n***** Metrics Report *****\n").append("***************************").toString());
		}
	}
	/** Scale a raw duration by durationFactor (nanoseconds to milliseconds, as initialised). */
	private double convertDuration(double duration) {
		return duration * durationFactor;
	}
	/** Scale a raw rate by rateFactor (events per second, as initialised). */
	private double convertRate(double rate) {
		return rate * rateFactor;
	}
} | securities-management/src/main/java/de/rahn/finances/commons/metrics/MetricsExporterService.java | /*
* Copyright © 2015 by Frank W. Rahn. Alle Rechte vorbehalten. All rights reserved.
*/
package de.rahn.finances.commons.metrics;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.slf4j.LoggerFactory.getLogger;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
/**
 * Dieser Service exportiert die Metriken.<br>
* <br>
* In diesem Fall loggt der Service die Metriken periodisch.
* @author Frank W. Rahn
*/
@Service
public class MetricsExporterService {
private static final Logger LOGGER = getLogger("reporting-metrics");
@Autowired
private MetricRegistry registry;
private double rateFactor;
private String rateUnit;
private double durationFactor;
private String durationUnit;
	/**
	 * Initialise the reporter's conversion factors: rates are reported per
	 * second, durations in milliseconds.
	 */
	@PostConstruct
	public void initialize() {
		// rates: events per second (factor is 1)
		rateFactor = SECONDS.toSeconds(1);
		rateUnit = "second";
		// durations: captured in nanoseconds, reported in milliseconds
		durationFactor = 1.0 / MILLISECONDS.toNanos(1);
		durationUnit = "milliseconds";
	}
	/**
	 * Every full hour, log the current metrics and reset them.
	 *
	 * <p>
	 * Each metric is removed from the registry before being reported; nothing
	 * is logged when no metrics were registered.
	 * </p>
	 */
	@Scheduled(cron = "0 0 * * * ?")
	public void exportMetrics() {
		StringBuilder builder = new StringBuilder();

		// gauges: current value only
		registry.getGauges().forEach((s, m) -> {
			registry.remove(s);
			builder.append("metric=GAUGE, name=").append(s).append(", value=").append(m.getValue()).append('\n');
		});

		// counters: running count
		registry.getCounters().forEach((s, m) -> {
			registry.remove(s);
			builder.append("metric=COUNTER, name=").append(s).append(", count=").append(m.getCount()).append('\n');
		});

		// histograms: distribution snapshot
		registry.getHistograms().forEach((s, m) -> {
			registry.remove(s);
			Snapshot snapshot = m.getSnapshot();
			builder.append("metric=HISTOGRAM, name=").append(s).append(", count=").append(m.getCount()).append(", min=")
				.append(snapshot.getMin()).append(", max=").append(snapshot.getMax()).append(", mean=")
				.append(snapshot.getMean()).append(", stddev=").append(snapshot.getStdDev()).append(", median=")
				.append(snapshot.getMedian()).append(", 75%=").append(snapshot.get75thPercentile()).append(", 95%=")
				.append(snapshot.get95thPercentile()).append(", 98%=").append(snapshot.get98thPercentile()).append(", 99%=")
				.append(snapshot.get99thPercentile()).append(", 99.9%=").append(snapshot.get999thPercentile()).append('\n');
		});

		// meters: event rates converted via rateFactor
		registry.getMeters().forEach((s, m) -> {
			registry.remove(s);
			builder.append("metric=METER, name=").append(s).append(", count=").append(m.getCount()).append(", mean-rate=")
				.append(convertRate(m.getMeanRate())).append(", 1-minute-rate=").append(convertRate(m.getOneMinuteRate()))
				.append(", 5-minute-rate=").append(convertRate(m.getFiveMinuteRate())).append(", 15-minute-rate=")
				.append(convertRate(m.getFifteenMinuteRate())).append(", rate-unit=events/").append(rateUnit).append('\n');
		});

		// timers: call rates plus duration distribution, both unit-converted
		registry.getTimers().forEach((s, m) -> {
			registry.remove(s);
			Snapshot snapshot = m.getSnapshot();
			builder.append("metric=TIMER, name=").append(s).append(", count=").append(m.getCount()).append(", mean-rate=")
				.append(convertRate(m.getMeanRate())).append(", 1-minute-rate=").append(convertRate(m.getOneMinuteRate()))
				.append(", 5-minute-rate=").append(convertRate(m.getFiveMinuteRate())).append(", 15-minute-rate=")
				.append(convertRate(m.getFifteenMinuteRate())).append(", rate-unit=calls/").append(rateUnit).append(", min=")
				.append(convertDuration(snapshot.getMin())).append(", max=").append(convertDuration(snapshot.getMax()))
				.append(", mean=").append(convertDuration(snapshot.getMean())).append(", stddev=")
				.append(convertDuration(snapshot.getStdDev())).append(", median=").append(convertDuration(snapshot.getMedian()))
				.append(", 75%=").append(convertDuration(snapshot.get75thPercentile())).append(", 95%=")
				.append(convertDuration(snapshot.get95thPercentile())).append(", 98%=")
				.append(convertDuration(snapshot.get98thPercentile())).append(", 99%=")
				.append(convertDuration(snapshot.get99thPercentile())).append(", 99.9%=")
				.append(convertDuration(snapshot.get999thPercentile())).append(", duration-unit=").append(durationUnit)
				.append('\n');
		});

		// only emit a report when at least one metric was collected
		if (builder.length() > 0) {
			LOGGER.info(builder.insert(0, "\n***** Metrics Report *****\n").append("***************************").toString());
		}
	}
	/** Scale a raw duration by durationFactor (nanoseconds to milliseconds, as initialised). */
	private double convertDuration(double duration) {
		return duration * durationFactor;
	}
	/** Scale a raw rate by rateFactor (events per second, as initialised). */
	private double convertRate(double rate) {
		return rate * rateFactor;
	}
} | Kommentar erweitert | securities-management/src/main/java/de/rahn/finances/commons/metrics/MetricsExporterService.java | Kommentar erweitert | <ide><path>ecurities-management/src/main/java/de/rahn/finances/commons/metrics/MetricsExporterService.java
<ide> import com.codahale.metrics.Snapshot;
<ide>
<ide> /**
<del> * Diser Service exportiert die Metriken.<br>
<add> * Diser Service exportiert die Dropwizard Metriken.<br>
<ide> * <br>
<ide> * In diesem Fall loggt der Service die Metriken periodisch.
<ide> * @author Frank W. Rahn |
|
Java | apache-2.0 | 5c65241583e2420e4b7931f8aaf510f75de7b654 | 0 | ingorichtsmeier/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,camunda/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,xasx/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,camunda/camunda-bpm-platform,plexiti/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,langfr/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,falko/camunda-bpm-platform,camunda/camunda-bpm-platform,plexiti/camunda-bpm-platform,bentrm/camunda-bpm-platform,langfr/camunda-bpm-platform,xasx/camunda-bpm-platform,camunda/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,jangalinski/camunda-bpm-platform,bentrm/camunda-bpm-platform,filiphr/camunda-bpm-platform,bentrm/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,filiphr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,camunda/camunda-bpm-platform,langfr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,plexiti/camunda-bpm-platform,langfr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,xasx/camunda-bpm-platform,langfr/camunda-bpm-platform,xasx/camunda-bpm-platform,plexiti/camunda-bpm-platform,falko/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,filiphr/camunda-bpm-platform,filiphr/camunda-bpm-platform,plexiti/camunda-bpm-platform,falko/camunda-bpm-platform,falko/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,plexiti/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,langfr/camunda-bpm-platform,filiphr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,jangalinski/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,bentrm/camunda-bpm-platform,camunda/camunda-bpm-platform,falko/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,bentrm/camunda-bpm-platform,bentrm/camunda-bpm-platform,filiphr/camunda-bpm-platform,xasx/camunda-bpm-platform,
xasx/camunda-bpm-platform,falko/camunda-bpm-platform | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest;
import static com.jayway.restassured.RestAssured.given;
import static com.jayway.restassured.path.json.JsonPath.from;
import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_ACTIVITY_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_JOBS_PER_SEED;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_JOB_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_TOTAL_JOBS;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_TYPE;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_INVOCATIONS_PER_BATCH_JOB;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_MONITOR_JOB_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_SEED_JOB_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_TENANT_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_ACTIVITY_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_PROCESS_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.createMockBatch;
import static org.camunda.bpm.engine.rest.helper.NoIntermediaryInvocation.immediatelyAfter;
import static org.fest.assertions.Assertions.assertThat;
import static org.fest.assertions.MapAssert.entry;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.core.Response.Status;
import org.camunda.bpm.engine.BadUserRequestException;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.batch.Batch;
import org.camunda.bpm.engine.impl.ProcessInstanceQueryImpl;
import org.camunda.bpm.engine.migration.MigratingActivityInstanceValidationReport;
import org.camunda.bpm.engine.migration.MigratingProcessInstanceValidationException;
import org.camunda.bpm.engine.migration.MigratingProcessInstanceValidationReport;
import org.camunda.bpm.engine.migration.MigratingTransitionInstanceValidationReport;
import org.camunda.bpm.engine.migration.MigrationInstruction;
import org.camunda.bpm.engine.migration.MigrationInstructionValidationReport;
import org.camunda.bpm.engine.migration.MigrationPlan;
import org.camunda.bpm.engine.migration.MigrationPlanBuilder;
import org.camunda.bpm.engine.migration.MigrationPlanExecutionBuilder;
import org.camunda.bpm.engine.migration.MigrationPlanValidationException;
import org.camunda.bpm.engine.migration.MigrationPlanValidationReport;
import org.camunda.bpm.engine.rest.dto.migration.MigrationInstructionDto;
import org.camunda.bpm.engine.rest.dto.runtime.ProcessInstanceQueryDto;
import org.camunda.bpm.engine.rest.helper.FluentAnswer;
import org.camunda.bpm.engine.rest.helper.MockMigrationPlanBuilder;
import org.camunda.bpm.engine.rest.helper.MockMigrationPlanBuilder.JoinedMigrationPlanBuilderMock;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.camunda.bpm.engine.rest.util.migration.MigrationExecutionDtoBuilder;
import org.camunda.bpm.engine.rest.util.migration.MigrationPlanDtoBuilder;
import org.camunda.bpm.engine.runtime.ProcessInstanceQuery;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mockito;
import com.jayway.restassured.response.Response;
import java.util.List;
import org.camunda.bpm.engine.rest.util.migration.MigrationInstructionDtoBuilder;
public class MigrationRestServiceInteractionTest extends AbstractRestServiceTest {
@ClassRule
public static TestContainerRule rule = new TestContainerRule();
protected static final String MIGRATION_URL = TEST_RESOURCE_ROOT_PATH + "/migration";
protected static final String GENERATE_MIGRATION_URL = MIGRATION_URL + "/generate";
protected static final String VALIDATE_MIGRATION_URL = MIGRATION_URL + "/validate";
protected static final String EXECUTE_MIGRATION_URL = MIGRATION_URL + "/execute";
protected static final String EXECUTE_MIGRATION_ASYNC_URL = MIGRATION_URL + "/executeAsync";
protected RuntimeService runtimeServiceMock;
protected JoinedMigrationPlanBuilderMock migrationPlanBuilderMock;
protected MigrationPlanExecutionBuilder migrationPlanExecutionBuilderMock;
  @Before
  public void setUpRuntimeData() {
    // fresh runtime service mock per test, wired into the shared process engine mock
    runtimeServiceMock = mock(RuntimeService.class);
    when(processEngine.getRuntimeService()).thenReturn(runtimeServiceMock);

    // migration plan builder stub: source -> target with two symmetric activity instructions
    migrationPlanBuilderMock = new MockMigrationPlanBuilder()
      .sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
      .targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .builder();

    // createMigrationPlan for the example source/target pair returns the stubbed builder
    when(runtimeServiceMock.createMigrationPlan(eq(EXAMPLE_PROCESS_DEFINITION_ID), eq(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)))
      .thenReturn(migrationPlanBuilderMock);

    // execution builder stub so newMigration(...) interactions can be verified
    migrationPlanExecutionBuilderMock = mock(MigrationPlanExecutionBuilder.class);
    when(migrationPlanExecutionBuilderMock.processInstanceIds(anyListOf(String.class))).thenReturn(migrationPlanExecutionBuilderMock);
    when(runtimeServiceMock.newMigration(any(MigrationPlan.class))).thenReturn(migrationPlanExecutionBuilderMock);
  }
  @Test
  public void generateMigrationPlanWithInitialEmptyInstructions() {
    // request body: a plan DTO with an explicitly empty instruction list
    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instructions(Collections.<Map<String, Object>>emptyList())
      .build();

    // POST the plan to the generate endpoint and expect HTTP 200
    Response response = given()
        .contentType(POST_JSON_CONTENT_TYPE)
        .body(initialMigrationPlan)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .post(GENERATE_MIGRATION_URL);

    // verify the engine-side interaction and the returned plan via shared helpers
    verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
    verifyGenerateMigrationPlanResponse(response);
  }
@Test
public void generateMigrationPlanWithInitialNullInstructions() {
  // An explicit null instruction list is tolerated and plan generation runs.
  Map<String, Object> planRequest =
      new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
          .instructions(null)
          .build();

  Response response = given().contentType(POST_JSON_CONTENT_TYPE).body(planRequest)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().post(GENERATE_MIGRATION_URL);

  verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, planRequest);
  verifyGenerateMigrationPlanResponse(response);
}
@Test
public void generateMigrationPlanWithNoInitialInstructions() {
// A request without any "instructions" property behaves like an empty list:
// the plan is still generated from equal activities.
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.build();
Response response = given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect() // use then().expect() consistently with all sibling tests
.statusCode(Status.OK.getStatusCode())
.when()
.post(GENERATE_MIGRATION_URL);
verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
verifyGenerateMigrationPlanResponse(response);
}
@Test
public void generateMigrationPlanIgnoringInitialInstructions() {
  // Any instructions supplied by the client are ignored by /generate; the
  // engine-generated mapping wins.
  Map<String, Object> planRequest =
      new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
          .instruction("ignored", "ignored")
          .build();

  Response response = given().contentType(POST_JSON_CONTENT_TYPE).body(planRequest)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().post(GENERATE_MIGRATION_URL);

  verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, planRequest);
  verifyGenerateMigrationPlanResponse(response);
}
@Test
public void generateMigrationPlanWithNullSourceProcessDefinition() {
String message = "source process definition id is null";
// RETURNS_DEEP_STUBS lets us stub the terminal build() of the fluent chain directly.
MigrationPlanBuilder planBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
.thenReturn(planBuilder);
when(planBuilder.mapEqualActivities().build())
.thenThrow(new BadUserRequestException(message));
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID).build();
// The engine's BadUserRequestException must surface as HTTP 400 with its message.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(GENERATE_MIGRATION_URL);
}
@Test
public void generateMigrationPlanWithNonExistingSourceProcessDefinition() {
  String message = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
  // Deep stubs make the whole fluent chain stubbable in one expression.
  MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
  when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString()))
      .thenReturn(migrationPlanBuilder);
  when(migrationPlanBuilder.mapEqualActivities().build()).thenThrow(new BadUserRequestException(message));

  Map<String, Object> planRequest =
      new MigrationPlanDtoBuilder(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID).build();

  // The engine error maps to HTTP 400 carrying the original message.
  given().contentType(POST_JSON_CONTENT_TYPE).body(planRequest)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when().post(GENERATE_MIGRATION_URL);
}
@Test
public void generateMigrationPlanWithNullTargetProcessDefinition() {
  String message = "target process definition id is null";
  // Deep stubs make the whole fluent chain stubbable in one expression.
  MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
  when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class)))
      .thenReturn(migrationPlanBuilder);
  when(migrationPlanBuilder.mapEqualActivities().build()).thenThrow(new BadUserRequestException(message));

  Map<String, Object> planRequest =
      new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, null).build();

  // The engine error maps to HTTP 400 carrying the original message.
  given().contentType(POST_JSON_CONTENT_TYPE).body(planRequest)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when().post(GENERATE_MIGRATION_URL);
}
@Test
public void generateMigrationPlanWithNonExistingTargetProcessDefinition() {
String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
// Deep stubs allow stubbing the fluent chain's terminal build() call.
MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID)))
.thenReturn(migrationPlanBuilder);
when(
migrationPlanBuilder
.mapEqualActivities()
.build())
.thenThrow(new BadUserRequestException(message));
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID).build();
// Expect HTTP 400 with the engine's error message.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(GENERATE_MIGRATION_URL)
}
@Test
public void generatePlanUpdateEventTriggers() {
// Plan builder producing a single instruction with updateEventTrigger = true.
migrationPlanBuilderMock = new MockMigrationPlanBuilder()
.sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
.targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
.builder();
Map<String, Object> generationRequest = new HashMap<String, Object>();
generationRequest.put("sourceProcessDefinitionId", EXAMPLE_PROCESS_DEFINITION_ID);
generationRequest.put("targetProcessDefinitionId", ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);
generationRequest.put("updateEventTriggers", true);
when(runtimeServiceMock.createMigrationPlan(anyString(), anyString()))
.thenReturn(migrationPlanBuilderMock);
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(generationRequest)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.post(GENERATE_MIGRATION_URL);
verify(runtimeServiceMock).createMigrationPlan(eq(EXAMPLE_PROCESS_DEFINITION_ID), eq(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID));
// Order matters: updateEventTriggers() must be invoked directly after
// mapEqualActivities(), and no explicit activity mapping may occur.
InOrder inOrder = Mockito.inOrder(migrationPlanBuilderMock);
// the map equal activities method should be called
inOrder.verify(migrationPlanBuilderMock).mapEqualActivities();
inOrder.verify(migrationPlanBuilderMock, immediatelyAfter()).updateEventTriggers();
verify(migrationPlanBuilderMock, never()).mapActivities(anyString(), anyString());
}
@Test
public void generatePlanUpdateEventTriggerResponse() {
  // Plan builder producing one instruction flagged with updateEventTrigger = true.
  migrationPlanBuilderMock = new MockMigrationPlanBuilder()
      .sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
      .targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
      .builder();
  when(runtimeServiceMock.createMigrationPlan(anyString(), anyString()))
      .thenReturn(migrationPlanBuilderMock);

  Map<String, Object> generationRequest = new HashMap<String, Object>();
  generationRequest.put("sourceProcessDefinitionId", EXAMPLE_PROCESS_DEFINITION_ID);
  generationRequest.put("targetProcessDefinitionId", ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);

  // The generated plan's JSON must expose the updateEventTrigger flag.
  given().contentType(POST_JSON_CONTENT_TYPE).body(generationRequest)
      .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .body("instructions[0].sourceActivityIds[0]", equalTo(EXAMPLE_ACTIVITY_ID))
      .body("instructions[0].targetActivityIds[0]", equalTo(ANOTHER_EXAMPLE_ACTIVITY_ID))
      .body("instructions[0].updateEventTrigger", equalTo(true))
      .when().post(GENERATE_MIGRATION_URL);
}
@Test
public void executeMigrationPlan() {
  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  // Synchronous execution answers 204 No Content on success.
  given().contentType(POST_JSON_CONTENT_TYPE).body(requestBody)
      .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
      .when().post(EXECUTE_MIGRATION_URL);

  @SuppressWarnings("unchecked")
  Map<String, Object> planDto =
      (Map<String, Object>) requestBody.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN);
  verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, planDto);
  verifyMigrationPlanExecutionInteraction(requestBody);
}
@Test
public void executeMigrationPlanWithProcessInstanceQuery() {
// A real (empty) query impl is returned so the resource can populate it from the DTO.
when(runtimeServiceMock.createProcessInstanceQuery())
.thenReturn(new ProcessInstanceQueryImpl());
ProcessInstanceQueryDto processInstanceQuery = new ProcessInstanceQueryDto();
processInstanceQuery.setProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID);
// Select process instances by query instead of an explicit id list.
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstanceQuery(processInstanceQuery)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.NO_CONTENT.getStatusCode())
.when()
.post(EXECUTE_MIGRATION_URL);
verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
verifyMigrationPlanExecutionInteraction(migrationExecution);
}
@Test
public void executeMigrationPlanSkipListeners() {
  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipCustomListeners(true)
      .build();

  given().contentType(POST_JSON_CONTENT_TYPE).body(requestBody)
      .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
      .when().post(EXECUTE_MIGRATION_URL);

  // Confirms skipCustomListeners(true) was propagated to the execution builder.
  verifyMigrationPlanExecutionInteraction(requestBody);
}
@Test
public void executeMigrationPlanSkipIoMappings() {
  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipIoMappings(true)
      .build();

  given().contentType(POST_JSON_CONTENT_TYPE).body(requestBody)
      .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
      .when().post(EXECUTE_MIGRATION_URL);

  // Confirms skipIoMappings(true) was propagated to the execution builder.
  verifyMigrationPlanExecutionInteraction(requestBody);
}
@Test
public void executeMigrationPlanWithNullInstructions() {
// Validation report whose single instruction report carries no instruction,
// simulating a plan-level failure for a missing instruction list.
MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
when(instructionReport.getMigrationInstruction()).thenReturn(null);
when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// The validation exception must be serialized as a 400 with the full report.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.instructionReports", hasSize(1))
.body("validationReport.instructionReports[0].instruction", nullValue())
.body("validationReport.instructionReports[0].failures", hasSize(1))
.body("validationReport.instructionReports[0].failures[0]", is("failure"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithEmptyInstructions() {
// Same report setup as the null-instructions test; only the request differs.
MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
when(instructionReport.getMigrationInstruction()).thenReturn(null);
when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// Overwrite the plan DTO with an explicitly empty instruction list.
((Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN))
.put(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS, Collections.<MigrationInstructionDto>emptyList());
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.instructionReports", hasSize(1))
.body("validationReport.instructionReports[0].instruction", nullValue())
.body("validationReport.instructionReports[0].failures", hasSize(1))
.body("validationReport.instructionReports[0].failures[0]", is("failure"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNullSourceProcessInstanceId() {
  String message = "source process definition id is null";
  // Fluent mock whose intermediate calls return themselves; only build() throws.
  JoinedMigrationPlanBuilderMock failingBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
      .thenReturn(failingBuilder);
  when(failingBuilder.build()).thenThrow(new BadUserRequestException(message));

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given().contentType(POST_JSON_CONTENT_TYPE).body(requestBody)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when().post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNonExistingSourceProcessInstanceId() {
String message = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
// Fluent answer mock: intermediate calls return the mock; only build() is stubbed to throw.
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString()))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// Expect HTTP 400 with the engine's error message.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNullTargetProcessInstanceId() {
  String message = "target process definition id is null";
  // Fluent mock whose intermediate calls return themselves; only build() throws.
  JoinedMigrationPlanBuilderMock failingBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class)))
      .thenReturn(failingBuilder);
  when(failingBuilder.build()).thenThrow(new BadUserRequestException(message));

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, null)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given().contentType(POST_JSON_CONTENT_TYPE).body(requestBody)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when().post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNonExistingTargetProcessInstanceId() {
String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
// Fluent answer mock: intermediate calls return the mock; only build() throws.
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID)))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// Expect HTTP 400 with the engine's error message.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNullSourceActivityId() {
  String message = "sourceActivityId is null";
  // Mapping a null source activity makes the plan builder reject the instruction.
  when(migrationPlanBuilderMock.mapActivities(isNull(String.class), anyString()))
      .thenThrow(new BadUserRequestException(message));

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(null, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given().contentType(POST_JSON_CONTENT_TYPE).body(requestBody)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when().post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNonExistingSourceActivityId() {
// NOTE: the stubbed message text is arbitrary; only its propagation is asserted.
String message = "sourceActivity is null";
when(migrationPlanBuilderMock.mapActivities(eq(NON_EXISTING_ACTIVITY_ID), anyString()))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(NON_EXISTING_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// Expect HTTP 400 with the engine's error message.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNullTargetActivityId() {
  String message = "targetActivityId is null";
  // Mapping onto a null target activity makes the plan builder reject the instruction.
  when(migrationPlanBuilderMock.mapActivities(anyString(), isNull(String.class)))
      .thenThrow(new BadUserRequestException(message));

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, null)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given().contentType(POST_JSON_CONTENT_TYPE).body(requestBody)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when().post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanWithNonExistingTargetActivityId() {
// NOTE: the stubbed message text is arbitrary; only its propagation is asserted.
String message = "targetActivity is null";
when(migrationPlanBuilderMock.mapActivities(anyString(), eq(NON_EXISTING_ACTIVITY_ID)))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, NON_EXISTING_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// Expect HTTP 400 with the engine's error message.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanValidationException() {
// Build a plan validation report with two instruction reports, each carrying
// the same instruction and two failure messages.
MigrationInstruction migrationInstruction = mock(MigrationInstruction.class);
when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);
MigrationInstructionValidationReport instructionReport1 = mock(MigrationInstructionValidationReport.class);
when(instructionReport1.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationInstructionValidationReport instructionReport2 = mock(MigrationInstructionValidationReport.class);
when(instructionReport2.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Arrays.asList(instructionReport1, instructionReport2));
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// The exception must serialize as 400 with the full validation report JSON
// (only the first instruction report's content is asserted here).
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.instructionReports", hasSize(2))
.body("validationReport.instructionReports[0].instruction.sourceActivityIds", hasSize(1))
.body("validationReport.instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.instructionReports[0].instruction.targetActivityIds", hasSize(1))
.body("validationReport.instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("validationReport.instructionReports[0].failures", hasSize(2))
.body("validationReport.instructionReports[0].failures[0]", is("failure1"))
.body("validationReport.instructionReports[0].failures[1]", is("failure2"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigratingProcessInstanceValidationException() {
// Build a process-instance validation report containing one activity-instance
// report and one transition-instance report, thrown from the synchronous execute().
MigrationInstruction migrationInstruction = mock(MigrationInstruction.class);
when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);
MigratingActivityInstanceValidationReport instanceReport1 = mock(MigratingActivityInstanceValidationReport.class);
when(instanceReport1.getActivityInstanceId()).thenReturn(EXAMPLE_ACTIVITY_INSTANCE_ID);
when(instanceReport1.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instanceReport1.getSourceScopeId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(instanceReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigratingTransitionInstanceValidationReport instanceReport2 = mock(MigratingTransitionInstanceValidationReport.class);
when(instanceReport2.getTransitionInstanceId()).thenReturn("transitionInstanceId");
when(instanceReport2.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instanceReport2.getSourceScopeId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(instanceReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigratingProcessInstanceValidationReport processInstanceReport = mock(MigratingProcessInstanceValidationReport.class);
when(processInstanceReport.getProcessInstanceId()).thenReturn(EXAMPLE_PROCESS_INSTANCE_ID);
when(processInstanceReport.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
when(processInstanceReport.getActivityInstanceReports()).thenReturn(Arrays.asList(instanceReport1));
when(processInstanceReport.getTransitionInstanceReports()).thenReturn(Arrays.asList(instanceReport2));
// The exception is raised by execute(), not by plan building.
doThrow(new MigratingProcessInstanceValidationException("fooo", processInstanceReport))
.when(migrationPlanExecutionBuilderMock).execute();
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// The full report — process-instance failures plus both nested instance
// reports — must be serialized into the 400 response body.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigratingProcessInstanceValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.processInstanceId", is(EXAMPLE_PROCESS_INSTANCE_ID))
.body("validationReport.failures", hasSize(2))
.body("validationReport.failures[0]", is("failure1"))
.body("validationReport.failures[1]", is("failure2"))
.body("validationReport.activityInstanceValidationReports", hasSize(1))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.sourceActivityIds", hasSize(1))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.targetActivityIds", hasSize(1))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("validationReport.activityInstanceValidationReports[0].activityInstanceId", is(EXAMPLE_ACTIVITY_INSTANCE_ID))
.body("validationReport.activityInstanceValidationReports[0].sourceScopeId", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.activityInstanceValidationReports[0].failures", hasSize(2))
.body("validationReport.activityInstanceValidationReports[0].failures[0]", is("failure1"))
.body("validationReport.activityInstanceValidationReports[0].failures[1]", is("failure2"))
.body("validationReport.transitionInstanceValidationReports", hasSize(1))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.sourceActivityIds", hasSize(1))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.targetActivityIds", hasSize(1))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("validationReport.transitionInstanceValidationReports[0].transitionInstanceId", is("transitionInstanceId"))
.body("validationReport.transitionInstanceValidationReports[0].sourceScopeId", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.transitionInstanceValidationReports[0].failures", hasSize(2))
.body("validationReport.transitionInstanceValidationReports[0].failures[0]", is("failure1"))
.body("validationReport.transitionInstanceValidationReports[0].failures[1]", is("failure2"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
@Test
public void executeMigrationPlanAsync() {
// Async execution returns a seed Batch whose properties are echoed in the response.
Batch batchMock = createMockBatch();
when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock);
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// 200 OK with the serialized batch, unlike the synchronous endpoint's 204.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.body("id", is(EXAMPLE_BATCH_ID))
.body("type", is(EXAMPLE_BATCH_TYPE))
.body("totalJobs", is(EXAMPLE_BATCH_TOTAL_JOBS))
.body("batchJobsPerSeed", is(EXAMPLE_BATCH_JOBS_PER_SEED))
.body("invocationsPerBatchJob", is(EXAMPLE_INVOCATIONS_PER_BATCH_JOB))
.body("seedJobDefinitionId", is(EXAMPLE_SEED_JOB_DEFINITION_ID))
.body("monitorJobDefinitionId", is(EXAMPLE_MONITOR_JOB_DEFINITION_ID))
.body("batchJobDefinitionId", is(EXAMPLE_BATCH_JOB_DEFINITION_ID))
.body("tenantId", is(EXAMPLE_TENANT_ID))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
verifyMigrationPlanAsyncExecutionInteraction(migrationExecution);
}
@Test
public void executeMigrationPlanAsyncWithProcessInstanceQuery() {
// A real (empty) query impl is returned so the resource can populate it from the DTO.
when(runtimeServiceMock.createProcessInstanceQuery())
.thenReturn(new ProcessInstanceQueryImpl());
ProcessInstanceQueryDto processInstanceQuery = new ProcessInstanceQueryDto();
processInstanceQuery.setProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID);
Batch batchMock = createMockBatch();
when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock);
// Select process instances by query instead of an explicit id list.
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstanceQuery(processInstanceQuery)
.build();
// 200 OK with the serialized seed batch.
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.body("id", is(EXAMPLE_BATCH_ID))
.body("type", is(EXAMPLE_BATCH_TYPE))
.body("totalJobs", is(EXAMPLE_BATCH_TOTAL_JOBS))
.body("batchJobsPerSeed", is(EXAMPLE_BATCH_JOBS_PER_SEED))
.body("invocationsPerBatchJob", is(EXAMPLE_INVOCATIONS_PER_BATCH_JOB))
.body("seedJobDefinitionId", is(EXAMPLE_SEED_JOB_DEFINITION_ID))
.body("monitorJobDefinitionId", is(EXAMPLE_MONITOR_JOB_DEFINITION_ID))
.body("batchJobDefinitionId", is(EXAMPLE_BATCH_JOB_DEFINITION_ID))
.body("tenantId", is(EXAMPLE_TENANT_ID))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
verifyMigrationPlanAsyncExecutionInteraction(migrationExecution);
}
@Test
public void executeMigrationPlanAsyncSkipListeners() {
  // Async execution with the skipCustomListeners flag enabled.
  Batch batch = createMockBatch();
  when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batch);

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipCustomListeners(true)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(requestBody)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);

  // the flag must be forwarded to the execution builder
  verifyMigrationPlanAsyncExecutionInteraction(requestBody);
}
@Test
public void executeMigrationPlanAsyncSkipIoMappings() {
  // Async execution with the skipIoMappings flag enabled.
  Batch batch = createMockBatch();
  when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batch);

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipIoMappings(true)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(requestBody)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);

  // the flag must be forwarded to the execution builder
  verifyMigrationPlanAsyncExecutionInteraction(requestBody);
}
@Test
public void executeMigrationPlanAsyncWithNullInstructions() {
// A validation report whose instruction report carries a null instruction must
// be rendered with a null "instruction" property in the 400 error response.
MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
when(instructionReport.getMigrationInstruction()).thenReturn(null);
when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));
// plan creation fails with a validation exception carrying the report above
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.instructionReports", hasSize(1))
.body("validationReport.instructionReports[0].instruction", nullValue())
.body("validationReport.instructionReports[0].failures", hasSize(1))
.body("validationReport.instructionReports[0].failures[0]", is("failure"))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithEmptyInstructions() {
// Same as the null-instructions case, but the request explicitly sends an
// empty instruction list; the validation error response must look identical.
MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
when(instructionReport.getMigrationInstruction()).thenReturn(null);
when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// overwrite the plan's instructions with an explicit empty list
((Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN))
.put(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS, Collections.<MigrationInstructionDto>emptyList());
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.instructionReports", hasSize(1))
.body("validationReport.instructionReports[0].instruction", nullValue())
.body("validationReport.instructionReports[0].failures", hasSize(1))
.body("validationReport.instructionReports[0].failures[0]", is("failure"))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullSourceProcessDefinitionId() {
  // A null source process definition id is rejected with 400 and the
  // engine-provided error message.
  String expectedMessage = "source process definition id is null";

  JoinedMigrationPlanBuilderMock failingPlanBuilder =
      mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
      .thenReturn(failingPlanBuilder);
  when(failingPlanBuilder.build()).thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(requestBody)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingSourceProcessDefinitionId() {
// Plan creation for an unknown source process definition fails; the REST
// layer must map the BadUserRequestException to a 400 response.
String message = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString()))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullTargetProcessDefinitionId() {
// A null target process definition id is rejected with 400.
String message = "target process definition id is null";
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class)))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, null)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingTargetProcessDefinitionId() {
// An unknown target process definition id is rejected with 400.
String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID)))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullSourceActivityId() {
  // mapActivities rejects a null source activity id; expect a 400 response.
  String expectedMessage = "sourceActivityId is null";
  when(migrationPlanBuilderMock.mapActivities(isNull(String.class), anyString()))
      .thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(null, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(requestBody)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingSourceActivityId() {
// An instruction referencing a non-existing source activity is rejected with 400.
String message = "sourceActivity is null";
when(migrationPlanBuilderMock.mapActivities(eq(NON_EXISTING_ACTIVITY_ID), anyString()))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(NON_EXISTING_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullTargetActivityId() {
// An instruction with a null target activity id is rejected with 400.
String message = "targetActivityId is null";
when(migrationPlanBuilderMock.mapActivities(anyString(), isNull(String.class)))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, null)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingTargetActivityId() {
// Asynchronous migration must fail with 400 when an instruction references a
// target activity that does not exist in the target process definition.
String message = "targetActivity is null";
when(migrationPlanBuilderMock.mapActivities(anyString(), eq(NON_EXISTING_ACTIVITY_ID)))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, NON_EXISTING_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
// Fix: this async test previously posted to the synchronous
// EXECUTE_MIGRATION_URL (copy-paste error) — every sibling *Async test
// exercises the /executeAsync endpoint.
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncValidationException() {
// A plan validation failure with two instruction reports must be fully
// serialized into the 400 error body (instruction + failure messages).
MigrationInstruction migrationInstruction = mock(MigrationInstruction.class);
when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);
MigrationInstructionValidationReport instructionReport1 = mock(MigrationInstructionValidationReport.class);
when(instructionReport1.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationInstructionValidationReport instructionReport2 = mock(MigrationInstructionValidationReport.class);
when(instructionReport2.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Arrays.asList(instructionReport1, instructionReport2));
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// single-activity instructions are rendered as one-element id lists
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.instructionReports", hasSize(2))
.body("validationReport.instructionReports[0].instruction.sourceActivityIds", hasSize(1))
.body("validationReport.instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.instructionReports[0].instruction.targetActivityIds", hasSize(1))
.body("validationReport.instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("validationReport.instructionReports[0].failures", hasSize(2))
.body("validationReport.instructionReports[0].failures[0]", is("failure1"))
.body("validationReport.instructionReports[0].failures[1]", is("failure2"))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanUpdateEventTrigger() {
  // Synchronous execution of a plan whose instructions carry explicit
  // updateEventTrigger flags (true for the first, false for the second).
  Map<String, Object> requestBody = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID, false)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(requestBody)
  .then().expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_URL);

  verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock,
      (Map<String, Object>) requestBody.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
  verifyMigrationPlanExecutionInteraction(requestBody);
}
@Test
public void validateMigrationPlan() {
  // A valid plan yields a 200 response with an empty instruction report list.
  Map<String, Object> planDto =
      new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID, true)
        .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(planDto)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .body("instructionReports", hasSize(0))
  .when()
    .post(VALIDATE_MIGRATION_URL);

  verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, planDto);
}
@Test
public void validateMigrationPlanValidationException() {
// Validation failures are reported with status 200 (the validate endpoint
// never fails for an invalid plan) and the full instruction report payload.
MigrationInstruction migrationInstruction = mock(MigrationInstruction.class);
when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);
MigrationInstructionValidationReport instructionReport1 = mock(MigrationInstructionValidationReport.class);
when(instructionReport1.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationInstructionValidationReport instructionReport2 = mock(MigrationInstructionValidationReport.class);
when(instructionReport2.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Arrays.asList(instructionReport1, instructionReport2));
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationPlan)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.body("instructionReports", hasSize(2))
.body("instructionReports[0].instruction.sourceActivityIds", hasSize(1))
.body("instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("instructionReports[0].instruction.targetActivityIds", hasSize(1))
.body("instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("instructionReports[0].failures", hasSize(2))
.body("instructionReports[0].failures[0]", is("failure1"))
.body("instructionReports[0].failures[1]", is("failure2"))
.when()
.post(VALIDATE_MIGRATION_URL);
}
protected void verifyGenerateMigrationPlanResponse(Response response) {
  // The generated plan must contain exactly the two equal-activity mappings,
  // both with updateEventTrigger = false.
  String json = response.asString();

  assertThat(from(json).getString("sourceProcessDefinitionId")).isEqualTo(EXAMPLE_PROCESS_DEFINITION_ID);
  assertThat(from(json).getString("targetProcessDefinitionId")).isEqualTo(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);

  List<Map<String, Object>> instructions = from(json).getList("instructions");
  assertThat(instructions).hasSize(2);
  assertThat(instructions.get(0))
    .includes(
      entry("sourceActivityIds", Collections.singletonList(EXAMPLE_ACTIVITY_ID)),
      entry("targetActivityIds", Collections.singletonList(ANOTHER_EXAMPLE_ACTIVITY_ID)),
      entry("updateEventTrigger", false)
    );
  assertThat(instructions.get(1))
    .includes(
      entry("sourceActivityIds", Collections.singletonList(ANOTHER_EXAMPLE_ACTIVITY_ID)),
      entry("targetActivityIds", Collections.singletonList(EXAMPLE_ACTIVITY_ID)),
      entry("updateEventTrigger", false)
    );
}
protected void verifyGenerateMigrationPlanInteraction(MigrationPlanBuilder migrationPlanBuilderMock, Map<String, Object> initialMigrationPlan) {
  // Plan generation must use the definitions from the request body ...
  String sourceDefinitionId = initialMigrationPlan.get(MigrationPlanDtoBuilder.PROP_SOURCE_PROCESS_DEFINITION_ID).toString();
  String targetDefinitionId = initialMigrationPlan.get(MigrationPlanDtoBuilder.PROP_TARGET_PROCESS_DEFINITION_ID).toString();
  verify(runtimeServiceMock).createMigrationPlan(eq(sourceDefinitionId), eq(targetDefinitionId));
  // ... always map equal activities ...
  verify(migrationPlanBuilderMock).mapEqualActivities();
  // ... and ignore any instructions supplied by the client
  verify(migrationPlanBuilderMock, never()).mapActivities(anyString(), anyString());
}
// Asserts that the REST layer translated the given migration plan DTO into the
// corresponding RuntimeService / MigrationPlanBuilder calls.
protected void verifyCreateMigrationPlanInteraction(JoinedMigrationPlanBuilderMock migrationPlanBuilderMock, Map<String, Object> migrationPlan) {
verify(runtimeServiceMock).createMigrationPlan(migrationPlan.get(MigrationPlanDtoBuilder.PROP_SOURCE_PROCESS_DEFINITION_ID).toString(),
migrationPlan.get(MigrationPlanDtoBuilder.PROP_TARGET_PROCESS_DEFINITION_ID).toString());
// the map equal activities method should not be called
verify(migrationPlanBuilderMock, never()).mapEqualActivities();
// all instructions are added
List<Map<String, Object>> instructions = (List<Map<String, Object>>) migrationPlan.get(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS);
if (instructions != null) {
for (Map<String, Object> migrationInstructionDto : instructions) {
// a fresh InOrder per instruction: only the relative order of mapActivities
// and its directly following updateEventTrigger call is asserted
InOrder inOrder = Mockito.inOrder(migrationPlanBuilderMock);
String sourceActivityId = ((List<String>) migrationInstructionDto.get(MigrationInstructionDtoBuilder.PROP_SOURCE_ACTIVITY_IDS)).get(0);
String targetActivityId = ((List<String>) migrationInstructionDto.get(MigrationInstructionDtoBuilder.PROP_TARGET_ACTIVITY_IDS)).get(0);
inOrder.verify(migrationPlanBuilderMock).mapActivities(eq(sourceActivityId), eq(targetActivityId));
Boolean updateEventTrigger = (Boolean) migrationInstructionDto.get(MigrationInstructionDtoBuilder.PROP_UPDATE_EVENT_TRIGGER);
if (Boolean.TRUE.equals(updateEventTrigger)) {
// updateEventTrigger must immediately follow the mapActivities call it refers to
inOrder.verify(migrationPlanBuilderMock, immediatelyAfter()).updateEventTrigger();
}
}
}
}
// Verifies the synchronous execution sequence:
// newMigration -> builder configuration -> execute(), with no further calls.
protected void verifyMigrationPlanExecutionInteraction(Map<String, Object> migrationExecution) {
InOrder inOrder = inOrder(runtimeServiceMock, migrationPlanExecutionBuilderMock);
inOrder.verify(runtimeServiceMock).newMigration(any(MigrationPlan.class));
verifyMigrationExecutionBuilderInteraction(inOrder, migrationExecution);
inOrder.verify(migrationPlanExecutionBuilderMock).execute();
inOrder.verifyNoMoreInteractions();
}
// Verifies the asynchronous execution sequence:
// newMigration -> builder configuration -> executeAsync().
protected void verifyMigrationPlanAsyncExecutionInteraction(Map<String, Object> migrationExecution) {
InOrder inOrder = inOrder(runtimeServiceMock, migrationPlanExecutionBuilderMock);
inOrder.verify(runtimeServiceMock).newMigration(any(MigrationPlan.class));
verifyMigrationExecutionBuilderInteraction(inOrder, migrationExecution);
inOrder.verify(migrationPlanExecutionBuilderMock).executeAsync();
// only the execution builder is checked for further interactions here,
// unlike the synchronous variant which checks the whole InOrder group
Mockito.verifyNoMoreInteractions(migrationPlanExecutionBuilderMock);
}
// Verifies that every property present in the migration execution DTO
// (process instance ids, query, skip flags) was forwarded to the builder,
// in order, before the terminal execute/executeAsync call.
protected void verifyMigrationExecutionBuilderInteraction(InOrder inOrder, Map<String, Object> migrationExecution) {
List<String> processInstanceIds = ((List<String>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_PROCESS_INSTANCE_IDS));
inOrder.verify(migrationPlanExecutionBuilderMock).processInstanceIds(eq(processInstanceIds));
ProcessInstanceQueryDto processInstanceQuery = (ProcessInstanceQueryDto) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_PROCESS_INSTANCE_QUERY);
if (processInstanceQuery != null) {
verifyMigrationPlanExecutionProcessInstanceQuery(inOrder);
}
// skip flags are only forwarded when explicitly set to true
Boolean skipCustomListeners = (Boolean) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_SKIP_CUSTOM_LISTENERS);
if (Boolean.TRUE.equals(skipCustomListeners)) {
inOrder.verify(migrationPlanExecutionBuilderMock).skipCustomListeners();
}
Boolean skipIoMappings = (Boolean) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_SKIP_IO_MAPPINGS);
if (Boolean.TRUE.equals(skipIoMappings)) {
inOrder.verify(migrationPlanExecutionBuilderMock).skipIoMappings();
}
}
protected void verifyMigrationPlanExecutionProcessInstanceQuery(InOrder inOrder) {
  // Capture the query handed to the execution builder and check that it
  // carries the process definition id the test put into the query DTO.
  ArgumentCaptor<ProcessInstanceQuery> queryCaptor = ArgumentCaptor.forClass(ProcessInstanceQuery.class);
  inOrder.verify(migrationPlanExecutionBuilderMock).processInstanceQuery(queryCaptor.capture());

  ProcessInstanceQueryImpl capturedQuery = (ProcessInstanceQueryImpl) queryCaptor.getValue();
  assertThat(capturedQuery).isNotNull();
  assertThat(capturedQuery.getProcessDefinitionId()).isEqualTo(EXAMPLE_PROCESS_DEFINITION_ID);
}
}
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest;
import static com.jayway.restassured.RestAssured.given;
import static com.jayway.restassured.path.json.JsonPath.from;
import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_ACTIVITY_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_JOBS_PER_SEED;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_JOB_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_TOTAL_JOBS;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_TYPE;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_INVOCATIONS_PER_BATCH_JOB;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_MONITOR_JOB_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_SEED_JOB_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_TENANT_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_ACTIVITY_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_PROCESS_DEFINITION_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.createMockBatch;
import static org.camunda.bpm.engine.rest.helper.NoIntermediaryInvocation.immediatelyAfter;
import static org.fest.assertions.Assertions.assertThat;
import static org.fest.assertions.MapAssert.entry;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.core.Response.Status;
import org.camunda.bpm.engine.BadUserRequestException;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.batch.Batch;
import org.camunda.bpm.engine.impl.ProcessInstanceQueryImpl;
import org.camunda.bpm.engine.migration.MigratingActivityInstanceValidationReport;
import org.camunda.bpm.engine.migration.MigratingProcessInstanceValidationException;
import org.camunda.bpm.engine.migration.MigratingProcessInstanceValidationReport;
import org.camunda.bpm.engine.migration.MigratingTransitionInstanceValidationReport;
import org.camunda.bpm.engine.migration.MigrationInstruction;
import org.camunda.bpm.engine.migration.MigrationInstructionValidationReport;
import org.camunda.bpm.engine.migration.MigrationPlan;
import org.camunda.bpm.engine.migration.MigrationPlanBuilder;
import org.camunda.bpm.engine.migration.MigrationPlanExecutionBuilder;
import org.camunda.bpm.engine.migration.MigrationPlanValidationException;
import org.camunda.bpm.engine.migration.MigrationPlanValidationReport;
import org.camunda.bpm.engine.rest.dto.migration.MigrationInstructionDto;
import org.camunda.bpm.engine.rest.dto.runtime.ProcessInstanceQueryDto;
import org.camunda.bpm.engine.rest.helper.FluentAnswer;
import org.camunda.bpm.engine.rest.helper.MockMigrationPlanBuilder;
import org.camunda.bpm.engine.rest.helper.MockMigrationPlanBuilder.JoinedMigrationPlanBuilderMock;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.camunda.bpm.engine.rest.util.migration.MigrationExecutionDtoBuilder;
import org.camunda.bpm.engine.rest.util.migration.MigrationPlanDtoBuilder;
import org.camunda.bpm.engine.runtime.ProcessInstanceQuery;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mockito;
import com.jayway.restassured.response.Response;
import java.util.List;
import org.camunda.bpm.engine.rest.util.migration.MigrationInstructionDtoBuilder;
public class MigrationRestServiceInteractionTest extends AbstractRestServiceTest {
@ClassRule
public static TestContainerRule rule = new TestContainerRule();
// REST endpoints under test
protected static final String MIGRATION_URL = TEST_RESOURCE_ROOT_PATH + "/migration";
protected static final String GENERATE_MIGRATION_URL = MIGRATION_URL + "/generate";
protected static final String VALIDATE_MIGRATION_URL = MIGRATION_URL + "/validate";
protected static final String EXECUTE_MIGRATION_URL = MIGRATION_URL + "/execute";
protected static final String EXECUTE_MIGRATION_ASYNC_URL = MIGRATION_URL + "/executeAsync";
// engine service mocks, re-initialized for every test in setUpRuntimeData()
protected RuntimeService runtimeServiceMock;
protected JoinedMigrationPlanBuilderMock migrationPlanBuilderMock;
protected MigrationPlanExecutionBuilder migrationPlanExecutionBuilderMock;
@Before
public void setUpRuntimeData() {
// Fresh RuntimeService mock per test, wired into the shared process engine.
runtimeServiceMock = mock(RuntimeService.class);
when(processEngine.getRuntimeService()).thenReturn(runtimeServiceMock);
// Default migration plan builder mock with two pre-configured instructions.
migrationPlanBuilderMock = new MockMigrationPlanBuilder()
.sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
.targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.builder();
when(runtimeServiceMock.createMigrationPlan(eq(EXAMPLE_PROCESS_DEFINITION_ID), eq(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)))
.thenReturn(migrationPlanBuilderMock);
// Execution builder mock returns itself from processInstanceIds so the
// fluent chain in the resource under test keeps working.
migrationPlanExecutionBuilderMock = mock(MigrationPlanExecutionBuilder.class);
when(migrationPlanExecutionBuilderMock.processInstanceIds(anyListOf(String.class))).thenReturn(migrationPlanExecutionBuilderMock);
when(runtimeServiceMock.newMigration(any(MigrationPlan.class))).thenReturn(migrationPlanExecutionBuilderMock);
}
@Test
public void generateMigrationPlanWithInitialEmptyInstructions() {
  // An explicitly empty instruction list must not prevent plan generation.
  Map<String, Object> requestBody =
      new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instructions(Collections.<Map<String, Object>>emptyList())
        .build();

  Response response = given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(requestBody)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
  .when()
    .post(GENERATE_MIGRATION_URL);

  verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, requestBody);
  verifyGenerateMigrationPlanResponse(response);
}
@Test
public void generateMigrationPlanWithInitialNullInstructions() {
// An explicit null "instructions" property must be tolerated by the
// generate endpoint.
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instructions(null)
.build();
Response response = given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.post(GENERATE_MIGRATION_URL);
verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
verifyGenerateMigrationPlanResponse(response);
}
/**
 * Generating a migration plan succeeds when the request omits the
 * "instructions" property entirely.
 */
@Test
public void generateMigrationPlanWithNoInitialInstructions() {
  // Plan DTO without any "instructions" property.
  Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .build();

  // Consistency: siblings use .then().expect(); expect() is a readability
  // marker in REST Assured, so behavior is unchanged.
  Response response = given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
    .when()
      .post(GENERATE_MIGRATION_URL);

  // Engine interaction and response body must match the generated plan.
  verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
  verifyGenerateMigrationPlanResponse(response);
}
/**
 * The generate endpoint is expected to ignore instructions supplied in the
 * request body; the bogus "ignored" mapping must not influence the result.
 */
@Test
public void generateMigrationPlanIgnoringInitialInstructions() {
// request carries a dummy instruction the endpoint should disregard
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction("ignored", "ignored")
.build();
Response response = given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.post(GENERATE_MIGRATION_URL);
// same verification helpers as the other generate tests: engine interaction + response body
verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
verifyGenerateMigrationPlanResponse(response);
}
/**
 * A null source process definition id makes the engine throw
 * BadUserRequestException; the REST layer must answer 400 with the message.
 */
@Test
public void generateMigrationPlanWithNullSourceProcessDefinition() {
String message = "source process definition id is null";
// deep stubs allow stubbing the chained mapEqualActivities().build() call directly
MigrationPlanBuilder planBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
.thenReturn(planBuilder);
when(planBuilder.mapEqualActivities().build())
.thenThrow(new BadUserRequestException(message));
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID).build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(GENERATE_MIGRATION_URL);
}
/**
 * A non-existing source process definition id makes the engine throw
 * BadUserRequestException; the REST layer must answer 400 with the message.
 */
@Test
public void generateMigrationPlanWithNonExistingSourceProcessDefinition() {
  String expectedMessage = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";

  // Deep-stubbed builder so the chained mapEqualActivities().build() call
  // can be stubbed to fail in one statement.
  MigrationPlanBuilder failingPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
  when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString()))
      .thenReturn(failingPlanBuilder);
  when(failingPlanBuilder.mapEqualActivities().build()).thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> requestBody =
      new MigrationPlanDtoBuilder(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID).build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(requestBody)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(GENERATE_MIGRATION_URL);
}
/**
 * A null target process definition id makes the engine throw
 * BadUserRequestException; the REST layer must answer 400 with the message.
 */
@Test
public void generateMigrationPlanWithNullTargetProcessDefinition() {
String message = "target process definition id is null";
// deep stubs allow stubbing the chained mapEqualActivities().build() call directly
MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class)))
.thenReturn(migrationPlanBuilder);
when(
migrationPlanBuilder
.mapEqualActivities()
.build())
.thenThrow(new BadUserRequestException(message));
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, null).build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(GENERATE_MIGRATION_URL);
}
/**
 * A non-existing target process definition id makes the engine throw
 * BadUserRequestException; the REST layer must answer 400 with the message.
 */
@Test
public void generateMigrationPlanWithNonExistingTargetProcessDefinition() {
String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
// deep stubs allow stubbing the chained mapEqualActivities().build() call directly
MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID)))
.thenReturn(migrationPlanBuilder);
when(
migrationPlanBuilder
.mapEqualActivities()
.build())
.thenThrow(new BadUserRequestException(message));
Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID).build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(initialMigrationPlan)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(GENERATE_MIGRATION_URL);
}
/**
 * With "updateEventTriggers": true in the request, plan generation must call
 * updateEventTriggers() directly after mapEqualActivities(), and must never
 * add explicit activity mappings. Ordering is asserted via Mockito InOrder,
 * so the statement sequence here is deliberate.
 */
@Test
public void generatePlanUpdateEventTriggers() {
// rebuild the mock builder with an instruction that has updateEventTrigger = true
migrationPlanBuilderMock = new MockMigrationPlanBuilder()
.sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
.targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
.builder();
Map<String, Object> generationRequest = new HashMap<String, Object>();
generationRequest.put("sourceProcessDefinitionId", EXAMPLE_PROCESS_DEFINITION_ID);
generationRequest.put("targetProcessDefinitionId", ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);
generationRequest.put("updateEventTriggers", true);
when(runtimeServiceMock.createMigrationPlan(anyString(), anyString()))
.thenReturn(migrationPlanBuilderMock);
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(generationRequest)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.post(GENERATE_MIGRATION_URL);
verify(runtimeServiceMock).createMigrationPlan(eq(EXAMPLE_PROCESS_DEFINITION_ID), eq(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID));
InOrder inOrder = Mockito.inOrder(migrationPlanBuilderMock);
// the map equal activities method should be called
inOrder.verify(migrationPlanBuilderMock).mapEqualActivities();
// ... and updateEventTriggers() must follow it immediately
inOrder.verify(migrationPlanBuilderMock, immediatelyAfter()).updateEventTriggers();
// no explicit per-activity mapping may happen when only generation is requested
verify(migrationPlanBuilderMock, never()).mapActivities(anyString(), anyString());
}
/**
 * The generated plan's updateEventTrigger flag must be serialized into the
 * JSON response alongside source/target activity ids.
 */
@Test
public void generatePlanUpdateEventTriggerResponse() {
// mock builder produces one instruction with updateEventTrigger = true
migrationPlanBuilderMock = new MockMigrationPlanBuilder()
.sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
.targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
.builder();
when(runtimeServiceMock.createMigrationPlan(anyString(), anyString()))
.thenReturn(migrationPlanBuilderMock);
Map<String, Object> generationRequest = new HashMap<String, Object>();
generationRequest.put("sourceProcessDefinitionId", EXAMPLE_PROCESS_DEFINITION_ID);
generationRequest.put("targetProcessDefinitionId", ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(generationRequest)
.then().expect()
.statusCode(Status.OK.getStatusCode())
// instruction of the generated plan is echoed back, including the trigger flag
.body("instructions[0].sourceActivityIds[0]", equalTo(EXAMPLE_ACTIVITY_ID))
.body("instructions[0].targetActivityIds[0]", equalTo(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("instructions[0].updateEventTrigger", equalTo(true))
.when()
.post(GENERATE_MIGRATION_URL);
}
/**
 * Happy-path synchronous execution: POSTing a plan with two instructions and
 * two process instance ids returns 204 and forwards plan + instances to the engine.
 */
@Test
public void executeMigrationPlan() {
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.NO_CONTENT.getStatusCode())
.when()
.post(EXECUTE_MIGRATION_URL);
// verify plan creation used the nested "migrationPlan" part of the request ...
verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
// ... and that execution received the instance selection from the request
verifyMigrationPlanExecutionInteraction(migrationExecution);
}
/**
 * Synchronous execution where the instances to migrate are selected via a
 * process instance query instead of an explicit id list.
 */
@Test
public void executeMigrationPlanWithProcessInstanceQuery() {
// the REST layer materializes the query DTO through runtimeService.createProcessInstanceQuery()
when(runtimeServiceMock.createProcessInstanceQuery())
.thenReturn(new ProcessInstanceQueryImpl());
ProcessInstanceQueryDto processInstanceQuery = new ProcessInstanceQueryDto();
processInstanceQuery.setProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID);
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstanceQuery(processInstanceQuery)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.NO_CONTENT.getStatusCode())
.when()
.post(EXECUTE_MIGRATION_URL);
// verify plan creation and that the query (not an id list) drove instance selection
verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
verifyMigrationPlanExecutionInteraction(migrationExecution);
}
/**
 * Synchronous execution with "skipCustomListeners": true must succeed with
 * 204 and pass the flag through to the execution builder.
 */
@Test
public void executeMigrationPlanSkipListeners() {
  Map<String, Object> executionRequest = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipCustomListeners(true)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionRequest)
  .then().expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_URL);

  // Asserts the execution builder saw the skipCustomListeners flag.
  verifyMigrationPlanExecutionInteraction(executionRequest);
}
/**
 * Synchronous execution with "skipIoMappings": true must succeed with 204
 * and pass the flag through to the execution builder.
 */
@Test
public void executeMigrationPlanSkipIoMappings() {
  Map<String, Object> executionRequest = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipIoMappings(true)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionRequest)
  .then().expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_URL);

  // Asserts the execution builder saw the skipIoMappings flag.
  verifyMigrationPlanExecutionInteraction(executionRequest);
}
/**
 * A plan validation failure (here: a report whose instruction is null) must
 * surface as 400 with the serialized validation report in the body.
 */
@Test
public void executeMigrationPlanWithNullInstructions() {
// instruction report without an instruction, carrying a single failure
MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
when(instructionReport.getMigrationInstruction()).thenReturn(null);
when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));
// building the plan raises the validation exception
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
// report is serialized: one instruction report, null instruction, one failure
.body("validationReport.instructionReports", hasSize(1))
.body("validationReport.instructionReports[0].instruction", nullValue())
.body("validationReport.instructionReports[0].failures", hasSize(1))
.body("validationReport.instructionReports[0].failures[0]", is("failure"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * Same as executeMigrationPlanWithNullInstructions, but the request's plan
 * carries an explicitly empty instruction list; the validation error must
 * still map to a 400 with the serialized report.
 */
@Test
public void executeMigrationPlanWithEmptyInstructions() {
MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
when(instructionReport.getMigrationInstruction()).thenReturn(null);
when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
// overwrite the nested plan's instructions with an empty list after building the DTO
((Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN))
.put(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS, Collections.<MigrationInstructionDto>emptyList());
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
.body("validationReport.instructionReports", hasSize(1))
.body("validationReport.instructionReports[0].instruction", nullValue())
.body("validationReport.instructionReports[0].failures", hasSize(1))
.body("validationReport.instructionReports[0].failures[0]", is("failure"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * Execution with a null source process definition id in the plan: the engine
 * rejects build() with BadUserRequestException, expected to map to 400.
 */
@Test
public void executeMigrationPlanWithNullSourceProcessInstanceId() {
String message = "source process definition id is null";
// FluentAnswer makes every intermediate builder call return the mock itself
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * Execution with a non-existing source process definition id: build() throws
 * BadUserRequestException, which must be mapped to a 400 response.
 */
@Test
public void executeMigrationPlanWithNonExistingSourceProcessInstanceId() {
  String expectedMessage = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";

  // FluentAnswer makes every intermediate builder call return the mock itself,
  // so only build() needs explicit stubbing.
  JoinedMigrationPlanBuilderMock failingPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(failingPlanBuilder.build()).thenThrow(new BadUserRequestException(expectedMessage));
  when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString()))
      .thenReturn(failingPlanBuilder);

  Map<String, Object> executionRequest = new MigrationExecutionDtoBuilder()
      .migrationPlan(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionRequest)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_URL);
}
/**
 * Execution with a null target process definition id in the plan: the engine
 * rejects build() with BadUserRequestException, expected to map to 400.
 */
@Test
public void executeMigrationPlanWithNullTargetProcessInstanceId() {
String message = "target process definition id is null";
// FluentAnswer makes every intermediate builder call return the mock itself
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class)))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, null)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * Execution with a non-existing target process definition id: build() throws
 * BadUserRequestException, expected to map to 400 with the engine's message.
 */
@Test
public void executeMigrationPlanWithNonExistingTargetProcessInstanceId() {
String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
// FluentAnswer makes every intermediate builder call return the mock itself
JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID)))
.thenReturn(migrationPlanBuilder);
when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * A null source activity id in an instruction makes mapActivities() throw;
 * the REST layer must answer 400 with the engine's message.
 */
@Test
public void executeMigrationPlanWithNullSourceActivityId() {
String message = "sourceActivityId is null";
// the shared builder mock rejects a null source activity id
when(migrationPlanBuilderMock.mapActivities(isNull(String.class), anyString()))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(null, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * A non-existing source activity id in an instruction makes mapActivities()
 * throw; the REST layer must answer 400 with the engine's message.
 */
@Test
public void executeMigrationPlanWithNonExistingSourceActivityId() {
String message = "sourceActivity is null";
// the shared builder mock rejects the unknown source activity id
when(migrationPlanBuilderMock.mapActivities(eq(NON_EXISTING_ACTIVITY_ID), anyString()))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(NON_EXISTING_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * A null target activity id in an instruction makes mapActivities() throw;
 * the REST layer must answer 400 with the engine's message.
 */
@Test
public void executeMigrationPlanWithNullTargetActivityId() {
String message = "targetActivityId is null";
// the shared builder mock rejects a null target activity id
when(migrationPlanBuilderMock.mapActivities(anyString(), isNull(String.class)))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, null)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * A non-existing target activity id in an instruction makes mapActivities()
 * throw; the REST layer must answer 400 with the engine's message.
 */
@Test
public void executeMigrationPlanWithNonExistingTargetActivityId() {
String message = "targetActivity is null";
// the shared builder mock rejects the unknown target activity id
when(migrationPlanBuilderMock.mapActivities(anyString(), eq(NON_EXISTING_ACTIVITY_ID)))
.thenThrow(new BadUserRequestException(message));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, NON_EXISTING_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("message", is(message))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * A MigrationPlanValidationException thrown by build() must be serialized as
 * a 400 response carrying the full validation report: per-instruction source
 * and target activity ids plus the failure messages.
 */
@Test
public void executeMigrationPlanValidationException() {
// the instruction both reports refer to
MigrationInstruction migrationInstruction = mock(MigrationInstruction.class);
when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);
// two identical instruction reports, each with two failures
MigrationInstructionValidationReport instructionReport1 = mock(MigrationInstructionValidationReport.class);
when(instructionReport1.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationInstructionValidationReport instructionReport2 = mock(MigrationInstructionValidationReport.class);
when(instructionReport2.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instructionReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
when(validationReport.getInstructionReports()).thenReturn(Arrays.asList(instructionReport1, instructionReport2));
// building the plan fails with the validation report attached
when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
.body("message", is("fooo"))
// both reports serialized; the first one's content is asserted in detail
.body("validationReport.instructionReports", hasSize(2))
.body("validationReport.instructionReports[0].instruction.sourceActivityIds", hasSize(1))
.body("validationReport.instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.instructionReports[0].instruction.targetActivityIds", hasSize(1))
.body("validationReport.instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("validationReport.instructionReports[0].failures", hasSize(2))
.body("validationReport.instructionReports[0].failures[0]", is("failure1"))
.body("validationReport.instructionReports[0].failures[1]", is("failure2"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * A MigratingProcessInstanceValidationException thrown during execute() must
 * be serialized as a 400 response carrying the process-instance-level report:
 * its own failures plus nested activity-instance and transition-instance
 * reports, each with the causing migration instruction.
 */
@Test
public void executeMigratingProcessInstanceValidationException() {
// the instruction referenced by both nested reports
MigrationInstruction migrationInstruction = mock(MigrationInstruction.class);
when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);
// failing activity instance report
MigratingActivityInstanceValidationReport instanceReport1 = mock(MigratingActivityInstanceValidationReport.class);
when(instanceReport1.getActivityInstanceId()).thenReturn(EXAMPLE_ACTIVITY_INSTANCE_ID);
when(instanceReport1.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instanceReport1.getSourceScopeId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(instanceReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
// failing transition instance report
MigratingTransitionInstanceValidationReport instanceReport2 = mock(MigratingTransitionInstanceValidationReport.class);
when(instanceReport2.getTransitionInstanceId()).thenReturn("transitionInstanceId");
when(instanceReport2.getMigrationInstruction()).thenReturn(migrationInstruction);
when(instanceReport2.getSourceScopeId()).thenReturn(EXAMPLE_ACTIVITY_ID);
when(instanceReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
// aggregate report for the process instance
MigratingProcessInstanceValidationReport processInstanceReport = mock(MigratingProcessInstanceValidationReport.class);
when(processInstanceReport.getProcessInstanceId()).thenReturn(EXAMPLE_PROCESS_INSTANCE_ID);
when(processInstanceReport.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));
when(processInstanceReport.getActivityInstanceReports()).thenReturn(Arrays.asList(instanceReport1));
when(processInstanceReport.getTransitionInstanceReports()).thenReturn(Arrays.asList(instanceReport2));
// the failure happens at execute() time, not at plan-build time
doThrow(new MigratingProcessInstanceValidationException("fooo", processInstanceReport))
.when(migrationPlanExecutionBuilderMock).execute();
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(MigratingProcessInstanceValidationException.class.getSimpleName()))
.body("message", is("fooo"))
// top-level report properties
.body("validationReport.processInstanceId", is(EXAMPLE_PROCESS_INSTANCE_ID))
.body("validationReport.failures", hasSize(2))
.body("validationReport.failures[0]", is("failure1"))
.body("validationReport.failures[1]", is("failure2"))
// nested activity instance report
.body("validationReport.activityInstanceValidationReports", hasSize(1))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.sourceActivityIds", hasSize(1))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.targetActivityIds", hasSize(1))
.body("validationReport.activityInstanceValidationReports[0].migrationInstruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("validationReport.activityInstanceValidationReports[0].activityInstanceId", is(EXAMPLE_ACTIVITY_INSTANCE_ID))
.body("validationReport.activityInstanceValidationReports[0].sourceScopeId", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.activityInstanceValidationReports[0].failures", hasSize(2))
.body("validationReport.activityInstanceValidationReports[0].failures[0]", is("failure1"))
.body("validationReport.activityInstanceValidationReports[0].failures[1]", is("failure2"))
// nested transition instance report
.body("validationReport.transitionInstanceValidationReports", hasSize(1))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.sourceActivityIds", hasSize(1))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.targetActivityIds", hasSize(1))
.body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
.body("validationReport.transitionInstanceValidationReports[0].transitionInstanceId", is("transitionInstanceId"))
.body("validationReport.transitionInstanceValidationReports[0].sourceScopeId", is(EXAMPLE_ACTIVITY_ID))
.body("validationReport.transitionInstanceValidationReports[0].failures", hasSize(2))
.body("validationReport.transitionInstanceValidationReports[0].failures[0]", is("failure1"))
.body("validationReport.transitionInstanceValidationReports[0].failures[1]", is("failure2"))
.when()
.post(EXECUTE_MIGRATION_URL);
}
/**
 * Asynchronous execution: the async endpoint returns 200 with the created
 * batch serialized in the body, and executeAsync() (not execute()) is used.
 */
@Test
public void executeMigrationPlanAsync() {
// executeAsync() yields the mocked batch whose properties the response must echo
Batch batchMock = createMockBatch();
when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock);
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.OK.getStatusCode())
// all batch properties are serialized into the response
.body("id", is(EXAMPLE_BATCH_ID))
.body("type", is(EXAMPLE_BATCH_TYPE))
.body("totalJobs", is(EXAMPLE_BATCH_TOTAL_JOBS))
.body("batchJobsPerSeed", is(EXAMPLE_BATCH_JOBS_PER_SEED))
.body("invocationsPerBatchJob", is(EXAMPLE_INVOCATIONS_PER_BATCH_JOB))
.body("seedJobDefinitionId", is(EXAMPLE_SEED_JOB_DEFINITION_ID))
.body("monitorJobDefinitionId", is(EXAMPLE_MONITOR_JOB_DEFINITION_ID))
.body("batchJobDefinitionId", is(EXAMPLE_BATCH_JOB_DEFINITION_ID))
.body("tenantId", is(EXAMPLE_TENANT_ID))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
// verify plan creation plus the async (not sync) execution path
verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
verifyMigrationPlanAsyncExecutionInteraction(migrationExecution);
}
/**
 * Asynchronous execution where the instances are selected via a process
 * instance query; returns 200 with the created batch in the body.
 */
@Test
public void executeMigrationPlanAsyncWithProcessInstanceQuery() {
// the REST layer materializes the query DTO through runtimeService.createProcessInstanceQuery()
when(runtimeServiceMock.createProcessInstanceQuery())
.thenReturn(new ProcessInstanceQueryImpl());
ProcessInstanceQueryDto processInstanceQuery = new ProcessInstanceQueryDto();
processInstanceQuery.setProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID);
// executeAsync() yields the mocked batch whose properties the response must echo
Batch batchMock = createMockBatch();
when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock);
Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
.migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
.done()
.processInstanceQuery(processInstanceQuery)
.build();
given()
.contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution)
.then().expect()
.statusCode(Status.OK.getStatusCode())
// all batch properties are serialized into the response
.body("id", is(EXAMPLE_BATCH_ID))
.body("type", is(EXAMPLE_BATCH_TYPE))
.body("totalJobs", is(EXAMPLE_BATCH_TOTAL_JOBS))
.body("batchJobsPerSeed", is(EXAMPLE_BATCH_JOBS_PER_SEED))
.body("invocationsPerBatchJob", is(EXAMPLE_INVOCATIONS_PER_BATCH_JOB))
.body("seedJobDefinitionId", is(EXAMPLE_SEED_JOB_DEFINITION_ID))
.body("monitorJobDefinitionId", is(EXAMPLE_MONITOR_JOB_DEFINITION_ID))
.body("batchJobDefinitionId", is(EXAMPLE_BATCH_JOB_DEFINITION_ID))
.body("tenantId", is(EXAMPLE_TENANT_ID))
.when()
.post(EXECUTE_MIGRATION_ASYNC_URL);
// verify plan creation plus the async execution path driven by the query
verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
verifyMigrationPlanAsyncExecutionInteraction(migrationExecution);
}
/**
 * Asynchronous execution with "skipCustomListeners": true must return 200
 * and pass the flag through to the async execution builder.
 */
@Test
public void executeMigrationPlanAsyncSkipListeners() {
  // Async execution yields a batch.
  Batch batchMock = createMockBatch();
  when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock);

  Map<String, Object> executionRequest = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipCustomListeners(true)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionRequest)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);

  // Asserts the async execution builder saw the skipCustomListeners flag.
  verifyMigrationPlanAsyncExecutionInteraction(executionRequest);
}
/**
 * Asynchronous execution with "skipIoMappings": true must return 200 and
 * pass the flag through to the async execution builder.
 */
@Test
public void executeMigrationPlanAsyncSkipIoMappings() {
  // Async execution yields a batch.
  Batch batchMock = createMockBatch();
  when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock);

  Map<String, Object> executionRequest = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipIoMappings(true)
      .build();

  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionRequest)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);

  // Asserts the async execution builder saw the skipIoMappings flag.
  verifyMigrationPlanAsyncExecutionInteraction(executionRequest);
}
@Test
public void executeMigrationPlanAsyncWithNullInstructions() {
  // given: a validation report whose single instruction report carries no instruction
  MigrationInstructionValidationReport report = mock(MigrationInstructionValidationReport.class);
  when(report.getMigrationInstruction()).thenReturn(null);
  when(report.getFailures()).thenReturn(Collections.singletonList("failure"));

  MigrationPlanValidationReport planReport = mock(MigrationPlanValidationReport.class);
  when(planReport.getInstructionReports()).thenReturn(Collections.singletonList(report));

  when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", planReport));

  // and: an execution request without any instructions
  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the endpoint answers 400 and serializes the validation report
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
    .body("message", is("fooo"))
    .body("validationReport.instructionReports", hasSize(1))
    .body("validationReport.instructionReports[0].instruction", nullValue())
    .body("validationReport.instructionReports[0].failures", hasSize(1))
    .body("validationReport.instructionReports[0].failures[0]", is("failure"))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithEmptyInstructions() {
  // given: a validation report whose single instruction report carries no instruction
  MigrationInstructionValidationReport report = mock(MigrationInstructionValidationReport.class);
  when(report.getMigrationInstruction()).thenReturn(null);
  when(report.getFailures()).thenReturn(Collections.singletonList("failure"));

  MigrationPlanValidationReport planReport = mock(MigrationPlanValidationReport.class);
  when(planReport.getInstructionReports()).thenReturn(Collections.singletonList(report));

  when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", planReport));

  // and: an execution request whose instruction list is explicitly empty
  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();
  ((Map<String, Object>) executionDto.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN))
    .put(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS, Collections.<MigrationInstructionDto>emptyList());

  // when/then: the endpoint answers 400 and serializes the validation report
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
    .body("message", is("fooo"))
    .body("validationReport.instructionReports", hasSize(1))
    .body("validationReport.instructionReports[0].instruction", nullValue())
    .body("validationReport.instructionReports[0].failures", hasSize(1))
    .body("validationReport.instructionReports[0].failures[0]", is("failure"))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullSourceProcessDefinitionId() {
  // given: building a plan for a null source definition id fails in the engine
  String expectedMessage = "source process definition id is null";
  JoinedMigrationPlanBuilderMock planBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
    .thenReturn(planBuilder);
  when(planBuilder.build()).thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the REST call is rejected with the engine's message
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingSourceProcessDefinitionId() {
  // given: building a plan for an unknown source definition fails in the engine
  String expectedMessage = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
  JoinedMigrationPlanBuilderMock planBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString()))
    .thenReturn(planBuilder);
  when(planBuilder.build()).thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the REST call is rejected with the engine's message
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullTargetProcessDefinitionId() {
  // given: building a plan towards a null target definition id fails in the engine
  String expectedMessage = "target process definition id is null";
  JoinedMigrationPlanBuilderMock planBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class)))
    .thenReturn(planBuilder);
  when(planBuilder.build()).thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, null)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the REST call is rejected with the engine's message
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingTargetProcessDefinitionId() {
  // given: building a plan towards an unknown target definition fails in the engine
  String expectedMessage = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
  JoinedMigrationPlanBuilderMock planBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
  when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID)))
    .thenReturn(planBuilder);
  when(planBuilder.build()).thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the REST call is rejected with the engine's message
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullSourceActivityId() {
  // given: mapping a null source activity id is rejected by the engine
  String expectedMessage = "sourceActivityId is null";
  when(migrationPlanBuilderMock.mapActivities(isNull(String.class), anyString()))
    .thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(null, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the REST call is rejected with the engine's message
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingSourceActivityId() {
  // given: mapping from an unknown source activity is rejected by the engine
  String expectedMessage = "sourceActivity is null";
  when(migrationPlanBuilderMock.mapActivities(eq(NON_EXISTING_ACTIVITY_ID), anyString()))
    .thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(NON_EXISTING_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the REST call is rejected with the engine's message
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNullTargetActivityId() {
  // given: mapping onto a null target activity id is rejected by the engine
  String expectedMessage = "targetActivityId is null";
  when(migrationPlanBuilderMock.mapActivities(anyString(), isNull(String.class)))
    .thenThrow(new BadUserRequestException(expectedMessage));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, null)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the REST call is rejected with the engine's message
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(expectedMessage))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncWithNonExistingTargetActivityId() {
  // given: mapping onto an unknown target activity is rejected by the engine
  String message = "targetActivity is null";
  when(migrationPlanBuilderMock.mapActivities(anyString(), eq(NON_EXISTING_ACTIVITY_ID)))
    .thenThrow(new BadUserRequestException(message));

  Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, NON_EXISTING_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the async endpoint reports the failure as a bad request.
  // Fixed: this async test previously posted to EXECUTE_MIGRATION_URL (the synchronous
  // endpoint), apparently a copy/paste slip - every sibling *Async test in this class
  // posts to EXECUTE_MIGRATION_ASYNC_URL.
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(migrationExecution)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("message", is(message))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanAsyncValidationException() {
  // given: two instruction reports referring to the same instruction, each with two failures
  MigrationInstruction instruction = mock(MigrationInstruction.class);
  when(instruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
  when(instruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);

  MigrationInstructionValidationReport firstReport = mock(MigrationInstructionValidationReport.class);
  when(firstReport.getMigrationInstruction()).thenReturn(instruction);
  when(firstReport.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));

  MigrationInstructionValidationReport secondReport = mock(MigrationInstructionValidationReport.class);
  when(secondReport.getMigrationInstruction()).thenReturn(instruction);
  when(secondReport.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));

  MigrationPlanValidationReport planReport = mock(MigrationPlanValidationReport.class);
  when(planReport.getInstructionReports()).thenReturn(Arrays.asList(firstReport, secondReport));

  when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", planReport));

  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: the serialized validation report is returned with status 400
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
    .body("message", is("fooo"))
    .body("validationReport.instructionReports", hasSize(2))
    .body("validationReport.instructionReports[0].instruction.sourceActivityIds", hasSize(1))
    .body("validationReport.instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
    .body("validationReport.instructionReports[0].instruction.targetActivityIds", hasSize(1))
    .body("validationReport.instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
    .body("validationReport.instructionReports[0].failures", hasSize(2))
    .body("validationReport.instructionReports[0].failures[0]", is("failure1"))
    .body("validationReport.instructionReports[0].failures[1]", is("failure2"))
  .when()
    .post(EXECUTE_MIGRATION_ASYNC_URL);
}
@Test
public void executeMigrationPlanUpdateEventTrigger() {
  // given: one instruction with, and one without, an event trigger update
  Map<String, Object> executionDto = new MigrationExecutionDtoBuilder()
    .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID, false)
      .done()
    .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
    .build();

  // when/then: synchronous execution answers 204
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(executionDto)
  .then().expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(EXECUTE_MIGRATION_URL);

  // and: plan creation and execution were invoked as expected
  verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) executionDto.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
  verifyMigrationPlanExecutionInteraction(executionDto);
}
@Test
public void validateMigrationPlan() {
  // given: a plan with a plain instruction and one updating the event trigger
  Map<String, Object> planDto = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
    .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
    .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID, true)
    .build();

  // when/then: validation succeeds without any instruction reports
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(planDto)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .body("instructionReports", hasSize(0))
  .when()
    .post(VALIDATE_MIGRATION_URL);

  // and: the plan was created from the DTO
  verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, planDto);
}
@Test
public void validateMigrationPlanValidationException() {
  // given: two instruction reports referring to the same instruction, each with two failures
  MigrationInstruction instruction = mock(MigrationInstruction.class);
  when(instruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID);
  when(instruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID);

  MigrationInstructionValidationReport firstReport = mock(MigrationInstructionValidationReport.class);
  when(firstReport.getMigrationInstruction()).thenReturn(instruction);
  when(firstReport.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));

  MigrationInstructionValidationReport secondReport = mock(MigrationInstructionValidationReport.class);
  when(secondReport.getMigrationInstruction()).thenReturn(instruction);
  when(secondReport.getFailures()).thenReturn(Arrays.asList("failure1", "failure2"));

  MigrationPlanValidationReport planReport = mock(MigrationPlanValidationReport.class);
  when(planReport.getInstructionReports()).thenReturn(Arrays.asList(firstReport, secondReport));

  when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", planReport));

  Map<String, Object> planDto = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
    .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
    .build();

  // when/then: validation answers 200 and carries the instruction reports
  given()
    .contentType(POST_JSON_CONTENT_TYPE)
    .body(planDto)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .body("instructionReports", hasSize(2))
    .body("instructionReports[0].instruction.sourceActivityIds", hasSize(1))
    .body("instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID))
    .body("instructionReports[0].instruction.targetActivityIds", hasSize(1))
    .body("instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID))
    .body("instructionReports[0].failures", hasSize(2))
    .body("instructionReports[0].failures[0]", is("failure1"))
    .body("instructionReports[0].failures[1]", is("failure2"))
  .when()
    .post(VALIDATE_MIGRATION_URL);
}
protected void verifyGenerateMigrationPlanResponse(Response response) {
  // Extract the fields under test from the JSON payload.
  String json = response.asString();
  String sourceDefinitionId = from(json).getString("sourceProcessDefinitionId");
  String targetDefinitionId = from(json).getString("targetProcessDefinitionId");
  List<Map<String, Object>> instructions = from(json).getList("instructions");

  assertThat(sourceDefinitionId).isEqualTo(EXAMPLE_PROCESS_DEFINITION_ID);
  assertThat(targetDefinitionId).isEqualTo(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);

  // Two generated instructions are expected, mapping the example activities onto each other
  // without updating any event trigger.
  assertThat(instructions).hasSize(2);
  assertThat(instructions.get(0))
    .includes(
      entry("sourceActivityIds", Collections.singletonList(EXAMPLE_ACTIVITY_ID)),
      entry("targetActivityIds", Collections.singletonList(ANOTHER_EXAMPLE_ACTIVITY_ID)),
      entry("updateEventTrigger", false)
    );
  assertThat(instructions.get(1))
    .includes(
      entry("sourceActivityIds", Collections.singletonList(ANOTHER_EXAMPLE_ACTIVITY_ID)),
      entry("targetActivityIds", Collections.singletonList(EXAMPLE_ACTIVITY_ID)),
      entry("updateEventTrigger", false)
    );
}
protected void verifyGenerateMigrationPlanInteraction(MigrationPlanBuilder planBuilder, Map<String, Object> initialMigrationPlan) {
  // The plan has to be created for the definitions named in the initial DTO.
  String sourceDefinitionId = initialMigrationPlan.get(MigrationPlanDtoBuilder.PROP_SOURCE_PROCESS_DEFINITION_ID).toString();
  String targetDefinitionId = initialMigrationPlan.get(MigrationPlanDtoBuilder.PROP_TARGET_PROCESS_DEFINITION_ID).toString();
  verify(runtimeServiceMock).createMigrationPlan(eq(sourceDefinitionId), eq(targetDefinitionId));
  // Generation maps equal activities ...
  verify(planBuilder).mapEqualActivities();
  // ... and must never add explicit activity mappings.
  verify(planBuilder, never()).mapActivities(anyString(), anyString());
}
protected void verifyCreateMigrationPlanInteraction(JoinedMigrationPlanBuilderMock planBuilder, Map<String, Object> migrationPlan) {
  verify(runtimeServiceMock).createMigrationPlan(migrationPlan.get(MigrationPlanDtoBuilder.PROP_SOURCE_PROCESS_DEFINITION_ID).toString(),
      migrationPlan.get(MigrationPlanDtoBuilder.PROP_TARGET_PROCESS_DEFINITION_ID).toString());
  // An explicit plan must not fall back to equal-activity mapping.
  verify(planBuilder, never()).mapEqualActivities();

  // Every instruction of the DTO has to be translated into a mapActivities call,
  // immediately followed by updateEventTrigger when the flag is set.
  List<Map<String, Object>> instructions = (List<Map<String, Object>>) migrationPlan.get(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS);
  if (instructions == null) {
    return;
  }
  for (Map<String, Object> instruction : instructions) {
    InOrder inOrder = Mockito.inOrder(planBuilder);

    String source = ((List<String>) instruction.get(MigrationInstructionDtoBuilder.PROP_SOURCE_ACTIVITY_IDS)).get(0);
    String target = ((List<String>) instruction.get(MigrationInstructionDtoBuilder.PROP_TARGET_ACTIVITY_IDS)).get(0);
    inOrder.verify(planBuilder).mapActivities(eq(source), eq(target));

    Boolean updateEventTrigger = (Boolean) instruction.get(MigrationInstructionDtoBuilder.PROP_UPDATE_EVENT_TRIGGER);
    if (Boolean.TRUE.equals(updateEventTrigger)) {
      inOrder.verify(planBuilder, immediatelyAfter()).updateEventTrigger();
    }
  }
}
protected void verifyMigrationPlanExecutionInteraction(Map<String, Object> migrationExecution) {
  // The builder has to be obtained, configured from the DTO and executed synchronously.
  InOrder callOrder = inOrder(runtimeServiceMock, migrationPlanExecutionBuilderMock);
  callOrder.verify(runtimeServiceMock).newMigration(any(MigrationPlan.class));
  verifyMigrationExecutionBuilderInteraction(callOrder, migrationExecution);
  callOrder.verify(migrationPlanExecutionBuilderMock).execute();
  callOrder.verifyNoMoreInteractions();
}
protected void verifyMigrationPlanAsyncExecutionInteraction(Map<String, Object> migrationExecution) {
  // Same flow as the synchronous variant, but ending in executeAsync.
  InOrder callOrder = inOrder(runtimeServiceMock, migrationPlanExecutionBuilderMock);
  callOrder.verify(runtimeServiceMock).newMigration(any(MigrationPlan.class));
  verifyMigrationExecutionBuilderInteraction(callOrder, migrationExecution);
  callOrder.verify(migrationPlanExecutionBuilderMock).executeAsync();
  Mockito.verifyNoMoreInteractions(migrationPlanExecutionBuilderMock);
}
protected void verifyMigrationExecutionBuilderInteraction(InOrder inOrder, Map<String, Object> migrationExecution) {
  // Verifies the execution builder was configured from the request DTO:
  // process instance ids (and query, if present) first, then the optional skip flags.
  List<String> processInstanceIds = ((List<String>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_PROCESS_INSTANCE_IDS));
  inOrder.verify(migrationPlanExecutionBuilderMock).processInstanceIds(eq(processInstanceIds));

  ProcessInstanceQueryDto processInstanceQuery = (ProcessInstanceQueryDto) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_PROCESS_INSTANCE_QUERY);
  if (processInstanceQuery != null) {
    verifyMigrationPlanExecutionProcessInstanceQuery(inOrder);
  }

  // Fixed: Map.get instead of Map.getOrDefault (a Java-8-only method). A missing key
  // now yields null, which the null-safe Boolean.TRUE.equals check treats exactly like
  // the previous default of false - and this matches the skipIoMappings handling below.
  Boolean skipCustomListeners = (Boolean) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_SKIP_CUSTOM_LISTENERS);
  if (Boolean.TRUE.equals(skipCustomListeners)) {
    inOrder.verify(migrationPlanExecutionBuilderMock).skipCustomListeners();
  }

  Boolean skipIoMappings = (Boolean) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_SKIP_IO_MAPPINGS);
  if (Boolean.TRUE.equals(skipIoMappings)) {
    inOrder.verify(migrationPlanExecutionBuilderMock).skipIoMappings();
  }
}
protected void verifyMigrationPlanExecutionProcessInstanceQuery(InOrder inOrder) {
  // Capture the query handed to the builder and check it targets the example definition.
  ArgumentCaptor<ProcessInstanceQuery> captor = ArgumentCaptor.forClass(ProcessInstanceQuery.class);
  inOrder.verify(migrationPlanExecutionBuilderMock).processInstanceQuery(captor.capture());

  ProcessInstanceQueryImpl query = (ProcessInstanceQueryImpl) captor.getValue();
  assertThat(query).isNotNull();
  assertThat(query.getProcessDefinitionId()).isEqualTo(EXAMPLE_PROCESS_DEFINITION_ID);
}
}
| fix(engine-rest): remove java 8 method usage
related to CAM-5899
| engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/MigrationRestServiceInteractionTest.java | fix(engine-rest): remove java 8 method usage | <ide><path>ngine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/MigrationRestServiceInteractionTest.java
<ide> if (processInstanceQuery != null) {
<ide> verifyMigrationPlanExecutionProcessInstanceQuery(inOrder);
<ide> }
<del> Boolean skipCustomListeners = (Boolean) migrationExecution.getOrDefault(MigrationExecutionDtoBuilder.PROP_SKIP_CUSTOM_LISTENERS, false);
<add> Boolean skipCustomListeners = (Boolean) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_SKIP_CUSTOM_LISTENERS);
<ide> if (Boolean.TRUE.equals(skipCustomListeners)) {
<ide> inOrder.verify(migrationPlanExecutionBuilderMock).skipCustomListeners();
<ide> } |
|
Java | apache-2.0 | 73b06289e8e9c9edd0f257bd89321db61d85b9d3 | 0 | rhdunn/xquery-intellij-plugin,rhdunn/xquery-intellij-plugin | /*
* Copyright (C) 2016 Reece H. Dunn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.reecedunn.intellij.plugin.xquery.functional;
import java.util.NoSuchElementException;
import java.util.function.Function;
/**
 * A container that either holds a value (<em>some</em>) or holds nothing (<em>none</em>).
 *
 * The type follows Category Theory semantics: in particular, <em>some</em> may wrap a
 * null reference. Keeping some(null) distinct from none preserves the composability
 * laws required for Option to form a category.
 *
 * @param <A> The type of the value the option may contain.
 */
public abstract class Option<A> {
    /**
     * Transforms the contained value, if any, with the given function.
     *
     * @param f The mapping function to apply to a present value.
     * @param <B> The result type of the mapping function.
     * @return <em>some</em> wrapping the mapped value when this option is defined,
     *         <em>none</em> otherwise.
     */
    public <B> Option<B> map(Function<? super A, ? extends B> f) {
        if (!isDefined()) {
            return none();
        }
        return some(f.apply(get()));
    }

    /**
     * Transforms the contained value, if any, with a function that itself yields an
     * option, flattening the result so no nested option is returned.
     *
     * @param f The mapping function producing an option from a present value.
     * @param <B> The element type of the option produced by the function.
     * @return The (re-wrapped) result of the function when both this option and the
     *         produced option are defined, <em>none</em> otherwise.
     */
    public <B> Option<B> flatMap(Function<? super A, ? extends Option<? extends B>> f) {
        if (!isDefined()) {
            return none();
        }
        Option<? extends B> mapped = f.apply(get());
        if (mapped.isDefined()) {
            return some(mapped.get());
        }
        return none();
    }

    // region Option Interface

    /**
     * Reports whether this option holds a value.
     *
     * @return true for <em>some</em>, false for <em>none</em>.
     */
    public abstract boolean isDefined();

    /**
     * Unwraps the held value.
     *
     * @throws NoSuchElementException When this option is <em>none</em>.
     * @return The held value, which may legitimately be null for <em>some(null)</em>.
     */
    public abstract A get();

    // endregion
    // region Value Constructors

    /**
     * Wraps a nullable Java value, treating null as absence.
     *
     * @param value The possibly-null value to wrap.
     * @param <A> The type of the wrapped value.
     * @return <em>none</em> when {@code value} is null, otherwise <em>some</em>
     *         holding the value.
     */
    public static <A> Option<A> of(final A value) {
        if (value == null) {
            return none();
        }
        return some(value);
    }

    /**
     * Returns the option that represents the absence of a value.
     *
     * @param <A> The element type expected by the caller.
     * @return The shared <em>none</em> instance.
     */
    @SuppressWarnings("unchecked")
    public static <A> Option<A> none() {
        return (Option<A>) NONE;
    }

    /**
     * Wraps a value -- possibly null -- as a present option.
     *
     * Unlike {@link #of}, this factory never collapses null to <em>none</em>. That
     * allows call sites such as Map.get(key) to distinguish "no entry"
     * (<em>none</em>) from "an entry whose value is null" (<em>some(null)</em>).
     *
     * @param value The value to hold, which may be null.
     * @param <A> The type of the held value.
     * @return A defined option holding {@code value}.
     */
    public static <A> Option<A> some(final A value) {
        return new Some<>(value);
    }

    // endregion
    // region Implementation Details

    private Option() {
    }

    /** The single shared instance used for every undefined option. */
    private static final Option<Object> NONE = new Option<Object>() {
        @Override
        public boolean isDefined() {
            return false;
        }

        @Override
        public Object get() {
            throw new NoSuchElementException();
        }
    };

    /** A defined option; the held reference may legitimately be null. */
    private static final class Some<A> extends Option<A> {
        private final A held;

        private Some(final A value) {
            held = value;
        }

        @Override
        public boolean isDefined() {
            return true;
        }

        @Override
        public A get() {
            return held;
        }
    }

    // endregion
}
| src/main/java/uk/co/reecedunn/intellij/plugin/xquery/functional/Option.java | /*
* Copyright (C) 2016 Reece H. Dunn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.reecedunn.intellij.plugin.xquery.functional;
import java.util.NoSuchElementException;
import java.util.function.Function;
/**
 * A value that may (<em>some</em>) or may not (<em>none</em>) exist.
 *
 * This option type is defined to support Category Theory semantics. In particular,
 * <em>some</em> may be used to create an optional value that is null. This is
 * important in order to preserve the composability semantics needed for Option
 * to be a category.
 *
 * @param <A> The type of the value being stored in the option.
 */
public abstract class Option<A> {
    /**
     * Applies the given function to the stored value, if present.
     *
     * @param f The function to apply to a present value.
     * @param <B> The result type of the function.
     * @return <em>some</em> holding the mapped value if this option is defined,
     *         <em>none</em> otherwise.
     */
    public <B> Option<B> map(Function<? super A, ? extends B> f) {
        return isDefined() ? some(f.apply(get())) : none();
    }

    /**
     * Applies the given option-producing function to the stored value, if present,
     * and flattens the result so no nested option is returned.
     *
     * Added to complete the monadic interface alongside {@link #map}; without it,
     * chaining option-returning operations forces callers to unwrap intermediate
     * options manually.
     *
     * @param f The function producing an option from a present value.
     * @param <B> The element type of the option produced by the function.
     * @return The (re-wrapped) result of the function if both this option and the
     *         produced option are defined, <em>none</em> otherwise.
     */
    public <B> Option<B> flatMap(Function<? super A, ? extends Option<? extends B>> f) {
        if (isDefined()) {
            Option<? extends B> ret = f.apply(get());
            return ret.isDefined() ? some(ret.get()) : none();
        }
        return none();
    }

    // region Option Interface

    /**
     * Checks whether the option is defined.
     *
     * @return true if the option contains a value (<em>some</em>), false otherwise.
     */
    public abstract boolean isDefined();

    /**
     * Returns the value stored within the option.
     *
     * @throws NoSuchElementException If the value is not defined (i.e. is <em>none</em>).
     * @return The value held by the option.
     */
    public abstract A get();

    // endregion
    // region Value Constructors

    /**
     * Create an optional value from a Java value, where null denotes a missing
     * (<em>none</em>) value.
     *
     * @param value The value to store in the option.
     * @param <A> The type of the value being stored.
     * @return <em>none</em> if the value is null, <em>some</em> containing the
     * value otherwise.
     */
    public static <A> Option<A> of(final A value) {
        return (value == null) ? Option.<A>none() : some(value);
    }

    /**
     * Create an option denoting the absence of a value.
     *
     * @param <A> The type of the option.
     * @return An option representing no value.
     */
    @SuppressWarnings("unchecked")
    public static <A> Option<A> none() {
        return (Option<A>)None.NONE;
    }

    /**
     * Create an option denoting the presence of a value (which may be null).
     *
     * This can be used to create an option that has a defined value of null,
     * as well as creating an option with a non-null value. This is to support
     * using it in things like Map.get(key) to differentiate from an entry with
     * that key not existing (<em>none</em>) and an entry that contains a null
     * value (<em>some(null)</em>).
     *
     * @param value The value to store in the option, which may be null.
     * @param <A> The type of the option.
     * @return An option containing that specified value.
     */
    public static <A> Option<A> some(final A value) {
        return new Some<>(value);
    }

    // endregion
    // region Implementation Details

    private Option() {
    }

    /** Singleton representation of an undefined option. */
    private static final class None extends Option<Object> {
        private static final Option<Object> NONE = new None();

        @Override
        public boolean isDefined() {
            return false;
        }

        @Override
        public Object get() {
            throw new NoSuchElementException();
        }
    }

    /** A defined option; the stored value may legitimately be null. */
    private static final class Some<A> extends Option<A> {
        private final A mValue;

        private Some(final A value) {
            mValue = value;
        }

        @Override
        public boolean isDefined() {
            return true;
        }

        @Override
        public A get() {
            return mValue;
        }
    }

    // endregion
}
| Implement FlatMap on the Option type.
| src/main/java/uk/co/reecedunn/intellij/plugin/xquery/functional/Option.java | Implement FlatMap on the Option type. | <ide><path>rc/main/java/uk/co/reecedunn/intellij/plugin/xquery/functional/Option.java
<ide> public abstract class Option<A> {
<ide> public <B> Option<B> map(Function<? super A, ? extends B> f) {
<ide> return isDefined() ? some(f.apply(get())) : none();
<add> }
<add>
<add> public <B> Option<B> flatMap(Function<? super A, ? extends Option<? extends B>> f) {
<add> if (isDefined()) {
<add> Option<? extends B> ret = f.apply(get());
<add> return ret.isDefined() ? some(ret.get()) : none();
<add> }
<add> return none();
<ide> }
<ide>
<ide> // region Option Interface |
|
Java | agpl-3.0 | 6496992c7e2f722f8f54e0f8c23bd528b708a555 | 0 | HeBIS-VZ/GndAuthorityRecords | /*
* Copyright 2016, 2017 by HeBIS (www.hebis.de).
*
* This file is part of HeBIS project Gnd4Index.
*
* This is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
*
* This code is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with the code. If not, see http://www.gnu.org/licenses/agpl>.
*/
package de.hebis.it.hds.gnd.in;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import de.hebis.it.hds.gnd.in.subfields.CooperationFields;
import de.hebis.it.hds.gnd.in.subfields.DataField;
import de.hebis.it.hds.gnd.in.subfields.GeneralFields;
import de.hebis.it.hds.gnd.in.subfields.GeoFields;
import de.hebis.it.hds.gnd.in.subfields.MeetingFields;
import de.hebis.it.hds.gnd.in.subfields.PersonFields;
import de.hebis.it.hds.gnd.in.subfields.TitleFields;
import de.hebis.it.hds.gnd.in.subfields.TopicFields;
/**
* Function to parse/convert a single Marc21-XML authority record<br>
* The converted data will be returned as a {@link SolrInputDocument}.
*
* @author Uwe Reh (uh), HeBIS-IT
* @version 2017-03-17 uh initial
*
*/
public class MarcXmlParser implements Function<List<String>, Boolean> {
private final static Logger LOG = LogManager.getLogger(MarcXmlParser.class);
private final static XMLInputFactory srf = XMLInputFactory.newInstance();
private static final String[] unusedfields = { "001", "003", "005", "008", "024", "040", "043", "065", "089", "336", "339", "372", "375", "377", "380", "382", "383", "384", "548",
"667", "670", "675", "678", "679", "680", "682", "692", "912", "913" };
private static final List<String> dataFieldsToIgnore = Arrays.asList(unusedfields);
private String recordId = null;
private SolrClient solrClient = null;
/**
* Define where to store the parsed records.
*
* @param solrClient A solrj client to the designated index.
*/
public MarcXmlParser(SolrClient solrClient) {
this.solrClient = solrClient;
}
/**
* Parse the XML and store the relevant Data into an SolrInputDocument.<br>
*
*/
@Override
public Boolean apply(List<String> recordAsListOfLines) {
if (recordAsListOfLines == null) {
LOG.debug("NULL record received. If this is an OAI update, its normal");
return true;
}
if (recordAsListOfLines.size() < 3) {
LOG.warn("Unusable record received");
return false;
}
StringBuilder fullrecord = new StringBuilder();
// concat lines, omit unneeded whitespace characters and replace weird SOS ans EOS characters
for (String line : recordAsListOfLines) {
fullrecord.append(line.trim().replace("˜", "<").replace("œ", ">"));
}
parse(fullrecord.toString());
return true;
}
/**
* Real start of parsing.
*
* @param xmlRecord The marc21-XML record as String
*/
private void parse(String xmlRecord) {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("fullrecord", xmlRecord);
XMLStreamReader rawreader = null;
DataField dataField = null;
recordId = null;
char recordType = 'n'; // New/Normal
try {
rawreader = srf.createXMLStreamReader(new ByteArrayInputStream(xmlRecord.getBytes("UTF-8")), "UTF-8");
} catch (Exception e) {
LOG.error("Can't create the stax parser.");
throw new RuntimeException(e);
}
try {
while (rawreader.hasNext()) {
switch (rawreader.getEventType()) {
case XMLStreamConstants.START_ELEMENT:
switch (rawreader.getLocalName()) {
case "leader":
if (LOG.isTraceEnabled()) LOG.trace("Process leader.");
recordType = readTypeFromLeader(rawreader);
break;
case "datafield":
String tagId = rawreader.getAttributeValue(null, "tag");
if (recordType == 'n') { // Normal authority record
if (dataFieldsToIgnore.contains(tagId)) {
if (LOG.isTraceEnabled()) LOG.trace("Skip unused field : " + tagId);
break;
}
} else { // Control record, only the '682' data field needs to be evaluated
if (!"682".equals(tagId)) {
if (LOG.isTraceEnabled()) LOG.trace("Skip unused field in control record : " + tagId);
break;
}
}
// only if used
if (LOG.isTraceEnabled()) LOG.trace("Found field : " + tagId);
dataField = new DataField(recordId, doc);
dataField.put("tag", dataField.newList(tagId));
dataField.put("ind1", dataField.newList(rawreader.getAttributeValue(null, "ind1")));
dataField.put("ind2", dataField.newList(rawreader.getAttributeValue(null, "ind2")));
break;
case "subfield":
if (dataField == null) break; // Only if the field matters
dataField.addSubField(rawreader);
break;
}
break;
case XMLStreamConstants.END_ELEMENT:
if ("datafield".equals(rawreader.getLocalName())) {
if (dataField != null) { // // Only if the field matters
if (LOG.isTraceEnabled()) LOG.trace("Process field.");
eval(dataField);
dataField = null;
}
}
break;
}
rawreader.next();
}
rawreader.close();
} catch (XMLStreamException e) {
throw new RuntimeException("Data error in XML file.", e);
}
if (LOG.isTraceEnabled()) LOG.trace("Index record");
try {
if (LOG.isDebugEnabled()) LOG.debug("New Document: " + doc.toString());
solrClient.add(doc);
} catch (SolrServerException | IOException e) {
LOG.warn("Failed sending document:" + doc.get("id") + " to " + solrClient.toString(), e);
}
if (LOG.isTraceEnabled()) LOG.trace("Record is send.");
}
private char readTypeFromLeader(XMLStreamReader rawreader) {
// TODO Auto-generated method stub
// TODO Dummy for upcomming functions for deletions ans redirections.
return 'n';
}
/**
* Interpret a single marcXML 'datafield'
*
* @param dataField internal representation of the authority record
* @return FALSE if the parser has found a Problem, otherwise TRUE
*/
public boolean eval(DataField dataField) {
String subFieldId = dataField.getFirstValue("tag");
if ((subFieldId == null) || subFieldId.isEmpty()) {
LOG.warn("XML Data error: Category without the attribute 'tag'.");
return false;
}
switch (subFieldId) {
case "034": // geographic coordinates
GeoFields.coordinates(dataField);
break;
case "035": // The id(s) of the record
recordId = GeneralFields.id(dataField);
break;
case "079": // type of authority record
GeneralFields.type(dataField);
break;
case "083": // DDC
GeneralFields.dewey(dataField);
break;
case "100": // Personal name
PersonFields.headingPersonalName(dataField);
break;
case "110": // Cooperation name
CooperationFields.headingCooperationName(dataField);
break;
case "111": // Meeting name
MeetingFields.headingMeetingName(dataField);
break;
case "130": // Title name
TitleFields.headingTitle(dataField);
break;
case "150": // This term/topic
TopicFields.headingTopicalTerm(dataField);
break;
case "151": // This term/topic
GeoFields.headingGeoName(dataField);
break;
case "400": // Alternative name
PersonFields.tracingPersonalName(dataField);
break;
case "410": // Alternative cooperation name
CooperationFields.tracingCooperationName(dataField);
break;
case "411": // Alternative meeting name
MeetingFields.tracingMeetingName(dataField);
break;
case "430": // Alternative title
TitleFields.tracingTitle(dataField);
break;
case "450": // Alternative term/topic
TopicFields.tracingTopicalTerm(dataField);
break;
case "451": // Alternative geoname
GeoFields.tracingGeoName(dataField);
break;
case "500": // Related personal name
PersonFields.relatedPersonalName(dataField);
break;
case "510": // Related cooperation
CooperationFields.relatedCooperationName(dataField);
break;
case "511": // Related meeting
MeetingFields.relatedMeetingName(dataField);
break;
case "530": // Related uniform title
TitleFields.relatedTitle(dataField);
break;
case "550": // Related term
TopicFields.relatedTopicalTerm(dataField);
break;
case "551": // Related geographic name
GeoFields.relatedGeoName(dataField);
break;
case "682": // Infos for control records TODO
GeneralFields.controllInfos(dataField);
break;
case "700": // Alternative name for person in other system
PersonFields.linkingEntryPersonalName(dataField);
break;
case "710": // Alternative cooperation name in other system
CooperationFields.linkingEntryCooperationName(dataField);
break;
case "711": // Alternative meeting name in other system
MeetingFields.linkingEntryMeetingName(dataField);
break;
case "730": // Alternative title in other system
TitleFields.linkingEntryTitle(dataField);
break;
case "750": // Alternative name for topic in other system
TopicFields.linkingEntryTopicalTerm(dataField);
break;
case "751": // Alternative name for topic in other system
GeoFields.linkingEntryGeoName(dataField);
break;
default:
LOG.warn("No Rule for " + recordId + " : " + subFieldId);
}
return true;
}
}
| src/de/hebis/it/hds/gnd/in/MarcXmlParser.java | /*
* Copyright 2016, 2017 by HeBIS (www.hebis.de).
*
* This file is part of HeBIS project Gnd4Index.
*
* This is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
*
* This code is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with the code. If not, see http://www.gnu.org/licenses/agpl>.
*/
package de.hebis.it.hds.gnd.in;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import de.hebis.it.hds.gnd.in.subfields.CooperationFields;
import de.hebis.it.hds.gnd.in.subfields.DataField;
import de.hebis.it.hds.gnd.in.subfields.GeneralFields;
import de.hebis.it.hds.gnd.in.subfields.GeoFields;
import de.hebis.it.hds.gnd.in.subfields.MeetingFields;
import de.hebis.it.hds.gnd.in.subfields.PersonFields;
import de.hebis.it.hds.gnd.in.subfields.TitleFields;
import de.hebis.it.hds.gnd.in.subfields.TopicFields;
/**
* Function to parse/convert a single Marc21-XML authority record<br>
* The converted data will be returned as a {@link SolrInputDocument}.
*
* @author Uwe Reh (uh), HeBIS-IT
* @version 2017-03-17 uh initial
*
*/
public class MarcXmlParser implements Function<List<String>, Boolean> {
private final static Logger LOG = LogManager.getLogger(MarcXmlParser.class);
private final static XMLInputFactory srf = XMLInputFactory.newInstance();
private static final String[] unusedfields = { "001", "003", "005", "008", "024", "040", "043", "065", "089", "336", "339", "372", "375", "377", "380", "382", "383", "384", "548",
"667", "670", "675", "678", "679", "680", "682", "692", "912", "913" };
private static final List<String> dataFieldsToIgnore = Arrays.asList(unusedfields);
private String recordId = null;
private SolrClient solrClient = null;
/**
* Define where to store the parsed records.
*
* @param solrClient A solrj client to the designated index.
*/
public MarcXmlParser(SolrClient solrClient) {
this.solrClient = solrClient;
}
/**
* Parse the XML and store the relevant Data into an SolrInputDocument.<br>
*
*/
@Override
public Boolean apply(List<String> recordAsListOfLines) {
if (recordAsListOfLines == null) {
LOG.debug("NULL record received. If this is an OAI update, its normal");
return true;
}
if (recordAsListOfLines.size() < 3) {
LOG.warn("Unusable record received");
return false;
}
StringBuilder fullrecord = new StringBuilder();
// concat lines, omit unneeded whitespace characters and replace weird SOS ans EOS characters
for (String line : recordAsListOfLines) {
fullrecord.append(line.trim().replace("˜", "<").replace("œ", ">"));
}
parse(fullrecord.toString());
return true;
}
/**
* Real start of parsing.
*
* @param xmlRecord The marc21-XML record as String
*/
private void parse(String xmlRecord) {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("fullrecord", xmlRecord);
XMLStreamReader rawreader = null;
DataField dataField = null;
recordId = null;
char recordType = 'n'; // New/Normal
try {
rawreader = srf.createXMLStreamReader(new ByteArrayInputStream(xmlRecord.getBytes("UTF-8")), "UTF-8");
} catch (Exception e) {
LOG.error("Can't create the stax parser.");
throw new RuntimeException(e);
}
try {
while (rawreader.hasNext()) {
switch (rawreader.getEventType()) {
case XMLStreamConstants.START_ELEMENT:
switch (rawreader.getLocalName()) {
case "leader":
if (LOG.isTraceEnabled()) LOG.trace("Process leader.");
recordType = readTypeFromLeader(rawreader);
break;
case "datafield":
String tagId = rawreader.getAttributeValue(null, "tag");
if (recordType == 'n') { // Normal authority record
if (dataFieldsToIgnore.contains(tagId)) {
if (LOG.isTraceEnabled()) LOG.trace("Skip unused field : " + tagId);
break;
}
} else { // Control record, only the '682' data field needs to be evaluated
if (!"682".equals(tagId)) {
if (LOG.isTraceEnabled()) LOG.trace("Skip unused field in control record : " + tagId);
break;
}
}
// only if used
if (LOG.isTraceEnabled()) LOG.trace("Found field : " + tagId);
dataField = new DataField(recordId, doc);
dataField.put("tag", dataField.newList(tagId));
dataField.put("ind1", dataField.newList(rawreader.getAttributeValue(null, "ind1")));
dataField.put("ind2", dataField.newList(rawreader.getAttributeValue(null, "ind2")));
break;
case "subfield":
if (dataField == null) break; // Only if the field matters
dataField.addSubField(rawreader);
break;
}
break;
case XMLStreamConstants.END_ELEMENT:
if ("datafield".equals(rawreader.getLocalName())) {
if (dataField != null) { // // Only if the field matters
if (LOG.isTraceEnabled()) LOG.trace("Process field.");
eval(dataField);
dataField = null;
}
}
break;
}
rawreader.next();
}
rawreader.close();
} catch (XMLStreamException e) {
throw new RuntimeException("Data error in XML file.", e);
}
if (LOG.isTraceEnabled()) LOG.trace("Index record");
try {
if (LOG.isDebugEnabled()) LOG.debug("New Document: " + doc.toString());
solrClient.add(doc);
} catch (SolrServerException | IOException e) {
LOG.warn("Failed sending document:" + doc.get("id") + " to " + solrClient.toString(), e);
}
if (LOG.isTraceEnabled()) LOG.trace("Record is send.");
}
private char readTypeFromLeader(XMLStreamReader rawreader) {
// TODO Auto-generated method stub
// TODO Dummy for upcomming functions for deletions ans redirections.
return 'n';
}
/**
* Interpret a single marcXML 'datafield'
*
* @param dataField internal representation of the authority record
* @return FALSE if the parser has found a Problem, otherwise TRUE
*/
public boolean eval(DataField dataField) {
String subFieldId = dataField.getFirstValue("tag");
if ((subFieldId == null) || subFieldId.isEmpty()) {
LOG.warn("XML Data error: Category without the attribute 'tag'.");
return false;
}
switch (subFieldId) {
case "034": // geographic coordinates
GeoFields.coordinates(dataField);
break;
case "035": // The id(s) of the record
recordId = GeneralFields.id(dataField);
break;
case "079": // type of authority record
GeneralFields.type(dataField);
break;
case "083": // DDC
GeneralFields.dewey(dataField);
break;
case "100": // Personal name
PersonFields.headingPersonalName(dataField);
break;
case "110": // Cooperation name
CooperationFields.headingCooperationName(dataField);
break;
case "111": // Meeting name
MeetingFields.headingMeetingName(dataField);
break;
case "130": // Title name
TitleFields.headingTitle(dataField);
break;
case "150": // This term/topic
TopicFields.headingTopicalTerm(dataField);
break;
case "151": // This term/topic
GeoFields.headingGeoName(dataField);
break;
case "400": // Alternative name
PersonFields.tracingPersonalName(dataField);
break;
case "410": // Alternative cooperation name
CooperationFields.tracingCooperationName(dataField);
break;
case "411": // Alternative meeting name
MeetingFields.tracingMeetingName(dataField);
break;
case "430": // Alternative title
TitleFields.tracingTitle(dataField);
break;
case "450": // Alternative term/topic
TopicFields.tracingTopicalTerm(dataField);
break;
case "451": // Alternative geoname
GeoFields.tracingGeoName(dataField);
break;
case "500": // Related personal name
PersonFields.relatedPersonalName(dataField);
break;
case "510": // Related cooperation
CooperationFields.relatedCooperationName(dataField);
break;
case "511": // Related meeting
MeetingFields.relatedMeetingName(dataField);
break;
case "530": // Related uniform title
TitleFields.relatedTitle(dataField);
break;
case "550": // Related term
TopicFields.relatedTopicalTerm(dataField);
break;
case "551": // Related geographic name
GeoFields.relatedGeoName(dataField);
break;
case "682": // Infos for control records TODO
GeneralFields.controllInfos(dataField);
break;
case "700": // Alternative name for person in other system
PersonFields.linkingEntryPersonalName(dataField);
break;
case "710": // Alternative cooperation name in other system
CooperationFields.linkingEntryCooperationName(dataField);
break;
case "711": // Alternative meeting name in other system
MeetingFields.linkingEntryMeetingName(dataField);
break;
case "730": // Alternative title in other system
TitleFields.linkingEntryTitle(dataField);
break;
case "750": // Alternative name for topic in other system
TopicFields.linkingEntryTopicalTerm(dataField);
break;
case "751": // Alternative name for topic in other system
GeoFields.linkingEntryGeoName(dataField);
break;
default:
LOG.warn("No Rule for " + recordId + " : " + subFieldId);
}
return true;
}
}
| replace with "<" instead of "<"
Stupid me :( | src/de/hebis/it/hds/gnd/in/MarcXmlParser.java | replace with "<" instead of "<" | <ide><path>rc/de/hebis/it/hds/gnd/in/MarcXmlParser.java
<ide> StringBuilder fullrecord = new StringBuilder();
<ide> // concat lines, omit unneeded whitespace characters and replace weird SOS ans EOS characters
<ide> for (String line : recordAsListOfLines) {
<del> fullrecord.append(line.trim().replace("˜", "<").replace("œ", ">"));
<add> fullrecord.append(line.trim().replace("˜", "<").replace("œ", ">"));
<ide> }
<ide> parse(fullrecord.toString());
<ide> return true; |
|
JavaScript | mit | 32728f2a1250d2580da4b7050fa880a89ae1a4e9 | 0 | smhg/express-locale | import 'babel-polyfill';
// lookup methods
let lookups = {
'cookie': (req, options) => {
if (req.cookies) {
return req.cookies[options.cookie.name];
}
},
'domain': (req, options) => {
if (options.map && options.map.domain && (req.hostname || req.host)) {
return options.map.domain[req.hostname || req.host];
}
},
'accept-language': (req, options) => {
let locale;
let accepted;
if (req.acceptsLanguages) {
accepted = req.acceptsLanguages();
} else if (req.acceptedLanguages) {
accepted = req.acceptedLanguages;
} else {
return false;
}
accepted.some(item => {
locale = filter(complete(item.replace(/-+/g, '_'), options), options);
return locale;
});
return locale;
},
'default': (req, options) => {
return options['default'];
}
};
// filter wrong formats (and optionally non-allowed values)
function filter (locale, options) {
if (locale && locale.length === 5) {
if (!options.allowed) {
return locale;
}
if (options.allowed.indexOf(locale) >= 0) {
return locale;
}
}
}
// complete languages to locale (if options available)
function complete (locale, options) {
if (locale && locale.length === 2 && options.map) {
if (options.map.language) {
return options.map.language[locale.toLowerCase()];
}
}
return locale;
}
// lookup locale using specified source method
function lookup (source, req, options) {
if (!(source in lookups)) {
throw Error('Locale lookup source method "' + source + '" not defined');
}
return filter(complete(lookups[source](req, options), options), options);
}
function createLocaleMiddleware (options = {}) {
options = Object.assign({
default: 'en_GB',
cookie: {name: 'locale'},
priority: ['accept-language', 'default']
}, options);
return function (req, res, next) {
options.priority.some(source => {
let locale = lookup(source, req, options);
if (locale) {
let [language, region] = locale.split('_');
req.locale = {
code: locale,
source,
language,
region
};
}
return locale;
});
next();
};
}
createLocaleMiddleware.prototype.addLookup = function (name, lookup) {
lookups[name] = lookup;
};
module.exports = createLocaleMiddleware;
| src/index.js | import 'babel-polyfill';
// lookup methods
let lookups = {
'cookie': (req, options) => {
if (req.cookies) {
return req.cookies[options.cookie.name];
}
},
'domain': (req, options) => {
if (options.map && options.map.domain && (req.hostname || req.host)) {
return options.map.domain[req.hostname || req.host];
}
},
'accept-language': (req, options) => {
let locale;
let accepted;
if (req.acceptsLanguages) {
accepted = req.acceptsLanguages();
} else if (req.acceptedLanguages) {
accepted = req.acceptedLanguages;
} else {
return false;
}
accepted.some(item => {
locale = filter(complete(item.replace(/-+/g, '_'), options), options);
return locale;
});
return locale;
},
'default': (req, options) => {
return options['default'];
}
};
// filter wrong formats (and optionally non-allowed values)
function filter (locale, options) {
if (locale && locale.length === 5) {
if (!options.allowed) {
return locale;
}
if (options.allowed.indexOf(locale) >= 0) {
return locale;
}
}
}
// complete languages to locale (if options available)
function complete (locale, options) {
if (locale && locale.length === 2 && options.map) {
if (options.map.language) {
return options.map.language[locale.toLowerCase()];
}
}
return locale;
}
// lookup locale using specified source method
function lookup (source, req, options) {
if (!(source in lookups)) {
throw Error('Locale lookup source method "' + source + '" not defined');
}
return filter(complete(lookups[source](req, options), options), options);
}
function createLocaleMiddleware (options = {}) {
options = Object.assign({
default: 'en_GB',
cookie: {name: 'locale'},
priority: ['accept-language', 'default']
}, options);
return function (req, res, next) {
options.priority.some(source => {
let locale = lookup(source, req, options);
if (locale) {
req.locale = {
code: locale,
source: source
};
}
return locale;
});
next();
};
}
createLocaleMiddleware.prototype.addLookup = function (name, lookup) {
lookups[name] = lookup;
};
module.exports = createLocaleMiddleware;
| add language and region props to result (closes #1)
| src/index.js | add language and region props to result (closes #1) | <ide><path>rc/index.js
<ide> let locale = lookup(source, req, options);
<ide>
<ide> if (locale) {
<add> let [language, region] = locale.split('_');
<add>
<ide> req.locale = {
<ide> code: locale,
<del> source: source
<add> source,
<add> language,
<add> region
<ide> };
<ide> }
<ide> |
|
JavaScript | mit | d1e0c11f6831a39373685517b3da2f19224b7220 | 0 | wq/wq.app,wq/wq.app,wq/wq.app | /*!
* wq.app - app.js
* Utilizes store and pages to dynamically load and render
* content from a wq.db-compatible REST service
* (c) 2012 S. Andrew Sheppard
* http://wq.io/license
*/
define(['./lib/jquery', './lib/jquery.mobile',
'./store', './pages', './template', './spinner',
'./lib/es5-shim'],
function($, jqm, ds, pages, tmpl, spin) {
var app = {};
app.init = function(config, templates, baseurl, svc) {
if (baseurl === undefined)
baseurl = '';
if (svc === undefined)
svc = baseurl;
app.config = app.default_config = config;
app['native'] = !!window.cordova;
app.can_login = !!config.pages.login;
ds.init(svc, {'format':'json'}, {'applyResult': _applyResult});
pages.init(baseurl);
tmpl.init(templates, templates.partials, config.defaults);
tmpl.setDefault('native', app['native']);
tmpl.setDefault('app_config', app.config);
if (app.can_login) {
var user = ds.get('user');
var csrftoken = ds.get('csrftoken');
if (user) {
app.user = user;
tmpl.setDefault('user', user);
tmpl.setDefault('is_authenticated', true);
tmpl.setDefault('csrftoken', csrftoken);
app.config = ds.get({'url': 'config'});
tmpl.setDefault('app_config', app.config);
$('body').trigger('login');
}
app.check_login();
pages.register('logout\/?', app.logout);
}
if (config.transitions) {
var def = "default";
if (config.transitions[def])
jqm.defaultPageTransition = config.transitions[def];
if (config.transitions.dialog)
jqm.defaultDialogTransition = config.transitions.dialog;
if (config.transitions.save)
_saveTransition = config.transitions.save;
jqm.maxTransitionWidth = config.transitions.maxwidth || 800;
}
for (var page in app.config.pages) {
var conf = _getConf(page);
if (conf.list) {
_registerList(page);
_registerDetail(page);
_registerEdit(page);
} else if (conf) {
_registerOther(page);
}
}
$(document).on('submit', 'form', _handleForm);
}
app.logout = function() {
if (!app.can_login)
return;
delete app.user;
ds.set('user', null);
tmpl.setDefault('user', null);
tmpl.setDefault('is_authenticated', false);
tmpl.setDefault('csrftoken', null);
app.config = app.default_config;
tmpl.setDefault('app_config', app.config);
ds.fetch({'url': 'logout'}, true, undefined, true);
$('body').trigger('logout');
};
app.save_login = function(result) {
var config = result.config,
user = result.user,
csrftoken = result.csrftoken;
if (!app.can_login)
return;
app.config = config;
ds.set({'url': 'config'}, config);
tmpl.setDefault('app_config', config);
app.user = user;
tmpl.setDefault('user', user);
tmpl.setDefault('is_authenticated', true);
tmpl.setDefault('csrftoken', csrftoken);
ds.set('user', user);
ds.set('csrftoken', csrftoken);
$('body').trigger('login');
};
app.check_login = function() {
if (!app.can_login)
return;
ds.fetch({'url': 'login'}, true, function(result) {
if (result && result.user && result.config) {
app.save_login(result);
} else if (result && app.user) {
app.logout();
}
}, true);
};
// Internal variables and functions
var _saveTransition = "none";
// Wrappers for pages.register & pages.go to handle common use cases
// Determine appropriate context & template for pages.go
app.go = function(page, ui, params, itemid, edit, url) {
if (ui && ui.options && ui.options.data) return; // Ignore form actions
var conf = _getConf(page);
if (!conf.list) {
_renderOther(page, ui, params);
return;
}
ds.getList({'url': conf.url}, function(list) {
if (itemid) {
if (edit)
_renderEdit(page, list, ui, params, itemid, url);
else
_renderDetail(page, list, ui, params, itemid, url);
} else {
_renderList(page, list, ui, params, url);
}
});
}
app.getAnnotationTypeFilter = function(page, obj) {
return {'for': page};
};
// Generate list view context and render with [url]_list template;
// handles requests for [url] and [url]/
function _registerList(page) {
var conf = _getConf(page);
pages.register(conf.url, go);
pages.register(conf.url + '/', go);
function go(match, ui, params) {
app.go(page, ui, params);
}
}
function _renderList(page, list, ui, params, url) {
var conf = _getConf(page);
var pnum = 1, next = null, prev = null, filter;
if (url === undefined) {
url = conf.url;
if (url)
url += '/';
}
if (params) {
url += "?" + $.param(params);
if (params['page']) {
pnum = params['page'];
} else {
filter = {};
for (var key in params) {
filter[key] = params[key];
}
(conf.parents || []).forEach(function(p) {
if (p == page + 'type')
p = 'type';
if (filter[p]) {
filter[p + '_id'] = filter[p];
delete filter[p];
}
});
}
}
if (pnum > conf.max_local_pages) {
// Set max_local_pages to avoid filling up local storage and
// instead attempt to load HTML directly from the server
// (using built-in jQM loader)
var jqmurl = '/' + url;
spin.start();
jqm.loadPage(jqmurl).then(function() {
spin.stop();
$page = $(":jqmData(url='" + jqmurl + "')");
if ($page.length > 0)
jqm.changePage($page);
else
pages.notFound(url);
});
return;
}
var data = filter ? list.filter(filter) : list.page(pnum);
if (pnum > 1) {
var prevp = {'page': parseInt(pnum) - 1};
prev = conf.url + '/?' + $.param(prevp);
}
if (pnum < data.info.pages) {
var nextp = {'page': parseInt(pnum) + 1};
next = conf.url + '/?' + $.param(nextp);
}
var context = $.extend({}, conf, {
'list': data,
'page': pnum,
'pages': data.info.pages,
'per_page': data.info.per_page,
'total': data.info.total,
'previous': prev ? '/' + prev : null,
'next': next ? '/' + next : null,
'multiple': data.info.pages > 1
});
_addLookups(page, context, false, function(context) {
pages.go(url, page + '_list', context, ui);
});
}
// Generate item detail view context and render with [url]_detail template;
// handles requests for [url]/[id]
function _registerDetail(page) {
var conf = _getConf(page);
var url = conf.url;
var reserved = ["new"];
if (url) {
url += "/";
} else {
// This list is bound to the root URL, don't mistake other lists for items
for (var key in app.config.pages)
reserved.push(app.config.pages[key].url);
}
pages.register(url + '([^/\?]+)', function(match, ui, params) {
if (reserved.indexOf(match[1]) > -1)
return;
app.go(page, ui, params, match[1]);
});
}
function _renderDetail(page, list, ui, params, itemid, url) {
var conf = _getConf(page);
if (url === undefined) {
url = conf.url;
if (url)
url += '/';
url += itemid;
}
var item = list.find(itemid, undefined, undefined, conf.max_local_pages);
if (!item) {
// Item not found in stored list...
if (!conf.partial) {
// If partial is not set, locally stored list is assumed to
// contain the entire dataset, so the item probably does not exist.
pages.notFound(url);
} else {
// Set partial to indicate local list does not represent entire
// dataset; if an item is not found will attempt to load HTML
// directly from the server (using built-in jQM loader)
var jqmurl = '/' + url;
spin.start();
jqm.loadPage(jqmurl).then(function() {
spin.stop();
$page = $(":jqmData(url='" + jqmurl + "')");
if ($page.length > 0)
jqm.changePage($page);
else
pages.notFound(url);
});
}
return;
}
var context = $.extend({}, conf, item);
_addLookups(page, context, false, function(context) {
pages.go(url, page + '_detail', context, ui);
});
}
// Generate item edit context and render with [url]_edit template;
// handles requests for [url]/[id]/edit and [url]/new
function _registerEdit(page) {
var conf = _getConf(page);
pages.register(conf.url + '/([^/]+)/edit', go);
pages.register(conf.url + '/(new)', go);
function go(match, ui, params) {
app.go(page, ui, params, match[1], true);
}
}
function _renderEdit(page, list, ui, params, itemid, url) {
var conf = _getConf(page);
if (itemid != "new") {
// Edit existing item
if (url === undefined)
url = itemid + '/edit';
var item = list.find(itemid, undefined, undefined, conf.max_local_pages);
if (!item) {
pages.notFound(url);
return;
}
var context = $.extend({}, conf, item);
_addLookups(page, context, true, done);
} else {
// Create new item
var context = $.extend({}, conf, params); //FIXME: defaults
if (url === undefined) {
url = 'new';
if (params && $.param(params))
url += '?' + $.param(params);
}
_addLookups(page, context, true, function(context) {
if (!conf.annotated) {
done(context);
return;
}
context['annotations'] = [];
ds.getList({'url': 'annotationtypes'}, function(list) {
var types = list.filter(app.getAnnotationTypeFilter(page, context));
$.each(types, function(i, t) {
context['annotations'].push({'annotationtype_id': t.id});
});
done(context);
});
});
}
function done(context) {
var divid = page + '_' + itemid + '-page';
pages.go(conf.url + '/' + url, page + '_edit', context, ui, false, divid);
}
}
// Render non-list pages with with [url] template;
// handles requests for [url] and [url]/
function _registerOther(page) {
var conf = _getConf(page);
pages.register(conf.url, go);
pages.register(conf.url + '/', go);
function go(match, ui, params) {
app.go(page, ui, params);
}
}
function _renderOther(page, ui, params, url) {
var conf = _getConf(page);
if (url === undefined)
url = conf.url;
var context = $.extend({}, conf, params);
pages.go(url, page, context, ui, conf.once ? true : false);
}
// Handle form submit from [url]_edit views
function _handleForm(evt) {
var $form = $(this);
if ($form.data('json') !== undefined && $form.data('json') == false)
return; // Defer to default (HTML-based) handler
var url = $form.attr('action').substring(1);
var conf = _getConfByUrl(url);
var vals = {};
var $files = $form.find('input[type=file]');
var has_files = ($files.length > 0 && $files.val().length > 0);
if (!app['native'] && has_files) {
// Files present and we're not running in Cordova.
if (window.FormData && window.Blob)
// Modern browser; use FormData to upload files via AJAX.
// FIXME: localStorage version of outbox item will be unusable.
// Can we serialize this object somehow?
vals.data = new FormData(this);
else
// Looks like we're in a an old browser and we can't upload files
// via AJAX or Cordova... Bypass store and assume server is
// configured to accept regular form posts.
return;
} else {
// No files, or we're running in Cordova.
// Use a simple dictionary for values, which is better for outbox
// serialization. store will automatically use Cordova FileUpload iff
// there is a form field named 'fileupload'.
$.each($form.serializeArray(), function(i, v) {
vals[v.name] = v.value;
});
}
// Skip regular form submission, we're saving this via store
evt.preventDefault();
vals.url = url;
if (url == conf.url + "/" || !conf.list)
vals.method = "POST"; // REST API uses POST for new records
else
vals.method = "PUT"; // .. but PUT to update existing records
$('.error').html('');
spin.start();
ds.save(vals, undefined, function(item) {
spin.stop();
if (item && item.saved) {
// Save was successful
var options = {'reverse': true, 'transition': _saveTransition};
if (conf.list)
jqm.changePage('/' + conf.url + '/' + item.newid, options);
else
jqm.changePage('/' + conf.url + '/', options);
return;
}
if (!item || !item.error) {
// Save failed for some unknown reason
showError("Error saving data.");
return;
}
// REST API provided general error information
if (typeof(item.error) === 'string') {
showError(item.error);
return;
}
// REST API provided per-field error information
var errs = Object.keys(item.error);
// General API errors have a single "detail" attribute
if (errs.length == 1 && errs[0] == 'detail') {
showError(item.error.detail);
} else {
// Form errors (other than non_field_errors) are keyed by field name
for (f in item.error) {
// FIXME: there may be multiple errors per field
var err = item.error[f][0];
if (f == 'non_field_errors')
showError(err);
else
showError(err, f);
}
if (!item.error.non_field_errors)
showError('One or more errors were found.');
}
function showError(err, field) {
var sel = '.' + conf.page + '-' + (field ? field + '-' : '') + 'errors';
$form.find(sel).html(err);
}
});
}
// Successful results from REST API contain the newly saved object
function _applyResult(item, result) {
if (result && result.id) {
var conf = _getConfByUrl(item.data.url);
item.saved = true;
item.newid = result.id;
ds.getList({'url': conf.url}, function(list) {
var res = $.extend({}, result);
if (conf.annotated && res.annotations)
delete res.annotations;
list.update([res], 'id', conf.reversed);
});
if (conf.annotated && result.annotations) {
var annots = result.annotations;
annots.forEach(function(a) {
a[conf.page + '_id'] = result.id;
});
ds.getList({'url': 'annotations'}, function(list) {
list.update(annots, 'id');
});
}
} else if (app.can_login && result && result.user && result.config) {
app.save_login(result);
pages.go("login", "login");
}
}
// Add various callback functions to context object to automate foreign key
// lookups within templates
function _addLookups(page, context, editable, callback) {
var conf = _getConf(page);
var lookups = {};
$.each(conf.parents || [], function(i, v) {
var pconf = _getConf(v);
lookups[v] = _parent_lookup(v)
if (editable) {
lookups[pconf.url] = _parent_dropdown_lookup(v);
lookups[v + '_list'] = _parent_dropdown_lookup(v);
}
});
$.each(conf.children || [], function(i, v) {
var cconf = _getConf(v);
lookups[cconf.url] = _children_lookup(page, v)
});
if (conf.annotated) {
lookups['annotations'] = _annotation_lookup(page);
lookups['annotationtype'] = _parent_lookup('annotationtype');
}
if (conf.related) {
lookups['relationships'] = _relationship_lookup(page);
lookups['inverserelationships'] = _relationship_lookup(page, true);
lookups['relationshiptype'] = _parent_lookup('relationshiptype');
}
var queue = [];
for (key in lookups)
queue.push(key);
step();
function step() {
if (queue.length == 0) {
callback(context);
return;
}
var key = queue.shift();
lookups[key](context, key, step);
}
}
function _make_lookup(page, fn) {
return function(context, key, callback) {
var conf = _getConf(page);
ds.getList({'url': conf.url}, function(list) {
context[key] = fn(list);
callback(context);
});
}
}
// Simple foreign key lookup
function _parent_lookup(page) {
return _make_lookup(page, function(list) {
return function() {
return list.find(this[page + '_id']);
}
});
}
// List of all potential foreign key values (useful for generating dropdowns)
function _parent_dropdown_lookup(page) {
return _make_lookup(page, function(list) {
return function() {
var obj = this;
var parents = [];
list.forEach(function(v) {
var item = $.extend({}, v);
if (item.id == obj[page + '_id'])
item.selected = true; // Currently selected item
parents.push(item);
});
return parents;
};
});
}
// List of objects with a foreign key pointing to this one
function _children_lookup(ppage, cpage) {
return _make_lookup(cpage, function(list) {
return function() {
var filter = {};
filter[ppage + '_id'] = this.id;
return list.filter(filter);
}
});
}
// List of annotations for this object
// (like _children_lookup but with a dropdown helper)
function _annotation_lookup(page) {
return _make_lookup('annotation', function(list) {
return function() {
var filter = {};
filter[page + '_id'] = this.id;
var annots = [];
list.filter(filter).forEach(function(v) {
var item = $.extend({}, v);
item.selected = function(){return this == item.value};
annots.push(item);
});
return annots;
}
});
}
// List of relationships for this object
// (grouped by type)
function _relationship_lookup(page, inverse) {
var name = inverse ? 'inverserelationship' : 'relationship';
return _make_lookup(name, function(list) {
return function() {
var filter = {}, groups = {};
filter[page + '_id'] = this.id;
list.filter(filter).forEach(function(rel) {
if (!groups[rel.type])
groups[rel.type] = {
'type': rel.type,
'list': []
}
groups[rel.type].list.push(rel)
});
var garray = [];
for (group in groups) {
garray.push(groups[group]);
}
return garray;
}
});
}
// Load configuration based on page id
function _getConf(page) {
var conf = app.config.pages[page];
if (!conf)
throw 'Configuration for "' + page + '" not found!';
return conf;
}
// Helper to load configuration based on URL
function _getConfByUrl(url) {
var parts = url.split('/');
var conf;
for (var p in app.config.pages)
if (app.config.pages[p].url == parts[0]) {
conf = $.extend({}, app.config.pages[p]);
conf.page = p;
}
if (!conf)
throw 'Configuration for "/' + url + '" not found!';
return conf;
}
return app;
});
| js/app.js | /*!
* wq.app - app.js
* Utilizes store and pages to dynamically load and render
* content from a wq.db-compatible REST service
* (c) 2012 S. Andrew Sheppard
* http://wq.io/license
*/
define(['./lib/jquery', './lib/jquery.mobile',
'./store', './pages', './template', './spinner',
'./lib/es5-shim'],
function($, jqm, ds, pages, tmpl, spin) {
var app = {};
app.init = function(config, templates, baseurl, svc) {
if (baseurl === undefined)
baseurl = '';
if (svc === undefined)
svc = baseurl;
app.config = app.default_config = config;
app['native'] = !!window.cordova;
app.can_login = !!config.pages.login;
ds.init(svc, {'format':'json'}, {'applyResult': _applyResult});
pages.init(baseurl);
tmpl.init(templates, templates.partials, config.defaults);
tmpl.setDefault('native', app['native']);
if (app.can_login) {
var user = ds.get('user');
var csrftoken = ds.get('csrftoken');
if (user) {
app.user = user;
tmpl.setDefault('user', user);
tmpl.setDefault('is_authenticated', true);
tmpl.setDefault('csrftoken', csrftoken);
app.config = ds.get({'url': 'config'});
$('body').trigger('login');
}
app.check_login();
pages.register('logout\/?', app.logout);
}
if (config.transitions) {
var def = "default";
if (config.transitions[def])
jqm.defaultPageTransition = config.transitions[def];
if (config.transitions.dialog)
jqm.defaultDialogTransition = config.transitions.dialog;
if (config.transitions.save)
_saveTransition = config.transitions.save;
jqm.maxTransitionWidth = config.transitions.maxwidth || 800;
}
for (var page in app.config.pages) {
var conf = _getConf(page);
if (conf.list) {
_registerList(page);
_registerDetail(page);
_registerEdit(page);
} else if (conf) {
_registerOther(page);
}
}
$(document).on('submit', 'form', _handleForm);
}
app.logout = function() {
if (!app.can_login)
return;
delete app.user;
ds.set('user', null);
tmpl.setDefault('user', null);
tmpl.setDefault('is_authenticated', false);
tmpl.setDefault('csrftoken', null);
app.config = app.default_config;
ds.fetch({'url': 'logout'}, true, undefined, true);
$('body').trigger('logout');
};
app.save_login = function(result) {
var config = result.config,
user = result.user,
csrftoken = result.csrftoken;
if (!app.can_login)
return;
app.config = config;
ds.set({'url': 'config'}, config);
app.user = user;
tmpl.setDefault('user', user);
tmpl.setDefault('is_authenticated', true);
tmpl.setDefault('csrftoken', csrftoken);
ds.set('user', user);
ds.set('csrftoken', csrftoken);
$('body').trigger('login');
};
app.check_login = function() {
if (!app.can_login)
return;
ds.fetch({'url': 'login'}, true, function(result) {
if (result && result.user && result.config) {
app.save_login(result);
} else if (result && app.user) {
app.logout();
}
}, true);
};
// Internal variables and functions
var _saveTransition = "none";
// Wrappers for pages.register & pages.go to handle common use cases
// Determine appropriate context & template for pages.go
app.go = function(page, ui, params, itemid, edit, url) {
if (ui && ui.options && ui.options.data) return; // Ignore form actions
var conf = _getConf(page);
if (!conf.list) {
_renderOther(page, ui, params);
return;
}
ds.getList({'url': conf.url}, function(list) {
if (itemid) {
if (edit)
_renderEdit(page, list, ui, params, itemid, url);
else
_renderDetail(page, list, ui, params, itemid, url);
} else {
_renderList(page, list, ui, params, url);
}
});
}
app.getAnnotationTypeFilter = function(page, obj) {
return {'for': page};
};
// Generate list view context and render with [url]_list template;
// handles requests for [url] and [url]/
function _registerList(page) {
var conf = _getConf(page);
pages.register(conf.url, go);
pages.register(conf.url + '/', go);
function go(match, ui, params) {
app.go(page, ui, params);
}
}
function _renderList(page, list, ui, params, url) {
var conf = _getConf(page);
var pnum = 1, next = null, prev = null, filter;
if (url === undefined) {
url = conf.url;
if (url)
url += '/';
}
if (params) {
url += "?" + $.param(params);
if (params['page']) {
pnum = params['page'];
} else {
filter = {};
for (var key in params) {
filter[key] = params[key];
}
(conf.parents || []).forEach(function(p) {
if (p == page + 'type')
p = 'type';
if (filter[p]) {
filter[p + '_id'] = filter[p];
delete filter[p];
}
});
}
}
if (pnum > conf.max_local_pages) {
// Set max_local_pages to avoid filling up local storage and
// instead attempt to load HTML directly from the server
// (using built-in jQM loader)
var jqmurl = '/' + url;
spin.start();
jqm.loadPage(jqmurl).then(function() {
spin.stop();
$page = $(":jqmData(url='" + jqmurl + "')");
if ($page.length > 0)
jqm.changePage($page);
else
pages.notFound(url);
});
return;
}
var data = filter ? list.filter(filter) : list.page(pnum);
if (pnum > 1) {
var prevp = {'page': parseInt(pnum) - 1};
prev = conf.url + '/?' + $.param(prevp);
}
if (pnum < data.info.pages) {
var nextp = {'page': parseInt(pnum) + 1};
next = conf.url + '/?' + $.param(nextp);
}
var context = {
'list': data,
'page': pnum,
'pages': data.info.pages,
'per_page': data.info.per_page,
'total': data.info.total,
'previous': prev ? '/' + prev : null,
'next': next ? '/' + next : null,
'multiple': data.info.pages > 1
};
_addLookups(page, context, false, function(context) {
pages.go(url, page + '_list', context, ui);
});
}
// Generate item detail view context and render with [url]_detail template;
// handles requests for [url]/[id]
function _registerDetail(page) {
var conf = _getConf(page);
var url = conf.url;
var reserved = ["new"];
if (url) {
url += "/";
} else {
// This list is bound to the root URL, don't mistake other lists for items
for (var key in app.config.pages)
reserved.push(app.config.pages[key].url);
}
pages.register(url + '([^/\?]+)', function(match, ui, params) {
if (reserved.indexOf(match[1]) > -1)
return;
app.go(page, ui, params, match[1]);
});
}
function _renderDetail(page, list, ui, params, itemid, url) {
var conf = _getConf(page);
if (url === undefined) {
url = conf.url;
if (url)
url += '/';
url += itemid;
}
var item = list.find(itemid, undefined, undefined, conf.max_local_pages);
if (!item) {
// Item not found in stored list...
if (!conf.partial) {
// If partial is not set, locally stored list is assumed to
// contain the entire dataset, so the item probably does not exist.
pages.notFound(url);
} else {
// Set partial to indicate local list does not represent entire
// dataset; if an item is not found will attempt to load HTML
// directly from the server (using built-in jQM loader)
var jqmurl = '/' + url;
spin.start();
jqm.loadPage(jqmurl).then(function() {
spin.stop();
$page = $(":jqmData(url='" + jqmurl + "')");
if ($page.length > 0)
jqm.changePage($page);
else
pages.notFound(url);
});
}
return;
}
var context = $.extend({}, item);
_addLookups(page, context, false, function(context) {
pages.go(url, page + '_detail', context, ui);
});
}
// Generate item edit context and render with [url]_edit template;
// handles requests for [url]/[id]/edit and [url]/new
function _registerEdit(page) {
var conf = _getConf(page);
pages.register(conf.url + '/([^/]+)/edit', go);
pages.register(conf.url + '/(new)', go);
function go(match, ui, params) {
app.go(page, ui, params, match[1], true);
}
}
function _renderEdit(page, list, ui, params, itemid, url) {
var conf = _getConf(page);
if (itemid != "new") {
// Edit existing item
if (url === undefined)
url = itemid + '/edit';
var item = list.find(itemid, undefined, undefined, conf.max_local_pages);
if (!item) {
pages.notFound(url);
return;
}
var context = $.extend({}, item);
_addLookups(page, context, true, done);
} else {
// Create new item
var context = $.extend({}, params); //FIXME: defaults
if (url === undefined) {
url = 'new';
if (params && $.param(params))
url += '?' + $.param(params);
}
_addLookups(page, context, true, function(context) {
if (!conf.annotated) {
done(context);
return;
}
context['annotations'] = [];
ds.getList({'url': 'annotationtypes'}, function(list) {
var types = list.filter(app.getAnnotationTypeFilter(page, context));
$.each(types, function(i, t) {
context['annotations'].push({'annotationtype_id': t.id});
});
done(context);
});
});
}
function done(context) {
var divid = page + '_' + itemid + '-page';
pages.go(conf.url + '/' + url, page + '_edit', context, ui, false, divid);
}
}
// Render non-list pages with with [url] template;
// handles requests for [url] and [url]/
function _registerOther(page) {
var conf = _getConf(page);
pages.register(conf.url, go);
pages.register(conf.url + '/', go);
function go(match, ui, params) {
app.go(page, ui, params);
}
}
function _renderOther(page, ui, params, url) {
var conf = _getConf(page);
if (url === undefined)
url = conf.url;
pages.go(url, page, params, ui, conf.once ? true : false);
}
// Handle form submit from [url]_edit views
function _handleForm(evt) {
var $form = $(this);
if ($form.data('json') !== undefined && $form.data('json') == false)
return; // Defer to default (HTML-based) handler
var url = $form.attr('action').substring(1);
var conf = _getConfByUrl(url);
var vals = {};
var $files = $form.find('input[type=file]');
var has_files = ($files.length > 0 && $files.val().length > 0);
if (!app['native'] && has_files) {
// Files present and we're not running in Cordova.
if (window.FormData && window.Blob)
// Modern browser; use FormData to upload files via AJAX.
// FIXME: localStorage version of outbox item will be unusable.
// Can we serialize this object somehow?
vals.data = new FormData(this);
else
// Looks like we're in a an old browser and we can't upload files
// via AJAX or Cordova... Bypass store and assume server is
// configured to accept regular form posts.
return;
} else {
// No files, or we're running in Cordova.
// Use a simple dictionary for values, which is better for outbox
// serialization. store will automatically use Cordova FileUpload iff
// there is a form field named 'fileupload'.
$.each($form.serializeArray(), function(i, v) {
vals[v.name] = v.value;
});
}
// Skip regular form submission, we're saving this via store
evt.preventDefault();
vals.url = url;
if (url == conf.url + "/" || !conf.list)
vals.method = "POST"; // REST API uses POST for new records
else
vals.method = "PUT"; // .. but PUT to update existing records
$('.error').html('');
spin.start();
ds.save(vals, undefined, function(item) {
spin.stop();
if (item && item.saved) {
// Save was successful
var options = {'reverse': true, 'transition': _saveTransition};
if (conf.list)
jqm.changePage('/' + conf.url + '/' + item.newid, options);
else
jqm.changePage('/' + conf.url + '/', options);
return;
}
if (!item || !item.error) {
// Save failed for some unknown reason
showError("Error saving data.");
return;
}
// REST API provided general error information
if (typeof(item.error) === 'string') {
showError(item.error);
return;
}
// REST API provided per-field error information
var errs = Object.keys(item.error);
// General API errors have a single "detail" attribute
if (errs.length == 1 && errs[0] == 'detail') {
showError(item.error.detail);
} else {
// Form errors (other than non_field_errors) are keyed by field name
for (f in item.error) {
// FIXME: there may be multiple errors per field
var err = item.error[f][0];
if (f == 'non_field_errors')
showError(err);
else
showError(err, f);
}
if (!item.error.non_field_errors)
showError('One or more errors were found.');
}
function showError(err, field) {
var sel = '.' + conf.page + '-' + (field ? field + '-' : '') + 'errors';
$form.find(sel).html(err);
}
});
}
// Successful results from REST API contain the newly saved object
function _applyResult(item, result) {
if (result && result.id) {
var conf = _getConfByUrl(item.data.url);
item.saved = true;
item.newid = result.id;
ds.getList({'url': conf.url}, function(list) {
var res = $.extend({}, result);
if (conf.annotated && res.annotations)
delete res.annotations;
list.update([res], 'id', conf.reversed);
});
if (conf.annotated && result.annotations) {
var annots = result.annotations;
annots.forEach(function(a) {
a[conf.page + '_id'] = result.id;
});
ds.getList({'url': 'annotations'}, function(list) {
list.update(annots, 'id');
});
}
} else if (app.can_login && result && result.user && result.config) {
app.save_login(result);
pages.go("login", "login");
}
}
// Add various callback functions to context object to automate foreign key
// lookups within templates
function _addLookups(page, context, editable, callback) {
var conf = _getConf(page);
var lookups = {};
$.each(conf.parents || [], function(i, v) {
var pconf = _getConf(v);
lookups[v] = _parent_lookup(v)
if (editable) {
lookups[pconf.url] = _parent_dropdown_lookup(v);
lookups[v + '_list'] = _parent_dropdown_lookup(v);
}
});
$.each(conf.children || [], function(i, v) {
var cconf = _getConf(v);
lookups[cconf.url] = _children_lookup(page, v)
});
if (conf.annotated) {
lookups['annotations'] = _annotation_lookup(page);
lookups['annotationtype'] = _parent_lookup('annotationtype');
}
if (conf.related) {
lookups['relationships'] = _relationship_lookup(page);
lookups['inverserelationships'] = _relationship_lookup(page, true);
lookups['relationshiptype'] = _parent_lookup('relationshiptype');
}
var queue = [];
for (key in lookups)
queue.push(key);
step();
function step() {
if (queue.length == 0) {
callback(context);
return;
}
var key = queue.shift();
lookups[key](context, key, step);
}
}
function _make_lookup(page, fn) {
return function(context, key, callback) {
var conf = _getConf(page);
ds.getList({'url': conf.url}, function(list) {
context[key] = fn(list);
callback(context);
});
}
}
// Simple foreign key lookup
function _parent_lookup(page) {
return _make_lookup(page, function(list) {
return function() {
return list.find(this[page + '_id']);
}
});
}
// List of all potential foreign key values (useful for generating dropdowns)
function _parent_dropdown_lookup(page) {
return _make_lookup(page, function(list) {
return function() {
var obj = this;
var parents = [];
list.forEach(function(v) {
var item = $.extend({}, v);
if (item.id == obj[page + '_id'])
item.selected = true; // Currently selected item
parents.push(item);
});
return parents;
};
});
}
// List of objects with a foreign key pointing to this one
function _children_lookup(ppage, cpage) {
return _make_lookup(cpage, function(list) {
return function() {
var filter = {};
filter[ppage + '_id'] = this.id;
return list.filter(filter);
}
});
}
// List of annotations for this object
// (like _children_lookup but with a dropdown helper)
function _annotation_lookup(page) {
return _make_lookup('annotation', function(list) {
return function() {
var filter = {};
filter[page + '_id'] = this.id;
var annots = [];
list.filter(filter).forEach(function(v) {
var item = $.extend({}, v);
item.selected = function(){return this == item.value};
annots.push(item);
});
return annots;
}
});
}
// List of relationships for this object
// (grouped by type)
function _relationship_lookup(page, inverse) {
var name = inverse ? 'inverserelationship' : 'relationship';
return _make_lookup(name, function(list) {
return function() {
var filter = {}, groups = {};
filter[page + '_id'] = this.id;
list.filter(filter).forEach(function(rel) {
if (!groups[rel.type])
groups[rel.type] = {
'type': rel.type,
'list': []
}
groups[rel.type].list.push(rel)
});
var garray = [];
for (group in groups) {
garray.push(groups[group]);
}
return garray;
}
});
}
// Load configuration based on page id
function _getConf(page) {
var conf = app.config.pages[page];
if (!conf)
throw 'Configuration for "' + page + '" not found!';
return conf;
}
// Helper to load configuration based on URL
function _getConfByUrl(url) {
var parts = url.split('/');
var conf;
for (var p in app.config.pages)
if (app.config.pages[p].url == parts[0]) {
conf = $.extend({}, app.config.pages[p]);
conf.page = p;
}
if (!conf)
throw 'Configuration for "/' + url + '" not found!';
return conf;
}
return app;
});
| add config to template context
| js/app.js | add config to template context | <ide><path>s/app.js
<ide> pages.init(baseurl);
<ide> tmpl.init(templates, templates.partials, config.defaults);
<ide> tmpl.setDefault('native', app['native']);
<add> tmpl.setDefault('app_config', app.config);
<ide>
<ide> if (app.can_login) {
<ide> var user = ds.get('user');
<ide> tmpl.setDefault('is_authenticated', true);
<ide> tmpl.setDefault('csrftoken', csrftoken);
<ide> app.config = ds.get({'url': 'config'});
<add> tmpl.setDefault('app_config', app.config);
<ide> $('body').trigger('login');
<ide> }
<ide> app.check_login();
<ide> tmpl.setDefault('is_authenticated', false);
<ide> tmpl.setDefault('csrftoken', null);
<ide> app.config = app.default_config;
<add> tmpl.setDefault('app_config', app.config);
<ide> ds.fetch({'url': 'logout'}, true, undefined, true);
<ide> $('body').trigger('logout');
<ide> };
<ide> return;
<ide> app.config = config;
<ide> ds.set({'url': 'config'}, config);
<add> tmpl.setDefault('app_config', config);
<ide> app.user = user;
<ide> tmpl.setDefault('user', user);
<ide> tmpl.setDefault('is_authenticated', true);
<ide> next = conf.url + '/?' + $.param(nextp);
<ide> }
<ide>
<del> var context = {
<add> var context = $.extend({}, conf, {
<ide> 'list': data,
<ide> 'page': pnum,
<ide> 'pages': data.info.pages,
<ide> 'previous': prev ? '/' + prev : null,
<ide> 'next': next ? '/' + next : null,
<ide> 'multiple': data.info.pages > 1
<del> };
<add> });
<ide> _addLookups(page, context, false, function(context) {
<ide> pages.go(url, page + '_list', context, ui);
<ide> });
<ide> }
<ide> return;
<ide> }
<del> var context = $.extend({}, item);
<add> var context = $.extend({}, conf, item);
<ide> _addLookups(page, context, false, function(context) {
<ide> pages.go(url, page + '_detail', context, ui);
<ide> });
<ide> pages.notFound(url);
<ide> return;
<ide> }
<del> var context = $.extend({}, item);
<add> var context = $.extend({}, conf, item);
<ide> _addLookups(page, context, true, done);
<ide> } else {
<ide> // Create new item
<del> var context = $.extend({}, params); //FIXME: defaults
<add> var context = $.extend({}, conf, params); //FIXME: defaults
<ide> if (url === undefined) {
<ide> url = 'new';
<ide> if (params && $.param(params))
<ide> var conf = _getConf(page);
<ide> if (url === undefined)
<ide> url = conf.url;
<del> pages.go(url, page, params, ui, conf.once ? true : false);
<add> var context = $.extend({}, conf, params);
<add> pages.go(url, page, context, ui, conf.once ? true : false);
<ide> }
<ide>
<ide> // Handle form submit from [url]_edit views |
|
JavaScript | mit | e935418c0f3d6129bde324002fdab6adc1511a90 | 0 | LordVonAdel/U-Singularity,LordVonAdel/U-Singularity | const Grid = require('./grid.js');
const Entity = require('./entity.js');
const fs = require('fs');
const mixtures = require('./mixtures.js');
const Bucket = require('./bucket.js');
const PowerSystem = require('./systems/power.js');
const Atmos = require('./systems/atmos.js');
//The constructor for a world instance
function World(game){
this.width = 100;
this.height = 100;
this.ents = {};
this.entsStep = []; //A list with all entites having a step event
this.grid = new Grid(this.width, this.height);
this.gridEntities = new Grid(this.width, this.height);
this.nextEntId = 0;
this.gridEntities.forEach(function(tileX,tileY){
return [];
});
this.spawnX = 0;
this.spawnY = 0;
this.buckets = new Grid(this.width/config.bucket.width,this.height/config.bucket.height);
this.buckets.forEach(function(tileX,tileY){
return new Bucket(tileX,tileY,this);
}, this);
this.game = game;
this.consolePrefix = "[Game:"+game.index+"-World:0]";
console.log(this.consolePrefix+"Initialized World");
console.log(this.consolePrefix+"Using "+this.buckets.width+"x"+this.buckets.height+" ("+this.buckets.width*this.buckets.height+") buckets");
//Initializing systems
this.systems = {
power: new PowerSystem(this),
atmos: new Atmos(this)
};
for (var k in this.systems){
console.log(this.consolePrefix+"Initialized " + this.systems[k].modulename);
}
}
//resizes the world to a new width and height
World.prototype.resize = function(width, height){
this.width = width;
this.height = height;
this.grid.resize(width, height);
this.gridEntities.resize(width,height);
this.buckets.resize(Math.floor(width/config.bucket.width), Math.floor(height/config.bucket.height))
this.buckets.forEach(function(tileX,tileY){
this.buckets.cellSet(tileX,tileY,new Bucket(tileX,tileY,this));
}, this);
}
//sets the content of a cell in the world
World.prototype.cellSet = function(tileX,tileY,id){
var bucket = this.buckets.cellGet(Math.floor(tileX/config.bucket.width),Math.floor(tileY/config.bucket.height))
bucket.broadcastArea('change_tile',{x:tileX, y:tileY, id:id});
this.grid.cellSet(tileX,tileY,id);
var ents = this.gridEntities.cellGet(tileX, tileY);
for (var i = 0; i < ents.length; i++){
ents[i].update();
}
}
//gets the content of a cell in the world
World.prototype.cellGet = function(tileX,tileY){
return this.grid.cellGet(tileX,tileY);
}
//gets the tile details of a cell in the world
World.prototype.cellGetTile = function(tileX,tileY){
return loader.res.tiles[this.grid.cellGet(tileX,tileY)];
}
//gets a region of the world as a string
World.prototype.regionGet = function(x,y,width,height){
this.grid.saveRegion(x,y,width,height);
}
//sets the spawnpoint of this wolrld
World.prototype.setSpawn = function(x, y){
this.spawnX = +x;
this.spawnY = +y;
}
//saves the world to a file
World.prototype.save = function(filename){
this.saveRegion(0, 0, this.width, this.height, filename);
}
//saves a region of the world
World.prototype.saveRegion = function(x, y, width, height, filename){
var obj = {};
var ret = false;
obj.grid = this.grid.saveRegion(x, y, width, height);
obj.worldWidth = width;
obj.worldHeight = height;
obj.nextEntId = nextEntId;
obj.spawnX = this.spawnX;
obj.spawnY = this.spawnY;
var ents = {}
for (var key in this.ents){
var ent = this.ents[key];
if (ent.tx >= x && ent.ty >= y && ent.tx < x + width && ent.ty < y + height){
ents[key] = {
x: this.ents[key].x,
y: this.ents[key].y,
tx: this.ents[key].tx,
ty: this.ents[key].ty,
type: this.ents[key].type,
sync: this.ents[key].sync
}
}
}
obj.ents = ents;
str = JSON.stringify(obj);
var that = this;
fs.writeFile(filename,str,"utf8",function(err){
if (err){
ret = false;
that.game.sendChatMessage("Failed to save world!");
}else{
ret = true;
that.game.sendChatMessage("World Saved in "+filename);
}
});
return ret;
}
//clears the world
World.prototype.clear = function(){
this.broadcast('clear',{});
this.gridEntities.forEach(function(tileX,tileY){
this.gridEntities.cellSet(tileX,tileY,[]);
}, this);
this.grid.forEach(function(tileX,tileY){
this.grid.cellSet(tileX,tileY,0);
}, this);
this.spawnX = 0;
this.spawnY = 0;
this.resize(100,100);
/*
this.buckets.forEach(function(tileX,tileY){
var bucket = that.buckets.cellGet(tileX,tileY);
if (bucket){
console.log("Clear bucket!")
bucket.clear();
}
});
*/
this.ents = {};
this.broadcast('world',{w:this.width,h:this.height,str:this.grid.save()});
}
//loads the world from a file
World.prototype.load = function(filename){
var that = this;
fs.readFile(filename,function(err, data){
if (err){
that.broadcast('chat',{msg: "Failed to load map: "+filename});
}else{
that.clear();
var obj = JSON.parse(data);
that.resize(obj.worldWidth, obj.worldHeight);
that.grid.load(obj.grid);
that.spawnX = +obj.spawnX || 0;
that.spawnY = +obj.spawnY || 0;
that.nextEntId = obj.nextEntId || 100;
var ents = obj.ents;
for (var k in ents) {
var spwn = ents[k];
var ent = that.spawnEntity(spwn.type, spwn.tx, spwn.ty);
ent.x = spwn.x;
ent.y = spwn.y;
if (!ent.ent){
console.error("There are things in this map, which we don't know what they are! ("+spwn.type+")");
}else{
if (spwn.sync == undefined){
}else{
Object.assign(ent.sync, spwn.sync);
}
ent.update();
}
}
that.broadcast('world',{w:that.width,h:that.height,str:that.grid.save()});
}
});
}
//adds a thing at a position to collide with
World.prototype.gridEntAdd = function(tileX,tileY,obj){
var cell = this.gridEntities.cellGet(tileX,tileY);
if (Array.isArray(cell)){
if (!cell.includes(obj)){
cell.push(obj);
}
}
}
//removes a thing at a position, which could be collided with
World.prototype.gridEntFree = function(tileX,tileY,obj){
var cell = this.gridEntities.cellGet(tileX,tileY);
if (Array.isArray(cell)){
var index = cell.indexOf(obj)
if (index != -1){
cell.splice(index,1);
}
}
}
//checks if something is at a specific position which blocks it
World.prototype.collisionCheck = function(tileX,tileY){
var col = 0;
var tile_id = this.grid.cellGet(tileX,tileY);
var tile = global.res.tiles[tile_id];
if (tile != undefined){
col = tile.collision;
}
if (this.gridEntities.cellGet(tileX,tileY) instanceof Array){
col += this.collisionsGet(tileX, tileY).length;
}
return (col != 0);
}
//get array of solid ents on position
World.prototype.collisionsGet = function(tileX, tileY){
if (this.isInWorld(tileX, tileY)){
return this.gridEntities.cellGet(tileX,tileY).filter((ent) => {return ent.collision});
}
}
//Checks if a tile is in the world
World.prototype.isInWorld = function(x, y){
if (x >= 0 && y >= 0 && x < this.width && y < this.height){
return true;
}else{
return false;
}
}
//returns the distance between to points
World.prototype.dist = function(x1,y1,x2,y2){
return Math.sqrt( Math.pow((x1-x2),2)+Math.pow((y1-y2),2));
}
//executes a step / tick in the world
World.prototype.step = function(delta){
for (var i = 0; i < this.entsStep.length; i++){
var ent = this.entsStep[i];
ent.step(delta);
if (ent.animation){
ent.animate(delta);
}
}
for (var k in this.systems){
var sys = this.systems[k];
if (sys.step){
sys.step(delta);
}
}
}
//sends a packet to all player on this world
World.prototype.broadcast = function(event, data){
for (var i = 0; i < this.game.clients.length; i++) {
var player = this.game.clients[i];
if (player.world == this){
player.socket.emit(event, data);
}
}
}
//gets an entity form this world by its id
World.prototype.getEntById = function(entId){
var ent = this.ents[entId];
if (ent == undefined){
return null
}else{
return ent;
}
}
//gets a list of entites from this world based on the type
World.prototype.getEntsByType = function(type){
var list = [];
for (k in this.ents){
var ent = this.ents[k];
if (ent.type == type){
list.push(ent);
}
}
return list;
}
//gets a list of entities from this world based on the position
World.prototype.getEntsByPosition = function(tileX, tileY){
return this.gridEntities.cellGet(tileX, tileY);
}
//Spawn an entity somewhere in the world
World.prototype.spawnEntity = function(type, x, y){
var x = x || 0;
var y = y || 0;
var entity = new Entity(this, type, x, y);
if (!entity.error){
entity.spawn();
this.nextEntId ++;
}
//update other ents on this cell
var ents = this.gridEntities.cellGet(x, y);
if (ents){
for (var i = 0; i < ents.length; i++){
var ent = ents[i];
if (ent != entity){
ent.update();
}
}
}
return entity;
}
//Spawn Item
World.prototype.spawnItem = function(x, y, item){
var entity = new Entity(this, "item", x, y);
entity.spawn();
entity.sync.item = item;
entity.update();
this.nextEntId ++;
return entity;
}
module.exports = World; | code/world.js | const Grid = require('./grid.js');
const Entity = require('./entity.js');
const fs = require('fs');
const mixtures = require('./mixtures.js');
const Bucket = require('./bucket.js');
const PowerSystem = require('./systems/power.js');
const Atmos = require('./systems/atmos.js');
//The constructor for a world instance
function World(game){
this.width = 100;
this.height = 100;
this.ents = {};
this.entsStep = []; //A list with all entites having a step event
this.grid = new Grid(this.width, this.height);
this.gridEntities = new Grid(this.width, this.height);
this.nextEntId = 0;
this.gridEntities.forEach(function(tileX,tileY){
return [];
});
this.spawnX = 0;
this.spawnY = 0;
this.buckets = new Grid(this.width/config.bucket.width,this.height/config.bucket.height);
this.buckets.forEach(function(tileX,tileY){
return new Bucket(tileX,tileY,this);
}, this);
this.game = game;
this.consolePrefix = "[Game:"+game.index+"-World:0]";
console.log(this.consolePrefix+"Initialized World");
console.log(this.consolePrefix+"Using "+this.buckets.width+"x"+this.buckets.height+" ("+this.buckets.width*this.buckets.height+") buckets");
//Initializing systems
this.systems = {
power: new PowerSystem(this),
atmos: new Atmos(this)
};
for (var k in this.systems){
console.log(this.consolePrefix+"Initialized " + this.systems[k].modulename);
}
}
//resizes the world to a new width and height
World.prototype.resize = function(width, height){
this.width = width;
this.height = height;
this.grid.resize(width, height);
this.gridEntities.resize(width,height);
this.buckets.resize(Math.floor(width/config.bucket.width), Math.floor(height/config.bucket.height))
this.buckets.forEach(function(tileX,tileY){
this.buckets.cellSet(tileX,tileY,new Bucket(tileX,tileY,this));
}, this);
}
//sets the content of a cell in the world
World.prototype.cellSet = function(tileX,tileY,id){
var bucket = this.buckets.cellGet(Math.floor(tileX/config.bucket.width),Math.floor(tileY/config.bucket.height))
bucket.broadcastArea('change_tile',{x:tileX, y:tileY, id:id});
this.grid.cellSet(tileX,tileY,id);
var ents = this.gridEntities.cellGet(tileX, tileY);
for (var i = 0; i < ents.length; i++){
ents[i].update();
}
}
//gets the content of a cell in the world
World.prototype.cellGet = function(tileX,tileY){
return this.grid.cellGet(tileX,tileY);
}
//gets the tile details of a cell in the world
World.prototype.cellGetTile = function(tileX,tileY){
return loader.res.tiles[this.grid.cellGet(tileX,tileY)];
}
//gets a region of the world as a string
World.prototype.regionGet = function(x,y,width,height){
this.grid.saveRegion(x,y,width,height);
}
//sets the spawnpoint of this wolrld
World.prototype.setSpawn = function(x, y){
this.spawnX = +x;
this.spawnY = +y;
}
//saves the world to a file
World.prototype.save = function(filename){
this.saveRegion(0, 0, this.width, this.height, filename);
}
World.prototype.saveRegion = function(x, y, width, height, filename){
var obj = {};
var ret = false;
obj.grid = this.grid.saveRegion(x, y, width, height);
obj.worldWidth = width;
obj.worldHeight = height;
obj.nextEntId = nextEntId;
obj.spawnX = this.spawnX;
obj.spawnY = this.spawnY;
var ents = {}
for (var key in this.ents){
var ent = this.ents[key];
if (ent.tx >= x && ent.ty >= y && ent.tx < x + width && ent.ty < y + height){
ents[key] = {
x: this.ents[key].x,
y: this.ents[key].y,
tx: this.ents[key].tx,
ty: this.ents[key].ty,
type: this.ents[key].type,
sync: this.ents[key].sync
}
}
}
obj.ents = ents;
str = JSON.stringify(obj);
var that = this;
fs.writeFile(filename,str,"utf8",function(err){
if (err){
ret = false;
that.game.sendChatMessage("Failed to save world!");
}else{
ret = true;
that.game.sendChatMessage("World Saved in "+filename);
}
});
return ret;
}
//clears the world
World.prototype.clear = function(){
this.broadcast('clear',{});
this.gridEntities.forEach(function(tileX,tileY){
this.gridEntities.cellSet(tileX,tileY,[]);
}, this);
this.grid.forEach(function(tileX,tileY){
this.grid.cellSet(tileX,tileY,0);
}, this);
this.spawnX = 0;
this.spawnY = 0;
this.resize(100,100);
/*
this.buckets.forEach(function(tileX,tileY){
var bucket = that.buckets.cellGet(tileX,tileY);
if (bucket){
console.log("Clear bucket!")
bucket.clear();
}
});
*/
this.ents = {};
this.broadcast('world',{w:this.width,h:this.height,str:this.grid.save()});
}
//loads the world from a file
World.prototype.load = function(filename){
var that = this;
fs.readFile(filename,function(err, data){
if (err){
that.broadcast('chat',{msg: "Failed to load map: "+filename});
}else{
that.clear();
var obj = JSON.parse(data);
that.resize(obj.worldWidth, obj.worldHeight);
that.grid.load(obj.grid);
that.spawnX = +obj.spawnX || 0;
that.spawnY = +obj.spawnY || 0;
that.nextEntId = obj.nextEntId || 100;
var ents = obj.ents;
for (var k in ents) {
var spwn = ents[k];
var ent = that.spawnEntity(spwn.type, spwn.tx, spwn.ty);
ent.x = spwn.x;
ent.y = spwn.y;
if (!ent.ent){
console.error("There are things in this map, which we don't know what they are! ("+spwn.type+")");
}else{
if (spwn.sync == undefined){
}else{
Object.assign(ent.sync, spwn.sync);
}
ent.update();
}
}
that.broadcast('world',{w:that.width,h:that.height,str:that.grid.save()});
}
});
}
//adds a thing at a position to collide with
World.prototype.gridEntAdd = function(tileX,tileY,obj){
var cell = this.gridEntities.cellGet(tileX,tileY);
if (Array.isArray(cell)){
if (!cell.includes(obj)){
cell.push(obj);
}
}
}
//removes a thing at a position, which could be collided with
World.prototype.gridEntFree = function(tileX,tileY,obj){
var cell = this.gridEntities.cellGet(tileX,tileY);
if (Array.isArray(cell)){
var index = cell.indexOf(obj)
if (index != -1){
cell.splice(index,1);
}
}
}
//checks if something is at a specific position which blocks it
World.prototype.collisionCheck = function(tileX,tileY){
var col = 0;
var tile_id = this.grid.cellGet(tileX,tileY);
var tile = global.res.tiles[tile_id];
if (tile != undefined){
col = tile.collision;
}
if (this.gridEntities.cellGet(tileX,tileY) instanceof Array){
col += this.collisionsGet(tileX, tileY).length;
}
return (col != 0);
}
//get array of solid ents on position
World.prototype.collisionsGet = function(tileX, tileY){
if (this.isInWorld(tileX, tileY)){
return this.gridEntities.cellGet(tileX,tileY).filter((ent) => {return ent.collision});
}
}
//Checks if a tile is in the world
World.prototype.isInWorld = function(x, y){
if (x >= 0 && y >= 0 && x < this.width && y < this.height){
return true;
}else{
return false;
}
}
//returns the distance between to points
World.prototype.dist = function(x1,y1,x2,y2){
return Math.sqrt( Math.pow((x1-x2),2)+Math.pow((y1-y2),2));
}
//executes a step / tick in the world
World.prototype.step = function(delta){
for (var i = 0; i < this.entsStep.length; i++){
var ent = this.entsStep[i];
ent.step(delta);
if (ent.animation){
ent.animate(delta);
}
}
for (var k in this.systems){
var sys = this.systems[k];
if (sys.step){
sys.step(delta);
}
}
}
//sends a packet to all player on this world
World.prototype.broadcast = function(event, data){
for (var i = 0; i < this.game.clients.length; i++) {
var player = this.game.clients[i];
if (player.world == this){
player.socket.emit(event, data);
}
}
}
//gets an entity form this world by its id
World.prototype.getEntById = function(entId){
var ent = this.ents[entId];
if (ent == undefined){
return null
}else{
return ent;
}
}
//gets a list of entites from this world based on the type
World.prototype.getEntsByType = function(type){
var list = [];
for (k in this.ents){
var ent = this.ents[k];
if (ent.type == type){
list.push(ent);
}
}
return list;
}
//gets a list of entities from this world based on the position
World.prototype.getEntsByPosition = function(tileX, tileY){
return this.gridEntities.cellGet(tileX, tileY);
}
//Spawn an entity somewhere in the world
World.prototype.spawnEntity = function(type, x, y){
var x = x || 0;
var y = y || 0;
var entity = new Entity(this, type, x, y);
if (!entity.error){
entity.spawn();
this.nextEntId ++;
}
//update other ents on this cell
var ents = this.gridEntities.cellGet(x, y);
if (ents){
for (var i = 0; i < ents.length; i++){
var ent = ents[i];
if (ent != entity){
ent.update();
}
}
}
return entity;
}
//Spawn Item
World.prototype.spawnItem = function(x, y, item){
var entity = new Entity(this, "item", x, y);
entity.spawn();
entity.sync.item = item;
entity.update();
this.nextEntId ++;
return entity;
}
module.exports = World; | Added a missing comment
| code/world.js | Added a missing comment | <ide><path>ode/world.js
<ide> this.saveRegion(0, 0, this.width, this.height, filename);
<ide> }
<ide>
<add>//saves a region of the world
<ide> World.prototype.saveRegion = function(x, y, width, height, filename){
<ide> var obj = {};
<ide> var ret = false; |
|
JavaScript | mit | bd070590e2680db0ddd30d8df1d8b9c9f67dea0b | 0 | rishabhdixit/woocommercePlugin,BuildFire/woocommercePlugin,rishabhdixit/woocommercePlugin,BuildFire/woocommercePlugin | var express = require('express')
, WooCommerceAPI = require('woocommerce-api')
, bodyParser = require('body-parser')
, app = express()
, cookieParser = require('cookie-parser')
, session = require('express-session')
, _Port = 3000
, env = process.env.NODE_ENV || 'development'
, server = require('http').createServer(app);
var getErrorStatusCode = function (errCode) {
switch (errCode) {
case 'woocommerce_api_unsupported_method' :
return 400;
break;
case 'woocommerce_api_authentication_error' :
return 401;
break;
case 'woocommerce_api_invalid_order' :
return 404;
break;
case 'woocommerce_api_invalid_product' :
return 500;
break;
}
};
var getWooCommerceObject = function (storeURL, consumerKey, consumerSecret) {
return new WooCommerceAPI({
url: storeURL,
consumerKey: consumerKey,
consumerSecret: consumerSecret,
version: 'v3' // WooCommerce API version
});
};
/* To Allow cross-domain Access-Control*/
var allowCrossDomain = function (req, res, next) {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
// intercept OPTIONS method
if ('OPTIONS' == req.method) {
res.send(200);
}
else {
next();
}
};
app.use(allowCrossDomain);
// Parsing json and urlencoded requests
app.use(bodyParser.urlencoded({extended: false}));
app.use(bodyParser.json());
app.use(bodyParser.json({type: 'application/vnd.api+json'}));
app.use(cookieParser());
app.use(session({
secret: 'keyboard cat',
resave: false,
saveUninitialized: true
}));
/*initialize routes*/
app.get('/', function (req, res) {
res.send('Welcome to our proxy server!').end();
});
app.post('/initialize', function (req, res) {
getProductCategories(req, res);
});
function isJson(str) {
try {
JSON.parse(str);
} catch (e) {
return false;
}
return true;
}
app.get('/getProducts', function (req, res) {
var WooCommerce = getWooCommerceObject(req.query.storeURL, req.query.consumerKey, req.query.consumerSecret);
if(req.query && req.query.id) {
WooCommerce.get('products/' + req.query.id, function(err, data, response) {
response = response && isJson(response) && JSON.parse(response);
if(err || !response) {
res.send({
data: err || response,
status: 500
});
} else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
});
} else {
WooCommerce.get('products', function (err, data, response) {
response = response && isJson(response) && JSON.parse(response);
console.log('response is::::::::::', err, response);
if(err || !response) {
res.send({
data: err || response,
status: 500
});
} else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
});
}
});
app.get('/productCategories', function (req, res) {
getProductCategories(req, res);
});
app.get('/getProductsByCategory', function (req, res) {
var url = req.query && req.query.slug ? 'products?filter[category]=' + req.query.slug + '&filter[limit]=' + req.query.pageSize + '&page=' + req.query.pageNumber : 'products/';
console.log('getProductsByCategory url is:', url);
var WooCommerce = getWooCommerceObject(req.query.storeURL, req.query.consumerKey, req.query.consumerSecret);
WooCommerce.get(url, function (err, data, response) {
response = response && isJson(response) && JSON.parse(response);
if(err || !response) {
res.send({
data: err || response,
status: 500
});
} else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
})
});
function getProductCategories(req, res) {
var url = req.query && req.query.pageSize && req.query.pageNumber ? 'products/categories?filter[limit]=' + req.query.pageSize + '&page=' + req.query.pageNumber : 'products/categories';
console.log('url is >>>>>>>>>>>', url, req.query, req.body);
var storeURL = req.query.storeURL || req.body.storeURL;
var consumerKey = req.query.consumerKey || req.body.consumerKey;
var consumerSecret = req.query.consumerSecret || req.body.consumerSecret;
var WooCommerce = getWooCommerceObject(storeURL, consumerKey, consumerSecret);
WooCommerce.get(url, function (err, data, response) {
response = response && isJson(response) && JSON.parse(response);
console.log('error and response is : ', err, response);
if (err || !response) {
res.send({
data: err || response,
status: 500
});
} else if (response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
});
}
/**
* Different setup for 'development' and 'production' modes
* @type {string}
*/
if (env === 'development') {
// Development-mode-specific configuration
console.info('Node is running in development mode');
}
else if (env === 'test') {
// Development-mode-specific configuration
console.info('Node is running in test mode');
}
else {
// Production-mode-specific configuration goes here...
console.info('Node is running in production mode');
}
/**
* Server init and start
*/
server.listen(_Port);
function stopWebServer() {
server.close(function () {
console.info('Webserver shutdown');
process.exit();
});
}
var gracefulShutdown = function () {
console.info("Received kill signal, shutting down Webserver gracefully.");
stopWebServer();
// if after
setTimeout(function () {
console.error("Could not close connections in time, forcefully shutting down webserver");
process.exit();
}, 10 * 1000);
};
//listen for Ctrl + C
process.on('SIGINT', gracefulShutdown);
// listen for TERM signal .e.g. kill
process.on('SIGTERM', gracefulShutdown);
// listen for uncaughtException
process.on('uncaughtException', function (err) {
console.error("Uncaught Exception: " + err);
console.error("Stack: " + err.stack);
process.exit(1);
});
console.info('Express server listening on port: %s', server.address().port);
module.exports = function () {
return server;
}; | server/index.js | var express = require('express')
, WooCommerceAPI = require('woocommerce-api')
, bodyParser = require('body-parser')
, app = express()
, cookieParser = require('cookie-parser')
, session = require('express-session')
, _Port = 3000
, env = process.env.NODE_ENV || 'development'
, server = require('http').createServer(app);
var getErrorStatusCode = function (errCode) {
switch (errCode) {
case 'woocommerce_api_unsupported_method' :
return 400;
break;
case 'woocommerce_api_authentication_error' :
return 401;
break;
case 'woocommerce_api_invalid_order' :
return 404;
break;
case 'woocommerce_api_invalid_product' :
return 500;
break;
}
};
var getWooCommerceObject = function (storeURL, consumerKey, consumerSecret) {
return new WooCommerceAPI({
url: storeURL,
consumerKey: consumerKey,
consumerSecret: consumerSecret,
version: 'v3' // WooCommerce API version
});
};
/* To Allow cross-domain Access-Control*/
var allowCrossDomain = function (req, res, next) {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
// intercept OPTIONS method
if ('OPTIONS' == req.method) {
res.send(200);
}
else {
next();
}
};
app.use(allowCrossDomain);
// Parsing json and urlencoded requests
app.use(bodyParser.urlencoded({extended: false}));
app.use(bodyParser.json());
app.use(bodyParser.json({type: 'application/vnd.api+json'}));
app.use(cookieParser());
app.use(session({
secret: 'keyboard cat',
resave: false,
saveUninitialized: true
}));
/*initialize routes*/
app.get('/', function (req, res) {
res.send('Welcome to our proxy server!').end();
});
app.post('/initialize', function (req, res) {
getProductCategories(req, res);
});
app.get('/getProducts', function (req, res) {
var WooCommerce = getWooCommerceObject(req.query.storeURL, req.query.consumerKey, req.query.consumerSecret);
if(req.query && req.query.id) {
WooCommerce.get('products/' + req.query.id, function(err, data, response) {
response = response && JSON.parse(response);
if(err) {
res.send({
data: err,
status: 500
});
} else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
});
} else {
WooCommerce.get('products', function (err, data, response) {
response = response && JSON.parse(response);
console.log('response is::::::::::', err, response);
if(err) {
res.send({
data: err,
status: 500
});
} else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
});
}
});
app.get('/productCategories', function (req, res) {
getProductCategories(req, res);
});
app.get('/getProductsByCategory', function (req, res) {
var url = req.query && req.query.slug ? 'products?filter[category]=' + req.query.slug + '&filter[limit]=' + req.query.pageSize + '&page=' + req.query.pageNumber : 'products/';
console.log('getProductsByCategory url is:', url);
var WooCommerce = getWooCommerceObject(req.query.storeURL, req.query.consumerKey, req.query.consumerSecret);
WooCommerce.get(url, function (err, data, response) {
response = response && JSON.parse(response);
if(err) {
res.send({
data: err,
status: 500
});
} else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
})
});
function getProductCategories(req, res) {
var url = req.query && req.query.pageSize && req.query.pageNumber ? 'products/categories?filter[limit]=' + req.query.pageSize + '&page=' + req.query.pageNumber : 'products/categories';
console.log('url is >>>>>>>>>>>', url, req.query, req.body);
var storeURL = req.query.storeURL || req.body.storeURL;
var consumerKey = req.query.consumerKey || req.body.consumerKey;
var consumerSecret = req.query.consumerSecret || req.body.consumerSecret;
var WooCommerce = getWooCommerceObject(storeURL, consumerKey, consumerSecret);
WooCommerce.get(url, function (err, data, response) {
response = response && JSON.parse(response);
console.log('error and response is : ', err, response);
if (err || !response) {
res.send({
data: err || response,
status: 500
});
} else if (response && response.errors && response.errors.length > 0 && response.errors[0].code) {
res.status(getErrorStatusCode(response.errors[0].code)).send({
data: response,
status: getErrorStatusCode(response.errors[0].code)
});
} else {
res.send({
data: response,
status: 200
});
}
});
}
/**
* Different setup for 'development' and 'production' modes
* @type {string}
*/
if (env === 'development') {
// Development-mode-specific configuration
console.info('Node is running in development mode');
}
else if (env === 'test') {
// Development-mode-specific configuration
console.info('Node is running in test mode');
}
else {
// Production-mode-specific configuration goes here...
console.info('Node is running in production mode');
}
/**
* Server init and start
*/
server.listen(_Port);
function stopWebServer() {
server.close(function () {
console.info('Webserver shutdown');
process.exit();
});
}
var gracefulShutdown = function () {
console.info("Received kill signal, shutting down Webserver gracefully.");
stopWebServer();
// if after
setTimeout(function () {
console.error("Could not close connections in time, forcefully shutting down webserver");
process.exit();
}, 10 * 1000);
};
//listen for Ctrl + C
process.on('SIGINT', gracefulShutdown);
// listen for TERM signal .e.g. kill
process.on('SIGTERM', gracefulShutdown);
// listen for uncaughtException
process.on('uncaughtException', function (err) {
console.error("Uncaught Exception: " + err);
console.error("Stack: " + err.stack);
process.exit(1);
});
console.info('Express server listening on port: %s', server.address().port);
module.exports = function () {
return server;
}; | Added check before parsing response
| server/index.js | Added check before parsing response | <ide><path>erver/index.js
<ide> getProductCategories(req, res);
<ide> });
<ide>
<add>function isJson(str) {
<add> try {
<add> JSON.parse(str);
<add> } catch (e) {
<add> return false;
<add> }
<add> return true;
<add>}
<add>
<ide> app.get('/getProducts', function (req, res) {
<ide> var WooCommerce = getWooCommerceObject(req.query.storeURL, req.query.consumerKey, req.query.consumerSecret);
<ide> if(req.query && req.query.id) {
<ide> WooCommerce.get('products/' + req.query.id, function(err, data, response) {
<del> response = response && JSON.parse(response);
<del> if(err) {
<del> res.send({
<del> data: err,
<add> response = response && isJson(response) && JSON.parse(response);
<add> if(err || !response) {
<add> res.send({
<add> data: err || response,
<ide> status: 500
<ide> });
<ide> } else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
<ide> });
<ide> } else {
<ide> WooCommerce.get('products', function (err, data, response) {
<del> response = response && JSON.parse(response);
<add> response = response && isJson(response) && JSON.parse(response);
<ide> console.log('response is::::::::::', err, response);
<del> if(err) {
<del> res.send({
<del> data: err,
<add> if(err || !response) {
<add> res.send({
<add> data: err || response,
<ide> status: 500
<ide> });
<ide> } else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
<ide> console.log('getProductsByCategory url is:', url);
<ide> var WooCommerce = getWooCommerceObject(req.query.storeURL, req.query.consumerKey, req.query.consumerSecret);
<ide> WooCommerce.get(url, function (err, data, response) {
<del> response = response && JSON.parse(response);
<del> if(err) {
<del> res.send({
<del> data: err,
<add> response = response && isJson(response) && JSON.parse(response);
<add> if(err || !response) {
<add> res.send({
<add> data: err || response,
<ide> status: 500
<ide> });
<ide> } else if(response && response.errors && response.errors.length > 0 && response.errors[0].code) {
<ide> var consumerSecret = req.query.consumerSecret || req.body.consumerSecret;
<ide> var WooCommerce = getWooCommerceObject(storeURL, consumerKey, consumerSecret);
<ide> WooCommerce.get(url, function (err, data, response) {
<del> response = response && JSON.parse(response);
<add> response = response && isJson(response) && JSON.parse(response);
<ide> console.log('error and response is : ', err, response);
<ide> if (err || !response) {
<ide> res.send({ |
|
Java | mit | error: pathspec 'LeetCodeSolutions/java/src/98_Validate_Binary_Search_Tree/Solution.java' did not match any file(s) known to git
| 37302cbac976fdcd95fb00efd17776485c206031 | 1 | ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground | public class TreeNode {
int val;
TreeNode left;
TreeNode right;
TreeNode(int x) { val = x; }
}
class Solution {
public boolean isValidBST(TreeNode root) {
return isValidBST(root, Long.MIN_VALUE, Long.MAX_VALUE);
}
private boolean isValidBST(TreeNode node, long minValue, long maxValue) {
if (node == null) {
return true;
}
if (node.val <= minValue || node.val >= maxValue) {
return false;
}
return isValidBST(node.left, minValue, node.val) && isValidBST(node.right, node.val, maxValue);
}
} | LeetCodeSolutions/java/src/98_Validate_Binary_Search_Tree/Solution.java | 98. Validate Binary Search Tree
| LeetCodeSolutions/java/src/98_Validate_Binary_Search_Tree/Solution.java | 98. Validate Binary Search Tree | <ide><path>eetCodeSolutions/java/src/98_Validate_Binary_Search_Tree/Solution.java
<add>public class TreeNode {
<add> int val;
<add> TreeNode left;
<add> TreeNode right;
<add> TreeNode(int x) { val = x; }
<add>}
<add>
<add>class Solution {
<add> public boolean isValidBST(TreeNode root) {
<add> return isValidBST(root, Long.MIN_VALUE, Long.MAX_VALUE);
<add> }
<add>
<add> private boolean isValidBST(TreeNode node, long minValue, long maxValue) {
<add> if (node == null) {
<add> return true;
<add> }
<add> if (node.val <= minValue || node.val >= maxValue) {
<add> return false;
<add> }
<add> return isValidBST(node.left, minValue, node.val) && isValidBST(node.right, node.val, maxValue);
<add> }
<add>} |
|
JavaScript | mit | 26b0532fe2fea7641618d0c66d3dc0677c9fcf5f | 0 | kimgel/Impex | 'use strict';
var mongoose = require('mongoose'),
select = require('mongoose-json-select').select,
fs = require('fs'),
fmt = require('fmt'),
_ = require('lodash'),
moment = require('moment'),
config = require('../../../config/credentials/aws'),
multiparty = require('multiparty'),
s3 = require('streaming-s3'),
RegulatoryDoc = require('./schema').RegulatoryDoc;
// Find Regulatory Document by id
exports.regulatoryDocument = function(req, res, next, id) {
RegulatoryDoc.load(id, function(err, regulatoryDoc) {
if (err) return next(err);
if (!regulatoryDoc) return next(new Error('Failed to load Regulatory Document: ' + id));
req.regulatoryDoc = regulatoryDoc;
next();
});
};
//List all Regulatory Documents
exports.all = function(req, res) {
var query = req.query.fields;
//replace comma with whitespace to validate syntax for mongoose select
var columns = (query ? query.replace(/\,/g, ' '): '');
RegulatoryDoc
.find()
.select(columns)
.sort('-created')
.populate('creator','name')
.exec(function(err, regulatoryDocs) {
if (!err) {
res.jsonp(regulatoryDocs);
} else {
return res.send(err);
}
});
};
//Show Regulatory Document
exports.show = function(req, res) {
res.jsonp(req.regulatoryDoc);
};
//Update Regulatory Document
exports.update = function(req, res) {
var updateRegulatoryDoc = req.regulatoryDoc;
updateRegulatoryDoc = _.extend(updateRegulatoryDoc, req.body);
updateRegulatoryDoc.save(function(err) {
if (err) {
return res.send('login', {
errors: err.errors,
updateRegulatoryDoc: updateRegulatoryDoc
});
} else {
res.jsonp(updateRegulatoryDoc);
}
});
};
// Create Regulatory Document
exports.create = function(req, res) {
var request = req.body;
var newRegulatoryDoc = new RegulatoryDoc(request);
var dateIssued = request.date_issued;
var validUntil = request.valid_until;
newRegulatoryDoc.creator = req.user;
if (dateIssued) {
newRegulatoryDoc.date_issued = new Date(req.body.date_issued);
}
if (validUntil) {
newRegulatoryDoc.valid_until = new Date(req.body.valid_until);
}
newRegulatoryDoc.save(function(err) {
if (!err) {
res.jsonp(newRegulatoryDoc);
} else {
return res.send(err);
}
});
};
exports.upload = function(req, res) {
var form = new multiparty.Form();
form.on('file', function(name, file) {
var fileName = file.originalFilename,
path = file.path;
var fileStream = fs.createReadStream(path);
var uploader = new s3(
fileStream,
config.aws.key,
config.aws.secret,{
Bucket: config.aws.bucket,
Key: 'regulatorydocuments/' + fileName
}, function(err, resp) {
if (err) return fmt.dump(err, 'Error');
// delete temp file
resp.FileName = fileName;
fs.unlinkSync(path);
res.jsonp(resp);
}
);
});
form.parse(req);
}; | lib/modules/settings/regulatorydocuments/controller.js | 'use strict';
var mongoose = require('mongoose'),
select = require('mongoose-json-select').select,
fs = require('fs'),
fmt = require('fmt'),
_ = require('lodash'),
moment = require('moment'),
config = require('../../../config/credentials/aws'),
multiparty = require('multiparty'),
s3 = require('streaming-s3'),
RegulatoryDoc = require('./schema').RegulatoryDoc;
// Find Regulatory Document by id
exports.regulatoryDocument = function(req, res, next, id) {
RegulatoryDoc.load(id, function(err, regulatoryDoc) {
if (err) return next(err);
if (!regulatoryDoc) return next(new Error('Failed to load Regulatory Document: ' + id));
req.regulatoryDoc = regulatoryDoc;
next();
});
};
//List all Regulatory Documents
exports.all = function(req, res) {
var query = req.query.fields;
var columns = (
query
? query.replace(/\,/g, " ") //replace comma with whitespace to validate syntax for mongoose select
: ''
);
RegulatoryDoc
.find()
.select(columns)
.sort('-created')
.populate('creator','name')
.exec(function(err, regulatoryDocs) {
if (!err) {
res.jsonp(regulatoryDocs);
} else {
return res.send(err);
}
});
};
//Show Regulatory Document
exports.show = function(req, res) {
res.jsonp(req.regulatoryDoc);
};
//Update Regulatory Document
exports.update = function(req, res) {
var updateRegulatoryDoc = req.regulatoryDoc;
updateRegulatoryDoc = _.extend(updateRegulatoryDoc, req.body);
updateRegulatoryDoc.save(function(err) {
if (err) {
return res.send('login', {
errors: err.errors,
updateRegulatoryDoc: updateRegulatoryDoc
});
} else {
res.jsonp(updateRegulatoryDoc);
}
});
};
// Create Regulatory Document
exports.create = function(req, res) {
var request = req.body;
var newRegulatoryDoc = new RegulatoryDoc(request);
var dateIssued = request.date_issued;
var validUntil = request.valid_until;
newRegulatoryDoc.creator = req.user;
if (dateIssued) {
newRegulatoryDoc.date_issued = new Date(req.body.date_issued);
}
if (validUntil) {
newRegulatoryDoc.valid_until = new Date(req.body.valid_until);
}
newRegulatoryDoc.save(function(err) {
if (!err) {
res.jsonp(newRegulatoryDoc);
} else {
return res.send(err);
}
});
};
exports.upload = function(req, res) {
var form = new multiparty.Form();
form.on('file', function(name, file) {
var fileName = file.originalFilename,
path = file.path;
var fileStream = fs.createReadStream(path);
var uploader = new s3(
fileStream,
config.aws.key,
config.aws.secret,{
Bucket: config.aws.bucket,
Key: 'regulatorydocuments/' + fileName
}, function(err, resp) {
if (err) return fmt.dump(err, 'Error');
// delete temp file
resp.FileName = fileName;
fs.unlinkSync(path);
res.jsonp(resp);
}
);
});
form.parse(req);
}; | removed some extra line breaks
| lib/modules/settings/regulatorydocuments/controller.js | removed some extra line breaks | <ide><path>ib/modules/settings/regulatorydocuments/controller.js
<ide> //List all Regulatory Documents
<ide> exports.all = function(req, res) {
<ide> var query = req.query.fields;
<del> var columns = (
<del> query
<del> ? query.replace(/\,/g, " ") //replace comma with whitespace to validate syntax for mongoose select
<del> : ''
<del> );
<add>
<add> //replace comma with whitespace to validate syntax for mongoose select
<add> var columns = (query ? query.replace(/\,/g, ' '): '');
<ide> RegulatoryDoc
<ide> .find()
<ide> .select(columns) |
|
Java | apache-2.0 | 623a3622fdfd91f85ffbde5eee70a42a3c78622b | 0 | apache/incubator-taverna-workbench-common-activities | /*******************************************************************************
* Copyright (C) 2009 The University of Manchester
*
* Modifications to the initial code base are copyright of their
* respective authors, or their employers as appropriate.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
******************************************************************************/
package net.sf.taverna.t2.activities.spreadsheet.servicedescriptions;
import java.net.URI;
import javax.swing.Icon;
import net.sf.taverna.t2.activities.spreadsheet.SpreadsheetImportActivity;
import net.sf.taverna.t2.activities.spreadsheet.SpreadsheetImportConfiguration;
import net.sf.taverna.t2.activities.spreadsheet.il8n.SpreadsheetImportUIText;
import net.sf.taverna.t2.servicedescriptions.AbstractTemplateService;
import net.sf.taverna.t2.servicedescriptions.ServiceDescription;
/**
* Definition of the SpreadsheetImport spreadsheet import template service.
*
* @author David Withers
*/
public class SpreadsheetImportTemplateService extends
AbstractTemplateService<SpreadsheetImportConfiguration> {
private static final String SERVICE_NAME = SpreadsheetImportUIText
.getString("SpreadsheetImportTemplateService.serviceName");
private static final URI providerId = URI
.create("http://taverna.sf.net/2010/service-provider/spreadsheet");
public String getName() {
return SERVICE_NAME;
}
@Override
public Class<SpreadsheetImportActivity> getActivityClass() {
return SpreadsheetImportActivity.class;
}
@Override
public SpreadsheetImportConfiguration getActivityConfiguration() {
return new SpreadsheetImportConfiguration();
}
@Override
public Icon getIcon() {
return SpreadsheetImportActivityIcon.getSpreadsheetImportIcon();
}
@Override
public String getDescription() {
return SpreadsheetImportUIText
.getString("SpreadsheetImportTemplateService.serviceDescription");
}
public static ServiceDescription<SpreadsheetImportConfiguration> getServiceDescription() {
SpreadsheetImportTemplateService bts = new SpreadsheetImportTemplateService();
return bts.templateService;
}
public String getId() {
return providerId.toString();
}
}
| src/main/java/net/sf/taverna/t2/activities/spreadsheet/servicedescriptions/SpreadsheetImportTemplateService.java | /*******************************************************************************
* Copyright (C) 2009 The University of Manchester
*
* Modifications to the initial code base are copyright of their
* respective authors, or their employers as appropriate.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
******************************************************************************/
package net.sf.taverna.t2.activities.spreadsheet.servicedescriptions;
import java.awt.Color;
import javax.swing.Icon;
import net.sf.taverna.t2.activities.spreadsheet.SpreadsheetImportActivity;
import net.sf.taverna.t2.activities.spreadsheet.SpreadsheetImportConfiguration;
import net.sf.taverna.t2.activities.spreadsheet.il8n.SpreadsheetImportUIText;
import net.sf.taverna.t2.servicedescriptions.AbstractTemplateService;
import net.sf.taverna.t2.servicedescriptions.ServiceDescription;
import net.sf.taverna.t2.workbench.ui.impl.configuration.colour.ColourManager;
/**
* Definition of the SpreadsheetImport spreadsheet import template service.
*
* @author David Withers
*/
public class SpreadsheetImportTemplateService extends
AbstractTemplateService<SpreadsheetImportConfiguration> {
private static final String SERVICE_NAME = SpreadsheetImportUIText
.getString("SpreadsheetImportTemplateService.serviceName");
public String getName() {
return SERVICE_NAME;
}
@Override
public Class<SpreadsheetImportActivity> getActivityClass() {
return SpreadsheetImportActivity.class;
}
@Override
public SpreadsheetImportConfiguration getActivityConfiguration() {
return new SpreadsheetImportConfiguration();
}
@Override
public Icon getIcon() {
return SpreadsheetImportActivityIcon.getSpreadsheetImportIcon();
}
@Override
public String getDescription() {
return SpreadsheetImportUIText
.getString("SpreadsheetImportTemplateService.serviceDescription");
}
public static ServiceDescription<SpreadsheetImportConfiguration> getServiceDescription() {
SpreadsheetImportTemplateService bts = new SpreadsheetImportTemplateService();
return bts.templateService;
}
}
| Fix for T2-674: Hardcoded default locations. System default configurable providers are now read from a file in Taverna startup/conf directory. Failing that - they are read from a hard coded list. User can now import and export services from such files as well.
git-svn-id: 0c3fe568c6fc21f6b564d51b719cf5d3bfeae51e@10041 bf327186-88b3-11dd-a302-d386e5130c1c
| src/main/java/net/sf/taverna/t2/activities/spreadsheet/servicedescriptions/SpreadsheetImportTemplateService.java | Fix for T2-674: Hardcoded default locations. System default configurable providers are now read from a file in Taverna startup/conf directory. Failing that - they are read from a hard coded list. User can now import and export services from such files as well. | <ide><path>rc/main/java/net/sf/taverna/t2/activities/spreadsheet/servicedescriptions/SpreadsheetImportTemplateService.java
<ide> ******************************************************************************/
<ide> package net.sf.taverna.t2.activities.spreadsheet.servicedescriptions;
<ide>
<del>import java.awt.Color;
<add>import java.net.URI;
<ide>
<ide> import javax.swing.Icon;
<ide>
<ide> import net.sf.taverna.t2.activities.spreadsheet.il8n.SpreadsheetImportUIText;
<ide> import net.sf.taverna.t2.servicedescriptions.AbstractTemplateService;
<ide> import net.sf.taverna.t2.servicedescriptions.ServiceDescription;
<del>import net.sf.taverna.t2.workbench.ui.impl.configuration.colour.ColourManager;
<ide>
<ide> /**
<ide> * Definition of the SpreadsheetImport spreadsheet import template service.
<ide> private static final String SERVICE_NAME = SpreadsheetImportUIText
<ide> .getString("SpreadsheetImportTemplateService.serviceName");
<ide>
<add> private static final URI providerId = URI
<add> .create("http://taverna.sf.net/2010/service-provider/spreadsheet");
<add>
<ide> public String getName() {
<ide> return SERVICE_NAME;
<ide> }
<ide> SpreadsheetImportTemplateService bts = new SpreadsheetImportTemplateService();
<ide> return bts.templateService;
<ide> }
<add>
<add> public String getId() {
<add> return providerId.toString();
<add> }
<ide> } |
|
Java | apache-2.0 | fef78e047cbca211a0e03c57e1be302494058dcc | 0 | cnevinc/zxing-android-embedded,tsdl2013/zxing-android-embedded,praveen062/zxing-android-embedded,BugMaker/zxing-android-embedded,j-mateo/zxing-android-embedded,movedon2otherthings/zxing-android-embedded,krischik/zxing-android-minimal,jonzl/sample-zxing,krischik/zxing-android-minimal,dimoge/zxing-android-embedded,binson1989/zxing-android-embedded,BugMaker/zxing-android-embedded,renekaigen/zxing-android-embedded,WangXiaoxi/zxing-android-minimal,youyi1314/zxing-android-embedded,Promptus/zxing-android-minimal,WangXiaoxi/zxing-android-minimal,wudayu/zxing-android-embedded,jonzl/sample-zxing,ALenfant/zxing-android-embedded,Promptus/zxing-android-minimal,journeyapps/zxing-android-embedded,dhosford/zxing-android-embedded,BugMaker/zxing-android-embedded,jemsnaban/zxing-android-embedded,jonzl/sample-zxing,jeason789/test_embedded,0359xiaodong/zxing-android-embedded,BBBInc/zxing-android-embedded | /*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.camera;
import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Build;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.SurfaceHolder;
import com.google.zxing.client.android.PlanarYUVLuminanceSource;
import com.google.zxing.client.android.PreferencesActivity;
import java.io.IOException;
/**
* This object wraps the Camera service object and expects to be the only one talking to it. The
* implementation encapsulates the steps needed to take preview-sized images, which are used for
* both preview and decoding.
*
* @author [email protected] (Daniel Switkin)
*/
public final class CameraManager {
private static final String TAG = CameraManager.class.getSimpleName();
private static final int MIN_FRAME_WIDTH = 240;
private static final int MIN_FRAME_HEIGHT = 240;
private static final int MAX_FRAME_WIDTH = 600;
private static final int MAX_FRAME_HEIGHT = 400;
static final int SDK_INT; // Later we can use Build.VERSION.SDK_INT
static {
int sdkInt;
try {
sdkInt = Integer.parseInt(Build.VERSION.SDK);
} catch (NumberFormatException nfe) {
// Just to be safe
sdkInt = 10000;
}
SDK_INT = sdkInt;
}
private final Context context;
private final CameraConfigurationManager configManager;
private Camera camera;
private Rect framingRect;
private Rect framingRectInPreview;
private boolean initialized;
private boolean previewing;
private boolean reverseImage;
private final boolean useOneShotPreviewCallback;
private int requestedFramingRectWidth;
private int requestedFramingRectHeight;
/**
* Preview frames are delivered here, which we pass on to the registered handler. Make sure to
* clear the handler so it will only receive one message.
*/
private final PreviewCallback previewCallback;
/** Autofocus callbacks arrive here, and are dispatched to the Handler which requested them. */
private final AutoFocusCallback autoFocusCallback;
public CameraManager(Context context) {
this.context = context;
this.configManager = new CameraConfigurationManager(context);
// Camera.setOneShotPreviewCallback() has a race condition in Cupcake, so we use the older
// Camera.setPreviewCallback() on 1.5 and earlier. For Donut and later, we need to use
// the more efficient one shot callback, as the older one can swamp the system and cause it
// to run out of memory. We can't use SDK_INT because it was introduced in the Donut SDK.
useOneShotPreviewCallback = Integer.parseInt(Build.VERSION.SDK) > 3; // 3 = Cupcake
previewCallback = new PreviewCallback(configManager, useOneShotPreviewCallback);
autoFocusCallback = new AutoFocusCallback();
}
/**
* Opens the camera driver and initializes the hardware parameters.
*
* @param holder The surface object which the camera will draw preview frames into.
* @throws IOException Indicates the camera driver failed to open.
*/
public void openDriver(SurfaceHolder holder) throws IOException {
Camera theCamera = camera;
if (theCamera == null) {
theCamera = Camera.open();
if (theCamera == null) {
throw new IOException();
}
camera = theCamera;
}
theCamera.setPreviewDisplay(holder);
if (!initialized) {
initialized = true;
configManager.initFromCameraParameters(theCamera);
if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight);
requestedFramingRectWidth = 0;
requestedFramingRectHeight = 0;
}
}
configManager.setDesiredCameraParameters(theCamera);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
reverseImage = prefs.getBoolean(PreferencesActivity.KEY_REVERSE_IMAGE, false);
if (prefs.getBoolean(PreferencesActivity.KEY_FRONT_LIGHT, false)) {
FlashlightManager.enableFlashlight();
}
}
/**
* Closes the camera driver if still in use.
*/
public void closeDriver() {
if (camera != null) {
FlashlightManager.disableFlashlight();
camera.release();
camera = null;
// Make sure to clear these each time we close the camera, so that any scanning rect
// requested by intent is forgotten.
framingRect = null;
framingRectInPreview = null;
}
}
/**
* Asks the camera hardware to begin drawing preview frames to the screen.
*/
public void startPreview() {
Camera theCamera = camera;
if (theCamera != null && !previewing) {
theCamera.startPreview();
previewing = true;
}
}
/**
* Tells the camera to stop drawing preview frames.
*/
public void stopPreview() {
if (camera != null && previewing) {
if (!useOneShotPreviewCallback) {
camera.setPreviewCallback(null);
}
camera.stopPreview();
previewCallback.setHandler(null, 0);
autoFocusCallback.setHandler(null, 0);
previewing = false;
}
}
/**
* A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
* in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
* respectively.
*
* @param handler The handler to send the message to.
* @param message The what field of the message to be sent.
*/
public void requestPreviewFrame(Handler handler, int message) {
Camera theCamera = camera;
if (theCamera != null && previewing) {
previewCallback.setHandler(handler, message);
if (useOneShotPreviewCallback) {
theCamera.setOneShotPreviewCallback(previewCallback);
} else {
theCamera.setPreviewCallback(previewCallback);
}
}
}
/**
* Asks the camera hardware to perform an autofocus.
*
* @param handler The Handler to notify when the autofocus completes.
* @param message The message to deliver.
*/
public void requestAutoFocus(Handler handler, int message) {
if (camera != null && previewing) {
autoFocusCallback.setHandler(handler, message);
//Log.d(TAG, "Requesting auto-focus callback");
camera.autoFocus(autoFocusCallback);
}
}
/**
* Calculates the framing rect which the UI should draw to show the user where to place the
* barcode. This target helps with alignment as well as forces the user to hold the device
* far enough away to ensure the image will be in focus.
*
* @return The rectangle to draw on screen in window coordinates.
*/
public Rect getFramingRect() {
if (framingRect == null) {
if (camera == null) {
return null;
}
Point screenResolution = configManager.getScreenResolution();
int width = screenResolution.x * 3 / 4;
if (width < MIN_FRAME_WIDTH) {
width = MIN_FRAME_WIDTH;
} else if (width > MAX_FRAME_WIDTH) {
width = MAX_FRAME_WIDTH;
}
int height = screenResolution.y * 3 / 4;
if (height < MIN_FRAME_HEIGHT) {
height = MIN_FRAME_HEIGHT;
} else if (height > MAX_FRAME_HEIGHT) {
height = MAX_FRAME_HEIGHT;
}
int leftOffset = (screenResolution.x - width) / 2;
int topOffset = (screenResolution.y - height) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
Log.d(TAG, "Calculated framing rect: " + framingRect);
}
return framingRect;
}
/**
* Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
* not UI / screen.
*/
public Rect getFramingRectInPreview() {
if (framingRectInPreview == null) {
Rect framingRect = getFramingRect();
if (framingRect == null) {
return null;
}
Rect rect = new Rect(framingRect);
Point cameraResolution = configManager.getCameraResolution();
Point screenResolution = configManager.getScreenResolution();
rect.left = rect.left * cameraResolution.x / screenResolution.x;
rect.right = rect.right * cameraResolution.x / screenResolution.x;
rect.top = rect.top * cameraResolution.y / screenResolution.y;
rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
framingRectInPreview = rect;
}
return framingRectInPreview;
}
/**
* Allows third party apps to specify the scanning rectangle dimensions, rather than determine
* them automatically based on screen resolution.
*
* @param width The width in pixels to scan.
* @param height The height in pixels to scan.
*/
public void setManualFramingRect(int width, int height) {
if (initialized) {
Point screenResolution = configManager.getScreenResolution();
if (width > screenResolution.x) {
width = screenResolution.x;
}
if (height > screenResolution.y) {
height = screenResolution.y;
}
int leftOffset = (screenResolution.x - width) / 2;
int topOffset = (screenResolution.y - height) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
Log.d(TAG, "Calculated manual framing rect: " + framingRect);
framingRectInPreview = null;
} else {
requestedFramingRectWidth = width;
requestedFramingRectHeight = height;
}
}
/**
* A factory method to build the appropriate LuminanceSource object based on the format
* of the preview buffers, as described by Camera.Parameters.
*
* @param data A preview frame.
* @param width The width of the image.
* @param height The height of the image.
* @return A PlanarYUVLuminanceSource instance.
*/
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
Rect rect = getFramingRectInPreview();
if (rect == null) {
throw new IllegalStateException();
}
int previewFormat = configManager.getPreviewFormat();
String previewFormatString = configManager.getPreviewFormatString();
switch (previewFormat) {
// This is the standard Android format which all devices are REQUIRED to support.
// In theory, it's the only one we should ever care about.
case PixelFormat.YCbCr_420_SP:
// This format has never been seen in the wild, but is compatible as we only care
// about the Y channel, so allow it.
case PixelFormat.YCbCr_422_SP:
return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height(), reverseImage);
default:
// The Samsung Moment incorrectly uses this variant instead of the 'sp' version.
// Fortunately, it too has all the Y data up front, so we can read it.
if ("yuv420p".equals(previewFormatString)) {
return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height(), reverseImage);
}
}
throw new IllegalArgumentException("Unsupported picture format: " +
previewFormat + '/' + previewFormatString);
}
}
| src/com/google/zxing/client/android/camera/CameraManager.java | /*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.camera;
import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Build;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.SurfaceHolder;
import com.google.zxing.client.android.PlanarYUVLuminanceSource;
import com.google.zxing.client.android.PreferencesActivity;
import java.io.IOException;
/**
* This object wraps the Camera service object and expects to be the only one talking to it. The
* implementation encapsulates the steps needed to take preview-sized images, which are used for
* both preview and decoding.
*
* @author [email protected] (Daniel Switkin)
*/
public final class CameraManager {
private static final String TAG = CameraManager.class.getSimpleName();
private static final int MIN_FRAME_WIDTH = 240;
private static final int MIN_FRAME_HEIGHT = 240;
private static final int MAX_FRAME_WIDTH = 600;
private static final int MAX_FRAME_HEIGHT = 400;
static final int SDK_INT; // Later we can use Build.VERSION.SDK_INT
static {
int sdkInt;
try {
sdkInt = Integer.parseInt(Build.VERSION.SDK);
} catch (NumberFormatException nfe) {
// Just to be safe
sdkInt = 10000;
}
SDK_INT = sdkInt;
}
private final Context context;
private final CameraConfigurationManager configManager;
private Camera camera;
private Rect framingRect;
private Rect framingRectInPreview;
private boolean initialized;
private boolean previewing;
private boolean reverseImage;
private final boolean useOneShotPreviewCallback;
private int requestedFramingRectWidth;
private int requestedFramingRectHeight;
/**
* Preview frames are delivered here, which we pass on to the registered handler. Make sure to
* clear the handler so it will only receive one message.
*/
private final PreviewCallback previewCallback;
/** Autofocus callbacks arrive here, and are dispatched to the Handler which requested them. */
private final AutoFocusCallback autoFocusCallback;
public CameraManager(Context context) {
this.context = context;
this.configManager = new CameraConfigurationManager(context);
// Camera.setOneShotPreviewCallback() has a race condition in Cupcake, so we use the older
// Camera.setPreviewCallback() on 1.5 and earlier. For Donut and later, we need to use
// the more efficient one shot callback, as the older one can swamp the system and cause it
// to run out of memory. We can't use SDK_INT because it was introduced in the Donut SDK.
useOneShotPreviewCallback = Integer.parseInt(Build.VERSION.SDK) > 3; // 3 = Cupcake
previewCallback = new PreviewCallback(configManager, useOneShotPreviewCallback);
autoFocusCallback = new AutoFocusCallback();
}
/**
* Opens the camera driver and initializes the hardware parameters.
*
* @param holder The surface object which the camera will draw preview frames into.
* @throws IOException Indicates the camera driver failed to open.
*/
public void openDriver(SurfaceHolder holder) throws IOException {
Camera theCamera = camera;
if (theCamera == null) {
theCamera = Camera.open();
if (theCamera == null) {
throw new IOException();
}
camera = theCamera;
}
theCamera.setPreviewDisplay(holder);
if (!initialized) {
initialized = true;
configManager.initFromCameraParameters(theCamera);
if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight);
requestedFramingRectWidth = 0;
requestedFramingRectHeight = 0;
}
}
configManager.setDesiredCameraParameters(theCamera);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
reverseImage = prefs.getBoolean(PreferencesActivity.KEY_REVERSE_IMAGE, false);
if (prefs.getBoolean(PreferencesActivity.KEY_FRONT_LIGHT, false)) {
FlashlightManager.enableFlashlight();
}
}
/**
* Closes the camera driver if still in use.
*/
public void closeDriver() {
if (camera != null) {
FlashlightManager.disableFlashlight();
camera.release();
camera = null;
// Make sure to clear these each time we close the camera, so that any scanning rect
// requested by intent is forgotten.
framingRect = null;
framingRectInPreview = null;
}
}
/**
* Asks the camera hardware to begin drawing preview frames to the screen.
*/
public void startPreview() {
Camera theCamera = camera;
if (theCamera != null && !previewing) {
theCamera.startPreview();
previewing = true;
}
}
/**
* Tells the camera to stop drawing preview frames.
*/
public void stopPreview() {
if (camera != null && previewing) {
if (!useOneShotPreviewCallback) {
camera.setPreviewCallback(null);
}
camera.stopPreview();
previewCallback.setHandler(null, 0);
autoFocusCallback.setHandler(null, 0);
previewing = false;
}
}
/**
* A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
* in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
* respectively.
*
* @param handler The handler to send the message to.
* @param message The what field of the message to be sent.
*/
public void requestPreviewFrame(Handler handler, int message) {
Camera theCamera = camera;
if (theCamera != null && previewing) {
previewCallback.setHandler(handler, message);
if (useOneShotPreviewCallback) {
theCamera.setOneShotPreviewCallback(previewCallback);
} else {
theCamera.setPreviewCallback(previewCallback);
}
}
}
/**
* Asks the camera hardware to perform an autofocus.
*
* @param handler The Handler to notify when the autofocus completes.
* @param message The message to deliver.
*/
public void requestAutoFocus(Handler handler, int message) {
if (camera != null && previewing) {
autoFocusCallback.setHandler(handler, message);
//Log.d(TAG, "Requesting auto-focus callback");
camera.autoFocus(autoFocusCallback);
}
}
/**
* Calculates the framing rect which the UI should draw to show the user where to place the
* barcode. This target helps with alignment as well as forces the user to hold the device
* far enough away to ensure the image will be in focus.
*
* @return The rectangle to draw on screen in window coordinates.
*/
public Rect getFramingRect() {
if (framingRect == null) {
if (camera == null) {
return null;
}
Point screenResolution = configManager.getScreenResolution();
int width = screenResolution.x * 3 / 4;
if (width < MIN_FRAME_WIDTH) {
width = MIN_FRAME_WIDTH;
} else if (width > MAX_FRAME_WIDTH) {
width = MAX_FRAME_WIDTH;
}
int height = screenResolution.y * 3 / 4;
if (height < MIN_FRAME_HEIGHT) {
height = MIN_FRAME_HEIGHT;
} else if (height > MAX_FRAME_HEIGHT) {
height = MAX_FRAME_HEIGHT;
}
int leftOffset = (screenResolution.x - width) / 2;
int topOffset = (screenResolution.y - height) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
Log.d(TAG, "Calculated framing rect: " + framingRect);
}
return framingRect;
}
/**
* Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
* not UI / screen.
*/
public Rect getFramingRectInPreview() {
if (framingRectInPreview == null) {
Rect rect = new Rect(getFramingRect());
Point cameraResolution = configManager.getCameraResolution();
Point screenResolution = configManager.getScreenResolution();
rect.left = rect.left * cameraResolution.x / screenResolution.x;
rect.right = rect.right * cameraResolution.x / screenResolution.x;
rect.top = rect.top * cameraResolution.y / screenResolution.y;
rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
framingRectInPreview = rect;
}
return framingRectInPreview;
}
  /**
   * Allows third party apps to specify the scanning rectangle dimensions, rather than determine
   * them automatically based on screen resolution.
   *
   * @param width The width in pixels to scan.
   * @param height The height in pixels to scan.
   */
  public void setManualFramingRect(int width, int height) {
    if (initialized) {
      Point screenResolution = configManager.getScreenResolution();
      // Clamp the requested size so the rect always fits on screen.
      if (width > screenResolution.x) {
        width = screenResolution.x;
      }
      if (height > screenResolution.y) {
        height = screenResolution.y;
      }
      int leftOffset = (screenResolution.x - width) / 2;
      int topOffset = (screenResolution.y - height) / 2;
      framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
      Log.d(TAG, "Calculated manual framing rect: " + framingRect);
      // Invalidate the cached preview-coordinate rect so it is recomputed on next use.
      framingRectInPreview = null;
    } else {
      // Driver not initialized yet; remember the request and apply it during init.
      requestedFramingRectWidth = width;
      requestedFramingRectHeight = height;
    }
  }
  /**
   * A factory method to build the appropriate LuminanceSource object based on the format
   * of the preview buffers, as described by Camera.Parameters.
   *
   * @param data A preview frame.
   * @param width The width of the image.
   * @param height The height of the image.
   * @return A PlanarYUVLuminanceSource instance.
   * @throws IllegalStateException if the preview-coordinate framing rect is not available yet.
   * @throws IllegalArgumentException if the preview format is not a supported YUV variant.
   */
  public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    Rect rect = getFramingRectInPreview();
    if (rect == null) {
      throw new IllegalStateException();
    }
    int previewFormat = configManager.getPreviewFormat();
    String previewFormatString = configManager.getPreviewFormatString();
    switch (previewFormat) {
      // This is the standard Android format which all devices are REQUIRED to support.
      // In theory, it's the only one we should ever care about.
      case PixelFormat.YCbCr_420_SP:
      // This format has never been seen in the wild, but is compatible as we only care
      // about the Y channel, so allow it.
      case PixelFormat.YCbCr_422_SP:
        return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
                                            rect.width(), rect.height(), reverseImage);
      default:
        // The Samsung Moment incorrectly uses this variant instead of the 'sp' version.
        // Fortunately, it too has all the Y data up front, so we can read it.
        if ("yuv420p".equals(previewFormatString)) {
          return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
                                              rect.width(), rect.height(), reverseImage);
        }
    }
    throw new IllegalArgumentException("Unsupported picture format: " +
        previewFormat + '/' + previewFormatString);
  }
}
| Another bug fix to go with Issue 1044 -- this one only cropped up on my tablet though
git-svn-id: 0b99b41c5c84574e525f2ba59b508f5deb570dd8@2014 59b500cc-1b3d-0410-9834-0bbf25fbcc57
| src/com/google/zxing/client/android/camera/CameraManager.java | Another bug fix to go with Issue 1044 -- this one only cropped up on my tablet though | <ide><path>rc/com/google/zxing/client/android/camera/CameraManager.java
<ide> */
<ide> public Rect getFramingRectInPreview() {
<ide> if (framingRectInPreview == null) {
<del> Rect rect = new Rect(getFramingRect());
<add> Rect framingRect = getFramingRect();
<add> if (framingRect == null) {
<add> return null;
<add> }
<add> Rect rect = new Rect(framingRect);
<ide> Point cameraResolution = configManager.getCameraResolution();
<ide> Point screenResolution = configManager.getScreenResolution();
<ide> rect.left = rect.left * cameraResolution.x / screenResolution.x; |
|
Java | apache-2.0 | 59fb749eca4ea1136f4c3857fba952aa490aab1e | 0 | SynBioDex/libSBOLj |
package org.synbiohub.frontend;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.sbolstandard.core2.*;
/**
* Provides a Java API to SynBioHub instances.
* @author James McLaughlin
* @author Chris Myers
*
*/
public class SynBioHubFrontend
{
    // Manages pooled HTTP connections shared by all requests from this frontend.
    PoolingHttpClientConnectionManager connectionManager;
    // HTTP client used for every call to the SynBioHub REST API.
    HttpClient client;
    // Base URL of the SynBioHub REST endpoint.
    String backendUrl;
    // URI prefix of objects stored in this repository; rewritten to backendUrl for fetches.
    String uriPrefix;
    // Authentication token returned by login(); empty string means "not logged in".
    String user = "";

    /**
     * Creates an instance of the SynBioHub API.
     * @param backendUrl - URL for the SynBioHub instance.
     * @param uriPrefix - prefix for all URIs stored in this repository
     */
    public SynBioHubFrontend(String backendUrl,String uriPrefix)
    {
        this.backendUrl = backendUrl;
        this.uriPrefix = uriPrefix;

        connectionManager = new PoolingHttpClientConnectionManager();
        client = HttpClients.custom().setConnectionManager(connectionManager).build();
    }

    /**
     * Creates an instance of the SynBioHub API.
     * The URI prefix is assumed to equal the backend URL.
     * @param backendUrl - URL for the SynBioHub instance.
     */
    public SynBioHubFrontend(String backendUrl)
    {
        this.backendUrl = backendUrl;
        this.uriPrefix = backendUrl;

        connectionManager = new PoolingHttpClientConnectionManager();
        client = HttpClients.custom().setConnectionManager(connectionManager).build();
    }

    /**
     * Returns the URL for the SynBioHub instance.
     * @return the URL for the SynBioHub instance.
     */
    public String getBackendUrl()
    {
        return this.backendUrl;
    }
    /**
     * Return the total number of objects of a specified type in the repository.
     *
     * @return the total number of objects of a specified type in the repository.
     *
     * @param objectType The object type to count
     * (Collection, ComponentDefinition, Sequence, ModuleDefinition, Model, etc.).
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public int getCount(String objectType) throws SynBioHubException
    {
        return fetchCount(backendUrl + "/" + objectType + "/count");
    }

    /**
     * Retrieve SBOL TopLevel object from a SynBioHub instance using its URI.
     *
     * @param topLevelUri The URI of the SBOL TopLevel
     *
     * @return A libSBOLj TopLevel instance corresponding to the TopLevel
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public SBOLDocument getSBOL(URI topLevelUri) throws SynBioHubException
    {
        if (!topLevelUri.toString().startsWith(uriPrefix)) {
            throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
        }
        // Rewrite the object URI into a backend request URL and fetch its SBOL.
        String url = topLevelUri + "/sbol";
        url = url.replace(uriPrefix, backendUrl);
        SBOLDocument document = fetchFromSynBioHub(url);
        return document;
    }

    /**
     * Retrieve an attachment from a SynBioHub instance using its URI.
     *
     * @param attachmentUri The URI of the SBOL Attachment object
     * @param path Directory path to save the attachment
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     * @throws IOException if there is an I/O error
     */
    public void getAttachment(URI attachmentUri, String path) throws SynBioHubException, IOException
    {
        if (!attachmentUri.toString().startsWith(uriPrefix)) {
            throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
        }
        // The filename written under path comes from the Content-Disposition header.
        String url = attachmentUri + "/download";
        url = url.replace(uriPrefix, backendUrl);
        fetchContentSaveToFile(url,path);
    }

    /**
     * Remove SBOL TopLevel object from a SynBioHub instance using its URI.
     *
     * @param topLevelUri The URI of the SBOL TopLevel
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public void removeSBOL(URI topLevelUri) throws SynBioHubException
    {
        if (!topLevelUri.toString().startsWith(uriPrefix)) {
            throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
        }
        String url = topLevelUri + "/remove";
        url = url.replace(uriPrefix, backendUrl);
        fetchFromSynBioHub(url);
    }
/**
* Search the default store for ComponentDefinition instances matching a name and/or a set of roles
*
* @param name The dcterms:title to search for, or null
* @param roles A set of role URIs to search for, or null
* @param types A set of type URIs to search for, or null
* @param collections A set of Collection URIs to search for, or null
* @param offset The offset of the results to begin at, or null to begin at 0
* @param limit The maximum number of results to return, or null to return all results
*
* @return An ArrayList of ComponentDefinitionMetaData objects with a summary of all matching ComponentDefinitions.
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public ArrayList<IdentifiedMetadata> getMatchingComponentDefinitionMetadata(String name, Set<URI> roles,
Set<URI> types, Set<URI> collections, Integer offset, Integer limit)
throws SynBioHubException
{
SearchQuery query = new SearchQuery();
query.setOffset(offset);
query.setLimit(limit);
SearchCriteria objectCriteria = new SearchCriteria();
objectCriteria.setKey("objectType");
objectCriteria.setValue("ComponentDefinition");
query.addCriteria(objectCriteria);
if (roles != null) {
for(URI uri : roles)
{
SearchCriteria roleCriteria = new SearchCriteria();
roleCriteria.setKey("role");
roleCriteria.setValue(uri.toString());
query.getCriteria().add(roleCriteria);
}
}
if (types != null) {
for(URI uri : types)
{
SearchCriteria typeCriteria = new SearchCriteria();
typeCriteria.setKey("type");
typeCriteria.setValue(uri.toString());
query.getCriteria().add(typeCriteria);
}
}
if (collections != null) {
for(URI uri : collections)
{
SearchCriteria collectionCriteria = new SearchCriteria();
collectionCriteria.setKey("collection");
collectionCriteria.setValue(uri.toString());
query.getCriteria().add(collectionCriteria);
}
}
if(name != null)
{
SearchCriteria nameCriteria = new SearchCriteria();
nameCriteria.setKey("name");
nameCriteria.setValue(name);
query.getCriteria().add(nameCriteria);
}
return search(query);
}
/**
* Search this SynBioHub instance for objects matching a search query
*
* @param query the search query
*
* @return An ArrayList of MetaData for objects that match the specified search query
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public ArrayList<IdentifiedMetadata> search(SearchQuery query) throws SynBioHubException
{
String url = backendUrl + "/search/";
//query.offset = offset;
//query.limit = limit;
String textQuery = "";
boolean first = true;
for (SearchCriteria criteria : query.getCriteria()) {
if (criteria.getKey().equals("objectType")) {
url += encodeUri(criteria.getKey()+"="+criteria.getValue()+"&");
continue;
}
if (criteria.getKey().equals("name")) {
if (first) first = false;
else textQuery = " ";
textQuery = criteria.getValue();
continue;
}
if (criteria.getKey().startsWith("http")) {
url += encodeUri("<" + criteria.getKey() + ">=");
} else {
url += encodeUri(criteria.getKey()+"=");
}
if (criteria.getValue().startsWith("http")) {
url += encodeUri("<"+criteria.getValue()+">&");
} else {
url += encodeUri("'"+criteria.getValue()+"'&");
}
}
url += encodeUri(textQuery);
if (query.getOffset()!=null && query.getLimit()!=null) {
url += "/?offset="+query.getOffset() + "&" + "limit="+query.getLimit();
} else if (query.getOffset()!=null) {
url += "/?offset="+query.getOffset();
} else if (query.getLimit()!=null) {
url += "/?limit="+query.getLimit();
}
//System.out.println(url);
Gson gson = new Gson();
HttpGet request = new HttpGet(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
try
{
HttpResponse response = client.execute(request);
checkResponseCode(response);
InputStream inputStream = response.getEntity().getContent();
ArrayList<IdentifiedMetadata> metadataList = gson.fromJson(
new InputStreamReader(inputStream),
new TypeToken<ArrayList<IdentifiedMetadata>>(){}.getType());
return metadataList;
}
catch (Exception e)
{
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
    /**
     * Search the default store for Collections that are not members of any other Collections
     *
     * @return An ArrayList of CollectionMetaData objects with a summary of all matching Collections.
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public ArrayList<IdentifiedMetadata> getRootCollectionMetadata()
            throws SynBioHubException
    {
        String url = backendUrl + "/rootCollections";

        Gson gson = new Gson();

        HttpGet request = new HttpGet(url);
        request.setHeader("X-authorization", user);
        request.setHeader("Accept", "text/plain");

        try
        {
            HttpResponse response = client.execute(request);
            checkResponseCode(response);
            InputStream inputStream = response.getEntity().getContent();
            // Deserialize the JSON array of collection summaries.
            ArrayList<IdentifiedMetadata> metadataList = gson.fromJson(
                    new InputStreamReader(inputStream),
                    new TypeToken<ArrayList<IdentifiedMetadata>>(){}.getType());
            return metadataList;
        }
        catch (Exception e)
        {
            throw new SynBioHubException(e);
        }
        finally
        {
            request.releaseConnection();
        }
    }

    /**
     * Perform a SPARQL query
     * @param query SPARQL query string
     *
     * @return result as a JSON string
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public String sparqlQuery(String query) throws SynBioHubException
    {
        String url = backendUrl + "/sparql";
        url += "?query="+encodeUri(query);

        HttpGet request = new HttpGet(url);
        request.setHeader("X-authorization", user);
        request.setHeader("Accept", "application/json");

        try
        {
            HttpResponse response = client.execute(request);
            checkResponseCode(response);
            InputStream inputStream = response.getEntity().getContent();
            // Return the raw JSON result body unparsed.
            String result = inputStreamToString(inputStream);
            return result;
        }
        catch (Exception e)
        {
            throw new SynBioHubException(e);
        }
        finally
        {
            request.releaseConnection();
        }
    }

    /**
     * Search the default store for Collections that are members of the specified Collection
     *
     * @param parentCollectionUri URI for Collection to search for member Collections
     * @return An ArrayList of CollectionMetaData objects with a summary of all matching Collections.
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public ArrayList<IdentifiedMetadata> getSubCollectionMetadata(URI parentCollectionUri)
            throws SynBioHubException
    {
        if (!parentCollectionUri.toString().startsWith(uriPrefix)) {
            throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
        }
        String url = parentCollectionUri + "/subCollections";
        url = url.replace(uriPrefix, backendUrl);

        Gson gson = new Gson();

        HttpGet request = new HttpGet(url);
        request.setHeader("X-authorization", user);
        request.setHeader("Accept", "text/plain");

        try
        {
            HttpResponse response = client.execute(request);
            checkResponseCode(response);
            InputStream inputStream = response.getEntity().getContent();
            ArrayList<IdentifiedMetadata> metadataList = gson.fromJson(
                    new InputStreamReader(inputStream),
                    new TypeToken<ArrayList<IdentifiedMetadata>>(){}.getType());
            return metadataList;
        }
        catch (Exception e)
        {
            throw new SynBioHubException(e);
        }
        finally
        {
            request.releaseConnection();
        }
    }
// /**
// * Upload an SBOLDocument to the SynBioHub.
// *
// * @param document The document to upload
// *
// * @throws SynBioHubException if there was an error communicating with the SynBioHub
// */
// public void upload(SBOLDocument document) throws SynBioHubException
// {
// String url = backendUrl;
//
// HttpPost request = new HttpPost(url);
//
// try
// {
// request.setEntity(new StringEntity(serializeDocument(document)));
// request.setHeader("Content-Type", "application/rdf+xml");
//
// HttpResponse response = client.execute(request);
//
// checkResponseCode(response);
// }
// catch (Exception e)
// {
// throw new SynBioHubException(e);
// }
// finally
// {
// request.releaseConnection();
// }
// }
    /**
     * Sets the user to null to indicate that no user is logged in.
     */
    public void logout()
    {
        // Clear the session token; subsequent requests are unauthenticated.
        user = "";
    }

    /**
     * Login to the SynBioHub.
     * @param email The user's email
     * @param password The user's password
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public void login(String email, String password) throws SynBioHubException
    {
        String url = backendUrl + "/login";

        HttpPost request = new HttpPost(url);
        request.setHeader("Accept", "text/plain");

        List<NameValuePair> params = new ArrayList<NameValuePair>();
        params.add(new BasicNameValuePair("email", email));
        params.add(new BasicNameValuePair("password", password));

        try
        {
            request.setEntity(new UrlEncodedFormEntity(params));
            request.setHeader("Content-Type", "application/x-www-form-urlencoded");

            HttpResponse response = client.execute(request);
            checkResponseCode(response);
            HttpEntity entity = response.getEntity();
            // The response body is the session token sent as X-authorization on later calls.
            user = inputStreamToString(entity.getContent());
        }
        catch (Exception e)
        {
            throw new SynBioHubException(e);
        }
        finally
        {
            request.releaseConnection();
        }
    }
    /**
     * Remove all parts from this registry from a given SBOL document
     *
     * @param document The document to remove all registry parts from
     */
    public void removeRegistryParts(SBOLDocument document) {
        for (TopLevel topLevel : document.getTopLevels()) {
            if (topLevel.getIdentity().toString().startsWith(uriPrefix)) {
                // NOTE(review): removes while iterating; assumes getTopLevels()
                // returns a snapshot rather than a live view -- TODO confirm in libSBOLj.
                try {
                    document.removeTopLevel(topLevel);
                }
                catch (SBOLValidationException e) {
                    // TODO: ignore for now
                }
            }
        }
    }

    /**
     * Attach a file to an object in SynBioHub.
     * @param topLevelUri identity of the object to attach the file to
     * @param filename the name of the file to attach
     *
     * @throws SynBioHubException if there was an error communicating with the SynBioHub
     */
    public void attachFile(URI topLevelUri, String filename) throws SynBioHubException
    {
        // Convenience overload; delegates to the File-based variant.
        attachFile(topLevelUri,new File(filename));
    }
/**
* Attach a file to an object in SynBioHub.
* @param topLevelUri identity of the object to attach the file to
* @param file the file to attach
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public void attachFile(URI topLevelUri, File file) throws SynBioHubException
{
if (user.equals("")) {
Exception e = new Exception("Must be logged in to submit.");
throw new SynBioHubException(e);
}
String url = topLevelUri + "/attach";
url = url.replace(uriPrefix, backendUrl);
HttpPost request = new HttpPost(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
MultipartEntityBuilder params = MultipartEntityBuilder.create();
/* example for setting a HttpMultipartMode */
params.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
params.addTextBody("user", user);
params.addBinaryBody("file", file);
try
{
request.setEntity(params.build());
HttpResponse response = client.execute(request);
checkResponseCode(response);
}
catch (Exception e)
{
//e.printStackTrace();
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
    /**
     * Submit to the SynBioHub.
     * @param id The submission identifier
     * @param version The submission version
     * @param name The submission name
     * @param description The submission description
     * @param citations The pubMedIds for this submission
     * @param collections A comma separated list of collections
     * @param overwrite_merge '0' prevent, '1' overwrite, '2' merge and prevent, '3' merge and overwrite
     * @param document the SBOL document to submit, or null to submit without a file
     *
     * @throws SynBioHubException if not logged in or if there was an error
     *         communicating with the SynBioHub
     */
    public void submit(String id, String version, String name, String description, String citations,
            String collections, String overwrite_merge, SBOLDocument document) throws SynBioHubException
    {
        if (user.equals("")) {
            Exception e = new Exception("Must be logged in to submit.");
            throw new SynBioHubException(e);
        }

        String url = backendUrl + "/submit";

        HttpPost request = new HttpPost(url);
        request.setHeader("X-authorization", user);
        request.setHeader("Accept", "text/plain");

        // Submission metadata and the (optional) serialized SBOL document are sent
        // as a browser-compatible multipart form.
        MultipartEntityBuilder params = MultipartEntityBuilder.create();
        params.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);

        params.addTextBody("id", id);
        params.addTextBody("version", version);
        params.addTextBody("name", name);
        params.addTextBody("description", description);
        params.addTextBody("citations", citations);
        params.addTextBody("collectionChoices", collections);
        params.addTextBody("overwrite_merge", overwrite_merge);
        params.addTextBody("user", user);

        if (document != null) {
            // Serialize in-memory and stream as the "file" form field.
            InputStream stream = new ByteArrayInputStream(serializeDocument(document).getBytes());
            params.addBinaryBody("file", stream, ContentType.APPLICATION_XML, "file");
        } else {
            params.addTextBody("file", "");
        }

        try
        {
            request.setEntity(params.build());
            HttpResponse response = client.execute(request);
            checkResponseCode(response);
        }
        catch (Exception e)
        {
            //e.printStackTrace();
            throw new SynBioHubException(e);
        }
        finally
        {
            request.releaseConnection();
        }
    }
    // Serializes an SBOL document to its RDF/XML form and returns it as a UTF-8 string.
    private String serializeDocument(SBOLDocument document) throws SynBioHubException
    {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

        try
        {
            SBOLWriter.write(document, outputStream);
            return outputStream.toString("UTF-8");
        }
        catch(Exception e)
        {
            throw new SynBioHubException("Error serializing SBOL document", e);
        }
    }

    // GETs the given URL and parses the response body as an SBOL document, always
    // releasing the underlying connection once parsing finishes (or fails).
    private SBOLDocument fetchFromSynBioHub(String url) throws SynBioHubException
    {
        HttpStream stream;

        try
        {
            stream = fetchContentAsInputStream(url);
        }
        catch (Exception e)
        {
            throw new SynBioHubException("Error connecting to SynBioHub endpoint", e);
        }

        SBOLDocument document;

        try
        {
            document = SBOLReader.read(stream.inputStream);
        }
        catch (Exception e)
        {
            throw new SynBioHubException("Error reading SBOL", e);
        }
        finally
        {
            stream.request.releaseConnection();
        }

        //TopLevel topLevel = document.getTopLevel(topLevelUri);

        //if(topLevel == null)
        //{
        //    throw new SynBioHubException("Matching top-level not found in response");
        //}

        return document;
    }
private int fetchCount(String url) throws SynBioHubException
{
try
{
return Integer.parseInt(fetchContentAsString(url));
}
catch(Exception e)
{
throw new SynBioHubException(e);
}
}
private void fetchContentSaveToFile(String url,String path) throws SynBioHubException, IOException
{
HttpGet request = new HttpGet(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
try
{
HttpResponse response = client.execute(request);
checkResponseCode(response);
String filename = "default";
String dispositionValue = response.getFirstHeader("Content-Disposition").getValue();
int index = dispositionValue.indexOf("filename=");
if (index > 0) {
filename = dispositionValue.substring(index + 10, dispositionValue.length() - 1);
}
File file = new File(path + filename);
HttpEntity entity = response.getEntity();
if (entity != null) {
try (FileOutputStream outstream = new FileOutputStream(file)) {
entity.writeTo(outstream);
}
}
}
catch(SynBioHubException e)
{
request.releaseConnection();
throw e;
}
catch(IOException e)
{
request.releaseConnection();
throw e;
}
}
private String fetchContentAsString(String url) throws SynBioHubException, IOException
{
HttpStream stream = fetchContentAsInputStream(url);
String str;
try
{
str = inputStreamToString(stream.inputStream);
}
finally
{
stream.request.releaseConnection();
}
return str;
}
private String inputStreamToString(InputStream inputStream) throws IOException
{
StringWriter writer = new StringWriter();
IOUtils.copy(inputStream, writer);
return writer.toString();
}
    // Pairs an open response body stream with the request that owns it, so the
    // caller can release the pooled connection once it has finished reading.
    class HttpStream
    {
        public InputStream inputStream;
        public HttpRequestBase request;
    }

    // Performs a GET and returns the open body stream plus its request.
    // The caller MUST call stream.request.releaseConnection() when finished;
    // on failure this method releases the connection itself before rethrowing.
    private HttpStream fetchContentAsInputStream(String url) throws SynBioHubException, IOException
    {
        HttpGet request = new HttpGet(url);
        request.setHeader("X-authorization", user);
        request.setHeader("Accept", "text/plain");

        try
        {
            HttpResponse response = client.execute(request);
            checkResponseCode(response);
            HttpStream res = new HttpStream();
            res.inputStream = response.getEntity().getContent();
            res.request = request;
            return res;
        }
        catch(SynBioHubException e)
        {
            request.releaseConnection();
            throw e;
        }
        catch(IOException e)
        {
            request.releaseConnection();
            throw e;
        }
    }
private String encodeUri(String uri)
{
try
{
return URLEncoder.encode(uri, "UTF-8").replace("+", "%20");
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException("UTF-8 not supported?");
}
}
    // Maps HTTP error statuses to library exceptions: 401 -> PermissionException,
    // 404 -> NotFoundException, any other status >= 300 -> SynBioHubException
    // carrying the response body (or just the status code if the body is unreadable).
    private void checkResponseCode(HttpResponse response) throws SynBioHubException
    {
        int statusCode = response.getStatusLine().getStatusCode();

        if(statusCode >= 300)
        {
            switch(statusCode)
            {
            case 401:
                throw new PermissionException();

            case 404:
                throw new NotFoundException();

            default:
                HttpEntity entity = response.getEntity();
                try {
                    throw new SynBioHubException(inputStreamToString(entity.getContent()));
                }
                catch (UnsupportedOperationException | IOException e) {
                    throw new SynBioHubException(statusCode+"");
                }
            }
        }
    }
}
| core2/src/main/java/org/synbiohub/frontend/SynBioHubFrontend.java |
package org.synbiohub.frontend;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.sbolstandard.core2.*;
/**
* Provides a Java API to SynBioHub instances.
* @author James McLaughlin
* @author Chris Myers
*
*/
public class SynBioHubFrontend
{
PoolingHttpClientConnectionManager connectionManager;
HttpClient client;
String backendUrl;
String uriPrefix;
String user = "";
/**
* Creates an instance of the SynBioHub API.
* @param backendUrl - URL for the SynBioHub instance.
* @param uriPrefix - prefix for all URIs stored in this repository
*/
public SynBioHubFrontend(String backendUrl,String uriPrefix)
{
this.backendUrl = backendUrl;
this.uriPrefix = uriPrefix;
connectionManager = new PoolingHttpClientConnectionManager();
client = HttpClients.custom().setConnectionManager(connectionManager).build();
}
/**
* Creates an instance of the SynBioHub API.
* @param backendUrl - URL for the SynBioHub instance.
*/
public SynBioHubFrontend(String backendUrl)
{
this.backendUrl = backendUrl;
this.uriPrefix = backendUrl;
connectionManager = new PoolingHttpClientConnectionManager();
client = HttpClients.custom().setConnectionManager(connectionManager).build();
}
/**
* Returns the URL for the SynBioHub instance.
* @return the URL for the SynBioHub instance.
*/
public String getBackendUrl()
{
return this.backendUrl;
}
/**
* Return the total number of objects of a specified type in the repository.
*
* @return the total number of objects of a specified type in the repository.
*
* @param objectType The object type to count
* (Collection, ComponentDefinition, Sequence, ModuleDefinition, Model, etc.).
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public int getCount(String objectType) throws SynBioHubException
{
return fetchCount(backendUrl + "/" + objectType + "/count");
}
/**
* Retrieve SBOL TopLevel object from a SynBioHub instance using its URI.
*
* @param topLevelUri The URI of the SBOL TopLevel
*
* @return A libSBOLj TopLevel instance corresponding to the TopLevel
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public SBOLDocument getSBOL(URI topLevelUri) throws SynBioHubException
{
if (!topLevelUri.toString().startsWith(uriPrefix)) {
throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
}
String url = topLevelUri + "/sbol";
url = url.replace(uriPrefix, backendUrl);
SBOLDocument document = fetchFromSynBioHub(url);
return document;
}
/**
* Remove SBOL TopLevel object from a SynBioHub instance using its URI.
*
* @param topLevelUri The URI of the SBOL TopLevel
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public void removeSBOL(URI topLevelUri) throws SynBioHubException
{
if (!topLevelUri.toString().startsWith(uriPrefix)) {
throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
}
String url = topLevelUri + "/remove";
url = url.replace(uriPrefix, backendUrl);
fetchFromSynBioHub(url);
}
/**
* Search the default store for ComponentDefinition instances matching a name and/or a set of roles
*
* @param name The dcterms:title to search for, or null
* @param roles A set of role URIs to search for, or null
* @param types A set of type URIs to search for, or null
* @param collections A set of Collection URIs to search for, or null
* @param offset The offset of the results to begin at, or null to begin at 0
* @param limit The maximum number of results to return, or null to return all results
*
* @return An ArrayList of ComponentDefinitionMetaData objects with a summary of all matching ComponentDefinitions.
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public ArrayList<IdentifiedMetadata> getMatchingComponentDefinitionMetadata(String name, Set<URI> roles,
Set<URI> types, Set<URI> collections, Integer offset, Integer limit)
throws SynBioHubException
{
SearchQuery query = new SearchQuery();
query.setOffset(offset);
query.setLimit(limit);
SearchCriteria objectCriteria = new SearchCriteria();
objectCriteria.setKey("objectType");
objectCriteria.setValue("ComponentDefinition");
query.addCriteria(objectCriteria);
if (roles != null) {
for(URI uri : roles)
{
SearchCriteria roleCriteria = new SearchCriteria();
roleCriteria.setKey("role");
roleCriteria.setValue(uri.toString());
query.getCriteria().add(roleCriteria);
}
}
if (types != null) {
for(URI uri : types)
{
SearchCriteria typeCriteria = new SearchCriteria();
typeCriteria.setKey("type");
typeCriteria.setValue(uri.toString());
query.getCriteria().add(typeCriteria);
}
}
if (collections != null) {
for(URI uri : collections)
{
SearchCriteria collectionCriteria = new SearchCriteria();
collectionCriteria.setKey("collection");
collectionCriteria.setValue(uri.toString());
query.getCriteria().add(collectionCriteria);
}
}
if(name != null)
{
SearchCriteria nameCriteria = new SearchCriteria();
nameCriteria.setKey("name");
nameCriteria.setValue(name);
query.getCriteria().add(nameCriteria);
}
return search(query);
}
/**
* Search this SynBioHub instance for objects matching a search query
*
* @param query the search query
*
* @return An ArrayList of MetaData for objects that match the specified search query
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public ArrayList<IdentifiedMetadata> search(SearchQuery query) throws SynBioHubException
{
String url = backendUrl + "/search/";
//query.offset = offset;
//query.limit = limit;
String textQuery = "";
boolean first = true;
for (SearchCriteria criteria : query.getCriteria()) {
if (criteria.getKey().equals("objectType")) {
url += encodeUri(criteria.getKey()+"="+criteria.getValue()+"&");
continue;
}
if (criteria.getKey().equals("name")) {
if (first) first = false;
else textQuery = " ";
textQuery = criteria.getValue();
continue;
}
if (criteria.getKey().startsWith("http")) {
url += encodeUri("<" + criteria.getKey() + ">=");
} else {
url += encodeUri(criteria.getKey()+"=");
}
if (criteria.getValue().startsWith("http")) {
url += encodeUri("<"+criteria.getValue()+">&");
} else {
url += encodeUri("'"+criteria.getValue()+"'&");
}
}
url += encodeUri(textQuery);
if (query.getOffset()!=null && query.getLimit()!=null) {
url += "/?offset="+query.getOffset() + "&" + "limit="+query.getLimit();
} else if (query.getOffset()!=null) {
url += "/?offset="+query.getOffset();
} else if (query.getLimit()!=null) {
url += "/?limit="+query.getLimit();
}
//System.out.println(url);
Gson gson = new Gson();
HttpGet request = new HttpGet(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
try
{
HttpResponse response = client.execute(request);
checkResponseCode(response);
InputStream inputStream = response.getEntity().getContent();
ArrayList<IdentifiedMetadata> metadataList = gson.fromJson(
new InputStreamReader(inputStream),
new TypeToken<ArrayList<IdentifiedMetadata>>(){}.getType());
return metadataList;
}
catch (Exception e)
{
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
/**
* Search the default store for Collections that are not members of any other Collections
*
* @return An ArrayList of CollectionMetaData objects with a summary of all matching Collections.
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public ArrayList<IdentifiedMetadata> getRootCollectionMetadata()
throws SynBioHubException
{
String url = backendUrl + "/rootCollections";
Gson gson = new Gson();
HttpGet request = new HttpGet(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
try
{
HttpResponse response = client.execute(request);
checkResponseCode(response);
InputStream inputStream = response.getEntity().getContent();
ArrayList<IdentifiedMetadata> metadataList = gson.fromJson(
new InputStreamReader(inputStream),
new TypeToken<ArrayList<IdentifiedMetadata>>(){}.getType());
return metadataList;
}
catch (Exception e)
{
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
/**
* Perform a SPARQL query
* @param query SPARQL query string
*
* @return result as a JSON string
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public String sparqlQuery(String query) throws SynBioHubException
{
String url = backendUrl + "/sparql";
url += "?query="+encodeUri(query);
HttpGet request = new HttpGet(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "application/json");
try
{
HttpResponse response = client.execute(request);
checkResponseCode(response);
InputStream inputStream = response.getEntity().getContent();
String result = inputStreamToString(inputStream);
return result;
}
catch (Exception e)
{
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
/**
* Search the default store for Collections that are members of the specified Collection
*
* @param parentCollectionUri URI for Collection to search for member Collections
* @return An ArrayList of CollectionMetaData objects with a summary of all matching Collections.
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public ArrayList<IdentifiedMetadata> getSubCollectionMetadata(URI parentCollectionUri)
throws SynBioHubException
{
if (!parentCollectionUri.toString().startsWith(uriPrefix)) {
throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
}
String url = parentCollectionUri + "/subCollections";
url = url.replace(uriPrefix, backendUrl);
Gson gson = new Gson();
HttpGet request = new HttpGet(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
try
{
HttpResponse response = client.execute(request);
checkResponseCode(response);
InputStream inputStream = response.getEntity().getContent();
ArrayList<IdentifiedMetadata> metadataList = gson.fromJson(
new InputStreamReader(inputStream),
new TypeToken<ArrayList<IdentifiedMetadata>>(){}.getType());
return metadataList;
}
catch (Exception e)
{
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
// /**
// * Upload an SBOLDocument to the SynBioHub.
// *
// * @param document The document to upload
// *
// * @throws SynBioHubException if there was an error communicating with the SynBioHub
// */
// public void upload(SBOLDocument document) throws SynBioHubException
// {
// String url = backendUrl;
//
// HttpPost request = new HttpPost(url);
//
// try
// {
// request.setEntity(new StringEntity(serializeDocument(document)));
// request.setHeader("Content-Type", "application/rdf+xml");
//
// HttpResponse response = client.execute(request);
//
// checkResponseCode(response);
// }
// catch (Exception e)
// {
// throw new SynBioHubException(e);
// }
// finally
// {
// request.releaseConnection();
// }
// }
    /**
     * Logs the user out of this SynBioHub session.
     * <p>
     * The stored user token is reset to the empty string (not null) to indicate
     * that no user is logged in; subsequent requests are sent unauthenticated.
     */
    public void logout()
    {
        user = "";
    }
/**
* Login to the SynBioHub.
* @param email The user's email
* @param password The user's password
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public void login(String email, String password) throws SynBioHubException
{
String url = backendUrl + "/login";
HttpPost request = new HttpPost(url);
request.setHeader("Accept", "text/plain");
List<NameValuePair> params = new ArrayList<NameValuePair>();
params.add(new BasicNameValuePair("email", email));
params.add(new BasicNameValuePair("password", password));
try
{
request.setEntity(new UrlEncodedFormEntity(params));
request.setHeader("Content-Type", "application/x-www-form-urlencoded");
HttpResponse response = client.execute(request);
checkResponseCode(response);
HttpEntity entity = response.getEntity();
user = inputStreamToString(entity.getContent());
}
catch (Exception e)
{
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
/**
* Remove all parts from this registry from a given SBOL document
*
* @param document The document to remove all registry parts from
*/
public void removeRegistryParts(SBOLDocument document) {
for (TopLevel topLevel : document.getTopLevels()) {
if (topLevel.getIdentity().toString().startsWith(uriPrefix)) {
try {
document.removeTopLevel(topLevel);
}
catch (SBOLValidationException e) {
// TODO: ignore for now
}
}
}
}
/**
* Attach a file to an object in SynBioHub.
* @param topLevelUri identity of the object to attach the file to
* @param filename the name of the file to attach
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public void attachFile(URI topLevelUri, String filename) throws SynBioHubException
{
attachFile(topLevelUri,new File(filename));
}
/**
* Attach a file to an object in SynBioHub.
* @param topLevelUri identity of the object to attach the file to
* @param file the file to attach
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public void attachFile(URI topLevelUri, File file) throws SynBioHubException
{
if (user.equals("")) {
Exception e = new Exception("Must be logged in to submit.");
throw new SynBioHubException(e);
}
String url = topLevelUri + "/attach";
url = url.replace(uriPrefix, backendUrl);
HttpPost request = new HttpPost(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
MultipartEntityBuilder params = MultipartEntityBuilder.create();
/* example for setting a HttpMultipartMode */
params.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
params.addTextBody("user", user);
params.addBinaryBody("file", file);
try
{
request.setEntity(params.build());
HttpResponse response = client.execute(request);
checkResponseCode(response);
}
catch (Exception e)
{
//e.printStackTrace();
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
/**
* Submit to the SynBioHub.
* @param id The submission identifier
* @param version The submission version
* @param name The submission name
* @param description The submission description
* @param citations The pubMedIds for this submission
* @param collections A comma separated list of collections
* @param overwrite_merge '0' prevent, '1' overwrite, '2' merge and prevent, '3' merge and overwrite
* @param document the SBOL document to submit
*
* @throws SynBioHubException if there was an error communicating with the SynBioHub
*/
public void submit(String id, String version, String name, String description, String citations,
String collections, String overwrite_merge, SBOLDocument document) throws SynBioHubException
{
if (user.equals("")) {
Exception e = new Exception("Must be logged in to submit.");
throw new SynBioHubException(e);
}
String url = backendUrl + "/submit";
HttpPost request = new HttpPost(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
MultipartEntityBuilder params = MultipartEntityBuilder.create();
/* example for setting a HttpMultipartMode */
params.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
params.addTextBody("id", id);
params.addTextBody("version", version);
params.addTextBody("name", name);
params.addTextBody("description", description);
params.addTextBody("citations", citations);
params.addTextBody("collectionChoices", collections);
params.addTextBody("overwrite_merge", overwrite_merge);
params.addTextBody("user", user);
if (document != null) {
InputStream stream = new ByteArrayInputStream(serializeDocument(document).getBytes());
params.addBinaryBody("file", stream, ContentType.APPLICATION_XML, "file");
} else {
params.addTextBody("file", "");
}
try
{
request.setEntity(params.build());
HttpResponse response = client.execute(request);
checkResponseCode(response);
}
catch (Exception e)
{
//e.printStackTrace();
throw new SynBioHubException(e);
}
finally
{
request.releaseConnection();
}
}
private String serializeDocument(SBOLDocument document) throws SynBioHubException
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
try
{
SBOLWriter.write(document, outputStream);
return outputStream.toString("UTF-8");
}
catch(Exception e)
{
throw new SynBioHubException("Error serializing SBOL document", e);
}
}
private SBOLDocument fetchFromSynBioHub(String url) throws SynBioHubException
{
HttpStream stream;
try
{
stream = fetchContentAsInputStream(url);
}
catch (Exception e)
{
throw new SynBioHubException("Error connecting to SynBioHub endpoint", e);
}
SBOLDocument document;
try
{
document = SBOLReader.read(stream.inputStream);
}
catch (Exception e)
{
throw new SynBioHubException("Error reading SBOL", e);
}
finally
{
stream.request.releaseConnection();
}
//TopLevel topLevel = document.getTopLevel(topLevelUri);
//if(topLevel == null)
//{
// throw new SynBioHubException("Matching top-level not found in response");
//}
return document;
}
private int fetchCount(String url) throws SynBioHubException
{
try
{
return Integer.parseInt(fetchContentAsString(url));
}
catch(Exception e)
{
throw new SynBioHubException(e);
}
}
private String fetchContentAsString(String url) throws SynBioHubException, IOException
{
HttpStream stream = fetchContentAsInputStream(url);
String str;
try
{
str = inputStreamToString(stream.inputStream);
}
finally
{
stream.request.releaseConnection();
}
return str;
}
private String inputStreamToString(InputStream inputStream) throws IOException
{
StringWriter writer = new StringWriter();
IOUtils.copy(inputStream, writer);
return writer.toString();
}
    // Simple pair of an open response body stream and the request that produced
    // it, so callers can release the HTTP connection when done reading.
    class HttpStream
    {
        public InputStream inputStream; // body of a successful HTTP response
        public HttpRequestBase request; // owning request; call releaseConnection() after use
    }
private HttpStream fetchContentAsInputStream(String url) throws SynBioHubException, IOException
{
HttpGet request = new HttpGet(url);
request.setHeader("X-authorization", user);
request.setHeader("Accept", "text/plain");
try
{
HttpResponse response = client.execute(request);
checkResponseCode(response);
HttpStream res = new HttpStream();
res.inputStream = response.getEntity().getContent();
res.request = request;
return res;
}
catch(SynBioHubException e)
{
request.releaseConnection();
throw e;
}
catch(IOException e)
{
request.releaseConnection();
throw e;
}
}
private String encodeUri(String uri)
{
try
{
return URLEncoder.encode(uri, "UTF-8").replace("+", "%20");
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException("UTF-8 not supported?");
}
}
private void checkResponseCode(HttpResponse response) throws SynBioHubException
{
int statusCode = response.getStatusLine().getStatusCode();
if(statusCode >= 300)
{
switch(statusCode)
{
case 401:
throw new PermissionException();
case 404:
throw new NotFoundException();
default:
HttpEntity entity = response.getEntity();
try {
throw new SynBioHubException(inputStreamToString(entity.getContent()));
}
catch (UnsupportedOperationException | IOException e) {
throw new SynBioHubException(statusCode+"");
}
}
}
}
}
| Add method to download attachments | core2/src/main/java/org/synbiohub/frontend/SynBioHubFrontend.java | Add method to download attachments | <ide><path>ore2/src/main/java/org/synbiohub/frontend/SynBioHubFrontend.java
<ide> import java.io.ByteArrayInputStream;
<ide> import java.io.ByteArrayOutputStream;
<ide> import java.io.File;
<add>import java.io.FileOutputStream;
<ide> import java.io.IOException;
<ide> import java.io.InputStream;
<ide> import java.io.InputStreamReader;
<ide> SBOLDocument document = fetchFromSynBioHub(url);
<ide>
<ide> return document;
<add> }
<add>
<add> /**
<add> * Retrieve an attachment from a SynBioHub instance using its URI.
<add> *
<add> * @param attachmentUri The URI of the SBOL Attachment object
<add> * @param path Directory path to save the attachment
<add> *
<add> * @throws SynBioHubException if there was an error communicating with the SynBioHub
<add> * @throws IOException if there is an I/O error
<add> */
<add> public void getAttachment(URI attachmentUri, String path) throws SynBioHubException, IOException
<add> {
<add> if (!attachmentUri.toString().startsWith(uriPrefix)) {
<add> throw new SynBioHubException("Object URI does not start with correct URI prefix for this repository.");
<add> }
<add> String url = attachmentUri + "/download";
<add> url = url.replace(uriPrefix, backendUrl);
<add>
<add> fetchContentSaveToFile(url,path);
<ide> }
<ide>
<ide> /**
<ide> }
<ide> }
<ide>
<add> private void fetchContentSaveToFile(String url,String path) throws SynBioHubException, IOException
<add> {
<add> HttpGet request = new HttpGet(url);
<add> request.setHeader("X-authorization", user);
<add> request.setHeader("Accept", "text/plain");
<add>
<add> try
<add> {
<add> HttpResponse response = client.execute(request);
<add>
<add> checkResponseCode(response);
<add>
<add> String filename = "default";
<add> String dispositionValue = response.getFirstHeader("Content-Disposition").getValue();
<add> int index = dispositionValue.indexOf("filename=");
<add> if (index > 0) {
<add> filename = dispositionValue.substring(index + 10, dispositionValue.length() - 1);
<add> }
<add> File file = new File(path + filename);
<add>
<add> HttpEntity entity = response.getEntity();
<add> if (entity != null) {
<add> try (FileOutputStream outstream = new FileOutputStream(file)) {
<add> entity.writeTo(outstream);
<add> }
<add> }
<add> }
<add> catch(SynBioHubException e)
<add> {
<add> request.releaseConnection();
<add>
<add> throw e;
<add> }
<add> catch(IOException e)
<add> {
<add> request.releaseConnection();
<add>
<add> throw e;
<add> }
<add> }
<add>
<ide> private String fetchContentAsString(String url) throws SynBioHubException, IOException
<ide> {
<ide> HttpStream stream = fetchContentAsInputStream(url); |
|
JavaScript | mit | 38c2e019f6d66a1b946622a248ee0e81952086b3 | 0 | derekmpham/interview-prep,derekmpham/interview-prep | "use strict";
// LINKED LIST
// define constructor
function Node(data) {
this.data = data;
this.next = null;
}
function LinkedList() {
this._length = 0; // assign number of nodes in linked list
this.head = null; // points to head of linked list (node at front of linked list)
}
// add node to linked list
LinkedList.prototype.add = function(val) {
var node = new Node(val), // create new instance of node
currentNode = this.head;
// first case: if linked list is initially empty
if (!currentNode) {
this.head = node; // make new node head of linked list
this._length++;
return node;
}
// second case: if list linked is initially not empty
while (currentNode.next) {
currentNode = currentNode.next; // iterate through entire non-empty linked list to get to end of linked list
}
currentNode.next = node; // add new node to end of linked list
this._length++;
return node;
};
// search nodes at specific positions in linked list
LinkedList.prototype.searchNodeAt = function(position) {
var currentNode = this.head,
length = this._length,
count = 1,
message = {failure: 'Failure: non-existent node in this list'};
// first case: invalid position
if (length === 0 || position < 1 || position > length) {
throw new Error(message.failure);
}
// second case: valid position
while (count < position) {
// go through entire linked list until currentNode is equal to position
currentNode = currentNode.next;
count++;
}
return currentNode; // here currentNode is equal to position
};
// remove node from linked list
LinkedList.prototype.remove = function(position) {
var currentNode = this.head,
length = this.length,
count = 0,
message = {failure: 'Failure: non-existent node in this list'},
beforeNodeToDelete = null,
nodeToDelete = null,
deletedNode = null;
// first case: invalid position
if (position < 0 || position > length) {
throw new Error(message.failure);
}
// second case: first node is removed
if (position === 1) {
this.head = currentNode.next; // head reassigned
deletedNode = currentNode;
currentNode = null;
this._length--;
return deletedNode;
}
// third case: any other node is removed
while (count < position) { // loop until we reach node at position we want to remove
beforeNodeToDelete = currentNode;
nodeToDelete = currentNode.next;
count++;
}
beforeNodeToDelete.next = nodeToDelete.next;
deletedNode = nodeToDelete;
nodeToDelete = null;
this._length--;
return deletedNode;
};
// remove duplicates from linked list
LinkedList.prototype.removeDuplicates = function() {
var currentNode = this.head,
nextNode = currentNode.next,
nodes = {},
message = {failure: 'Failure: empty or single node linked list'};
// first case: empty or only one node in linked list
if (!currentNode || !nextNode) {
throw new Error(message.failure);
}
}
| remove-duplicates-linked-list.js | "use strict";
// LINKED LIST
// define constructor
function Node(data) {
this.data = data;
this.next = null;
}
function LinkedList() {
this._length = 0; // assign number of nodes in linked list
this.head = null; // points to head of linked list (node at front of linked list)
}
// add node to linked list
LinkedList.prototype.add = function(val) {
var node = new Node(val), // create new instance of node
currentNode = this.head;
// first case: if linked list is initially empty
if (!currentNode) {
this.head = node; // make new node head of linked list
this._length++;
return node;
}
// second case: if list linked is initially not empty
while (currentNode.next) {
currentNode = currentNode.next; // iterate through entire non-empty linked list to get to end of linked list
}
currentNode.next = node; // add new node to end of linked list
this._length++;
return node;
};
// search nodes at specific positions in linked list
LinkedList.prototype.searchNodeAt = function(position) {
var currentNode = this.head,
length = this._length,
count = 1,
message = {failure: 'Failure: non-existent node in this list'};
// first case: invalid position
if (length === 0 || position < 1 || position > length) {
throw new Error(message.failure);
}
// second case: valid position
while (count < position) {
// go through entire linked list until currentNode is equal to position
currentNode = currentNode.next;
count++;
}
return currentNode; // here currentNode is equal to position
};
// remove node from linked list
LinkedList.prototype.remove = function(position) {
var currentNode = this.head,
length = this.length,
count = 0,
message = {failure: 'Failure: non-existent node in this list'},
beforeNodeToDelete = null,
nodeToDelete = null,
deletedNode = null;
// first case: invalid position
if (position < 0 || position > length) {
throw new Error(message.failure);
}
// second case: first node is removed
if (position === 1) {
this.head = currentNode.next; // head reassigned
deletedNode = currentNode;
currentNode = null;
this._length--;
return deletedNode;
}
// third case: any other node is removed
while (count < position) { // loop until we reach node at position we want to remove
beforeNodeToDelete = currentNode;
nodeToDelete = currentNode.next;
count++;
}
beforeNodeToDelete.next = nodeToDelete.next;
deletedNode = nodeToDelete;
nodeToDelete = null;
this._length--;
return deletedNode;
};
| Account for first case in remove duplicates method
| remove-duplicates-linked-list.js | Account for first case in remove duplicates method | <ide><path>emove-duplicates-linked-list.js
<ide>
<ide> return deletedNode;
<ide> };
<add>
<add>// remove duplicates from linked list
<add>LinkedList.prototype.removeDuplicates = function() {
<add> var currentNode = this.head,
<add> nextNode = currentNode.next,
<add> nodes = {},
<add> message = {failure: 'Failure: empty or single node linked list'};
<add>
<add> // first case: empty or only one node in linked list
<add> if (!currentNode || !nextNode) {
<add> throw new Error(message.failure);
<add> }
<add>
<add>
<add>}
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add> |
|
JavaScript | bsd-3-clause | 2384abb04a5aa2df93c85c8784b01f0a00fbd9b3 | 0 | pirafrank/crypto-js,pirafrank/crypto-js,pirafrank/crypto-js,pirafrank/crypto-js | (function(){
// Shortcut
var util = Crypto.util;
Crypto.PBKDF2 = function (password, salt, keylen, options) {
// Defaults
var hasher = options && options.hasher || Crypto.SHA1,
iterations = options && options.iterations || 1;
// Pseudo-random function
function PRF(password, salt) {
return Crypto.HMAC(hasher, salt, password, { asBytes: true });
}
// Generate key
var derivedKeyBytes = [],
blockindex = 1;
while (derivedKeyBytes.length < keylen) {
var block = PRF(password, salt + util.bytesToString(
util.wordsToBytes([blockindex]))),
u = block;
for (var i = 1; i < iterations; i++) {
u = PRF(password, util.bytesToString(u));
for (var j = 0; j < block.length; j++) block[j] ^= u[j];
}
derivedKeyBytes = derivedKeyBytes.concat(block);
blockindex++;
}
// Truncate excess bytes
derivedKeyBytes.splice(keylen);
return options && options.asBytes ? derivedKeyBytes :
options && options.asString ? util.bytesToString(derivedKeyBytes) :
util.bytesToHex(derivedKeyBytes);
};
})();
| trunk/src/PBKDF2.js | (function(){
// Shortcut
var util = Crypto.util;
Crypto.PBKDF2 = function (password, salt, keylen, options) {
// Defaults
var hasher = options && options.hasher || Crypto.SHA1,
iterations = options && options.iterations || 1;
// Pseudo-random function
function prf(password, salt) {
return Crypto.HMAC(hasher, password, salt, { asBytes: true });
}
// Generate key
var derivedKeyBytes = [],
blockindex = 1;
while (derivedKeyBytes.length < keylen) {
var block = prf(password, salt + util.bytesToString(
util.wordsToBytes([blockindex]))),
u = block;
for (var i = 1; i < iterations; i++) {
u = prf(password, util.bytesToString(u));
for (var j = 0; j < block.length; j++) block[j] ^= u[j];
}
derivedKeyBytes = derivedKeyBytes.concat(block);
blockindex++;
}
// Truncate excess bytes
derivedKeyBytes.splice(keylen);
return options && options.asBytes ? derivedKeyBytes :
options && options.asString ? util.bytesToString(derivedKeyBytes) :
util.bytesToHex(derivedKeyBytes);
};
})();
| Fixed argument order, and cosmetic change. | trunk/src/PBKDF2.js | Fixed argument order, and cosmetic change. | <ide><path>runk/src/PBKDF2.js
<ide> iterations = options && options.iterations || 1;
<ide>
<ide> // Pseudo-random function
<del> function prf(password, salt) {
<del> return Crypto.HMAC(hasher, password, salt, { asBytes: true });
<add> function PRF(password, salt) {
<add> return Crypto.HMAC(hasher, salt, password, { asBytes: true });
<ide> }
<ide>
<ide> // Generate key
<ide> blockindex = 1;
<ide> while (derivedKeyBytes.length < keylen) {
<ide>
<del> var block = prf(password, salt + util.bytesToString(
<add> var block = PRF(password, salt + util.bytesToString(
<ide> util.wordsToBytes([blockindex]))),
<ide> u = block;
<ide> for (var i = 1; i < iterations; i++) {
<del> u = prf(password, util.bytesToString(u));
<add> u = PRF(password, util.bytesToString(u));
<ide> for (var j = 0; j < block.length; j++) block[j] ^= u[j];
<ide> }
<ide> |
|
Java | mit | 949e36de3717801293b012917d7f011765f4920e | 0 | algoliareadmebot/algoliasearch-client-android,algoliareadmebot/algoliasearch-client-android,algolia/algoliasearch-client-android,algolia/algoliasearch-client-android,algolia/algoliasearch-client-android | /*
* Copyright (c) 2012-2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.algolia.search.saas;
import android.content.Context;
import android.support.annotation.NonNull;
import com.algolia.search.offline.core.Sdk;
import java.io.File;
import java.lang.ref.WeakReference;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* An API client that adds offline features on top of the regular online API client.
*
* <p>NOTE: Requires Algolia's SDK. The {@link #enableOfflineMode(String)} method must be called with a valid license
* key prior to calling any offline-related method.</p>
*/
public class OfflineClient extends Client
{
private Context context;
private File rootDataDir;
// Threading facilities
// --------------------
// Used by the indices to coordinate their execution.
//
// NOTE: The build and search queues must be serial to prevent concurrent searches or builds on a given index, but
// may be distinct because building can be done in parallel with search.
//
// NOTE: Although serialization is only strictly needed at the index level, we use global queues as a way to limit
// resource consumption by the SDK.
/** Background queue used to build local indices. */
protected ExecutorService localBuildExecutorService = Executors.newSingleThreadExecutor();
/** Background queue used to search local indices. */
protected ExecutorService localSearchExecutorService = Executors.newSingleThreadExecutor();
/**
* Construct a new offline-enabled API client.
*
* @param context An Android context.
* @param applicationID See {@link Client}.
* @param apiKey See {@link Client}.
*/
public OfflineClient(@NonNull Context context, @NonNull String applicationID, @NonNull String apiKey)
{
this(context, applicationID, apiKey, null, null);
}
/**
* Construct a new offline-enabled API client.
*
* @param context An Android context.
* @param applicationID See {@link Client}.
* @param apiKey See {@link Client}.
* @param dataDir Path to the directory where the local data will be stored. If null, the default directory will
* be used. See {@link #getDefaultDataDir()}.
*/
public OfflineClient(@NonNull Context context, @NonNull String applicationID, @NonNull String apiKey, File dataDir)
{
this(context, applicationID, apiKey, dataDir, null);
}
/**
* Construct a new offline-enabled API client.
*
* @param context An Android context.
* @param applicationID See {@link Client}.
* @param apiKey See {@link Client}.
* @param dataDir Path to the directory where the local data will be stored. If null, the default directory will
* be used. See {@link #getDefaultDataDir()}.
* @param hosts See {@link Client}.
*/
public OfflineClient(@NonNull Context context, @NonNull String applicationID, @NonNull String apiKey, File dataDir, String[] hosts)
{
super(applicationID, apiKey, hosts);
this.context = context;
if (dataDir != null) {
this.rootDataDir = dataDir;
} else {
this.rootDataDir = getDefaultDataDir();
}
this.addUserAgent(new LibraryVersion("algoliasearch-offline-core-android", Sdk.getInstance().getVersionString()));
}
/**
* Create a new index. Although this will always be an instance of {@link MirroredIndex}, mirroring is deactivated
* by default.
*
* @param indexName the name of index
* @return The newly created index.
*
* @deprecated You should now use {@link #getIndex(String)}, which re-uses instances with the same name.
*/
@Override
public MirroredIndex initIndex(@NonNull String indexName)
{
return new MirroredIndex(this, indexName);
}
/**
* Obtain a mirrored index. Although this will always be an instance of {@link MirroredIndex}, mirroring is
* deactivated by default.
*
* @param indexName The name of the index.
* @return A proxy to the specified index.
*/
@Override
public @NonNull MirroredIndex getIndex(@NonNull String indexName) {
MirroredIndex index = null;
WeakReference<Index> existingIndex = indices.get(indexName);
if (existingIndex != null) {
index = (MirroredIndex)existingIndex.get();
}
if (index == null) {
index = new MirroredIndex(this, indexName);
indices.put(indexName, new WeakReference<Index>(index));
}
return index;
}
/**
* Get the path to directory where the local data is stored.
*/
public @NonNull File getRootDataDir()
{
return rootDataDir;
}
/**
* Get the path to the temporary directory used by this client.
*
* @return The path to the temporary directory.
*/
protected @NonNull File getTempDir() {
return context.getCacheDir();
}
/**
* Enable the offline mode.
* @param licenseData License for Algolia's SDK.
*/
public void enableOfflineMode(@NonNull String licenseData) {
// Init the SDK.
Sdk.getInstance().init(context, licenseData);
// TODO: Report any error.
}
/**
* Get the default data directory.
* This is an "algolia" subdirectory inside the application's files directory.
*
* @return The default data directory.
*/
public File getDefaultDataDir() {
return new File(context.getFilesDir(), "algolia");
}
}
| algoliasearch/src/offline/java/com/algolia/search/saas/OfflineClient.java | /*
* Copyright (c) 2012-2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.algolia.search.saas;
import android.content.Context;
import android.support.annotation.NonNull;
import com.algolia.search.offline.core.Sdk;
import java.io.File;
import java.lang.ref.WeakReference;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* An API client that adds offline features on top of the regular online API client.
*
* <p>NOTE: Requires Algolia's SDK. The {@link #enableOfflineMode(String)} method must be called with a valid license
* key prior to calling any offline-related method.</p>
*/
public class OfflineClient extends Client
{
private Context context;
private File rootDataDir;
// Threading facilities
// --------------------
// Used by the indices to coordinate their execution.
//
// NOTE: The build and search queues must be serial to prevent concurrent searches or builds on a given index, but
// may be distinct because building can be done in parallel with search.
//
// NOTE: Although serialization is only strictly needed at the index level, we use global queues as a way to limit
// resource consumption by the SDK.
/** Background queue used to build local indices. */
protected ExecutorService localBuildExecutorService = Executors.newSingleThreadExecutor();
/** Background queue used to search local indices. */
protected ExecutorService localSearchExecutorService = Executors.newSingleThreadExecutor();
/**
* Construct a new offline-enabled API client.
*
* @param context An Android context.
* @param applicationID See {@link Client}.
* @param apiKey See {@link Client}.
*/
public OfflineClient(@NonNull Context context, @NonNull String applicationID, @NonNull String apiKey)
{
this(context, applicationID, apiKey, null, null);
}
/**
* Construct a new offline-enabled API client.
*
* @param context An Android context.
* @param applicationID See {@link Client}.
* @param apiKey See {@link Client}.
* @param dataDir Path to the directory where the local data will be stored. If null, the default directory will
* be used. See {@link #getDefaultDataDir()}.
*/
public OfflineClient(@NonNull Context context, @NonNull String applicationID, @NonNull String apiKey, File dataDir)
{
this(context, applicationID, apiKey, dataDir, null);
}
/**
* Construct a new offline-enabled API client.
*
* @param context An Android context.
* @param applicationID See {@link Client}.
* @param apiKey See {@link Client}.
* @param dataDir Path to the directory where the local data will be stored. If null, the default directory will
* be used. See {@link #getDefaultDataDir()}.
* @param hosts See {@link Client}.
*/
public OfflineClient(@NonNull Context context, @NonNull String applicationID, @NonNull String apiKey, File dataDir, String[] hosts)
{
super(applicationID, apiKey, hosts);
this.context = context;
if (dataDir != null) {
this.rootDataDir = dataDir;
} else {
this.rootDataDir = getDefaultDataDir();
}
this.addUserAgent(new LibraryVersion("algoliasearch-offline-core-android", Sdk.getInstance().getVersionString()));
}
/**
* Create a new index. Although this will always be an instance of {@link MirroredIndex}, mirroring is deactivated
* by default.
*
* @param indexName the name of index
* @return The newly created index.
*
* @deprecated You should now use {@link #getIndex(String)}, which re-uses instances with the same name.
*/
@Override
public MirroredIndex initIndex(@NonNull String indexName)
{
return new MirroredIndex(this, indexName);
}
/**
* Obtain a mirrored index. Although this will always be an instance of {@link MirroredIndex}, mirroring is
* deactivated by default.
*
* @param indexName The name of the index.
* @return A proxy to the specified index.
*/
@Override
public @NonNull MirroredIndex getIndex(@NonNull String indexName) {
MirroredIndex index = null;
WeakReference<Index> existingIndex = indices.get(indexName);
if (existingIndex != null) {
Index anIndex = existingIndex.get();
if (anIndex != null && !(anIndex instanceof MirroredIndex)) {
throw new IllegalStateException("An index with the same name but a different type has already been created");
} else {
index = (MirroredIndex)anIndex;
}
}
if (index == null) {
index = new MirroredIndex(this, indexName);
indices.put(indexName, new WeakReference<Index>(index));
}
return index;
}
/**
* Get the path to directory where the local data is stored.
*/
public @NonNull File getRootDataDir()
{
return rootDataDir;
}
/**
* Get the path to the temporary directory used by this client.
*
* @return The path to the temporary directory.
*/
protected @NonNull File getTempDir() {
return context.getCacheDir();
}
/**
* Enable the offline mode.
* @param licenseData License for Algolia's SDK.
*/
public void enableOfflineMode(@NonNull String licenseData) {
// Init the SDK.
Sdk.getInstance().init(context, licenseData);
// TODO: Report any error.
}
/**
* Get the default data directory.
* This is an "algolia" subdirectory inside the application's files directory.
*
* @return The default data directory.
*/
public File getDefaultDataDir() {
return new File(context.getFilesDir(), "algolia");
}
}
| Remove superfluous type checking in `OfflineClient.getIndex()` | algoliasearch/src/offline/java/com/algolia/search/saas/OfflineClient.java | Remove superfluous type checking in `OfflineClient.getIndex()` | <ide><path>lgoliasearch/src/offline/java/com/algolia/search/saas/OfflineClient.java
<ide> MirroredIndex index = null;
<ide> WeakReference<Index> existingIndex = indices.get(indexName);
<ide> if (existingIndex != null) {
<del> Index anIndex = existingIndex.get();
<del> if (anIndex != null && !(anIndex instanceof MirroredIndex)) {
<del> throw new IllegalStateException("An index with the same name but a different type has already been created");
<del> } else {
<del> index = (MirroredIndex)anIndex;
<del> }
<add> index = (MirroredIndex)existingIndex.get();
<ide> }
<ide> if (index == null) {
<ide> index = new MirroredIndex(this, indexName); |
|
Java | apache-2.0 | 3f8cc0a82af133f232523f153052f292de48d9a8 | 0 | neo4j-contrib/docker-neo4j,neo4j-contrib/docker-neo4j | src/test/java/com/neo4j/docker/TestDumpLoad.java | package com.neo4j.docker;
import com.neo4j.docker.utils.DatabaseIO;
import com.neo4j.docker.utils.HostFileSystemOperations;
import com.neo4j.docker.utils.SetContainerUser;
import com.neo4j.docker.utils.TestSettings;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.containers.wait.strategy.Wait;
import java.io.IOException;
import java.nio.file.Path;
import java.time.Duration;
@Disabled
public class TestDumpLoad
{
private static final Logger log = LoggerFactory.getLogger( TestDumpLoad.class );
private GenericContainer createContainer( )
{
GenericContainer container = new GenericContainer( TestSettings.IMAGE_ID );
container.withEnv( "NEO4J_ACCEPT_LICENSE_AGREEMENT", "yes" )
.withEnv( "NEO4J_AUTH", "none" )
.withExposedPorts( 7474, 7687 )
.withLogConsumer( new Slf4jLogConsumer( log ) )
.waitingFor( Wait.forHttp( "/" )
.forPort( 7474 )
.forStatusCode( 200 )
.withStartupTimeout( Duration.ofSeconds( 90 ) ) );
return container;
}
@Test
void dumpCompletes() throws IOException, InterruptedException
{
Path dataDir, dumpDir, logDir;
Path testOutputFolder = HostFileSystemOperations.createTempFolder( "dumpCompletes-" );
try(GenericContainer container = createContainer())
{
log.info( "creating a populated database to back up" );
dataDir = HostFileSystemOperations.createTempFolderAndMountAsVolume(
container,
testOutputFolder,
"data-",
"/data" );
logDir = HostFileSystemOperations.createTempFolderAndMountAsVolume(
container,
testOutputFolder,
"logs-",
"/logs"
);
SetContainerUser.nonRootUser( container );
container.start();
DatabaseIO db = new DatabaseIO( container);
db.putInitialDataIntoContainer( "","" );
}
// at this point, because we exited the try, the container should have closed and neo4j should be shut down.
// However, it looks like the dump command fails because the database isn't shutdown properly.
// This works when I run the docker stop command from a script but not here.
log.info( "database created, Neo4j stopped" );
try(GenericContainer container = createContainer())
{
log.info( "Doing database dump" );
//start container and call neo4j-admin instead of default command
HostFileSystemOperations.mountHostFolderAsVolume(
container,
dataDir,
"/data");
HostFileSystemOperations.mountHostFolderAsVolume(
container,
logDir,
"/logs");
dumpDir = HostFileSystemOperations.createTempFolderAndMountAsVolume(
container,
testOutputFolder,
"dump-",
"/dump"
);
// if we don't set the user, then neo4j-admin will fail because of write permissions on the destination folder.
SetContainerUser.nonRootUser( container );
container.withCommand( "neo4j-admin", "dump", "--to=/dump", "--verbose" );
container.start();
}
// do some stuff to load dumpfile back into a database
// neo4j-admin load --from=/dump/neo4j.dump --database=neo4j
}
}
| removed dump and load test
So I can merge host file system refactors.
| src/test/java/com/neo4j/docker/TestDumpLoad.java | removed dump and load test So I can merge host file system refactors. | <ide><path>rc/test/java/com/neo4j/docker/TestDumpLoad.java
<del>package com.neo4j.docker;
<del>
<del>import com.neo4j.docker.utils.DatabaseIO;
<del>import com.neo4j.docker.utils.HostFileSystemOperations;
<del>import com.neo4j.docker.utils.SetContainerUser;
<del>import com.neo4j.docker.utils.TestSettings;
<del>import org.junit.jupiter.api.Disabled;
<del>import org.junit.jupiter.api.Test;
<del>import org.slf4j.Logger;
<del>import org.slf4j.LoggerFactory;
<del>import org.testcontainers.containers.GenericContainer;
<del>import org.testcontainers.containers.output.Slf4jLogConsumer;
<del>import org.testcontainers.containers.wait.strategy.Wait;
<del>
<del>import java.io.IOException;
<del>import java.nio.file.Path;
<del>import java.time.Duration;
<del>
<del>@Disabled
<del>public class TestDumpLoad
<del>{
<del> private static final Logger log = LoggerFactory.getLogger( TestDumpLoad.class );
<del>
<del> private GenericContainer createContainer( )
<del> {
<del> GenericContainer container = new GenericContainer( TestSettings.IMAGE_ID );
<del> container.withEnv( "NEO4J_ACCEPT_LICENSE_AGREEMENT", "yes" )
<del> .withEnv( "NEO4J_AUTH", "none" )
<del> .withExposedPorts( 7474, 7687 )
<del> .withLogConsumer( new Slf4jLogConsumer( log ) )
<del> .waitingFor( Wait.forHttp( "/" )
<del> .forPort( 7474 )
<del> .forStatusCode( 200 )
<del> .withStartupTimeout( Duration.ofSeconds( 90 ) ) );
<del> return container;
<del> }
<del>
<del> @Test
<del> void dumpCompletes() throws IOException, InterruptedException
<del> {
<del> Path dataDir, dumpDir, logDir;
<del> Path testOutputFolder = HostFileSystemOperations.createTempFolder( "dumpCompletes-" );
<del>
<del> try(GenericContainer container = createContainer())
<del> {
<del> log.info( "creating a populated database to back up" );
<del> dataDir = HostFileSystemOperations.createTempFolderAndMountAsVolume(
<del> container,
<del> testOutputFolder,
<del> "data-",
<del> "/data" );
<del> logDir = HostFileSystemOperations.createTempFolderAndMountAsVolume(
<del> container,
<del> testOutputFolder,
<del> "logs-",
<del> "/logs"
<del> );
<del> SetContainerUser.nonRootUser( container );
<del> container.start();
<del> DatabaseIO db = new DatabaseIO( container);
<del> db.putInitialDataIntoContainer( "","" );
<del> }
<del> // at this point, because we exited the try, the container should have closed and neo4j should be shut down.
<del> // However, it looks like the dump command fails because the database isn't shutdown properly.
<del> // This works when I run the docker stop command from a script but not here.
<del>
<del> log.info( "database created, Neo4j stopped" );
<del>
<del> try(GenericContainer container = createContainer())
<del> {
<del> log.info( "Doing database dump" );
<del> //start container and call neo4j-admin instead of default command
<del> HostFileSystemOperations.mountHostFolderAsVolume(
<del> container,
<del> dataDir,
<del> "/data");
<del> HostFileSystemOperations.mountHostFolderAsVolume(
<del> container,
<del> logDir,
<del> "/logs");
<del> dumpDir = HostFileSystemOperations.createTempFolderAndMountAsVolume(
<del> container,
<del> testOutputFolder,
<del> "dump-",
<del> "/dump"
<del> );
<del> // if we don't set the user, then neo4j-admin will fail because of write permissions on the destination folder.
<del> SetContainerUser.nonRootUser( container );
<del> container.withCommand( "neo4j-admin", "dump", "--to=/dump", "--verbose" );
<del> container.start();
<del> }
<del>
<del> // do some stuff to load dumpfile back into a database
<del> // neo4j-admin load --from=/dump/neo4j.dump --database=neo4j
<del> }
<del>} |
||
Java | apache-2.0 | d9d0982be74141efea3c0b81e50afd83dc00a5b1 | 0 | udacity/android-web-api-sample | package demo.example.com.spotifysample;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import java.util.List;
import kaaes.spotify.webapi.android.SpotifyApi;
import kaaes.spotify.webapi.android.SpotifyService;
import kaaes.spotify.webapi.android.models.Artist;
import kaaes.spotify.webapi.android.models.ArtistsPager;
public class MainActivity extends ActionBarActivity {
private static final String LOG_TAG = MainActivity.class.getSimpleName();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
SearchSpotifyTask task = new SearchSpotifyTask();
task.execute();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
public class SearchSpotifyTask extends AsyncTask<Void, Void, Void>
{
@Override
protected Void doInBackground(Void... strings) {
SpotifyApi api = new SpotifyApi();
SpotifyService service = api.getService();
ArtistsPager results = service.searchArtists("Paul");
List<Artist> artists = results.artists.items;
for (int i = 0; i < artists.size(); i++) {
Artist artist = artists.get(i);
Log.i(LOG_TAG, i + " " + artist.name);
}
return null;
}
}
}
| app/src/main/java/demo/example/com/spotifysample/MainActivity.java | package demo.example.com.spotifysample;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
public class MainActivity extends ActionBarActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
| feat: Add Spotify wrapper call in AsyncTask.
| app/src/main/java/demo/example/com/spotifysample/MainActivity.java | feat: Add Spotify wrapper call in AsyncTask. | <ide><path>pp/src/main/java/demo/example/com/spotifysample/MainActivity.java
<ide> package demo.example.com.spotifysample;
<ide>
<add>import android.os.AsyncTask;
<add>import android.os.Bundle;
<ide> import android.support.v7.app.ActionBarActivity;
<del>import android.os.Bundle;
<add>import android.util.Log;
<ide> import android.view.Menu;
<ide> import android.view.MenuItem;
<ide>
<add>import java.util.List;
<add>
<add>import kaaes.spotify.webapi.android.SpotifyApi;
<add>import kaaes.spotify.webapi.android.SpotifyService;
<add>import kaaes.spotify.webapi.android.models.Artist;
<add>import kaaes.spotify.webapi.android.models.ArtistsPager;
<add>
<ide>
<ide> public class MainActivity extends ActionBarActivity {
<add>
<add> private static final String LOG_TAG = MainActivity.class.getSimpleName();
<ide>
<ide> @Override
<ide> protected void onCreate(Bundle savedInstanceState) {
<ide> super.onCreate(savedInstanceState);
<ide> setContentView(R.layout.activity_main);
<add>
<add> SearchSpotifyTask task = new SearchSpotifyTask();
<add> task.execute();
<ide> }
<ide>
<ide> @Override
<ide>
<ide> return super.onOptionsItemSelected(item);
<ide> }
<add>
<add> public class SearchSpotifyTask extends AsyncTask<Void, Void, Void>
<add> {
<add> @Override
<add> protected Void doInBackground(Void... strings) {
<add> SpotifyApi api = new SpotifyApi();
<add> SpotifyService service = api.getService();
<add>
<add> ArtistsPager results = service.searchArtists("Paul");
<add> List<Artist> artists = results.artists.items;
<add> for (int i = 0; i < artists.size(); i++) {
<add> Artist artist = artists.get(i);
<add> Log.i(LOG_TAG, i + " " + artist.name);
<add> }
<add> return null;
<add> }
<add> }
<ide> } |
|
Java | apache-2.0 | 3b3acffa5b2feb149b2ad6766e789e8996a2dc04 | 0 | marubinotto/Piggydb,marubinotto/Piggydb,vincentschut/Piggydb,marubinotto/Piggydb,vincentschut/Piggydb,vincentschut/Piggydb,vincentschut/Piggydb,marubinotto/Piggydb | package marubinotto.piggydb.ui.page.partial;
import static marubinotto.util.CollectionUtils.list;
import static org.apache.commons.lang.StringUtils.isBlank;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import marubinotto.piggydb.model.Classification;
import marubinotto.piggydb.model.Filter;
import marubinotto.piggydb.model.Fragment;
import marubinotto.piggydb.model.Tag;
import marubinotto.piggydb.model.query.FragmentsQuery;
import marubinotto.piggydb.model.query.FragmentsSortOption;
import marubinotto.piggydb.ui.wiki.DefaultWikiParser;
import marubinotto.piggydb.util.PiggydbUtils;
import marubinotto.util.RegexUtils;
import marubinotto.util.paging.Page;
import marubinotto.util.paging.PageUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
public abstract class AbstractFragments extends AbstractPartial {
//
// Input
//
public String viewId;
public Integer scale; // 0 - 1000
public Integer orderBy;
public Boolean ascending;
public Boolean shuffle;
private FragmentsSortOption sortOption;
public int pi = 0;
public static final String SK_SCALE = "fragmentsViewScale";
public static final String SK_ORDERBY = "fragmentsViewOrderBy";
public static final String SK_ASCENDING = "fragmentsViewAscending";
public String query;
public String tagsToInclude;
public String tagsToExclude;
@Override
public void onInit() {
super.onInit();
// restore the session values
if (this.scale == null) {
this.scale = (Integer)ObjectUtils.defaultIfNull(
getContext().getSessionAttribute(SK_SCALE),
getWarSetting().getDefaultFragmentsViewScale());
}
if (this.orderBy == null) {
this.orderBy = (Integer)getContext().getSessionAttribute(SK_ORDERBY);
}
if (this.ascending == null) {
this.ascending = (Boolean)getContext().getSessionAttribute(SK_ASCENDING);
}
// create a sortOption
this.sortOption = new FragmentsSortOption(this.orderBy, this.ascending);
if (this.shuffle != null) {
this.sortOption.shuffle = this.shuffle;
}
if (isNotBlank(this.query)) {
this.query = modifyIfGarbledByTomcat(this.query);
}
}
//
// Model
//
public FragmentsView view;
public String label;
public boolean hideHeader = false;
public Page<Fragment> fragments;
public Classification contextTags;
public boolean firstSet = true;
public boolean lastSet = false;
public String keywordRegex;
public Filter filter;
@Override
protected void setModels() throws Exception {
super.setModels();
this.view = new FragmentsView(this.viewId);
this.view.setScale(this.scale);
setSelectedFragments();
checkFragmentRef();
this.filter = createFilter();
if (this.fragments == null) setFragmentsByFilter();
if (this.fragments == null) setFragments();
if (this.fragments != null) {
this.firstSet = (this.pi == 0);
this.lastSet = this.fragments.isLastPage();
}
saveStateToSession();
}
private void checkFragmentRef() throws Exception {
if (isBlank(this.query)) return;
// query == "#<number>"
if (this.query.matches(DefaultWikiParser.PS_FRAGMENT_REF)) {
long id = Long.parseLong(this.query.substring(1));
Fragment fragment = getDomain().getFragmentRepository().get(id);
this.fragments = fragment != null ?
PageUtils.getPage(list(fragment), this.view.getPageSize(), this.pi) :
emptyFragments();
this.label = this.query;
}
}
private Page<Fragment> emptyFragments() {
return PageUtils.<Fragment>empty(this.view.getPageSize());
}
protected Filter createFilter() throws Exception {
return null;
}
private void setFragmentsByFilter() throws Exception {
if (this.filter == null) return;
// add tags to include
if (isNotBlank(this.tagsToInclude)) {
for (String tagName : StringUtils.split(this.tagsToInclude, ',')) {
Tag tag = getTagByName(tagName);
if (tag == null) {
this.fragments = emptyFragments();
}
this.filter.addIncludeByUser(tag, getUser());
}
}
// add tag to exclude
if (isNotBlank(this.tagsToExclude)) {
for (String tagName : StringUtils.split(this.tagsToExclude, ',')) {
Tag tag = getTagByName(tagName);
if (tag != null) {
this.filter.addExcludeByUser(tag, getUser());
}
}
}
// query
if (this.fragments == null) {
marubinotto.piggydb.model.query.FragmentsByFilter query =
(marubinotto.piggydb.model.query.FragmentsByFilter)getQuery(
marubinotto.piggydb.model.query.FragmentsByFilter.class);
query.setFilter(this.filter);
if (isNotBlank(this.query)) {
query.setKeywords(this.query);
setKeywordRegex(this.query);
appendKeywordSearchLabel();
}
this.fragments = getPage(query);
}
this.contextTags = this.filter.getIncludes();
}
private Tag getTagByName(String name) throws Exception {
return isNotBlank(name) ? getDomain().getTagRepository().getByName(name.trim()) : null;
}
protected void setKeywordRegex(String keywords) {
StringBuilder keywordRegex = new StringBuilder();
for (String word : PiggydbUtils.splitToKeywords(keywords)) {
if (keywordRegex.length() > 0) keywordRegex.append("|");
word = StringEscapeUtils.escapeJavaScript(word);
word = RegexUtils.escapeRegex(word);
keywordRegex.append(word);
}
this.keywordRegex = "(" + keywordRegex.toString() + ")";
}
protected FragmentsQuery getQuery(Class<? extends FragmentsQuery> queryClass)
throws Exception {
FragmentsQuery query = (FragmentsQuery)
getDomain().getFragmentRepository().getQuery(queryClass);
query.setSortOption(this.sortOption);
query.setEagerFetching(this.view.needsEagerFetching());
query.setEagerFetchingMore(this.view.needsEagerFetchingMore());
return query;
}
protected Page<Fragment> getPage(FragmentsQuery query) throws Exception {
return query.getPage(this.view.getPageSize(), this.pi);
}
protected void setFragments() throws Exception {
}
private void saveStateToSession() {
if (this.scale != null) getContext().setSessionAttribute(SK_SCALE, this.scale);
if (this.orderBy != null) getContext().setSessionAttribute(SK_ORDERBY, this.orderBy);
if (this.ascending != null) getContext().setSessionAttribute(SK_ASCENDING, this.ascending);
}
protected static String makeKeywordSearchLabel(String keywords) {
String label = "<span class=\"search-icon-mini\"> </span> ";
for (String keyword : PiggydbUtils.splitToKeywords(keywords)) {
label += "\"" + keyword + "\" ";
}
return label.trim();
}
protected void appendKeywordSearchLabel() {
if (isNotBlank(this.query)) {
this.label += " + ";
this.label += makeKeywordSearchLabel(this.query);
}
}
}
| src/main/java/marubinotto/piggydb/ui/page/partial/AbstractFragments.java | package marubinotto.piggydb.ui.page.partial;
import static marubinotto.util.CollectionUtils.list;
import static org.apache.commons.lang.StringUtils.isBlank;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import marubinotto.piggydb.model.Classification;
import marubinotto.piggydb.model.Filter;
import marubinotto.piggydb.model.Fragment;
import marubinotto.piggydb.model.query.FragmentsQuery;
import marubinotto.piggydb.model.query.FragmentsSortOption;
import marubinotto.piggydb.ui.wiki.DefaultWikiParser;
import marubinotto.piggydb.util.PiggydbUtils;
import marubinotto.util.RegexUtils;
import marubinotto.util.paging.Page;
import marubinotto.util.paging.PageUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringEscapeUtils;
public abstract class AbstractFragments extends AbstractPartial {
//
// Input
//
public String viewId;
public Integer scale; // 0 - 1000
public Integer orderBy;
public Boolean ascending;
public Boolean shuffle;
private FragmentsSortOption sortOption;
public int pi = 0;
public static final String SK_SCALE = "fragmentsViewScale";
public static final String SK_ORDERBY = "fragmentsViewOrderBy";
public static final String SK_ASCENDING = "fragmentsViewAscending";
public String query;
public String tagsToInclude;
public String tagsToExclude;
@Override
public void onInit() {
super.onInit();
// restore the session values
if (this.scale == null) {
this.scale = (Integer)ObjectUtils.defaultIfNull(
getContext().getSessionAttribute(SK_SCALE),
getWarSetting().getDefaultFragmentsViewScale());
}
if (this.orderBy == null) {
this.orderBy = (Integer)getContext().getSessionAttribute(SK_ORDERBY);
}
if (this.ascending == null) {
this.ascending = (Boolean)getContext().getSessionAttribute(SK_ASCENDING);
}
// create a sortOption
this.sortOption = new FragmentsSortOption(this.orderBy, this.ascending);
if (this.shuffle != null) {
this.sortOption.shuffle = this.shuffle;
}
if (isNotBlank(this.query)) {
this.query = modifyIfGarbledByTomcat(this.query);
}
}
//
// Model
//
public FragmentsView view;
public String label;
public boolean hideHeader = false;
public Page<Fragment> fragments;
public Classification contextTags;
public boolean firstSet = true;
public boolean lastSet = false;
public String keywordRegex;
public Filter filter;
@Override
protected void setModels() throws Exception {
super.setModels();
this.view = new FragmentsView(this.viewId);
this.view.setScale(this.scale);
setSelectedFragments();
checkFragmentRef();
this.filter = createFilter();
if (this.fragments == null) setFragmentsByFilter();
if (this.fragments == null) setFragments();
if (this.fragments != null) {
this.firstSet = (this.pi == 0);
this.lastSet = this.fragments.isLastPage();
}
saveStateToSession();
}
private void checkFragmentRef() throws Exception {
if (isBlank(this.query)) return;
// query == "#<number>"
if (this.query.matches(DefaultWikiParser.PS_FRAGMENT_REF)) {
long id = Long.parseLong(this.query.substring(1));
Fragment fragment = getDomain().getFragmentRepository().get(id);
this.fragments = fragment != null ?
PageUtils.getPage(list(fragment), this.view.getPageSize(), this.pi) :
PageUtils.<Fragment>empty(this.view.getPageSize());
this.label = this.query;
}
}
protected Filter createFilter() throws Exception {
return null;
}
private void setFragmentsByFilter() throws Exception {
if (this.filter == null) return;
marubinotto.piggydb.model.query.FragmentsByFilter query =
(marubinotto.piggydb.model.query.FragmentsByFilter)getQuery(
marubinotto.piggydb.model.query.FragmentsByFilter.class);
query.setFilter(this.filter);
if (isNotBlank(this.query)) {
query.setKeywords(this.query);
setKeywordRegex(this.query);
appendKeywordSearchLabel();
}
this.fragments = getPage(query);
this.contextTags = this.filter.getIncludes();
}
protected void setKeywordRegex(String keywords) {
StringBuilder keywordRegex = new StringBuilder();
for (String word : PiggydbUtils.splitToKeywords(keywords)) {
if (keywordRegex.length() > 0) keywordRegex.append("|");
word = StringEscapeUtils.escapeJavaScript(word);
word = RegexUtils.escapeRegex(word);
keywordRegex.append(word);
}
this.keywordRegex = "(" + keywordRegex.toString() + ")";
}
protected FragmentsQuery getQuery(Class<? extends FragmentsQuery> queryClass)
throws Exception {
FragmentsQuery query = (FragmentsQuery)
getDomain().getFragmentRepository().getQuery(queryClass);
query.setSortOption(this.sortOption);
query.setEagerFetching(this.view.needsEagerFetching());
query.setEagerFetchingMore(this.view.needsEagerFetchingMore());
return query;
}
protected Page<Fragment> getPage(FragmentsQuery query) throws Exception {
return query.getPage(this.view.getPageSize(), this.pi);
}
protected void setFragments() throws Exception {
}
private void saveStateToSession() {
if (this.scale != null) getContext().setSessionAttribute(SK_SCALE, this.scale);
if (this.orderBy != null) getContext().setSessionAttribute(SK_ORDERBY, this.orderBy);
if (this.ascending != null) getContext().setSessionAttribute(SK_ASCENDING, this.ascending);
}
protected static String makeKeywordSearchLabel(String keywords) {
String label = "<span class=\"search-icon-mini\"> </span> ";
for (String keyword : PiggydbUtils.splitToKeywords(keywords)) {
label += "\"" + keyword + "\" ";
}
return label.trim();
}
protected void appendKeywordSearchLabel() {
if (isNotBlank(this.query)) {
this.label += " + ";
this.label += makeKeywordSearchLabel(this.query);
}
}
}
| added logic to add tags to include and exclude to
marubinotto.piggydb.ui.page.partial.AbstractFragments.setFragmentsByFilter() | src/main/java/marubinotto/piggydb/ui/page/partial/AbstractFragments.java | added logic to add tags to include and exclude to marubinotto.piggydb.ui.page.partial.AbstractFragments.setFragmentsByFilter() | <ide><path>rc/main/java/marubinotto/piggydb/ui/page/partial/AbstractFragments.java
<ide> import marubinotto.piggydb.model.Classification;
<ide> import marubinotto.piggydb.model.Filter;
<ide> import marubinotto.piggydb.model.Fragment;
<add>import marubinotto.piggydb.model.Tag;
<ide> import marubinotto.piggydb.model.query.FragmentsQuery;
<ide> import marubinotto.piggydb.model.query.FragmentsSortOption;
<ide> import marubinotto.piggydb.ui.wiki.DefaultWikiParser;
<ide>
<ide> import org.apache.commons.lang.ObjectUtils;
<ide> import org.apache.commons.lang.StringEscapeUtils;
<add>import org.apache.commons.lang.StringUtils;
<ide>
<ide> public abstract class AbstractFragments extends AbstractPartial {
<ide>
<ide> Fragment fragment = getDomain().getFragmentRepository().get(id);
<ide> this.fragments = fragment != null ?
<ide> PageUtils.getPage(list(fragment), this.view.getPageSize(), this.pi) :
<del> PageUtils.<Fragment>empty(this.view.getPageSize());
<add> emptyFragments();
<ide> this.label = this.query;
<ide> }
<add> }
<add>
<add> private Page<Fragment> emptyFragments() {
<add> return PageUtils.<Fragment>empty(this.view.getPageSize());
<ide> }
<ide>
<ide> protected Filter createFilter() throws Exception {
<ide> private void setFragmentsByFilter() throws Exception {
<ide> if (this.filter == null) return;
<ide>
<del> marubinotto.piggydb.model.query.FragmentsByFilter query =
<del> (marubinotto.piggydb.model.query.FragmentsByFilter)getQuery(
<del> marubinotto.piggydb.model.query.FragmentsByFilter.class);
<del> query.setFilter(this.filter);
<del> if (isNotBlank(this.query)) {
<del> query.setKeywords(this.query);
<del> setKeywordRegex(this.query);
<del> appendKeywordSearchLabel();
<del> }
<del> this.fragments = getPage(query);
<add> // add tags to include
<add> if (isNotBlank(this.tagsToInclude)) {
<add> for (String tagName : StringUtils.split(this.tagsToInclude, ',')) {
<add> Tag tag = getTagByName(tagName);
<add> if (tag == null) {
<add> this.fragments = emptyFragments();
<add> }
<add> this.filter.addIncludeByUser(tag, getUser());
<add> }
<add> }
<add>
<add> // add tag to exclude
<add> if (isNotBlank(this.tagsToExclude)) {
<add> for (String tagName : StringUtils.split(this.tagsToExclude, ',')) {
<add> Tag tag = getTagByName(tagName);
<add> if (tag != null) {
<add> this.filter.addExcludeByUser(tag, getUser());
<add> }
<add> }
<add> }
<add>
<add> // query
<add> if (this.fragments == null) {
<add> marubinotto.piggydb.model.query.FragmentsByFilter query =
<add> (marubinotto.piggydb.model.query.FragmentsByFilter)getQuery(
<add> marubinotto.piggydb.model.query.FragmentsByFilter.class);
<add> query.setFilter(this.filter);
<add> if (isNotBlank(this.query)) {
<add> query.setKeywords(this.query);
<add> setKeywordRegex(this.query);
<add> appendKeywordSearchLabel();
<add> }
<add> this.fragments = getPage(query);
<add> }
<ide>
<ide> this.contextTags = this.filter.getIncludes();
<add> }
<add>
<add> private Tag getTagByName(String name) throws Exception {
<add> return isNotBlank(name) ? getDomain().getTagRepository().getByName(name.trim()) : null;
<ide> }
<ide>
<ide> protected void setKeywordRegex(String keywords) { |
|
Java | bsd-3-clause | 5d70057eb9362a9f30de183fe34d3539a736f541 | 0 | flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij | /*
* Copyright 2019 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
package io.flutter.logging;
import com.google.common.annotations.VisibleForTesting;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonSyntaxException;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.ex.EditorSettingsExternalizable;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapAppliancePlaces;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.concurrency.QueueProcessor;
import io.flutter.FlutterInitializer;
import io.flutter.inspector.DiagnosticLevel;
import io.flutter.inspector.DiagnosticsNode;
import io.flutter.inspector.DiagnosticsTreeStyle;
import io.flutter.inspector.InspectorService;
import io.flutter.run.daemon.FlutterApp;
import io.flutter.settings.FlutterSettings;
import io.flutter.utils.JsonUtils;
import io.flutter.vmService.VmServiceConsumers;
import org.dartlang.vm.service.VmService;
import org.dartlang.vm.service.consumer.GetObjectConsumer;
import org.dartlang.vm.service.element.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* Handle displaying dart:developer log messages and Flutter.Error messages in the Run and Debug
* console.
*/
public class FlutterConsoleLogManager {
private static final Logger LOG = Logger.getInstance(FlutterConsoleLogManager.class);
private static final String consolePreferencesSetKey = "io.flutter.console.preferencesSet";
private static final ConsoleViewContentType TITLE_CONTENT_TYPE =
new ConsoleViewContentType("title", SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES.toTextAttributes());
private static final ConsoleViewContentType NORMAL_CONTENT_TYPE = ConsoleViewContentType.NORMAL_OUTPUT;
private static final ConsoleViewContentType SUBTLE_CONTENT_TYPE =
new ConsoleViewContentType("subtle", SimpleTextAttributes.GRAY_ATTRIBUTES.toTextAttributes());
private static final ConsoleViewContentType ERROR_CONTENT_TYPE = ConsoleViewContentType.ERROR_OUTPUT;
final private CompletableFuture<InspectorService.ObjectGroup> objectGroup;
private static QueueProcessor<Runnable> queue;
/**
* Set our preferred settings for the run console.
*/
public static void initConsolePreferences() {
final PropertiesComponent properties = PropertiesComponent.getInstance();
if (!properties.getBoolean(consolePreferencesSetKey)) {
properties.setValue(consolePreferencesSetKey, true);
// Set our preferred default settings for console text wrapping.
final EditorSettingsExternalizable editorSettings = EditorSettingsExternalizable.getInstance();
editorSettings.setUseSoftWraps(true, SoftWrapAppliancePlaces.CONSOLE);
}
}
@NotNull final VmService service;
@NotNull final ConsoleView console;
@NotNull final FlutterApp app;
private int frameErrorCount = 0;
public FlutterConsoleLogManager(@NotNull ConsoleView console, @NotNull FlutterApp app) {
this.console = console;
this.app = app;
assert (app.getVmService() != null);
this.service = app.getVmService();
app.addStateListener(new FlutterApp.FlutterAppListener() {
@Override
public void notifyFrameRendered() {
frameErrorCount = 0;
}
@Override
public void stateChanged(FlutterApp.State newState) {
frameErrorCount = 0;
}
@Override
public void notifyAppReloaded() {
frameErrorCount = 0;
}
@Override
public void notifyAppRestarted() {
frameErrorCount = 0;
}
});
assert (app.getFlutterDebugProcess() != null);
objectGroup = InspectorService.createGroup(app, app.getFlutterDebugProcess(), app.getVmService(), "console-group");
objectGroup.whenCompleteAsync((group, error) -> {
if (group != null) {
Disposer.register(app, group.getInspectorService());
}
});
if (queue == null) {
queue = QueueProcessor.createRunnableQueueProcessor();
}
}
public void handleFlutterErrorEvent(@NotNull Event event) {
try {
final ExtensionData extensionData = event.getExtensionData();
final JsonObject jsonObject = extensionData.getJson().getAsJsonObject();
final DiagnosticsNode diagnosticsNode = new DiagnosticsNode(jsonObject, objectGroup, app, false, null);
// Send analytics for the diagnosticsNode.
if (isFirstErrorForFrame()) {
final String errorId = FlutterErrorHelper.getAnalyticsId(diagnosticsNode);
if (errorId != null) {
FlutterInitializer.getAnalytics().sendEvent("flutter-error", errorId);
}
}
if (FlutterSettings.getInstance().isShowStructuredErrors()) {
queue.add(() -> {
try {
processFlutterErrorEvent(diagnosticsNode);
}
catch (Throwable t) {
LOG.warn(t);
}
});
}
}
catch (Throwable t) {
LOG.warn(t);
}
}
private static final int errorSeparatorLength = 100;
private static final String errorSeparatorChar = "═";
private static final ArrayList<DiagnosticsNode> emptyList = new ArrayList<>();
/**
* Pretty print the error using the available console syling attributes.
*/
private void processFlutterErrorEvent(@NotNull DiagnosticsNode diagnosticsNode) {
final String description = " " + diagnosticsNode.toString() + " ";
frameErrorCount++;
final boolean terseError = !isFirstErrorForFrame();
final String prefix = "════════";
final String suffix = "══";
console.print("\n" + prefix, TITLE_CONTENT_TYPE);
console.print(description, NORMAL_CONTENT_TYPE);
console.print(
StringUtil.repeat(errorSeparatorChar, Math.max(
errorSeparatorLength - prefix.length() - description.length() - suffix.length(), 0)),
TITLE_CONTENT_TYPE);
console.print(suffix + "\n", TITLE_CONTENT_TYPE);
// TODO(devoncarew): Create a hyperlink to a widget - ala 'widget://inspector-1347'.
if (terseError) {
for (DiagnosticsNode property : diagnosticsNode.getInlineProperties()) {
printTerseNodeProperty(console, "", property);
}
}
else {
DiagnosticLevel lastLevel = null;
for (DiagnosticsNode property : diagnosticsNode.getInlineProperties()) {
// Add blank line between hint and non-hint properties.
if (lastLevel != property.getLevel()) {
if (lastLevel == DiagnosticLevel.hint || property.getLevel() == DiagnosticLevel.hint) {
console.print("\n", NORMAL_CONTENT_TYPE);
}
}
lastLevel = property.getLevel();
printDiagnosticsNodeProperty(console, "", property, null, false);
}
}
console.print(StringUtil.repeat(errorSeparatorChar, errorSeparatorLength) + "\n", TITLE_CONTENT_TYPE);
}
private boolean isFirstErrorForFrame() {
return frameErrorCount == 0;
}
private void printTerseNodeProperty(ConsoleView console, String indent, DiagnosticsNode property) {
boolean skip = true;
if (property.getLevel() == DiagnosticLevel.summary) {
skip = false;
}
else if (property.hasChildren()) {
final CompletableFuture<ArrayList<DiagnosticsNode>> future = property.getChildren();
final ArrayList<DiagnosticsNode> children = future.getNow(emptyList);
if (children.stream().noneMatch(DiagnosticsNode::hasChildren)) {
skip = false;
}
}
if (skip) {
return;
}
final ConsoleViewContentType contentType = getContentTypeFor(property.getLevel());
console.print(indent, contentType);
if (property.getShowName()) {
console.print(property.getName(), contentType);
if (property.getShowSeparator()) {
console.print(property.getSeparator() + " ", contentType);
}
}
final String description = property.getDescription() == null ? "" : property.getDescription();
console.print(description + "\n", contentType);
final String childIndent = getChildIndent(indent, property);
if (property.hasInlineProperties()) {
for (DiagnosticsNode childProperty : property.getInlineProperties()) {
printDiagnosticsNodeProperty(console, childIndent, childProperty, contentType, false);
}
}
if (property.hasChildren()) {
final CompletableFuture<ArrayList<DiagnosticsNode>> future = property.getChildren();
final ArrayList<DiagnosticsNode> children = future.getNow(emptyList);
for (DiagnosticsNode child : children) {
printDiagnosticsNodeProperty(console, childIndent, child, contentType, false);
}
}
}
private void printDiagnosticsNodeProperty(ConsoleView console, String indent, DiagnosticsNode property,
ConsoleViewContentType contentType,
boolean isInChild) {
// TODO(devoncarew): Change the error message display in the framework.
if (property.getDescription() != null && property.getLevel() == DiagnosticLevel.info) {
// Elide framework blank styling lines.
if (StringUtil.equals("ErrorSpacer", property.getType())) {
return;
}
}
if (contentType == null) {
contentType = getContentTypeFor(property.getLevel());
}
console.print(indent, contentType);
if (property.getShowName()) {
final String name = property.getName();
console.print(name == null ? "" : name, contentType);
if (property.getShowSeparator()) {
console.print(property.getSeparator() + " ", contentType);
}
}
final String description = property.getDescription() == null ? "" : property.getDescription();
console.print(description + "\n", contentType);
if (property.hasInlineProperties()) {
String childIndent = getChildIndent(indent, property);
if (property.getStyle() == DiagnosticsTreeStyle.shallow && !indent.startsWith("...")) {
// Render properties of shallow nodes as collapesed.
childIndent = "... " + indent;
}
for (DiagnosticsNode childProperty : property.getInlineProperties()) {
printDiagnosticsNodeProperty(console, childIndent, childProperty, contentType, isInChild);
}
}
if (property.hasChildren()) {
final CompletableFuture<ArrayList<DiagnosticsNode>> future = property.getChildren();
final ArrayList<DiagnosticsNode> children = future.getNow(emptyList);
// Don't collapse children if it's just a flat list of children.
if (!isInChild && children.stream().noneMatch(DiagnosticsNode::hasChildren)) {
final String childIndent = getChildIndent(indent, property);
for (DiagnosticsNode child : children) {
printDiagnosticsNodeProperty(console, childIndent, child, contentType, false);
}
}
else {
if (property.getStyle() != DiagnosticsTreeStyle.shallow) {
// For deep trees, we show the text as collapsed.
final String childIndent = isInChild ? getChildIndent(indent, property) : "... " + indent;
for (DiagnosticsNode child : children) {
printDiagnosticsNodeProperty(console, childIndent, child, contentType, true);
}
}
}
}
// Print an extra line after the summary.
if (property.getLevel() == DiagnosticLevel.summary) {
console.print("\n", contentType);
}
}
private String getChildIndent(String indent, DiagnosticsNode property) {
if (property.getStyle() == DiagnosticsTreeStyle.flat) {
return indent;
}
else {
return indent + " ";
}
}
public void handleLoggingEvent(@NotNull Event event) {
queue.add(() -> {
try {
processLoggingEvent(event);
}
catch (Throwable t) {
LOG.warn(t);
}
});
}
private ConsoleViewContentType getContentTypeFor(DiagnosticLevel level) {
switch (level) {
case error:
case summary:
return ERROR_CONTENT_TYPE;
case hint:
return NORMAL_CONTENT_TYPE;
default:
return SUBTLE_CONTENT_TYPE;
}
}
@VisibleForTesting
public void processLoggingEvent(@NotNull Event event) {
final LogRecord logRecord = event.getLogRecord();
if (logRecord == null) return;
final IsolateRef isolateRef = event.getIsolate();
final InstanceRef message = logRecord.getMessage();
@NotNull final InstanceRef loggerName = logRecord.getLoggerName();
final String name = loggerName.getValueAsString().isEmpty() ? "log" : loggerName.getValueAsString();
final String prefix = "[" + name + "] ";
final String messageStr = getFullStringValue(service, isolateRef.getId(), message);
console.print(prefix, SUBTLE_CONTENT_TYPE);
console.print(messageStr + "\n", NORMAL_CONTENT_TYPE);
@NotNull final InstanceRef error = logRecord.getError();
@NotNull final InstanceRef stackTrace = logRecord.getStackTrace();
if (!error.isNull()) {
final String padding = StringUtil.repeat(" ", prefix.length());
if (error.getKind() == InstanceKind.String) {
String string = getFullStringValue(service, isolateRef.getId(), error);
// Handle json in the error payload.
boolean isJson = false;
try {
final JsonElement json = JsonUtils.parseString(string);
isJson = true;
string = new GsonBuilder().setPrettyPrinting().create().toJson(json);
string = string.replaceAll("\n", "\n" + padding);
}
catch (JsonSyntaxException ignored) {
}
console.print(padding + string + "\n", isJson ? ConsoleViewContentType.NORMAL_OUTPUT : ERROR_CONTENT_TYPE);
}
else {
final CountDownLatch latch = new CountDownLatch(1);
service.invoke(
isolateRef.getId(), error.getId(),
"toString", Collections.emptyList(),
true,
new VmServiceConsumers.InvokeConsumerWrapper() {
@Override
public void received(InstanceRef response) {
console.print(padding + stringValueFromStringRef(response) + "\n", ERROR_CONTENT_TYPE);
latch.countDown();
}
@Override
public void noGoodResult() {
console.print(padding + error.getClassRef().getName() + " " + error.getId() + "\n", ERROR_CONTENT_TYPE);
latch.countDown();
}
});
try {
latch.await();
}
catch (InterruptedException ignored) {
}
}
}
if (!stackTrace.isNull()) {
final String padding = StringUtil.repeat(" ", prefix.length());
final String out = stackTrace.getValueAsString() == null ? "" : stackTrace.getValueAsString().trim();
console.print(
padding + out.replaceAll("\n", "\n" + padding) + "\n", ERROR_CONTENT_TYPE);
}
}
private String stringValueFromStringRef(InstanceRef ref) {
return ref.getValueAsStringIsTruncated() ? formatTruncatedString(ref) : ref.getValueAsString();
}
private String stringValueFromStringRef(Instance instance) {
return instance.getValueAsStringIsTruncated() ? instance.getValueAsString() + "..." : instance.getValueAsString();
}
private String formatTruncatedString(InstanceRef ref) {
return ref.getValueAsString() + "...";
}
private String getFullStringValue(@NotNull VmService service, String isolateId, @Nullable InstanceRef ref) {
if (ref == null) return null;
if (!ref.getValueAsStringIsTruncated()) {
return ref.getValueAsString();
}
final CountDownLatch latch = new CountDownLatch(1);
final String[] result = new String[1];
service.getObject(isolateId, ref.getId(), 0, ref.getLength(), new GetObjectConsumer() {
@Override
public void onError(RPCError error) {
result[0] = formatTruncatedString(ref);
latch.countDown();
}
@Override
public void received(Obj response) {
if (response instanceof Instance && ((Instance)response).getKind() == InstanceKind.String) {
result[0] = stringValueFromStringRef((Instance)response);
}
else {
result[0] = formatTruncatedString(ref);
}
latch.countDown();
}
@Override
public void received(Sentinel response) {
result[0] = formatTruncatedString(ref);
latch.countDown();
}
});
try {
latch.await(1, TimeUnit.SECONDS);
}
catch (InterruptedException e) {
return null;
}
return result[0];
}
}
| src/io/flutter/logging/FlutterConsoleLogManager.java | /*
* Copyright 2019 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
package io.flutter.logging;
import com.google.common.annotations.VisibleForTesting;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonSyntaxException;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.ex.EditorSettingsExternalizable;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapAppliancePlaces;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.concurrency.QueueProcessor;
import io.flutter.FlutterInitializer;
import io.flutter.inspector.DiagnosticLevel;
import io.flutter.inspector.DiagnosticsNode;
import io.flutter.inspector.DiagnosticsTreeStyle;
import io.flutter.inspector.InspectorService;
import io.flutter.run.daemon.FlutterApp;
import io.flutter.settings.FlutterSettings;
import io.flutter.utils.JsonUtils;
import io.flutter.vmService.VmServiceConsumers;
import org.dartlang.vm.service.VmService;
import org.dartlang.vm.service.consumer.GetObjectConsumer;
import org.dartlang.vm.service.element.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* Handle displaying dart:developer log messages and Flutter.Error messages in the Run and Debug
* console.
*/
public class FlutterConsoleLogManager {
private static final Logger LOG = Logger.getInstance(FlutterConsoleLogManager.class);
private static final String consolePreferencesSetKey = "io.flutter.console.preferencesSet";
private static final ConsoleViewContentType TITLE_CONTENT_TYPE =
new ConsoleViewContentType("title", SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES.toTextAttributes());
private static final ConsoleViewContentType NORMAL_CONTENT_TYPE = ConsoleViewContentType.NORMAL_OUTPUT;
private static final ConsoleViewContentType SUBTLE_CONTENT_TYPE =
new ConsoleViewContentType("subtle", SimpleTextAttributes.GRAY_ATTRIBUTES.toTextAttributes());
private static final ConsoleViewContentType ERROR_CONTENT_TYPE = ConsoleViewContentType.ERROR_OUTPUT;
final private CompletableFuture<InspectorService.ObjectGroup> objectGroup;
private static QueueProcessor<Runnable> queue;
/**
* Set our preferred settings for the run console.
*/
public static void initConsolePreferences() {
final PropertiesComponent properties = PropertiesComponent.getInstance();
if (!properties.getBoolean(consolePreferencesSetKey)) {
properties.setValue(consolePreferencesSetKey, true);
// Set our preferred default settings for console text wrapping.
final EditorSettingsExternalizable editorSettings = EditorSettingsExternalizable.getInstance();
editorSettings.setUseSoftWraps(true, SoftWrapAppliancePlaces.CONSOLE);
}
}
@NotNull final VmService service;
@NotNull final ConsoleView console;
@NotNull final FlutterApp app;
private int frameErrorCount = 0;
public FlutterConsoleLogManager(@NotNull ConsoleView console, @NotNull FlutterApp app) {
this.console = console;
this.app = app;
assert (app.getVmService() != null);
this.service = app.getVmService();
app.addStateListener(new FlutterApp.FlutterAppListener() {
@Override
public void notifyFrameRendered() {
frameErrorCount = 0;
}
@Override
public void stateChanged(FlutterApp.State newState) {
frameErrorCount = 0;
}
@Override
public void notifyAppReloaded() {
frameErrorCount = 0;
}
@Override
public void notifyAppRestarted() {
frameErrorCount = 0;
}
});
assert (app.getFlutterDebugProcess() != null);
objectGroup = InspectorService.createGroup(app, app.getFlutterDebugProcess(), app.getVmService(), "console-group");
objectGroup.whenCompleteAsync((group, error) -> {
if (group != null) {
Disposer.register(app, group.getInspectorService());
}
});
if (queue == null) {
queue = QueueProcessor.createRunnableQueueProcessor();
}
}
public void handleFlutterErrorEvent(@NotNull Event event) {
try {
final ExtensionData extensionData = event.getExtensionData();
final JsonObject jsonObject = extensionData.getJson().getAsJsonObject();
final DiagnosticsNode diagnosticsNode = new DiagnosticsNode(jsonObject, objectGroup, app, false, null);
// Send analytics for the diagnosticsNode.
final String errorId = FlutterErrorHelper.getAnalyticsId(diagnosticsNode);
if (errorId != null) {
FlutterInitializer.getAnalytics().sendEvent("flutter-error", errorId);
}
if (FlutterSettings.getInstance().isShowStructuredErrors()) {
queue.add(() -> {
try {
processFlutterErrorEvent(diagnosticsNode);
}
catch (Throwable t) {
LOG.warn(t);
}
});
}
}
catch (Throwable t) {
LOG.warn(t);
}
}
private static final int errorSeparatorLength = 100;
private static final String errorSeparatorChar = "═";
private static final ArrayList<DiagnosticsNode> emptyList = new ArrayList<>();
/**
* Pretty print the error using the available console syling attributes.
*/
private void processFlutterErrorEvent(@NotNull DiagnosticsNode diagnosticsNode) {
final String description = " " + diagnosticsNode.toString() + " ";
frameErrorCount++;
final boolean terseError = frameErrorCount > 1;
final String prefix = "════════";
final String suffix = "══";
console.print("\n" + prefix, TITLE_CONTENT_TYPE);
console.print(description, NORMAL_CONTENT_TYPE);
console.print(
StringUtil.repeat(errorSeparatorChar, Math.max(
errorSeparatorLength - prefix.length() - description.length() - suffix.length(), 0)),
TITLE_CONTENT_TYPE);
console.print(suffix + "\n", TITLE_CONTENT_TYPE);
// TODO(devoncarew): Create a hyperlink to a widget - ala 'widget://inspector-1347'.
if (terseError) {
for (DiagnosticsNode property : diagnosticsNode.getInlineProperties()) {
printTerseNodeProperty(console, "", property);
}
}
else {
DiagnosticLevel lastLevel = null;
for (DiagnosticsNode property : diagnosticsNode.getInlineProperties()) {
// Add blank line between hint and non-hint properties.
if (lastLevel != property.getLevel()) {
if (lastLevel == DiagnosticLevel.hint || property.getLevel() == DiagnosticLevel.hint) {
console.print("\n", NORMAL_CONTENT_TYPE);
}
}
lastLevel = property.getLevel();
printDiagnosticsNodeProperty(console, "", property, null, false);
}
}
console.print(StringUtil.repeat(errorSeparatorChar, errorSeparatorLength) + "\n", TITLE_CONTENT_TYPE);
}
private void printTerseNodeProperty(ConsoleView console, String indent, DiagnosticsNode property) {
boolean skip = true;
if (property.getLevel() == DiagnosticLevel.summary) {
skip = false;
}
else if (property.hasChildren()) {
final CompletableFuture<ArrayList<DiagnosticsNode>> future = property.getChildren();
final ArrayList<DiagnosticsNode> children = future.getNow(emptyList);
if (children.stream().noneMatch(DiagnosticsNode::hasChildren)) {
skip = false;
}
}
if (skip) {
return;
}
final ConsoleViewContentType contentType = getContentTypeFor(property.getLevel());
console.print(indent, contentType);
if (property.getShowName()) {
console.print(property.getName(), contentType);
if (property.getShowSeparator()) {
console.print(property.getSeparator() + " ", contentType);
}
}
final String description = property.getDescription() == null ? "" : property.getDescription();
console.print(description + "\n", contentType);
final String childIndent = getChildIndent(indent, property);
if (property.hasInlineProperties()) {
for (DiagnosticsNode childProperty : property.getInlineProperties()) {
printDiagnosticsNodeProperty(console, childIndent, childProperty, contentType, false);
}
}
if (property.hasChildren()) {
final CompletableFuture<ArrayList<DiagnosticsNode>> future = property.getChildren();
final ArrayList<DiagnosticsNode> children = future.getNow(emptyList);
for (DiagnosticsNode child : children) {
printDiagnosticsNodeProperty(console, childIndent, child, contentType, false);
}
}
}
private void printDiagnosticsNodeProperty(ConsoleView console, String indent, DiagnosticsNode property,
ConsoleViewContentType contentType,
boolean isInChild) {
// TODO(devoncarew): Change the error message display in the framework.
if (property.getDescription() != null && property.getLevel() == DiagnosticLevel.info) {
// Elide framework blank styling lines.
if (StringUtil.equals("ErrorSpacer", property.getType())) {
return;
}
}
if (contentType == null) {
contentType = getContentTypeFor(property.getLevel());
}
console.print(indent, contentType);
if (property.getShowName()) {
final String name = property.getName();
console.print(name == null ? "" : name, contentType);
if (property.getShowSeparator()) {
console.print(property.getSeparator() + " ", contentType);
}
}
final String description = property.getDescription() == null ? "" : property.getDescription();
console.print(description + "\n", contentType);
if (property.hasInlineProperties()) {
String childIndent = getChildIndent(indent, property);
if (property.getStyle() == DiagnosticsTreeStyle.shallow && !indent.startsWith("...")) {
// Render properties of shallow nodes as collapesed.
childIndent = "... " + indent;
}
for (DiagnosticsNode childProperty : property.getInlineProperties()) {
printDiagnosticsNodeProperty(console, childIndent, childProperty, contentType, isInChild);
}
}
if (property.hasChildren()) {
final CompletableFuture<ArrayList<DiagnosticsNode>> future = property.getChildren();
final ArrayList<DiagnosticsNode> children = future.getNow(emptyList);
// Don't collapse children if it's just a flat list of children.
if (!isInChild && children.stream().noneMatch(DiagnosticsNode::hasChildren)) {
final String childIndent = getChildIndent(indent, property);
for (DiagnosticsNode child : children) {
printDiagnosticsNodeProperty(console, childIndent, child, contentType, false);
}
}
else {
if (property.getStyle() != DiagnosticsTreeStyle.shallow) {
// For deep trees, we show the text as collapsed.
final String childIndent = isInChild ? getChildIndent(indent, property) : "... " + indent;
for (DiagnosticsNode child : children) {
printDiagnosticsNodeProperty(console, childIndent, child, contentType, true);
}
}
}
}
// Print an extra line after the summary.
if (property.getLevel() == DiagnosticLevel.summary) {
console.print("\n", contentType);
}
}
private String getChildIndent(String indent, DiagnosticsNode property) {
if (property.getStyle() == DiagnosticsTreeStyle.flat) {
return indent;
}
else {
return indent + " ";
}
}
public void handleLoggingEvent(@NotNull Event event) {
queue.add(() -> {
try {
processLoggingEvent(event);
}
catch (Throwable t) {
LOG.warn(t);
}
});
}
private ConsoleViewContentType getContentTypeFor(DiagnosticLevel level) {
switch (level) {
case error:
case summary:
return ERROR_CONTENT_TYPE;
case hint:
return NORMAL_CONTENT_TYPE;
default:
return SUBTLE_CONTENT_TYPE;
}
}
@VisibleForTesting
public void processLoggingEvent(@NotNull Event event) {
final LogRecord logRecord = event.getLogRecord();
if (logRecord == null) return;
final IsolateRef isolateRef = event.getIsolate();
final InstanceRef message = logRecord.getMessage();
@NotNull final InstanceRef loggerName = logRecord.getLoggerName();
final String name = loggerName.getValueAsString().isEmpty() ? "log" : loggerName.getValueAsString();
final String prefix = "[" + name + "] ";
final String messageStr = getFullStringValue(service, isolateRef.getId(), message);
console.print(prefix, SUBTLE_CONTENT_TYPE);
console.print(messageStr + "\n", NORMAL_CONTENT_TYPE);
@NotNull final InstanceRef error = logRecord.getError();
@NotNull final InstanceRef stackTrace = logRecord.getStackTrace();
if (!error.isNull()) {
final String padding = StringUtil.repeat(" ", prefix.length());
if (error.getKind() == InstanceKind.String) {
String string = getFullStringValue(service, isolateRef.getId(), error);
// Handle json in the error payload.
boolean isJson = false;
try {
final JsonElement json = JsonUtils.parseString(string);
isJson = true;
string = new GsonBuilder().setPrettyPrinting().create().toJson(json);
string = string.replaceAll("\n", "\n" + padding);
}
catch (JsonSyntaxException ignored) {
}
console.print(padding + string + "\n", isJson ? ConsoleViewContentType.NORMAL_OUTPUT : ERROR_CONTENT_TYPE);
}
else {
final CountDownLatch latch = new CountDownLatch(1);
service.invoke(
isolateRef.getId(), error.getId(),
"toString", Collections.emptyList(),
true,
new VmServiceConsumers.InvokeConsumerWrapper() {
@Override
public void received(InstanceRef response) {
console.print(padding + stringValueFromStringRef(response) + "\n", ERROR_CONTENT_TYPE);
latch.countDown();
}
@Override
public void noGoodResult() {
console.print(padding + error.getClassRef().getName() + " " + error.getId() + "\n", ERROR_CONTENT_TYPE);
latch.countDown();
}
});
try {
latch.await();
}
catch (InterruptedException ignored) {
}
}
}
if (!stackTrace.isNull()) {
final String padding = StringUtil.repeat(" ", prefix.length());
final String out = stackTrace.getValueAsString() == null ? "" : stackTrace.getValueAsString().trim();
console.print(
padding + out.replaceAll("\n", "\n" + padding) + "\n", ERROR_CONTENT_TYPE);
}
}
private String stringValueFromStringRef(InstanceRef ref) {
return ref.getValueAsStringIsTruncated() ? formatTruncatedString(ref) : ref.getValueAsString();
}
private String stringValueFromStringRef(Instance instance) {
return instance.getValueAsStringIsTruncated() ? instance.getValueAsString() + "..." : instance.getValueAsString();
}
private String formatTruncatedString(InstanceRef ref) {
return ref.getValueAsString() + "...";
}
private String getFullStringValue(@NotNull VmService service, String isolateId, @Nullable InstanceRef ref) {
if (ref == null) return null;
if (!ref.getValueAsStringIsTruncated()) {
return ref.getValueAsString();
}
final CountDownLatch latch = new CountDownLatch(1);
final String[] result = new String[1];
service.getObject(isolateId, ref.getId(), 0, ref.getLength(), new GetObjectConsumer() {
@Override
public void onError(RPCError error) {
result[0] = formatTruncatedString(ref);
latch.countDown();
}
@Override
public void received(Obj response) {
if (response instanceof Instance && ((Instance)response).getKind() == InstanceKind.String) {
result[0] = stringValueFromStringRef((Instance)response);
}
else {
result[0] = formatTruncatedString(ref);
}
latch.countDown();
}
@Override
public void received(Sentinel response) {
result[0] = formatTruncatedString(ref);
latch.countDown();
}
});
try {
latch.await(1, TimeUnit.SECONDS);
}
catch (InterruptedException e) {
return null;
}
return result[0];
}
}
| only send analytics for the first error for a frame (#4867)
| src/io/flutter/logging/FlutterConsoleLogManager.java | only send analytics for the first error for a frame (#4867) | <ide><path>rc/io/flutter/logging/FlutterConsoleLogManager.java
<ide> final DiagnosticsNode diagnosticsNode = new DiagnosticsNode(jsonObject, objectGroup, app, false, null);
<ide>
<ide> // Send analytics for the diagnosticsNode.
<del> final String errorId = FlutterErrorHelper.getAnalyticsId(diagnosticsNode);
<del> if (errorId != null) {
<del> FlutterInitializer.getAnalytics().sendEvent("flutter-error", errorId);
<add> if (isFirstErrorForFrame()) {
<add> final String errorId = FlutterErrorHelper.getAnalyticsId(diagnosticsNode);
<add> if (errorId != null) {
<add> FlutterInitializer.getAnalytics().sendEvent("flutter-error", errorId);
<add> }
<ide> }
<ide>
<ide> if (FlutterSettings.getInstance().isShowStructuredErrors()) {
<ide>
<ide> frameErrorCount++;
<ide>
<del> final boolean terseError = frameErrorCount > 1;
<add> final boolean terseError = !isFirstErrorForFrame();
<ide>
<ide> final String prefix = "════════";
<ide> final String suffix = "══";
<ide> }
<ide>
<ide> console.print(StringUtil.repeat(errorSeparatorChar, errorSeparatorLength) + "\n", TITLE_CONTENT_TYPE);
<add> }
<add>
<add> private boolean isFirstErrorForFrame() {
<add> return frameErrorCount == 0;
<ide> }
<ide>
<ide> private void printTerseNodeProperty(ConsoleView console, String indent, DiagnosticsNode property) { |
|
Java | apache-2.0 | e1451b50cd41b876b84adb196076fceb7c52336e | 0 | adammurdoch/native-platform,adammurdoch/native-platform,adammurdoch/native-platform,adammurdoch/native-platform | import com.google.common.collect.ImmutableList;
import org.gradle.api.Action;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.file.Directory;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileSystemOperations;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.compile.JavaCompile;
import org.gradle.language.cpp.tasks.CppCompile;
import org.gradle.process.CommandLineArgumentProvider;
import javax.inject.Inject;
public abstract class JniPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
project.getPluginManager().withPlugin("java", plugin -> {
JniExtension jniExtension = project.getExtensions().create("jni", JniExtension.class);
jniExtension.getGeneratedHeadersDirectory().convention(project.getLayout().getBuildDirectory().dir("generated/jni-headers"));
JniCompilerArguments compilerArguments = new JniCompilerArguments(jniExtension.getGeneratedHeadersDirectory());
TaskContainer tasks = project.getTasks();
TaskProvider<JavaCompile> compileJavaProvider = tasks.named("compileJava", JavaCompile.class);
RemoveGeneratedNativeHeaders removeGeneratedNativeHeaders = project.getObjects().newInstance(RemoveGeneratedNativeHeaders.class, compilerArguments.getGeneratedHeadersDirectory());
configureCompileJava(compilerArguments, removeGeneratedNativeHeaders, compileJavaProvider);
configureIncludePath(
tasks,
project.files(compilerArguments.getGeneratedHeadersDirectory()).builtBy(compileJavaProvider)
);
});
}
private void configureCompileJava(
JniCompilerArguments compilerArguments,
RemoveGeneratedNativeHeaders removeGeneratedNativeHeaders,
TaskProvider<JavaCompile> compileJavaProvider
) {
compileJavaProvider.configure(compileJava -> {
compileJava.getOptions().getCompilerArgumentProviders().add(compilerArguments);
// Cannot do incremental header generation
compileJava.getOptions().setIncremental(false);
compileJava.doFirst(removeGeneratedNativeHeaders);
});
}
private void configureIncludePath(TaskContainer tasks, FileCollection generatedHeaderDirectory) {
tasks.withType(CppCompile.class).configureEach(task -> {
task.includes(generatedHeaderDirectory);
});
}
private static class JniCompilerArguments implements CommandLineArgumentProvider {
private final Provider<Directory> generatedHeadersDirectory;
public JniCompilerArguments(Provider<Directory> generatedHeadersDirectory) {
this.generatedHeadersDirectory = generatedHeadersDirectory;
}
@OutputDirectory
public Provider<Directory> getGeneratedHeadersDirectory() {
return generatedHeadersDirectory;
}
@Override
public Iterable<String> asArguments() {
return ImmutableList.of("-h", generatedHeadersDirectory.get().getAsFile().getAbsolutePath());
}
}
abstract static class RemoveGeneratedNativeHeaders implements Action<Task> {
private final Provider<Directory> generatedHeadersDirectory;
@Inject
public abstract FileSystemOperations getFileSystemOperations();
@Inject
public RemoveGeneratedNativeHeaders(Provider<Directory> generatedHeadersDirectory) {
this.generatedHeadersDirectory = generatedHeadersDirectory;
}
@Override
public void execute(Task task) {
getFileSystemOperations().delete(spec -> spec.delete(generatedHeadersDirectory));
}
}
}
| buildSrc/src/main/java/JniPlugin.java | import com.google.common.collect.ImmutableList;
import org.gradle.api.Action;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.file.Directory;
import org.gradle.api.file.FileSystemOperations;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.compile.JavaCompile;
import org.gradle.language.cpp.tasks.CppCompile;
import org.gradle.process.CommandLineArgumentProvider;
import javax.inject.Inject;
public abstract class JniPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
project.getPluginManager().withPlugin("java", plugin -> {
JniExtension jniExtension = project.getExtensions().create("jni", JniExtension.class);
jniExtension.getGeneratedHeadersDirectory().convention(project.getLayout().getBuildDirectory().dir("generated/jni-headers"));
JniCompilerArguments compilerArguments = new JniCompilerArguments(jniExtension.getGeneratedHeadersDirectory());
TaskContainer tasks = project.getTasks();
TaskProvider<JavaCompile> compileJavaProvider = tasks.named("compileJava", JavaCompile.class);
RemoveGeneratedNativeHeaders removeGeneratedNativeHeaders = project.getObjects().newInstance(RemoveGeneratedNativeHeaders.class, compilerArguments.getGeneratedHeadersDirectory());
configureCompileJava(compilerArguments, removeGeneratedNativeHeaders, compileJavaProvider);
configureIncludePath(
tasks,
compileJavaProvider.flatMap(it -> compilerArguments.getGeneratedHeadersDirectory())
);
});
}
private void configureCompileJava(
JniCompilerArguments compilerArguments,
RemoveGeneratedNativeHeaders removeGeneratedNativeHeaders,
TaskProvider<JavaCompile> compileJavaProvider
) {
compileJavaProvider.configure(compileJava -> {
compileJava.getOptions().getCompilerArgumentProviders().add(compilerArguments);
// Cannot do incremental header generation
compileJava.getOptions().setIncremental(false);
compileJava.doFirst(removeGeneratedNativeHeaders);
});
}
private void configureIncludePath(TaskContainer tasks, Provider<Directory> generatedHeaderDirectory) {
tasks.withType(CppCompile.class).configureEach(task -> {
task.includes(generatedHeaderDirectory);
});
}
private static class JniCompilerArguments implements CommandLineArgumentProvider {
private final Provider<Directory> generatedHeadersDirectory;
public JniCompilerArguments(Provider<Directory> generatedHeadersDirectory) {
this.generatedHeadersDirectory = generatedHeadersDirectory;
}
@OutputDirectory
public Provider<Directory> getGeneratedHeadersDirectory() {
return generatedHeadersDirectory;
}
@Override
public Iterable<String> asArguments() {
return ImmutableList.of("-h", generatedHeadersDirectory.get().getAsFile().getAbsolutePath());
}
}
abstract static class RemoveGeneratedNativeHeaders implements Action<Task> {
private final Provider<Directory> generatedHeadersDirectory;
@Inject
public abstract FileSystemOperations getFileSystemOperations();
@Inject
public RemoveGeneratedNativeHeaders(Provider<Directory> generatedHeadersDirectory) {
this.generatedHeadersDirectory = generatedHeadersDirectory;
}
@Override
public void execute(Task task) {
getFileSystemOperations().delete(spec -> spec.delete(generatedHeadersDirectory));
}
}
}
| Add the dependency manually to a file collection
| buildSrc/src/main/java/JniPlugin.java | Add the dependency manually to a file collection | <ide><path>uildSrc/src/main/java/JniPlugin.java
<ide> import org.gradle.api.Project;
<ide> import org.gradle.api.Task;
<ide> import org.gradle.api.file.Directory;
<add>import org.gradle.api.file.FileCollection;
<ide> import org.gradle.api.file.FileSystemOperations;
<ide> import org.gradle.api.provider.Provider;
<ide> import org.gradle.api.tasks.OutputDirectory;
<ide> configureCompileJava(compilerArguments, removeGeneratedNativeHeaders, compileJavaProvider);
<ide> configureIncludePath(
<ide> tasks,
<del> compileJavaProvider.flatMap(it -> compilerArguments.getGeneratedHeadersDirectory())
<add> project.files(compilerArguments.getGeneratedHeadersDirectory()).builtBy(compileJavaProvider)
<ide> );
<ide> });
<ide> }
<ide> });
<ide> }
<ide>
<del> private void configureIncludePath(TaskContainer tasks, Provider<Directory> generatedHeaderDirectory) {
<add> private void configureIncludePath(TaskContainer tasks, FileCollection generatedHeaderDirectory) {
<ide> tasks.withType(CppCompile.class).configureEach(task -> {
<ide> task.includes(generatedHeaderDirectory);
<ide> }); |
|
Java | apache-2.0 | fc1186a0a293fb30fbf43bf9c3b1157cff0b77e6 | 0 | nssales/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,DevStreet/FinanceAnalytics,jeorme/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,codeaudit/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,jerome79/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,nssales/OG-Platform | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.analytics.model.curve;
import static com.opengamma.engine.value.ValueRequirementNames.HULL_WHITE_ONE_FACTOR_PARAMETERS;
import static com.opengamma.financial.analytics.model.curve.CurveCalculationPropertyNamesAndValues.PROPERTY_HULL_WHITE_CURRENCY;
import static com.opengamma.financial.analytics.model.curve.CurveCalculationPropertyNamesAndValues.PROPERTY_HULL_WHITE_PARAMETERS;
import it.unimi.dsi.fastutil.doubles.DoubleArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.Clock;
import org.threeten.bp.Instant;
import org.threeten.bp.LocalTime;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.ZonedDateTime;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.model.interestrate.definition.HullWhiteOneFactorPiecewiseConstantParameters;
import com.opengamma.analytics.util.time.TimeCalculator;
import com.opengamma.core.config.ConfigSource;
import com.opengamma.core.config.impl.ConfigItem;
import com.opengamma.core.value.MarketDataRequirementNames;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.AbstractFunction;
import com.opengamma.engine.function.CompiledFunctionDefinition;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.OpenGammaCompilationContext;
import com.opengamma.financial.analytics.parameters.HullWhiteOneFactorParameters;
import com.opengamma.financial.view.ConfigDocumentWatchSetProvider;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalScheme;
import com.opengamma.id.VersionCorrection;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.async.AsynchronousExecution;
import com.opengamma.util.money.Currency;
import com.opengamma.util.time.Tenor;
/**
* Function that supplies Hull-White one factor parameters.
*/
public class HullWhiteOneFactorParametersFunction extends AbstractFunction {
/** The logger */
private static final Logger s_logger = LoggerFactory.getLogger(HullWhiteOneFactorParametersFunction.class);
private static final Map<Tenor, Double> VOLATILITY_TERMS = new LinkedHashMap<>();
static {
VOLATILITY_TERMS.put(Tenor.THREE_MONTHS, 0.01d);
VOLATILITY_TERMS.put(Tenor.TWELVE_MONTHS, 0.01d);
VOLATILITY_TERMS.put(Tenor.TWO_YEARS, 0.01d);
VOLATILITY_TERMS.put(Tenor.THREE_YEARS, 0.01d);
VOLATILITY_TERMS.put(Tenor.FOUR_YEARS, 0.01d);
VOLATILITY_TERMS.put(Tenor.FIVE_YEARS, 0.01d);
}
private static final Double MEAN_REVERSION_DEFAULT = 0.01d;
private static final Double INITIAL_VOLATILITY_DEFAULT = 0.01d;
/** The configuration name */
private final String _name;
/** The currency for which these parameters are valid */
private final Currency _currency;
/**
* @param name The name of the Hull-White parameter set, not null
* @param currency The currency for which the parameters are valid, not null
*/
public HullWhiteOneFactorParametersFunction(final String name, final String currency) {
ArgumentChecker.notNull(name, "name");
ArgumentChecker.notNull(currency, "currency");
_name = name;
_currency = Currency.of(currency);
}
@Override
public void init(final FunctionCompilationContext context) {
ConfigDocumentWatchSetProvider.reinitOnChanges(context, this, HullWhiteOneFactorParameters.class);
}
@Override
public CompiledFunctionDefinition compile(final FunctionCompilationContext context, final Instant atInstant) {
final ValueProperties properties = createValueProperties()
.with(PROPERTY_HULL_WHITE_PARAMETERS, _name)
.with(PROPERTY_HULL_WHITE_CURRENCY, _currency.getCode())
.get();
final ValueSpecification result = new ValueSpecification(HULL_WHITE_ONE_FACTOR_PARAMETERS, ComputationTargetSpecification.of(_currency), properties);
final Set<ValueRequirement> requirements = new HashSet<>();
final ConfigSource configSource = OpenGammaCompilationContext.getConfigSource(context);
final Collection<ConfigItem<HullWhiteOneFactorParameters>> configs = configSource.get(HullWhiteOneFactorParameters.class, _name, VersionCorrection.LATEST);
if (configs == null) {
throw new OpenGammaRuntimeException("HullWhiteOneFactorParameter configuration called " + _name + " was null");
}
final HullWhiteOneFactorParameters parameters = configs.iterator().next().getValue();
requirements.add(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE, ComputationTargetType.PRIMITIVE, parameters.getMeanReversionId()));
requirements.add(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE, ComputationTargetType.PRIMITIVE, parameters.getInitialVolatilityId()));
final Map<Tenor, ExternalId> volatilityTermStructure = parameters.getVolatilityTermStructure();
for (final Map.Entry<Tenor, ExternalId> entry : volatilityTermStructure.entrySet()) {
final ExternalScheme scheme = entry.getValue().getScheme();
final String id = entry.getValue().getValue();
final ExternalId tenorAppendedId = ExternalId.of(scheme, createId(entry.getKey(), id));
requirements.add(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE, ComputationTargetType.PRIMITIVE, tenorAppendedId));
}
final ZonedDateTime atZDT = ZonedDateTime.ofInstant(atInstant, ZoneOffset.UTC);
return new AbstractInvokingCompiledFunction(atZDT.with(LocalTime.MIDNIGHT), atZDT.plusDays(1).with(LocalTime.MIDNIGHT).minusNanos(1000000)) {
@Override
public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target,
final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
final Clock snapshotClock = executionContext.getValuationClock();
final ZonedDateTime now = ZonedDateTime.now(snapshotClock);
Object meanReversionObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
ComputationTargetType.PRIMITIVE, parameters.getMeanReversionId()));
if (meanReversionObject == null) {
// Jim - these are hacks that should be removed.
meanReversionObject = MEAN_REVERSION_DEFAULT;
s_logger.warn("Using default mean reversion");
//throw new OpenGammaRuntimeException("Could not get mean reversion value");
}
Object initialVolatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
ComputationTargetType.PRIMITIVE, parameters.getInitialVolatilityId()));
if (initialVolatilityObject == null) {
// Jim - these are hacks that should be removed.
initialVolatilityObject = INITIAL_VOLATILITY_DEFAULT;
s_logger.warn("Using default initial volatility");
//throw new OpenGammaRuntimeException("Could not get initial volatility value");
}
final Double meanReversion = (Double) meanReversionObject;
final Double initialVolatility = (Double) initialVolatilityObject;
final DoubleArrayList volatility = new DoubleArrayList();
volatility.add(initialVolatility);
final DoubleArrayList volatilityTime = new DoubleArrayList();
for (final Map.Entry<Tenor, ExternalId> entry : volatilityTermStructure.entrySet()) {
final ExternalScheme scheme = entry.getValue().getScheme();
final String id = entry.getValue().getValue();
final ExternalId tenorAppendedId = ExternalId.of(scheme, createId(entry.getKey(), id));
Object volatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
ComputationTargetType.PRIMITIVE, tenorAppendedId));
// Jim - next block is a hack that should be removed.
if (volatilityObject == null) {
volatilityObject = VOLATILITY_TERMS.get(entry.getKey());
}
if (volatilityObject == null) {
s_logger.error("Could not get value for " + tenorAppendedId);
} else {
final double t = TimeCalculator.getTimeBetween(now, now.plus(entry.getKey().getPeriod()));
volatility.add((Double) volatilityObject);
volatilityTime.add(t);
}
}
final HullWhiteOneFactorPiecewiseConstantParameters hullWhiteParameters = new HullWhiteOneFactorPiecewiseConstantParameters(meanReversion, volatility.toDoubleArray(),
volatilityTime.toDoubleArray());
return Collections.singleton(new ComputedValue(result, hullWhiteParameters));
}
@Override
public ComputationTargetType getTargetType() {
return ComputationTargetType.CURRENCY;
}
@Override
public boolean canApplyTo(final FunctionCompilationContext compilationContext, final ComputationTarget target) {
return _currency.equals(target.getValue());
}
@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext compilationContext, final ComputationTarget target) {
return Collections.singleton(result);
}
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext compilationContext, final ComputationTarget target, final ValueRequirement desiredValue) {
final ValueProperties constraints = desiredValue.getConstraints();
final Set<String> names = constraints.getValues(PROPERTY_HULL_WHITE_PARAMETERS);
if (names == null || names.size() != 1) {
return null;
}
return requirements;
}
@Override
public boolean canHandleMissingRequirements() {
return true;
}
@Override
public boolean canHandleMissingInputs() {
return true;
}
};
}
/**
* Appends the tenor to an id to create the market data identifier.
* @param tenor The tenor
* @param id The id
* @return The market data id
*/
static String createId(final Tenor tenor, final String id) {
final StringBuilder newId = new StringBuilder(id);
newId.append("_");
newId.append(tenor.getPeriod().toString());
return newId.toString();
}
}
| projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/curve/HullWhiteOneFactorParametersFunction.java | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.analytics.model.curve;
import static com.opengamma.engine.value.ValueRequirementNames.HULL_WHITE_ONE_FACTOR_PARAMETERS;
import static com.opengamma.financial.analytics.model.curve.CurveCalculationPropertyNamesAndValues.PROPERTY_HULL_WHITE_CURRENCY;
import static com.opengamma.financial.analytics.model.curve.CurveCalculationPropertyNamesAndValues.PROPERTY_HULL_WHITE_PARAMETERS;
import it.unimi.dsi.fastutil.doubles.DoubleArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.Clock;
import org.threeten.bp.Instant;
import org.threeten.bp.LocalTime;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.ZonedDateTime;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.model.interestrate.definition.HullWhiteOneFactorPiecewiseConstantParameters;
import com.opengamma.analytics.util.time.TimeCalculator;
import com.opengamma.core.config.ConfigSource;
import com.opengamma.core.config.impl.ConfigItem;
import com.opengamma.core.value.MarketDataRequirementNames;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.AbstractFunction;
import com.opengamma.engine.function.CompiledFunctionDefinition;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.OpenGammaCompilationContext;
import com.opengamma.financial.analytics.parameters.HullWhiteOneFactorParameters;
import com.opengamma.financial.view.ConfigDocumentWatchSetProvider;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalScheme;
import com.opengamma.id.VersionCorrection;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.async.AsynchronousExecution;
import com.opengamma.util.money.Currency;
import com.opengamma.util.time.Tenor;
/**
* Function that supplies Hull-White one factor parameters.
*/
public class HullWhiteOneFactorParametersFunction extends AbstractFunction {
/** The logger */
private static final Logger s_logger = LoggerFactory.getLogger(HullWhiteOneFactorParametersFunction.class);
/** The configuration name */
private final String _name;
/** The currency for which these parameters are valid */
private final Currency _currency;
/**
* @param name The name of the Hull-White parameter set, not null
* @param currency The currency for which the parameters are valid, not null
*/
public HullWhiteOneFactorParametersFunction(final String name, final String currency) {
ArgumentChecker.notNull(name, "name");
ArgumentChecker.notNull(currency, "currency");
_name = name;
_currency = Currency.of(currency);
}
@Override
public void init(final FunctionCompilationContext context) {
ConfigDocumentWatchSetProvider.reinitOnChanges(context, this, HullWhiteOneFactorParameters.class);
}
@Override
public CompiledFunctionDefinition compile(final FunctionCompilationContext context, final Instant atInstant) {
final ValueProperties properties = createValueProperties()
.with(PROPERTY_HULL_WHITE_PARAMETERS, _name)
.with(PROPERTY_HULL_WHITE_CURRENCY, _currency.getCode())
.get();
final ValueSpecification result = new ValueSpecification(HULL_WHITE_ONE_FACTOR_PARAMETERS, ComputationTargetSpecification.of(_currency), properties);
final Set<ValueRequirement> requirements = new HashSet<>();
final ConfigSource configSource = OpenGammaCompilationContext.getConfigSource(context);
final Collection<ConfigItem<HullWhiteOneFactorParameters>> configs = configSource.get(HullWhiteOneFactorParameters.class, _name, VersionCorrection.LATEST);
if (configs == null) {
throw new OpenGammaRuntimeException("HullWhiteOneFactorParameter configuration called " + _name + " was null");
}
final HullWhiteOneFactorParameters parameters = configs.iterator().next().getValue();
requirements.add(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE, ComputationTargetType.PRIMITIVE, parameters.getMeanReversionId()));
requirements.add(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE, ComputationTargetType.PRIMITIVE, parameters.getInitialVolatilityId()));
final Map<Tenor, ExternalId> volatilityTermStructure = parameters.getVolatilityTermStructure();
for (final Map.Entry<Tenor, ExternalId> entry : volatilityTermStructure.entrySet()) {
final ExternalScheme scheme = entry.getValue().getScheme();
final String id = entry.getValue().getValue();
final ExternalId tenorAppendedId = ExternalId.of(scheme, createId(entry.getKey(), id));
requirements.add(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE, ComputationTargetType.PRIMITIVE, tenorAppendedId));
}
final ZonedDateTime atZDT = ZonedDateTime.ofInstant(atInstant, ZoneOffset.UTC);
return new AbstractInvokingCompiledFunction(atZDT.with(LocalTime.MIDNIGHT), atZDT.plusDays(1).with(LocalTime.MIDNIGHT).minusNanos(1000000)) {
@Override
public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target,
final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
final Clock snapshotClock = executionContext.getValuationClock();
final ZonedDateTime now = ZonedDateTime.now(snapshotClock);
final Object meanReversionObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
ComputationTargetType.PRIMITIVE, parameters.getMeanReversionId()));
if (meanReversionObject == null) {
throw new OpenGammaRuntimeException("Could not get mean reversion value");
}
final Object initialVolatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
ComputationTargetType.PRIMITIVE, parameters.getInitialVolatilityId()));
if (initialVolatilityObject == null) {
throw new OpenGammaRuntimeException("Could not get initial volatility value");
}
final Double meanReversion = (Double) meanReversionObject;
final Double initialVolatility = (Double) initialVolatilityObject;
final DoubleArrayList volatility = new DoubleArrayList();
volatility.add(initialVolatility);
final DoubleArrayList volatilityTime = new DoubleArrayList();
for (final Map.Entry<Tenor, ExternalId> entry : volatilityTermStructure.entrySet()) {
final ExternalScheme scheme = entry.getValue().getScheme();
final String id = entry.getValue().getValue();
final ExternalId tenorAppendedId = ExternalId.of(scheme, createId(entry.getKey(), id));
final Object volatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
ComputationTargetType.PRIMITIVE, tenorAppendedId));
if (volatilityObject == null) {
s_logger.error("Could not get value for " + tenorAppendedId);
} else {
final double t = TimeCalculator.getTimeBetween(now, now.plus(entry.getKey().getPeriod()));
volatility.add((Double) volatilityObject);
volatilityTime.add(t);
}
}
final HullWhiteOneFactorPiecewiseConstantParameters hullWhiteParameters = new HullWhiteOneFactorPiecewiseConstantParameters(meanReversion, volatility.toDoubleArray(),
volatilityTime.toDoubleArray());
return Collections.singleton(new ComputedValue(result, hullWhiteParameters));
}
@Override
public ComputationTargetType getTargetType() {
return ComputationTargetType.CURRENCY;
}
@Override
public boolean canApplyTo(final FunctionCompilationContext compilationContext, final ComputationTarget target) {
return _currency.equals(target.getValue());
}
@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext compilationContext, final ComputationTarget target) {
return Collections.singleton(result);
}
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext compilationContext, final ComputationTarget target, final ValueRequirement desiredValue) {
final ValueProperties constraints = desiredValue.getConstraints();
final Set<String> names = constraints.getValues(PROPERTY_HULL_WHITE_PARAMETERS);
if (names == null || names.size() != 1) {
return null;
}
return requirements;
}
@Override
public boolean canHandleMissingRequirements() {
return true;
}
@Override
public boolean canHandleMissingInputs() {
return true;
}
};
}
/**
* Appends the tenor to an id to create the market data identifier.
* @param tenor The tenor
* @param id The id
* @return The market data id
*/
static String createId(final Tenor tenor, final String id) {
final StringBuilder newId = new StringBuilder(id);
newId.append("_");
newId.append(tenor.getPeriod().toString());
return newId.toString();
}
}
| PLAT-4659 Added defaults for Hull White One Factor Parameter Function, need to be reverted.
| projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/curve/HullWhiteOneFactorParametersFunction.java | PLAT-4659 Added defaults for Hull White One Factor Parameter Function, need to be reverted. | <ide><path>rojects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/curve/HullWhiteOneFactorParametersFunction.java
<ide> import java.util.Collection;
<ide> import java.util.Collections;
<ide> import java.util.HashSet;
<add>import java.util.LinkedHashMap;
<ide> import java.util.Map;
<ide> import java.util.Set;
<ide>
<ide> public class HullWhiteOneFactorParametersFunction extends AbstractFunction {
<ide> /** The logger */
<ide> private static final Logger s_logger = LoggerFactory.getLogger(HullWhiteOneFactorParametersFunction.class);
<add> private static final Map<Tenor, Double> VOLATILITY_TERMS = new LinkedHashMap<>();
<add> static {
<add> VOLATILITY_TERMS.put(Tenor.THREE_MONTHS, 0.01d);
<add> VOLATILITY_TERMS.put(Tenor.TWELVE_MONTHS, 0.01d);
<add> VOLATILITY_TERMS.put(Tenor.TWO_YEARS, 0.01d);
<add> VOLATILITY_TERMS.put(Tenor.THREE_YEARS, 0.01d);
<add> VOLATILITY_TERMS.put(Tenor.FOUR_YEARS, 0.01d);
<add> VOLATILITY_TERMS.put(Tenor.FIVE_YEARS, 0.01d);
<add> }
<add> private static final Double MEAN_REVERSION_DEFAULT = 0.01d;
<add> private static final Double INITIAL_VOLATILITY_DEFAULT = 0.01d;
<add>
<ide> /** The configuration name */
<ide> private final String _name;
<ide> /** The currency for which these parameters are valid */
<ide> final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
<ide> final Clock snapshotClock = executionContext.getValuationClock();
<ide> final ZonedDateTime now = ZonedDateTime.now(snapshotClock);
<del> final Object meanReversionObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
<add> Object meanReversionObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
<ide> ComputationTargetType.PRIMITIVE, parameters.getMeanReversionId()));
<ide> if (meanReversionObject == null) {
<del> throw new OpenGammaRuntimeException("Could not get mean reversion value");
<del> }
<del> final Object initialVolatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
<add> // Jim - these are hacks that should be removed.
<add> meanReversionObject = MEAN_REVERSION_DEFAULT;
<add> s_logger.warn("Using default mean reversion");
<add> //throw new OpenGammaRuntimeException("Could not get mean reversion value");
<add> }
<add> Object initialVolatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
<ide> ComputationTargetType.PRIMITIVE, parameters.getInitialVolatilityId()));
<ide> if (initialVolatilityObject == null) {
<del> throw new OpenGammaRuntimeException("Could not get initial volatility value");
<add> // Jim - these are hacks that should be removed.
<add> initialVolatilityObject = INITIAL_VOLATILITY_DEFAULT;
<add> s_logger.warn("Using default initial volatility");
<add> //throw new OpenGammaRuntimeException("Could not get initial volatility value");
<ide> }
<ide> final Double meanReversion = (Double) meanReversionObject;
<ide> final Double initialVolatility = (Double) initialVolatilityObject;
<ide> final ExternalScheme scheme = entry.getValue().getScheme();
<ide> final String id = entry.getValue().getValue();
<ide> final ExternalId tenorAppendedId = ExternalId.of(scheme, createId(entry.getKey(), id));
<del> final Object volatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
<add> Object volatilityObject = inputs.getValue(new ValueRequirement(MarketDataRequirementNames.MARKET_VALUE,
<ide> ComputationTargetType.PRIMITIVE, tenorAppendedId));
<add> // Jim - next block is a hack that should be removed.
<add> if (volatilityObject == null) {
<add> volatilityObject = VOLATILITY_TERMS.get(entry.getKey());
<add> }
<ide> if (volatilityObject == null) {
<ide> s_logger.error("Could not get value for " + tenorAppendedId);
<ide> } else { |
|
JavaScript | mit | 1a232582dd064fcf9532496a15cd4b39b2e55107 | 0 | Eloqua/sproutcore,Eloqua/sproutcore,Eloqua/sproutcore | // ==========================================================================
// Project: SproutCore - JavaScript Application Framework
// Copyright: ©2006-2009 Sprout Systems, Inc. and contributors.
// portions copyright @2009 Apple Inc.
// License: Licened under MIT license (see license.js)
// ==========================================================================
sc_require('panes/palette');
/**
Popular customized picker position rules:
default: initiated just below the anchor.
shift x, y to optimized picker visibility and make sure top-left corner is always visible.
menu : same as default rule +
default(1,4,3) or custom offset below the anchor for default location to fine tunned visual alignment +
enforce min left(7px)/right(8px) padding to the window
fixed : default(1,4,3) or custom offset below the anchor for default location to cope with specific anchor and skip fitPositionToScreen
pointer :take default [0,1,2,3,2] or custom matrix to choose one of four perfect pointer positions.Ex:
perfect right (0) > perfect left (1) > perfect top (2) > perfect bottom (3)
fallback to perfect top (2)
*/
SC.PICKER_MENU = 'menu';
SC.PICKER_FIXED = 'fixed';
SC.PICKER_POINTER = 'pointer';
/**
Pointer layout for perfect right/left/top/bottom
*/
SC.POINTER_LAYOUT = ["perfectRight", "perfectLeft", "perfectTop", "perfectBottom"];
/**
Displays a non-modal, self anchor positioned picker pane.
The default way to use the picker pane is to simply add it to your page like this:
{{{
SC.PickerPane.create({
layout: { width: 400, height: 200 },
contentView: SC.View.extend({
})
}).popup(anchor);
}}}
This will cause your picker pane to display.
Picker pane is a simple way to provide non-modal messaging that won't
blocks the user's interaction with your application. Picker panes are
useful for showing important detail informations with optimized position around anchor.
They provide a better user experience than modal panel.
Examples for applying popular customized picker position rules:
1. default:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor);
}}}
2. menu below the anchor with default offset matrix [1,4,3]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_MENU);
}}}
3. menu on the right side of anchor with custom offset matrix [2,6,0]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_MENU, [2,6,0]);
}}}
4. fixed below the anchor with default offset matrix [1,4,3]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_FIXED);
}}}
5. fixed on the right side of anchor with custom offset matrix [-22,-17,0]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_FIXED, [-22,-17,0]);
}}}
6. pointer with default position pref matrix [0,1,2,3,2]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_POINTER);
}}}
perfect right (0) > perfect left (1) > perfect top (2) > perfect bottom (3)
fallback to perfect top (2)
7. pointer with custom position pref matrix [3,0,1,2,2]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_POINTER, [3,0,1,2,2]);
}}}
perfect bottom (3) > perfect right (0) > perfect left (1) > perfect top (2)
fallback to perfect top (2)
@extends SC.PalettePane
@since SproutCore 1.0
*/
SC.PickerPane = SC.PalettePane.extend({
classNames: 'sc-picker',
isAnchored: YES,
isModal: YES,
pointerPos: 'perfectRight',
pointerPosX: 0,
pointerPosY: 0,
/**
This property will be set to the element (or view.get('layer')) that
triggered your picker to show. You can use this to properly position your
picker.
@property {Object}
*/
anchorElement: null,
/**
popular customized picker position rule
@property {String}
*/
preferType: null,
/**
default/custom offset or position pref matrix for specific preferType
@property {String}
*/
preferMatrix: null,
/**
Displays a new picker pane according to the passed parameters.
Every parameter except for the anchorViewOrElement is optional.
@param {Object} anchorViewOrElement view or element to anchor to
@param {String} preferType optional apply picker position rule
@param {Array} preferMatrix optional apply custom offset or position pref matrix for specific preferType
@returns {SC.PickerPane} receiver
*/
popup: function(anchorViewOrElement, preferType, preferMatrix) {
var anchor = anchorViewOrElement.isView ? anchorViewOrElement.get('layer') : anchorViewOrElement;
this.beginPropertyChanges();
this.set('anchorElement',anchor) ;
if (preferType) this.set('preferType',preferType) ;
if (preferMatrix) this.set('preferMatrix',preferMatrix) ;
this.endPropertyChanges();
this.positionPane();
this.append();
},
/** @private
The ideal position for a picker pane is just below the anchor that
triggered it + offset of specific preferType. Find that ideal position,
then call fitPositionToScreen to get final position. If anchor is missing,
fallback to center.
*/
positionPane: function() {
var anchor = this.get('anchorElement'),
preferType = this.get('preferType'),
preferMatrix = this.get('preferMatrix'),
layout = this.get('layout'),
origin ;
// usually an anchorElement will be passed. The ideal position is just
// below the anchor + default or custom offset according to preferType.
// If that is not possible, fitPositionToScreen will take care of that for
// other alternative and fallback position.
if (anchor) {
anchor = this.computeAnchorRect(anchor);
origin = SC.cloneRect(anchor);
if (preferType) {
switch (preferType) {
case SC.PICKER_MENU:
case SC.PICKER_FIXED:
if(!preferMatrix || preferMatrix.length != 3) {
// default below the anchor with fine tunned visual alignment
// for Menu to appear just below the anchorElement.
this.set('preferMatrix', [1, 4, 3]) ;
}
// fine tunned visual alignment from preferMatrix
origin.x += ((this.preferMatrix[2]===0) ? origin.width : 0) + this.preferMatrix[0] ;
origin.y += ((this.preferMatrix[2]===3) ? origin.height : 0) + this.preferMatrix[1];
break;
default:
origin.y += origin.height ;
break;
}
} else {
origin.y += origin.height ;
}
origin = this.fitPositionToScreen(origin, this.get('frame'), anchor) ;
layout = { width: origin.width, height: origin.height, left: origin.x, top: origin.y };
// if no anchor view has been set for some reason, just center.
} else {
layout = { width: layout.width, height: layout.height, centerX: 0, centerY: 0 };
}
this.set('layout', layout).updateLayout();
return this ;
},
/** @private
This method will return ret (x, y, width, height) from a rectangular element
*/
computeAnchorRect: function(anchor) {
var ret = SC.viewportOffset(anchor); // get x & y
var cq = SC.$(anchor);
ret.width = cq.width();
ret.height = cq.height();
return ret ;
},
/** @private
This method will dispatch to the right re-position rule according to preferType
*/
fitPositionToScreen: function(preferredPosition, picker, anchor) {
// get window rect.
var wsize = this.get('currentWindowSize') || SC.RootResponder.responder.computeWindowSize() ;
var wret = { x: 0, y: 0, width: wsize.width, height: wsize.height } ;
picker.x = preferredPosition.x ; picker.y = preferredPosition.y ;
if(this.preferType) {
switch(this.preferType) {
case SC.PICKER_MENU:
// apply default + menu re-position rule
picker = this.fitPositionToScreenDefault(wret, picker, anchor) ;
picker = this.fitPositionToScreenMenu(wret, picker) ;
break;
case SC.PICKER_POINTER:
// apply pointer re-position rule
picker = this.fitPositionToScreenPointer(wret, picker, anchor) ;
break;
case SC.PICKER_FIXED:
// skip fitPositionToScreen
break;
default:
break;
}
} else {
// apply default re-position rule
picker = this.fitPositionToScreenDefault(wret, picker, anchor) ;
}
this.displayDidChange();
return picker ;
},
/** @private
re-position rule migrated from old SC.OverlayPaneView.
shift x, y to optimized picker visibility and make sure top-left corner is always visible.
*/
fitPositionToScreenDefault: function(w, f, a) {
// make sure the right edge fits on the screen. If not, anchor to
// right edge of anchor or right edge of window, whichever is closer.
if (SC.maxX(f) > w.width) {
var mx = Math.max(SC.maxX(a), f.width) ;
f.x = Math.min(mx, w.width) - f.width ;
}
// if the left edge is off of the screen, try to position at left edge
// of anchor. If that pushes right edge off screen, shift back until
// right is on screen or left = 0
if (SC.minX(f) < 0) {
f.x = SC.minX(Math.max(a,0)) ;
if (SC.maxX(f) > w.width) {
f.x = Math.max(0, w.width - f.width);
}
}
// make sure bottom edge fits on screen. If not, try to anchor to top
// of anchor or bottom edge of screen.
if (SC.maxY(f) > w.height) {
mx = Math.max((a.y - f.height), 0) ;
if (mx > w.height) {
f.y = Math.max(0, w.height - f.height) ;
} else f.y = mx ;
}
// if Top edge is off screen, try to anchor to bottom of anchor. If that
// pushes off bottom edge, shift up until it is back on screen or top =0
if (SC.minY(f) < 0) {
mx = Math.min(SC.maxY(a), (w.height - a.height)) ;
f.y = Math.max(mx, 0) ;
}
return f ;
},
/** @private
re-position rule optimized for Menu to enforce min left(7px)/right(8px) padding to the window
*/
fitPositionToScreenMenu: function(w, f) {
// min left/right padding to the window
if( (f.x + f.width) > (w.width-20) ) f.x = w.width - f.width - 20;
if( f.x < 7 ) f.x = 7;
// if the height of the menu is bigger than the window height resize it.
if( f.height > w.height){
f.y = 15;
f.height = w.height - 35;
}
return f ;
},
/** @private
re-position rule for triangle pointer picker: take default [0,1,2,3,2] or custom matrix to choose one of four perfect pointer positions.
*/
fitPositionToScreenPointer: function(w, f, a) {
// initiate perfect positions matrix
// 4 perfect positions: right > left > top > bottom
// 2 coordinates: x, y
// top-left corner of 4 perfect positioned f (4x2)
var overlapTunningX = (a.height > 12) ? 0 : 1;
var overlapTunningY = (a.height > 12) ? 0 : 3;
var prefP1 =[[a.x+a.width+(19+overlapTunningX), a.y+parseInt(a.height/2,0)-40],
[a.x-f.width-(7+overlapTunningX), a.y+parseInt(a.height/2,0)-40],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0), a.y-f.height-(17+overlapTunningY)],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0), a.y+a.height+(17+overlapTunningY)]];
// bottom-right corner of 4 perfect positioned f (4x2)
var prefP2 =[[a.x+a.width+f.width+(19+overlapTunningX), a.y+parseInt(a.height/2,0)+f.height-40],
[a.x-(7+overlapTunningX), a.y+parseInt(a.height/2,0)+f.height-40],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0)+f.width, a.y-(17+overlapTunningY)],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0)+f.width, a.y+a.height+f.height+(17+overlapTunningY)]];
// cutoff of 4 perfect positioned f: top, right, bottom, left (4x4)
var cutoffPrefP =[[prefP1[0][1]>0 ? 0 : 0-prefP1[0][1], prefP2[0][0]<w.width ? 0 : prefP2[0][0]-w.width, prefP2[0][1]<w.height ? 0 : prefP2[0][1]-w.height, prefP1[0][0]>0 ? 0 : 0-prefP1[0][0]],
[prefP1[1][1]>0 ? 0 : 0-prefP1[1][1], prefP2[1][0]<w.width ? 0 : prefP2[1][0]-w.width, prefP2[1][1]<w.height ? 0 : prefP2[1][1]-w.height, prefP1[1][0]>0 ? 0 : 0-prefP1[1][0]],
[prefP1[2][1]>0 ? 0 : 0-prefP1[2][1], prefP2[2][0]<w.width ? 0 : prefP2[2][0]-w.width, prefP2[2][1]<w.height ? 0 : prefP2[2][1]-w.height, prefP1[2][0]>0 ? 0 : 0-prefP1[2][0]],
[prefP1[3][1]>0 ? 0 : 0-prefP1[3][1], prefP2[3][0]<w.width ? 0 : prefP2[3][0]-w.width, prefP2[3][1]<w.height ? 0 : prefP2[3][1]-w.height, prefP1[3][0]>0 ? 0 : 0-prefP1[3][0]]];
if(!this.preferMatrix || this.preferMatrix.length != 5) {
// default re-position rule : perfect right (0) > perfect left (1) > perfect top (2) > perfect bottom (3)
// fallback to perfect top (2)
this.set('preferMatrix', [0,1,2,3,2]) ;
}
var m = this.preferMatrix;
//var pointer = this.contentView.childViews[this.contentView.childViews.length-1];
// initiated with fallback position
// Will be used only if the following preferred alternative can not be found
f.x = prefP1[m[4]][0] ;
f.y = prefP1[m[4]][1] ;
this.set('pointerPos', SC.POINTER_LAYOUT[m[4]]);
this.set('pointerPosX', 0);
this.set('pointerPosY', 0);
for(var i=0; i<SC.POINTER_LAYOUT.length; i++) {
if (cutoffPrefP[m[i]][0]===0 && cutoffPrefP[m[i]][1]===0 && cutoffPrefP[m[i]][2]===0 && cutoffPrefP[m[i]][3]===0) {
// alternative i in preferMatrix by priority
if (m[4] != m[i]) {
f.x = prefP1[m[i]][0] ;
f.y = prefP1[m[i]][1] ;
this.set('pointerPos', SC.POINTER_LAYOUT[m[i]]);
}
i = SC.POINTER_LAYOUT.length;
} else if ((m[i] === 0 || m[i] === 1) && cutoffPrefP[m[i]][0]===0 && cutoffPrefP[m[i]][1]===0 && cutoffPrefP[m[i]][2] < f.height-91 && cutoffPrefP[m[i]][3]===0) {
if (m[4] != m[i]) {
f.x = prefP1[m[i]][0] ;
this.set('pointerPos', SC.POINTER_LAYOUT[m[i]]);
}
f.y = prefP1[m[i]][1] - cutoffPrefP[m[i]][2] ;
this.set('pointerPosY', cutoffPrefP[m[i]][2]);
i = SC.POINTER_LAYOUT.length;
}
}
return f ;
},
render: function(context, firstTime) {
var ret = sc_super();
if (context.needsContent) {
if (this.get('preferType') == SC.PICKER_POINTER) {
context.push('<div class="sc-pointer %@" style="margin-top: %@px"></div>'.fmt(this.get('pointerPos'), this.get('pointerPosY')));
}
} else {
var el = this.$('.sc-pointer');
el.attr('class', "sc-pointer %@".fmt(this.get('pointerPos')));
el.attr('style', "margin-top: %@px".fmt(this.get('pointerPosY')));
}
return ret ;
},
/** @private - click away picker. */
modalPaneDidClick: function(evt) {
var f = this.get("frame");
if(!this.clickInside(f, evt)) this.remove();
return YES ;
},
mouseDown: function(evt) {
return this.modalPaneDidClick(evt);
},
/** @private
internal method to define the range for clicking inside so the picker
won't be clicked away default is the range of contentView frame.
Over-write for adjustments. ex: shadow
*/
clickInside: function(frame, evt) {
return SC.pointInRect({ x: evt.pageX, y: evt.pageY }, frame);
},
/**
Invoked by the root responder. Re-position picker whenever the window resizes.
*/
windowSizeDidChange: function(oldSize, newSize) {
sc_super();
this.positionPane();
}
});
| frameworks/desktop/panes/picker.js | // ==========================================================================
// Project: SproutCore - JavaScript Application Framework
// Copyright: ©2006-2009 Sprout Systems, Inc. and contributors.
// portions copyright @2009 Apple Inc.
// License: Licened under MIT license (see license.js)
// ==========================================================================
sc_require('panes/palette');
/**
Popular customized picker position rules:
default: initiated just below the anchor.
shift x, y to optimized picker visibility and make sure top-left corner is always visible.
menu : same as default rule +
default(1,4,3) or custom offset below the anchor for default location to fine tunned visual alignment +
enforce min left(7px)/right(8px) padding to the window
fixed : default(1,4,3) or custom offset below the anchor for default location to cope with specific anchor and skip fitPositionToScreen
pointer :take default [0,1,2,3,2] or custom matrix to choose one of four perfect pointer positions.Ex:
perfect right (0) > perfect left (1) > perfect top (2) > perfect bottom (3)
fallback to perfect top (2)
*/
SC.PICKER_MENU = 'menu';
SC.PICKER_FIXED = 'fixed';
SC.PICKER_POINTER = 'pointer';
/**
Pointer layout for perfect right/left/top/bottom
*/
SC.POINTER_LAYOUT = ["perfectRight", "perfectLeft", "perfectTop", "perfectBottom"];
/**
Displays a non-modal, self anchor positioned picker pane.
The default way to use the picker pane is to simply add it to your page like this:
{{{
SC.PickerPane.create({
layout: { width: 400, height: 200 },
contentView: SC.View.extend({
})
}).popup(anchor);
}}}
This will cause your picker pane to display.
Picker pane is a simple way to provide non-modal messaging that won't
blocks the user's interaction with your application. Picker panes are
useful for showing important detail informations with optimized position around anchor.
They provide a better user experience than modal panel.
Examples for applying popular customized picker position rules:
1. default:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor);
}}}
2. menu below the anchor with default offset matrix [1,4,3]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_MENU);
}}}
3. menu on the right side of anchor with custom offset matrix [2,6,0]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_MENU, [2,6,0]);
}}}
4. fixed below the anchor with default offset matrix [1,4,3]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_FIXED);
}}}
5. fixed on the right side of anchor with custom offset matrix [-22,-17,0]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_FIXED, [-22,-17,0]);
}}}
6. pointer with default position pref matrix [0,1,2,3,2]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_POINTER);
}}}
perfect right (0) > perfect left (1) > perfect top (2) > perfect bottom (3)
fallback to perfect top (2)
7. pointer with custom position pref matrix [3,0,1,2,2]:
{{{
SC.PickerPane.create({layout: { width: 400, height: 200 },contentView: SC.View.extend({})
}).popup(anchor, SC.PICKER_POINTER, [3,0,1,2,2]);
}}}
perfect bottom (3) > perfect right (0) > perfect left (1) > perfect top (2)
fallback to perfect top (2)
@extends SC.PalettePane
@since SproutCore 1.0
*/
SC.PickerPane = SC.PalettePane.extend({
classNames: 'sc-picker',
isAnchored: YES,
isModal: YES,
pointerPos: 'perfectRight',
/**
This property will be set to the element (or view.get('layer')) that
triggered your picker to show. You can use this to properly position your
picker.
@property {Object}
*/
anchorElement: null,
/**
popular customized picker position rule
@property {String}
*/
preferType: null,
/**
default/custom offset or position pref matrix for specific preferType
@property {String}
*/
preferMatrix: null,
/**
Displays a new picker pane according to the passed parameters.
Every parameter except for the anchorViewOrElement is optional.
@param {Object} anchorViewOrElement view or element to anchor to
@param {String} preferType optional apply picker position rule
@param {Array} preferMatrix optional apply custom offset or position pref matrix for specific preferType
@returns {SC.PickerPane} receiver
*/
popup: function(anchorViewOrElement, preferType, preferMatrix) {
var anchor = anchorViewOrElement.isView ? anchorViewOrElement.get('layer') : anchorViewOrElement;
this.beginPropertyChanges();
this.set('anchorElement',anchor) ;
if (preferType) this.set('preferType',preferType) ;
if (preferMatrix) this.set('preferMatrix',preferMatrix) ;
this.endPropertyChanges();
this.positionPane();
this.append();
},
/** @private
The ideal position for a picker pane is just below the anchor that
triggered it + offset of specific preferType. Find that ideal position,
then call fitPositionToScreen to get final position. If anchor is missing,
fallback to center.
*/
positionPane: function() {
var anchor = this.get('anchorElement'),
preferType = this.get('preferType'),
preferMatrix = this.get('preferMatrix'),
layout = this.get('layout'),
origin ;
// usually an anchorElement will be passed. The ideal position is just
// below the anchor + default or custom offset according to preferType.
// If that is not possible, fitPositionToScreen will take care of that for
// other alternative and fallback position.
if (anchor) {
anchor = this.computeAnchorRect(anchor);
origin = SC.cloneRect(anchor);
if (preferType) {
switch (preferType) {
case SC.PICKER_MENU:
case SC.PICKER_FIXED:
if(!preferMatrix || preferMatrix.length != 3) {
// default below the anchor with fine tunned visual alignment
// for Menu to appear just below the anchorElement.
this.set('preferMatrix', [1, 4, 3]) ;
}
// fine tunned visual alignment from preferMatrix
origin.x += ((this.preferMatrix[2]===0) ? origin.width : 0) + this.preferMatrix[0] ;
origin.y += ((this.preferMatrix[2]===3) ? origin.height : 0) + this.preferMatrix[1];
break;
default:
origin.y += origin.height ;
break;
}
} else {
origin.y += origin.height ;
}
origin = this.fitPositionToScreen(origin, this.get('frame'), anchor) ;
layout = { width: origin.width, height: origin.height, left: origin.x, top: origin.y };
// if no anchor view has been set for some reason, just center.
} else {
layout = { width: layout.width, height: layout.height, centerX: 0, centerY: 0 };
}
this.set('layout', layout).updateLayout();
return this ;
},
/** @private
This method will return ret (x, y, width, height) from a rectangular element
*/
computeAnchorRect: function(anchor) {
var ret = SC.viewportOffset(anchor); // get x & y
var cq = SC.$(anchor);
ret.width = cq.width();
ret.height = cq.height();
return ret ;
},
/** @private
This method will dispatch to the right re-position rule according to preferType
*/
fitPositionToScreen: function(preferredPosition, picker, anchor) {
// get window rect.
var wsize = this.get('currentWindowSize') || SC.RootResponder.responder.computeWindowSize() ;
var wret = { x: 0, y: 0, width: wsize.width, height: wsize.height } ;
picker.x = preferredPosition.x ; picker.y = preferredPosition.y ;
if(this.preferType) {
switch(this.preferType) {
case SC.PICKER_MENU:
// apply default + menu re-position rule
picker = this.fitPositionToScreenDefault(wret, picker, anchor) ;
picker = this.fitPositionToScreenMenu(wret, picker) ;
break;
case SC.PICKER_POINTER:
// apply pointer re-position rule
picker = this.fitPositionToScreenPointer(wret, picker, anchor) ;
break;
case SC.PICKER_FIXED:
// skip fitPositionToScreen
break;
default:
break;
}
} else {
// apply default re-position rule
picker = this.fitPositionToScreenDefault(wret, picker, anchor) ;
}
this.displayDidChange();
return picker ;
},
/** @private
re-position rule migrated from old SC.OverlayPaneView.
shift x, y to optimized picker visibility and make sure top-left corner is always visible.
*/
fitPositionToScreenDefault: function(w, f, a) {
// make sure the right edge fits on the screen. If not, anchor to
// right edge of anchor or right edge of window, whichever is closer.
if (SC.maxX(f) > w.width) {
var mx = Math.max(SC.maxX(a), f.width) ;
f.x = Math.min(mx, w.width) - f.width ;
}
// if the left edge is off of the screen, try to position at left edge
// of anchor. If that pushes right edge off screen, shift back until
// right is on screen or left = 0
if (SC.minX(f) < 0) {
f.x = SC.minX(Math.max(a,0)) ;
if (SC.maxX(f) > w.width) {
f.x = Math.max(0, w.width - f.width);
}
}
// make sure bottom edge fits on screen. If not, try to anchor to top
// of anchor or bottom edge of screen.
if (SC.maxY(f) > w.height) {
mx = Math.max((a.y - f.height), 0) ;
if (mx > w.height) {
f.y = Math.max(0, w.height - f.height) ;
} else f.y = mx ;
}
// if Top edge is off screen, try to anchor to bottom of anchor. If that
// pushes off bottom edge, shift up until it is back on screen or top =0
if (SC.minY(f) < 0) {
mx = Math.min(SC.maxY(a), (w.height - a.height)) ;
f.y = Math.max(mx, 0) ;
}
return f ;
},
/** @private
re-position rule optimized for Menu to enforce min left(7px)/right(8px) padding to the window
*/
fitPositionToScreenMenu: function(w, f) {
// min left/right padding to the window
if( (f.x + f.width) > (w.width-20) ) f.x = w.width - f.width - 20;
if( f.x < 7 ) f.x = 7;
// if the height of the menu is bigger than the window height resize it.
if( f.height > w.height){
f.y = 15;
f.height = w.height - 35;
}
return f ;
},
/** @private
re-position rule for triangle pointer picker: take default [0,1,2,3,2] or custom matrix to choose one of four perfect pointer positions.
*/
fitPositionToScreenPointer: function(w, f, a) {
// initiate perfect positions matrix
// 4 perfect positions: right > left > top > bottom
// 2 coordinates: x, y
// top-left corner of 4 perfect positioned f (4x2)
var overlapTunningX = (a.height > 12) ? 0 : 1;
var overlapTunningY = (a.height > 12) ? 0 : 3;
var prefP1 =[[a.x+a.width+(19+overlapTunningX), a.y+parseInt(a.height/2,0)-40],
[a.x-f.width-(7+overlapTunningX), a.y+parseInt(a.height/2,0)-40],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0), a.y-f.height-(17+overlapTunningY)],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0), a.y+a.height+(17+overlapTunningY)]];
// bottom-right corner of 4 perfect positioned f (4x2)
var prefP2 =[[a.x+a.width+f.width+(19+overlapTunningX), a.y+parseInt(a.height/2,0)+f.height-40],
[a.x-(7+overlapTunningX), a.y+parseInt(a.height/2,0)+f.height-40],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0)+f.width, a.y-(17+overlapTunningY)],
[a.x+parseInt(a.width/2,0)-parseInt(f.width/2,0)+f.width, a.y+a.height+f.height+(17+overlapTunningY)]];
// cutoff of 4 perfect positioned f: top, right, bottom, left (4x4)
var cutoffPrefP =[[prefP1[0][1]>0 ? 0 : 0-prefP1[0][1], prefP2[0][0]<w.width ? 0 : prefP2[0][0]-w.width, prefP2[0][1]<w.height ? 0 : prefP2[0][1]-w.height, prefP1[0][0]>0 ? 0 : 0-prefP1[0][0]],
[prefP1[1][1]>0 ? 0 : 0-prefP1[1][1], prefP2[1][0]<w.width ? 0 : prefP2[1][0]-w.width, prefP2[1][1]<w.height ? 0 : prefP2[1][1]-w.height, prefP1[1][0]>0 ? 0 : 0-prefP1[1][0]],
[prefP1[2][1]>0 ? 0 : 0-prefP1[2][1], prefP2[2][0]<w.width ? 0 : prefP2[2][0]-w.width, prefP2[2][1]<w.height ? 0 : prefP2[2][1]-w.height, prefP1[2][0]>0 ? 0 : 0-prefP1[2][0]],
[prefP1[3][1]>0 ? 0 : 0-prefP1[3][1], prefP2[3][0]<w.width ? 0 : prefP2[3][0]-w.width, prefP2[3][1]<w.height ? 0 : prefP2[3][1]-w.height, prefP1[3][0]>0 ? 0 : 0-prefP1[3][0]]];
if(!this.preferMatrix || this.preferMatrix.length != 5) {
// default re-position rule : perfect right (0) > perfect left (1) > perfect top (2) > perfect bottom (3)
// fallback to perfect top (2)
this.set('preferMatrix', [0,1,2,3,2]) ;
}
var m = this.preferMatrix;
//var pointer = this.contentView.childViews[this.contentView.childViews.length-1];
// initiated with fallback position
// Will be used only if the following preferred alternative can not be found
f.x = prefP1[m[4]][0] ;
f.y = prefP1[m[4]][1] ;
this.set('pointerPos', SC.POINTER_LAYOUT[m[4]]);
for(var i=0; i<SC.POINTER_LAYOUT.length; i++) {
if (cutoffPrefP[m[i]][0]===0 && cutoffPrefP[m[i]][1]===0 && cutoffPrefP[m[i]][2]===0 && cutoffPrefP[m[i]][3]===0) {
// alternative i in preferMatrix by priority
if (m[4] != m[i]) {
f.x = prefP1[m[i]][0] ;
f.y = prefP1[m[i]][1] ;
this.set('pointerPos', SC.POINTER_LAYOUT[m[i]]);
}
i = SC.POINTER_LAYOUT.length;
}
}
return f ;
},
render: function(context, firstTime) {
var ret = sc_super();
if (context.needsContent) {
if (this.get('preferType') == SC.PICKER_POINTER) {
context.push('<div class="sc-pointer %@"></div>'.fmt(this.get('pointerPos')));
}
} else {
var el = this.$('.sc-pointer');
el.attr('class', "sc-pointer %@".fmt(this.get('pointerPos')));
}
return ret ;
},
/** @private - click away picker. */
modalPaneDidClick: function(evt) {
var f = this.get("frame");
if(!this.clickInside(f, evt)) this.remove();
return YES ;
},
mouseDown: function(evt) {
return this.modalPaneDidClick(evt);
},
/** @private
internal method to define the range for clicking inside so the picker
won't be clicked away default is the range of contentView frame.
Over-write for adjustments. ex: shadow
*/
clickInside: function(frame, evt) {
return SC.pointInRect({ x: evt.pageX, y: evt.pageY }, frame);
},
/**
Invoked by the root responder. Re-position picker whenever the window resizes.
*/
windowSizeDidChange: function(oldSize, newSize) {
sc_super();
this.positionPane();
}
});
| adjust left/right pointer position up/down to 20px from top/bottom so more possible positions can be available
| frameworks/desktop/panes/picker.js | adjust left/right pointer position up/down to 20px from top/bottom so more possible positions can be available | <ide><path>rameworks/desktop/panes/picker.js
<ide> isModal: YES,
<ide>
<ide> pointerPos: 'perfectRight',
<add> pointerPosX: 0,
<add> pointerPosY: 0,
<ide>
<ide> /**
<ide> This property will be set to the element (or view.get('layer')) that
<ide> f.x = prefP1[m[4]][0] ;
<ide> f.y = prefP1[m[4]][1] ;
<ide> this.set('pointerPos', SC.POINTER_LAYOUT[m[4]]);
<add> this.set('pointerPosX', 0);
<add> this.set('pointerPosY', 0);
<ide>
<ide> for(var i=0; i<SC.POINTER_LAYOUT.length; i++) {
<ide> if (cutoffPrefP[m[i]][0]===0 && cutoffPrefP[m[i]][1]===0 && cutoffPrefP[m[i]][2]===0 && cutoffPrefP[m[i]][3]===0) {
<ide> this.set('pointerPos', SC.POINTER_LAYOUT[m[i]]);
<ide> }
<ide> i = SC.POINTER_LAYOUT.length;
<add> } else if ((m[i] === 0 || m[i] === 1) && cutoffPrefP[m[i]][0]===0 && cutoffPrefP[m[i]][1]===0 && cutoffPrefP[m[i]][2] < f.height-91 && cutoffPrefP[m[i]][3]===0) {
<add> if (m[4] != m[i]) {
<add> f.x = prefP1[m[i]][0] ;
<add> this.set('pointerPos', SC.POINTER_LAYOUT[m[i]]);
<add> }
<add> f.y = prefP1[m[i]][1] - cutoffPrefP[m[i]][2] ;
<add> this.set('pointerPosY', cutoffPrefP[m[i]][2]);
<add> i = SC.POINTER_LAYOUT.length;
<ide> }
<ide> }
<ide> return f ;
<ide> var ret = sc_super();
<ide> if (context.needsContent) {
<ide> if (this.get('preferType') == SC.PICKER_POINTER) {
<del> context.push('<div class="sc-pointer %@"></div>'.fmt(this.get('pointerPos')));
<add> context.push('<div class="sc-pointer %@" style="margin-top: %@px"></div>'.fmt(this.get('pointerPos'), this.get('pointerPosY')));
<ide> }
<ide> } else {
<ide> var el = this.$('.sc-pointer');
<ide> el.attr('class', "sc-pointer %@".fmt(this.get('pointerPos')));
<add> el.attr('style', "margin-top: %@px".fmt(this.get('pointerPosY')));
<ide> }
<ide> return ret ;
<ide> }, |
|
JavaScript | bsd-3-clause | c9bcaf33c7c0f21fe5d90bf99fc9e2c1422729f6 | 0 | 1950195/mojito,1950195/mojito,yahoo/mojito,ooskapenaar/mojito,ooskapenaar/mojito,yahoo/mojito,yahoo/mojito,ooskapenaar/mojito,ooskapenaar/mojito,1950195/mojito | /*
* Copyright (c) 2011-2013, Yahoo! Inc. All rights reserved.
* Copyrights licensed under the New BSD License.
* See the accompanying LICENSE file for terms.
*/
YUI().use(
'oop',
'mojito-test-extra',
'mojito-resource-store',
'addon-rs-config',
'addon-rs-selector',
'addon-rs-url',
'addon-rs-yui',
'test',
function(Y) {
var suite = new Y.Test.Suite('mojito-store-server-tests'),
libpath = require('path'),
mojitoRoot = libpath.join(__dirname, '../../../lib'),
store,
Mock = Y.Mock,
A = Y.Assert,
AA = Y.ArrayAssert,
OA = Y.ObjectAssert;
suite.add(new Y.Test.Case({
name: 'Store tests -- preload fixture "store"',
init: function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
},
'pre load': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
//Y.log(Y.JSON.stringify(store,null,4));
A.isTrue(store._config.root === fixtures);
},
'store is not lazy by default': function () {
A.isFalse(Object.keys(store._mojitDetailsCache).length === 0);
},
'valid context': function() {
var success;
try {
store.validateContext({});
} catch(e) {
A.fail('{} should be valid');
}
try {
store.validateContext({device:'iphone'});
} catch(e) {
A.fail('{device:iphone} should be valid');
}
try {
store.validateContext({device:'iphone',lang:'en'});
} catch(e) {
A.fail('{device:iphone,lang:en} should be valid');
}
try {
store.validateContext({device:'iphone',runtime:'common'});
} catch(e) {
A.fail('{device:iphone,runtime:common} should be valid');
}
try {
success = undefined;
store.validateContext({device:'blender'});
success = true;
} catch(e) {
success = false;
}
A.isFalse(success, '{device:blender} should be invalid');
try {
success = undefined;
store.validateContext({device:'iphone',texture:'corrugated'});
success = true;
} catch(e) {
success = false;
}
A.isFalse(success, '{device:iphone,texture:corrugated} should be invalid');
try {
success = undefined;
store.validateContext({device:'iphone',runtime:'kite'});
success = true;
} catch(e) {
success = false;
}
A.isFalse(success, '{device:iphone,runtime:kite} should be invalid');
},
'server app config value': function() {
var config = store.getAppConfig(null);
A.isTrue(config.testKey1 === 'testVal1');
},
'server mojit config value': function() {
var instance = {base:'test1'};
store.expandInstance(instance, {}, function(err, instance){
A.isNull(err);
A.isTrue(instance.id === 'test1', 'wrong ID');
A.isTrue(instance.type === 'test_mojit_1', 'wrong type');
A.isTrue(instance.config.testKey4 === 'testVal4', 'missing key from definition.json');
});
},
'server mojit config value via type': function() {
var instance = {type:'test_mojit_1'};
store.expandInstance(instance, {}, function(err, instance){
A.isTrue(instance.type === 'test_mojit_1', 'wrong ID');
A.isTrue(instance.config.testKey4 === 'testVal4', 'missing config from definition.json');
A.isTrue(instance.config.testKey6.testKey7 === 'testVal7', 'missing deep config from definition.json');
});
},
'server mojit config value via type and override': function() {
var instance = {
type:'test_mojit_1',
config:{testKey4: 'other'}
};
store.expandInstance(instance, {}, function(err, instance){
A.isTrue(instance.type === 'test_mojit_1', 'wrong ID');
A.areSame('other', instance.config.testKey4, 'missing config from definition.json');
A.areSame('testVal5', instance.config.testKey5, 'missing deep config from defaults.json');
});
},
'server mojit instance assets': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var instance = {type:'test_mojit_1'};
store.expandInstance(instance, {}, function(err, instance) {
// we'll skip the favicon.ico that ships with Mojito
// (it's not availble when running --coverage anyway)
A.areSame(libpath.join(fixtures, 'mojits/test_mojit_1/assets/css/main.css'), instance.assets['css/main.css']);
A.areSame(libpath.join(fixtures, 'mojits/test_mojit_1/assets/js/main.js'), instance.assets['js/main.js']);
});
},
'server mojit instance views and binders': function() {
var instance = {type:'test_mojit_1'};
store.expandInstanceForEnv('client', instance, {}, function(err, instance) {
A.areSame(4, Y.Object.keys(instance.views).length);
A.isObject(instance.views['test_1']);
A.areSame('/static/test_mojit_1/views/test_1.hb.html', instance.views['test_1']['content-path']);
A.areSame('hb', instance.views['test_1']['engine']);
A.areSame('test_mojit_1Bindertest_1', instance.binders['test_1']);
A.areSame('test_mojit_1Bindersubdir/test_1', instance.binders['subdir/test_1']);
A.isObject(instance.views['test_1']);
A.areSame('/static/test_mojit_1/views/test_1.hb.html', instance.views['test_1']['content-path']);
A.areSame('hb', instance.views['test_1']['engine']);
A.isObject(instance.views['test_2']);
A.areSame('/static/test_mojit_1/views/test_2.hb.html', instance.views['test_2']['content-path']);
A.areSame('hb', instance.views['test_2']['engine']);
A.isObject(instance.views['subdir/test_1']);
A.areSame('/static/test_mojit_1/views/subdir/test_1.hb.html', instance.views['subdir/test_1']['content-path']);
A.areSame('hb', instance.views['subdir/test_1']['engine']);
A.isObject(instance.partials['test_3']);
A.areSame('/static/test_mojit_1/views/partials/test_3.hb.html', instance.partials['test_3']['content-path']);
A.areSame('hb', instance.partials['test_3']['engine']);
});
},
'server mojit instance models': function() {
var instance = {type:'test_mojit_1'};
store.expandInstance(instance, {}, function(err, instance) {
A.areSame(4, Y.Object.keys(instance.models).length);
A.areSame('ModelFlickr', instance.models['flickr']);
A.areSame('test_applevelModel', instance.models['test_applevel']);
A.areSame('test_mojit_1_model_test_1', instance.models['test_1']);
A.areSame('test_mojit_1_model_test_2', instance.models['test_2']);
});
},
'server mojit type name can come from package.json': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var instance = {type:'TestMojit2'};
store.expandInstance(instance, {}, function(err, instance){
A.isNotUndefined(instance.controller);
A.areSame('TestMojit2', instance.type);
A.areSame(libpath.join(fixtures, 'mojits/test_mojit_2/views/index.hb.html'), instance.views.index['content-path']);
});
},
'server mojit is NOT loaded because of package mojito version mismatch': function(){
var urls = store.getAllURLs();
A.isUndefined(urls['/static/test_mojit_4/package.json']);
A.isUndefined(urls['/static/TestMojit4/package.json']);
},
'server mojit is loaded because of package mojito version match': function(){
var instance = {type:'TestMojit2'};
store.expandInstance(instance, {}, function(err, instance){
A.areSame('TestMojit2', instance.type);
});
},
'server a mojits package.json file is available as appropriate': function() {
var urls = store.getAllURLs();
A.isUndefined(urls['/static/TestMojit2/package.json']);
A.isNotUndefined(urls['/static/TestMojit3/package.json']);
A.isUndefined(urls['/static/TestMojit5/package.json']);
},
'server mojit view index.hb.html is loaded correctly': function() {
var instance = {type:'TestMojit3'};
store.expandInstance(instance, {}, function(err, instance){
A.areSame('index.hb.html', instance.views.index['content-path'].split(libpath.sep).pop());
});
},
'server mojit view index.iphone.hb.html is loaded correctly': function(){
var instance = {type:'TestMojit3'};
store.expandInstance(instance, {device:'iphone'}, function(err, instance){
A.areSame('index.iphone.hb.html', instance.views.index['content-path'].split(libpath.sep).pop());
});
},
'app-level mojits': function() {
var instance = { type: 'test_mojit_1' };
store.expandInstance(instance, {}, function(err, instance) {
A.isNotUndefined(instance.models.test_applevel);
});
},
'mojitDirs setting': function() {
var instance = { type: 'soloMojit' };
store.expandInstance(instance, {}, function(err, instance) {
A.areSame('soloMojit', instance.controller);
});
},
'getMojitTypeDetails caching': function() {
var key = Y.JSON.stringify(['server', ['*'], 'en', 'x']);
store._getMojitTypeDetailsCache[key] = { x: 'y' };
var details = store.getMojitTypeDetails('server', {lang: 'en'}, 'x');
A.isObject(details);
A.areEqual(1, Object.keys(details).length);
A.areEqual('y', details.x);
},
'expandInstanceForEnv preserves instanceId': function() {
var inInstance = {
type: 'test_mojit_1',
instanceId: 'foo'
};
store.expandInstanceForEnv('server', inInstance, {}, function(err, outInstance) {
A.areSame('foo', outInstance.instanceId);
});
},
'multi preload': function() {
var pre = {
appRVs: Y.clone(store._appRVs, true),
mojitRVs: Y.clone(store._mojitRVs, true),
appResources: Y.clone(store._appResources, true),
mojitResources: Y.clone(store._mojitResources, true)
};
store.preload();
var post = {
appRVs: Y.clone(store._appRVs, true),
mojitRVs: Y.clone(store._mojitRVs, true),
appResources: Y.clone(store._appResources, true),
mojitResources: Y.clone(store._mojitResources, true)
};
Y.TEST_CMP(post, pre);
},
'instance with base pointing to non-existant spec': function() {
var spec = { base: 'nonexistant' };
store.expandInstance(spec, {}, function(err, instance) {
A.isNotUndefined(err);
A.areSame('Unknown base "nonexistant". You should have configured "nonexistant" in application.json under specs or used "@nonexistant" if you wanted to specify a mojit name.', err.message);
A.isUndefined(instance);
});
},
'instance with default spec': function() {
// should use tests/fixtures/store/mojits/test_mojit_2/specs/default.json
var spec = { base: 'TestMojit2' };
store.expandInstance(spec, {}, function(err, instance) {
A.areSame('testVal1', instance.config.testKey1);
});
},
'getAppConfig() returns contextualized info': function() {
var context = { runtime: 'server' },
config;
config = store.getAppConfig(context);
A.isObject(config);
A.areSame('testVal1-server', config.testKey1, 'testKey1 wasnt contextualized to the server');
A.areSame('testVal2', config.testKey2, 'testKey2 gotten from the wrong context');
A.areSame('portended', config.pathos, 'missing contextualized config');
A.isUndefined(config.testKey4, 'testKey4 gotten from the wrong context');
},
'call getRoutes()': function() {
var routes = store.getRoutes({});
A.isObject(routes, 'no routes at all');
A.isObject(routes.flickr_by_page, 'missing route flickr_by_page');
A.isObject(routes.flickr_base, 'missing route flickr_base');
},
'call listAllMojits()': function() {
var list = store.listAllMojits('server');
A.areSame(11, list.length, 'found the wrong number of mojits');
AA.contains('TunnelProxy', list);
AA.contains('HTMLFrameMojit', list);
AA.contains('LazyLoad', list);
AA.contains('inlinecss', list);
AA.contains('rollups', list);
AA.contains('test_mojit_1', list);
AA.contains('TestMojit2', list);
AA.contains('TestMojit3', list);
AA.contains('TestMojit5', list);
AA.contains('soloMojit', list);
AA.contains('page', list);
},
// TODO -- do we still need rollups?
'ignore: app with rollups': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var spec = { type: 'rollups' };
store.expandInstanceForEnv('client', spec, {}, function(err, instance) {
A.areSame('/static/rollups/rollup.client.js', instance.yui.config.modules['rollups'].fullpath, 'main rollup');
var urls = store.getAllURLs();
A.areSame(libpath.join(fixtures, 'mojits/rollups/rollup.client.js'), urls['/static/rollups/rollup.client.js']);
});
},
'app resource overrides framework resource': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store'),
details = store.getMojitTypeDetails('server', {}, 'HTMLFrameMojit');
A.areSame(libpath.join(fixtures, 'mojits/HTMLFrameMojit'), details.fullPath);
},
'ignore: getAllURLResources()': function() {
// TODO
},
'ignore: makeResourceVersions()': function() {
// TODO
},
'ignore: getResourceContent()': function() {
// TODO
},
'ignore: processResourceContent()': function() {
// TODO
},
'ignore: getAppPkgMeta()': function() {
// TODO
},
'ignore: makeResourceFSMeta()': function() {
// TODO
}
}));
suite.add(new Y.Test.Case({
name: 'Store tests -- preload fixture "gsg5"',
init: function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5');
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
},
'controller with selector': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5');
var spec = { type: 'PagedFlickr' };
var ctx = { device: 'iphone' };
store.expandInstance(spec, ctx, function(err, instance) {
A.areSame('PagedFlickr', instance.controller);
});
},
'binder with selector': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5');
var spec = { type: 'PagedFlickr' };
var ctx = { device: 'iphone' };
store.expandInstance(spec, ctx, function(err, instance) {
A.areSame(libpath.join(fixtures, 'mojits/PagedFlickr/views/index.iphone.hb.html'), instance.views.index['content-path']);
});
},
'augment getMojitTypeDetails with AC addons': function() {
var details = store.getMojitTypeDetails('server', {}, 'PagedFlickr');
// order matters
A.areSame(4, details.acAddons.length, 'number of AC addons');
A.areSame(JSON.stringify(['config','intl','params','url']), JSON.stringify(details.acAddons), 'correct order');
}
}));
suite.add(new Y.Test.Case({
name: 'Store tests -- preload fixture "gsg5-appConfig"',
init: function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5-appConfig');
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
},
'appConfig staticHandling.prefix': function() {
var spec = { type: 'PagedFlickr' };
store.expandInstanceForEnv('client', spec, {}, function(err, instance) {
A.areSame('/static/PagedFlickr/assets', instance.assetsRoot);
});
}
}));
suite.add(new Y.Test.Case({
name: 'Store tests -- preload fixture "lazy-resolve"',
init: function () {
var fixtures = libpath.join(__dirname, '../../../../fixtures/lazy-resolve');
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
},
'store is actually lazy': function () {
A.isTrue(Object.keys(store._mojitDetailsCache).length === 0);
}
}));
suite.add(new Y.Test.Case({
name: 'Store tests -- misc',
'static context is really static': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store'),
context = { runtime: 'server' },
store = new Y.mojito.ResourceStore({ root: fixtures, context: context }),
config;
store.preload();
config = store.getAppConfig();
A.isObject(config);
A.areSame('testVal1-server', config.testKey1, 'testKey1 wasnt contextualized to the server');
A.areSame('testVal2', config.testKey2, 'testKey2 gotten from the wrong context');
A.areSame('portended', config.pathos, 'missing contextualized config');
A.isUndefined(config.testKey4, 'testKey4 gotten from the wrong context');
},
'pre load no application.json file': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store_no_app_config'),
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
//Y.log(Y.JSON.stringify(store,null,4));
A.isTrue(store._config.root === fixtures);
},
'default routes': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store_no_app_config'),
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
var have = store.getRoutes();
A.isObject(have._default_path);
},
'bad files': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/badfiles'),
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
var spec = { type: 'M' };
store.expandInstance(spec, {}, function(err, instance) {
A.isUndefined(instance.models['MModelNot']);
A.isUndefined(instance.binders.not);
});
},
'sortedReaddirSync() sorts the result of fs.readdirSync()': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var mockfs = Mock();
Mock.expect(mockfs, {
method: 'readdirSync',
args: ['dir'],
returns: ['d', 'c', 'a', 'b']
});
var store = new Y.mojito.ResourceStore({ root: fixtures });
store._mockLib('fs', mockfs);
var files = store._sortedReaddirSync('dir');
AA.itemsAreSame(['a', 'b', 'c', 'd'], files);
Mock.verify(mockfs);
},
'_skipBadPath() does just that': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var store = new Y.mojito.ResourceStore({ root: fixtures });
A.isTrue(store._skipBadPath({ isFile: true, ext: '.js~' }), 'need to skip bad file naems');
A.isFalse(store._skipBadPath({ isFile: false, ext: '.js~' }), 'need to not-skip bad directory names');
A.isFalse(store._skipBadPath({ isFile: true, ext: '.js' }), 'need to not-skip good file names');
},
'load node_modules': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/packages'),
store = new Y.mojito.ResourceStore({ root: fixtures });
if (!store._mojitRVs.a && !store._mojitRVs.aa && !store._mojitRVs.ba) {
// This happens when mojito is installed via npm, since npm
// won't install the node_modules/ directories in
// tests/fixtures/packages.
A.isTrue(true);
return;
}
var config = store.yui.getConfigShared('server');
A.isObject(config.modules.b, 'b');
A.isObject(config.modules.ab, 'ab');
A.isObject(config.modules.bb, 'bb');
A.isObject(config.modules.cb, 'cb');
var details = store.getMojitTypeDetails('server', {}, 'a');
A.areSame('a', details.controller);
},
'skip loaded packages': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/packages'),
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
var oldlog = Y.log;
var logged = false;
Y.log = function(msg, lvl, src) {
if ('debug' === lvl && 'mojito-resource-store' === src && msg.match(/^skipping duplicate package a/)) {
logged = true;
}
};
try {
store.preload();
} finally {
Y.log = oldlog;
}
A.isTrue(logged, 'info logged');
},
'find and parse resources by convention': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/conventions'),
store = new Y.mojito.ResourceStore({ root: fixtures });
// fake out some parts of preload(), which we're trying to avoid
store._fwConfig = store.config.readConfigSimple(libpath.join(mojitoRoot, 'config.json'));
store._appConfigStatic = store.getStaticAppConfig();
var dir = libpath.join(__dirname, '../../../../fixtures/conventions');
var pkg = { name: 'test', version: '6.6.6' };
var mojitType = 'testing';
var ress = store._findResourcesByConvention(dir, 'app', pkg, mojitType);
var r, res;
for (r = 0; r < ress.length; r++) {
res = ress[r];
A.isNotUndefined(res.id, 'no resource id');
switch (res.id) {
case 'action--x':
A.areSame(pkg, res.source.pkg);
A.areSame('action', res.type);
A.areSame('x', res.name);
switch (res.source.fs.basename) {
case 'x.common':
A.areSame('*', res.selector);
A.areSame('common', res.affinity);
A.areSame('.js', res.source.fs.ext);
A.areSame('x', res.name);
break;
case 'x.common.iphone':
A.areSame('iphone', res.selector);
A.areSame('common', res.affinity);
A.areSame('.js', res.source.fs.ext);
A.areSame('x', res.name);
break;
default:
A.fail('unknown resource ' + res.source.fs.fullPath);
break;
}
break;
case 'action--y/z':
A.areSame(pkg, res.source.pkg);
A.areSame('action', res.type);
A.areSame('y/z', res.name);
A.areSame('*', res.selector);
A.areSame('common', res.affinity);
A.areSame('.js', res.source.fs.ext);
A.areSame('z.common', res.source.fs.basename);
break;
case 'addon-a-x':
A.areSame(pkg, res.source.pkg);
A.areSame('addon', res.type);
A.areSame('a', res.subtype);
A.areSame('x', res.name);
switch (res.source.fs.basename) {
case 'x.common':
A.areSame('*', res.selector);
A.areSame('common', res.affinity);
A.areSame('.js', res.source.fs.ext);
A.areSame('x', res.name);
break;
case 'x.common.iphone':
A.areSame('iphone', res.selector);
A.areSame('common', res.affinity);
A.areSame('.js', res.source.fs.ext);
A.areSame('x', res.name);
break;
default:
A.fail('unknown resource ' + res.source.fs.fullPath);
break;
}
break;
case 'archetype-x-y':
A.areSame(pkg, res.source.pkg);
A.areSame('archetype', res.type);
A.areSame('x', res.subtype);
A.areSame('y', res.name);
A.areSame('y', res.source.fs.basename);
break;
case 'asset-css-x':
A.areSame(pkg, res.source.pkg);
A.areSame('asset', res.type);
A.areSame('css', res.subtype);
A.areSame('x', res.name);
switch (res.source.fs.basename) {
case 'x':
A.areSame('*', res.selector);
A.areSame('common', res.affinity);
A.areSame('.css', res.source.fs.ext);
break;
case 'x.iphone':
A.areSame('iphone', res.selector);
A.areSame('common', res.affinity);
A.areSame('.css', res.source.fs.ext);
break;
default:
A.fail('unknown resource ' + res.source.fs.fullPath);
break;
}
break;
case 'asset-css-y/z':
A.areSame(pkg, res.source.pkg);
A.areSame('asset', res.type);
A.areSame('css', res.subtype);
A.areSame('y/z', res.name);
switch (res.source.fs.basename) {
case 'z':
A.areSame('*', res.selector);
A.areSame('common', res.affinity);
A.areSame('.css', res.source.fs.ext);
break;
case 'z.android':
A.areSame('android', res.selector);
A.areSame('common', res.affinity);
A.areSame('.css', res.source.fs.ext);
break;
default:
A.fail('unknown resource ' + res.source.fs.fullPath);
break;
}
break;
case 'binder--x':
A.areSame(pkg, res.source.pkg);
A.areSame('binder', res.type);
A.areSame('x', res.name);
switch (res.source.fs.basename) {
case 'x':
A.areSame('*', res.selector);
A.areSame('client', res.affinity);
A.areSame('.js', res.source.fs.ext);
break;
case 'x.iphone':
A.areSame('iphone', res.selector);
A.areSame('client', res.affinity);
A.areSame('.js', res.source.fs.ext);
break;
default:
A.fail('unknown resource ' + res.source.fs.fullPath);
break;
}
break;
case 'command--x':
A.areSame(pkg, res.source.pkg);
A.areSame('command', res.type);
A.areSame('x', res.name);
A.areSame('x', res.source.fs.basename);
break;
case 'config--config':
A.areSame(pkg, res.source.pkg);
A.areSame('config', res.type);
A.areSame('config', res.name);
A.areSame('config', res.source.fs.basename);
A.areSame('.json', res.source.fs.ext);
break;
case 'controller--controller':
A.areSame(pkg, res.source.pkg);
A.areSame('controller', res.type);
A.areSame('controller', res.name);
switch (res.source.fs.basename) {
case 'controller.common':
A.areSame('*', res.selector);
A.areSame('common', res.affinity);
A.areSame('.js', res.source.fs.ext);
break;
case 'controller.server.iphone':
A.areSame('iphone', res.selector);
A.areSame('server', res.affinity);
A.areSame('.js', res.source.fs.ext);
break;
default:
A.fail('unknown resource ' + res.source.fs.fullPath);
break;
}
break;
case 'middleware--x':
A.areSame(pkg, res.source.pkg);
A.areSame('middleware', res.type);
A.areSame('x', res.name);
A.areSame('x', res.source.fs.basename);
A.areSame('.js', res.source.fs.ext);
break;
case 'spec--default':
A.areSame(pkg, res.source.pkg);
A.areSame('spec', res.type);
A.areSame('default', res.name);
A.areSame('default', res.source.fs.basename);
A.areSame('.json', res.source.fs.ext);
break;
case 'spec--x':
A.areSame(pkg, res.source.pkg);
A.areSame('spec', res.type);
A.areSame('testing', res.mojit);
A.areSame('x', res.name);
A.areSame('x', res.source.fs.basename);
A.areSame('.json', res.source.fs.ext);
break;
case 'view--x':
A.areSame(pkg, res.source.pkg);
A.areSame('view', res.type);
A.areSame('x', res.name);
A.areSame('html', res.view.outputFormat);
A.areSame('hb', res.view.engine);
switch (res.source.fs.basename) {
case 'x.hb':
A.areSame('*', res.selector);
A.areSame('common', res.affinity);
A.areSame('.html', res.source.fs.ext);
break;
case 'x.iphone.hb':
A.areSame('iphone', res.selector);
A.areSame('common', res.affinity);
A.areSame('.html', res.source.fs.ext);
break;
default:
A.fail('unknown resource ' + res.source.fs.fullPath);
break;
}
break;
default:
A.fail('unknown resource ' + res.id);
break;
}
}
A.areSame(21, ress.length, 'wrong number of resources');
}
}));
suite.add(new Y.Test.Case({
name: 'Store tests -- "bleeding"',
init: function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
},
'test bleeding spec with no config': function() {
var instance = { type: "page" },
ctx = {};
store.expandInstanceForEnv('server', instance, ctx, function(err, expanded) {
A.isNotUndefined(expanded, 'expanded should not be undefined');
OA.areEqual({}, expanded.config, 'config should be empty');
});
},
'test bleeding spec with config': function() {
var instance,
ctx = {};
instance = {
type: "page",
config: {
children: {
weather: { type: "weather", action: "index" },
stream: { type: "stream", action: "index" }
}
}
};
store.expandInstanceForEnv('server', instance, ctx, function(err, expanded) {
A.isNotUndefined(expanded, 'expanded should not be undefined');
OA.areEqual(instance.config.children.weather,
expanded.config.children.weather,
'config missing children.weather');
OA.areEqual(instance.config.children.stream,
expanded.config.children.stream,
'config missing children.stream');
});
},
'test bleeding spec with mixed config': function() {
var instance1,
instance2,
ctx = {};
instance1 = {
type: "page",
config: {
children: {
weather: { type: "weather", action: "index" },
stream: { type: "stream", action: "index" }
}
}
};
instance2 = {
type: "page"
};
store.expandInstanceForEnv('server', instance1, ctx, function(err, expanded1) {
// test 1
A.isNotUndefined(expanded1, 'expanded1 should not be undefined');
OA.areEqual(instance1.config.children.weather,
expanded1.config.children.weather,
'config missing children.weather');
OA.areEqual(instance1.config.children.stream,
expanded1.config.children.stream,
'config missing children.stream');
// test 2
store.expandInstanceForEnv('server', instance2, ctx, function(err, expanded2) {
A.isNotUndefined(expanded2, 'expanded2 should not be undefined');
OA.areEqual({}, expanded2.config, 'expanded2 instance config should be empty!');
});
});
}
}));
Y.Test.Runner.add(suite);
});
| tests/unit/lib/app/autoload/test-store.server.js | /*
* Copyright (c) 2011-2013, Yahoo! Inc. All rights reserved.
* Copyrights licensed under the New BSD License.
* See the accompanying LICENSE file for terms.
*/
YUI().use(
'oop',
'mojito-test-extra',
'mojito-resource-store',
'addon-rs-config',
'addon-rs-selector',
'addon-rs-url',
'addon-rs-yui',
'test',
function(Y) {
var suite = new Y.Test.Suite('mojito-store-server-tests'),
libpath = require('path'),
mojitoRoot = libpath.join(__dirname, '../../../lib'),
store,
Mock = Y.Mock,
A = Y.Assert,
AA = Y.ArrayAssert,
OA = Y.ObjectAssert;
suite.add(new Y.Test.Case({
name: 'Store tests -- preload fixture "store"',
init: function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
store = new Y.mojito.ResourceStore({ root: fixtures });
store.preload();
},
'pre load': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
//Y.log(Y.JSON.stringify(store,null,4));
A.isTrue(store._config.root === fixtures);
},
'store is not lazy by default': function () {
A.isFalse(Object.keys(store._mojitDetailsCache).length === 0);
},
'valid context': function() {
var success;
try {
store.validateContext({});
} catch(e) {
A.fail('{} should be valid');
}
try {
store.validateContext({device:'iphone'});
} catch(e) {
A.fail('{device:iphone} should be valid');
}
try {
store.validateContext({device:'iphone',lang:'en'});
} catch(e) {
A.fail('{device:iphone,lang:en} should be valid');
}
try {
store.validateContext({device:'iphone',runtime:'common'});
} catch(e) {
A.fail('{device:iphone,runtime:common} should be valid');
}
try {
success = undefined;
store.validateContext({device:'blender'});
success = true;
} catch(e) {
success = false;
}
A.isFalse(success, '{device:blender} should be invalid');
try {
success = undefined;
store.validateContext({device:'iphone',texture:'corrugated'});
success = true;
} catch(e) {
success = false;
}
A.isFalse(success, '{device:iphone,texture:corrugated} should be invalid');
try {
success = undefined;
store.validateContext({device:'iphone',runtime:'kite'});
success = true;
} catch(e) {
success = false;
}
A.isFalse(success, '{device:iphone,runtime:kite} should be invalid');
},
'server app config value': function() {
var config = store.getAppConfig(null);
A.isTrue(config.testKey1 === 'testVal1');
},
'server mojit config value': function() {
var instance = {base:'test1'};
store.expandInstance(instance, {}, function(err, instance){
A.isNull(err);
A.isTrue(instance.id === 'test1', 'wrong ID');
A.isTrue(instance.type === 'test_mojit_1', 'wrong type');
A.isTrue(instance.config.testKey4 === 'testVal4', 'missing key from definition.json');
});
},
'server mojit config value via type': function() {
var instance = {type:'test_mojit_1'};
store.expandInstance(instance, {}, function(err, instance){
A.isTrue(instance.type === 'test_mojit_1', 'wrong ID');
A.isTrue(instance.config.testKey4 === 'testVal4', 'missing config from definition.json');
A.isTrue(instance.config.testKey6.testKey7 === 'testVal7', 'missing deep config from definition.json');
});
},
'server mojit config value via type and override': function() {
var instance = {
type:'test_mojit_1',
config:{testKey4: 'other'}
};
store.expandInstance(instance, {}, function(err, instance){
A.isTrue(instance.type === 'test_mojit_1', 'wrong ID');
A.areSame('other', instance.config.testKey4, 'missing config from definition.json');
A.areSame('testVal5', instance.config.testKey5, 'missing deep config from defaults.json');
});
},
'server mojit instance assets': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var instance = {type:'test_mojit_1'};
store.expandInstance(instance, {}, function(err, instance) {
// we'll skip the favicon.ico that ships with Mojito
// (it's not availble when running --coverage anyway)
A.areSame(libpath.join(fixtures, 'mojits/test_mojit_1/assets/css/main.css'), instance.assets['css/main.css']);
A.areSame(libpath.join(fixtures, 'mojits/test_mojit_1/assets/js/main.js'), instance.assets['js/main.js']);
});
},
'server mojit instance views and binders': function() {
var instance = {type:'test_mojit_1'};
store.expandInstanceForEnv('client', instance, {}, function(err, instance) {
A.areSame(4, Y.Object.keys(instance.views).length);
A.isObject(instance.views['test_1']);
A.areSame('/static/test_mojit_1/views/test_1.hb.html', instance.views['test_1']['content-path']);
A.areSame('hb', instance.views['test_1']['engine']);
A.areSame('test_mojit_1Bindertest_1', instance.binders['test_1']);
A.areSame('test_mojit_1Bindersubdir/test_1', instance.binders['subdir/test_1']);
A.isObject(instance.views['test_1']);
A.areSame('/static/test_mojit_1/views/test_1.hb.html', instance.views['test_1']['content-path']);
A.areSame('hb', instance.views['test_1']['engine']);
A.isObject(instance.views['test_2']);
A.areSame('/static/test_mojit_1/views/test_2.hb.html', instance.views['test_2']['content-path']);
A.areSame('hb', instance.views['test_2']['engine']);
A.isObject(instance.views['subdir/test_1']);
A.areSame('/static/test_mojit_1/views/subdir/test_1.hb.html', instance.views['subdir/test_1']['content-path']);
A.areSame('hb', instance.views['subdir/test_1']['engine']);
A.isObject(instance.partials['test_3']);
A.areSame('/static/test_mojit_1/views/partials/test_3.hb.html', instance.partials['test_3']['content-path']);
A.areSame('hb', instance.partials['test_3']['engine']);
});
},
'server mojit instance models': function() {
var instance = {type:'test_mojit_1'};
store.expandInstance(instance, {}, function(err, instance) {
A.areSame(4, Y.Object.keys(instance.models).length);
A.areSame('ModelFlickr', instance.models['flickr']);
A.areSame('test_applevelModel', instance.models['test_applevel']);
A.areSame('test_mojit_1_model_test_1', instance.models['test_1']);
A.areSame('test_mojit_1_model_test_2', instance.models['test_2']);
});
},
'server mojit type name can come from package.json': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var instance = {type:'TestMojit2'};
store.expandInstance(instance, {}, function(err, instance){
A.isNotUndefined(instance.controller);
A.areSame('TestMojit2', instance.type);
A.areSame(libpath.join(fixtures, 'mojits/test_mojit_2/views/index.hb.html'), instance.views.index['content-path']);
});
},
'server mojit is NOT loaded because of package mojito version mismatch': function(){
var urls = store.getAllURLs();
A.isUndefined(urls['/static/test_mojit_4/package.json']);
A.isUndefined(urls['/static/TestMojit4/package.json']);
},
'server mojit is loaded because of package mojito version match': function(){
var instance = {type:'TestMojit2'};
store.expandInstance(instance, {}, function(err, instance){
A.areSame('TestMojit2', instance.type);
});
},
'server a mojits package.json file is available as appropriate': function() {
var urls = store.getAllURLs();
A.isUndefined(urls['/static/TestMojit2/package.json']);
A.isNotUndefined(urls['/static/TestMojit3/package.json']);
A.isUndefined(urls['/static/TestMojit5/package.json']);
},
'server mojit view index.hb.html is loaded correctly': function() {
var instance = {type:'TestMojit3'};
store.expandInstance(instance, {}, function(err, instance){
A.areSame('index.hb.html', instance.views.index['content-path'].split(libpath.sep).pop());
});
},
'server mojit view index.iphone.hb.html is loaded correctly': function(){
var instance = {type:'TestMojit3'};
store.expandInstance(instance, {device:'iphone'}, function(err, instance){
A.areSame('index.iphone.hb.html', instance.views.index['content-path'].split(libpath.sep).pop());
});
},
'app-level mojits': function() {
var instance = { type: 'test_mojit_1' };
store.expandInstance(instance, {}, function(err, instance) {
A.isNotUndefined(instance.models.test_applevel);
});
},
'mojitDirs setting': function() {
var instance = { type: 'soloMojit' };
store.expandInstance(instance, {}, function(err, instance) {
A.areSame('soloMojit', instance.controller);
});
},
// Seeding the details cache and asserting the seeded value comes back proves
// getMojitTypeDetails() consults _getMojitTypeDetailsCache before computing.
'getMojitTypeDetails caching': function() {
    var key = Y.JSON.stringify(['server', ['*'], 'en', 'x']);
    store._getMojitTypeDetailsCache[key] = { x: 'y' };
    var details = store.getMojitTypeDetails('server', {lang: 'en'}, 'x');
    A.isObject(details);
    A.areEqual(1, Object.keys(details).length);
    A.areEqual('y', details.x);
},

// instanceId set by the caller must survive expansion untouched.
'expandInstanceForEnv preserves instanceId': function() {
    var inInstance = {
        type: 'test_mojit_1',
        instanceId: 'foo'
    };
    store.expandInstanceForEnv('server', inInstance, {}, function(err, outInstance) {
        A.areSame('foo', outInstance.instanceId);
    });
},

// Calling preload() a second time must be idempotent: resource versions and
// resources should be deep-equal before and after the repeat call.
'multi preload': function() {
    var pre = {
        appRVs: Y.clone(store._appRVs, true),
        mojitRVs: Y.clone(store._mojitRVs, true),
        appResources: Y.clone(store._appResources, true),
        mojitResources: Y.clone(store._mojitResources, true)
    };
    // clear yui rs _langLoaderCreated such that the loaders are created again.
    // NOTE(review): the accompanying patch removes this reset because
    // yui.preloadResourceVersions now does it -- confirm before relying on it.
    store.yui._langLoaderCreated = {};
    store.preload();
    var post = {
        appRVs: Y.clone(store._appRVs, true),
        mojitRVs: Y.clone(store._mojitRVs, true),
        appResources: Y.clone(store._appResources, true),
        mojitResources: Y.clone(store._mojitResources, true)
    };
    Y.TEST_CMP(post, pre);
},
// An unknown "base" must produce a descriptive error and no instance.
'instance with base pointing to non-existant spec': function() {
    var spec = { base: 'nonexistant' };
    store.expandInstance(spec, {}, function(err, instance) {
        A.isNotUndefined(err);
        A.areSame('Unknown base "nonexistant". You should have configured "nonexistant" in application.json under specs or used "@nonexistant" if you wanted to specify a mojit name.', err.message);
        A.isUndefined(instance);
    });
},

'instance with default spec': function() {
    // should use tests/fixtures/store/mojits/test_mojit_2/specs/default.json
    var spec = { base: 'TestMojit2' };
    store.expandInstance(spec, {}, function(err, instance) {
        A.areSame('testVal1', instance.config.testKey1);
    });
},

// Context-sensitive keys must be merged per the supplied runtime context.
'getAppConfig() returns contextualized info': function() {
    var context = { runtime: 'server' },
        config;
    config = store.getAppConfig(context);
    A.isObject(config);
    A.areSame('testVal1-server', config.testKey1, 'testKey1 wasnt contextualized to the server');
    A.areSame('testVal2', config.testKey2, 'testKey2 gotten from the wrong context');
    A.areSame('portended', config.pathos, 'missing contextualized config');
    A.isUndefined(config.testKey4, 'testKey4 gotten from the wrong context');
},

'call getRoutes()': function() {
    var routes = store.getRoutes({});
    A.isObject(routes, 'no routes at all');
    A.isObject(routes.flickr_by_page, 'missing route flickr_by_page');
    A.isObject(routes.flickr_base, 'missing route flickr_base');
},

// Both framework-shipped and fixture mojits must appear in the listing.
'call listAllMojits()': function() {
    var list = store.listAllMojits('server');
    A.areSame(11, list.length, 'found the wrong number of mojits');
    AA.contains('TunnelProxy', list);
    AA.contains('HTMLFrameMojit', list);
    AA.contains('LazyLoad', list);
    AA.contains('inlinecss', list);
    AA.contains('rollups', list);
    AA.contains('test_mojit_1', list);
    AA.contains('TestMojit2', list);
    AA.contains('TestMojit3', list);
    AA.contains('TestMojit5', list);
    AA.contains('soloMojit', list);
    AA.contains('page', list);
},
// TODO -- do we still need rollups?
// Disabled ("ignore:" prefix): verifies rollup files are used for the client
// YUI config and exposed via getAllURLs().
'ignore: app with rollups': function() {
    var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
    var spec = { type: 'rollups' };
    store.expandInstanceForEnv('client', spec, {}, function(err, instance) {
        A.areSame('/static/rollups/rollup.client.js', instance.yui.config.modules['rollups'].fullpath, 'main rollup');
        var urls = store.getAllURLs();
        A.areSame(libpath.join(fixtures, 'mojits/rollups/rollup.client.js'), urls['/static/rollups/rollup.client.js']);
    });
},

// An app-level HTMLFrameMojit must shadow the framework-shipped one.
'app resource overrides framework resource': function() {
    var fixtures = libpath.join(__dirname, '../../../../fixtures/store'),
        details = store.getMojitTypeDetails('server', {}, 'HTMLFrameMojit');
    A.areSame(libpath.join(fixtures, 'mojits/HTMLFrameMojit'), details.fullPath);
},

// Placeholders for untested store internals (disabled via "ignore:" prefix).
'ignore: getAllURLResources()': function() {
    // TODO
},
'ignore: makeResourceVersions()': function() {
    // TODO
},
'ignore: getResourceContent()': function() {
    // TODO
},
'ignore: processResourceContent()': function() {
    // TODO
},
'ignore: getAppPkgMeta()': function() {
    // TODO
},
'ignore: makeResourceFSMeta()': function() {
    // TODO
}
}));
// Tests against the "gsg5" fixture app: selector-based resolution of
// controllers, views ("binders" here checks the view path), and AC addons.
suite.add(new Y.Test.Case({

    name: 'Store tests -- preload fixture "gsg5"',

    init: function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5');
        store = new Y.mojito.ResourceStore({ root: fixtures });
        store.preload();
    },

    // device:iphone context still resolves to the one PagedFlickr controller.
    'controller with selector': function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5');
        var spec = { type: 'PagedFlickr' };
        var ctx = { device: 'iphone' };
        store.expandInstance(spec, ctx, function(err, instance) {
            A.areSame('PagedFlickr', instance.controller);
        });
    },

    // device:iphone context selects the iphone-specific view file.
    'binder with selector': function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5');
        var spec = { type: 'PagedFlickr' };
        var ctx = { device: 'iphone' };
        store.expandInstance(spec, ctx, function(err, instance) {
            A.areSame(libpath.join(fixtures, 'mojits/PagedFlickr/views/index.iphone.hb.html'), instance.views.index['content-path']);
        });
    },

    'augment getMojitTypeDetails with AC addons': function() {
        var details = store.getMojitTypeDetails('server', {}, 'PagedFlickr');
        // order matters
        A.areSame(4, details.acAddons.length, 'number of AC addons');
        A.areSame(JSON.stringify(['config','intl','params','url']), JSON.stringify(details.acAddons), 'correct order');
    }

}));
// Tests against the "gsg5-appConfig" fixture: staticHandling.prefix from
// application.json must flow into the computed assetsRoot.
suite.add(new Y.Test.Case({

    name: 'Store tests -- preload fixture "gsg5-appConfig"',

    init: function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/gsg5-appConfig');
        store = new Y.mojito.ResourceStore({ root: fixtures });
        store.preload();
    },

    'appConfig staticHandling.prefix': function() {
        var spec = { type: 'PagedFlickr' };
        store.expandInstanceForEnv('client', spec, {}, function(err, instance) {
            A.areSame('/static/PagedFlickr/assets', instance.assetsRoot);
        });
    }

}));
// Tests against the "lazy-resolve" fixture: preload() alone must not
// eagerly populate the mojit details cache.
suite.add(new Y.Test.Case({

    name: 'Store tests -- preload fixture "lazy-resolve"',

    init: function () {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/lazy-resolve');
        store = new Y.mojito.ResourceStore({ root: fixtures });
        store.preload();
    },

    'store is actually lazy': function () {
        A.isTrue(Object.keys(store._mojitDetailsCache).length === 0);
    }

}));
// Miscellaneous store tests; each test builds its own store instance.
suite.add(new Y.Test.Case({

    name: 'Store tests -- misc',

    // A context passed to the constructor is baked in: getAppConfig() with no
    // args must still return the contextualized values.
    'static context is really static': function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/store'),
            context = { runtime: 'server' },
            store = new Y.mojito.ResourceStore({ root: fixtures, context: context }),
            config;
        store.preload();
        config = store.getAppConfig();
        A.isObject(config);
        A.areSame('testVal1-server', config.testKey1, 'testKey1 wasnt contextualized to the server');
        A.areSame('testVal2', config.testKey2, 'testKey2 gotten from the wrong context');
        A.areSame('portended', config.pathos, 'missing contextualized config');
        A.isUndefined(config.testKey4, 'testKey4 gotten from the wrong context');
    },

    // preload() must not blow up when application.json is absent.
    'pre load no application.json file': function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/store_no_app_config'),
            store = new Y.mojito.ResourceStore({ root: fixtures });
        store.preload();
        A.isTrue(store._config.root === fixtures);
    },

    // Framework-provided default route appears when the app defines none.
    'default routes': function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/store_no_app_config'),
            store = new Y.mojito.ResourceStore({ root: fixtures });
        store.preload();
        var have = store.getRoutes();
        A.isObject(have._default_path);
    },

    // Badly named files in the fixture must be silently skipped, not loaded.
    'bad files': function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/badfiles'),
            store = new Y.mojito.ResourceStore({ root: fixtures });
        store.preload();
        var spec = { type: 'M' };
        store.expandInstance(spec, {}, function(err, instance) {
            A.isUndefined(instance.models['MModelNot']);
            A.isUndefined(instance.binders.not);
        });
    },
// Mocks the fs library to prove _sortedReaddirSync() sorts readdirSync output.
'sortedReaddirSync() sorts the result of fs.readdirSync()': function() {
    var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
    var mockfs = Mock();
    Mock.expect(mockfs, {
        method: 'readdirSync',
        args: ['dir'],
        returns: ['d', 'c', 'a', 'b']
    });

    var store = new Y.mojito.ResourceStore({ root: fixtures });
    store._mockLib('fs', mockfs);
    var files = store._sortedReaddirSync('dir');

    AA.itemsAreSame(['a', 'b', 'c', 'd'], files);
    Mock.verify(mockfs);
},
'_skipBadPath() does just that': function() {
var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
var store = new Y.mojito.ResourceStore({ root: fixtures });
A.isTrue(store._skipBadPath({ isFile: true, ext: '.js~' }), 'need to skip bad file naems');
A.isFalse(store._skipBadPath({ isFile: false, ext: '.js~' }), 'need to not-skip bad directory names');
A.isFalse(store._skipBadPath({ isFile: true, ext: '.js' }), 'need to not-skip good file names');
},
// Mojits and YUI modules shipped inside node_modules/ packages must be
// registered. Skips gracefully when the nested fixtures were not installed.
'load node_modules': function() {
    var fixtures = libpath.join(__dirname, '../../../../fixtures/packages'),
        store = new Y.mojito.ResourceStore({ root: fixtures });

    if (!store._mojitRVs.a && !store._mojitRVs.aa && !store._mojitRVs.ba) {
        // This happens when mojito is installed via npm, since npm
        // won't install the node_modules/ directories in
        // tests/fixtures/packages.
        A.isTrue(true);
        return;
    }

    var config = store.yui.getConfigShared('server');
    A.isObject(config.modules.b, 'b');
    A.isObject(config.modules.ab, 'ab');
    A.isObject(config.modules.bb, 'bb');
    A.isObject(config.modules.cb, 'cb');

    var details = store.getMojitTypeDetails('server', {}, 'a');
    A.areSame('a', details.controller);
},

// A second preload() must log (at debug) that duplicate packages are skipped.
// Y.log is temporarily hijacked to capture the message, then restored.
'skip loaded packages': function() {
    var fixtures = libpath.join(__dirname, '../../../../fixtures/packages'),
        store = new Y.mojito.ResourceStore({ root: fixtures });
    store.preload();

    var oldlog = Y.log;
    var logged = false;
    Y.log = function(msg, lvl, src) {
        if ('debug' === lvl && 'mojito-resource-store' === src && msg.match(/^skipping duplicate package a/)) {
            logged = true;
        }
    };
    try {
        store.preload();
    } finally {
        Y.log = oldlog;
    }
    A.isTrue(logged, 'info logged');
},
// Walks the "conventions" fixture with _findResourcesByConvention() and
// verifies every discovered resource's parsed metadata (type, subtype, name,
// selector, affinity, extension) matches what its on-disk name implies.
'find and parse resources by convention': function() {
    var fixtures = libpath.join(__dirname, '../../../../fixtures/conventions'),
        store = new Y.mojito.ResourceStore({ root: fixtures });

    // fake out some parts of preload(), which we're trying to avoid
    store._fwConfig = store.config.readConfigSimple(libpath.join(mojitoRoot, 'config.json'));
    store._appConfigStatic = store.getStaticAppConfig();

    var dir = libpath.join(__dirname, '../../../../fixtures/conventions');
    var pkg = { name: 'test', version: '6.6.6' };
    var mojitType = 'testing';
    var ress = store._findResourcesByConvention(dir, 'app', pkg, mojitType);

    var r, res;
    for (r = 0; r < ress.length; r++) {
        res = ress[r];
        A.isNotUndefined(res.id, 'no resource id');
        // Each case asserts the metadata for one resource id; inner switches
        // on basename distinguish selector variants of the same resource.
        switch (res.id) {
            case 'action--x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('action', res.type);
                A.areSame('x', res.name);
                switch (res.source.fs.basename) {
                    case 'x.common':
                        A.areSame('*', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        A.areSame('x', res.name);
                        break;
                    case 'x.common.iphone':
                        A.areSame('iphone', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        A.areSame('x', res.name);
                        break;
                    default:
                        A.fail('unknown resource ' + res.source.fs.fullPath);
                        break;
                }
                break;
            case 'action--y/z':
                A.areSame(pkg, res.source.pkg);
                A.areSame('action', res.type);
                A.areSame('y/z', res.name);
                A.areSame('*', res.selector);
                A.areSame('common', res.affinity);
                A.areSame('.js', res.source.fs.ext);
                A.areSame('z.common', res.source.fs.basename);
                break;
            case 'addon-a-x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('addon', res.type);
                A.areSame('a', res.subtype);
                A.areSame('x', res.name);
                switch (res.source.fs.basename) {
                    case 'x.common':
                        A.areSame('*', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        A.areSame('x', res.name);
                        break;
                    case 'x.common.iphone':
                        A.areSame('iphone', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        A.areSame('x', res.name);
                        break;
                    default:
                        A.fail('unknown resource ' + res.source.fs.fullPath);
                        break;
                }
                break;
            case 'archetype-x-y':
                A.areSame(pkg, res.source.pkg);
                A.areSame('archetype', res.type);
                A.areSame('x', res.subtype);
                A.areSame('y', res.name);
                A.areSame('y', res.source.fs.basename);
                break;
            case 'asset-css-x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('asset', res.type);
                A.areSame('css', res.subtype);
                A.areSame('x', res.name);
                switch (res.source.fs.basename) {
                    case 'x':
                        A.areSame('*', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.css', res.source.fs.ext);
                        break;
                    case 'x.iphone':
                        A.areSame('iphone', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.css', res.source.fs.ext);
                        break;
                    default:
                        A.fail('unknown resource ' + res.source.fs.fullPath);
                        break;
                }
                break;
            case 'asset-css-y/z':
                A.areSame(pkg, res.source.pkg);
                A.areSame('asset', res.type);
                A.areSame('css', res.subtype);
                A.areSame('y/z', res.name);
                switch (res.source.fs.basename) {
                    case 'z':
                        A.areSame('*', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.css', res.source.fs.ext);
                        break;
                    case 'z.android':
                        A.areSame('android', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.css', res.source.fs.ext);
                        break;
                    default:
                        A.fail('unknown resource ' + res.source.fs.fullPath);
                        break;
                }
                break;
            case 'binder--x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('binder', res.type);
                A.areSame('x', res.name);
                switch (res.source.fs.basename) {
                    case 'x':
                        A.areSame('*', res.selector);
                        A.areSame('client', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        break;
                    case 'x.iphone':
                        A.areSame('iphone', res.selector);
                        A.areSame('client', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        break;
                    default:
                        A.fail('unknown resource ' + res.source.fs.fullPath);
                        break;
                }
                break;
            case 'command--x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('command', res.type);
                A.areSame('x', res.name);
                A.areSame('x', res.source.fs.basename);
                break;
            case 'config--config':
                A.areSame(pkg, res.source.pkg);
                A.areSame('config', res.type);
                A.areSame('config', res.name);
                A.areSame('config', res.source.fs.basename);
                A.areSame('.json', res.source.fs.ext);
                break;
            case 'controller--controller':
                A.areSame(pkg, res.source.pkg);
                A.areSame('controller', res.type);
                A.areSame('controller', res.name);
                switch (res.source.fs.basename) {
                    case 'controller.common':
                        A.areSame('*', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        break;
                    case 'controller.server.iphone':
                        A.areSame('iphone', res.selector);
                        A.areSame('server', res.affinity);
                        A.areSame('.js', res.source.fs.ext);
                        break;
                    default:
                        A.fail('unknown resource ' + res.source.fs.fullPath);
                        break;
                }
                break;
            case 'middleware--x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('middleware', res.type);
                A.areSame('x', res.name);
                A.areSame('x', res.source.fs.basename);
                A.areSame('.js', res.source.fs.ext);
                break;
            case 'spec--default':
                A.areSame(pkg, res.source.pkg);
                A.areSame('spec', res.type);
                A.areSame('default', res.name);
                A.areSame('default', res.source.fs.basename);
                A.areSame('.json', res.source.fs.ext);
                break;
            case 'spec--x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('spec', res.type);
                A.areSame('testing', res.mojit);
                A.areSame('x', res.name);
                A.areSame('x', res.source.fs.basename);
                A.areSame('.json', res.source.fs.ext);
                break;
            case 'view--x':
                A.areSame(pkg, res.source.pkg);
                A.areSame('view', res.type);
                A.areSame('x', res.name);
                A.areSame('html', res.view.outputFormat);
                A.areSame('hb', res.view.engine);
                switch (res.source.fs.basename) {
                    case 'x.hb':
                        A.areSame('*', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.html', res.source.fs.ext);
                        break;
                    case 'x.iphone.hb':
                        A.areSame('iphone', res.selector);
                        A.areSame('common', res.affinity);
                        A.areSame('.html', res.source.fs.ext);
                        break;
                    default:
                        A.fail('unknown resource ' + res.source.fs.fullPath);
                        break;
                }
                break;
            default:
                A.fail('unknown resource ' + res.id);
                break;
        }
    }
    // Exhaustiveness check: every fixture resource was visited exactly once.
    A.areSame(21, ress.length, 'wrong number of resources');
}

}));
// "Bleeding" tests: config supplied on one instance must never leak into a
// later expansion of a different instance of the same mojit type.
suite.add(new Y.Test.Case({

    name: 'Store tests -- "bleeding"',

    init: function() {
        var fixtures = libpath.join(__dirname, '../../../../fixtures/store');
        store = new Y.mojito.ResourceStore({ root: fixtures });
        store.preload();
    },

    'test bleeding spec with no config': function() {
        var instance = { type: "page" },
            ctx = {};
        store.expandInstanceForEnv('server', instance, ctx, function(err, expanded) {
            A.isNotUndefined(expanded, 'expanded should not be undefined');
            OA.areEqual({}, expanded.config, 'config should be empty');
        });
    },

    'test bleeding spec with config': function() {
        var instance,
            ctx = {};
        instance = {
            type: "page",
            config: {
                children: {
                    weather: { type: "weather", action: "index" },
                    stream: { type: "stream", action: "index" }
                }
            }
        };
        store.expandInstanceForEnv('server', instance, ctx, function(err, expanded) {
            A.isNotUndefined(expanded, 'expanded should not be undefined');
            OA.areEqual(instance.config.children.weather,
                expanded.config.children.weather,
                'config missing children.weather');
            OA.areEqual(instance.config.children.stream,
                expanded.config.children.stream,
                'config missing children.stream');
        });
    },

    // Expand a configured instance first, then a bare one: the second
    // expansion must come back with empty config (no bleed-through).
    'test bleeding spec with mixed config': function() {
        var instance1,
            instance2,
            ctx = {};
        instance1 = {
            type: "page",
            config: {
                children: {
                    weather: { type: "weather", action: "index" },
                    stream: { type: "stream", action: "index" }
                }
            }
        };
        instance2 = {
            type: "page"
        };
        store.expandInstanceForEnv('server', instance1, ctx, function(err, expanded1) {
            // test 1
            A.isNotUndefined(expanded1, 'expanded1 should not be undefined');
            OA.areEqual(instance1.config.children.weather,
                expanded1.config.children.weather,
                'config missing children.weather');
            OA.areEqual(instance1.config.children.stream,
                expanded1.config.children.stream,
                'config missing children.stream');
            // test 2
            store.expandInstanceForEnv('server', instance2, ctx, function(err, expanded2) {
                A.isNotUndefined(expanded2, 'expanded2 should not be undefined');
                OA.areEqual({}, expanded2.config, 'expanded2 instance config should be empty!');
            });
        });
    }

}));
Y.Test.Runner.add(suite);
});
| Removed clearing of langLoaderCreated, since this is now done in yui.preloadResourceVersions.
| tests/unit/lib/app/autoload/test-store.server.js | Removed clearing of langLoaderCreated, since this is now done in yui.preloadResourceVersions. | <ide><path>ests/unit/lib/app/autoload/test-store.server.js
<ide> appResources: Y.clone(store._appResources, true),
<ide> mojitResources: Y.clone(store._mojitResources, true)
<ide> };
<del> // clear yui rs _langLoaderCreated such that the loaders are created again.
<del> store.yui._langLoaderCreated = {};
<add>
<ide> store.preload();
<ide> var post = {
<ide> appRVs: Y.clone(store._appRVs, true), |
|
Java | apache-2.0 | 3dd9a255469c6512526f8788fce8f2dc2e8523b7 | 0 | zhiqinghuang/druid,zxs/druid,mangeshpardeshiyahoo/druid,qix/druid,skyportsystems/druid,knoguchi/druid,noddi/druid,deltaprojects/druid,liquidm/druid,KurtYoung/druid,premc/druid,OttoOps/druid,dclim/druid,monetate/druid,kevintvh/druid,Kleagleguo/druid,elijah513/druid,kevintvh/druid,mrijke/druid,leventov/druid,haoch/druid,druid-io/druid,zhihuij/druid,Deebs21/druid,mghosh4/druid,du00cs/druid,pjain1/druid,michaelschiff/druid,pdeva/druid,gianm/druid,lizhanhui/data_druid,rasahner/druid,cocosli/druid,winval/druid,pjain1/druid,metamx/druid,tubemogul/druid,lcp0578/druid,Fokko/druid,druid-io/druid,pombredanne/druid,guobingkun/druid,wenjixin/druid,potto007/druid-avro,se7entyse7en/druid,jon-wei/druid,knoguchi/druid,gianm/druid,du00cs/druid,optimizely/druid,metamx/druid,nishantmonu51/druid,authbox-lib/druid,liquidm/druid,premc/druid,erikdubbelboer/druid,mrijke/druid,redBorder/druid,Kleagleguo/druid,Fokko/druid,taochaoqiang/druid,se7entyse7en/druid,praveev/druid,monetate/druid,mrijke/druid,tubemogul/druid,himanshug/druid,amikey/druid,optimizely/druid,smartpcr/druid,taochaoqiang/druid,wenjixin/druid,calliope7/druid,zhaown/druid,potto007/druid-avro,b-slim/druid,zhiqinghuang/druid,se7entyse7en/druid,haoch/druid,deltaprojects/druid,eshen1991/druid,dclim/druid,pjain1/druid,fjy/druid,winval/druid,smartpcr/druid,cocosli/druid,OttoOps/druid,druid-io/druid,implydata/druid,dkhwangbo/druid,nvoron23/druid,deltaprojects/druid,andy256/druid,elijah513/druid,minewhat/druid,minewhat/druid,zengzhihai110/druid,pdeva/druid,milimetric/druid,wenjixin/druid,fjy/druid,erikdubbelboer/druid,premc/druid,KurtYoung/druid,lizhanhui/data_druid,du00cs/druid,zengzhihai110/druid,amikey/druid,metamx/druid,jon-wei/druid,tubemogul/druid,qix/druid,zhiqinghuang/druid,dkhwangbo/druid,OttoOps/druid,jon-wei/druid,dkhwangbo/druid,mghosh4/druid,pjain1/druid,potto007/druid-avro,anupkumardixit/druid,penuel-leo/druid,zhaown/druid,andy256/druid,fjy/druid,b-slim/d
ruid,qix/druid,Kleagleguo/druid,rasahner/druid,himanshug/druid,skyportsystems/druid,yaochitc/druid-dev,friedhardware/druid,se7entyse7en/druid,zengzhihai110/druid,michaelschiff/druid,zhihuij/druid,mangeshpardeshiyahoo/druid,metamx/druid,yaochitc/druid-dev,lizhanhui/data_druid,gianm/druid,penuel-leo/druid,mghosh4/druid,authbox-lib/druid,eshen1991/druid,nvoron23/druid,taochaoqiang/druid,mangeshpardeshiyahoo/druid,noddi/druid,calliope7/druid,pdeva/druid,minewhat/druid,nvoron23/druid,erikdubbelboer/druid,767326791/druid,zhihuij/druid,haoch/druid,solimant/druid,elijah513/druid,767326791/druid,friedhardware/druid,pombredanne/druid,lcp0578/druid,leventov/druid,mghosh4/druid,lizhanhui/data_druid,friedhardware/druid,fjy/druid,Deebs21/druid,skyportsystems/druid,penuel-leo/druid,gianm/druid,haoch/druid,pombredanne/druid,michaelschiff/druid,calliope7/druid,deltaprojects/druid,taochaoqiang/druid,deltaprojects/druid,redBorder/druid,yaochitc/druid-dev,monetate/druid,Fokko/druid,zengzhihai110/druid,zhihuij/druid,Fokko/druid,cocosli/druid,pjain1/druid,mghosh4/druid,haoch/druid,authbox-lib/druid,leventov/druid,Fokko/druid,nishantmonu51/druid,dclim/druid,skyportsystems/druid,Fokko/druid,nishantmonu51/druid,767326791/druid,pdeva/druid,Deebs21/druid,pdeva/druid,deltaprojects/druid,erikdubbelboer/druid,noddi/druid,dclim/druid,michaelschiff/druid,premc/druid,calliope7/druid,druid-io/druid,tubemogul/druid,noddi/druid,jon-wei/druid,liquidm/druid,calliope7/druid,jon-wei/druid,dkhwangbo/druid,solimant/druid,optimizely/druid,druid-io/druid,smartpcr/druid,Kleagleguo/druid,kevintvh/druid,cocosli/druid,du00cs/druid,mrijke/druid,b-slim/druid,penuel-leo/druid,KurtYoung/druid,guobingkun/druid,yaochitc/druid-dev,solimant/druid,anupkumardixit/druid,implydata/druid,nishantmonu51/druid,milimetric/druid,michaelschiff/druid,zhaown/druid,pjain1/druid,Deebs21/druid,knoguchi/druid,friedhardware/druid,guobingkun/druid,amikey/druid,gianm/druid,premc/druid,zengzhihai110/druid,knoguchi/druid,mangeshpardeshiyahoo/
druid,milimetric/druid,knoguchi/druid,minewhat/druid,767326791/druid,gianm/druid,monetate/druid,dkhwangbo/druid,taochaoqiang/druid,leventov/druid,pombredanne/druid,winval/druid,kevintvh/druid,se7entyse7en/druid,smartpcr/druid,mangeshpardeshiyahoo/druid,andy256/druid,smartpcr/druid,skyportsystems/druid,nishantmonu51/druid,du00cs/druid,nvoron23/druid,yaochitc/druid-dev,jon-wei/druid,OttoOps/druid,guobingkun/druid,KurtYoung/druid,monetate/druid,redBorder/druid,implydata/druid,implydata/druid,tubemogul/druid,anupkumardixit/druid,deltaprojects/druid,Fokko/druid,dclim/druid,qix/druid,himanshug/druid,eshen1991/druid,Deebs21/druid,767326791/druid,zxs/druid,noddi/druid,milimetric/druid,fjy/druid,redBorder/druid,elijah513/druid,lcp0578/druid,pombredanne/druid,b-slim/druid,minewhat/druid,milimetric/druid,andy256/druid,zhaown/druid,guobingkun/druid,liquidm/druid,michaelschiff/druid,anupkumardixit/druid,b-slim/druid,penuel-leo/druid,kevintvh/druid,zxs/druid,zhiqinghuang/druid,winval/druid,wenjixin/druid,monetate/druid,praveev/druid,amikey/druid,pjain1/druid,liquidm/druid,lcp0578/druid,optimizely/druid,michaelschiff/druid,erikdubbelboer/druid,nvoron23/druid,KurtYoung/druid,zxs/druid,lizhanhui/data_druid,nishantmonu51/druid,leventov/druid,zhihuij/druid,implydata/druid,anupkumardixit/druid,OttoOps/druid,mghosh4/druid,eshen1991/druid,potto007/druid-avro,zxs/druid,mghosh4/druid,solimant/druid,zhiqinghuang/druid,rasahner/druid,qix/druid,potto007/druid-avro,redBorder/druid,himanshug/druid,andy256/druid,optimizely/druid,authbox-lib/druid,friedhardware/druid,authbox-lib/druid,rasahner/druid,zhaown/druid,wenjixin/druid,solimant/druid,rasahner/druid,eshen1991/druid,praveev/druid,jon-wei/druid,mrijke/druid,nishantmonu51/druid,elijah513/druid,cocosli/druid,himanshug/druid,amikey/druid,winval/druid,lcp0578/druid,implydata/druid,praveev/druid,gianm/druid,metamx/druid,liquidm/druid,praveev/druid,Kleagleguo/druid,monetate/druid | /*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.indexing.overlord.scaling;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.collect.Collections2;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.emitter.EmittingLogger;
import io.druid.indexing.overlord.RemoteTaskRunnerWorkItem;
import io.druid.indexing.overlord.TaskRunnerWorkItem;
import io.druid.indexing.overlord.ZkWorker;
import io.druid.indexing.overlord.setup.WorkerSetupData;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
*/
public class SimpleResourceManagementStrategy implements ResourceManagementStrategy
{
private static final EmittingLogger log = new EmittingLogger(SimpleResourceManagementStrategy.class);

private final AutoScalingStrategy autoScalingStrategy;
private final SimpleResourceManagementConfig config;
private final Supplier<WorkerSetupData> workerSetupDataRef;
private final ScalingStats scalingStats;

// Guards all mutable scaling state below (targetWorkerCount, the
// in-flight sets, and the last-action timestamps).
private final Object lock = new Object();

// Autoscaler node ids we have asked to be created/destroyed but which have
// not yet shown up in (or dropped out of) the observed worker pool.
private final Set<String> currentlyProvisioning = Sets.newHashSet();
private final Set<String> currentlyTerminating = Sets.newHashSet();

// A worker is "lazy" (eligible for termination) when it runs no tasks and
// has been idle for at least config.getWorkerIdleTimeout().
private final Predicate<ZkWorker> isLazyWorker = new Predicate<ZkWorker>()
{
  @Override
  public boolean apply(ZkWorker input)
  {
    return input.getRunningTasks().isEmpty()
           && System.currentTimeMillis() - input.getLastCompletedTaskTime().getMillis()
              >= config.getWorkerIdleTimeout().toStandardDuration().getMillis();
  }
};

// Desired worker pool size; -1 until initialized from the observed pool in
// updateTargetWorkerCount().
private int targetWorkerCount = -1;
private DateTime lastProvisionTime = new DateTime();
private DateTime lastTerminateTime = new DateTime();
/**
 * @param autoScalingStrategy backend used to actually create/destroy worker nodes
 * @param config              thresholds and timeouts controlling scaling decisions
 * @param workerSetupDataRef  supplies current min/max worker counts and min version
 */
@Inject
public SimpleResourceManagementStrategy(
    AutoScalingStrategy autoScalingStrategy,
    SimpleResourceManagementConfig config,
    Supplier<WorkerSetupData> workerSetupDataRef
)
{
  this.autoScalingStrategy = autoScalingStrategy;
  this.config = config;
  this.workerSetupDataRef = workerSetupDataRef;
  this.scalingStats = new ScalingStats(config.getNumEventsToTrack());
}
/**
 * Provisions new workers until the pool reaches {@code targetWorkerCount}.
 * Skips provisioning while earlier requests are still pending; if a pending
 * request exceeds the configured max scaling duration, it alerts and asks
 * the autoscaler to terminate the stuck nodes.
 *
 * @return true if at least one provision request was issued this pass
 */
@Override
public boolean doProvision(Collection<RemoteTaskRunnerWorkItem> pendingTasks, Collection<ZkWorker> zkWorkers)
{
  synchronized (lock) {
    boolean didProvision = false;
    final WorkerSetupData workerSetupData = workerSetupDataRef.get();
    final Predicate<ZkWorker> isValidWorker = createValidWorkerPredicate(workerSetupData);
    final int currValidWorkers = Collections2.filter(zkWorkers, isValidWorker).size();

    final List<String> workerNodeIds = autoScalingStrategy.ipToIdLookup(
        Lists.newArrayList(
            Iterables.<ZkWorker, String>transform(
                zkWorkers,
                new Function<ZkWorker, String>()
                {
                  @Override
                  public String apply(ZkWorker input)
                  {
                    return input.getWorker().getIp();
                  }
                }
            )
        )
    );
    // Nodes that have shown up in the pool are no longer "provisioning".
    currentlyProvisioning.removeAll(workerNodeIds);

    updateTargetWorkerCount(pendingTasks, zkWorkers);

    if (currentlyProvisioning.isEmpty()) {
      // currentlyProvisioning is empty here, so "want" is simply the gap
      // between the target and the currently valid workers.
      int want = targetWorkerCount - (currValidWorkers + currentlyProvisioning.size());
      while (want > 0) {
        final AutoScalingData provisioned = autoScalingStrategy.provision();
        if (provisioned == null) {
          // Autoscaler refused (e.g. at its own cap); stop asking this pass.
          break;
        } else {
          currentlyProvisioning.addAll(provisioned.getNodeIds());
          lastProvisionTime = new DateTime();
          scalingStats.addProvisionEvent(provisioned);
          want -= provisioned.getNodeIds().size();
          didProvision = true;
        }
      }
    } else {
      Duration durSinceLastProvision = new Duration(lastProvisionTime, new DateTime());

      log.info("%s provisioning. Current wait time: %s", currentlyProvisioning, durSinceLastProvision);

      if (durSinceLastProvision.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) {
        // Provisioning appears stuck: alert, tear down the stuck nodes, and
        // reset so the next pass can try again.
        log.makeAlert("Worker node provisioning taking too long!")
           .addData("millisSinceLastProvision", durSinceLastProvision.getMillis())
           .addData("provisioningCount", currentlyProvisioning.size())
           .emit();

        List<String> nodeIps = autoScalingStrategy.idToIpLookup(Lists.newArrayList(currentlyProvisioning));
        autoScalingStrategy.terminate(nodeIps);
        currentlyProvisioning.clear();
      }
    }

    return didProvision;
  }
}
/**
 * Terminates idle ("lazy") workers when the pool exceeds
 * {@code targetWorkerCount}. Skips termination while earlier termination
 * requests are still pending; if a pending request exceeds the configured
 * max scaling duration, it alerts and resets the pending set.
 *
 * @return true if a terminate request was issued this pass
 */
@Override
public boolean doTerminate(Collection<RemoteTaskRunnerWorkItem> pendingTasks, Collection<ZkWorker> zkWorkers)
{
  synchronized (lock) {
    boolean didTerminate = false;
    final Set<String> workerNodeIds = Sets.newHashSet(
        autoScalingStrategy.ipToIdLookup(
            Lists.newArrayList(
                Iterables.transform(
                    zkWorkers,
                    new Function<ZkWorker, String>()
                    {
                      @Override
                      public String apply(ZkWorker input)
                      {
                        return input.getWorker().getIp();
                      }
                    }
                )
            )
        )
    );

    // Drop terminating nodes that have already disappeared from the pool.
    final Set<String> stillExisting = Sets.newHashSet();
    for (String s : currentlyTerminating) {
      if (workerNodeIds.contains(s)) {
        stillExisting.add(s);
      }
    }
    currentlyTerminating.clear();
    currentlyTerminating.addAll(stillExisting);

    updateTargetWorkerCount(pendingTasks, zkWorkers);

    if (currentlyTerminating.isEmpty()) {
      final int want = zkWorkers.size() - targetWorkerCount;
      if (want > 0) {
        final List<String> laziestWorkerIps =
            FluentIterable.from(zkWorkers)
                          .filter(isLazyWorker)
                          .limit(want)
                          .transform(
                              new Function<ZkWorker, String>()
                              {
                                @Override
                                public String apply(ZkWorker zkWorker)
                                {
                                  return zkWorker.getWorker().getIp();
                                }
                              }
                          )
                          .toList();

        log.info(
            "Terminating %,d workers (wanted %,d): %s",
            laziestWorkerIps.size(),
            want,
            Joiner.on(", ").join(laziestWorkerIps)
        );

        final AutoScalingData terminated = autoScalingStrategy.terminate(laziestWorkerIps);
        if (terminated != null) {
          currentlyTerminating.addAll(terminated.getNodeIds());
          lastTerminateTime = new DateTime();
          scalingStats.addTerminateEvent(terminated);
          didTerminate = true;
        }
      }
    } else {
      Duration durSinceLastTerminate = new Duration(lastTerminateTime, new DateTime());

      // BUGFIX: the format string was missing the second %s, so the wait
      // time was never rendered in the log message.
      log.info("%s terminating. Current wait time: %s", currentlyTerminating, durSinceLastTerminate);

      if (durSinceLastTerminate.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) {
        log.makeAlert("Worker node termination taking too long!")
           .addData("millisSinceLastTerminate", durSinceLastTerminate.getMillis())
           .addData("terminatingCount", currentlyTerminating.size())
           .emit();

        currentlyTerminating.clear();
      }
    }

    return didTerminate;
  }
}
/**
 * @return the rolling window of recent provision/terminate events
 */
@Override
public ScalingStats getStats()
{
  return scalingStats;
}
/**
 * Builds a predicate that accepts workers running at least the required
 * version: the setup data's minVersion if present, otherwise the configured
 * worker version. Throws if neither source provides a version.
 */
private Predicate<ZkWorker> createValidWorkerPredicate(final WorkerSetupData workerSetupData)
{
  return new Predicate<ZkWorker>()
  {
    @Override
    public boolean apply(ZkWorker zkWorker)
    {
      // Prefer the version from worker setup data; fall back to config.
      String requiredVersion = workerSetupData.getMinVersion();
      if (requiredVersion == null) {
        requiredVersion = config.getWorkerVersion();
      }
      if (requiredVersion == null) {
        throw new ISE("No minVersion found! It should be set in your runtime properties or configuration database.");
      }
      return zkWorker.isValidVersion(requiredVersion);
    }
  };
}
/**
 * Adjusts {@code targetWorkerCount} by at most one per call. On first use the
 * target is seeded from the observed pool size. Scaling decisions are only
 * made at "steady state" (no in-flight provisions/terminations and the valid
 * pool matches the target): scale up when a task has been pending too long
 * and we are below max; scale down when an idle worker exists and we are
 * above min. Must be called with {@code lock} logic in mind (it synchronizes
 * internally).
 */
private void updateTargetWorkerCount(
    final Collection<RemoteTaskRunnerWorkItem> pendingTasks,
    final Collection<ZkWorker> zkWorkers
)
{
  synchronized (lock) {
    final WorkerSetupData workerSetupData = workerSetupDataRef.get();

    if (targetWorkerCount < 0) {
      // Initialize to size of current worker pool
      targetWorkerCount = zkWorkers.size();
      log.info(
          "Starting with %,d workers (min = %,d, max = %,d).",
          targetWorkerCount,
          workerSetupData.getMinNumWorkers(),
          workerSetupData.getMaxNumWorkers()
      );
    }

    final Collection<ZkWorker> validWorkers = Collections2.filter(
        zkWorkers,
        createValidWorkerPredicate(workerSetupData)
    );

    final boolean atSteadyState = currentlyProvisioning.isEmpty()
                                  && currentlyTerminating.isEmpty()
                                  && validWorkers.size() == targetWorkerCount;
    final boolean shouldScaleUp = atSteadyState
                                  && hasTaskPendingBeyondThreshold(pendingTasks)
                                  && targetWorkerCount < workerSetupData.getMaxNumWorkers();
    final boolean shouldScaleDown = atSteadyState
                                    && Iterables.any(validWorkers, isLazyWorker)
                                    && targetWorkerCount > workerSetupData.getMinNumWorkers();

    if (shouldScaleUp) {
      targetWorkerCount++;
      log.info(
          "I think we should scale up to %,d workers (current = %,d, min = %,d, max = %,d).",
          targetWorkerCount,
          validWorkers.size(),
          workerSetupData.getMinNumWorkers(),
          workerSetupData.getMaxNumWorkers()
      );
    } else if (shouldScaleDown) {
      targetWorkerCount--;
      log.info(
          "I think we should scale down to %,d workers (current = %,d, min = %,d, max = %,d).",
          targetWorkerCount,
          validWorkers.size(),
          workerSetupData.getMinNumWorkers(),
          workerSetupData.getMaxNumWorkers()
      );
    } else {
      log.info(
          "Our target is %,d workers, and I'm okay with that (current = %,d, min = %,d, max = %,d).",
          targetWorkerCount,
          validWorkers.size(),
          workerSetupData.getMinNumWorkers(),
          workerSetupData.getMaxNumWorkers()
      );
    }
  }
}
private boolean hasTaskPendingBeyondThreshold(Collection<RemoteTaskRunnerWorkItem> pendingTasks)
{
synchronized (lock) {
long now = System.currentTimeMillis();
for (TaskRunnerWorkItem pendingTask : pendingTasks) {
final Duration durationSinceInsertion = new Duration(pendingTask.getQueueInsertionTime().getMillis(), now);
final Duration timeoutDuration = config.getPendingTaskTimeout().toStandardDuration();
if (durationSinceInsertion.isEqual(timeoutDuration) || durationSinceInsertion.isLongerThan(timeoutDuration)) {
return true;
}
}
return false;
}
}
}
| indexing-service/src/main/java/io/druid/indexing/overlord/scaling/SimpleResourceManagementStrategy.java | /*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.indexing.overlord.scaling;
import com.google.api.client.repackaged.com.google.common.base.Joiner;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.collect.Collections2;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.emitter.EmittingLogger;
import io.druid.indexing.overlord.RemoteTaskRunnerWorkItem;
import io.druid.indexing.overlord.TaskRunnerWorkItem;
import io.druid.indexing.overlord.ZkWorker;
import io.druid.indexing.overlord.setup.WorkerSetupData;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
*/
public class SimpleResourceManagementStrategy implements ResourceManagementStrategy
{
private static final EmittingLogger log = new EmittingLogger(SimpleResourceManagementStrategy.class);
private final AutoScalingStrategy autoScalingStrategy;
private final SimpleResourceManagementConfig config;
private final Supplier<WorkerSetupData> workerSetupDataRef;
private final ScalingStats scalingStats;
private final Object lock = new Object();
private final Set<String> currentlyProvisioning = Sets.newHashSet();
private final Set<String> currentlyTerminating = Sets.newHashSet();
private final Predicate<ZkWorker> isLazyWorker = new Predicate<ZkWorker>()
{
@Override
public boolean apply(ZkWorker input)
{
return input.getRunningTasks().isEmpty()
&& System.currentTimeMillis() - input.getLastCompletedTaskTime().getMillis()
>= config.getWorkerIdleTimeout().toStandardDuration().getMillis();
}
};
private int targetWorkerCount = -1;
private DateTime lastProvisionTime = new DateTime();
private DateTime lastTerminateTime = new DateTime();
@Inject
public SimpleResourceManagementStrategy(
AutoScalingStrategy autoScalingStrategy,
SimpleResourceManagementConfig config,
Supplier<WorkerSetupData> workerSetupDataRef
)
{
this.autoScalingStrategy = autoScalingStrategy;
this.config = config;
this.workerSetupDataRef = workerSetupDataRef;
this.scalingStats = new ScalingStats(config.getNumEventsToTrack());
}
@Override
public boolean doProvision(Collection<RemoteTaskRunnerWorkItem> pendingTasks, Collection<ZkWorker> zkWorkers)
{
synchronized (lock) {
boolean didProvision = false;
final WorkerSetupData workerSetupData = workerSetupDataRef.get();
final Predicate<ZkWorker> isValidWorker = createValidWorkerPredicate(workerSetupData);
final int currValidWorkers = Collections2.filter(zkWorkers, isValidWorker).size();
final List<String> workerNodeIds = autoScalingStrategy.ipToIdLookup(
Lists.newArrayList(
Iterables.<ZkWorker, String>transform(
zkWorkers,
new Function<ZkWorker, String>()
{
@Override
public String apply(ZkWorker input)
{
return input.getWorker().getIp();
}
}
)
)
);
currentlyProvisioning.removeAll(workerNodeIds);
updateTargetWorkerCount(pendingTasks, zkWorkers);
if (currentlyProvisioning.isEmpty()) {
int want = targetWorkerCount - (currValidWorkers + currentlyProvisioning.size());
while (want > 0) {
final AutoScalingData provisioned = autoScalingStrategy.provision();
if (provisioned == null) {
break;
} else {
currentlyProvisioning.addAll(provisioned.getNodeIds());
lastProvisionTime = new DateTime();
scalingStats.addProvisionEvent(provisioned);
want -= provisioned.getNodeIds().size();
didProvision = true;
}
}
} else {
Duration durSinceLastProvision = new Duration(lastProvisionTime, new DateTime());
log.info("%s provisioning. Current wait time: %s", currentlyProvisioning, durSinceLastProvision);
if (durSinceLastProvision.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) {
log.makeAlert("Worker node provisioning taking too long!")
.addData("millisSinceLastProvision", durSinceLastProvision.getMillis())
.addData("provisioningCount", currentlyProvisioning.size())
.emit();
List<String> nodeIps = autoScalingStrategy.idToIpLookup(Lists.newArrayList(currentlyProvisioning));
autoScalingStrategy.terminate(nodeIps);
currentlyProvisioning.clear();
}
}
return didProvision;
}
}
@Override
public boolean doTerminate(Collection<RemoteTaskRunnerWorkItem> pendingTasks, Collection<ZkWorker> zkWorkers)
{
synchronized (lock) {
boolean didTerminate = false;
final Set<String> workerNodeIds = Sets.newHashSet(
autoScalingStrategy.ipToIdLookup(
Lists.newArrayList(
Iterables.transform(
zkWorkers,
new Function<ZkWorker, String>()
{
@Override
public String apply(ZkWorker input)
{
return input.getWorker().getIp();
}
}
)
)
)
);
final Set<String> stillExisting = Sets.newHashSet();
for (String s : currentlyTerminating) {
if (workerNodeIds.contains(s)) {
stillExisting.add(s);
}
}
currentlyTerminating.clear();
currentlyTerminating.addAll(stillExisting);
updateTargetWorkerCount(pendingTasks, zkWorkers);
if (currentlyTerminating.isEmpty()) {
final int want = zkWorkers.size() - targetWorkerCount;
if (want > 0) {
final List<String> laziestWorkerIps =
FluentIterable.from(zkWorkers)
.filter(isLazyWorker)
.limit(want)
.transform(
new Function<ZkWorker, String>()
{
@Override
public String apply(ZkWorker zkWorker)
{
return zkWorker.getWorker().getIp();
}
}
)
.toList();
log.info(
"Terminating %,d workers (wanted %,d): %s",
laziestWorkerIps.size(),
want,
Joiner.on(", ").join(laziestWorkerIps)
);
final AutoScalingData terminated = autoScalingStrategy.terminate(laziestWorkerIps);
if (terminated != null) {
currentlyTerminating.addAll(terminated.getNodeIds());
lastTerminateTime = new DateTime();
scalingStats.addTerminateEvent(terminated);
didTerminate = true;
}
}
} else {
Duration durSinceLastTerminate = new Duration(lastTerminateTime, new DateTime());
log.info("%s terminating. Current wait time: ", currentlyTerminating, durSinceLastTerminate);
if (durSinceLastTerminate.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) {
log.makeAlert("Worker node termination taking too long!")
.addData("millisSinceLastTerminate", durSinceLastTerminate.getMillis())
.addData("terminatingCount", currentlyTerminating.size())
.emit();
currentlyTerminating.clear();
}
}
return didTerminate;
}
}
@Override
public ScalingStats getStats()
{
return scalingStats;
}
private Predicate<ZkWorker> createValidWorkerPredicate(final WorkerSetupData workerSetupData)
{
return new Predicate<ZkWorker>()
{
@Override
public boolean apply(ZkWorker zkWorker)
{
final String minVersion = workerSetupData.getMinVersion() != null
? workerSetupData.getMinVersion()
: config.getWorkerVersion();
if (minVersion == null) {
throw new ISE("No minVersion found! It should be set in your runtime properties or configuration database.");
}
return zkWorker.isValidVersion(minVersion);
}
};
}
private void updateTargetWorkerCount(
final Collection<RemoteTaskRunnerWorkItem> pendingTasks,
final Collection<ZkWorker> zkWorkers
)
{
synchronized (lock) {
final WorkerSetupData workerSetupData = workerSetupDataRef.get();
if (targetWorkerCount < 0) {
// Initialize to size of current worker pool
targetWorkerCount = zkWorkers.size();
log.info(
"Starting with %,d workers (min = %,d, max = %,d).",
targetWorkerCount,
workerSetupData.getMinNumWorkers(),
workerSetupData.getMaxNumWorkers()
);
}
final Collection<ZkWorker> validWorkers = Collections2.filter(
zkWorkers,
createValidWorkerPredicate(workerSetupData)
);
final boolean atSteadyState = currentlyProvisioning.isEmpty()
&& currentlyTerminating.isEmpty()
&& validWorkers.size() == targetWorkerCount;
final boolean shouldScaleUp = atSteadyState
&& hasTaskPendingBeyondThreshold(pendingTasks)
&& targetWorkerCount < workerSetupData.getMaxNumWorkers();
final boolean shouldScaleDown = atSteadyState
&& Iterables.any(validWorkers, isLazyWorker)
&& targetWorkerCount > workerSetupData.getMinNumWorkers();
if (shouldScaleUp) {
targetWorkerCount++;
log.info(
"I think we should scale up to %,d workers (current = %,d, min = %,d, max = %,d).",
targetWorkerCount,
validWorkers.size(),
workerSetupData.getMinNumWorkers(),
workerSetupData.getMaxNumWorkers()
);
} else if (shouldScaleDown) {
targetWorkerCount--;
log.info(
"I think we should scale down to %,d workers (current = %,d, min = %,d, max = %,d).",
targetWorkerCount,
validWorkers.size(),
workerSetupData.getMinNumWorkers(),
workerSetupData.getMaxNumWorkers()
);
} else {
log.info(
"Our target is %,d workers, and I'm okay with that (current = %,d, min = %,d, max = %,d).",
targetWorkerCount,
validWorkers.size(),
workerSetupData.getMinNumWorkers(),
workerSetupData.getMaxNumWorkers()
);
}
}
}
private boolean hasTaskPendingBeyondThreshold(Collection<RemoteTaskRunnerWorkItem> pendingTasks)
{
synchronized (lock) {
long now = System.currentTimeMillis();
for (TaskRunnerWorkItem pendingTask : pendingTasks) {
final Duration durationSinceInsertion = new Duration(pendingTask.getQueueInsertionTime().getMillis(), now);
final Duration timeoutDuration = config.getPendingTaskTimeout().toStandardDuration();
if (durationSinceInsertion.isEqual(timeoutDuration) || durationSinceInsertion.isLongerThan(timeoutDuration)) {
return true;
}
}
return false;
}
}
}
| Fix import
| indexing-service/src/main/java/io/druid/indexing/overlord/scaling/SimpleResourceManagementStrategy.java | Fix import | <ide><path>ndexing-service/src/main/java/io/druid/indexing/overlord/scaling/SimpleResourceManagementStrategy.java
<ide>
<ide> package io.druid.indexing.overlord.scaling;
<ide>
<del>import com.google.api.client.repackaged.com.google.common.base.Joiner;
<ide> import com.google.common.base.Function;
<add>import com.google.common.base.Joiner;
<ide> import com.google.common.base.Predicate;
<ide> import com.google.common.base.Supplier;
<ide> import com.google.common.collect.Collections2; |
|
Java | apache-2.0 | 56e7dcd6588134b48933ecc0e3200a5848b0cfa6 | 0 | dgrove727/autopsy,APriestman/autopsy,millmanorama/autopsy,rcordovano/autopsy,narfindustries/autopsy,wschaeferB/autopsy,APriestman/autopsy,APriestman/autopsy,dgrove727/autopsy,wschaeferB/autopsy,APriestman/autopsy,narfindustries/autopsy,millmanorama/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,rcordovano/autopsy,rcordovano/autopsy,dgrove727/autopsy,APriestman/autopsy,esaunders/autopsy,esaunders/autopsy,APriestman/autopsy,esaunders/autopsy,esaunders/autopsy,narfindustries/autopsy,millmanorama/autopsy,rcordovano/autopsy,rcordovano/autopsy,millmanorama/autopsy,esaunders/autopsy,APriestman/autopsy,rcordovano/autopsy,wschaeferB/autopsy | /*
* Autopsy Forensic Browser
*
* Copyright 2011-2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
package org.sleuthkit.autopsy.keywordsearch;
import com.google.common.base.CharMatcher;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.validator.routines.checkdigit.LuhnCheckDigit;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.TermsResponse.Term;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.datamodel.CreditCards;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
* Performs a regular expression query to the SOLR/Lucene instance.
*/
final class TermComponentQuery implements KeywordSearchQuery {
private static final Logger LOGGER = Logger.getLogger(TermComponentQuery.class.getName());
private static final boolean DEBUG = Version.Type.DEVELOPMENT.equals(Version.getBuildType());
private static final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();
private static final BlackboardAttribute.Type KEYWORD_SEARCH_DOCUMENT_ID = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_DOCUMENT_ID);
//TODO: move these regex and the luhn check to a new class, something like: CreditCardNumberValidator
/*
* Track 2 is numeric plus six punctuation symbolls :;<=>?
*
* This regex matches 12-19 digit ccns embeded in a track 2 formated string.
* This regex matches (and extracts groups) even if the entire track is not
* present as long as the part that is conforms to the track format.
*
*/
private static final Pattern TRACK2_PATTERN = Pattern.compile(
"[:;<=>?]?" //(optional)start sentinel //NON-NLS
+ "(?<accountNumber>[3456]([ -]?\\d){11,18})" //12-19 digits, with possible single spaces or dashes in between. first digit is 3,4,5, or 6 //NON-NLS
+ "(?:[:;<=>?]" //separator //NON-NLS
+ "(?:(?<expiration>\\d{4})" //4 digit expiration date YYMM //NON-NLS
+ "(?:(?<serviceCode>\\d{3})" //3 digit service code //NON-NLS
+ "(?:(?<discretionary>[^:;<=>?]*)" //discretionary data, not containing punctuation marks //NON-NLS
+ "(?:[:;<=>?]" //end sentinel //NON-NLS
+ "(?<LRC>.)" //longitudinal redundancy check //NON-NLS
+ "?)?)?)?)?)?"); //close nested optional groups //NON-NLS
/*
* Track 1 is alphanumeric.
*
* This regex matches 12-19 digit ccns embeded in a track 1 formated string.
* This regex matches (and extracts groups) even if the entire track is not
* present as long as the part that is conforms to the track format.
*/
private static final Pattern TRACK1_PATTERN = Pattern.compile(
"(?:" //begin nested optinal group //NON-NLS
+ "%?" //optional start sentinal: % //NON-NLS
+ "B)?" //format code //NON-NLS
+ "(?<accountNumber>[3456]([ -]?\\d){11,18})" //12-19 digits, with possible single spaces or dashes in between. first digit is 3,4,5, or 6 //NON-NLS
+ "\\^" //separator //NON-NLS
+ "(?<name>[^^]{2,26})" //2-26 charachter name, not containing ^ //NON-NLS
+ "(?:\\^" //separator //NON-NLS
+ "(?:(?:\\^|(?<expiration>\\d{4}))" //separator or 4 digit expiration YYMM //NON-NLS
+ "(?:(?:\\^|(?<serviceCode>\\d{3}))"//separator or 3 digit service code //NON-NLS
+ "(?:(?<discretionary>[^?]*)" // discretionary data not containing separator //NON-NLS
+ "(?:\\?" // end sentinal: ? //NON-NLS
+ "(?<LRC>.)" //longitudinal redundancy check //NON-NLS
+ "?)?)?)?)?)?");//close nested optional groups //NON-NLS
private static final Pattern CCN_PATTERN = Pattern.compile("(?<ccn>[3456]([ -]?\\d){11,18})"); //12-19 digits, with possible single spaces or dashes in between. first digit is 3,4,5, or 6 //NON-NLS
private static final LuhnCheckDigit LUHN_CHECK = new LuhnCheckDigit();
//corresponds to field in Solr schema, analyzed with white-space tokenizer only
private static final String TERMS_SEARCH_FIELD = Server.Schema.CONTENT_WS.toString();
private static final String TERMS_HANDLER = "/terms"; //NON-NLS
private static final int TERMS_TIMEOUT = 90 * 1000; //in ms
private static final String CASE_INSENSITIVE = "case_insensitive"; //NON-NLS
private static final int MAX_TERMS_RESULTS = 20000;
private String escapedQuery;
private final KeywordList keywordList;
private final Keyword keyword;
private boolean isEscaped;
private final List<KeywordQueryFilter> filters = new ArrayList<>();
TermComponentQuery(KeywordList keywordList, Keyword keyword) {
this.keyword = keyword;
this.keywordList = keywordList;
this.escapedQuery = keyword.getQuery();
}
@Override
public void addFilter(KeywordQueryFilter filter) {
this.filters.add(filter);
}
/**
* @param field
*
* @deprecated This method is unused and no-op
*/
@Override
@Deprecated
public void setField(String field) {
}
@Override
public void setSubstringQuery() {
escapedQuery = ".*" + escapedQuery + ".*";
}
@Override
public void escape() {
escapedQuery = Pattern.quote(keyword.getQuery());
isEscaped = true;
}
@Override
public boolean validate() {
if (escapedQuery.isEmpty()) {
return false;
}
try {
Pattern.compile(escapedQuery);
return true;
} catch (IllegalArgumentException ex) {
return false;
}
}
@Override
public boolean isEscaped() {
return isEscaped;
}
@Override
public boolean isLiteral() {
return false;
}
@Override
public String getEscapedQueryString() {
return this.escapedQuery;
}
@Override
public String getQueryString() {
return keyword.getQuery();
}
@Override
public KeywordCachedArtifact writeSingleFileHitsToBlackBoard(String termHit, KeywordHit hit, String snippet, String listName) {
BlackboardArtifact newArtifact;
Collection<BlackboardAttribute> attributes = new ArrayList<>();
if (keyword.getType() == ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE, MODULE_NAME, Account.Type.CREDIT_CARD.name()));
Map<BlackboardAttribute.Type, BlackboardAttribute> parsedTrackAttributeMap = new HashMap<>();
//try to match it against the track 1 regex
Matcher matcher = TRACK1_PATTERN.matcher(hit.getSnippet());
if (matcher.find()) {
parseTrack1Data(parsedTrackAttributeMap, matcher);
}
//then try to match it against the track 2 regex
matcher = TRACK2_PATTERN.matcher(hit.getSnippet());
if (matcher.find()) {
parseTrack2Data(parsedTrackAttributeMap, matcher);
}
//if we couldn't parse the CCN abort this artifact
final BlackboardAttribute ccnAttribute = parsedTrackAttributeMap.get(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_CARD_NUMBER));
if (ccnAttribute == null || StringUtils.isBlank(ccnAttribute.getValueString())) {
if (hit.isArtifactHit()) {
LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", termHit, hit.getSnippet(), hit.getArtifact().getArtifactID()));
} else {
LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", termHit, hit.getSnippet(), hit.getContent().getId()));
}
return null;
}
attributes.addAll(parsedTrackAttributeMap.values());
//look up the bank name, schem, etc from the BIN
final int bin = Integer.parseInt(ccnAttribute.getValueString().substring(0, 8));
CreditCards.BankIdentificationNumber binInfo = CreditCards.getBINInfo(bin);
if (binInfo != null) {
binInfo.getScheme().ifPresent(scheme
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_SCHEME, MODULE_NAME, scheme)));
binInfo.getCardType().ifPresent(cardType
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_TYPE, MODULE_NAME, cardType)));
binInfo.getBrand().ifPresent(brand
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BRAND_NAME, MODULE_NAME, brand)));
binInfo.getBankName().ifPresent(bankName
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BANK_NAME, MODULE_NAME, bankName)));
binInfo.getBankPhoneNumber().ifPresent(phoneNumber
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, MODULE_NAME, phoneNumber)));
binInfo.getBankURL().ifPresent(url
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, MODULE_NAME, url)));
binInfo.getCountry().ifPresent(country
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNTRY, MODULE_NAME, country)));
binInfo.getBankCity().ifPresent(city
-> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CITY, MODULE_NAME, city)));
}
/* if the hit is from unused or unalocated blocks, record the
* KEYWORD_SEARCH_DOCUMENT_ID, so we can show just that chunk in the
* UI
*/
if (hit.getContent() instanceof AbstractFile) {
AbstractFile file = (AbstractFile) hit.getContent();
if (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS
|| file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
attributes.add(new BlackboardAttribute(KEYWORD_SEARCH_DOCUMENT_ID, MODULE_NAME, hit.getSolrDocumentId()));
}
}
// make account artifact
try {
newArtifact = hit.getContent().newArtifact(ARTIFACT_TYPE.TSK_ACCOUNT);
} catch (TskCoreException tskCoreException) {
LOGGER.log(Level.SEVERE, "Error adding bb artifact for account", tskCoreException); //NON-NLS
return null;
}
} else {
//regex match
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, termHit));
//regex keyword
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP, MODULE_NAME, keyword.getQuery()));
//make keyword hit artifact
try {
newArtifact = hit.getContent().newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
} catch (TskCoreException tskCoreException) {
LOGGER.log(Level.SEVERE, "Error adding bb artifact for keyword hit", tskCoreException); //NON-NLS
return null;
}
}
if (StringUtils.isNotBlank(listName)) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
}
//preview
if (snippet != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
}
if (hit.isArtifactHit()) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, hit.getArtifact().getArtifactID()));
}
try {
//TODO: do we still/really need this KeywordCachedArtifact class?
newArtifact.addAttributes(attributes);
KeywordCachedArtifact writeResult = new KeywordCachedArtifact(newArtifact);
writeResult.add(attributes);
return writeResult;
} catch (TskCoreException e) {
LOGGER.log(Level.SEVERE, "Error adding bb attributes for terms search artifact", e); //NON-NLS
return null;
}
}
@Override
public QueryResults performQuery() throws NoOpenCoreException {
/*
* Execute the regex query to get a list of terms that match the regex.
* Note that the field that is being searched is tokenized based on
* whitespace.
*/
//create the query
final SolrQuery q = new SolrQuery();
q.setRequestHandler(TERMS_HANDLER);
q.setTerms(true);
q.setTermsRegexFlag(CASE_INSENSITIVE);
q.setTermsRegex(escapedQuery);
q.addTermsField(TERMS_SEARCH_FIELD);
q.setTimeAllowed(TERMS_TIMEOUT);
q.setShowDebugInfo(DEBUG);
q.setTermsLimit(MAX_TERMS_RESULTS);
LOGGER.log(Level.INFO, "Query: {0}", q.toString()); //NON-NLS
//execute the query
List<Term> terms = null;
try {
terms = KeywordSearch.getServer().queryTerms(q).getTerms(TERMS_SEARCH_FIELD);
} catch (KeywordSearchModuleException ex) {
LOGGER.log(Level.SEVERE, "Error executing the regex terms query: " + keyword.getQuery(), ex); //NON-NLS
//TODO: this is almost certainly wrong and guaranteed to throw a NPE at some point!!!!
}
/*
* For each term that matched the regex, query for full set of document
* hits for that term.
*/
QueryResults results = new QueryResults(this, keywordList);
int resultSize = 0;
for (Term term : terms) {
final String termStr = KeywordSearchUtil.escapeLuceneQuery(term.getTerm());
if (keyword.getType() == ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
//If the keyword is a credit card number, pass it through luhn validator
Matcher matcher = CCN_PATTERN.matcher(term.getTerm());
matcher.find();
final String ccn = CharMatcher.anyOf(" -").removeFrom(matcher.group("ccn"));
if (false == LUHN_CHECK.isValid(ccn)) {
continue; //if the hit does not pass the luhn check, skip it.
}
}
/*
* Note: we can't set filter query on terms query but setting filter
* query on fileResults query will yield the same result
*/
LuceneQuery filesQuery = new LuceneQuery(keywordList, new Keyword(termStr, true));
filters.forEach(filesQuery::addFilter);
try {
QueryResults fileQueryResults = filesQuery.performQuery();
Set<KeywordHit> filesResults = new HashSet<>();
for (Keyword key : fileQueryResults.getKeywords()) { //flatten results into a single list
List<KeywordHit> keyRes = fileQueryResults.getResults(key);
resultSize += keyRes.size();
filesResults.addAll(keyRes);
}
results.addResult(new Keyword(term.getTerm(), false), new ArrayList<>(filesResults));
} catch (NoOpenCoreException | RuntimeException e) {
LOGGER.log(Level.WARNING, "Error executing Solr query,", e); //NON-NLS
throw e;
}
}
//TODO limit how many results we store, not to hit memory limits
LOGGER.log(Level.INFO, "Regex # results: {0}", resultSize); //NON-NLS
return results;
}
@Override
public KeywordList getKeywordList() {
return keywordList;
}
/**
* Add an attribute of the the given type to the given artifact with the
* value taken from the matcher. If an attribute of the given type already
* exists on the artifact or if the value is null, no attribute is added.
*
* @param attributeMap
* @param attrType
* @param groupName
* @param matcher *
*/
static private void addAttributeIfNotAlreadyCaptured(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, ATTRIBUTE_TYPE attrType, String groupName, Matcher matcher) {
BlackboardAttribute.Type type = new BlackboardAttribute.Type(attrType);
attributeMap.computeIfAbsent(type, (BlackboardAttribute.Type t) -> {
String value = matcher.group(groupName);
if (attrType.equals(ATTRIBUTE_TYPE.TSK_CARD_NUMBER)) {
value = CharMatcher.anyOf(" -").removeFrom(value);
}
if (StringUtils.isNotBlank(value)) {
return new BlackboardAttribute(attrType, MODULE_NAME, value);
}
return null;
});
}
/**
* Parse the track 2 data from a KeywordHit and add it to the given
* artifact.
*
* @param attributeMAp
* @param matcher
*/
static private void parseTrack2Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMAp, Matcher matcher) {
//try to add all the attrributes common to track 1 and 2
addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_NUMBER, "accountNumber", matcher);
addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_EXPIRATION, "expiration", matcher);
addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_SERVICE_CODE, "serviceCode", matcher);
addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_DISCRETIONARY, "discretionary", matcher);
addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_LRC, "LRC", matcher);
}
/**
* Parse the track 1 data from a KeywordHit and add it to the given
* artifact.
*
* @param attributeMap
* @param matcher
*/
static private void parseTrack1Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, Matcher matcher) {
// track 1 has all the fields present in track 2
parseTrack2Data(attributeMap, matcher);
//plus it also has the account holders name
addAttributeIfNotAlreadyCaptured(attributeMap, ATTRIBUTE_TYPE.TSK_NAME_PERSON, "name", matcher);
}
}
| KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TermComponentQuery.java | /*
* Autopsy Forensic Browser
*
* Copyright 2011-2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
package org.sleuthkit.autopsy.keywordsearch;
import com.google.common.base.CharMatcher;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.validator.routines.checkdigit.LuhnCheckDigit;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.TermsResponse.Term;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.datamodel.CreditCards;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
 * Performs a regular expression query against the SOLR/Lucene instance. The regex is first
 * matched against the indexed terms; each matching term is then used in a follow-up query to
 * find the documents (content) that contain it.
 */
final class TermComponentQuery implements KeywordSearchQuery {

    private static final Logger LOGGER = Logger.getLogger(TermComponentQuery.class.getName());
    private static final boolean DEBUG = Version.Type.DEVELOPMENT.equals(Version.getBuildType());
    private static final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();
    private static final BlackboardAttribute.Type KEYWORD_SEARCH_DOCUMENT_ID = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_DOCUMENT_ID);

    //TODO: move these regex and the luhn check to a new class, something like: CreditCardNumberValidator
    /*
     * Track 2 is numeric plus six punctuation symbols :;<=>?
     *
     * This regex matches 12-19 digit ccns embedded in a track 2 formatted string.
     * This regex matches (and extracts groups) even if the entire track is not
     * present as long as the part that is conforms to the track format.
     */
    private static final Pattern TRACK2_PATTERN = Pattern.compile(
            "[:;<=>?]?" //(optional)start sentinel //NON-NLS
            + "(?<accountNumber>[3456]([ -]?\\d){11,18})" //12-19 digits, with possible single spaces or dashes in between. first digit is 3,4,5, or 6 //NON-NLS
            + "(?:[:;<=>?]" //separator //NON-NLS
            + "(?:(?<expiration>\\d{4})" //4 digit expiration date YYMM //NON-NLS
            + "(?:(?<serviceCode>\\d{3})" //3 digit service code //NON-NLS
            + "(?:(?<discretionary>[^:;<=>?]*)" //discretionary data, not containing punctuation marks //NON-NLS
            + "(?:[:;<=>?]" //end sentinel //NON-NLS
            + "(?<LRC>.)" //longitudinal redundancy check //NON-NLS
            + "?)?)?)?)?)?"); //close nested optional groups //NON-NLS

    /*
     * Track 1 is alphanumeric.
     *
     * This regex matches 12-19 digit ccns embedded in a track 1 formatted string.
     * This regex matches (and extracts groups) even if the entire track is not
     * present as long as the part that is conforms to the track format.
     */
    private static final Pattern TRACK1_PATTERN = Pattern.compile(
            "(?:" //begin nested optional group //NON-NLS
            + "%?" //optional start sentinel: % //NON-NLS
            + "B)?" //format code //NON-NLS
            + "(?<accountNumber>[3456]([ -]?\\d){11,18})" //12-19 digits, with possible single spaces or dashes in between. first digit is 3,4,5, or 6 //NON-NLS
            + "\\^" //separator //NON-NLS
            + "(?<name>[^^]{2,26})" //2-26 character name, not containing ^ //NON-NLS
            + "(?:\\^" //separator //NON-NLS
            + "(?:(?:\\^|(?<expiration>\\d{4}))" //separator or 4 digit expiration YYMM //NON-NLS
            + "(?:(?:\\^|(?<serviceCode>\\d{3}))"//separator or 3 digit service code //NON-NLS
            + "(?:(?<discretionary>[^?]*)" // discretionary data not containing separator //NON-NLS
            + "(?:\\?" // end sentinel: ? //NON-NLS
            + "(?<LRC>.)" //longitudinal redundancy check //NON-NLS
            + "?)?)?)?)?)?");//close nested optional groups //NON-NLS

    private static final Pattern CCN_PATTERN = Pattern.compile("(?<ccn>[3456]([ -]?\\d){11,18})"); //12-19 digits, with possible single spaces or dashes in between. first digit is 3,4,5, or 6 //NON-NLS
    private static final LuhnCheckDigit LUHN_CHECK = new LuhnCheckDigit();

    //corresponds to field in Solr schema, analyzed with white-space tokenizer only
    private static final String TERMS_SEARCH_FIELD = Server.Schema.CONTENT_WS.toString();
    private static final String TERMS_HANDLER = "/terms"; //NON-NLS
    private static final int TERMS_TIMEOUT = 90 * 1000; //in ms
    private static final String CASE_INSENSITIVE = "case_insensitive"; //NON-NLS
    private static final int MAX_TERMS_RESULTS = 20000;

    // The regex to run, possibly Pattern.quote()d (see escape()) or wrapped in .* (see
    // setSubstringQuery()).
    private String escapedQuery;
    private final KeywordList keywordList;
    private final Keyword keyword;
    private boolean isEscaped;
    private final List<KeywordQueryFilter> filters = new ArrayList<>();

    /**
     * @param keywordList the list the keyword belongs to (used to tag results)
     * @param keyword     the regex keyword to search for
     */
    TermComponentQuery(KeywordList keywordList, Keyword keyword) {
        this.keyword = keyword;
        this.keywordList = keywordList;
        this.escapedQuery = keyword.getQuery();
    }

    @Override
    public void addFilter(KeywordQueryFilter filter) {
        this.filters.add(filter);
    }

    /**
     * @param field
     *
     * @deprecated This method is unused and no-op
     */
    @Override
    @Deprecated
    public void setField(String field) {
    }

    @Override
    public void setSubstringQuery() {
        escapedQuery = ".*" + escapedQuery + ".*";
    }

    @Override
    public void escape() {
        escapedQuery = Pattern.quote(keyword.getQuery());
        isEscaped = true;
    }

    /**
     * @return true if the query is non-empty and is a syntactically valid regex
     */
    @Override
    public boolean validate() {
        if (escapedQuery.isEmpty()) {
            return false;
        }
        try {
            //PatternSyntaxException extends IllegalArgumentException
            Pattern.compile(escapedQuery);
            return true;
        } catch (IllegalArgumentException ex) {
            return false;
        }
    }

    @Override
    public boolean isEscaped() {
        return isEscaped;
    }

    @Override
    public boolean isLiteral() {
        return false;
    }

    @Override
    public String getEscapedQueryString() {
        return this.escapedQuery;
    }

    @Override
    public String getQueryString() {
        return keyword.getQuery();
    }

    /**
     * Writes a blackboard artifact for a single keyword hit. Credit card number hits produce a
     * TSK_ACCOUNT artifact (with parsed track data and BIN lookup attributes); all other regex
     * hits produce a TSK_KEYWORD_HIT artifact.
     *
     * @param termHit  the matched term
     * @param hit      the keyword hit to record
     * @param snippet  a preview snippet around the hit, may be null
     * @param listName the name of the keyword list, may be blank
     *
     * @return the cached artifact, or null if the artifact could not be created
     */
    @Override
    public KeywordCachedArtifact writeSingleFileHitsToBlackBoard(String termHit, KeywordHit hit, String snippet, String listName) {
        BlackboardArtifact newArtifact;
        Collection<BlackboardAttribute> attributes = new ArrayList<>();
        if (keyword.getType() == ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE, MODULE_NAME, Account.Type.CREDIT_CARD.name()));
            Map<BlackboardAttribute.Type, BlackboardAttribute> parsedTrackAttributeMap = new HashMap<>();

            //try to match it against the track 1 regex
            Matcher matcher = TRACK1_PATTERN.matcher(hit.getSnippet());
            if (matcher.find()) {
                parseTrack1Data(parsedTrackAttributeMap, matcher);
            }
            //then try to match it against the track 2 regex
            matcher = TRACK2_PATTERN.matcher(hit.getSnippet());
            if (matcher.find()) {
                parseTrack2Data(parsedTrackAttributeMap, matcher);
            }

            //if we couldn't parse the CCN abort this artifact
            final BlackboardAttribute ccnAttribute = parsedTrackAttributeMap.get(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_CARD_NUMBER));
            if (ccnAttribute == null || StringUtils.isBlank(ccnAttribute.getValueString())) {
                //log enough context (term, snippet, and the originating artifact/content id)
                //to diagnose why the number could not be parsed
                if (hit.isArtifactHit()) {
                    LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", termHit, hit.getSnippet(), hit.getArtifact().getArtifactID()));
                } else {
                    LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", termHit, hit.getSnippet(), hit.getContent().getId()));
                }
                return null;
            }
            attributes.addAll(parsedTrackAttributeMap.values());

            //look up the bank name, scheme, etc from the BIN (first 8 digits; the CCN regex
            //guarantees at least 12 digits)
            final int bin = Integer.parseInt(ccnAttribute.getValueString().substring(0, 8));
            CreditCards.BankIdentificationNumber binInfo = CreditCards.getBINInfo(bin);
            if (binInfo != null) {
                binInfo.getScheme().ifPresent(scheme
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_SCHEME, MODULE_NAME, scheme)));
                binInfo.getCardType().ifPresent(cardType
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_TYPE, MODULE_NAME, cardType)));
                binInfo.getBrand().ifPresent(brand
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BRAND_NAME, MODULE_NAME, brand)));
                binInfo.getBankName().ifPresent(bankName
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BANK_NAME, MODULE_NAME, bankName)));
                binInfo.getBankPhoneNumber().ifPresent(phoneNumber
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, MODULE_NAME, phoneNumber)));
                binInfo.getBankURL().ifPresent(url
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, MODULE_NAME, url)));
                binInfo.getCountry().ifPresent(country
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNTRY, MODULE_NAME, country)));
                binInfo.getBankCity().ifPresent(city
                        -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CITY, MODULE_NAME, city)));
            }

            /* if the hit is from unused or unallocated blocks, record the
             * KEYWORD_SEARCH_DOCUMENT_ID, so we can show just that chunk in the
             * UI
             */
            if (hit.getContent() instanceof AbstractFile) {
                AbstractFile file = (AbstractFile) hit.getContent();
                if (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS
                        || file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
                    attributes.add(new BlackboardAttribute(KEYWORD_SEARCH_DOCUMENT_ID, MODULE_NAME, hit.getSolrDocumentId()));
                }
            }

            // make account artifact
            try {
                newArtifact = hit.getContent().newArtifact(ARTIFACT_TYPE.TSK_ACCOUNT);
            } catch (TskCoreException tskCoreException) {
                LOGGER.log(Level.SEVERE, "Error adding bb artifact for account", tskCoreException); //NON-NLS
                return null;
            }
        } else {
            //regex match
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, termHit));
            //regex keyword
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP, MODULE_NAME, keyword.getQuery()));

            //make keyword hit artifact
            try {
                newArtifact = hit.getContent().newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
            } catch (TskCoreException tskCoreException) {
                LOGGER.log(Level.SEVERE, "Error adding bb artifact for keyword hit", tskCoreException); //NON-NLS
                return null;
            }
        }

        if (StringUtils.isNotBlank(listName)) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
        }
        //preview
        if (snippet != null) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
        }
        if (hit.isArtifactHit()) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, hit.getArtifact().getArtifactID()));
        }

        try {
            //TODO: do we still/really need this KeywordCachedArtifact class?
            newArtifact.addAttributes(attributes);
            KeywordCachedArtifact writeResult = new KeywordCachedArtifact(newArtifact);
            writeResult.add(attributes);
            return writeResult;
        } catch (TskCoreException e) {
            LOGGER.log(Level.SEVERE, "Error adding bb attributes for terms search artifact", e); //NON-NLS
            return null;
        }
    }

    /**
     * Executes the regex query: a Solr terms query finds all indexed terms matching the regex,
     * then a Lucene query is performed per term to collect the documents containing it. Credit
     * card number keywords are additionally Luhn-validated before their hits are collected.
     *
     * @return the query results, empty if the terms query failed
     *
     * @throws NoOpenCoreException if there is no open Solr core
     */
    @Override
    public QueryResults performQuery() throws NoOpenCoreException {
        /*
         * Execute the regex query to get a list of terms that match the regex.
         * Note that the field that is being searched is tokenized based on
         * whitespace.
         */
        //create the query
        final SolrQuery q = new SolrQuery();
        q.setRequestHandler(TERMS_HANDLER);
        q.setTerms(true);
        q.setTermsRegexFlag(CASE_INSENSITIVE);
        q.setTermsRegex(escapedQuery);
        q.addTermsField(TERMS_SEARCH_FIELD);
        q.setTimeAllowed(TERMS_TIMEOUT);
        q.setShowDebugInfo(DEBUG);
        q.setTermsLimit(MAX_TERMS_RESULTS);
        LOGGER.log(Level.INFO, "Query: {0}", q.toString()); //NON-NLS

        //execute the query
        List<Term> terms;
        try {
            terms = KeywordSearch.getServer().queryTerms(q).getTerms(TERMS_SEARCH_FIELD);
        } catch (KeywordSearchModuleException ex) {
            LOGGER.log(Level.SEVERE, "Error executing the regex terms query: " + keyword.getQuery(), ex); //NON-NLS
            //previously execution fell through here with terms == null, which made the
            //loop below throw an NPE; return an empty result set instead
            return new QueryResults(this, keywordList);
        }

        /*
         * For each term that matched the regex, query for full set of document
         * hits for that term.
         */
        QueryResults results = new QueryResults(this, keywordList);
        int resultSize = 0;

        for (Term term : terms) {
            final String termStr = KeywordSearchUtil.escapeLuceneQuery(term.getTerm());

            if (keyword.getType() == ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
                //If the keyword is a credit card number, pass it through luhn validator
                Matcher matcher = CCN_PATTERN.matcher(term.getTerm());
                if (false == matcher.find()) {
                    //calling group() without a successful find() would throw
                    //IllegalStateException; skip terms that contain no CCN
                    continue;
                }
                final String ccn = CharMatcher.anyOf(" -").removeFrom(matcher.group("ccn"));
                if (false == LUHN_CHECK.isValid(ccn)) {
                    continue; //if the hit does not pass the luhn check, skip it.
                }
            }

            /*
             * Note: we can't set filter query on terms query but setting filter
             * query on fileResults query will yield the same result
             */
            LuceneQuery filesQuery = new LuceneQuery(keywordList, new Keyword(termStr, true));
            filters.forEach(filesQuery::addFilter);

            try {
                QueryResults fileQueryResults = filesQuery.performQuery();
                Set<KeywordHit> filesResults = new HashSet<>();
                for (Keyword key : fileQueryResults.getKeywords()) { //flatten results into a single list
                    List<KeywordHit> keyRes = fileQueryResults.getResults(key);
                    resultSize += keyRes.size();
                    filesResults.addAll(keyRes);
                }
                results.addResult(new Keyword(term.getTerm(), false), new ArrayList<>(filesResults));
            } catch (NoOpenCoreException | RuntimeException e) {
                LOGGER.log(Level.WARNING, "Error executing Solr query,", e); //NON-NLS
                throw e;
            }
        }

        //TODO limit how many results we store, not to hit memory limits
        LOGGER.log(Level.INFO, "Regex # results: {0}", resultSize); //NON-NLS
        return results;
    }

    @Override
    public KeywordList getKeywordList() {
        return keywordList;
    }

    /**
     * Add an attribute of the the given type to the given artifact with the
     * value taken from the matcher. If an attribute of the given type already
     * exists on the artifact or if the value is null, no attribute is added.
     *
     * @param attributeMap the attributes parsed so far, keyed by type
     * @param attrType     the type of attribute to add
     * @param groupName    the name of the regex capture group to read the value from
     * @param matcher      a matcher that has already matched successfully
     */
    static private void addAttributeIfNotAlreadyCaptured(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, ATTRIBUTE_TYPE attrType, String groupName, Matcher matcher) {
        BlackboardAttribute.Type type = new BlackboardAttribute.Type(attrType);
        attributeMap.computeIfAbsent(type, (BlackboardAttribute.Type t) -> {
            String value = matcher.group(groupName);
            if (attrType.equals(ATTRIBUTE_TYPE.TSK_CARD_NUMBER)) {
                //normalize the card number by removing spaces and dashes
                value = CharMatcher.anyOf(" -").removeFrom(value);
            }
            if (StringUtils.isNotBlank(value)) {
                return new BlackboardAttribute(attrType, MODULE_NAME, value);
            }
            return null;
        });
    }

    /**
     * Parse the track 2 data from a KeywordHit and add it to the given
     * attribute map.
     *
     * @param attributeMAp the attributes parsed so far, keyed by type
     * @param matcher      a matcher for TRACK2_PATTERN that has already matched
     */
    static private void parseTrack2Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMAp, Matcher matcher) {
        //try to add all the attributes common to track 1 and 2
        addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_NUMBER, "accountNumber", matcher);
        addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_EXPIRATION, "expiration", matcher);
        addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_SERVICE_CODE, "serviceCode", matcher);
        addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_DISCRETIONARY, "discretionary", matcher);
        addAttributeIfNotAlreadyCaptured(attributeMAp, ATTRIBUTE_TYPE.TSK_CARD_LRC, "LRC", matcher);
    }

    /**
     * Parse the track 1 data from a KeywordHit and add it to the given
     * attribute map.
     *
     * @param attributeMap the attributes parsed so far, keyed by type
     * @param matcher      a matcher for TRACK1_PATTERN that has already matched
     */
    static private void parseTrack1Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, Matcher matcher) {
        // track 1 has all the fields present in track 2
        parseTrack2Data(attributeMap, matcher);
        //plus it also has the account holders name
        addAttributeIfNotAlreadyCaptured(attributeMap, ATTRIBUTE_TYPE.TSK_NAME_PERSON, "name", matcher);
    }
}
| Improve logging of credit card number parsing errors
| KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TermComponentQuery.java | Improve logging of credit card number parsing errors | <ide><path>eywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TermComponentQuery.java
<ide> //if we couldn't parse the CCN abort this artifact
<ide> final BlackboardAttribute ccnAttribute = parsedTrackAttributeMap.get(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_CARD_NUMBER));
<ide> if (ccnAttribute == null || StringUtils.isBlank(ccnAttribute.getValueString())) {
<del> LOGGER.log(Level.SEVERE, "Failed to parse CCN from hit: " + hit.getSnippet());
<add> if (hit.isArtifactHit()) {
<add> LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", termHit, hit.getSnippet(), hit.getArtifact().getArtifactID()));
<add> } else {
<add> LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", termHit, hit.getSnippet(), hit.getContent().getId()));
<add> }
<ide> return null;
<ide> }
<ide> |
|
Java | apache-2.0 | 85447432ac652259ae38c0effb0c56cc73d5b930 | 0 | googlearchive/science-journal,googlearchive/science-journal,googlearchive/science-journal | /*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.forscience.whistlepunk.project;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.net.ConnectivityManager;
import android.os.Bundle;
import android.os.Handler;
import androidx.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import androidx.fragment.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v4.widget.SwipeRefreshLayout.OnRefreshListener;
import androidx.appcompat.app.AlertDialog;
import androidx.recyclerview.widget.GridLayoutManager;
import androidx.appcompat.widget.PopupMenu;
import androidx.recyclerview.widget.RecyclerView;
import android.text.TextUtils;
import android.text.format.DateFormat;
import android.util.Log;
import android.util.TimingLogger;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import com.google.android.apps.forscience.javalib.Success;
import com.google.android.apps.forscience.whistlepunk.AccessibilityUtils;
import com.google.android.apps.forscience.whistlepunk.AppSingleton;
import com.google.android.apps.forscience.whistlepunk.Clock;
import com.google.android.apps.forscience.whistlepunk.ColorUtils;
import com.google.android.apps.forscience.whistlepunk.DataController;
import com.google.android.apps.forscience.whistlepunk.ExportService;
import com.google.android.apps.forscience.whistlepunk.LoggingConsumer;
import com.google.android.apps.forscience.whistlepunk.PictureUtils;
import com.google.android.apps.forscience.whistlepunk.R;
import com.google.android.apps.forscience.whistlepunk.RecorderController;
import com.google.android.apps.forscience.whistlepunk.RxDataController;
import com.google.android.apps.forscience.whistlepunk.RxEvent;
import com.google.android.apps.forscience.whistlepunk.SnackbarManager;
import com.google.android.apps.forscience.whistlepunk.WhistlePunkApplication;
import com.google.android.apps.forscience.whistlepunk.accounts.AccountsUtils;
import com.google.android.apps.forscience.whistlepunk.accounts.AppAccount;
import com.google.android.apps.forscience.whistlepunk.accounts.NonSignedInAccount;
import com.google.android.apps.forscience.whistlepunk.analytics.TrackerConstants;
import com.google.android.apps.forscience.whistlepunk.cloudsync.CloudSyncManager;
import com.google.android.apps.forscience.whistlepunk.cloudsync.CloudSyncProvider;
import com.google.android.apps.forscience.whistlepunk.featurediscovery.FeatureDiscoveryProvider;
import com.google.android.apps.forscience.whistlepunk.filemetadata.Experiment;
import com.google.android.apps.forscience.whistlepunk.filemetadata.ExperimentLibraryManager;
import com.google.android.apps.forscience.whistlepunk.filemetadata.FileMetadataUtil;
import com.google.android.apps.forscience.whistlepunk.filemetadata.Label;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciCaption;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciLabel;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciPictureLabelValue;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciTextLabelValue;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciUserMetadata;
import com.google.android.apps.forscience.whistlepunk.performance.PerfTrackerProvider;
import com.google.android.apps.forscience.whistlepunk.review.DeleteMetadataItemDialog;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
 * Experiment List Fragment lists all experiments that belong to an account. This fragment is used
 * in MainActivity and in ClaimExperimentsActivity. The claimExperimentsMode field can be used to
 * determine whether it is in ClaimExperimentsActivity.
 *
 * <p>When used in MainActivity, the appAccount field is the "current" account, which may be the
 * NonSignedInAccount.
 *
 * <p>When used in ClaimExperimentsActivity, the appAccount field is the NonSignedInAccount and the
 * claimingAccount field is the "current" signed-in account.
 *
 * <p>Note that the options menu is different based on whether claimExperimentsMode is true or
 * false. In claimExperimentsMode, the menu items for action_sync and action_network_disconnected do
 * not exist. Care should be taken to check for null before dereferencing the result of
 * Menu.findItem for these ids.
 */
public class ExperimentListFragment extends Fragment
implements DeleteMetadataItemDialog.DeleteDialogListener, OnRefreshListener {
// Log tag for this fragment.
private static final String TAG = "ExperimentListFragment";

/** Boolean extra for savedInstanceState with the state of includeArchived experiments. */
private static final String EXTRA_INCLUDE_ARCHIVED = "includeArchived";

// Fragment-argument keys; see createArguments() and newInstanceForClaimExperimentsMode().
private static final String ARG_ACCOUNT_KEY = "accountKey";
private static final String ARG_CLAIM_EXPERIMENTS_MODE = "claimExperimentsMode";
private static final String ARG_CLAIMING_ACCOUNT_KEY = "claimingAccountKey";
private static final String ARG_USE_PANES = "usePanes";

// Shared-preference key recording whether the default experiment was ever auto-created.
public static final String KEY_DEFAULT_EXPERIMENT_CREATED = "key_default_experiment_created";

// View tag used to locate the "new experiment" button for feature discovery.
private static final String TAG_NEW_EXPERIMENT_BUTTON = "new_experiment_button";

/** Duration of snackbar length long. 3.5 seconds */
private static final int LONG_DELAY_MILLIS = 3500;

private ExperimentListAdapter experimentListAdapter;
// Whether archived experiments are shown; always starts true in claim-experiments mode.
private boolean includeArchived;
private boolean progressBarVisible = false;
// Fired in onDestroy()/onPause() respectively to tear down the Rx subscriptions
// created in onCreate()/onResume().
private final RxEvent destroyed = new RxEvent();
private final RxEvent paused = new RxEvent();
private final IntentFilter networkIntentFilter = new IntentFilter();
// The account whose experiments are listed; the NonSignedInAccount in claim mode.
private AppAccount appAccount;
private boolean claimExperimentsMode;
// Only set in claim-experiments mode: the signed-in account that may claim experiments.
private AppAccount claimingAccount;
private ConnectivityBroadcastReceiver connectivityBroadcastReceiver;
private Menu optionsMenu = null;
private FeatureDiscoveryProvider featureDiscoveryProvider;
private SwipeRefreshLayout swipeLayout;
/** Creates a standard (non-claim-mode) experiment list fragment for the given account. */
public static ExperimentListFragment newInstance(AppAccount appAccount, boolean usePanes) {
  Bundle arguments = createArguments(appAccount, usePanes);
  return newInstance(arguments);
}
/** Builds a fragment instance carrying the given arguments bundle. */
private static ExperimentListFragment newInstance(Bundle arguments) {
  ExperimentListFragment listFragment = new ExperimentListFragment();
  listFragment.setArguments(arguments);
  return listFragment;
}
/** Packs the account key and the panes flag into a fragment-arguments bundle. */
private static Bundle createArguments(AppAccount appAccount, boolean usePanes) {
  Bundle arguments = new Bundle();
  arguments.putString(ARG_ACCOUNT_KEY, appAccount.getAccountKey());
  arguments.putBoolean(ARG_USE_PANES, usePanes);
  return arguments;
}
/**
 * Reuses the given fragment if it is already an ExperimentListFragment configured with the
 * same arguments; otherwise creates a fresh instance.
 */
public static ExperimentListFragment reuseOrCreateInstance(
    @Nullable Fragment fragment, AppAccount appAccount, boolean usePanes) {
  Bundle wantedArguments = createArguments(appAccount, usePanes);
  if (!(fragment instanceof ExperimentListFragment)) {
    return newInstance(wantedArguments);
  }
  if (!wantedArguments.equals(fragment.getArguments())) {
    return newInstance(wantedArguments);
  }
  return (ExperimentListFragment) fragment;
}
/**
 * Creates a fragment in claim-experiments mode: it lists the unclaimed (non-signed-in)
 * experiments and records the signed-in account that may claim them.
 */
public static ExperimentListFragment newInstanceForClaimExperimentsMode(
    Context context, AppAccount claimingAccount, boolean usePanes) {
  NonSignedInAccount nonSignedInAccount = NonSignedInAccount.getInstance(context);
  Bundle args = new Bundle();
  args.putString(ARG_ACCOUNT_KEY, nonSignedInAccount.getAccountKey());
  args.putBoolean(ARG_CLAIM_EXPERIMENTS_MODE, true);
  args.putString(ARG_CLAIMING_ACCOUNT_KEY, claimingAccount.getAccountKey());
  args.putBoolean(ARG_USE_PANES, usePanes);
  ExperimentListFragment claimFragment = new ExperimentListFragment();
  claimFragment.setArguments(args);
  return claimFragment;
}
/** Required public no-arg constructor; registers interest in connectivity changes. */
public ExperimentListFragment() {
  networkIntentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
}
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  // Toggle the progress bar whenever the export-busy state changes; the subscription
  // lives until onDestroy fires the "destroyed" event.
  AppSingleton.getInstance(getContext())
      .whenExportBusyChanges()
      .takeUntil(destroyed.happens())
      .subscribe(
          busy -> {
            setProgressBarVisible(busy);
          });
  appAccount = WhistlePunkApplication.getAccount(getContext(), getArguments(), ARG_ACCOUNT_KEY);
  claimExperimentsMode = getArguments().getBoolean(ARG_CLAIM_EXPERIMENTS_MODE);
  if (claimExperimentsMode) {
    claimingAccount =
        WhistlePunkApplication.getAccount(getContext(), getArguments(), ARG_CLAIMING_ACCOUNT_KEY);
    // In claim experiments mode, we always start with showing archived experiments, even if the
    // user hid them previously.
    includeArchived = true;
    getActivity().invalidateOptionsMenu();
  } else {
    // Outside claim mode, restore the include-archived toggle saved in onSaveInstanceState.
    if (savedInstanceState != null) {
      includeArchived = savedInstanceState.getBoolean(EXTRA_INCLUDE_ARCHIVED, false);
      getActivity().invalidateOptionsMenu();
    }
  }
  featureDiscoveryProvider =
      WhistlePunkApplication.getAppServices(getActivity()).getFeatureDiscoveryProvider();
  setHasOptionsMenu(true);
}
/** Records a screen-view analytics event each time the fragment becomes visible. */
@Override
public void onStart() {
  super.onStart();
  WhistlePunkApplication.getUsageTracker(getActivity())
      .trackScreenView(TrackerConstants.SCREEN_EXPERIMENT_LIST);
}
@Override
public void onResume() {
  super.onResume();
  setProgressBarVisible(progressBarVisible);
  // Listen for connectivity changes while resumed (unregistered in onPause).
  connectivityBroadcastReceiver = new ConnectivityBroadcastReceiver();
  getContext().registerReceiver(connectivityBroadcastReceiver, networkIntentFilter);
  TimingLogger timing = new TimingLogger(TAG, "Sync on Resume");
  // Reload the list on the UI thread whenever a new experiment finishes syncing;
  // the subscription is torn down when onPause fires the "paused" event.
  AppSingleton.getInstance(getContext())
      .whenNewExperimentSynced()
      .takeUntil(paused.happens())
      .subscribe(
          count -> {
            Handler uiHandler = new Handler(getContext().getMainLooper());
            uiHandler.post(
                () -> {
                  // This fragment may be gone by the time this code executes. Check getContext
                  // and give up if it is null, otherwise getResources() below will throw
                  // IllegalStateException.
                  if (getContext() == null) {
                    return;
                  }
                  loadExperiments();
                  timing.addSplit("Syncing complete");
                  timing.dumpToLog();
                });
          });
  // Show whatever is available locally right away, then kick off a sync.
  loadExperiments();
  syncNow("Sync On Resume");
}
/** Preserves the include-archived toggle across configuration changes. */
@Override
public void onSaveInstanceState(Bundle outState) {
  super.onSaveInstanceState(outState);
  outState.putBoolean(EXTRA_INCLUDE_ARCHIVED, includeArchived);
}
@Override
public void onDestroy() {
  // TODO: Use RxEvent here
  experimentListAdapter.onDestroy();
  // Tears down the subscriptions created in onCreate (see destroyed.happens()).
  destroyed.onHappened();
  super.onDestroy();
}
@Override
public void onPause() {
  // Mirror of onResume: stop listening for connectivity changes and tear down
  // the sync subscription (see paused.happens()).
  getContext().unregisterReceiver(connectivityBroadcastReceiver);
  paused.onHappened();
  super.onPause();
}
/**
 * Called when the user performs the swipe-to-refresh gesture. Dismisses the refresh
 * spinner immediately and kicks off an account sync; results arrive via the
 * whenNewExperimentSynced subscription set up in onResume.
 */
@Override
public void onRefresh() {
  swipeLayout.setRefreshing(false);
  syncNow("Sync on Pulldown");
}
@Override
public View onCreateView(
    LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
  View view = inflater.inflate(R.layout.fragment_experiment_list, container, false);
  final RecyclerView detailList = (RecyclerView) view.findViewById(R.id.details);
  experimentListAdapter = new ExperimentListAdapter(this);
  // Wire up pull-to-refresh; onRefresh() triggers a sync.
  swipeLayout = (SwipeRefreshLayout) view.findViewById(R.id.swipe_container);
  swipeLayout.setOnRefreshListener(this);
  // TODO: Adjust the column count based on breakpoint specs when available.
  int column_count = 2;
  GridLayoutManager manager = new GridLayoutManager(getActivity(), column_count);
  // Experiment cards take one column; every other view type (headers, cards)
  // spans the full grid width.
  manager.setSpanSizeLookup(
      new GridLayoutManager.SpanSizeLookup() {
        @Override
        public int getSpanSize(int position) {
          return experimentListAdapter.getItemViewType(position)
                  == ExperimentListAdapter.VIEW_TYPE_EXPERIMENT
              ? 1
              : column_count;
        }
      });
  detailList.setLayoutManager(manager);
  detailList.setAdapter(experimentListAdapter);
  FloatingActionButton newExperimentButton =
      (FloatingActionButton) view.findViewById(R.id.new_experiment);
  if (claimExperimentsMode) {
    // New experiments cannot be created while claiming unclaimed experiments.
    newExperimentButton.setVisibility(View.GONE);
  } else {
    newExperimentButton.setOnClickListener(
        v -> {
          if (getRecorderController().watchRecordingStatus().blockingFirst().isRecording()) {
            // This should never happen, but apparently it does on some Xperia devices?
            // b/117484248
            return;
          }
          // Create the experiment, log the creation, then open it.
          getDataController()
              .createExperiment(
                  new LoggingConsumer<Experiment>(TAG, "Create a new experiment") {
                    @Override
                    public void success(final Experiment experiment) {
                      WhistlePunkApplication.getUsageTracker(getActivity())
                          .trackEvent(
                              TrackerConstants.CATEGORY_EXPERIMENTS,
                              TrackerConstants.ACTION_CREATE,
                              TrackerConstants.LABEL_EXPERIMENT_LIST,
                              0);
                      launchPanesActivity(
                          v.getContext(),
                          appAccount,
                          experiment.getExperimentId(),
                          false /* claimExperimentsMode */);
                    }
                  });
        });
  }
  return view;
}
/** Opens the panes activity showing the given experiment. */
public static void launchPanesActivity(
    Context context, AppAccount appAccount, String experimentId, boolean claimExperimentsMode) {
  Intent launchIntent =
      WhistlePunkApplication.getLaunchIntentForPanesActivity(
          context, appAccount, experimentId, claimExperimentsMode);
  context.startActivity(launchIntent);
}
/**
 * Whether the prompt to claim experiments should be shown: only outside claim mode,
 * for a signed-in account, and only when at least one unclaimed experiment exists.
 */
private boolean shouldShowClaimExperimentsCard() {
  if (claimExperimentsMode) {
    return false;
  }
  if (!appAccount.isSignedIn()) {
    return false;
  }
  return AccountsUtils.getUnclaimedExperimentCount(getContext()) >= 1;
}
/** The "add experiments to Drive" card is only shown in claim-experiments mode. */
private boolean shouldShowAddExperimentsToDriveCard() {
  return claimExperimentsMode;
}
/**
 * Fetches the experiment overviews for the current account and attaches them to the
 * adapter. If the account has no experiments, no default experiment was ever created,
 * and there is nothing to claim, creates the default "first experiment" instead.
 */
private void loadExperiments() {
  // Old fragments can still be alive, but not part of the activity, when resuming.
  // See https://stackoverflow.com/questions/9727173/ ...
  // support-fragmentpageradapter-holds-reference-to-old-fragments/9745935#9745935
  // This prevents a crash, but I suspect there's a deeper solution we can investigate, later.
  // TODO(b/116717025}
  if (getActivity() == null) {
    return;
  }
  PerfTrackerProvider perfTracker = WhistlePunkApplication.getPerfTrackerProvider(getActivity());
  PerfTrackerProvider.TimerToken loadExperimentTimer = perfTracker.startTimer();
  getDataController()
      .getExperimentOverviews(
          includeArchived,
          new LoggingConsumer<List<GoosciUserMetadata.ExperimentOverview>>(
              TAG, "Retrieve experiments") {
            @Override
            public void success(List<GoosciUserMetadata.ExperimentOverview> experiments) {
              // In case the account changes multiple times quickly, ignore the results if
              // the activity is now null.
              if (getActivity() == null) {
                return;
              }
              if (experiments.isEmpty()
                  && !wasDefaultExperimentCreated()
                  && !shouldShowClaimExperimentsCard()) {
                // If there are no experiments and we've never made a default one,
                // create the default experiment and set the boolean to true.
                // Note that we don't create the default experiment if the user is
                // prompted to claim unclaimed experiments.
                createDefaultExperiment();
                boolean discoveryEnabled =
                    featureDiscoveryProvider.isEnabled(
                        getActivity(),
                        appAccount,
                        FeatureDiscoveryProvider.FEATURE_NEW_EXPERIMENT);
                if (discoveryEnabled) {
                  scheduleFeatureDiscovery();
                }
                // Each branch stops the timer with a branch-specific Primes event name.
                perfTracker.stopTimer(
                    loadExperimentTimer, TrackerConstants.PRIMES_DEFAULT_EXPERIMENT_CREATED);
              } else {
                attachToExperiments(experiments);
                perfTracker.stopTimer(
                    loadExperimentTimer, TrackerConstants.PRIMES_EXPERIMENT_LIST_LOADED);
              }
              perfTracker.onAppInteractive();
            }
          });
}
/** Posts a delayed runnable that shows feature discovery for the new-experiment button. */
private void scheduleFeatureDiscovery() {
  new Handler(getContext().getMainLooper())
      .postDelayed(
          () -> showFeatureDiscovery(),
          FeatureDiscoveryProvider.FEATURE_DISCOVERY_SHOW_DELAY_MS);
}
/**
 * Shows the feature-discovery overlay for the new-experiment button. This runs from a
 * delayed handler (see scheduleFeatureDiscovery), so both the activity and the view
 * hierarchy may have gone away in the meantime; bail out in either case.
 */
private void showFeatureDiscovery() {
  // getView() can be null even when the activity is not, e.g. after onDestroyView;
  // the original only checked getActivity() and could NPE on getView() below.
  if (getActivity() == null || getView() == null) {
    return;
  }
  // Confirm that a view with the tag exists, so featureDiscoveryProvider can find it.
  final View view = getView().findViewWithTag(TAG_NEW_EXPERIMENT_BUTTON);
  if (view != null) {
    featureDiscoveryProvider.show(
        getActivity(),
        appAccount,
        FeatureDiscoveryProvider.FEATURE_NEW_EXPERIMENT,
        TAG_NEW_EXPERIMENT_BUTTON);
  }
}
private SharedPreferences getSharedPreferences() {
return AccountsUtils.getSharedPreferences(getContext(), appAccount);
}
private boolean wasDefaultExperimentCreated() {
return getSharedPreferences().getBoolean(KEY_DEFAULT_EXPERIMENT_CREATED, false);
}
/** Records that the one-time default experiment has been created for this account. */
private void setDefaultExperimentCreated() {
  getSharedPreferences().edit().putBoolean(KEY_DEFAULT_EXPERIMENT_CREATED, true).apply();
}
/**
 * Creates the one-time "first experiment" shown to brand-new users: a titled experiment
 * pre-populated with two text notes and one picture note, timestamped slightly in the past so
 * they render in a sensible order. On successful save, marks the default experiment as created
 * and reloads the experiment list.
 */
private void createDefaultExperiment() {
  DataController dataController = getDataController();
  RxDataController.createExperiment(dataController)
      .subscribe(
          e -> {
            Resources res = getActivity().getResources();
            e.setTitle(res.getString(R.string.first_experiment_title));
            Clock clock =
                AppSingleton.getInstance(getActivity()).getSensorEnvironment().getDefaultClock();
            // Create a text label 1 second ago with default text.
            GoosciTextLabelValue.TextLabelValue goosciTextLabel1 =
                new GoosciTextLabelValue.TextLabelValue();
            goosciTextLabel1.text = res.getString(R.string.first_experiment_second_text_note);
            Label textLabel1 =
                Label.newLabelWithValue(
                    clock.getNow() - 1000,
                    GoosciLabel.Label.ValueType.TEXT,
                    goosciTextLabel1,
                    null);
            e.addLabel(e, textLabel1);
            // Create a text label 2 seconds ago with default text.
            GoosciTextLabelValue.TextLabelValue goosciTextLabel2 =
                new GoosciTextLabelValue.TextLabelValue();
            goosciTextLabel2.text = res.getString(R.string.first_experiment_text_note);
            Label textLabel2 =
                Label.newLabelWithValue(
                    clock.getNow() - 2000,
                    GoosciLabel.Label.ValueType.TEXT,
                    goosciTextLabel2,
                    null);
            e.addLabel(e, textLabel2);
            // Create a picture label 4 second ago with a default drawable and caption.
            GoosciCaption.Caption caption = new GoosciCaption.Caption();
            caption.text = res.getString(R.string.first_experiment_picture_note_caption);
            caption.lastEditedTimestamp = clock.getNow() - 4000;
            Label pictureLabel =
                Label.newLabel(caption.lastEditedTimestamp, GoosciLabel.Label.ValueType.PICTURE);
            // The picture is copied from a bundled drawable into the experiment's own storage,
            // then referenced by its experiment-relative path.
            File pictureFile =
                PictureUtils.createImageFile(
                    getActivity(),
                    dataController.getAppAccount(),
                    e.getExperimentId(),
                    pictureLabel.getLabelId());
            PictureUtils.writeDrawableToFile(getActivity(), pictureFile, R.drawable.first_note);
            GoosciPictureLabelValue.PictureLabelValue goosciPictureLabel =
                new GoosciPictureLabelValue.PictureLabelValue();
            goosciPictureLabel.filePath =
                FileMetadataUtil.getInstance()
                    .getRelativePathInExperiment(e.getExperimentId(), pictureFile);
            pictureLabel.setLabelProtoData(goosciPictureLabel);
            pictureLabel.setCaption(caption);
            e.addLabel(e, pictureLabel);
            // TODO: Add a recording item if required by b/64844798.
            RxDataController.updateExperiment(dataController, e, true)
                .subscribe(
                    () -> {
                      setDefaultExperimentCreated();
                      loadExperiments();
                    });
          });
}
/** Pushes the loaded experiment overviews into the list adapter, if the view still exists. */
private void attachToExperiments(List<GoosciUserMetadata.ExperimentOverview> experiments) {
  if (getView() != null) {
    experimentListAdapter.setData(experiments, includeArchived);
  }
}
/** Returns the DataController for this fragment's account. */
private DataController getDataController() {
  return AppSingleton.getInstance(getActivity()).getDataController(appAccount);
}
/** Returns the RecorderController for this fragment's account. */
private RecorderController getRecorderController() {
  return AppSingleton.getInstance(getActivity()).getRecorderController(appAccount);
}
/** Returns the ExperimentLibraryManager for this fragment's account. */
private ExperimentLibraryManager getExperimentLibraryManager() {
  return AppSingleton.getInstance(getActivity()).getExperimentLibraryManager(appAccount);
}
/**
 * Shows or hides the indeterminate progress bar, and records the state so click handlers can
 * ignore input while a long-running operation is in flight.
 */
public void setProgressBarVisible(boolean visible) {
  progressBarVisible = visible;
  View root = getView();
  if (root != null) {
    root.findViewById(R.id.indeterminateBar)
        .setVisibility(visible ? View.VISIBLE : View.GONE);
  }
}
/**
 * Inflates the claim-mode or normal experiment-list menu, then refreshes the network status
 * icon. In claim mode the overflow icon is tinted to match the claim action bar text color.
 */
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
  super.onCreateOptionsMenu(menu, inflater);
  if (claimExperimentsMode) {
    inflater.inflate(R.menu.menu_claim_experiments, menu);
    ColorUtils.colorDrawable(
        getContext(),
        menu.findItem(R.id.run_review_overflow_menu).getIcon(),
        R.color.claim_experiments_action_bar_text);
  } else {
    inflater.inflate(R.menu.menu_experiment_list, menu);
  }
  optionsMenu = menu;
  updateNetworkStatusIcon();
}
/**
 * Toggles the include/exclude-archived menu items and shows the sync item only for signed-in
 * accounts. The sync item may not exist in claim mode, hence the null check.
 */
@Override
public void onPrepareOptionsMenu(Menu menu) {
  menu.findItem(R.id.action_include_archived).setVisible(!includeArchived);
  menu.findItem(R.id.action_exclude_archived).setVisible(includeArchived);
  MenuItem menuItemSync = menu.findItem(R.id.action_sync);
  if (menuItemSync != null) {
    menuItemSync.setVisible(appAccount.isSignedIn());
  }
  optionsMenu = menu;
  updateNetworkStatusIcon();
}
/**
 * Shows/hides the "network disconnected" menu icon based on current connectivity. Hidden for
 * non-signed-in accounts and absent entirely in claim mode. Announces the disconnected state
 * for accessibility when the icon becomes visible.
 */
private void updateNetworkStatusIcon() {
  if (optionsMenu == null) {
    return;
  }
  MenuItem menuItemActionNetworkDisconnected =
      optionsMenu.findItem(R.id.action_network_disconnected);
  // In claim experiments mode, the menus are different. If menuItemActionNetworkDisconnected is
  // null, the menu item doesn't exist.
  if (menuItemActionNetworkDisconnected == null) {
    return;
  }
  if (!appAccount.isSignedIn()) {
    menuItemActionNetworkDisconnected.setVisible(false);
    return;
  }
  ConnectivityManager cm =
      (ConnectivityManager) getContext().getSystemService(Context.CONNECTIVITY_SERVICE);
  // Query the active network exactly once: connectivity can change between successive
  // getActiveNetworkInfo() calls, so a separate null check followed by a second call could NPE.
  android.net.NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
  boolean shouldShowIcon = activeNetwork == null || !activeNetwork.isConnectedOrConnecting();
  menuItemActionNetworkDisconnected.setVisible(shouldShowIcon);
  menuItemActionNetworkDisconnected.setEnabled(shouldShowIcon);
  // This can run from a connectivity broadcast after the view is destroyed; guard getView().
  if (shouldShowIcon && getView() != null) {
    getView()
        .announceForAccessibility(
            getResources().getString(R.string.drive_sync_cannot_reach_google_drive));
  }
}
/**
 * Handles list-level menu actions. All actions are swallowed while the progress bar is visible
 * (a long-running operation such as export is in flight).
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
  int id = item.getItemId();
  if (progressBarVisible) {
    return true;
  }
  if (id == R.id.action_include_archived) {
    includeArchived = true;
    loadExperiments();
    getActivity().invalidateOptionsMenu();
    return true;
  } else if (id == R.id.action_exclude_archived) {
    includeArchived = false;
    loadExperiments();
    getActivity().invalidateOptionsMenu();
    return true;
  } else if (id == R.id.action_claim_unclaimed_experiments) {
    confirmClaimUnclaimedExperiments();
    return true;
  } else if (id == R.id.action_delete_unclaimed_experiments) {
    confirmDeleteUnclaimedExperiments();
    return true;
  } else if (id == R.id.action_sync) {
    syncNow("Sync from menu");
    return true;
  } else if (id == R.id.action_network_disconnected) {
    Resources res = getActivity().getResources();
    experimentListAdapter.showSnackbar(
        res.getString(R.string.drive_sync_cannot_reach_google_drive), null);
    // NOTE(review): intentionally no "return true" here — falls through to super below.
  }
  return super.onOptionsItemSelected(item);
}
/**
 * Triggers a cloud sync of the experiment library for signed-in accounts; for non-signed-in
 * accounts, simply reloads the local experiment list.
 *
 * @param logMessage tag passed through to the sync service for logging
 */
private void syncNow(String logMessage) {
  if (appAccount.isSignedIn()) {
    CloudSyncProvider syncProvider = WhistlePunkApplication.getCloudSyncProvider(getActivity());
    CloudSyncManager syncService = syncProvider.getServiceForAccount(appAccount);
    try {
      getView()
          .announceForAccessibility(
              getResources().getString(R.string.action_sync_start));
      syncService.syncExperimentLibrary(getContext(), logMessage);
    } catch (IOException ioe) {
      // Sync failures are best-effort: log and carry on rather than crash.
      if (Log.isLoggable(TAG, Log.ERROR)) {
        Log.e(TAG, "IOE", ioe);
      }
    }
  } else {
    loadExperiments();
  }
}
/**
 * Shows a confirmation dialog before claiming all unclaimed experiments into the signed-in
 * account; the title reflects how many unclaimed experiments exist.
 */
private void confirmClaimUnclaimedExperiments() {
  Context context = getContext();
  int unclaimedExperimentCount = AccountsUtils.getUnclaimedExperimentCount(context);
  String title =
      context
          .getResources()
          .getQuantityString(
              R.plurals.claim_all_confirmation_text,
              unclaimedExperimentCount,
              unclaimedExperimentCount);
  new AlertDialog.Builder(context)
      .setTitle(title)
      .setNegativeButton(android.R.string.cancel, (dialog, which) -> dialog.cancel())
      .setPositiveButton(
          R.string.claim_all_confirmation_yes,
          (dialog, which) -> {
            claimUnclaimedExperiments();
            dialog.dismiss();
          })
      .create()
      .show();
}
/** Moves every unclaimed experiment into the claiming account, then closes the activity. */
private void claimUnclaimedExperiments() {
  getDataController()
      .moveAllExperimentsToAnotherAccount(
          claimingAccount,
          new LoggingConsumer<Success>(TAG, "claimUnclaimedExperiments") {
            @Override
            public void success(Success value) {
              getActivity().finish();
            }
          });
}
/** Asks the user to confirm before permanently deleting all unclaimed experiments. */
private void confirmDeleteUnclaimedExperiments() {
  new AlertDialog.Builder(getContext())
      .setTitle(R.string.delete_all_prompt_headline)
      .setMessage(R.string.delete_all_prompt_text)
      .setNegativeButton(android.R.string.cancel, (dialog, which) -> dialog.cancel())
      .setPositiveButton(
          R.string.delete_all_prompt_yes,
          (dialog, which) -> {
            deleteUnclaimedExperiments();
            dialog.dismiss();
          })
      .create()
      .show();
}
/** Permanently deletes all unclaimed experiments, then closes the activity. */
private void deleteUnclaimedExperiments() {
  getDataController()
      .deleteAllExperiments(
          new LoggingConsumer<Success>(TAG, "deleteUnclaimedExperiments") {
            @Override
            public void success(Success value) {
              getActivity().finish();
            }
          });
}
/**
 * Shows the delete-experiment confirmation dialog; the actual deletion happens in
 * {@link #requestDelete(Bundle)} when the user confirms.
 */
private void confirmDelete(String experimentId) {
  DeleteMetadataItemDialog dialog =
      DeleteMetadataItemDialog.newInstance(
          R.string.delete_experiment_dialog_title,
          R.string.delete_experiment_dialog_message,
          experimentId);
  dialog.show(getChildFragmentManager(), DeleteMetadataItemDialog.TAG);
}
/**
 * Callback from {@link DeleteMetadataItemDialog}: deletes the experiment whose id was stashed
 * in the dialog extras, removes it from the adapter, logs the deletion, and finishes claim
 * mode if no unclaimed experiments remain.
 */
@Override
public void requestDelete(Bundle extras) {
  String experimentId = extras.getString(DeleteMetadataItemDialog.KEY_ITEM_ID);
  DataController dataController = getDataController();
  // The full experiment must be loaded first; deleteExperiment operates on the full object.
  RxDataController.getExperimentById(dataController, experimentId)
      .subscribe(
          fullExperiment -> {
            dataController.deleteExperiment(
                fullExperiment,
                new LoggingConsumer<Success>(TAG, "delete experiment") {
                  @Override
                  public void success(Success value) {
                    experimentListAdapter.onExperimentDeleted(experimentId);
                    WhistlePunkApplication.getUsageTracker(getActivity())
                        .trackEvent(
                            TrackerConstants.CATEGORY_EXPERIMENTS,
                            TrackerConstants.ACTION_DELETED,
                            TrackerConstants.LABEL_EXPERIMENT_LIST,
                            0);
                    maybeFinishClaimExperimentsMode();
                  }
                });
          });
}
/**
 * Finishes the claim-experiments activity once nothing is left to claim. When the item count
 * is 1, the only remaining item is the add_experiments_to_drive_card.
 */
private void maybeFinishClaimExperimentsMode() {
  if (!claimExperimentsMode || experimentListAdapter.getItemCount() != 1) {
    return;
  }
  Activity activity = getActivity();
  if (activity != null) {
    activity.finish();
  }
}
/**
 * One row of the experiment list: an experiment card, a date header, or one of the special
 * cards (empty state / claim / add-to-Drive). At most one of {@code experimentOverview} and
 * {@code dateString} is non-null, depending on {@code viewType}.
 */
static class ExperimentListItem {
  public final int viewType;
  public final GoosciUserMetadata.ExperimentOverview experimentOverview;
  public final String dateString;

  /** Experiment card row. */
  ExperimentListItem(GoosciUserMetadata.ExperimentOverview experimentOverview) {
    viewType = ExperimentListAdapter.VIEW_TYPE_EXPERIMENT;
    this.experimentOverview = experimentOverview;
    dateString = null;
  }

  /** Date header row. */
  ExperimentListItem(String date) {
    viewType = ExperimentListAdapter.VIEW_TYPE_DATE;
    dateString = date;
    experimentOverview = null;
  }

  /** Special card row (empty / claim / add-to-Drive), identified only by its view type. */
  ExperimentListItem(int viewType) {
    this.viewType = viewType;
    dateString = null;
    experimentOverview = null;
  }
}
/**
 * RecyclerView adapter backing the experiment list. Renders experiment cards grouped under
 * month (or month/year) date headers, plus optional "claim experiments" and "add experiments
 * to Drive" cards and an empty-state view. Holds its parent fragment only weakly so the
 * adapter never keeps a destroyed fragment alive.
 */
public static class ExperimentListAdapter extends RecyclerView.Adapter<ViewHolder> {
  static final int VIEW_TYPE_EXPERIMENT = 0;
  static final int VIEW_TYPE_EMPTY = 1;
  static final int VIEW_TYPE_DATE = 2;
  static final int VIEW_TYPE_CLAIM_EXPERIMENTS = 3;
  static final int VIEW_TYPE_ADD_EXPERIMENTS_TO_DRIVE = 4;

  // Image shown while an experiment has no cover image of its own.
  private final Drawable placeHolderImage;
  private final List<ExperimentListItem> items;
  private boolean includeArchived;
  // Reused for date-header formatting; currentYear decides whether the year is displayed.
  private final Calendar calendar;
  private final int currentYear;
  private final String monthYearFormat;
  private final WeakReference<ExperimentListFragment> parentReference;
  private final SnackbarManager snackbarManager = new SnackbarManager();
  // Tracks the currently-open overflow popup so onDestroy can dismiss it.
  private PopupMenu popupMenu = null;

  public ExperimentListAdapter(ExperimentListFragment parent) {
    items = new ArrayList<>();
    placeHolderImage =
        parent.getActivity().getResources().getDrawable(R.drawable.experiment_card_placeholder);
    calendar =
        Calendar.getInstance(parent.getActivity().getResources().getConfiguration().locale);
    currentYear = calendar.get(Calendar.YEAR);
    monthYearFormat = parent.getActivity().getResources().getString(R.string.month_year_format);
    parentReference = new WeakReference<>(parent);
  }

  /**
   * Rebuilds the item list from the given overviews: optional claim/Drive cards first, then
   * experiments sorted most-recent-first with a date header inserted whenever the month (or
   * month/year) changes, or a single empty-state item when there are no experiments.
   */
  void setData(
      List<GoosciUserMetadata.ExperimentOverview> experimentOverviews, boolean includeArchived) {
    this.includeArchived = includeArchived;
    items.clear();
    if (parentReference.get().shouldShowClaimExperimentsCard()) {
      items.add(new ExperimentListItem(VIEW_TYPE_CLAIM_EXPERIMENTS));
    }
    if (parentReference.get().shouldShowAddExperimentsToDriveCard()) {
      items.add(new ExperimentListItem(VIEW_TYPE_ADD_EXPERIMENTS_TO_DRIVE));
    }
    if (experimentOverviews.isEmpty()) {
      items.add(new ExperimentListItem(VIEW_TYPE_EMPTY));
    } else {
      // Sort most recent first
      Collections.sort(
          experimentOverviews,
          (eo1, eo2) -> Long.compare(eo2.lastUsedTimeMs, eo1.lastUsedTimeMs));
      String date = "";
      for (GoosciUserMetadata.ExperimentOverview overview : experimentOverviews) {
        // Only show the year if it is not this year.
        calendar.setTime(new Date(overview.lastUsedTimeMs));
        String nextDate =
            DateFormat.format(
                    calendar.get(Calendar.YEAR) == currentYear ? "MMMM" : monthYearFormat,
                    calendar)
                .toString();
        if (!TextUtils.equals(date, nextDate)) {
          date = nextDate;
          items.add(new ExperimentListItem(date));
        }
        items.add(new ExperimentListItem(overview));
      }
    }
    notifyDataSetChanged();
  }

  /** Inflates the row layout matching the view type; experiment rows differ in claim mode. */
  @Override
  public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
    LayoutInflater inflater = LayoutInflater.from(parent.getContext());
    View view;
    if (viewType == VIEW_TYPE_EMPTY) {
      view = inflater.inflate(R.layout.empty_list, parent, false);
    } else if (viewType == VIEW_TYPE_DATE) {
      view = inflater.inflate(R.layout.experiment_date, parent, false);
    } else if (viewType == VIEW_TYPE_CLAIM_EXPERIMENTS) {
      view = inflater.inflate(R.layout.claim_experiments_card, parent, false);
    } else if (viewType == VIEW_TYPE_ADD_EXPERIMENTS_TO_DRIVE) {
      view = inflater.inflate(R.layout.add_experiments_to_drive_card, parent, false);
    } else { // VIEW_TYPE_EXPERIMENT
      view =
          inflater.inflate(
              parentReference.get().claimExperimentsMode
                  ? R.layout.claim_experiment_overview
                  : R.layout.project_experiment_overview,
              parent,
              false);
    }
    return new ViewHolder(view, viewType, parentReference.get().claimExperimentsMode);
  }

  /** Binds experiment, date-header, and claim-card rows; other view types are static. */
  @Override
  public void onBindViewHolder(ViewHolder holder, int position) {
    if (items.get(position).viewType == VIEW_TYPE_EXPERIMENT) {
      bindExperiment(holder, items.get(position));
    } else if (items.get(position).viewType == VIEW_TYPE_DATE) {
      ((TextView) holder.itemView).setText(items.get(position).dateString);
    } else if (items.get(position).viewType == VIEW_TYPE_CLAIM_EXPERIMENTS) {
      Context context = holder.itemView.getContext();
      int unclaimedExperimentCount = AccountsUtils.getUnclaimedExperimentCount(context);
      TextView textView = holder.itemView.findViewById(R.id.text_claim_experiments);
      textView.setText(
          context
              .getResources()
              .getQuantityString(
                  R.plurals.claim_experiments_card_text,
                  unclaimedExperimentCount,
                  unclaimedExperimentCount));
      holder.claimButton.setOnClickListener(
          v ->
              ClaimExperimentsActivity.launch(
                  v.getContext(),
                  parentReference.get().appAccount,
                  parentReference.get().getArguments().getBoolean(ARG_USE_PANES)));
    }
  }

  @Override
  public int getItemCount() {
    return items.size();
  }

  @Override
  public int getItemViewType(int position) {
    return items.get(position).viewType;
  }

  /**
   * Populates an experiment card: title, archived state, cover image, and either the
   * claim-mode action buttons (Drive/share/delete) or the normal overflow popup menu.
   */
  private void bindExperiment(final ViewHolder holder, final ExperimentListItem item) {
    Resources res = holder.itemView.getResources();
    // First on the UI thread, set what experiment we're trying to load.
    GoosciUserMetadata.ExperimentOverview overview = item.experimentOverview;
    holder.experimentId = overview.experimentId;
    // Set the data we know about.
    String experimentText =
        Experiment.getDisplayTitle(holder.itemView.getContext(), overview.title);
    holder.experimentTitle.setText(experimentText);
    holder.archivedIndicator.setVisibility(overview.isArchived ? View.VISIBLE : View.GONE);
    if (overview.isArchived) {
      holder.experimentTitle.setContentDescription(
          res.getString(R.string.archived_content_description, experimentText));
      holder
          .itemView
          .findViewById(R.id.content)
          .setAlpha(res.getFraction(R.fraction.metadata_card_archived_alpha, 1, 1));
    } else {
      // Use default.
      holder.experimentTitle.setContentDescription("");
      holder
          .itemView
          .findViewById(R.id.content)
          .setAlpha(res.getFraction(R.fraction.metadata_card_alpha, 1, 1));
    }
    holder.itemView.setTag(R.id.experiment_title, overview.experimentId);
    holder.cardView.setOnClickListener(
        v -> {
          // Ignore taps while an export or similar operation is running.
          if (!parentReference.get().progressBarVisible) {
            launchPanesActivity(
                v.getContext(),
                parentReference.get().appAccount,
                overview.experimentId,
                parentReference.get().claimExperimentsMode);
          }
        });
    Context context = holder.menuButton.getContext();
    boolean isShareIntentValid =
        FileMetadataUtil.getInstance()
            .validateShareIntent(
                context, parentReference.get().appAccount, overview.experimentId);
    if (parentReference.get().claimExperimentsMode) {
      // Claim mode: no overflow menu, dedicated Drive/share/delete buttons instead.
      holder.menuButton.setVisibility(View.GONE);
      holder.driveButton.setOnClickListener(
          v -> promptBeforeClaimExperiment(overview.experimentId, context));
      if (isShareIntentValid) {
        holder.shareButton.setOnClickListener(v -> exportExperiment(overview.experimentId));
      } else {
        holder.shareButton.setVisibility(View.GONE);
      }
      holder.deleteButton.setOnClickListener(v -> deleteExperiment(overview.experimentId));
    } else if (parentReference
        .get()
        .getRecorderController()
        .watchRecordingStatus()
        .blockingFirst()
        .isRecording()) {
      // This should never occur. But apparently it does on some Lenovo K5 devices: b/119263772
      // BlockingFirst above is ok because there will always be a RecordingStatus. This won't
      // ever actually block.
      holder.menuButton.setVisibility(View.GONE);
    } else {
      holder.menuButton.setOnClickListener(
          v -> {
            int position = items.indexOf(item);
            popupMenu =
                new PopupMenu(
                    context,
                    holder.menuButton,
                    Gravity.NO_GRAVITY,
                    R.attr.actionOverflowMenuStyle,
                    0);
            popupMenu
                .getMenuInflater()
                .inflate(R.menu.menu_experiment_overview, popupMenu.getMenu());
            // Archive/unarchive are mutually exclusive; export requires a valid share intent.
            popupMenu.getMenu().findItem(R.id.menu_item_archive).setVisible(!overview.isArchived);
            popupMenu
                .getMenu()
                .findItem(R.id.menu_item_unarchive)
                .setVisible(overview.isArchived);
            popupMenu
                .getMenu()
                .findItem(R.id.menu_item_export_experiment)
                .setVisible(isShareIntentValid);
            popupMenu.setOnMenuItemClickListener(
                menuItem -> {
                  if (parentReference.get().progressBarVisible) {
                    return true;
                  }
                  if (menuItem.getItemId() == R.id.menu_item_archive) {
                    setExperimentArchived(overview, position, true);
                    return true;
                  } else if (menuItem.getItemId() == R.id.menu_item_unarchive) {
                    setExperimentArchived(overview, position, false);
                    return true;
                  } else if (menuItem.getItemId() == R.id.menu_item_delete) {
                    deleteExperiment(overview.experimentId);
                    return true;
                  } else if (menuItem.getItemId() == R.id.menu_item_export_experiment) {
                    exportExperiment(overview.experimentId);
                    return true;
                  }
                  return false;
                });
            popupMenu.setOnDismissListener(menu -> popupMenu = null);
            popupMenu.show();
          });
    }
    if (!TextUtils.isEmpty(overview.imagePath)) {
      PictureUtils.loadExperimentOverviewImage(
          parentReference.get().appAccount, holder.experimentImage, overview.imagePath);
    } else {
      // Make sure the scale type is correct for the placeholder
      holder.experimentImage.setScaleType(ImageView.ScaleType.FIT_CENTER);
      holder.experimentImage.setImageDrawable(placeHolderImage);
      int[] intArray =
          holder
              .experimentImage
              .getContext()
              .getResources()
              .getIntArray(R.array.experiment_colors_array);
      holder.experimentImage.setBackgroundColor(intArray[overview.colorIndex]);
    }
  }

  /**
   * Persists the archived bit for an experiment, updates the list, logs the event, and shows
   * an (undoable, when archiving) snackbar.
   */
  private void setExperimentArchived(
      GoosciUserMetadata.ExperimentOverview overview, final int position, boolean archived) {
    if (parentReference.get() == null) {
      return;
    }
    Context context = parentReference.get().getContext();
    overview.isArchived = archived;
    DataController dataController = parentReference.get().getDataController();
    ExperimentLibraryManager elm = parentReference.get().getExperimentLibraryManager();
    RxDataController.getExperimentById(dataController, overview.experimentId)
        .subscribe(
            fullExperiment -> {
              fullExperiment.setArchived(context, dataController.getAppAccount(), archived);
              elm.setArchived(fullExperiment.getExperimentId(), archived);
              dataController.updateExperiment(
                  overview.experimentId,
                  new LoggingConsumer<Success>(TAG, "set archived bit") {
                    @Override
                    public void success(Success value) {
                      updateArchivedState(position, archived);
                      WhistlePunkApplication.getUsageTracker(parentReference.get().getActivity())
                          .trackEvent(
                              TrackerConstants.CATEGORY_EXPERIMENTS,
                              archived
                                  ? TrackerConstants.ACTION_ARCHIVE
                                  : TrackerConstants.ACTION_UNARCHIVE,
                              TrackerConstants.LABEL_EXPERIMENT_LIST,
                              0);
                      showArchivedSnackbar(overview, position, archived);
                    }
                  });
            });
  }

  /** Reflects an archive-state change in the visible list. */
  private void updateArchivedState(int position, boolean archived) {
    if (includeArchived) {
      // Archived rows stay visible; just redraw the card.
      notifyItemChanged(position);
    } else if (archived) {
      // Remove archived experiment immediately.
      int i = position;
      removeExperiment(i);
    } else {
      // It could be added back anywhere.
      if (parentReference.get() != null) {
        parentReference.get().loadExperiments();
      }
    }
  }

  /** Shows a snackbar confirming an experiment was claimed into the signed-in account. */
  private void showClaimedSnackbar() {
    if (parentReference.get() == null) {
      return;
    }
    String accountName = parentReference.get().claimingAccount.getAccountName();
    String message =
        parentReference
            .get()
            .getResources()
            .getString(R.string.experiment_added_text, accountName);
    showSnackbar(message, null /* undoOnClickListener */);
  }

  /** Shows an archived/unarchived snackbar; archiving offers an undo action. */
  private void showArchivedSnackbar(
      GoosciUserMetadata.ExperimentOverview overview, int position, boolean archived) {
    if (parentReference.get() == null) {
      return;
    }
    String message =
        parentReference
            .get()
            .getResources()
            .getString(
                archived
                    ? R.string.archived_experiment_message
                    : R.string.unarchived_experiment_message);
    // We only seem to show "undo" for archiving items, not unarchiving them.
    View.OnClickListener undoOnClickListener =
        archived ? view -> setExperimentArchived(overview, position, !archived) : null;
    showSnackbar(message, undoOnClickListener);
  }

  /** Shows a snackbar with an optional undo action, via the shared SnackbarManager. */
  public void showSnackbar(String message, @Nullable View.OnClickListener undoOnClickListener) {
    Snackbar bar =
        AccessibilityUtils.makeSnackbar(
            parentReference.get().getView(), message, Snackbar.LENGTH_LONG);
    if (undoOnClickListener != null) {
      bar.setAction(R.string.action_undo, undoOnClickListener);
    }
    snackbarManager.showSnackbar(bar);
  }

  /** Removes the row for a deleted experiment, if it is present in the current item list. */
  public void onExperimentDeleted(String experimentId) {
    int index = -1;
    for (int i = 0; i < items.size(); i++) {
      ExperimentListItem item = items.get(i);
      if (item.viewType == VIEW_TYPE_EXPERIMENT
          && TextUtils.equals(item.experimentOverview.experimentId, experimentId)) {
        index = i;
        break;
      }
    }
    // index > 0 is safe here: an experiment row is always preceded by a date header (see
    // removeExperiment), so a found experiment can never be at index 0.
    if (index > 0) {
      removeExperiment(index);
    }
  }

  /** Removes an experiment row and its date header when that header has no rows left. */
  private void removeExperiment(int index) {
    items.remove(index);
    notifyItemRemoved(index);
    // Remove the previous item if it is a date with no children.
    // We don't need to index check that index is zero because there must be a date card
    // somewhere above the experiment we just removed. So, an experiment is never at index zero.
    if (items.get(index - 1).viewType == VIEW_TYPE_DATE) {
      // The previous item is a date.
      // If there are no items after that date, or the item after that date is also a date
      if (index == items.size() || items.get(index).viewType == VIEW_TYPE_DATE) {
        items.remove(index - 1);
        if (items.isEmpty()) {
          notifyDataSetChanged();
        } else {
          notifyItemRemoved(index - 1);
        }
      }
    }
  }

  /** Confirms with the user before claiming a single experiment into the signed-in account. */
  private void promptBeforeClaimExperiment(String experimentId, Context context) {
    AlertDialog.Builder builder = new AlertDialog.Builder(parentReference.get().getContext());
    builder.setTitle(R.string.drive_confirmation_text);
    builder.setNegativeButton(android.R.string.cancel, (dialog, which) -> dialog.cancel());
    builder.setPositiveButton(
        R.string.drive_confirmation_yes,
        (dialog, which) -> {
          claimExperiment(experimentId);
          dialog.dismiss();
        });
    AlertDialog dialog = builder.create();
    dialog.show();
    // Need to reset the content description so the button will be read correctly b/116869645
    dialog.getButton(DialogInterface.BUTTON_POSITIVE)
        .setContentDescription(context.getResources().getString(R.string.drive_confirmation_yes));
  }

  /**
   * Moves one experiment into the claiming account, removes its row, shows a confirmation
   * snackbar, and later finishes claim mode if nothing remains to claim.
   */
  private void claimExperiment(String experimentId) {
    parentReference
        .get()
        .getDataController()
        .moveExperimentToAnotherAccount(
            experimentId,
            parentReference.get().claimingAccount,
            new LoggingConsumer<Success>(TAG, "claimExperiments") {
              @Override
              public void success(Success value) {
                onExperimentDeleted(experimentId);
                showClaimedSnackbar();
                // When the snackbar disappears, finish claim experiments mode if there are no
                // experiments left.
                new Handler()
                    .postDelayed(
                        () -> {
                          if (parentReference.get() != null) {
                            parentReference.get().maybeFinishClaimExperimentsMode();
                          }
                        },
                        LONG_DELAY_MILLIS);
              }
            });
  }

  /** Starts the delete-confirmation flow for an experiment (handled by the parent fragment). */
  private void deleteExperiment(String experimentId) {
    snackbarManager.hideVisibleSnackbar();
    parentReference.get().confirmDelete(experimentId);
  }

  /** Kicks off an experiment export, tracking the event and showing the progress bar. */
  private void exportExperiment(String experimentId) {
    Context context = parentReference.get().getContext();
    WhistlePunkApplication.getUsageTracker(context)
        .trackEvent(
            TrackerConstants.CATEGORY_EXPERIMENTS,
            TrackerConstants.ACTION_SHARED,
            TrackerConstants.LABEL_EXPERIMENT_LIST,
            0);
    parentReference.get().setProgressBarVisible(true);
    ExportService.handleExperimentExportClick(
        context, parentReference.get().appAccount, experimentId);
  }

  /** Releases UI resources: dismisses any visible snackbar and open popup menu. */
  public void onDestroy() {
    snackbarManager.onDestroy();
    if (popupMenu != null) {
      popupMenu.dismiss();
    }
  }
}
/**
 * Row view holder; which fields are populated depends on {@code viewType} (experiment rows vs.
 * the claim-experiments card; other row types need no cached views).
 */
public static class ViewHolder extends RecyclerView.ViewHolder {
  // Accessing via fields for faster access.

  /** Experiment ID that is being loaded or has been loaded. */
  public String experimentId;

  public TextView experimentTitle;
  public ImageView experimentImage;
  public View archivedIndicator;
  public View cardView;
  public ImageButton menuButton;
  // Claim-mode-only action buttons; remain null outside claim mode.
  public ImageButton driveButton;
  public ImageButton shareButton;
  public ImageButton deleteButton;
  // Populated only for VIEW_TYPE_CLAIM_EXPERIMENTS rows.
  public Button claimButton;

  int viewType;

  public ViewHolder(View itemView, int viewType, boolean claimExperimentsMode) {
    super(itemView);
    this.viewType = viewType;
    if (viewType == ExperimentListAdapter.VIEW_TYPE_EXPERIMENT) {
      cardView = itemView.findViewById(R.id.card_view);
      experimentImage = (ImageView) itemView.findViewById(R.id.experiment_image);
      experimentTitle = (TextView) itemView.findViewById(R.id.experiment_title);
      archivedIndicator = itemView.findViewById(R.id.archived_indicator);
      menuButton = (ImageButton) itemView.findViewById(R.id.menu_button);
      if (claimExperimentsMode) {
        driveButton = (ImageButton) itemView.findViewById(R.id.drive_button);
        shareButton = (ImageButton) itemView.findViewById(R.id.share_button);
        deleteButton = (ImageButton) itemView.findViewById(R.id.delete_button);
      }
    } else if (viewType == ExperimentListAdapter.VIEW_TYPE_CLAIM_EXPERIMENTS) {
      claimButton = (Button) itemView.findViewById(R.id.btn_claim_experiments);
    }
  }
}
/** Refreshes the network-status menu icon whenever device connectivity changes. */
private class ConnectivityBroadcastReceiver extends BroadcastReceiver {
  @Override
  public void onReceive(Context context, Intent intent) {
    updateNetworkStatusIcon();
  }
}
}
| OpenScienceJournal/whistlepunk_library/src/main/java/com/google/android/apps/forscience/whistlepunk/project/ExperimentListFragment.java | /*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.forscience.whistlepunk.project;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.net.ConnectivityManager;
import android.os.Bundle;
import android.os.Handler;
import androidx.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import androidx.fragment.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v4.widget.SwipeRefreshLayout.OnRefreshListener;
import androidx.appcompat.app.AlertDialog;
import androidx.recyclerview.widget.GridLayoutManager;
import androidx.appcompat.widget.PopupMenu;
import androidx.recyclerview.widget.RecyclerView;
import android.text.TextUtils;
import android.text.format.DateFormat;
import android.util.Log;
import android.util.TimingLogger;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import com.google.android.apps.forscience.javalib.Success;
import com.google.android.apps.forscience.whistlepunk.AccessibilityUtils;
import com.google.android.apps.forscience.whistlepunk.AppSingleton;
import com.google.android.apps.forscience.whistlepunk.Clock;
import com.google.android.apps.forscience.whistlepunk.ColorUtils;
import com.google.android.apps.forscience.whistlepunk.DataController;
import com.google.android.apps.forscience.whistlepunk.ExportService;
import com.google.android.apps.forscience.whistlepunk.LoggingConsumer;
import com.google.android.apps.forscience.whistlepunk.PictureUtils;
import com.google.android.apps.forscience.whistlepunk.R;
import com.google.android.apps.forscience.whistlepunk.RecorderController;
import com.google.android.apps.forscience.whistlepunk.RxDataController;
import com.google.android.apps.forscience.whistlepunk.RxEvent;
import com.google.android.apps.forscience.whistlepunk.SnackbarManager;
import com.google.android.apps.forscience.whistlepunk.WhistlePunkApplication;
import com.google.android.apps.forscience.whistlepunk.accounts.AccountsUtils;
import com.google.android.apps.forscience.whistlepunk.accounts.AppAccount;
import com.google.android.apps.forscience.whistlepunk.accounts.NonSignedInAccount;
import com.google.android.apps.forscience.whistlepunk.analytics.TrackerConstants;
import com.google.android.apps.forscience.whistlepunk.cloudsync.CloudSyncManager;
import com.google.android.apps.forscience.whistlepunk.cloudsync.CloudSyncProvider;
import com.google.android.apps.forscience.whistlepunk.featurediscovery.FeatureDiscoveryProvider;
import com.google.android.apps.forscience.whistlepunk.filemetadata.Experiment;
import com.google.android.apps.forscience.whistlepunk.filemetadata.ExperimentLibraryManager;
import com.google.android.apps.forscience.whistlepunk.filemetadata.FileMetadataUtil;
import com.google.android.apps.forscience.whistlepunk.filemetadata.Label;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciCaption;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciLabel;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciPictureLabelValue;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciTextLabelValue;
import com.google.android.apps.forscience.whistlepunk.metadata.nano.GoosciUserMetadata;
import com.google.android.apps.forscience.whistlepunk.performance.PerfTrackerProvider;
import com.google.android.apps.forscience.whistlepunk.review.DeleteMetadataItemDialog;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
 * Experiment List Fragment lists all experiments that belong to an account. This fragment is used
 * in MainActivity and in ClaimExperimentsActivity. The claimExperimentsMode field can be used to
 * determine whether it is in ClaimExperimentsActivity.
 *
 * <p>When used in MainActivity, the appAccount field is the "current" account, which may be the
 * NonSignedInAccount.
 *
 * <p>When used in ClaimExperimentsActivity, the appAccount field is the NonSignedInAccount and the
 * claimingAccount field is the "current" signed-in account.
 *
 * <p>Note that the options menu is different based on whether claimExperimentsMode is true or
 * false. In claimExperimentsMode, the menu items for action_sync and action_network_disconnected do
 * not exist. Care should be taken to check for null before dereferencing the result of
 * Menu.findItem for these ids.
 */
public class ExperimentListFragment extends Fragment
implements DeleteMetadataItemDialog.DeleteDialogListener, OnRefreshListener {
private static final String TAG = "ExperimentListFragment";
/** Boolean extra for savedInstanceState with the state of includeArchived experiments. */
private static final String EXTRA_INCLUDE_ARCHIVED = "includeArchived";
private static final String ARG_ACCOUNT_KEY = "accountKey";
private static final String ARG_CLAIM_EXPERIMENTS_MODE = "claimExperimentsMode";
private static final String ARG_CLAIMING_ACCOUNT_KEY = "claimingAccountKey";
private static final String ARG_USE_PANES = "usePanes";
public static final String KEY_DEFAULT_EXPERIMENT_CREATED = "key_default_experiment_created";
private static final String TAG_NEW_EXPERIMENT_BUTTON = "new_experiment_button";
/** Duration of snackbar length long. 3.5 seconds */
private static final int LONG_DELAY_MILLIS = 3500;
private ExperimentListAdapter experimentListAdapter;
private boolean includeArchived;
private boolean progressBarVisible = false;
private final RxEvent destroyed = new RxEvent();
private final RxEvent paused = new RxEvent();
private final IntentFilter networkIntentFilter = new IntentFilter();
private AppAccount appAccount;
private boolean claimExperimentsMode;
private AppAccount claimingAccount;
private ConnectivityBroadcastReceiver connectivityBroadcastReceiver;
private Menu optionsMenu = null;
private FeatureDiscoveryProvider featureDiscoveryProvider;
private SwipeRefreshLayout swipeLayout;
public static ExperimentListFragment newInstance(AppAccount appAccount, boolean usePanes) {
return newInstance(createArguments(appAccount, usePanes));
}
private static ExperimentListFragment newInstance(Bundle arguments) {
ExperimentListFragment fragment = new ExperimentListFragment();
fragment.setArguments(arguments);
return fragment;
}
private static Bundle createArguments(AppAccount appAccount, boolean usePanes) {
Bundle args = new Bundle();
args.putString(ARG_ACCOUNT_KEY, appAccount.getAccountKey());
args.putBoolean(ARG_USE_PANES, usePanes);
return args;
}
public static ExperimentListFragment reuseOrCreateInstance(
@Nullable Fragment fragment, AppAccount appAccount, boolean usePanes) {
Bundle newArguments = createArguments(appAccount, usePanes);
if (fragment instanceof ExperimentListFragment
&& newArguments.equals(fragment.getArguments())) {
return (ExperimentListFragment) fragment;
}
return newInstance(newArguments);
}
public static ExperimentListFragment newInstanceForClaimExperimentsMode(
Context context, AppAccount claimingAccount, boolean usePanes) {
NonSignedInAccount nonSignedInAccount = NonSignedInAccount.getInstance(context);
ExperimentListFragment fragment = new ExperimentListFragment();
Bundle args = new Bundle();
args.putString(ARG_ACCOUNT_KEY, nonSignedInAccount.getAccountKey());
args.putBoolean(ARG_CLAIM_EXPERIMENTS_MODE, true);
args.putString(ARG_CLAIMING_ACCOUNT_KEY, claimingAccount.getAccountKey());
args.putBoolean(ARG_USE_PANES, usePanes);
fragment.setArguments(args);
return fragment;
}
  /** Required no-arg constructor; registers interest in connectivity-change broadcasts. */
  public ExperimentListFragment() {
    networkIntentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
  }
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Mirror the app-wide export-busy state into this fragment's progress bar until destruction.
    AppSingleton.getInstance(getContext())
        .whenExportBusyChanges()
        .takeUntil(destroyed.happens())
        .subscribe(
            busy -> {
              setProgressBarVisible(busy);
            });
    appAccount = WhistlePunkApplication.getAccount(getContext(), getArguments(), ARG_ACCOUNT_KEY);
    claimExperimentsMode = getArguments().getBoolean(ARG_CLAIM_EXPERIMENTS_MODE);
    if (claimExperimentsMode) {
      claimingAccount =
          WhistlePunkApplication.getAccount(getContext(), getArguments(), ARG_CLAIMING_ACCOUNT_KEY);
      // In claim experiments mode, we always start with showing archived experiments, even if the
      // user hid them previously.
      includeArchived = true;
      getActivity().invalidateOptionsMenu();
    } else {
      if (savedInstanceState != null) {
        // Restore the include-archived toggle after a configuration change.
        includeArchived = savedInstanceState.getBoolean(EXTRA_INCLUDE_ARCHIVED, false);
        getActivity().invalidateOptionsMenu();
      }
    }
    featureDiscoveryProvider =
        WhistlePunkApplication.getAppServices(getActivity()).getFeatureDiscoveryProvider();
    setHasOptionsMenu(true);
  }
  @Override
  public void onStart() {
    super.onStart();
    // Record a screen-view hit for analytics each time the list becomes visible.
    WhistlePunkApplication.getUsageTracker(getActivity())
        .trackScreenView(TrackerConstants.SCREEN_EXPERIMENT_LIST);
  }
  @Override
  public void onResume() {
    super.onResume();
    // Re-apply the progress bar state; the view may have been recreated while paused.
    setProgressBarVisible(progressBarVisible);
    connectivityBroadcastReceiver = new ConnectivityBroadcastReceiver();
    getContext().registerReceiver(connectivityBroadcastReceiver, networkIntentFilter);
    TimingLogger timing = new TimingLogger(TAG, "Sync on Resume");
    // Reload the list on the UI thread whenever a new experiment finishes syncing, until onPause.
    AppSingleton.getInstance(getContext())
        .whenNewExperimentSynced()
        .takeUntil(paused.happens())
        .subscribe(
            count -> {
              Handler uiHandler = new Handler(getContext().getMainLooper());
              uiHandler.post(
                  () -> {
                    // This fragment may be gone by the time this code executes. Check getContext
                    // and give up if it is null, otherwise getResources() below will throw
                    // IllegalStateException.
                    if (getContext() == null) {
                      return;
                    }
                    loadExperiments();
                    timing.addSplit("Syncing complete");
                    timing.dumpToLog();
                  });
            });
    syncNow("Sync On Resume");
  }
  @Override
  public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // Preserve the include-archived toggle across configuration changes.
    outState.putBoolean(EXTRA_INCLUDE_ARCHIVED, includeArchived);
  }
  @Override
  public void onDestroy() {
    // TODO: Use RxEvent here
    // Tear down the adapter before signaling destruction to subscriptions gated on `destroyed`.
    experimentListAdapter.onDestroy();
    destroyed.onHappened();
    super.onDestroy();
  }
  @Override
  public void onPause() {
    // Stop listening for connectivity changes and cancel resume-scoped subscriptions.
    getContext().unregisterReceiver(connectivityBroadcastReceiver);
    paused.onHappened();
    super.onPause();
  }
@Override public void onRefresh() {
swipeLayout.setRefreshing(false);
syncNow("Sync on Pulldown");
}
  @Override
  public View onCreateView(
      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View view = inflater.inflate(R.layout.fragment_experiment_list, container, false);
    final RecyclerView detailList = (RecyclerView) view.findViewById(R.id.details);
    experimentListAdapter = new ExperimentListAdapter(this);
    swipeLayout = (SwipeRefreshLayout) view.findViewById(R.id.swipe_container);
    swipeLayout.setOnRefreshListener(this);
    // TODO: Adjust the column count based on breakpoint specs when available.
    int column_count = 2;
    GridLayoutManager manager = new GridLayoutManager(getActivity(), column_count);
    manager.setSpanSizeLookup(
        new GridLayoutManager.SpanSizeLookup() {
          @Override
          public int getSpanSize(int position) {
            // Experiment cards take one column; headers and special cards span the full row.
            return experimentListAdapter.getItemViewType(position)
                    == ExperimentListAdapter.VIEW_TYPE_EXPERIMENT
                ? 1
                : column_count;
          }
        });
    detailList.setLayoutManager(manager);
    detailList.setAdapter(experimentListAdapter);
    FloatingActionButton newExperimentButton =
        (FloatingActionButton) view.findViewById(R.id.new_experiment);
    if (claimExperimentsMode) {
      // Experiments can't be created while claiming; hide the FAB.
      newExperimentButton.setVisibility(View.GONE);
    } else {
      newExperimentButton.setOnClickListener(
          v -> {
            if (getRecorderController().watchRecordingStatus().blockingFirst().isRecording()) {
              // This should never happen, but apparently it does on some Xperia devices?
              // b/117484248
              return;
            }
            getDataController()
                .createExperiment(
                    new LoggingConsumer<Experiment>(TAG, "Create a new experiment") {
                      @Override
                      public void success(final Experiment experiment) {
                        WhistlePunkApplication.getUsageTracker(getActivity())
                            .trackEvent(
                                TrackerConstants.CATEGORY_EXPERIMENTS,
                                TrackerConstants.ACTION_CREATE,
                                TrackerConstants.LABEL_EXPERIMENT_LIST,
                                0);
                        // Open the newly created experiment immediately.
                        launchPanesActivity(
                            v.getContext(),
                            appAccount,
                            experiment.getExperimentId(),
                            false /* claimExperimentsMode */);
                      }
                    });
          });
    }
    return view;
  }
  /** Starts the panes activity showing the given experiment for the given account. */
  public static void launchPanesActivity(
      Context context, AppAccount appAccount, String experimentId, boolean claimExperimentsMode) {
    context.startActivity(
        WhistlePunkApplication.getLaunchIntentForPanesActivity(
            context, appAccount, experimentId, claimExperimentsMode));
  }
private boolean shouldShowClaimExperimentsCard() {
// We should prompt to claim experiments if:
// we are not already in claim experiments mode
// and there is one or more experiments in unclaimed storage.
return !claimExperimentsMode
&& appAccount.isSignedIn()
&& AccountsUtils.getUnclaimedExperimentCount(getContext()) >= 1;
}
  /** The add-experiments-to-drive card is shown only while claiming experiments. */
  private boolean shouldShowAddExperimentsToDriveCard() {
    return claimExperimentsMode;
  }
  /**
   * Loads experiment overviews from the data controller and populates the adapter. When the list
   * is empty, the default experiment was never created, and the user isn't being prompted to claim
   * experiments, creates the default "first experiment" instead.
   */
  private void loadExperiments() {
    // Old fragments can still be alive, but not part of the activity, when resuming.
    // See https://stackoverflow.com/questions/9727173/ ...
    // support-fragmentpageradapter-holds-reference-to-old-fragments/9745935#9745935
    // This prevents a crash, but I suspect there's a deeper solution we can investigate, later.
    // TODO(b/116717025)
    if (getActivity() == null) {
      return;
    }
    PerfTrackerProvider perfTracker = WhistlePunkApplication.getPerfTrackerProvider(getActivity());
    PerfTrackerProvider.TimerToken loadExperimentTimer = perfTracker.startTimer();
    getDataController()
        .getExperimentOverviews(
            includeArchived,
            new LoggingConsumer<List<GoosciUserMetadata.ExperimentOverview>>(
                TAG, "Retrieve experiments") {
              @Override
              public void success(List<GoosciUserMetadata.ExperimentOverview> experiments) {
                // In case the account changes multiple times quickly, ignore the results if
                // the activity is now null.
                if (getActivity() == null) {
                  return;
                }
                if (experiments.isEmpty()
                    && !wasDefaultExperimentCreated()
                    && !shouldShowClaimExperimentsCard()) {
                  // If there are no experiments and we've never made a default one,
                  // create the default experiment and set the boolean to true.
                  // Note that we don't create the default experiment if the user is
                  // prompted to claim unclaimed experiments.
                  createDefaultExperiment();
                  boolean discoveryEnabled =
                      featureDiscoveryProvider.isEnabled(
                          getActivity(),
                          appAccount,
                          FeatureDiscoveryProvider.FEATURE_NEW_EXPERIMENT);
                  if (discoveryEnabled) {
                    scheduleFeatureDiscovery();
                  }
                  perfTracker.stopTimer(
                      loadExperimentTimer, TrackerConstants.PRIMES_DEFAULT_EXPERIMENT_CREATED);
                } else {
                  attachToExperiments(experiments);
                  perfTracker.stopTimer(
                      loadExperimentTimer, TrackerConstants.PRIMES_EXPERIMENT_LIST_LOADED);
                }
                perfTracker.onAppInteractive();
              }
            });
  }
private void scheduleFeatureDiscovery() {
Handler handler = new Handler(getContext().getMainLooper());
handler.postDelayed(
this::showFeatureDiscovery, FeatureDiscoveryProvider.FEATURE_DISCOVERY_SHOW_DELAY_MS);
}
private void showFeatureDiscovery() {
if (getActivity() == null) {
return;
}
// Confirm that a view with the tag exists, so featureDiscoveryProvider can find it.
final View view = this.getView().findViewWithTag(TAG_NEW_EXPERIMENT_BUTTON);
if (view != null) {
featureDiscoveryProvider.show(
getActivity(),
appAccount,
FeatureDiscoveryProvider.FEATURE_NEW_EXPERIMENT,
TAG_NEW_EXPERIMENT_BUTTON);
}
}
  /** Returns the per-account shared preferences backing this fragment's persistent flags. */
  private SharedPreferences getSharedPreferences() {
    return AccountsUtils.getSharedPreferences(getContext(), appAccount);
  }
  /** Returns whether the default "first experiment" has ever been created for this account. */
  private boolean wasDefaultExperimentCreated() {
    return getSharedPreferences().getBoolean(KEY_DEFAULT_EXPERIMENT_CREATED, false);
  }
  /** Records that the default "first experiment" has been created for this account. */
  private void setDefaultExperimentCreated() {
    getSharedPreferences().edit().putBoolean(KEY_DEFAULT_EXPERIMENT_CREATED, true).apply();
  }
  /**
   * Creates the tutorial "first experiment" containing two text notes and a captioned picture
   * note (timestamps staggered into the recent past so they sort sensibly), then marks it as
   * created and reloads the list.
   */
  private void createDefaultExperiment() {
    DataController dataController = getDataController();
    RxDataController.createExperiment(dataController)
        .subscribe(
            e -> {
              Resources res = getActivity().getResources();
              e.setTitle(res.getString(R.string.first_experiment_title));
              Clock clock =
                  AppSingleton.getInstance(getActivity()).getSensorEnvironment().getDefaultClock();
              // Create a text label 1 second ago with default text.
              GoosciTextLabelValue.TextLabelValue goosciTextLabel1 =
                  new GoosciTextLabelValue.TextLabelValue();
              goosciTextLabel1.text = res.getString(R.string.first_experiment_second_text_note);
              Label textLabel1 =
                  Label.newLabelWithValue(
                      clock.getNow() - 1000,
                      GoosciLabel.Label.ValueType.TEXT,
                      goosciTextLabel1,
                      null);
              e.addLabel(e, textLabel1);
              // Create a text label 2 seconds ago with default text.
              GoosciTextLabelValue.TextLabelValue goosciTextLabel2 =
                  new GoosciTextLabelValue.TextLabelValue();
              goosciTextLabel2.text = res.getString(R.string.first_experiment_text_note);
              Label textLabel2 =
                  Label.newLabelWithValue(
                      clock.getNow() - 2000,
                      GoosciLabel.Label.ValueType.TEXT,
                      goosciTextLabel2,
                      null);
              e.addLabel(e, textLabel2);
              // Create a picture label 4 second ago with a default drawable and caption.
              GoosciCaption.Caption caption = new GoosciCaption.Caption();
              caption.text = res.getString(R.string.first_experiment_picture_note_caption);
              caption.lastEditedTimestamp = clock.getNow() - 4000;
              Label pictureLabel =
                  Label.newLabel(caption.lastEditedTimestamp, GoosciLabel.Label.ValueType.PICTURE);
              // Write the bundled drawable into the experiment's storage and reference it by a
              // path relative to the experiment directory.
              File pictureFile =
                  PictureUtils.createImageFile(
                      getActivity(),
                      dataController.getAppAccount(),
                      e.getExperimentId(),
                      pictureLabel.getLabelId());
              PictureUtils.writeDrawableToFile(getActivity(), pictureFile, R.drawable.first_note);
              GoosciPictureLabelValue.PictureLabelValue goosciPictureLabel =
                  new GoosciPictureLabelValue.PictureLabelValue();
              goosciPictureLabel.filePath =
                  FileMetadataUtil.getInstance()
                      .getRelativePathInExperiment(e.getExperimentId(), pictureFile);
              pictureLabel.setLabelProtoData(goosciPictureLabel);
              pictureLabel.setCaption(caption);
              e.addLabel(e, pictureLabel);
              // TODO: Add a recording item if required by b/64844798.
              RxDataController.updateExperiment(dataController, e, true)
                  .subscribe(
                      () -> {
                        setDefaultExperimentCreated();
                        loadExperiments();
                      });
            });
  }
private void attachToExperiments(List<GoosciUserMetadata.ExperimentOverview> experiments) {
final View rootView = getView();
if (rootView == null) {
return;
}
experimentListAdapter.setData(experiments, includeArchived);
}
  /** Returns the data controller scoped to this fragment's account. */
  private DataController getDataController() {
    return AppSingleton.getInstance(getActivity()).getDataController(appAccount);
  }
  /** Returns the recorder controller scoped to this fragment's account. */
  private RecorderController getRecorderController() {
    return AppSingleton.getInstance(getActivity()).getRecorderController(appAccount);
  }
  /** Returns the experiment library manager scoped to this fragment's account. */
  private ExperimentLibraryManager getExperimentLibraryManager() {
    return AppSingleton.getInstance(getActivity()).getExperimentLibraryManager(appAccount);
  }
public void setProgressBarVisible(boolean visible) {
progressBarVisible = visible;
if (getView() != null) {
if (visible) {
getView().findViewById(R.id.indeterminateBar).setVisibility(View.VISIBLE);
} else {
getView().findViewById(R.id.indeterminateBar).setVisibility(View.GONE);
}
}
}
  @Override
  public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    super.onCreateOptionsMenu(menu, inflater);
    if (claimExperimentsMode) {
      // Claim mode uses its own menu; tint the overflow icon to match the claim action bar.
      inflater.inflate(R.menu.menu_claim_experiments, menu);
      ColorUtils.colorDrawable(
          getContext(),
          menu.findItem(R.id.run_review_overflow_menu).getIcon(),
          R.color.claim_experiments_action_bar_text);
    } else {
      inflater.inflate(R.menu.menu_experiment_list, menu);
    }
    // Keep a reference so updateNetworkStatusIcon can toggle items later.
    optionsMenu = menu;
    updateNetworkStatusIcon();
  }
  @Override
  public void onPrepareOptionsMenu(Menu menu) {
    // Show exactly one of the archived toggles, matching the current state.
    menu.findItem(R.id.action_include_archived).setVisible(!includeArchived);
    menu.findItem(R.id.action_exclude_archived).setVisible(includeArchived);
    // action_sync is absent from the claim-mode menu, so findItem may return null.
    MenuItem menuItemSync = menu.findItem(R.id.action_sync);
    if (menuItemSync != null) {
      menuItemSync.setVisible(appAccount.isSignedIn());
    }
    optionsMenu = menu;
    updateNetworkStatusIcon();
  }
  /**
   * Shows the network-disconnected menu icon when the account is signed in but no network is
   * connected or connecting; hides it otherwise. No-ops when the menu isn't available yet or in
   * claim mode, where the menu item doesn't exist.
   */
  private void updateNetworkStatusIcon() {
    if (optionsMenu == null) {
      return;
    }
    MenuItem menuItemActionNetworkDisconnected =
        optionsMenu.findItem(R.id.action_network_disconnected);
    // In claim experiments mode, the menus are different. If menuItemActionNetworkDisconnected is
    // null, the menu item doesn't exist.
    if (menuItemActionNetworkDisconnected == null) {
      return;
    }
    if (!appAccount.isSignedIn()) {
      menuItemActionNetworkDisconnected.setVisible(false);
      return;
    }
    ConnectivityManager cm =
        (ConnectivityManager) getContext().getSystemService(Context.CONNECTIVITY_SERVICE);
    boolean shouldShowIcon =
        cm.getActiveNetworkInfo() == null || !cm.getActiveNetworkInfo().isConnectedOrConnecting();
    menuItemActionNetworkDisconnected.setVisible(shouldShowIcon);
    menuItemActionNetworkDisconnected.setEnabled(shouldShowIcon);
    if (shouldShowIcon) {
      // Announce the offline state for screen-reader users.
      getView()
          .announceForAccessibility(
              getResources().getString(R.string.drive_sync_cannot_reach_google_drive));
    }
  }
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (progressBarVisible) {
return true;
}
if (id == R.id.action_include_archived) {
includeArchived = true;
loadExperiments();
getActivity().invalidateOptionsMenu();
return true;
} else if (id == R.id.action_exclude_archived) {
includeArchived = false;
loadExperiments();
getActivity().invalidateOptionsMenu();
return true;
} else if (id == R.id.action_claim_unclaimed_experiments) {
confirmClaimUnclaimedExperiments();
return true;
} else if (id == R.id.action_delete_unclaimed_experiments) {
confirmDeleteUnclaimedExperiments();
return true;
} else if (id == R.id.action_sync) {
syncNow("Sync from menu");
return true;
} else if (id == R.id.action_network_disconnected) {
Resources res = getActivity().getResources();
experimentListAdapter.showSnackbar(
res.getString(R.string.drive_sync_cannot_reach_google_drive), null);
}
return super.onOptionsItemSelected(item);
}
  /**
   * Triggers a cloud sync of the experiment library when signed in; otherwise just reloads the
   * local experiment list.
   *
   * @param logMessage tag recorded with the sync request for debugging
   */
  private void syncNow(String logMessage) {
    if (appAccount.isSignedIn()) {
      CloudSyncProvider syncProvider = WhistlePunkApplication.getCloudSyncProvider(getActivity());
      CloudSyncManager syncService = syncProvider.getServiceForAccount(appAccount);
      try {
        // Announce the sync start for screen-reader users.
        getView()
            .announceForAccessibility(
                getResources().getString(R.string.action_sync_start));
        syncService.syncExperimentLibrary(getContext(), logMessage);
      } catch (IOException ioe) {
        // Sync failures are best-effort: log and continue with local data.
        if (Log.isLoggable(TAG, Log.ERROR)) {
          Log.e(TAG, "IOE", ioe);
        }
      }
    } else {
      loadExperiments();
    }
  }
private void confirmClaimUnclaimedExperiments() {
Context context = getContext();
int unclaimedExperimentCount = AccountsUtils.getUnclaimedExperimentCount(context);
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setTitle(
context
.getResources()
.getQuantityString(
R.plurals.claim_all_confirmation_text,
unclaimedExperimentCount,
unclaimedExperimentCount));
builder.setNegativeButton(android.R.string.cancel, (dialog, which) -> dialog.cancel());
builder.setPositiveButton(
R.string.claim_all_confirmation_yes,
(dialog, which) -> {
claimUnclaimedExperiments();
dialog.dismiss();
});
builder.create().show();
}
  /** Moves every unclaimed experiment into the claiming account, then closes the claim activity. */
  private void claimUnclaimedExperiments() {
    getDataController()
        .moveAllExperimentsToAnotherAccount(
            claimingAccount,
            new LoggingConsumer<Success>(TAG, "claimUnclaimedExperiments") {
              @Override
              public void success(Success value) {
                getActivity().finish();
              }
            });
  }
private void confirmDeleteUnclaimedExperiments() {
AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
builder.setTitle(R.string.delete_all_prompt_headline);
builder.setMessage(R.string.delete_all_prompt_text);
builder.setNegativeButton(android.R.string.cancel, (dialog, which) -> dialog.cancel());
builder.setPositiveButton(
R.string.delete_all_prompt_yes,
(dialog, which) -> {
deleteUnclaimedExperiments();
dialog.dismiss();
});
builder.create().show();
}
  /** Deletes every unclaimed experiment, then closes the claim activity. */
  private void deleteUnclaimedExperiments() {
    getDataController()
        .deleteAllExperiments(
            new LoggingConsumer<Success>(TAG, "deleteUnclaimedExperiments") {
              @Override
              public void success(Success value) {
                getActivity().finish();
              }
            });
  }
private void confirmDelete(String experimentId) {
DeleteMetadataItemDialog dialog =
DeleteMetadataItemDialog.newInstance(
R.string.delete_experiment_dialog_title,
R.string.delete_experiment_dialog_message,
experimentId);
dialog.show(getChildFragmentManager(), DeleteMetadataItemDialog.TAG);
}
  /**
   * Callback from {@link DeleteMetadataItemDialog} confirming deletion. Loads the full experiment
   * for the id stored in the dialog extras, deletes it, then updates the adapter and analytics.
   */
  @Override
  public void requestDelete(Bundle extras) {
    String experimentId = extras.getString(DeleteMetadataItemDialog.KEY_ITEM_ID);
    DataController dataController = getDataController();
    RxDataController.getExperimentById(dataController, experimentId)
        .subscribe(
            fullExperiment -> {
              dataController.deleteExperiment(
                  fullExperiment,
                  new LoggingConsumer<Success>(TAG, "delete experiment") {
                    @Override
                    public void success(Success value) {
                      experimentListAdapter.onExperimentDeleted(experimentId);
                      WhistlePunkApplication.getUsageTracker(getActivity())
                          .trackEvent(
                              TrackerConstants.CATEGORY_EXPERIMENTS,
                              TrackerConstants.ACTION_DELETED,
                              TrackerConstants.LABEL_EXPERIMENT_LIST,
                              0);
                      // In claim mode, finish once the last unclaimed experiment is gone.
                      maybeFinishClaimExperimentsMode();
                    }
                  });
            });
  }
private void maybeFinishClaimExperimentsMode() {
// If the item count is now 1, then the only item is the
// add_experiments_to_drive_card. There are no unclaimed experiments left.
if (claimExperimentsMode && experimentListAdapter.getItemCount() == 1) {
Activity activity = getActivity();
if (activity != null) {
activity.finish();
}
}
}
static class ExperimentListItem {
public final int viewType;
public final GoosciUserMetadata.ExperimentOverview experimentOverview;
public final String dateString;
ExperimentListItem(GoosciUserMetadata.ExperimentOverview experimentOverview) {
viewType = ExperimentListAdapter.VIEW_TYPE_EXPERIMENT;
this.experimentOverview = experimentOverview;
dateString = null;
}
ExperimentListItem(String date) {
viewType = ExperimentListAdapter.VIEW_TYPE_DATE;
dateString = date;
experimentOverview = null;
}
ExperimentListItem(int viewType) {
this.viewType = viewType;
dateString = null;
experimentOverview = null;
}
}
public static class ExperimentListAdapter extends RecyclerView.Adapter<ViewHolder> {
static final int VIEW_TYPE_EXPERIMENT = 0;
static final int VIEW_TYPE_EMPTY = 1;
static final int VIEW_TYPE_DATE = 2;
static final int VIEW_TYPE_CLAIM_EXPERIMENTS = 3;
static final int VIEW_TYPE_ADD_EXPERIMENTS_TO_DRIVE = 4;
private final Drawable placeHolderImage;
private final List<ExperimentListItem> items;
private boolean includeArchived;
private final Calendar calendar;
private final int currentYear;
private final String monthYearFormat;
private final WeakReference<ExperimentListFragment> parentReference;
private final SnackbarManager snackbarManager = new SnackbarManager();
private PopupMenu popupMenu = null;
    /**
     * @param parent the hosting fragment; held via a WeakReference so a retained adapter can't
     *     leak the fragment
     */
    public ExperimentListAdapter(ExperimentListFragment parent) {
      items = new ArrayList<>();
      placeHolderImage =
          parent.getActivity().getResources().getDrawable(R.drawable.experiment_card_placeholder);
      // Locale-aware calendar used to bucket experiments into month headers.
      calendar =
          Calendar.getInstance(parent.getActivity().getResources().getConfiguration().locale);
      currentYear = calendar.get(Calendar.YEAR);
      monthYearFormat = parent.getActivity().getResources().getString(R.string.month_year_format);
      parentReference = new WeakReference<>(parent);
    }
    /**
     * Replaces the adapter contents with the given overviews, most recent first, interleaving
     * month headers and prepending the claim / add-to-drive cards when applicable.
     */
    void setData(
        List<GoosciUserMetadata.ExperimentOverview> experimentOverviews, boolean includeArchived) {
      this.includeArchived = includeArchived;
      items.clear();
      if (parentReference.get().shouldShowClaimExperimentsCard()) {
        items.add(new ExperimentListItem(VIEW_TYPE_CLAIM_EXPERIMENTS));
      }
      if (parentReference.get().shouldShowAddExperimentsToDriveCard()) {
        items.add(new ExperimentListItem(VIEW_TYPE_ADD_EXPERIMENTS_TO_DRIVE));
      }
      if (experimentOverviews.isEmpty()) {
        items.add(new ExperimentListItem(VIEW_TYPE_EMPTY));
      } else {
        // Sort most recent first
        Collections.sort(
            experimentOverviews,
            (eo1, eo2) -> Long.compare(eo2.lastUsedTimeMs, eo1.lastUsedTimeMs));
        String date = "";
        for (GoosciUserMetadata.ExperimentOverview overview : experimentOverviews) {
          // Only show the year if it is not this year.
          calendar.setTime(new Date(overview.lastUsedTimeMs));
          String nextDate =
              DateFormat.format(
                      calendar.get(Calendar.YEAR) == currentYear ? "MMMM" : monthYearFormat,
                      calendar)
                  .toString();
          // The list is sorted, so a changed month string marks a new header group.
          if (!TextUtils.equals(date, nextDate)) {
            date = nextDate;
            items.add(new ExperimentListItem(date));
          }
          items.add(new ExperimentListItem(overview));
        }
      }
      notifyDataSetChanged();
    }
    /** Inflates the layout matching {@code viewType} and wraps it in a ViewHolder. */
    @Override
    public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
      LayoutInflater inflater = LayoutInflater.from(parent.getContext());
      View view;
      if (viewType == VIEW_TYPE_EMPTY) {
        view = inflater.inflate(R.layout.empty_list, parent, false);
      } else if (viewType == VIEW_TYPE_DATE) {
        view = inflater.inflate(R.layout.experiment_date, parent, false);
      } else if (viewType == VIEW_TYPE_CLAIM_EXPERIMENTS) {
        view = inflater.inflate(R.layout.claim_experiments_card, parent, false);
      } else if (viewType == VIEW_TYPE_ADD_EXPERIMENTS_TO_DRIVE) {
        view = inflater.inflate(R.layout.add_experiments_to_drive_card, parent, false);
      } else { // VIEW_TYPE_EXPERIMENT
        // Experiment cards use a different layout in claim mode (drive/share/delete buttons
        // instead of the overflow menu).
        view =
            inflater.inflate(
                parentReference.get().claimExperimentsMode
                    ? R.layout.claim_experiment_overview
                    : R.layout.project_experiment_overview,
                parent,
                false);
      }
      return new ViewHolder(view, viewType, parentReference.get().claimExperimentsMode);
    }
    @Override
    public void onBindViewHolder(ViewHolder holder, int position) {
      if (items.get(position).viewType == VIEW_TYPE_EXPERIMENT) {
        bindExperiment(holder, items.get(position));
      } else if (items.get(position).viewType == VIEW_TYPE_DATE) {
        // Date headers are a bare TextView showing the month (plus year when not current).
        ((TextView) holder.itemView).setText(items.get(position).dateString);
      } else if (items.get(position).viewType == VIEW_TYPE_CLAIM_EXPERIMENTS) {
        Context context = holder.itemView.getContext();
        int unclaimedExperimentCount = AccountsUtils.getUnclaimedExperimentCount(context);
        TextView textView = holder.itemView.findViewById(R.id.text_claim_experiments);
        textView.setText(
            context
                .getResources()
                .getQuantityString(
                    R.plurals.claim_experiments_card_text,
                    unclaimedExperimentCount,
                    unclaimedExperimentCount));
        // The claim button opens ClaimExperimentsActivity for the current account.
        holder.claimButton.setOnClickListener(
            v ->
                ClaimExperimentsActivity.launch(
                    v.getContext(),
                    parentReference.get().appAccount,
                    parentReference.get().getArguments().getBoolean(ARG_USE_PANES)));
      }
      // VIEW_TYPE_EMPTY and VIEW_TYPE_ADD_EXPERIMENTS_TO_DRIVE are static layouts; no binding.
    }
    @Override
    public int getItemCount() {
      // One adapter position per list item (special cards, date headers, experiments).
      return items.size();
    }
    @Override
    public int getItemViewType(int position) {
      // View type is precomputed and stored on each item.
      return items.get(position).viewType;
    }
    /**
     * Binds an experiment card: title, archived state, click-through to the experiment, the
     * overflow menu (or claim-mode action buttons), and the cover image.
     */
    private void bindExperiment(final ViewHolder holder, final ExperimentListItem item) {
      Resources res = holder.itemView.getResources();
      // First on the UI thread, set what experiment we're trying to load.
      GoosciUserMetadata.ExperimentOverview overview = item.experimentOverview;
      holder.experimentId = overview.experimentId;
      // Set the data we know about.
      String experimentText =
          Experiment.getDisplayTitle(holder.itemView.getContext(), overview.title);
      holder.experimentTitle.setText(experimentText);
      holder.archivedIndicator.setVisibility(overview.isArchived ? View.VISIBLE : View.GONE);
      if (overview.isArchived) {
        // Archived cards are dimmed and announce their archived state to accessibility services.
        holder.experimentTitle.setContentDescription(
            res.getString(R.string.archived_content_description, experimentText));
        holder
            .itemView
            .findViewById(R.id.content)
            .setAlpha(res.getFraction(R.fraction.metadata_card_archived_alpha, 1, 1));
      } else {
        // Use default.
        holder.experimentTitle.setContentDescription("");
        holder
            .itemView
            .findViewById(R.id.content)
            .setAlpha(res.getFraction(R.fraction.metadata_card_alpha, 1, 1));
      }
      holder.itemView.setTag(R.id.experiment_title, overview.experimentId);
      // Tapping the card opens the experiment, unless an export/sync is in progress.
      holder.cardView.setOnClickListener(
          v -> {
            if (!parentReference.get().progressBarVisible) {
              launchPanesActivity(
                  v.getContext(),
                  parentReference.get().appAccount,
                  overview.experimentId,
                  parentReference.get().claimExperimentsMode);
            }
          });
      Context context = holder.menuButton.getContext();
      boolean isShareIntentValid =
          FileMetadataUtil.getInstance()
              .validateShareIntent(
                  context, parentReference.get().appAccount, overview.experimentId);
      if (parentReference.get().claimExperimentsMode) {
        // Claim mode shows dedicated drive/share/delete buttons instead of the overflow menu.
        holder.menuButton.setVisibility(View.GONE);
        holder.driveButton.setOnClickListener(
            v -> promptBeforeClaimExperiment(overview.experimentId, context));
        if (isShareIntentValid) {
          holder.shareButton.setOnClickListener(v -> exportExperiment(overview.experimentId));
        } else {
          holder.shareButton.setVisibility(View.GONE);
        }
        holder.deleteButton.setOnClickListener(v -> deleteExperiment(overview.experimentId));
      } else if (parentReference
          .get()
          .getRecorderController()
          .watchRecordingStatus()
          .blockingFirst()
          .isRecording()) {
        // This should never occur. But apparently it does on some Lenovo K5 devices: b/119263772
        // BlockingFirst above is ok because there will always be a RecordingStatus. This won't
        // ever actually block.
        holder.menuButton.setVisibility(View.GONE);
      } else {
        holder.menuButton.setOnClickListener(
            v -> {
              int position = items.indexOf(item);
              popupMenu =
                  new PopupMenu(
                      context,
                      holder.menuButton,
                      Gravity.NO_GRAVITY,
                      R.attr.actionOverflowMenuStyle,
                      0);
              popupMenu
                  .getMenuInflater()
                  .inflate(R.menu.menu_experiment_overview, popupMenu.getMenu());
              // Show archive XOR unarchive depending on current state; export only when sharable.
              popupMenu.getMenu().findItem(R.id.menu_item_archive).setVisible(!overview.isArchived);
              popupMenu
                  .getMenu()
                  .findItem(R.id.menu_item_unarchive)
                  .setVisible(overview.isArchived);
              popupMenu
                  .getMenu()
                  .findItem(R.id.menu_item_export_experiment)
                  .setVisible(isShareIntentValid);
              popupMenu.setOnMenuItemClickListener(
                  menuItem -> {
                    if (parentReference.get().progressBarVisible) {
                      return true;
                    }
                    if (menuItem.getItemId() == R.id.menu_item_archive) {
                      setExperimentArchived(overview, position, true);
                      return true;
                    } else if (menuItem.getItemId() == R.id.menu_item_unarchive) {
                      setExperimentArchived(overview, position, false);
                      return true;
                    } else if (menuItem.getItemId() == R.id.menu_item_delete) {
                      deleteExperiment(overview.experimentId);
                      return true;
                    } else if (menuItem.getItemId() == R.id.menu_item_export_experiment) {
                      exportExperiment(overview.experimentId);
                      return true;
                    }
                    return false;
                  });
              popupMenu.setOnDismissListener(menu -> popupMenu = null);
              popupMenu.show();
            });
      }
      if (!TextUtils.isEmpty(overview.imagePath)) {
        PictureUtils.loadExperimentOverviewImage(
            parentReference.get().appAccount, holder.experimentImage, overview.imagePath);
      } else {
        // Make sure the scale type is correct for the placeholder
        holder.experimentImage.setScaleType(ImageView.ScaleType.FIT_CENTER);
        holder.experimentImage.setImageDrawable(placeHolderImage);
        int[] intArray =
            holder
                .experimentImage
                .getContext()
                .getResources()
                .getIntArray(R.array.experiment_colors_array);
        holder.experimentImage.setBackgroundColor(intArray[overview.colorIndex]);
      }
    }
    /**
     * Persists an archive/unarchive change for the experiment at {@code position}, then updates
     * the list, records analytics, and shows a snackbar (with undo when archiving).
     */
    private void setExperimentArchived(
        GoosciUserMetadata.ExperimentOverview overview, final int position, boolean archived) {
      if (parentReference.get() == null) {
        return;
      }
      Context context = parentReference.get().getContext();
      // Update the cached overview immediately so rebinding shows the new state.
      overview.isArchived = archived;
      DataController dataController = parentReference.get().getDataController();
      ExperimentLibraryManager elm = parentReference.get().getExperimentLibraryManager();
      RxDataController.getExperimentById(dataController, overview.experimentId)
          .subscribe(
              fullExperiment -> {
                fullExperiment.setArchived(context, dataController.getAppAccount(), archived);
                elm.setArchived(fullExperiment.getExperimentId(), archived);
                dataController.updateExperiment(
                    overview.experimentId,
                    new LoggingConsumer<Success>(TAG, "set archived bit") {
                      @Override
                      public void success(Success value) {
                        updateArchivedState(position, archived);
                        WhistlePunkApplication.getUsageTracker(parentReference.get().getActivity())
                            .trackEvent(
                                TrackerConstants.CATEGORY_EXPERIMENTS,
                                archived
                                    ? TrackerConstants.ACTION_ARCHIVE
                                    : TrackerConstants.ACTION_UNARCHIVE,
                                TrackerConstants.LABEL_EXPERIMENT_LIST,
                                0);
                        showArchivedSnackbar(overview, position, archived);
                      }
                    });
              });
    }
private void updateArchivedState(int position, boolean archived) {
if (includeArchived) {
notifyItemChanged(position);
} else if (archived) {
// Remove archived experiment immediately.
int i = position;
removeExperiment(i);
} else {
// It could be added back anywhere.
if (parentReference.get() != null) {
parentReference.get().loadExperiments();
}
}
}
private void showClaimedSnackbar() {
if (parentReference.get() == null) {
return;
}
String accountName = parentReference.get().claimingAccount.getAccountName();
String message =
parentReference
.get()
.getResources()
.getString(R.string.experiment_added_text, accountName);
showSnackbar(message, null /* undoOnClickListener */);
}
private void showArchivedSnackbar(
GoosciUserMetadata.ExperimentOverview overview, int position, boolean archived) {
if (parentReference.get() == null) {
return;
}
String message =
parentReference
.get()
.getResources()
.getString(
archived
? R.string.archived_experiment_message
: R.string.unarchived_experiment_message);
// We only seem to show "undo" for archiving items, not unarchiving them.
View.OnClickListener undoOnClickListener =
archived ? view -> setExperimentArchived(overview, position, !archived) : null;
showSnackbar(message, undoOnClickListener);
}
public void showSnackbar(String message, @Nullable View.OnClickListener undoOnClickListener) {
Snackbar bar =
AccessibilityUtils.makeSnackbar(
parentReference.get().getView(), message, Snackbar.LENGTH_LONG);
if (undoOnClickListener != null) {
bar.setAction(R.string.action_undo, undoOnClickListener);
}
snackbarManager.showSnackbar(bar);
}
public void onExperimentDeleted(String experimentId) {
int index = -1;
for (int i = 0; i < items.size(); i++) {
ExperimentListItem item = items.get(i);
if (item.viewType == VIEW_TYPE_EXPERIMENT
&& TextUtils.equals(item.experimentOverview.experimentId, experimentId)) {
index = i;
break;
}
}
if (index > 0) {
removeExperiment(index);
}
}
private void removeExperiment(int index) {
items.remove(index);
notifyItemRemoved(index);
// Remove the previous item if it is a date with no children.
// We don't need to index check that index is zero because there must be a date card
// somewhere above the experiment we just removed. So, an experiment is never at index zero.
if (items.get(index - 1).viewType == VIEW_TYPE_DATE) {
// The previous item is a date.
// If there are no items after that date, or the item after that date is also a date
if (index == items.size() || items.get(index).viewType == VIEW_TYPE_DATE) {
items.remove(index - 1);
if (items.isEmpty()) {
notifyDataSetChanged();
} else {
notifyItemRemoved(index - 1);
}
}
}
}
private void promptBeforeClaimExperiment(String experimentId, Context context) {
AlertDialog.Builder builder = new AlertDialog.Builder(parentReference.get().getContext());
builder.setTitle(R.string.drive_confirmation_text);
builder.setNegativeButton(android.R.string.cancel, (dialog, which) -> dialog.cancel());
builder.setPositiveButton(
R.string.drive_confirmation_yes,
(dialog, which) -> {
claimExperiment(experimentId);
dialog.dismiss();
});
AlertDialog dialog = builder.create();
dialog.show();
// Need to reset the content description so the button will be read correctly b/116869645
dialog.getButton(DialogInterface.BUTTON_POSITIVE)
.setContentDescription(context.getResources().getString(R.string.drive_confirmation_yes));
}
private void claimExperiment(String experimentId) {
parentReference
.get()
.getDataController()
.moveExperimentToAnotherAccount(
experimentId,
parentReference.get().claimingAccount,
new LoggingConsumer<Success>(TAG, "claimExperiments") {
@Override
public void success(Success value) {
onExperimentDeleted(experimentId);
showClaimedSnackbar();
// When the snackbar disappears, finish claim experiments mode if there are no
// experiments left.
new Handler()
.postDelayed(
() -> {
if (parentReference.get() != null) {
parentReference.get().maybeFinishClaimExperimentsMode();
}
},
LONG_DELAY_MILLIS);
}
});
}
private void deleteExperiment(String experimentId) {
snackbarManager.hideVisibleSnackbar();
parentReference.get().confirmDelete(experimentId);
}
private void exportExperiment(String experimentId) {
Context context = parentReference.get().getContext();
WhistlePunkApplication.getUsageTracker(context)
.trackEvent(
TrackerConstants.CATEGORY_EXPERIMENTS,
TrackerConstants.ACTION_SHARED,
TrackerConstants.LABEL_EXPERIMENT_LIST,
0);
parentReference.get().setProgressBarVisible(true);
ExportService.handleExperimentExportClick(
context, parentReference.get().appAccount, experimentId);
}
public void onDestroy() {
snackbarManager.onDestroy();
if (popupMenu != null) {
popupMenu.dismiss();
}
}
}
public static class ViewHolder extends RecyclerView.ViewHolder {
// Accessing via fields for faster access.
/** Experiment ID that is being loaded or has been loaded. */
public String experimentId;
public TextView experimentTitle;
public ImageView experimentImage;
public View archivedIndicator;
public View cardView;
public ImageButton menuButton;
public ImageButton driveButton;
public ImageButton shareButton;
public ImageButton deleteButton;
public Button claimButton;
int viewType;
public ViewHolder(View itemView, int viewType, boolean claimExperimentsMode) {
super(itemView);
this.viewType = viewType;
if (viewType == ExperimentListAdapter.VIEW_TYPE_EXPERIMENT) {
cardView = itemView.findViewById(R.id.card_view);
experimentImage = (ImageView) itemView.findViewById(R.id.experiment_image);
experimentTitle = (TextView) itemView.findViewById(R.id.experiment_title);
archivedIndicator = itemView.findViewById(R.id.archived_indicator);
menuButton = (ImageButton) itemView.findViewById(R.id.menu_button);
if (claimExperimentsMode) {
driveButton = (ImageButton) itemView.findViewById(R.id.drive_button);
shareButton = (ImageButton) itemView.findViewById(R.id.share_button);
deleteButton = (ImageButton) itemView.findViewById(R.id.delete_button);
}
} else if (viewType == ExperimentListAdapter.VIEW_TYPE_CLAIM_EXPERIMENTS) {
claimButton = (Button) itemView.findViewById(R.id.btn_claim_experiments);
}
}
}
private class ConnectivityBroadcastReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
updateNetworkStatusIcon();
}
}
}
| Load the experiment list view on refresh before kicking off a sync. This will display any experiments already on the device.
PiperOrigin-RevId: 228515633
| OpenScienceJournal/whistlepunk_library/src/main/java/com/google/android/apps/forscience/whistlepunk/project/ExperimentListFragment.java | Load the experiment list view on refresh before kicking off a sync. This will display any experiments already on the device. | <ide><path>penScienceJournal/whistlepunk_library/src/main/java/com/google/android/apps/forscience/whistlepunk/project/ExperimentListFragment.java
<ide> timing.dumpToLog();
<ide> });
<ide> });
<del>
<add> loadExperiments();
<ide> syncNow("Sync On Resume");
<ide> }
<ide> |
|
JavaScript | mit | 0ea0f36bd6848d3f1031d7667988b24e311b0b1f | 0 | jaredscheib/tagasauris,jaredscheib/tagasauris | var $j = jQuery.noConflict();
// wrap in IIFE to not expose global variables
(function app () {
var db = new Firebase('https://dazzling-heat-3394.firebaseio.com/');
var params = _.getUrlParams();
var taskNum = Number(params.task.slice(-1));
var annotations = {};
var annotext;
var vidEvents = {};
var vidCompleted = false;
var data;
var assetsCounts;
const todayDataDate = '20160123';
const assetType = 'img'; // alternately, 'vid'
var assetId;
var firstImgToDisplay = 0;
var imgTotal = 30
var imgPerGrid = 4;
// page elements
var instructions;
var prevBtns;
var nextBtns;
var media_area;
var response_area;
var enterKeyword;
var submitBtn;
// db.on('child_added', function (snapshot){
// var addedAnnotation = snapshot.val();
// console.log('Confirmed post to Firebase:', addedAnnotation);
// });
if (assetType === 'img') {
loadScript('https://annotorious.github.com/latest/annotorious.css');
loadScript('https://annotorious.github.com/latest/annotorious.min.js');
}
db.once('value', function (snapshot) {
console.log('db.once event');
data = snapshot.val();
assetsCounts = data.assets[todayDataDate];
if (data.data[todayDataDate] === undefined) data.data[todayDataDate] = {};
if (todayDataDate === '20160114') {
assetId = getAssetId(assetsCounts, data.data[todayDataDate]);
} else if (todayDataDate === '20160123') {
assetId = getAssetId(assetsCounts[assetType], data.data[todayDataDate]);
drawImgGrid();
setImgCounter();
}
if (assetType === 'vid') {
loadScript('https://www.youtube.com/iframe_api');
}
});
// load and set callback for YouTube API
if (assetType === 'vid') {
var player;
// must be in global namespace to be triggered upon script load
window.onYouTubeIframeAPIReady = function () {
console.log('YT READY');
var sliceVid = assetId;
var startSeconds = 0;
var startSecondsIndex = assetId.indexOf('?t=');
if (startSecondsIndex !== -1) {
startSeconds = Number(assetId.slice(startSecondsIndex + 3));
sliceVid = assetId.slice(0, startSecondsIndex);
}
player = new YT.Player('player', {
height: '315',
width: '420',
videoId: sliceVid,
playerVars: {start: startSeconds},
events: {
'onReady': onPlayerReady,
'onStateChange': onPlayerStateChange
}
});
};
function onPlayerReady(event) {
// event.target.playVideo();
};
function onPlayerStateChange(event) {
var eventNames = {
'-1': 'unstarted',
'0': 'ended',
'1': 'playing',
'2': 'paused',
'3': 'buffering',
'5': 'video cued'
};
vidEvents[getNow()] = eventNames[String(event.data)];
if (event.data === 0) {
document.getElementById('submitBtn').disabled = false;
vidCompleted = true;
}
annotext.focus();
};
}
// set HTML and create event listeners on window load
window.onload = function () {
instructions = document.getElementById('instructions');
prevBtns = document.getElementsByClassName('prevBtn');
nextBtns = document.getElementsByClassName('nextBtn');
media_area = document.getElementById('media_area');
response_area = document.getElementById('response_area');
enterKeyword;
submitBtn = document.getElementById('submitBtn');
// non img+annotorious tasks
if (taskNum <= 3) {
$j('.prevBtn').each(function (i, el) { $j(el).hide(); });
$j('.nextBtn').each(function (i, el) { $j(el).hide(); });
var playerDiv = document.createElement('div');
playerDiv.id = 'player';
media_area.appendChild(playerDiv);
if (taskNum === 3) { // checkboxes response
instructions.innerHTML = 'Please watch the entire video. Pause and replay as necessary.<br>' +
'At the moment you see anything, click that concept from among the checkboxes below.<br>' +
'Please pause and replay as necessary in order to submit multiple simultaneous concepts.<br>' +
'When you have entered every concept and finished the video, click submit.';
response_area.innerHTML = '<div id="annochecks">' +
'<input type="checkbox" name="checkboxes" value="driving">driving</input><br>' +
'<input type="checkbox" name="checkboxes" value="carExterior">car exterior</input><br>' +
'<input type="checkbox" name="checkboxes" value="carInterior">car interior</input><br>' +
'<input type="checkbox" name="checkboxes" value="road">road</input><br>' +
'<input type="checkbox" name="checkboxes" value="people">people</input><br>' +
'<input type="checkbox" name="checkboxes" value="truck">truck</input><br>' +
'<input type="checkbox" name="checkboxes" value="BMW">BMW</input><br>' +
'<input type="checkbox" name="checkboxes" value="motorcycle">motorcycle</input><br>' +
'<input type="checkbox" name="checkboxes" value="diesel">diesel</input><br>' +
'<input type="checkbox" name="checkboxes" value="drifting">drifting</input><br>' +
'</div>';
var checkboxes = document.getElementById('annochecks');
checkboxes.addEventListener('change', function (event) {
event.preventDefault();
annotations[getNow()] = {text: event.target.value, timestamp: player.getCurrentTime()};
setTimeout(function() { event.target.checked = false; }, 100);
// console.log(annotations);
});
} else { // textarea response
if (taskNum === 1) {
instructions.innerHTML = '<li>Press play to watch the video.</li>' +
'<li>Enter one keyword or phrase at a time to describe what you see in the video.</li>' +
'<li>Pause and replay the video as necessary to enter all keywords.</li>' +
'<li>When you have entered keywords for the entire video, click Submit HIT.</li>';
} else if (taskNum === 2) {
instructions.innerHTML = '<li>Press play to watch the video related to <b>cars</b>.</li>' +
'<li>Enter one keyword or phrase at a time to describe what you see related to <b>cars</b> in the video.</li>' +
'<li>Pause and replay the video as necessary to enter all keywords.</li>' +
'<li>When you have entered keywords for the entire video, click Submit HIT.</li>';
}
response_area.innerHTML = '<textarea id="annotext" placeholder="Enter keyword or phrase"></textarea><button id="enterKeyword" disabled>Enter</button>';
annotext = document.getElementById('annotext');
annotext.focus();
enterKeyword = document.getElementById('enterKeyword');
annotext.addEventListener('keydown', function (event) {
if (event.keyCode === 13) {
event.preventDefault();
annotations[getNow()] = {text: annotext.value, timestamp: player.getCurrentTime()};
annotext.value = '';
// console.log(annotations);
}
});
annotext.addEventListener('keyup', function (event) {
if (annotext.value === '') {
enterKeyword.setAttribute('disabled', 'disabled');
} else {
enterKeyword.removeAttribute('disabled');
}
});
}
// img+annotorious tasks
} else {
if (assetType === 'img') {
instructions.innerHTML = '<li>Draw a box around each concept you see in each image.</li>' +
'<li>Enter a keyword or phrase in the text box that appears under each drawn box.</li>' +
'<li>Note: the same concept may appear across multiple images.</li>' +
'<li>When you have annotated each image in a set, click Next Set to annotate remaining images.</li>' +
'<li>When you have annotated every image, click Submit HIT.</li>';
response_area.remove()
// set up prev and next buttons for carousel
$j('.prevBtn')
.prop('disabled', false)
.on('click', function (e) {
e.preventDefault();
var imgSet = $j('.anno_img');
var allAnnotated = true;
imgSet.each(function (i, el) {
var imgAnno = anno.getAnnotations(el.src);
if (imgAnno.length === 0) allAnnotated = false;
});
if (allAnnotated) {
firstImgToDisplay -= imgPerGrid;
// prevent error on missing expected tail images
if (firstImgToDisplay < 0) firstImgToDisplay = imgTotal - (imgTotal % imgPerGrid);
drawImgGrid();
} else {
alert('Please annotate each image in the set.');
}
});
$j('.nextBtn')
.prop('disabled', false)
.on('click', function (e) {
e.preventDefault();
var imgSet = $j('.anno_img');
var allAnnotated = true;
imgSet.each(function (i, el) {
var imgAnno = anno.getAnnotations(el.src);
if (imgAnno.length === 0) allAnnotated = false;
});
if (allAnnotated) {
firstImgToDisplay += imgPerGrid;
if (firstImgToDisplay > imgTotal) firstImgToDisplay = 0;
drawImgGrid();
} else {
alert('Please annotate each image in the set.');
}
});
}
// annotorious event handlers
anno.addHandler('onAnnotationCreated', function (annotation) {
var imgNum = getImgNum(annotation);
if (annotation.text.length < 2) {
anno.removeAnnotation(annotation);
return alert('Text must be a valid keyword.')
}
// persist annotations to later remove and restore on Next/Prev
if (!annotations[imgNum]) annotations[imgNum] = [];
annotations[imgNum].push(annotation);
setImgCounter();
});
anno.addHandler('onAnnotationRemoved', function (annotation) {
var imgNum = getImgNum(annotation);
_.each(annotations[imgNum], function (tempAnno, i) {
if (_.deepEquals(annotation, tempAnno)) {
annotations[imgNum].splice(i, 1);
}
});
setImgCounter();
});
anno.addHandler('onAnnotationUpdated', function (annotation) {
var imgNum = getImgNum(annotation);
_.each(annotations[imgNum], function (tempAnno, i) {
if (_.deepEquals(annotation, tempAnno)) {
annotations[imgNum][i] = annotation;
}
});
setImgCounter();
});
// make 'Enter' trigger Save button to prevent multi-line annotations
$j(media_area).on('focus', '.annotorious-editor-text', function (e) {
$j(e.target).on('keyup', function (e) {
// var saveBtn = $j(e.target).parent().find('.annotorious-editor-button-save');
// console.log('keyup', e);
if (e.keyCode === 13) {
// val below is a hack rather than modifying annotorious source code via goog.events, which replaces the default 'click' event on the <a> of saveBtn
var val = $j(e.target).val().split('\n').join('');
$j(e.target).val(val);
// console.log('hit enter in box; saveBtn:', saveBtn);
// debugger;
// console.log(goog);
// to implement 'Enter' triggering Save, would need to modify annotorious.js, a la https://groups.google.com/forum/#!topic/annotorious/dq1-Qtif3b4
// goog.events.dispatchEvent(saveBtn, goog.events.EventType.CLICK);
// $j(saveBtn).simulate('click');
}
});
});
}
submitBtn.addEventListener('click', function (event) {
event.preventDefault();
if (taskNum < 4 && !vidCompleted) {
return alert('Please finish watching the video.');
}
console.log('submit event', annotations);
if (Object.keys(annotations).length > 0) {
params.workerId = params.workerId || 'test';
var postRef = new Firebase('https://dazzling-heat-3394.firebaseio.com/data/' + todayDataDate + '/' + params.workerId + '/');
var postData = {
assetId: assetId,
workerId: params.workerId,
task: taskNum,
annotations: annotations,
time_submitted: getNow()
};
if (taskNum <= 3) postData.video_events = vidEvents
postRef.push(postData, function () {
assetsCounts[assetId]++;
var assetsRef = new Firebase('https://dazzling-heat-3394.firebaseio.com/assets/' + todayDataDate + '/' + assetType + '/');
assetsRef.set(assetsCounts);
mturkSubmit();
console.log('POST to Firebase:', postData);
});
} else {
alert('Please annotate the media before submitting.');
}
});
mturkCheckPreview();
};
function getAssetId (assetsCounts, data) {
console.log(assetsCounts);
console.log(data);
if (Object.keys(data).length === 0) {
return Object.keys(assetsCounts)[0];
// return setVidHTML(assetId);
} else {
var assetsCountsClone = _.deepClone(assetsCounts);
var assetsCountsRemaining = [];
_.each(data[params.workerId], function (entry) {
if (taskNum === entry.task) {
assetsCountsClone[entry.assetId] = false;
}
});
_.each(assetsCountsClone, function (val, key) {
if (val !== false) assetsCountsRemaining.push([key, val]);
});
console.log('assetsCounts', assetsCounts);
console.log('assetsCountsClone', assetsCountsClone);
console.log('assetsCountsRemaining', assetsCountsRemaining);
if (assetsCountsRemaining.length > 0) {
// return vid with least views
assetsCountsRemaining.sort(function (a, b) { return a[1] < b[1]; });
return assetsCountsRemaining.pop()[0];
// return setVidHTML(assetId);
} else {
_.dialog($j('<div style="background-color: rgba(0,0,0,0.5);color:white;font-size:xx-large;padding:10px"/>').text('all HITs completed'), false)
$j('body').click(function () {
alert('You have annotated all videos. Please return this HIT.')
})
return true;
}
}
};
// create image grid
function drawImgGrid () {
// console.log(firstImgToDisplay);
$j(media_area).children().remove();
var imgGrid = document.createElement('div');
imgGrid.id = 'img_grid';
for (var i = 0; i < 2; i++) {
anno.reset(); // necessary for addAnnotation functionality below
var imgRow = document.createElement('div');
imgRow.className = 'img_row';
var j, limit;
if (i === 0) {
j = firstImgToDisplay;
limit = j + imgPerGrid / 2;
} else {
j = firstImgToDisplay + imgPerGrid / 2;
limit = j + imgPerGrid / 2;
}
// console.log('j, limit', j, limit);
for (j; j < limit; j++) {
var imgNum = String(j);
if (imgNum.length < 2) imgNum = '0' + imgNum;
imgNum += '00'; // until non-integer frames added
// console.log('imgNum', imgNum);
var newImg = document.createElement('img');
newImg.id = 'img' + imgNum;
newImg.className = 'anno_img';
newImg.src = 'assets/img/' + assetId + '-' + imgNum + '.jpeg';
imgRow.appendChild(newImg);
$j(newImg).load(function () { // on img load event so annotorious loads properly
anno.makeAnnotatable(this);
var imgNum = this.id.slice(3);
if (annotations[imgNum]) {
// console.log('annotations[imgNum]', annotations[imgNum]);
_.each(annotations[imgNum], function (tempAnno) {
// delete tempAnno.context;
// tempAnno.src = tempAnno.src.slice(tempAnno.src.indexOf('assets/'));
// tempAnno.editable = false; // make annotation read-only
anno.addAnnotation(tempAnno);
});
}
}).error(function (err) {
$j(this).parent().remove();
}).bind(newImg);
}
imgGrid.appendChild(imgRow);
}
media_area.appendChild(imgGrid);
};
function getImgNum (annotation) {
return annotation.src.slice(annotation.src.indexOf('/img/') + 8, annotation.src.indexOf('.jp'));
};
function imgRemaining () {
var remaining = imgTotal;
_.each(annotations, function (tempAnno) {
if (tempAnno.length > 0) {
remaining--;
}
});
if (submitBtn.disabled === true) {
if (remaining === 0) {
submitBtn.disabled = false;
}
} else {
if (remaining > 0) {
submitBtn.disabled = true;
}
}
return remaining;
};
function setImgCounter (remaining) {
document.getElementById('imgRemaining').innerHTML = imgRemaining();
};
function mturkSubmit() {
var f = $j('<form action="' + params.turkSubmitTo + '/mturk/externalSubmit" method="GET"><input type="hidden" name="assignmentId" value="' + params.assignmentId + '"></input><input type="hidden" name="unused" value="unused"></input></form>');
$j('body').append(f);
f.submit();
};
function mturkCheckPreview() {
if (params.assignmentId == "ASSIGNMENT_ID_NOT_AVAILABLE") {
_.dialog($j('<div style="background-color: rgba(0,0,0,0.5);color:white;font-size:xx-large;padding:10px"/>').text('preview'), false);
$j('body').click(function () {
alert('This is a preview. Please accept the HIT to work on it.');
});
return true;
}
};
function getNow() {
return new Date().getTime();
};
function loadScript (url) {
var fileType = url.split('.').reverse()[0];
var script;
if (fileType === 'js') {
script = document.createElement('script');
script.setAttribute('src', url);
script.setAttribute('type', 'text/javascript');
} else if (fileType === 'css') {
script = document.createElement('link');
script.setAttribute('rel', 'stylesheet');
script.setAttribute('type', 'text/css');
script.setAttribute('href', url);
}
$j('script').parent().append(script);
}
}());
| index.js | var $j = jQuery.noConflict();
// wrap in IIFE to not expose global variables
(function app () {
var db = new Firebase('https://dazzling-heat-3394.firebaseio.com/');
var params = _.getUrlParams();
var taskNum = Number(params.task.slice(-1));
var annotations = {};
var annotext;
var vidEvents = {};
var vidCompleted = false;
var data;
var assetsCounts;
const todayDataDate = '20160123';
const assetType = 'img'; // alternately, 'vid'
var assetId;
var firstImgToDisplay = 0;
var imgTotal = 30
var imgPerGrid = 4;
// page elements
var instructions;
var prevBtns;
var nextBtns;
var media_area;
var response_area;
var enterKeyword;
var submitBtn;
// db.on('child_added', function (snapshot){
// var addedAnnotation = snapshot.val();
// console.log('Confirmed post to Firebase:', addedAnnotation);
// });
if (assetType === 'img') {
loadScript('https://annotorious.github.com/latest/annotorious.min.js');
}
db.once('value', function (snapshot) {
console.log('db.once event');
data = snapshot.val();
assetsCounts = data.assets[todayDataDate];
if (data.data[todayDataDate] === undefined) data.data[todayDataDate] = {};
if (todayDataDate === '20160114') {
assetId = getAssetId(assetsCounts, data.data[todayDataDate]);
} else if (todayDataDate === '20160123') {
assetId = getAssetId(assetsCounts[assetType], data.data[todayDataDate]);
drawImgGrid();
setImgCounter();
}
if (assetType === 'vid') {
loadScript('https://www.youtube.com/iframe_api');
}
});
// load and set callback for YouTube API
if (assetType === 'vid') {
var player;
// must be in global namespace to be triggered upon script load
window.onYouTubeIframeAPIReady = function () {
console.log('YT READY');
var sliceVid = assetId;
var startSeconds = 0;
var startSecondsIndex = assetId.indexOf('?t=');
if (startSecondsIndex !== -1) {
startSeconds = Number(assetId.slice(startSecondsIndex + 3));
sliceVid = assetId.slice(0, startSecondsIndex);
}
player = new YT.Player('player', {
height: '315',
width: '420',
videoId: sliceVid,
playerVars: {start: startSeconds},
events: {
'onReady': onPlayerReady,
'onStateChange': onPlayerStateChange
}
});
};
function onPlayerReady(event) {
// event.target.playVideo();
};
function onPlayerStateChange(event) {
var eventNames = {
'-1': 'unstarted',
'0': 'ended',
'1': 'playing',
'2': 'paused',
'3': 'buffering',
'5': 'video cued'
};
vidEvents[getNow()] = eventNames[String(event.data)];
if (event.data === 0) {
document.getElementById('submitBtn').disabled = false;
vidCompleted = true;
}
annotext.focus();
};
}
// set HTML and create event listeners on window load
window.onload = function () {
instructions = document.getElementById('instructions');
prevBtns = document.getElementsByClassName('prevBtn');
nextBtns = document.getElementsByClassName('nextBtn');
media_area = document.getElementById('media_area');
response_area = document.getElementById('response_area');
enterKeyword;
submitBtn = document.getElementById('submitBtn');
// non img+annotorious tasks
if (taskNum <= 3) {
$j('.prevBtn').each(function (i, el) { $j(el).hide(); });
$j('.nextBtn').each(function (i, el) { $j(el).hide(); });
var playerDiv = document.createElement('div');
playerDiv.id = 'player';
media_area.appendChild(playerDiv);
if (taskNum === 3) { // checkboxes response
instructions.innerHTML = 'Please watch the entire video. Pause and replay as necessary.<br>' +
'At the moment you see anything, click that concept from among the checkboxes below.<br>' +
'Please pause and replay as necessary in order to submit multiple simultaneous concepts.<br>' +
'When you have entered every concept and finished the video, click submit.';
response_area.innerHTML = '<div id="annochecks">' +
'<input type="checkbox" name="checkboxes" value="driving">driving</input><br>' +
'<input type="checkbox" name="checkboxes" value="carExterior">car exterior</input><br>' +
'<input type="checkbox" name="checkboxes" value="carInterior">car interior</input><br>' +
'<input type="checkbox" name="checkboxes" value="road">road</input><br>' +
'<input type="checkbox" name="checkboxes" value="people">people</input><br>' +
'<input type="checkbox" name="checkboxes" value="truck">truck</input><br>' +
'<input type="checkbox" name="checkboxes" value="BMW">BMW</input><br>' +
'<input type="checkbox" name="checkboxes" value="motorcycle">motorcycle</input><br>' +
'<input type="checkbox" name="checkboxes" value="diesel">diesel</input><br>' +
'<input type="checkbox" name="checkboxes" value="drifting">drifting</input><br>' +
'</div>';
var checkboxes = document.getElementById('annochecks');
checkboxes.addEventListener('change', function (event) {
event.preventDefault();
annotations[getNow()] = {text: event.target.value, timestamp: player.getCurrentTime()};
setTimeout(function() { event.target.checked = false; }, 100);
// console.log(annotations);
});
} else { // textarea response
if (taskNum === 1) {
instructions.innerHTML = '<li>Press play to watch the video.</li>' +
'<li>Enter one keyword or phrase at a time to describe what you see in the video.</li>' +
'<li>Pause and replay the video as necessary to enter all keywords.</li>' +
'<li>When you have entered keywords for the entire video, click Submit HIT.</li>';
} else if (taskNum === 2) {
instructions.innerHTML = '<li>Press play to watch the video related to <b>cars</b>.</li>' +
'<li>Enter one keyword or phrase at a time to describe what you see related to <b>cars</b> in the video.</li>' +
'<li>Pause and replay the video as necessary to enter all keywords.</li>' +
'<li>When you have entered keywords for the entire video, click Submit HIT.</li>';
}
response_area.innerHTML = '<textarea id="annotext" placeholder="Enter keyword or phrase"></textarea><button id="enterKeyword" disabled>Enter</button>';
annotext = document.getElementById('annotext');
annotext.focus();
enterKeyword = document.getElementById('enterKeyword');
annotext.addEventListener('keydown', function (event) {
if (event.keyCode === 13) {
event.preventDefault();
annotations[getNow()] = {text: annotext.value, timestamp: player.getCurrentTime()};
annotext.value = '';
// console.log(annotations);
}
});
annotext.addEventListener('keyup', function (event) {
if (annotext.value === '') {
enterKeyword.setAttribute('disabled', 'disabled');
} else {
enterKeyword.removeAttribute('disabled');
}
});
}
// img+annotorious tasks
} else {
if (assetType === 'img') {
instructions.innerHTML = '<li>Draw a box around each concept you see in each image.</li>' +
'<li>Enter a keyword or phrase in the text box that appears under each drawn box.</li>' +
'<li>Note: the same concept may appear across multiple images.</li>' +
'<li>When you have annotated each image in a set, click Next Set to annotate remaining images.</li>' +
'<li>When you have annotated every image, click Submit HIT.</li>';
response_area.remove()
// set up prev and next buttons for carousel
$j('.prevBtn')
.prop('disabled', false)
.on('click', function (e) {
e.preventDefault();
var imgSet = $j('.anno_img');
var allAnnotated = true;
imgSet.each(function (i, el) {
var imgAnno = anno.getAnnotations(el.src);
if (imgAnno.length === 0) allAnnotated = false;
});
if (allAnnotated) {
firstImgToDisplay -= imgPerGrid;
// prevent error on missing expected tail images
if (firstImgToDisplay < 0) firstImgToDisplay = imgTotal - (imgTotal % imgPerGrid);
drawImgGrid();
} else {
alert('Please annotate each image in the set.');
}
});
$j('.nextBtn')
.prop('disabled', false)
.on('click', function (e) {
e.preventDefault();
var imgSet = $j('.anno_img');
var allAnnotated = true;
imgSet.each(function (i, el) {
var imgAnno = anno.getAnnotations(el.src);
if (imgAnno.length === 0) allAnnotated = false;
});
if (allAnnotated) {
firstImgToDisplay += imgPerGrid;
if (firstImgToDisplay > imgTotal) firstImgToDisplay = 0;
drawImgGrid();
} else {
alert('Please annotate each image in the set.');
}
});
}
// annotorious event handlers
anno.addHandler('onAnnotationCreated', function (annotation) {
var imgNum = getImgNum(annotation);
if (annotation.text.length < 2) {
anno.removeAnnotation(annotation);
return alert('Text must be a valid keyword.')
}
// persist annotations to later remove and restore on Next/Prev
if (!annotations[imgNum]) annotations[imgNum] = [];
annotations[imgNum].push(annotation);
setImgCounter();
});
anno.addHandler('onAnnotationRemoved', function (annotation) {
var imgNum = getImgNum(annotation);
_.each(annotations[imgNum], function (tempAnno, i) {
if (_.deepEquals(annotation, tempAnno)) {
annotations[imgNum].splice(i, 1);
}
});
setImgCounter();
});
anno.addHandler('onAnnotationUpdated', function (annotation) {
var imgNum = getImgNum(annotation);
_.each(annotations[imgNum], function (tempAnno, i) {
if (_.deepEquals(annotation, tempAnno)) {
annotations[imgNum][i] = annotation;
}
});
setImgCounter();
});
// make 'Enter' trigger Save button to prevent multi-line annotations
$j(media_area).on('focus', '.annotorious-editor-text', function (e) {
$j(e.target).on('keyup', function (e) {
// var saveBtn = $j(e.target).parent().find('.annotorious-editor-button-save');
// console.log('keyup', e);
if (e.keyCode === 13) {
// val below is a hack rather than modifying annotorious source code via goog.events, which replaces the default 'click' event on the <a> of saveBtn
var val = $j(e.target).val().split('\n').join('');
$j(e.target).val(val);
// console.log('hit enter in box; saveBtn:', saveBtn);
// debugger;
// console.log(goog);
// to implement 'Enter' triggering Save, would need to modify annotorious.js, a la https://groups.google.com/forum/#!topic/annotorious/dq1-Qtif3b4
// goog.events.dispatchEvent(saveBtn, goog.events.EventType.CLICK);
// $j(saveBtn).simulate('click');
}
});
});
}
submitBtn.addEventListener('click', function (event) {
event.preventDefault();
if (taskNum < 4 && !vidCompleted) {
return alert('Please finish watching the video.');
}
console.log('submit event', annotations);
if (Object.keys(annotations).length > 0) {
params.workerId = params.workerId || 'test';
var postRef = new Firebase('https://dazzling-heat-3394.firebaseio.com/data/' + todayDataDate + '/' + params.workerId + '/');
var postData = {
assetId: assetId,
workerId: params.workerId,
task: taskNum,
annotations: annotations,
time_submitted: getNow()
};
if (taskNum <= 3) postData.video_events = vidEvents
postRef.push(postData, function () {
assetsCounts[assetId]++;
var assetsRef = new Firebase('https://dazzling-heat-3394.firebaseio.com/assets/' + todayDataDate + '/' + assetType + '/');
assetsRef.set(assetsCounts);
mturkSubmit();
console.log('POST to Firebase:', postData);
});
} else {
alert('Please annotate the media before submitting.');
}
});
mturkCheckPreview();
};
function getAssetId (assetsCounts, data) {
console.log(assetsCounts);
console.log(data);
if (Object.keys(data).length === 0) {
return Object.keys(assetsCounts)[0];
// return setVidHTML(assetId);
} else {
var assetsCountsClone = _.deepClone(assetsCounts);
var assetsCountsRemaining = [];
_.each(data[params.workerId], function (entry) {
if (taskNum === entry.task) {
assetsCountsClone[entry.assetId] = false;
}
});
_.each(assetsCountsClone, function (val, key) {
if (val !== false) assetsCountsRemaining.push([key, val]);
});
console.log('assetsCounts', assetsCounts);
console.log('assetsCountsClone', assetsCountsClone);
console.log('assetsCountsRemaining', assetsCountsRemaining);
if (assetsCountsRemaining.length > 0) {
// return vid with least views
assetsCountsRemaining.sort(function (a, b) { return a[1] < b[1]; });
return assetsCountsRemaining.pop()[0];
// return setVidHTML(assetId);
} else {
_.dialog($j('<div style="background-color: rgba(0,0,0,0.5);color:white;font-size:xx-large;padding:10px"/>').text('all HITs completed'), false)
$j('body').click(function () {
alert('You have annotated all videos. Please return this HIT.')
})
return true;
}
}
};
// create image grid
function drawImgGrid () {
// console.log(firstImgToDisplay);
$j(media_area).children().remove();
var imgGrid = document.createElement('div');
imgGrid.id = 'img_grid';
for (var i = 0; i < 2; i++) {
anno.reset(); // necessary for addAnnotation functionality below
var imgRow = document.createElement('div');
imgRow.className = 'img_row';
var j, limit;
if (i === 0) {
j = firstImgToDisplay;
limit = j + imgPerGrid / 2;
} else {
j = firstImgToDisplay + imgPerGrid / 2;
limit = j + imgPerGrid / 2;
}
// console.log('j, limit', j, limit);
for (j; j < limit; j++) {
var imgNum = String(j);
if (imgNum.length < 2) imgNum = '0' + imgNum;
imgNum += '00'; // until non-integer frames added
// console.log('imgNum', imgNum);
var newImg = document.createElement('img');
newImg.id = 'img' + imgNum;
newImg.className = 'anno_img';
newImg.src = 'assets/img/' + assetId + '-' + imgNum + '.jpeg';
imgRow.appendChild(newImg);
$j(newImg).load(function () { // on img load event so annotorious loads properly
anno.makeAnnotatable(this);
var imgNum = this.id.slice(3);
if (annotations[imgNum]) {
// console.log('annotations[imgNum]', annotations[imgNum]);
_.each(annotations[imgNum], function (tempAnno) {
// delete tempAnno.context;
// tempAnno.src = tempAnno.src.slice(tempAnno.src.indexOf('assets/'));
// tempAnno.editable = false; // make annotation read-only
anno.addAnnotation(tempAnno);
});
}
}).error(function (err) {
$j(this).parent().remove();
}).bind(newImg);
}
imgGrid.appendChild(imgRow);
}
media_area.appendChild(imgGrid);
};
function getImgNum (annotation) {
return annotation.src.slice(annotation.src.indexOf('/img/') + 8, annotation.src.indexOf('.jp'));
};
function imgRemaining () {
var remaining = imgTotal;
_.each(annotations, function (tempAnno) {
if (tempAnno.length > 0) {
remaining--;
}
});
if (submitBtn.disabled === true) {
if (remaining === 0) {
submitBtn.disabled = false;
}
} else {
if (remaining > 0) {
submitBtn.disabled = true;
}
}
return remaining;
};
function setImgCounter (remaining) {
document.getElementById('imgRemaining').innerHTML = imgRemaining();
};
function mturkSubmit() {
var f = $j('<form action="' + params.turkSubmitTo + '/mturk/externalSubmit" method="GET"><input type="hidden" name="assignmentId" value="' + params.assignmentId + '"></input><input type="hidden" name="unused" value="unused"></input></form>');
$j('body').append(f);
f.submit();
};
function mturkCheckPreview() {
if (params.assignmentId == "ASSIGNMENT_ID_NOT_AVAILABLE") {
_.dialog($j('<div style="background-color: rgba(0,0,0,0.5);color:white;font-size:xx-large;padding:10px"/>').text('preview'), false);
$j('body').click(function () {
alert('This is a preview. Please accept the HIT to work on it.');
});
return true;
}
};
function getNow() {
return new Date().getTime();
};
function loadScript (src) {
var script = document.createElement('script');
script.src = src;
$j('script').parent().append(script);
}
}());
| Make annotorious scripts https, modify loadScript
| index.js | Make annotorious scripts https, modify loadScript | <ide><path>ndex.js
<ide> // });
<ide>
<ide> if (assetType === 'img') {
<add> loadScript('https://annotorious.github.com/latest/annotorious.css');
<ide> loadScript('https://annotorious.github.com/latest/annotorious.min.js');
<ide> }
<ide>
<ide> return new Date().getTime();
<ide> };
<ide>
<del>function loadScript (src) {
<del> var script = document.createElement('script');
<del> script.src = src;
<add>function loadScript (url) {
<add> var fileType = url.split('.').reverse()[0];
<add> var script;
<add> if (fileType === 'js') {
<add> script = document.createElement('script');
<add> script.setAttribute('src', url);
<add> script.setAttribute('type', 'text/javascript');
<add> } else if (fileType === 'css') {
<add> script = document.createElement('link');
<add> script.setAttribute('rel', 'stylesheet');
<add> script.setAttribute('type', 'text/css');
<add> script.setAttribute('href', url);
<add> }
<ide> $j('script').parent().append(script);
<ide> }
<ide> |
|
Java | apache-2.0 | ff7a03bae9f04528679bd880b0be0666a2335b2f | 0 | michalkurka/h2o-3,spennihana/h2o-3,spennihana/h2o-3,spennihana/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,spennihana/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,h2oai/h2o-3,spennihana/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-dev | package water;
import java.io.*;
import java.lang.reflect.Array;
import java.net.*;
import java.nio.*;
import java.nio.channels.*;
import java.util.ArrayList;
import java.util.Random;
import water.network.SocketChannelUtils;
import water.util.Log;
import water.util.StringUtils;
import water.util.TwoDimTable;
import static water.H2O.OptArgs.SYSTEM_PROP_PREFIX;
/** A ByteBuffer backed mixed Input/Output streaming class, using Iced serialization.
*
* Reads/writes empty/fill the ByteBuffer as needed. When it is empty/full it
* we go to the ByteChannel for more/less. Because DirectByteBuffers are
* expensive to make, we keep a few pooled.
*
* When talking to a remote H2O node, switches between UDP and TCP transport
* protocols depending on the message size. The TypeMap is not included, and
* is assumed to exist on the remote H2O node.
*
* Supports direct NIO FileChannel read/write to disk, used during user-mode
* swapping. The TypeMap is not included on write, and is assumed to be the
* current map on read.
*
* Support read/write from byte[] - and this defeats the purpose of a
* Streaming protocol, but is frequently handy for small structures. The
* TypeMap is not included, and is assumed to be the current map on read.
*
* Supports read/write from a standard Stream, which by default assumes it is
* NOT going in and out of the same Cloud, so the TypeMap IS included. The
* serialized object can only be read back into the same minor version of H2O.
*
* @author <a href="mailto:[email protected]"></a>
*/
public final class AutoBuffer {
// Maximum size of an array we allow to allocate (the value is designed
// to mimic the behavior of OpenJDK libraries)
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
private static String H2O_SYSTEM_SERIALIZATION_IGNORE_VERSION = SYSTEM_PROP_PREFIX + "serialization.ignore.version";
// The direct ByteBuffer for schlorping data about.
// Set to null to indicate the AutoBuffer is closed.
ByteBuffer _bb;
public String sourceName = "???";
public boolean isClosed() { return _bb == null ; }
// The ByteChannel for moving data in or out. Could be a SocketChannel (for
// a TCP connection) or a FileChannel (spill-to-disk) or a DatagramChannel
// (for a UDP connection). Null on closed AutoBuffers. Null on initial
// remote-writing AutoBuffers which are still deciding UDP vs TCP. Not-null
// for open AutoBuffers doing file i/o or reading any TCP/UDP or having
// written at least one buffer to TCP/UDP.
private Channel _chan;
// A Stream for moving data in. Null unless this AutoBuffer is
// stream-based, in which case _chan field is null. This path supports
// persistance: reading and writing objects from different H2O cluster
// instances (but exactly the same H2O revision). The only required
// similarity is same-classes-same-fields; changes here will probably
// silently crash. If the fields are named the same but the semantics
// differ, then again the behavior is probably silent crash.
private InputStream _is;
private short[] _typeMap; // Mapping from input stream map to current map, or null
// If we need a SocketChannel, raise the priority so we get the I/O over
// with. Do not want to have some TCP socket open, blocking the TCP channel
// and then have the thread stalled out. If we raise the priority - be sure
// to lower it again. Note this is for TCP channels ONLY, and only because
// we are blocking another Node with I/O.
private int _oldPrior = -1;
// Where to send or receive data via TCP or UDP (choice made as we discover
// how big the message is); used to lazily create a Channel. If NULL, then
// _chan should be a pre-existing Channel, such as a FileChannel.
final H2ONode _h2o;
// TRUE for read-mode. FALSE for write-mode. Can be flipped for rapid turnaround.
private boolean _read;
// TRUE if this AutoBuffer has never advanced past the first "page" of data.
// The UDP-flavor, port# and task fields are only valid until we read over
// them when flipping the ByteBuffer to the next chunk of data. Used in
// asserts all over the place.
private boolean _firstPage;
// Total size written out from 'new' to 'close'. Only updated when actually
// reading or writing data, or after close(). For profiling only.
int _size;
//int _zeros, _arys;
// More profiling: start->close msec, plus nano's spent in blocking I/O
// calls. The difference between (close-start) and i/o msec is the time the
// i/o thread spends doing other stuff (e.g. allocating Java objects or
// (de)serializing).
long _time_start_ms, _time_close_ms, _time_io_ns;
// I/O persistence flavor: Value.ICE, NFS, HDFS, S3, TCP. Used to record I/O time.
final byte _persist;
// The assumed max UDP packetsize
static final int MTU = 1500-8/*UDP packet header size*/;
// Enable this to test random TCP fails on open or write
static final Random RANDOM_TCP_DROP = null; //new Random();
static final java.nio.charset.Charset UTF_8 = java.nio.charset.Charset.forName("UTF-8");
/** Incoming UDP request. Make a read-mode AutoBuffer from the open Channel,
* figure the originating H2ONode from the first few bytes read. */
AutoBuffer( DatagramChannel sock ) throws IOException {
_chan = null;
_bb = BBP_SML.make(); // Get a small / UDP-sized ByteBuffer
_read = true; // Reading by default
_firstPage = true;
// Read a packet; can get H2ONode from 'sad'?
Inet4Address addr = null;
SocketAddress sad = sock.receive(_bb);
if( sad instanceof InetSocketAddress ) {
InetAddress address = ((InetSocketAddress) sad).getAddress();
if( address instanceof Inet4Address ) {
addr = (Inet4Address) address;
}
}
_size = _bb.position();
_bb.flip(); // Set limit=amount read, and position==0
if( addr == null ) throw new RuntimeException("Unhandled socket type: " + sad);
// Read Inet from socket, port from the stream, figure out H2ONode
_h2o = H2ONode.intern(addr, getPort());
_firstPage = true;
assert _h2o != null;
_persist = 0; // No persistance
}
/** Incoming TCP request. Make a read-mode AutoBuffer from the open Channel,
* figure the originating H2ONode from the first few bytes read.
*
* remoteAddress set to null means that the communication is originating from non-h2o node, non-null value
* represents the case where the communication is coming from h2o node.
* */
AutoBuffer( ByteChannel sock, InetAddress remoteAddress ) throws IOException {
_chan = sock;
raisePriority(); // Make TCP priority high
_bb = BBP_BIG.make(); // Get a big / TPC-sized ByteBuffer
_bb.flip();
_read = true; // Reading by default
_firstPage = true;
// Read Inet from socket, port from the stream, figure out H2ONode
if(remoteAddress!=null) {
_h2o = H2ONode.intern(remoteAddress, getPort());
}else{
// In case the communication originates from non-h2o node, we set _h2o node to null.
// It is done for 2 reasons:
// - H2ONode.intern creates a new thread and if there's a lot of connections
// from non-h2o environment, it could end up with too many open files exception.
// - H2OIntern also reads port (getPort()) and additional information which we do not send
// in communication originating from non-h2o nodes
_h2o = null;
}
_firstPage = true; // Yes, must reset this.
_time_start_ms = System.currentTimeMillis();
_persist = Value.TCP;
}
/** Make an AutoBuffer to write to an H2ONode. Requests for full buffer will
* open a TCP socket and roll through writing to the target. Smaller
* requests will send via UDP. Small requests get ordered by priority, so
* that e.g. NACK and ACKACK messages have priority over most anything else.
* This helps in UDP floods to shut down flooding senders. */
private byte _msg_priority;
AutoBuffer( H2ONode h2o, byte priority ) {
// If UDP goes via UDP, we write into a DBB up front - because we plan on
// sending it out via a Datagram socket send call. If UDP goes via batched
// TCP, we write into a HBB up front, because this will be copied again
// into a large outgoing buffer.
_bb = H2O.ARGS.useUDP // Actually use UDP?
? BBP_SML.make() // Make DirectByteBuffers to start with
: ByteBuffer.wrap(new byte[16]).order(ByteOrder.nativeOrder());
_chan = null; // Channel made lazily only if we write alot
_h2o = h2o;
_read = false; // Writing by default
_firstPage = true; // Filling first page
assert _h2o != null;
_time_start_ms = System.currentTimeMillis();
_persist = Value.TCP;
_msg_priority = priority;
}
/** Spill-to/from-disk request. */
public AutoBuffer( FileChannel fc, boolean read, byte persist ) {
_bb = BBP_BIG.make(); // Get a big / TPC-sized ByteBuffer
_chan = fc; // Write to read/write
_h2o = null; // File Channels never have an _h2o
_read = read; // Mostly assert reading vs writing
if( read ) _bb.flip();
_time_start_ms = System.currentTimeMillis();
_persist = persist; // One of Value.ICE, NFS, S3, HDFS
}
/** Read from UDP multicast. Same as the byte[]-read variant, except there is an H2O. */
AutoBuffer( DatagramPacket pack ) {
_size = pack.getLength();
_bb = ByteBuffer.wrap(pack.getData(), 0, pack.getLength()).order(ByteOrder.nativeOrder());
_bb.position(0);
_read = true;
_firstPage = true;
_chan = null;
_h2o = H2ONode.intern(pack.getAddress(), getPort());
_persist = 0; // No persistance
}
/** Read from a UDP_TCP buffer; could be in the middle of a large buffer */
AutoBuffer( H2ONode h2o, byte[] buf, int off, int len ) {
assert buf != null : "null fed to ByteBuffer.wrap";
_h2o = h2o;
_bb = ByteBuffer.wrap(buf,off,len).order(ByteOrder.nativeOrder());
_chan = null;
_read = true;
_firstPage = true;
_persist = 0; // No persistance
_size = len;
}
/** Read from a fixed byte[]; should not be closed. */
public AutoBuffer( byte[] buf ) { this(null,buf,0, buf.length); }
/** Write to an ever-expanding byte[]. Instead of calling {@link #close()},
* call {@link #buf()} to retrieve the final byte[]. */
public AutoBuffer( ) {
_bb = ByteBuffer.wrap(new byte[16]).order(ByteOrder.nativeOrder());
_chan = null;
_h2o = null;
_read = false;
_firstPage = true;
_persist = 0; // No persistance
}
/** Write to a known sized byte[]. Instead of calling close(), call
* {@link #bufClose()} to retrieve the final byte[]. */
public AutoBuffer( int len ) {
_bb = ByteBuffer.wrap(MemoryManager.malloc1(len)).order(ByteOrder.nativeOrder());
_chan = null;
_h2o = null;
_read = false;
_firstPage = true;
_persist = 0; // No persistance
}
/** Write to a persistent Stream, including all TypeMap info to allow later
* reloading (by the same exact rev of H2O). */
public AutoBuffer( OutputStream os, boolean persist ) {
_bb = ByteBuffer.wrap(MemoryManager.malloc1(BBP_BIG._size)).order(ByteOrder.nativeOrder());
_read = false;
_chan = Channels.newChannel(os);
_h2o = null;
_firstPage = true;
_persist = 0;
if( persist ) put1(0x1C).put1(0xED).putStr(H2O.ABV.projectVersion()).putAStr(TypeMap.CLAZZES);
else put1(0);
}
/** Read from a persistent Stream (including all TypeMap info) into same
* exact rev of H2O). */
public AutoBuffer( InputStream is ) {
_chan = null;
_h2o = null;
_firstPage = true;
_persist = 0;
_read = true;
_bb = ByteBuffer.wrap(MemoryManager.malloc1(BBP_BIG._size)).order(ByteOrder.nativeOrder());
_bb.flip();
_is = is;
int b = get1U();
if( b==0 ) return; // No persistence info
int magic = get1U();
if( b!=0x1C || magic != 0xED ) throw new IllegalArgumentException("Missing magic number 0x1CED at stream start");
checkVersion(getStr());
String[] typeMap = getAStr();
_typeMap = new short[typeMap.length];
for( int i=0; i<typeMap.length; i++ )
_typeMap[i] = (short)(typeMap[i]==null ? 0 : TypeMap.onIce(typeMap[i]));
}
private void checkVersion(String version) {
final boolean ignoreVersion = Boolean.getBoolean(H2O_SYSTEM_SERIALIZATION_IGNORE_VERSION);
if (! version.equals(H2O.ABV.projectVersion())) {
String msg = "Found version "+version+", but running version "+H2O.ABV.projectVersion();
if (ignoreVersion)
Log.warn("Loading data from a different version! " + msg);
else
throw new IllegalArgumentException(msg);
}
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("[AB ").append(_read ? "read " : "write ");
sb.append(_firstPage?"first ":"2nd ").append(_h2o);
sb.append(" ").append(Value.nameOfPersist(_persist));
if( _bb != null ) sb.append(" 0 <= ").append(_bb.position()).append(" <= ").append(_bb.limit());
if( _bb != null ) sb.append(" <= ").append(_bb.capacity());
return sb.append("]").toString();
}
// Fetch a DBB from an object pool... they are fairly expensive to make
// because a native call is required to get the backing memory. I've
// included BB count tracking code to help track leaks. As of 12/17/2012 the
// leaks are under control, but figure this may happen again so keeping these
// counters around.
//
// We use 2 pool sizes: lots of small UDP packet-sized buffers and fewer
// larger TCP-sized buffers.
private static final boolean DEBUG = Boolean.getBoolean("h2o.find-ByteBuffer-leaks");
private static long HWM=0;
static class BBPool {
long _made, _cached, _freed;
long _numer, _denom, _goal=4*H2O.NUMCPUS, _lastGoal;
final ArrayList<ByteBuffer> _bbs = new ArrayList<>();
final int _size; // Big or small size of ByteBuffers
BBPool( int sz) { _size=sz; }
private ByteBuffer stats( ByteBuffer bb ) {
if( !DEBUG ) return bb;
if( ((_made+_cached)&255)!=255 ) return bb; // Filter printing to 1 in 256
long now = System.currentTimeMillis();
if( now < HWM ) return bb;
HWM = now+1000;
water.util.SB sb = new water.util.SB();
sb.p("BB").p(this==BBP_BIG?1:0).p(" made=").p(_made).p(" -freed=").p(_freed).p(", cache hit=").p(_cached).p(" ratio=").p(_numer/_denom).p(", goal=").p(_goal).p(" cache size=").p(_bbs.size()).nl();
for( int i=0; i<H2O.MAX_PRIORITY; i++ ) {
int x = H2O.getWrkQueueSize(i);
if( x > 0 ) sb.p('Q').p(i).p('=').p(x).p(' ');
}
Log.warn(sb.nl().toString());
return bb;
}
ByteBuffer make() {
while( true ) { // Repeat loop for DBB OutOfMemory errors
ByteBuffer bb=null;
synchronized(_bbs) {
int sz = _bbs.size();
if( sz > 0 ) { bb = _bbs.remove(sz-1); _cached++; _numer++; }
}
if( bb != null ) return stats(bb);
// Cache empty; go get one from C/Native memory
try {
bb = ByteBuffer.allocateDirect(_size).order(ByteOrder.nativeOrder());
synchronized(this) { _made++; _denom++; _goal = Math.max(_goal,_made-_freed); _lastGoal=System.nanoTime(); } // Goal was too low, raise it
return stats(bb);
} catch( OutOfMemoryError oome ) {
// java.lang.OutOfMemoryError: Direct buffer memory
if( !"Direct buffer memory".equals(oome.getMessage()) ) throw oome;
System.out.println("OOM DBB - Sleeping & retrying");
try { Thread.sleep(100); } catch( InterruptedException ignore ) { }
}
}
}
void free(ByteBuffer bb) {
// Heuristic: keep the ratio of BB's made to cache-hits at a fixed level.
// Free to GC if ratio is high, free to internal cache if low.
long ratio = _numer/(_denom+1);
synchronized(_bbs) {
if( ratio < 100 || _bbs.size() < _goal ) { // low hit/miss ratio or below goal
bb.clear(); // Clear-before-add
_bbs.add(bb);
} else _freed++; // Toss the extras (above goal & ratio)
long now = System.nanoTime();
if( now-_lastGoal > 1000000000L ) { // Once/sec, drop goal by 10%
_lastGoal = now;
if( ratio > 110 ) // If ratio is really high, lower goal
_goal=Math.max(4*H2O.NUMCPUS,(long)(_goal*0.99));
// Once/sec, lower numer/denom... means more recent activity outweighs really old stuff
long denom = (long) (0.99 * _denom); // Proposed reduction
if( denom > 10 ) { // Keep a little precision
_numer = (long) (0.99 * _numer); // Keep ratio between made & cached the same
_denom = denom; // ... by lowering both by 10%
}
}
}
}
static int FREE( ByteBuffer bb ) {
if(bb.isDirect())
(bb.capacity()==BBP_BIG._size ? BBP_BIG : BBP_SML).free(bb);
return 0; // Flow coding
}
}
static BBPool BBP_SML = new BBPool( 2*1024); // Bytebuffer "common small size", for UDP
static BBPool BBP_BIG = new BBPool(64*1024); // Bytebuffer "common big size", for TCP
public static int TCP_BUF_SIZ = BBP_BIG._size;
private int bbFree() {
if(_bb != null && _bb.isDirect())
BBPool.FREE(_bb);
_bb = null;
return 0; // Flow-coding
}
// You thought TCP was a reliable protocol, right? WRONG! Fails 100% of the
// time under heavy network load. Connection-reset-by-peer & connection
// timeouts abound, even after a socket open and after a 1st successful
// ByteBuffer write. It *appears* that the reader is unaware that a writer
// was told "go ahead and write" by the TCP stack, so all these fails are
// only on the writer-side.
public static class AutoBufferException extends RuntimeException {
public final IOException _ioe;
AutoBufferException( IOException ioe ) { _ioe = ioe; }
}
// For reads, just assert all was read and close and release resources.
// (release ByteBuffer back to the common pool). For writes, force any final
// bytes out. If the write is to an H2ONode and is short, send via UDP.
// AutoBuffer close calls order; i.e. a reader close() will block until the
// writer does a close().
public final int close() {
//if( _size > 2048 ) System.out.println("Z="+_zeros+" / "+_size+", A="+_arys);
if( isClosed() ) return 0; // Already closed
assert _h2o != null || _chan != null || _is != null; // Byte-array backed should not be closed
try {
if( _chan == null ) { // No channel?
if( _read ) {
if( _is != null ) _is.close();
return 0;
} else { // Write
// For small-packet write, send via UDP. Since nothing is sent until
// now, this close() call trivially orders - since the reader will not
// even start (much less close()) until this packet is sent.
if( _bb.position() < MTU) return udpSend();
// oops - Big Write, switch to TCP and finish out there
}
}
// Force AutoBuffer 'close' calls to order; i.e. block readers until
// writers do a 'close' - by writing 1 more byte in the close-call which
// the reader will have to wait for.
if( hasTCP()) { // TCP connection?
try {
if( _read ) { // Reader?
int x = get1U(); // Read 1 more byte
assert x == 0xab : "AB.close instead of 0xab sentinel got "+x+", "+this;
assert _chan != null; // chan set by incoming reader, since we KNOW it is a TCP
// Write the reader-handshake-byte.
SocketChannelUtils.underlyingSocketChannel(_chan).socket().getOutputStream().write(0xcd);
// do not close actually reader socket; recycle it in TCPReader thread
} else { // Writer?
put1(0xab); // Write one-more byte ; might set _chan from null to not-null
sendPartial(); // Finish partial writes; might set _chan from null to not-null
assert _chan != null; // _chan is set not-null now!
// Read the writer-handshake-byte.
int x = SocketChannelUtils.underlyingSocketChannel(_chan).socket().getInputStream().read();
// either TCP con was dropped or other side closed connection without reading/confirming (e.g. task was cancelled).
if( x == -1 ) throw new IOException("Other side closed connection before handshake byte read");
assert x == 0xcd : "Handshake; writer expected a 0xcd from reader but got "+x;
}
} catch( IOException ioe ) {
try { _chan.close(); } catch( IOException ignore ) {} // Silently close
_chan = null; // No channel now, since i/o error
throw ioe; // Rethrow after close
} finally {
if( !_read ) _h2o.freeTCPSocket((ByteChannel) _chan); // Recycle writable TCP channel
restorePriority(); // And if we raised priority, lower it back
}
} else { // FileChannel
if( !_read ) sendPartial(); // Finish partial file-system writes
_chan.close();
_chan = null; // Closed file channel
}
} catch( IOException e ) { // Dunno how to handle so crash-n-burn
throw new AutoBufferException(e);
} finally {
bbFree();
_time_close_ms = System.currentTimeMillis();
// TimeLine.record_IOclose(this,_persist); // Profile AutoBuffer connections
assert isClosed();
}
return 0;
}
// Need a sock for a big read or write operation.
// See if we got one already, else open a new socket.
private void tcpOpen() throws IOException {
assert _firstPage && _bb.limit() >= 1+2+4; // At least something written
assert _chan == null;
// assert _bb.position()==0;
_chan = _h2o.getTCPSocket();
raisePriority();
}
// Just close the channel here without reading anything. Without the task
// object at hand we do not know what (how many bytes) should we read from
// the channel. And since the other side will try to read confirmation from
// us before closing the channel, we can not read till the end. So we just
// close the channel and let the other side to deal with it and figure out
// the task has been cancelled (still sending ack ack back).
void drainClose() {
if( isClosed() ) return; // Already closed
final Channel chan = _chan; // Read before closing
assert _h2o != null || chan != null; // Byte-array backed should not be closed
if( chan != null ) { // Channel assumed sick from prior IOException
try { chan.close(); } catch( IOException ignore ) {} // Silently close
_chan = null; // No channel now!
if( !_read && SocketChannelUtils.isSocketChannel(chan)) _h2o.freeTCPSocket((ByteChannel) chan); // Recycle writable TCP channel
}
restorePriority(); // And if we raised priority, lower it back
bbFree();
_time_close_ms = System.currentTimeMillis();
// TimeLine.record_IOclose(this,_persist); // Profile AutoBuffer connections
assert isClosed();
}
// True if we opened a TCP channel, or will open one to close-and-send
boolean hasTCP() { assert !isClosed(); return SocketChannelUtils.isSocketChannel(_chan) || (_h2o!=null && _bb.position() >= MTU); }
// Size in bytes sent, after a close()
int size() { return _size; }
//int zeros() { return _zeros; }
public int position () { return _bb.position(); }
public AutoBuffer position(int p) {_bb.position(p); return this;}
/** Skip over some bytes in the byte buffer. Caller is responsible for not
* reading off end of the bytebuffer; generally this is easy for
* array-backed autobuffers and difficult for i/o-backed bytebuffers. */
public void skip(int skip) { _bb.position(_bb.position()+skip); }
// Return byte[] from a writable AutoBuffer
public final byte[] buf() {
assert _h2o==null && _chan==null && !_read && !_bb.isDirect();
return MemoryManager.arrayCopyOfRange(_bb.array(), _bb.arrayOffset(), _bb.position());
}
public final byte[] bufClose() {
byte[] res = _bb.array();
bbFree();
return res;
}
// For TCP sockets ONLY, raise the thread priority. We assume we are
// blocking other Nodes with our network I/O, so try to get the I/O
// over with.
private void raisePriority() {
if(_oldPrior == -1){
assert SocketChannelUtils.isSocketChannel(_chan);
_oldPrior = Thread.currentThread().getPriority();
Thread.currentThread().setPriority(Thread.MAX_PRIORITY-1);
}
}
private void restorePriority() {
if( _oldPrior == -1 ) return;
Thread.currentThread().setPriority(_oldPrior);
_oldPrior = -1;
}
// Send via UDP socket. Unlike eg TCP sockets, we only need one for sending
// so we keep a global one. Also, we do not close it when done, and we do
// not connect it up-front to a target - but send the entire packet right now.
private int udpSend() throws IOException {
assert _chan == null;
TimeLine.record_send(this,false);
_size = _bb.position();
assert _size < AutoBuffer.BBP_SML._size;
_bb.flip(); // Flip for sending
if( _h2o==H2O.SELF ) { // SELF-send is the multi-cast signal
water.init.NetworkInit.multicast(_bb, _msg_priority);
} else { // Else single-cast send
if(H2O.ARGS.useUDP) // Send via UDP directly
water.init.NetworkInit.CLOUD_DGRAM.send(_bb, _h2o._key);
else // Send via bulk TCP
_h2o.sendMessage(_bb, _msg_priority);
}
return 0; // Flow-coding
}
// Flip to write-mode
AutoBuffer clearForWriting(byte priority) {
assert _read;
_read = false;
_msg_priority = priority;
_bb.clear();
_firstPage = true;
return this;
}
// Flip to read-mode
public AutoBuffer flipForReading() {
assert !_read;
_read = true;
_bb.flip();
_firstPage = true;
return this;
}
/** Ensure the buffer has space for sz more bytes */
private ByteBuffer getSp( int sz ) { return sz > _bb.remaining() ? getImpl(sz) : _bb; }
/** Ensure buffer has at least sz bytes in it.
* - Also, set position just past this limit for future reading. */
private ByteBuffer getSz(int sz) {
assert _firstPage : "getSz() is only valid for early UDP bytes";
if( sz > _bb.limit() ) getImpl(sz);
_bb.position(sz);
return _bb;
}
private ByteBuffer getImpl( int sz ) {
assert _read : "Reading from a buffer in write mode";
_bb.compact(); // Move remaining unread bytes to start of buffer; prep for reading
// Its got to fit or we asked for too much
assert _bb.position()+sz <= _bb.capacity() : "("+_bb.position()+"+"+sz+" <= "+_bb.capacity()+")";
long ns = System.nanoTime();
while( _bb.position() < sz ) { // Read until we got enuf
try {
int res = readAnInt(); // Read more
// Readers are supposed to be strongly typed and read the exact expected bytes.
// However, if a TCP connection fails mid-read we'll get a short-read.
// This is indistinguishable from a mis-alignment between the writer and reader!
if( res <= 0 )
throw new AutoBufferException(new EOFException("Reading "+sz+" bytes, AB="+this));
if( _is != null ) _bb.position(_bb.position()+res); // Advance BB for Streams manually
_size += res; // What we read
} catch( IOException e ) { // Dunno how to handle so crash-n-burn
// Linux/Ubuntu message for a reset-channel
if( e.getMessage().equals("An existing connection was forcibly closed by the remote host") )
throw new AutoBufferException(e);
// Windows message for a reset-channel
if( e.getMessage().equals("An established connection was aborted by the software in your host machine") )
throw new AutoBufferException(e);
throw Log.throwErr(e);
}
}
_time_io_ns += (System.nanoTime()-ns);
_bb.flip(); // Prep for handing out bytes
//for( int i=0; i < _bb.limit(); i++ ) if( _bb.get(i)==0 ) _zeros++;
_firstPage = false; // First page of data is gone gone gone
return _bb;
}
private int readAnInt() throws IOException {
if (_is == null) return ((ReadableByteChannel) _chan).read(_bb);
final byte[] array = _bb.array();
final int position = _bb.position();
final int remaining = _bb.remaining();
try {
return _is.read(array, position, remaining);
} catch (IOException ioe) {
throw new IOException("Failed reading " + remaining + " bytes into buffer[" + array.length + "] at " + position + " from " + sourceName + " " + _is, ioe);
}
}
/** Put as needed to keep from overflowing the ByteBuffer. */
private ByteBuffer putSp( int sz ) {
assert !_read;
if (sz > _bb.remaining()) {
if ((_h2o == null && _chan == null) || (_bb.hasArray() && _bb.capacity() < BBP_BIG._size))
expandByteBuffer(sz);
else
sendPartial();
assert sz <= _bb.remaining();
}
return _bb;
}
  // Do something with partial results, because the ByteBuffer is full.
  // If we are doing I/O, ship the bytes we have now and flip the ByteBuffer.
  // Side effects: updates _size and _time_io_ns; may lazily open the TCP
  // socket; leaves _bb cleared and ready for more writes.
  private ByteBuffer sendPartial() {
    // Doing I/O with the full ByteBuffer - ship partial results
    _size += _bb.position();
    if( _chan == null )
      TimeLine.record_send(this, true);
    _bb.flip(); // Prep for writing.
    try {
      if( _chan == null )
        tcpOpen(); // This is a big operation. Open a TCP socket as-needed.
      //for( int i=0; i < _bb.limit(); i++ ) if( _bb.get(i)==0 ) _zeros++;
      long ns = System.nanoTime();
      // Loop: a channel write() may be short; keep writing until drained.
      while( _bb.hasRemaining() ) {
        ((WritableByteChannel) _chan).write(_bb);
        // Fault-injection hook for testing TCP failure recovery paths.
        if( RANDOM_TCP_DROP != null && SocketChannelUtils.isSocketChannel(_chan) && RANDOM_TCP_DROP.nextInt(100) == 0 )
          throw new IOException("Random TCP Write Fail");
      }
      _time_io_ns += (System.nanoTime()-ns);
    } catch( IOException e ) { // Some kind of TCP fail?
      // Change to an unchecked exception (so we don't have to annotate every
      // frick'n put1/put2/put4/read/write call). Retry & recovery happens at
      // a higher level. AutoBuffers are used for many things including e.g.
      // disk i/o & UDP writes; this exception only happens on a failed TCP
      // write - and we don't want to make the other AutoBuffer users have to
      // declare (and then ignore) this exception.
      throw new AutoBufferException(e);
    }
    _firstPage = false;
    _bb.clear();
    return _bb;
  }
  // Called when the byte buffer doesn't have enough room
  // If buffer is array backed, and the needed room is small,
  // increase the size of the backing array,
  // otherwise dump into a large direct buffer
  // @param sizeHint additional bytes the caller wants to write
  // @return _bb, possibly replaced by a larger buffer (position preserved)
  private ByteBuffer expandByteBuffer(int sizeHint) {
    // Compute total capacity required; use long math to avoid int overflow.
    final long needed = (long) sizeHint - _bb.remaining() + _bb.capacity(); // Max needed is 2G
    if ((_h2o==null && _chan == null) || (_bb.hasArray() && needed < MTU)) {
      if (needed > MAX_ARRAY_SIZE) {
        throw new IllegalArgumentException("Cannot allocate more than 2GB array: sizeHint="+sizeHint+", "
            + "needed="+needed
            + ", bb.remaining()=" + _bb.remaining() + ", bb.capacity()="+_bb.capacity());
      }
      byte[] ary = _bb.array();
      // just get twice what is currently needed but not more then max array size (2G)
      // Be careful not to overflow because of integer math!
      int newLen = (int) Math.min(1L << (water.util.MathUtils.log2(needed)+1), MAX_ARRAY_SIZE);
      int oldpos = _bb.position();
      // Copy the old contents into a bigger heap array, keeping position intact.
      _bb = ByteBuffer.wrap(MemoryManager.arrayCopyOfRange(ary,0,newLen),oldpos,newLen-oldpos)
          .order(ByteOrder.nativeOrder());
    } else if (_bb.capacity() != BBP_BIG._size) { //avoid expanding existing BBP items
      // Switch to a pooled "big" buffer and copy the current contents over.
      int oldPos = _bb.position();
      _bb.flip();
      _bb = BBP_BIG.make().put(_bb);
      _bb.position(oldPos);
    }
    return _bb;
  }
@SuppressWarnings("unused") public String getStr(int off, int len) {
return new String(_bb.array(), _bb.arrayOffset()+off, len, UTF_8);
}
  // -----------------------------------------------
  // Utility functions to get various Java primitives
  // Relative reads: getSp(n) guarantees n readable bytes are buffered
  // (refilling from the channel/stream as needed) before the typed get.
  @SuppressWarnings("unused") public boolean getZ() { return get1()!=0; }
  @SuppressWarnings("unused") public byte get1 () { return getSp(1).get (); }
  @SuppressWarnings("unused") public int get1U() { return get1() & 0xFF; }
  @SuppressWarnings("unused") public char get2 () { return getSp(2).getChar (); }
  @SuppressWarnings("unused") public short get2s () { return getSp(2).getShort (); }
  // 3-byte value, assembled low byte first (zero-extended to 0..2^24-1).
  // NOTE(review): put3 accepts negative values but get3 never sign-extends —
  // confirm callers only round-trip non-negative 3-byte values.
  @SuppressWarnings("unused") public int get3 () { getSp(3); return get1U() | get1U() << 8 | get1U() << 16; }
  @SuppressWarnings("unused") public int get4 () { return getSp(4).getInt (); }
  @SuppressWarnings("unused") public float get4f() { return getSp(4).getFloat (); }
  @SuppressWarnings("unused") public long get8 () { return getSp(8).getLong (); }
  @SuppressWarnings("unused") public double get8d() { return getSp(8).getDouble(); }

  // Absolute reads at a fixed buffer offset; do NOT advance the position.
  int get1U(int off) { return _bb.get (off)&0xFF; }
  int get4 (int off) { return _bb.getInt (off); }
  long get8 (int off) { return _bb.getLong(off); }
  // Primitive writers.  Each putSp(n) reserves n writable bytes (growing or
  // flushing the buffer as needed) before the typed put.
  @SuppressWarnings("unused") public AutoBuffer putZ (boolean b){ return put1(b?1:0); }
  // put1 takes an int so callers may pass signed bytes or unsigned 0..255.
  @SuppressWarnings("unused") public AutoBuffer put1 ( int b) { assert b >= -128 && b <= 255 : ""+b+" is not a byte";
                                                                putSp(1).put((byte)b); return this; }
  @SuppressWarnings("unused") public AutoBuffer put2 ( char c) { putSp(2).putChar (c); return this; }
  @SuppressWarnings("unused") public AutoBuffer put2 ( short s) { putSp(2).putShort (s); return this; }
  @SuppressWarnings("unused") public AutoBuffer put2s ( short s) { return put2(s); }
  // 3-byte value, low byte first; asserts the value fits in 24 bits (signed range).
  @SuppressWarnings("unused") public AutoBuffer put3( int x ) { assert (-1<<24) <= x && x < (1<<24);
                                                                return put1((x)&0xFF).put1((x >> 8)&0xFF).put1(x >> 16); }
  @SuppressWarnings("unused") public AutoBuffer put4 ( int i) { putSp(4).putInt (i); return this; }
  @SuppressWarnings("unused") public AutoBuffer put4f( float f) { putSp(4).putFloat (f); return this; }
  @SuppressWarnings("unused") public AutoBuffer put8 ( long l) { putSp(8).putLong (l); return this; }
  @SuppressWarnings("unused") public AutoBuffer put8d(double d) { putSp(8).putDouble(d); return this; }
  /** Write a Freezable: its TypeMap id (compressed int) followed by its body.
   *  A null object is written as the single id TypeMap.NULL. */
  public AutoBuffer put(Freezable f) {
    if( f == null ) return putInt(TypeMap.NULL);
    assert f.frozenType() > 0 : "No TypeMap for "+f.getClass().getName();
    putInt(f.frozenType());
    return f.write(this);
  }
  /** Read a Freezable written by {@link #put(Freezable)}; returns null for the
   *  NULL id.  Stream reads remap ids through _typeMap (ids differ per writer). */
  public <T extends Freezable> T get() {
    int id = getInt();
    if( id == TypeMap.NULL ) return null;
    if( _is!=null ) id = _typeMap[id];
    return (T)TypeMap.newFreezable(id).read(this);
  }
  /** Typed variant of {@link #get()}; asserts the decoded object is a {@code tc}. */
  public <T extends Freezable> T get(Class<T> tc) {
    int id = getInt();
    if( id == TypeMap.NULL ) return null;
    if( _is!=null ) id = _typeMap[id];
    assert tc.isInstance(TypeMap.theFreezable(id)):tc.getName() + " != " + TypeMap.theFreezable(id).getClass().getName() + ", id = " + id;
    return (T)TypeMap.newFreezable(id).read(this);
  }
  // Write Key's target IFF the Key is not null; target can be null.
  public AutoBuffer putKey(Key k) {
    if( k==null ) return this; // Key is null ==> write nothing
    Keyed kd = DKV.getGet(k);
    put(kd);
    return kd == null ? this : kd.writeAll_impl(this);
  }
  // Read a value written by putKey (only when the Key was non-null).
  public Keyed getKey(Key k, Futures fs) {
    return k==null ? null : getKey(fs); // Key is null ==> read nothing
  }
  // Deserialize a Keyed, install it in the DKV, then read its dependents.
  public Keyed getKey(Futures fs) {
    Keyed kd = get(Keyed.class);
    if( kd == null ) return null;
    DKV.put(kd,fs);
    return kd.readAll_impl(this,fs);
  }
  // Put a (compressed) integer. Specifically values in the range -1 to ~250
  // will take 1 byte, values near a Short will take 1+2 bytes, values near an
  // Int will take 1+4 bytes, and bigger values 1+8 bytes. This compression is
  // optimized for small integers (including -1 which is often used as a "array
  // is null" flag when passing the array length).
  // Wire format: byte 1..253 encodes value-1; tag 255 = following short;
  // tag 254 = following int.  (Do not change: this is the on-wire protocol.)
  public AutoBuffer putInt(int x) {
    if( 0 <= (x+1)&& (x+1) <= 253 ) return put1(x+1);
    if( Short.MIN_VALUE <= x && x <= Short.MAX_VALUE ) return put1(255).put2((short)x);
    return put1(254).put4(x);
  }
  // Get a (compressed) integer. See above for the compression strategy and reasoning.
  int getInt( ) {
    int x = get1U();
    if( x <= 253 ) return x-1;
    if( x==255 ) return (short)get2(); // cast restores the sign of the short payload
    assert x==254;
    return get4();
  }
  // Put a zero-compressed array. Compression is:
  // If null : putInt(-1)
  // Else
  // putInt(# of leading nulls)
  // putInt(# of non-nulls)
  // If # of non-nulls is > 0, putInt( # of trailing nulls)
  // Returns (leading zeros << 32 | middle non-null count) so callers can
  // iterate only the non-null middle section.
  long putZA( Object[] A ) {
    if( A==null ) { putInt(-1); return 0; }
    int x=0; for( ; x<A.length; x++ ) if( A[x ]!=null ) break;
    int y=A.length; for( ; y>x; y-- ) if( A[y-1]!=null ) break;
    putInt(x); // Leading zeros to skip
    putInt(y-x); // Mixed non-zero guts in middle
    if( y > x ) // If any trailing nulls
      putInt(A.length-y); // Trailing zeros
    return ((long)x<<32)|(y-x); // Return both leading zeros, and middle non-zeros
  }
  // Get the lengths of a zero-compressed array.
  // Returns -1 if null.
  // Returns a long of (leading zeros | middle non-zeros).
  // If there are non-zeros, caller has to read the trailing zero-length.
  long getZA( ) {
    int x=getInt(); // Length of leading zeros
    if( x == -1 ) return -1; // or a null
    int nz=getInt(); // Non-zero in the middle
    return ((long)x<<32)|(long)nz; // Return both ints
  }
  // TODO: untested. . .
  // Zero-compressed array of enums; each non-null element is one ordinal byte.
  @SuppressWarnings("unused")
  public AutoBuffer putAEnum(Enum[] enums) {
    //_arys++;
    long xy = putZA(enums);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putEnum(enums[i]);
    return this;
  }
  // Inverse of putAEnum; 'values' supplies the enum constants to index by ordinal.
  @SuppressWarnings("unused")
  public <E extends Enum> E[] getAEnum(E[] values) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    E[] ts = (E[]) Array.newInstance(values.getClass().getComponentType(), x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getEnum(values);
    return ts;
  }
  // Zero-compressed arrays of Freezables, 1- to 3-dimensional.  All follow the
  // same pattern: putZA/getZA trims leading/trailing nulls, then only the
  // non-null middle section [x, x+y) is serialized element-by-element.
  @SuppressWarnings("unused")
  public AutoBuffer putA(Freezable[] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) put(fs[i]);
    return this;
  }
  public AutoBuffer putAA(Freezable[][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA(fs[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAAA(Freezable[][][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA(fs[i]);
    return this;
  }
  // Readers: rebuild the full array (leading nulls + middle + trailing nulls).
  public <T extends Freezable> T[] getA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    T[] ts = (T[]) Array.newInstance(tc, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = get(tc);
    return ts;
  }
  public <T extends Freezable> T[][] getAA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    Class<T[]> tcA = (Class<T[]>) Array.newInstance(tc, 0).getClass();
    T[][] ts = (T[][]) Array.newInstance(tcA, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getA(tc);
    return ts;
  }
  @SuppressWarnings("unused") public <T extends Freezable> T[][][] getAAA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    Class<T[] > tcA = (Class<T[] >) Array.newInstance(tc , 0).getClass();
    Class<T[][]> tcAA = (Class<T[][]>) Array.newInstance(tcA, 0).getClass();
    T[][][] ts = (T[][][]) Array.newInstance(tcAA, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getAA(tc);
    return ts;
  }
  // Zero-compressed arrays of Strings (1-D and 2-D), same x/y/z layout as
  // the Freezable-array writers above.
  public AutoBuffer putAStr(String[] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putStr(fs[i]);
    return this;
  }
  public String[] getAStr() {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    String[] ts = new String[x+y+z];
    for( int i = x; i < x+y; ++i ) ts[i] = getStr();
    return ts;
  }
  @SuppressWarnings("unused") public AutoBuffer putAAStr(String[][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAStr(fs[i]);
    return this;
  }
  @SuppressWarnings("unused") public String[][] getAAStr() {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    String[][] ts = new String[x+y+z][];
    for( int i = x; i < x+y; ++i ) ts[i] = getAStr();
    return ts;
  }
// Read the smaller of _bb.remaining() and len into buf.
// Return bytes read, which could be zero.
int read( byte[] buf, int off, int len ) {
int sz = Math.min(_bb.remaining(),len);
_bb.get(buf,off,sz);
return sz;
}
  // -----------------------------------------------
  // Utility functions to handle common UDP packet tasks.
  // These use getSz (ensure-first-N-bytes) plus absolute reads, so they do
  // not advance the buffer position.
  // Get the 1st control byte
  int getCtrl( ) { return getSz(1).get(0)&0xFF; }
  // Get the port in next 2 bytes
  int getPort( ) { return getSz(1+2).getChar(1); }
  // Get the task# in the next 4 bytes
  int getTask( ) { return getSz(1+2+4).getInt(1+2); }
  // Get the flag in the next 1 byte
  int getFlag( ) { return getSz(1+2+4+1).get(1+2+4); }
  /**
   * Write UDP into the ByteBuffer with custom sender's port number
   *
   * This method sets the ctrl byte and sender port (the task number, if any,
   * is appended by the putTask variants below).
   * Ready to write more bytes afterwards
   *
   * @param type type of the UDP datagram
   * @param senderPort port of the sender of the datagram
   */
  AutoBuffer putUdp(UDP.udp type, int senderPort){
    assert _bb.position() == 0;
    putSp(_bb.position()+1+2);
    _bb.put ((byte)type.ordinal());
    _bb.putChar((char)senderPort );
    return this;
  }
  /**
   * Write UDP into the ByteBuffer with the current node as the sender.
   *
   * This method sets the ctrl byte and sender port.
   * Ready to write more bytes afterwards
   *
   * @param type type of the UDP datagram
   */
  AutoBuffer putUdp (UDP.udp type) {
    return putUdp(type, H2O.H2O_PORT); // Outgoing port is always the sender's (me) port
  }
  // ctrl + port + 4-byte task number.
  AutoBuffer putTask(UDP.udp type, int tasknum) {
    return putUdp(type).put4(tasknum);
  }
  // Same header layout, but with a raw ctrl byte instead of a UDP.udp type.
  AutoBuffer putTask(int ctrl, int tasknum) {
    assert _bb.position() == 0;
    putSp(_bb.position()+1+2+4);
    _bb.put((byte)ctrl).putChar((char)H2O.H2O_PORT).putInt(tasknum);
    return this;
  }
  // -----------------------------------------------
  // Utility functions to read & write arrays
  // Boolean array: plain length prefix (-1 == null), then one byte per element.
  public boolean[] getAZ() {
    int len = getInt();
    if (len == -1) return null;
    boolean[] r = new boolean[len];
    for (int i=0;i<len;++i) r[i] = getZ();
    return r;
  }
  // Byte array: length prefix (-1 == null), then raw bytes.
  public byte[] getA1( ) {
    //_arys++;
    int len = getInt();
    return len == -1 ? null : getA1(len);
  }
  // Read exactly len raw bytes, refilling the buffer as needed.
  public byte[] getA1( int len ) {
    byte[] buf = MemoryManager.malloc1(len);
    int sofar = 0;
    while( sofar < len ) {
      int more = Math.min(_bb.remaining(), len - sofar);
      _bb.get(buf, sofar, more);
      sofar += more;
      if( sofar < len ) getSp(Math.min(_bb.capacity(), len-sofar));
    }
    return buf;
  }
  // Primitive-array readers.  Common pattern: view the ByteBuffer as a typed
  // buffer, bulk-copy what is available, advance _bb manually by the bytes
  // consumed, then refill via getSp and repeat.  The capacity()-1/-3/-7 bound
  // on the refill request presumably keeps the ask within one buffer while
  // leaving room for element alignment — NOTE(review): confirm intent.
  public short[] getA2( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    short[] buf = MemoryManager.malloc2(len);
    int sofar = 0;
    while( sofar < buf.length ) {
      ShortBuffer as = _bb.asShortBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*2);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-1, (len-sofar)*2));
    }
    return buf;
  }
  public int[] getA4( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    int[] buf = MemoryManager.malloc4(len);
    int sofar = 0;
    while( sofar < buf.length ) {
      IntBuffer as = _bb.asIntBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*4);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-3, (len-sofar)*4));
    }
    return buf;
  }
  public float[] getA4f( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    float[] buf = MemoryManager.malloc4f(len);
    int sofar = 0;
    while( sofar < buf.length ) {
      FloatBuffer as = _bb.asFloatBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*4);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-3, (len-sofar)*4));
    }
    return buf;
  }
  // Long arrays use the size-trimmed format of putA8: x/y/z section lengths
  // followed by a width tag (1,2,4,8) for how the middle section was shipped.
  public long[] getA8( ) {
    //_arys++;
    // Get the lengths of lead & trailing zero sections, and the non-zero
    // middle section.
    int x = getInt(); if( x == -1 ) return null;
    int y = getInt(); // Non-zero in the middle
    int z = y==0 ? 0 : getInt();// Trailing zeros
    long[] buf = MemoryManager.malloc8(x+y+z);
    switch( get1U() ) { // 1,2,4 or 8 for how the middle section is passed
    case 1: for( int i=x; i<x+y; i++ ) buf[i] = get1U(); return buf;
    case 2: for( int i=x; i<x+y; i++ ) buf[i] = (short)get2(); return buf;
    case 4: for( int i=x; i<x+y; i++ ) buf[i] = get4(); return buf;
    case 8: break;
    default: throw H2O.fail();
    }
    int sofar = x;
    while( sofar < x+y ) {
      LongBuffer as = _bb.asLongBuffer();
      int more = Math.min(as.remaining(), x+y - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*8);
      if( sofar < x+y ) getSp(Math.min(_bb.capacity()-7, (x+y-sofar)*8));
    }
    return buf;
  }
  public double[] getA8d( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    double[] buf = MemoryManager.malloc8d(len);
    int sofar = 0;
    while( sofar < len ) {
      DoubleBuffer as = _bb.asDoubleBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*8);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-7, (len-sofar)*8));
    }
    return buf;
  }
  // Nested primitive-array readers (2-D and 3-D).  All share the
  // zero-compressed outer layout: getZA yields leading-null count x and
  // non-null count y; trailing-null count z follows only when y > 0.
  @SuppressWarnings("unused")
  public byte[][] getAA1( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    byte[][] ary = new byte[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA1();
    return ary;
  }
  @SuppressWarnings("unused")
  public short[][] getAA2( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    short[][] ary = new short[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA2();
    return ary;
  }
  public int[][] getAA4( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    int[][] ary = new int[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA4();
    return ary;
  }
  @SuppressWarnings("unused") public float[][] getAA4f( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    float[][] ary = new float[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA4f();
    return ary;
  }
  public long[][] getAA8( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    long[][] ary = new long[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA8();
    return ary;
  }
  @SuppressWarnings("unused") public double[][] getAA8d( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    double[][] ary = new double[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA8d();
    return ary;
  }
  @SuppressWarnings("unused") public int[][][] getAAA4( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    int[][][] ary = new int[x+y+z][][];
    for( int i=x; i<x+y; i++ ) ary[i] = getAA4();
    return ary;
  }
  @SuppressWarnings("unused") public long[][][] getAAA8( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    long[][][] ary = new long[x+y+z][][];
    for( int i=x; i<x+y; i++ ) ary[i] = getAA8();
    return ary;
  }
  public double[][][] getAAA8d( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    double[][][] ary = new double[x+y+z][][];
    for( int i=x; i<x+y; i++ ) ary[i] = getAA8d();
    return ary;
  }
  // String: length prefix (-1 == null), then UTF-8 bytes.
  public String getStr( ) {
    int len = getInt();
    return len == -1 ? null : new String(getA1(len), UTF_8);
  }
  // Enum: single ordinal byte written by putEnum; -1 == null.
  // NOTE(review): get1 returns a signed byte, so ordinals > 127 would come
  // back negative and index out of bounds — assumes enums stay small; verify.
  public <E extends Enum> E getEnum(E[] values ) {
    int idx = get1();
    return idx == -1 ? null : values[idx];
  }
  // Boolean array: plain length prefix (-1 == null), one byte per element.
  public AutoBuffer putAZ( boolean[] ary ) {
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    for (boolean anAry : ary) putZ(anAry);
    return this;
  }
  // Byte array: length prefix (-1 == null), then raw bytes.
  public AutoBuffer putA1( byte[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    return putA1(ary,ary.length);
  }
  public AutoBuffer putA1( byte[] ary, int length ) { return putA1(ary,0,length); }
  // Write ary[sofar..length), flushing partial buffers over the wire as needed.
  public AutoBuffer putA1( byte[] ary, int sofar, int length ) {
    if (length - sofar > _bb.remaining()) expandByteBuffer(length-sofar);
    while( sofar < length ) {
      int len = Math.min(length - sofar, _bb.remaining());
      _bb.put(ary, sofar, len);
      sofar += len;
      if( sofar < length ) sendPartial();
    }
    return this;
  }
  // Primitive-array writers: length prefix, then bulk copies through a typed
  // view of the ByteBuffer, flushing partial results when the buffer fills.
  AutoBuffer putA2( short[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    if (ary.length*2 > _bb.remaining()) expandByteBuffer(ary.length*2);
    int sofar = 0;
    while( sofar < ary.length ) {
      ShortBuffer sb = _bb.asShortBuffer();
      int len = Math.min(ary.length - sofar, sb.remaining());
      sb.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + sb.position()*2);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  public AutoBuffer putA4( int[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    // Note: based on Brandon commit this should improve performance during parse (7d950d622ee3037555ecbab0e39404f8f0917652)
    if (ary.length*4 > _bb.remaining()) {
      expandByteBuffer(ary.length*4); // Try to expand BB buffer to fit input array
    }
    int sofar = 0;
    while( sofar < ary.length ) {
      IntBuffer ib = _bb.asIntBuffer();
      int len = Math.min(ary.length - sofar, ib.remaining());
      ib.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + ib.position()*4);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  // Long arrays: zero-compressed (x leading / y middle / z trailing) plus a
  // width tag (1,2,4,8) choosing the narrowest encoding that fits the middle.
  public AutoBuffer putA8( long[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    // Trim leading & trailing zeros. Pass along the length of leading &
    // trailing zero sections, and the non-zero section in the middle.
    int x=0; for( ; x<ary.length; x++ ) if( ary[x ]!=0 ) break;
    int y=ary.length; for( ; y>x; y-- ) if( ary[y-1]!=0 ) break;
    int nzlen = y-x;
    putInt(x);
    putInt(nzlen);
    if( nzlen > 0 ) // If any trailing nulls
      putInt(ary.length-y); // Trailing zeros
    // Size trim the NZ section: pass as bytes or shorts if possible.
    long min=Long.MAX_VALUE, max=Long.MIN_VALUE;
    for( int i=x; i<y; i++ ) { if( ary[i]<min ) min=ary[i]; if( ary[i]>max ) max=ary[i]; }
    if( 0 <= min && max < 256 ) { // Ship as unsigned bytes
      put1(1); for( int i=x; i<y; i++ ) put1((int)ary[i]);
      return this;
    }
    // NOTE(review): the short/int bounds are strict (max < MAX_VALUE), so an
    // array containing exactly Short.MAX_VALUE falls through to the wider
    // encoding — presumably deliberate conservatism; confirm before changing.
    if( Short.MIN_VALUE <= min && max < Short.MAX_VALUE ) { // Ship as shorts
      put1(2); for( int i=x; i<y; i++ ) put2((short)ary[i]);
      return this;
    }
    if( Integer.MIN_VALUE <= min && max < Integer.MAX_VALUE ) { // Ship as ints
      put1(4); for( int i=x; i<y; i++ ) put4((int)ary[i]);
      return this;
    }
    put1(8); // Ship as full longs
    int sofar = x;
    if ((y-sofar)*8 > _bb.remaining()) expandByteBuffer(ary.length*8);
    while( sofar < y ) {
      LongBuffer lb = _bb.asLongBuffer();
      int len = Math.min(y - sofar, lb.remaining());
      lb.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + lb.position() * 8);
      if( sofar < y ) sendPartial();
    }
    return this;
  }
  // Float/double arrays: plain length prefix, then bulk copies via typed views.
  public AutoBuffer putA4f( float[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    if (ary.length*4 > _bb.remaining()) expandByteBuffer(ary.length*4);
    int sofar = 0;
    while( sofar < ary.length ) {
      FloatBuffer fb = _bb.asFloatBuffer();
      int len = Math.min(ary.length - sofar, fb.remaining());
      fb.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + fb.position()*4);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  public AutoBuffer putA8d( double[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    if (ary.length*8 > _bb.remaining()) expandByteBuffer(ary.length*8);
    int sofar = 0;
    while( sofar < ary.length ) {
      DoubleBuffer db = _bb.asDoubleBuffer();
      int len = Math.min(ary.length - sofar, db.remaining());
      db.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + db.position()*8);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  // Nested primitive-array writers (2-D and 3-D), zero-compressed outer
  // dimension via putZA; only the non-null middle rows [x, x+y) are written.
  public AutoBuffer putAA1( byte[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA1(ary[i]);
    return this;
  }
  @SuppressWarnings("unused") AutoBuffer putAA2( short[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA2(ary[i]);
    return this;
  }
  public AutoBuffer putAA4( int[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA4(ary[i]);
    return this;
  }
  @SuppressWarnings("unused")
  public AutoBuffer putAA4f( float[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA4f(ary[i]);
    return this;
  }
  public AutoBuffer putAA8( long[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA8(ary[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAA8d( double[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA8d(ary[i]);
    return this;
  }
  public AutoBuffer putAAA4( int[][][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA4(ary[i]);
    return this;
  }
  public AutoBuffer putAAA8( long[][][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA8(ary[i]);
    return this;
  }
  public AutoBuffer putAAA8d( double[][][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA8d(ary[i]);
    return this;
  }
  // Put a String as bytes (not chars!)
  // Encoded as a length-prefixed byte array (UTF-8 via StringUtils.bytesOf).
  public AutoBuffer putStr( String s ) {
    if( s==null ) return putInt(-1);
    return putA1(StringUtils.bytesOf(s));
  }
  // Enum as a single ordinal byte; null encodes as -1 (see getEnum).
  @SuppressWarnings("unused") public AutoBuffer putEnum( Enum x ) {
    return put1(x==null ? -1 : x.ordinal());
  }
public static byte[] javaSerializeWritePojo(Object o) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ObjectOutputStream out = null;
try {
out = new ObjectOutputStream(bos);
out.writeObject(o);
out.close();
return bos.toByteArray();
} catch (IOException e) {
throw Log.throwErr(e);
}
}
public static Object javaSerializeReadPojo(byte [] bytes) {
try {
final ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
Object o = ois.readObject();
return o;
} catch (IOException e) {
String className = nameOfClass(bytes);
throw Log.throwErr(new RuntimeException("Failed to deserialize " + className, e));
} catch (ClassNotFoundException e) {
throw Log.throwErr(e);
}
}
static String nameOfClass(byte[] bytes) {
if (bytes == null) return "(null)";
if (bytes.length < 11) return "(no name)";
int nameSize = Math.min(40, Math.max(3, bytes[7]));
return new String(bytes, 8, Math.min(nameSize, bytes.length - 8));
}
  // ==========================================================================
  // Java Serializable objects
  // Note: These are heck-a-lot more expensive than their Freezable equivalents.
  // Encoded as a length-prefixed byte array of the Java-serialized form.
  @SuppressWarnings("unused") public AutoBuffer putSer( Object obj ) {
    if (obj == null) return putA1(null);
    return putA1(javaSerializeWritePojo(obj));
  }
  // Zero-compressed arrays of Java-Serializable objects (1-D to 3-D),
  // same x/y outer layout as the Freezable array writers.
  @SuppressWarnings("unused") public AutoBuffer putASer(Object[] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putSer(fs[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAASer(Object[][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putASer(fs[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAAASer(Object[][][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAASer(fs[i]);
    return this;
  }
  // Readers for the putSer/putASer family.
  @SuppressWarnings("unused") public Object getSer() {
    byte[] ba = getA1();
    return ba == null ? null : javaSerializeReadPojo(ba);
  }
  // Unchecked cast convenience; caller vouches for the runtime type.
  @SuppressWarnings("unused") public <T> T getSer(Class<T> tc) {
    return (T)getSer();
  }
  @SuppressWarnings("unused") public <T> T[] getASer(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    T[] ts = (T[]) Array.newInstance(tc, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getSer(tc);
    return ts;
  }
  @SuppressWarnings("unused") public <T> T[][] getAASer(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    T[][] ts = (T[][]) Array.newInstance(tc, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getASer(tc);
    return ts;
  }
  @SuppressWarnings("unused") public <T> T[][][] getAAASer(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    T[][][] ts = (T[][][]) Array.newInstance(tc, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getAASer(tc);
    return ts;
  }
  // ==========================================================================
  // JSON AutoBuffer printers
  public AutoBuffer putJNULL( ) { return put1('n').put1('u').put1('l').put1('l'); }
  // Escaped JSON string
  // Streams the UTF-8 bytes of s, inserting JSON escapes.  'off' tracks the
  // start of the not-yet-flushed run; each escape flushes [off, i) first.
  private AutoBuffer putJStr( String s ) {
    byte[] b = StringUtils.bytesOf(s);
    int off=0;
    for( int i=0; i<b.length; i++ ) {
      if( b[i] == '\\' || b[i] == '"') { // Double up backslashes, escape quotes
        putA1(b,off,i); // Everything so far (no backslashes)
        put1('\\'); // The extra backslash
        off=i; // Advance the "so far" variable
        // Note: no 'continue' here — off stays at i so the escaped character
        // itself is emitted by the next flush; the ifs below cannot match it.
      }
      // Handle remaining special cases in JSON
      // if( b[i] == '/' ) { putA1(b,off,i); put1('\\'); put1('/'); off=i+1; continue;}
      if( b[i] == '\b' ) { putA1(b,off,i); put1('\\'); put1('b'); off=i+1; continue;}
      if( b[i] == '\f' ) { putA1(b,off,i); put1('\\'); put1('f'); off=i+1; continue;}
      if( b[i] == '\n' ) { putA1(b,off,i); put1('\\'); put1('n'); off=i+1; continue;}
      if( b[i] == '\r' ) { putA1(b,off,i); put1('\\'); put1('r'); off=i+1; continue;}
      if( b[i] == '\t' ) { putA1(b,off,i); put1('\\'); put1('t'); off=i+1; continue;}
      // ASCII Control characters
      if( b[i] == 127 ) { putA1(b,off,i); put1('\\'); put1('u'); put1('0'); put1('0'); put1('7'); put1('f'); off=i+1; continue;}
      if( b[i] >= 0 && b[i] < 32 ) {
        // Generic \ u XXXX escape: zero-pad, then emit hex digits reversed
        // back into most-significant-first order.
        String hexStr = Integer.toHexString(b[i]);
        putA1(b, off, i); put1('\\'); put1('u');
        for (int j = 0; j < 4 - hexStr.length(); j++) put1('0');
        for (int j = 0; j < hexStr.length(); j++) put1(hexStr.charAt(hexStr.length()-j-1));
        off=i+1;
      }
    }
    return putA1(b,off,b.length); // Flush the final unescaped run
  }
  // JSON string helpers.  "Unquoted" emits the escaped body only; putJSONName
  // wraps it in double quotes; putJSONStr additionally maps null -> JSON null.
  public AutoBuffer putJSONStrUnquoted ( String s ) { return s==null ? putJNULL() : putJStr(s); }
  public AutoBuffer putJSONStrUnquoted ( String name, String s ) { return s==null ? putJSONStr(name).put1(':').putJNULL() : putJSONStr(name).put1(':').putJStr(s); }
  public AutoBuffer putJSONName( String s ) { return put1('"').putJStr(s).put1('"'); }
  public AutoBuffer putJSONStr ( String s ) { return s==null ? putJNULL() : putJSONName(s); }
  // JSON array of quoted strings.
  public AutoBuffer putJSONAStr(String[] ss) {
    if( ss == null ) return putJNULL();
    put1('[');
    for( int i=0; i<ss.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONStr(ss[i]);
    }
    return put1(']');
  }
  private AutoBuffer putJSONAAStr( String[][] sss) {
    if( sss == null ) return putJNULL();
    put1('[');
    for( int i=0; i<sss.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONAStr(sss[i]);
    }
    return put1(']');
  }
  // Named ("key":value) variants.
  @SuppressWarnings("unused") public AutoBuffer putJSONStr (String name, String s ) { return putJSONStr(name).put1(':').putJSONStr(s); }
  @SuppressWarnings("unused") public AutoBuffer putJSONAStr (String name, String[] ss ) { return putJSONStr(name).put1(':').putJSONAStr(ss); }
  @SuppressWarnings("unused") public AutoBuffer putJSONAAStr(String name, String[][]sss) { return putJSONStr(name).put1(':').putJSONAAStr(sss); }
  // Java-Serializable objects are NOT rendered to JSON — these deliberately
  // emit a null placeholder for the value; the object argument is ignored.
  @SuppressWarnings("unused") public AutoBuffer putJSONSer (String name, Object o ) { return putJSONStr(name).put1(':').putJNULL(); }
  @SuppressWarnings("unused") public AutoBuffer putJSONASer (String name, Object[] oo ) { return putJSONStr(name).put1(':').putJNULL(); }
  @SuppressWarnings("unused") public AutoBuffer putJSONAASer (String name, Object[][] ooo ) { return putJSONStr(name).put1(':').putJNULL(); }
  @SuppressWarnings("unused") public AutoBuffer putJSONAAASer(String name, Object[][][] oooo) { return putJSONStr(name).put1(':').putJNULL(); }
public AutoBuffer putJSONAZ( String name, boolean[] f) { return putJSONStr(name).put1(':').putJSONAZ(f); }
public AutoBuffer putJSON(Freezable ice) { return ice == null ? putJNULL() : ice.writeJSON(this); }
public AutoBuffer putJSONA( Freezable fs[] ) {
if( fs == null ) return putJNULL();
put1('[');
for( int i=0; i<fs.length; i++ ) {
if( i>0 ) put1(',');
putJSON(fs[i]);
}
return put1(']');
}
public AutoBuffer putJSONAA( Freezable fs[][]) {
if( fs == null ) return putJNULL();
put1('[');
for( int i=0; i<fs.length; i++ ) {
if( i>0 ) put1(',');
putJSONA(fs[i]);
}
return put1(']');
}
public AutoBuffer putJSONAAA( Freezable fs[][][]) {
if( fs == null ) return putJNULL();
put1('[');
for( int i=0; i<fs.length; i++ ) {
if( i>0 ) put1(',');
putJSONAA(fs[i]);
}
return put1(']');
}
  // Named-field variants for Freezable values and arrays, plus a bare boolean field.
  @SuppressWarnings("unused") public AutoBuffer putJSON ( String name, Freezable f ) { return putJSONStr(name).put1(':').putJSON (f); }
  public AutoBuffer putJSONA ( String name, Freezable f[] ) { return putJSONStr(name).put1(':').putJSONA (f); }
  @SuppressWarnings("unused") public AutoBuffer putJSONAA( String name, Freezable f[][]){ return putJSONStr(name).put1(':').putJSONAA(f); }
  @SuppressWarnings("unused") public AutoBuffer putJSONAAA( String name, Freezable f[][][]){ return putJSONStr(name).put1(':').putJSONAAA(f); }
  // Boolean emitted unquoted as `true`/`false` via string concatenation.
  @SuppressWarnings("unused") public AutoBuffer putJSONZ( String name, boolean value ) { return putJSONStr(name).put1(':').putJStr("" + value); }
private AutoBuffer putJSONAZ(boolean [] b) {
if (b == null) return putJNULL();
put1('[');
for( int i = 0; i < b.length; ++i) {
if (i > 0) put1(',');
putJStr(""+b[i]);
}
return put1(']');
}
  // Most simple integers
  // Renders an int as its decimal ASCII bytes and appends them raw.
  private AutoBuffer putJInt( int i ) {
    byte b[] = StringUtils.toBytes(i);
    return putA1(b,b.length);
  }
  // A byte is emitted as its (sign-extended) decimal value, not as a character.
  public AutoBuffer putJSON1( byte b ) { return putJInt(b); }
public AutoBuffer putJSONA1( byte ary[] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSON1(ary[i]);
}
return put1(']');
}
private AutoBuffer putJSONAA1(byte ary[][]) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSONA1(ary[i]);
}
return put1(']');
}
  // Named-field variants for byte scalars/arrays and enum arrays.
  @SuppressWarnings("unused") public AutoBuffer putJSON1 (String name, byte b ) { return putJSONStr(name).put1(':').putJSON1(b); }
  @SuppressWarnings("unused") public AutoBuffer putJSONA1 (String name, byte b[] ) { return putJSONStr(name).put1(':').putJSONA1(b); }
  @SuppressWarnings("unused") public AutoBuffer putJSONAA1(String name, byte b[][]) { return putJSONStr(name).put1(':').putJSONAA1(b); }
  public AutoBuffer putJSONAEnum(String name, Enum[] enums) {
    return putJSONStr(name).put1(':').putJSONAEnum(enums);
  }
public AutoBuffer putJSONAEnum( Enum[] enums ) {
if( enums == null ) return putJNULL();
put1('[');
for( int i=0; i<enums.length; i++ ) {
if( i>0 ) put1(',');
putJSONEnum(enums[i]);
}
return put1(']');
}
  // 2-byte scalars (char/short) are widened and written as plain decimal ints.
  AutoBuffer putJSON2( char c ) { return putJSON4(c); }
  AutoBuffer putJSON2( String name, char c ) { return putJSONStr(name).put1(':').putJSON2(c); }
  AutoBuffer putJSON2( short c ) { return putJSON4(c); }
  AutoBuffer putJSON2( String name, short c ) { return putJSONStr(name).put1(':').putJSON2(c); }
  public AutoBuffer putJSONA2( String name, short ary[] ) { return putJSONStr(name).put1(':').putJSONA2(ary); }
AutoBuffer putJSONA2( short ary[] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSON2(ary[i]);
}
return put1(']');
}
  // A long is emitted as an unquoted decimal literal.
  AutoBuffer putJSON8 ( long l ) { return putJStr(Long.toString(l)); }
  // JSON array of longs; null emits JSON null.
  AutoBuffer putJSONA8( long ary[] ) {
    if( ary == null ) return putJNULL();
    put1('[');
    for( int i=0; i<ary.length; i++ ) {
      if( i>0 ) put1(',');
      putJSON8(ary[i]);
    }
    return put1(']');
  }
  // 2-deep nested JSON array of longs; null emits JSON null.
  AutoBuffer putJSONAA8( long ary[][] ) {
    if( ary == null ) return putJNULL();
    put1('[');
    for( int i=0; i<ary.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONA8(ary[i]);
    }
    return put1(']');
  }
  // 3-deep nested JSON array of longs; null emits JSON null.
  AutoBuffer putJSONAAA8( long ary[][][] ) {
    if( ary == null ) return putJNULL();
    put1('[');
    for( int i=0; i<ary.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONAA8(ary[i]);
    }
    return put1(']');
  }
  // An enum is emitted as its quoted toString() value; null emits JSON null.
  AutoBuffer putJSONEnum( Enum e ) {
    return e==null ? putJNULL() : put1('"').putJStr(e.toString()).put1('"');
  }
  // Named-field variants for long scalars/arrays and enums.
  public AutoBuffer putJSON8 ( String name, long l ) { return putJSONStr(name).put1(':').putJSON8(l); }
  public AutoBuffer putJSONEnum( String name, Enum e ) { return putJSONStr(name).put1(':').putJSONEnum(e); }
  public AutoBuffer putJSONA8( String name, long ary[] ) { return putJSONStr(name).put1(':').putJSONA8(ary); }
  public AutoBuffer putJSONAA8( String name, long ary[][] ) { return putJSONStr(name).put1(':').putJSONAA8(ary); }
  public AutoBuffer putJSONAAA8( String name, long ary[][][] ) { return putJSONStr(name).put1(':').putJSONAAA8(ary); }
  // An int is emitted as an unquoted decimal literal.
  public AutoBuffer putJSON4(int i) { return putJStr(Integer.toString(i)); }
  // JSON array of ints; null emits JSON null.
  AutoBuffer putJSONA4( int[] a) {
    if( a == null ) return putJNULL();
    put1('[');
    for( int i=0; i<a.length; i++ ) {
      if( i>0 ) put1(',');
      putJSON4(a[i]);
    }
    return put1(']');
  }
  // 2-deep nested JSON array of ints; null emits JSON null.
  AutoBuffer putJSONAA4( int[][] a ) {
    if( a == null ) return putJNULL();
    put1('[');
    for( int i=0; i<a.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONA4(a[i]);
    }
    return put1(']');
  }
  // 3-deep nested JSON array of ints; null emits JSON null.
  AutoBuffer putJSONAAA4( int[][][] a ) {
    if( a == null ) return putJNULL();
    put1('[');
    for( int i=0; i<a.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONAA4(a[i]);
    }
    return put1(']');
  }
  // Named-field variants for int scalars and arrays.
  public AutoBuffer putJSON4 ( String name, int i ) { return putJSONStr(name).put1(':').putJSON4(i); }
  public AutoBuffer putJSONA4( String name, int[] a) { return putJSONStr(name).put1(':').putJSONA4(a); }
  public AutoBuffer putJSONAA4( String name, int[][] a ) { return putJSONStr(name).put1(':').putJSONAA4(a); }
  public AutoBuffer putJSONAAA4( String name, int[][][] a ) { return putJSONStr(name).put1(':').putJSONAAA4(a); }
AutoBuffer putJSON4f ( float f ) { return f==Float.POSITIVE_INFINITY?putJSONStr(JSON_POS_INF):(f==Float.NEGATIVE_INFINITY?putJSONStr(JSON_NEG_INF):(Float.isNaN(f)?putJSONStr(JSON_NAN):putJStr(Float .toString(f)))); }
  // Named float field: `"name":<float>`.
  public AutoBuffer putJSON4f ( String name, float f ) { return putJSONStr(name).put1(':').putJSON4f(f); }
  // JSON array of floats; null emits JSON null.
  AutoBuffer putJSONA4f( float[] a ) {
    if( a == null ) return putJNULL();
    put1('[');
    for( int i=0; i<a.length; i++ ) {
      if( i>0 ) put1(',');
      putJSON4f(a[i]);
    }
    return put1(']');
  }
  // Named float-array field.
  public AutoBuffer putJSONA4f(String name, float[] a) {
    putJSONStr(name).put1(':');
    return putJSONA4f(a);
  }
AutoBuffer putJSONAA4f(String name, float[][] a) {
putJSONStr(name).put1(':');
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSONA4f(a[i]);
}
return put1(']');
}
  // Writes a double: the TwoDimTable "empty" sentinel becomes JSON null; non-finite
  // values become the quoted tokens "NaN"/"Infinity"/"-Infinity"; otherwise an
  // unquoted Double.toString literal.
  AutoBuffer putJSON8d( double d ) {
    if (TwoDimTable.isEmpty(d)) return putJNULL();
    return d==Double.POSITIVE_INFINITY?putJSONStr(JSON_POS_INF):(d==Double.NEGATIVE_INFINITY?putJSONStr(JSON_NEG_INF):(Double.isNaN(d)?putJSONStr(JSON_NAN):putJStr(Double.toString(d))));
  }
  // Named-field variants for double scalars and arrays.
  public AutoBuffer putJSON8d( String name, double d ) { return putJSONStr(name).put1(':').putJSON8d(d); }
  public AutoBuffer putJSONA8d( String name, double[] a ) {
    return putJSONStr(name).put1(':').putJSONA8d(a);
  }
  public AutoBuffer putJSONAA8d( String name, double[][] a) {
    return putJSONStr(name).put1(':').putJSONAA8d(a);
  }
  public AutoBuffer putJSONAAA8d( String name, double[][][] a) { return putJSONStr(name).put1(':').putJSONAAA8d(a); }
public AutoBuffer putJSONA8d( double[] a ) {
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSON8d(a[i]);
}
return put1(']');
}
  // 2-deep nested JSON array of doubles; null emits JSON null.
  public AutoBuffer putJSONAA8d( double[][] a ) {
    if( a == null ) return putJNULL();
    put1('[');
    for( int i=0; i<a.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONA8d(a[i]);
    }
    return put1(']');
  }
  // 3-deep nested JSON array of doubles; null emits JSON null.
  AutoBuffer putJSONAAA8d( double ary[][][] ) {
    if( ary == null ) return putJNULL();
    put1('[');
    for( int i=0; i<ary.length; i++ ) {
      if( i>0 ) put1(',');
      putJSONAA8d(ary[i]);
    }
    return put1(']');
  }
  // Tokens emitted (quoted, via putJSONStr) for non-finite floating-point values.
  static final String JSON_NAN = "NaN";
  static final String JSON_POS_INF = "Infinity";
  static final String JSON_NEG_INF = "-Infinity";
}
| h2o-core/src/main/java/water/AutoBuffer.java | package water;
import java.io.*;
import java.lang.reflect.Array;
import java.net.*;
import java.nio.*;
import java.nio.channels.*;
import java.util.ArrayList;
import java.util.Random;
import water.network.SocketChannelUtils;
import water.util.Log;
import water.util.StringUtils;
import water.util.TwoDimTable;
/** A ByteBuffer backed mixed Input/Output streaming class, using Iced serialization.
*
* Reads/writes empty/fill the ByteBuffer as needed. When it is empty/full it
* we go to the ByteChannel for more/less. Because DirectByteBuffers are
* expensive to make, we keep a few pooled.
*
* When talking to a remote H2O node, switches between UDP and TCP transport
* protocols depending on the message size. The TypeMap is not included, and
* is assumed to exist on the remote H2O node.
*
* Supports direct NIO FileChannel read/write to disk, used during user-mode
* swapping. The TypeMap is not included on write, and is assumed to be the
* current map on read.
*
* Support read/write from byte[] - and this defeats the purpose of a
* Streaming protocol, but is frequently handy for small structures. The
* TypeMap is not included, and is assumed to be the current map on read.
*
* Supports read/write from a standard Stream, which by default assumes it is
* NOT going in and out of the same Cloud, so the TypeMap IS included. The
* serialized object can only be read back into the same minor version of H2O.
*
* @author <a href="mailto:[email protected]"></a>
*/
public final class AutoBuffer {
// Maximum size of an array we allow to allocate (the value is designed
// to mimic the behavior of OpenJDK libraries)
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
// The direct ByteBuffer for schlorping data about.
// Set to null to indicate the AutoBuffer is closed.
ByteBuffer _bb;
public String sourceName = "???";
public boolean isClosed() { return _bb == null ; }
// The ByteChannel for moving data in or out. Could be a SocketChannel (for
// a TCP connection) or a FileChannel (spill-to-disk) or a DatagramChannel
// (for a UDP connection). Null on closed AutoBuffers. Null on initial
// remote-writing AutoBuffers which are still deciding UDP vs TCP. Not-null
// for open AutoBuffers doing file i/o or reading any TCP/UDP or having
// written at least one buffer to TCP/UDP.
private Channel _chan;
// A Stream for moving data in. Null unless this AutoBuffer is
// stream-based, in which case _chan field is null. This path supports
// persistance: reading and writing objects from different H2O cluster
// instances (but exactly the same H2O revision). The only required
// similarity is same-classes-same-fields; changes here will probably
// silently crash. If the fields are named the same but the semantics
// differ, then again the behavior is probably silent crash.
private InputStream _is;
private short[] _typeMap; // Mapping from input stream map to current map, or null
// If we need a SocketChannel, raise the priority so we get the I/O over
// with. Do not want to have some TCP socket open, blocking the TCP channel
// and then have the thread stalled out. If we raise the priority - be sure
// to lower it again. Note this is for TCP channels ONLY, and only because
// we are blocking another Node with I/O.
private int _oldPrior = -1;
// Where to send or receive data via TCP or UDP (choice made as we discover
// how big the message is); used to lazily create a Channel. If NULL, then
// _chan should be a pre-existing Channel, such as a FileChannel.
final H2ONode _h2o;
// TRUE for read-mode. FALSE for write-mode. Can be flipped for rapid turnaround.
private boolean _read;
// TRUE if this AutoBuffer has never advanced past the first "page" of data.
// The UDP-flavor, port# and task fields are only valid until we read over
// them when flipping the ByteBuffer to the next chunk of data. Used in
// asserts all over the place.
private boolean _firstPage;
// Total size written out from 'new' to 'close'. Only updated when actually
// reading or writing data, or after close(). For profiling only.
int _size;
//int _zeros, _arys;
// More profiling: start->close msec, plus nano's spent in blocking I/O
// calls. The difference between (close-start) and i/o msec is the time the
// i/o thread spends doing other stuff (e.g. allocating Java objects or
// (de)serializing).
long _time_start_ms, _time_close_ms, _time_io_ns;
// I/O persistence flavor: Value.ICE, NFS, HDFS, S3, TCP. Used to record I/O time.
final byte _persist;
// The assumed max UDP packetsize
static final int MTU = 1500-8/*UDP packet header size*/;
// Enable this to test random TCP fails on open or write
static final Random RANDOM_TCP_DROP = null; //new Random();
static final java.nio.charset.Charset UTF_8 = java.nio.charset.Charset.forName("UTF-8");
/** Incoming UDP request. Make a read-mode AutoBuffer from the open Channel,
* figure the originating H2ONode from the first few bytes read. */
AutoBuffer( DatagramChannel sock ) throws IOException {
_chan = null;
_bb = BBP_SML.make(); // Get a small / UDP-sized ByteBuffer
_read = true; // Reading by default
_firstPage = true;
// Read a packet; can get H2ONode from 'sad'?
Inet4Address addr = null;
SocketAddress sad = sock.receive(_bb);
if( sad instanceof InetSocketAddress ) {
InetAddress address = ((InetSocketAddress) sad).getAddress();
if( address instanceof Inet4Address ) {
addr = (Inet4Address) address;
}
}
_size = _bb.position();
_bb.flip(); // Set limit=amount read, and position==0
if( addr == null ) throw new RuntimeException("Unhandled socket type: " + sad);
// Read Inet from socket, port from the stream, figure out H2ONode
_h2o = H2ONode.intern(addr, getPort());
_firstPage = true;
assert _h2o != null;
_persist = 0; // No persistance
}
/** Incoming TCP request. Make a read-mode AutoBuffer from the open Channel,
* figure the originating H2ONode from the first few bytes read.
*
* remoteAddress set to null means that the communication is originating from non-h2o node, non-null value
* represents the case where the communication is coming from h2o node.
* */
AutoBuffer( ByteChannel sock, InetAddress remoteAddress ) throws IOException {
_chan = sock;
raisePriority(); // Make TCP priority high
_bb = BBP_BIG.make(); // Get a big / TPC-sized ByteBuffer
_bb.flip();
_read = true; // Reading by default
_firstPage = true;
// Read Inet from socket, port from the stream, figure out H2ONode
if(remoteAddress!=null) {
_h2o = H2ONode.intern(remoteAddress, getPort());
}else{
// In case the communication originates from non-h2o node, we set _h2o node to null.
// It is done for 2 reasons:
// - H2ONode.intern creates a new thread and if there's a lot of connections
// from non-h2o environment, it could end up with too many open files exception.
// - H2OIntern also reads port (getPort()) and additional information which we do not send
// in communication originating from non-h2o nodes
_h2o = null;
}
_firstPage = true; // Yes, must reset this.
_time_start_ms = System.currentTimeMillis();
_persist = Value.TCP;
}
/** Make an AutoBuffer to write to an H2ONode. Requests for full buffer will
* open a TCP socket and roll through writing to the target. Smaller
* requests will send via UDP. Small requests get ordered by priority, so
* that e.g. NACK and ACKACK messages have priority over most anything else.
* This helps in UDP floods to shut down flooding senders. */
private byte _msg_priority;
AutoBuffer( H2ONode h2o, byte priority ) {
// If UDP goes via UDP, we write into a DBB up front - because we plan on
// sending it out via a Datagram socket send call. If UDP goes via batched
// TCP, we write into a HBB up front, because this will be copied again
// into a large outgoing buffer.
_bb = H2O.ARGS.useUDP // Actually use UDP?
? BBP_SML.make() // Make DirectByteBuffers to start with
: ByteBuffer.wrap(new byte[16]).order(ByteOrder.nativeOrder());
_chan = null; // Channel made lazily only if we write alot
_h2o = h2o;
_read = false; // Writing by default
_firstPage = true; // Filling first page
assert _h2o != null;
_time_start_ms = System.currentTimeMillis();
_persist = Value.TCP;
_msg_priority = priority;
}
/** Spill-to/from-disk request. */
public AutoBuffer( FileChannel fc, boolean read, byte persist ) {
_bb = BBP_BIG.make(); // Get a big / TPC-sized ByteBuffer
_chan = fc; // Write to read/write
_h2o = null; // File Channels never have an _h2o
_read = read; // Mostly assert reading vs writing
if( read ) _bb.flip();
_time_start_ms = System.currentTimeMillis();
_persist = persist; // One of Value.ICE, NFS, S3, HDFS
}
/** Read from UDP multicast. Same as the byte[]-read variant, except there is an H2O. */
AutoBuffer( DatagramPacket pack ) {
_size = pack.getLength();
_bb = ByteBuffer.wrap(pack.getData(), 0, pack.getLength()).order(ByteOrder.nativeOrder());
_bb.position(0);
_read = true;
_firstPage = true;
_chan = null;
_h2o = H2ONode.intern(pack.getAddress(), getPort());
_persist = 0; // No persistance
}
/** Read from a UDP_TCP buffer; could be in the middle of a large buffer */
AutoBuffer( H2ONode h2o, byte[] buf, int off, int len ) {
assert buf != null : "null fed to ByteBuffer.wrap";
_h2o = h2o;
_bb = ByteBuffer.wrap(buf,off,len).order(ByteOrder.nativeOrder());
_chan = null;
_read = true;
_firstPage = true;
_persist = 0; // No persistance
_size = len;
}
/** Read from a fixed byte[]; should not be closed. */
public AutoBuffer( byte[] buf ) { this(null,buf,0, buf.length); }
/** Write to an ever-expanding byte[]. Instead of calling {@link #close()},
* call {@link #buf()} to retrieve the final byte[]. */
public AutoBuffer( ) {
_bb = ByteBuffer.wrap(new byte[16]).order(ByteOrder.nativeOrder());
_chan = null;
_h2o = null;
_read = false;
_firstPage = true;
_persist = 0; // No persistance
}
/** Write to a known sized byte[]. Instead of calling close(), call
* {@link #bufClose()} to retrieve the final byte[]. */
public AutoBuffer( int len ) {
_bb = ByteBuffer.wrap(MemoryManager.malloc1(len)).order(ByteOrder.nativeOrder());
_chan = null;
_h2o = null;
_read = false;
_firstPage = true;
_persist = 0; // No persistance
}
/** Write to a persistent Stream, including all TypeMap info to allow later
* reloading (by the same exact rev of H2O). */
public AutoBuffer( OutputStream os, boolean persist ) {
_bb = ByteBuffer.wrap(MemoryManager.malloc1(BBP_BIG._size)).order(ByteOrder.nativeOrder());
_read = false;
_chan = Channels.newChannel(os);
_h2o = null;
_firstPage = true;
_persist = 0;
if( persist ) put1(0x1C).put1(0xED).putStr(H2O.ABV.projectVersion()).putAStr(TypeMap.CLAZZES);
else put1(0);
}
/** Read from a persistent Stream (including all TypeMap info) into same
* exact rev of H2O). */
public AutoBuffer( InputStream is ) {
_chan = null;
_h2o = null;
_firstPage = true;
_persist = 0;
_read = true;
_bb = ByteBuffer.wrap(MemoryManager.malloc1(BBP_BIG._size)).order(ByteOrder.nativeOrder());
_bb.flip();
_is = is;
int b = get1U();
if( b==0 ) return; // No persistence info
int magic = get1U();
if( b!=0x1C || magic != 0xED ) throw new IllegalArgumentException("Missing magic number 0x1CED at stream start");
String version = getStr();
if( !version.equals(H2O.ABV.projectVersion()) )
throw new IllegalArgumentException("Found version "+version+", but running version "+H2O.ABV.projectVersion());
String[] typeMap = getAStr();
_typeMap = new short[typeMap.length];
for( int i=0; i<typeMap.length; i++ )
_typeMap[i] = (short)(typeMap[i]==null ? 0 : TypeMap.onIce(typeMap[i]));
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("[AB ").append(_read ? "read " : "write ");
sb.append(_firstPage?"first ":"2nd ").append(_h2o);
sb.append(" ").append(Value.nameOfPersist(_persist));
if( _bb != null ) sb.append(" 0 <= ").append(_bb.position()).append(" <= ").append(_bb.limit());
if( _bb != null ) sb.append(" <= ").append(_bb.capacity());
return sb.append("]").toString();
}
// Fetch a DBB from an object pool... they are fairly expensive to make
// because a native call is required to get the backing memory. I've
// included BB count tracking code to help track leaks. As of 12/17/2012 the
// leaks are under control, but figure this may happen again so keeping these
// counters around.
//
// We use 2 pool sizes: lots of small UDP packet-sized buffers and fewer
// larger TCP-sized buffers.
private static final boolean DEBUG = Boolean.getBoolean("h2o.find-ByteBuffer-leaks");
private static long HWM=0;
static class BBPool {
long _made, _cached, _freed;
long _numer, _denom, _goal=4*H2O.NUMCPUS, _lastGoal;
final ArrayList<ByteBuffer> _bbs = new ArrayList<>();
final int _size; // Big or small size of ByteBuffers
BBPool( int sz) { _size=sz; }
private ByteBuffer stats( ByteBuffer bb ) {
if( !DEBUG ) return bb;
if( ((_made+_cached)&255)!=255 ) return bb; // Filter printing to 1 in 256
long now = System.currentTimeMillis();
if( now < HWM ) return bb;
HWM = now+1000;
water.util.SB sb = new water.util.SB();
sb.p("BB").p(this==BBP_BIG?1:0).p(" made=").p(_made).p(" -freed=").p(_freed).p(", cache hit=").p(_cached).p(" ratio=").p(_numer/_denom).p(", goal=").p(_goal).p(" cache size=").p(_bbs.size()).nl();
for( int i=0; i<H2O.MAX_PRIORITY; i++ ) {
int x = H2O.getWrkQueueSize(i);
if( x > 0 ) sb.p('Q').p(i).p('=').p(x).p(' ');
}
Log.warn(sb.nl().toString());
return bb;
}
ByteBuffer make() {
while( true ) { // Repeat loop for DBB OutOfMemory errors
ByteBuffer bb=null;
synchronized(_bbs) {
int sz = _bbs.size();
if( sz > 0 ) { bb = _bbs.remove(sz-1); _cached++; _numer++; }
}
if( bb != null ) return stats(bb);
// Cache empty; go get one from C/Native memory
try {
bb = ByteBuffer.allocateDirect(_size).order(ByteOrder.nativeOrder());
synchronized(this) { _made++; _denom++; _goal = Math.max(_goal,_made-_freed); _lastGoal=System.nanoTime(); } // Goal was too low, raise it
return stats(bb);
} catch( OutOfMemoryError oome ) {
// java.lang.OutOfMemoryError: Direct buffer memory
if( !"Direct buffer memory".equals(oome.getMessage()) ) throw oome;
System.out.println("OOM DBB - Sleeping & retrying");
try { Thread.sleep(100); } catch( InterruptedException ignore ) { }
}
}
}
void free(ByteBuffer bb) {
// Heuristic: keep the ratio of BB's made to cache-hits at a fixed level.
// Free to GC if ratio is high, free to internal cache if low.
long ratio = _numer/(_denom+1);
synchronized(_bbs) {
if( ratio < 100 || _bbs.size() < _goal ) { // low hit/miss ratio or below goal
bb.clear(); // Clear-before-add
_bbs.add(bb);
} else _freed++; // Toss the extras (above goal & ratio)
long now = System.nanoTime();
if( now-_lastGoal > 1000000000L ) { // Once/sec, drop goal by 10%
_lastGoal = now;
if( ratio > 110 ) // If ratio is really high, lower goal
_goal=Math.max(4*H2O.NUMCPUS,(long)(_goal*0.99));
// Once/sec, lower numer/denom... means more recent activity outweighs really old stuff
long denom = (long) (0.99 * _denom); // Proposed reduction
if( denom > 10 ) { // Keep a little precision
_numer = (long) (0.99 * _numer); // Keep ratio between made & cached the same
_denom = denom; // ... by lowering both by 10%
}
}
}
}
static int FREE( ByteBuffer bb ) {
if(bb.isDirect())
(bb.capacity()==BBP_BIG._size ? BBP_BIG : BBP_SML).free(bb);
return 0; // Flow coding
}
}
static BBPool BBP_SML = new BBPool( 2*1024); // Bytebuffer "common small size", for UDP
static BBPool BBP_BIG = new BBPool(64*1024); // Bytebuffer "common big size", for TCP
public static int TCP_BUF_SIZ = BBP_BIG._size;
  // Releases the backing ByteBuffer: direct buffers go back to the shared pool,
  // heap buffers are simply dropped. Nulling _bb marks this AutoBuffer closed
  // (see isClosed()). Always returns 0 for flow-coding.
  private int bbFree() {
    if(_bb != null && _bb.isDirect())
      BBPool.FREE(_bb);
    _bb = null;
    return 0;                   // Flow-coding
  }
// You thought TCP was a reliable protocol, right? WRONG! Fails 100% of the
// time under heavy network load. Connection-reset-by-peer & connection
// timeouts abound, even after a socket open and after a 1st successful
// ByteBuffer write. It *appears* that the reader is unaware that a writer
// was told "go ahead and write" by the TCP stack, so all these fails are
// only on the writer-side.
public static class AutoBufferException extends RuntimeException {
public final IOException _ioe;
AutoBufferException( IOException ioe ) { _ioe = ioe; }
}
// For reads, just assert all was read and close and release resources.
// (release ByteBuffer back to the common pool). For writes, force any final
// bytes out. If the write is to an H2ONode and is short, send via UDP.
// AutoBuffer close calls order; i.e. a reader close() will block until the
// writer does a close().
  public final int close() {
    //if( _size > 2048 ) System.out.println("Z="+_zeros+" / "+_size+", A="+_arys);
    if( isClosed() ) return 0;            // Already closed
    assert _h2o != null || _chan != null || _is != null; // Byte-array backed should not be closed
    try {
      if( _chan == null ) {     // No channel?
        if( _read ) {
          if( _is != null ) _is.close();
          return 0;
        } else {                // Write
          // For small-packet write, send via UDP. Since nothing is sent until
          // now, this close() call trivially orders - since the reader will not
          // even start (much less close()) until this packet is sent.
          if( _bb.position() < MTU) return udpSend();
          // oops - Big Write, switch to TCP and finish out there
        }
      }
      // Force AutoBuffer 'close' calls to order; i.e. block readers until
      // writers do a 'close' - by writing 1 more byte in the close-call which
      // the reader will have to wait for.
      // Handshake protocol: writer appends sentinel 0xab; reader verifies it and
      // replies 0xcd on the raw socket, which the writer then verifies.
      if( hasTCP()) { // TCP connection?
        try {
          if( _read ) { // Reader?
            int x = get1U(); // Read 1 more byte
            assert x == 0xab : "AB.close instead of 0xab sentinel got "+x+", "+this;
            assert _chan != null; // chan set by incoming reader, since we KNOW it is a TCP
            // Write the reader-handshake-byte.
            SocketChannelUtils.underlyingSocketChannel(_chan).socket().getOutputStream().write(0xcd);
            // do not close actually reader socket; recycle it in TCPReader thread
          } else { // Writer?
            put1(0xab); // Write one-more byte ; might set _chan from null to not-null
            sendPartial(); // Finish partial writes; might set _chan from null to not-null
            assert _chan != null; // _chan is set not-null now!
            // Read the writer-handshake-byte.
            int x = SocketChannelUtils.underlyingSocketChannel(_chan).socket().getInputStream().read();
            // either TCP con was dropped or other side closed connection without reading/confirming (e.g. task was cancelled).
            if( x == -1 ) throw new IOException("Other side closed connection before handshake byte read");
            assert x == 0xcd : "Handshake; writer expected a 0xcd from reader but got "+x;
          }
        } catch( IOException ioe ) {
          try { _chan.close(); } catch( IOException ignore ) {} // Silently close
          _chan = null; // No channel now, since i/o error
          throw ioe; // Rethrow after close
        } finally {
          if( !_read ) _h2o.freeTCPSocket((ByteChannel) _chan); // Recycle writable TCP channel
          restorePriority(); // And if we raised priority, lower it back
        }
      } else { // FileChannel
        if( !_read ) sendPartial(); // Finish partial file-system writes
        _chan.close();
        _chan = null; // Closed file channel
      }
    } catch( IOException e ) { // Dunno how to handle so crash-n-burn
      throw new AutoBufferException(e);
    } finally {
      // Always release the ByteBuffer (marks this AutoBuffer closed) and stamp close time.
      bbFree();
      _time_close_ms = System.currentTimeMillis();
      // TimeLine.record_IOclose(this,_persist); // Profile AutoBuffer connections
      assert isClosed();
    }
    return 0;
  }
  // Need a sock for a big read or write operation.
  // See if we got one already, else open a new socket.
  // Lazily binds _chan to a (possibly recycled) TCP socket from the target node
  // and raises thread priority for the duration of the transfer.
  private void tcpOpen() throws IOException {
    assert _firstPage && _bb.limit() >= 1+2+4; // At least something written
    assert _chan == null;
    // assert _bb.position()==0;
    _chan = _h2o.getTCPSocket();
    raisePriority();
  }
  // Just close the channel here without reading anything. Without the task
  // object at hand we do not know what (how many bytes) should we read from
  // the channel. And since the other side will try to read confirmation from
  // us before closing the channel, we can not read till the end. So we just
  // close the channel and let the other side to deal with it and figure out
  // the task has been cancelled (still sending ack ack back).
  void drainClose() {
    if( isClosed() ) return;              // Already closed
    final Channel chan = _chan;           // Read before closing
    assert _h2o != null || chan != null;  // Byte-array backed should not be closed
    if( chan != null ) {                  // Channel assumed sick from prior IOException
      try { chan.close(); } catch( IOException ignore ) {} // Silently close
      _chan = null;                       // No channel now!
      // Only writer-side socket channels go back to the recycle pool.
      if( !_read && SocketChannelUtils.isSocketChannel(chan)) _h2o.freeTCPSocket((ByteChannel) chan); // Recycle writable TCP channel
    }
    restorePriority();          // And if we raised priority, lower it back
    bbFree();
    _time_close_ms = System.currentTimeMillis();
    // TimeLine.record_IOclose(this,_persist); // Profile AutoBuffer connections
    assert isClosed();
  }
// True if we opened a TCP channel, or will open one to close-and-send
boolean hasTCP() { assert !isClosed(); return SocketChannelUtils.isSocketChannel(_chan) || (_h2o!=null && _bb.position() >= MTU); }
// Size in bytes sent, after a close()
int size() { return _size; }
//int zeros() { return _zeros; }
public int position () { return _bb.position(); }
public AutoBuffer position(int p) {_bb.position(p); return this;}
/** Skip over some bytes in the byte buffer. Caller is responsible for not
* reading off end of the bytebuffer; generally this is easy for
* array-backed autobuffers and difficult for i/o-backed bytebuffers. */
public void skip(int skip) { _bb.position(_bb.position()+skip); }
// Return byte[] from a writable AutoBuffer
public final byte[] buf() {
assert _h2o==null && _chan==null && !_read && !_bb.isDirect();
return MemoryManager.arrayCopyOfRange(_bb.array(), _bb.arrayOffset(), _bb.position());
}
public final byte[] bufClose() {
byte[] res = _bb.array();
bbFree();
return res;
}
// For TCP sockets ONLY, raise the thread priority. We assume we are
// blocking other Nodes with our network I/O, so try to get the I/O
// over with.
private void raisePriority() {
if(_oldPrior == -1){
assert SocketChannelUtils.isSocketChannel(_chan);
_oldPrior = Thread.currentThread().getPriority();
Thread.currentThread().setPriority(Thread.MAX_PRIORITY-1);
}
}
private void restorePriority() {
if( _oldPrior == -1 ) return;
Thread.currentThread().setPriority(_oldPrior);
_oldPrior = -1;
}
// Send via UDP socket. Unlike eg TCP sockets, we only need one for sending
// so we keep a global one. Also, we do not close it when done, and we do
// not connect it up-front to a target - but send the entire packet right now.
  // Sends the whole (small) message now: multicast when targeting SELF, else
  // single-cast over raw UDP or batched TCP depending on H2O.ARGS.useUDP.
  // Caller guarantees no channel was ever opened and the payload fits a small BB.
  private int udpSend() throws IOException {
    assert _chan == null;
    TimeLine.record_send(this,false);
    _size = _bb.position();
    assert _size < AutoBuffer.BBP_SML._size;
    _bb.flip();                 // Flip for sending
    if( _h2o==H2O.SELF ) {      // SELF-send is the multi-cast signal
      water.init.NetworkInit.multicast(_bb, _msg_priority);
    } else {                    // Else single-cast send
      if(H2O.ARGS.useUDP)       // Send via UDP directly
        water.init.NetworkInit.CLOUD_DGRAM.send(_bb, _h2o._key);
      else                      // Send via bulk TCP
        _h2o.sendMessage(_bb, _msg_priority);
    }
    return 0;                   // Flow-coding
  }
  // Flip to write-mode
  // Resets the buffer for reuse as a writer; records the outgoing message priority.
  AutoBuffer clearForWriting(byte priority) {
    assert _read;
    _read = false;
    _msg_priority = priority;
    _bb.clear();
    _firstPage = true;
    return this;
  }
  // Flip to read-mode
  // Flips the freshly-written buffer so its contents can be read back.
  public AutoBuffer flipForReading() {
    assert !_read;
    _read = true;
    _bb.flip();
    _firstPage = true;
    return this;
  }
/** Ensure the buffer has space for sz more bytes */
private ByteBuffer getSp( int sz ) { return sz > _bb.remaining() ? getImpl(sz) : _bb; }
/** Ensure buffer has at least sz bytes in it.
* - Also, set position just past this limit for future reading. */
private ByteBuffer getSz(int sz) {
assert _firstPage : "getSz() is only valid for early UDP bytes";
if( sz > _bb.limit() ) getImpl(sz);
_bb.position(sz);
return _bb;
}
private ByteBuffer getImpl( int sz ) {
assert _read : "Reading from a buffer in write mode";
_bb.compact(); // Move remaining unread bytes to start of buffer; prep for reading
// Its got to fit or we asked for too much
assert _bb.position()+sz <= _bb.capacity() : "("+_bb.position()+"+"+sz+" <= "+_bb.capacity()+")";
long ns = System.nanoTime();
while( _bb.position() < sz ) { // Read until we got enuf
try {
int res = readAnInt(); // Read more
// Readers are supposed to be strongly typed and read the exact expected bytes.
// However, if a TCP connection fails mid-read we'll get a short-read.
// This is indistinguishable from a mis-alignment between the writer and reader!
if( res <= 0 )
throw new AutoBufferException(new EOFException("Reading "+sz+" bytes, AB="+this));
if( _is != null ) _bb.position(_bb.position()+res); // Advance BB for Streams manually
_size += res; // What we read
} catch( IOException e ) { // Dunno how to handle so crash-n-burn
// Linux/Ubuntu message for a reset-channel
if( e.getMessage().equals("An existing connection was forcibly closed by the remote host") )
throw new AutoBufferException(e);
// Windows message for a reset-channel
if( e.getMessage().equals("An established connection was aborted by the software in your host machine") )
throw new AutoBufferException(e);
throw Log.throwErr(e);
}
}
_time_io_ns += (System.nanoTime()-ns);
_bb.flip(); // Prep for handing out bytes
//for( int i=0; i < _bb.limit(); i++ ) if( _bb.get(i)==0 ) _zeros++;
_firstPage = false; // First page of data is gone gone gone
return _bb;
}
  /** Read once from whichever source backs this buffer: the stream (_is) when
   *  present, else the readable channel.  Returns the raw byte count from the
   *  underlying read (<= 0 on EOF); stream reads do NOT advance _bb.position,
   *  the caller compensates for that. */
  private int readAnInt() throws IOException {
    if (_is == null) return ((ReadableByteChannel) _chan).read(_bb);
    final byte[] array = _bb.array();
    final int position = _bb.position();
    final int remaining = _bb.remaining();
    try {
      return _is.read(array, position, remaining);
    } catch (IOException ioe) {
      // Re-wrap with buffer coordinates and the source name for diagnosability
      throw new IOException("Failed reading " + remaining + " bytes into buffer[" + array.length + "] at " + position + " from " + sourceName + " " + _is, ioe);
    }
  }
  /** Put as needed to keep from overflowing the ByteBuffer.
   *  Memory-only buffers (no node, no channel) and small heap-backed buffers
   *  grow in place; buffers bound to real I/O instead flush what they have
   *  via sendPartial() to make room. */
  private ByteBuffer putSp( int sz ) {
    assert !_read;
    if (sz > _bb.remaining()) {
      if ((_h2o == null && _chan == null) || (_bb.hasArray() && _bb.capacity() < BBP_BIG._size))
        expandByteBuffer(sz);
      else
        sendPartial();
      assert sz <= _bb.remaining();
    }
    return _bb;
  }
  // Do something with partial results, because the ByteBuffer is full.
  // If we are doing I/O, ship the bytes we have now and flip the ByteBuffer.
  // On return the buffer is cleared and ready for more writes.
  private ByteBuffer sendPartial() {
    // Doing I/O with the full ByteBuffer - ship partial results
    _size += _bb.position();
    if( _chan == null )
      TimeLine.record_send(this, true);
    _bb.flip(); // Prep for writing.
    try {
      if( _chan == null )
        tcpOpen(); // This is a big operation. Open a TCP socket as-needed.
      //for( int i=0; i < _bb.limit(); i++ ) if( _bb.get(i)==0 ) _zeros++;
      long ns = System.nanoTime();
      while( _bb.hasRemaining() ) {
        ((WritableByteChannel) _chan).write(_bb);
        // Fault-injection hook: randomly fail TCP writes when enabled for testing
        if( RANDOM_TCP_DROP != null && SocketChannelUtils.isSocketChannel(_chan) && RANDOM_TCP_DROP.nextInt(100) == 0 )
          throw new IOException("Random TCP Write Fail");
      }
      _time_io_ns += (System.nanoTime()-ns);
    } catch( IOException e ) { // Some kind of TCP fail?
      // Change to an unchecked exception (so we don't have to annotate every
      // frick'n put1/put2/put4/read/write call). Retry & recovery happens at
      // a higher level. AutoBuffers are used for many things including e.g.
      // disk i/o & UDP writes; this exception only happens on a failed TCP
      // write - and we don't want to make the other AutoBuffer users have to
      // declare (and then ignore) this exception.
      throw new AutoBufferException(e);
    }
    _firstPage = false;
    _bb.clear();
    return _bb;
  }
  // Called when the byte buffer doesn't have enough room
  // If buffer is array backed, and the needed room is small,
  // increase the size of the backing array,
  // otherwise dump into a large direct buffer
  private ByteBuffer expandByteBuffer(int sizeHint) {
    // Total capacity required: bytes already buffered plus the shortfall
    final long needed = (long) sizeHint - _bb.remaining() + _bb.capacity(); // Max needed is 2G
    if ((_h2o==null && _chan == null) || (_bb.hasArray() && needed < MTU)) {
      if (needed > MAX_ARRAY_SIZE) {
        throw new IllegalArgumentException("Cannot allocate more than 2GB array: sizeHint="+sizeHint+", "
                                           + "needed="+needed
                                           + ", bb.remaining()=" + _bb.remaining() + ", bb.capacity()="+_bb.capacity());
      }
      byte[] ary = _bb.array();
      // just get twice what is currently needed but not more then max array size (2G)
      // Be careful not to overflow because of integer math!
      int newLen = (int) Math.min(1L << (water.util.MathUtils.log2(needed)+1), MAX_ARRAY_SIZE);
      int oldpos = _bb.position();
      // Copy bytes into a bigger heap array, preserving position/order
      _bb = ByteBuffer.wrap(MemoryManager.arrayCopyOfRange(ary,0,newLen),oldpos,newLen-oldpos)
          .order(ByteOrder.nativeOrder());
    } else if (_bb.capacity() != BBP_BIG._size) { //avoid expanding existing BBP items
      int oldPos = _bb.position();
      _bb.flip();
      _bb = BBP_BIG.make().put(_bb);  // Migrate buffered bytes into a pooled big buffer
      _bb.position(oldPos);
    }
    return _bb;
  }
@SuppressWarnings("unused") public String getStr(int off, int len) {
return new String(_bb.array(), _bb.arrayOffset()+off, len, UTF_8);
}
  // -----------------------------------------------
  // Utility functions to get various Java primitives.
  // Each relative getter first calls getSp(n) to guarantee n readable bytes.
  @SuppressWarnings("unused") public boolean getZ() { return get1()!=0; }
  @SuppressWarnings("unused") public byte get1 () { return getSp(1).get (); }
  @SuppressWarnings("unused") public int get1U() { return get1() & 0xFF; }
  @SuppressWarnings("unused") public char get2 () { return getSp(2).getChar (); }
  @SuppressWarnings("unused") public short get2s () { return getSp(2).getShort (); }
  // NOTE(review): get3 zero-extends (returns 0..2^24-1) while put3 accepts
  // small negative ints; negative values do not round-trip through get3 --
  // confirm callers only use the unsigned range.
  @SuppressWarnings("unused") public int get3 () { getSp(3); return get1U() | get1U() << 8 | get1U() << 16; }
  @SuppressWarnings("unused") public int get4 () { return getSp(4).getInt (); }
  @SuppressWarnings("unused") public float get4f() { return getSp(4).getFloat (); }
  @SuppressWarnings("unused") public long get8 () { return getSp(8).getLong (); }
  @SuppressWarnings("unused") public double get8d() { return getSp(8).getDouble(); }
  // Absolute-offset reads; these do NOT advance the buffer position
  int get1U(int off) { return _bb.get (off)&0xFF; }
  int get4 (int off) { return _bb.getInt (off); }
  long get8 (int off) { return _bb.getLong(off); }
  // Primitive writers; each calls putSp(n) first to guarantee room for n bytes.
  @SuppressWarnings("unused") public AutoBuffer putZ (boolean b){ return put1(b?1:0); }
  // put1 accepts both signed-byte and unsigned-byte ranges; stored as one byte
  @SuppressWarnings("unused") public AutoBuffer put1 ( int b) { assert b >= -128 && b <= 255 : ""+b+" is not a byte";
    putSp(1).put((byte)b); return this; }
  @SuppressWarnings("unused") public AutoBuffer put2 ( char c) { putSp(2).putChar (c); return this; }
  @SuppressWarnings("unused") public AutoBuffer put2 ( short s) { putSp(2).putShort (s); return this; }
  @SuppressWarnings("unused") public AutoBuffer put2s ( short s) { return put2(s); }
  // 3-byte little-endian write; see get3 for the (unsigned) read side
  @SuppressWarnings("unused") public AutoBuffer put3( int x ) { assert (-1<<24) <= x && x < (1<<24);
    return put1((x)&0xFF).put1((x >> 8)&0xFF).put1(x >> 16); }
  @SuppressWarnings("unused") public AutoBuffer put4 ( int i) { putSp(4).putInt (i); return this; }
  @SuppressWarnings("unused") public AutoBuffer put4f( float f) { putSp(4).putFloat (f); return this; }
  @SuppressWarnings("unused") public AutoBuffer put8 ( long l) { putSp(8).putLong (l); return this; }
  @SuppressWarnings("unused") public AutoBuffer put8d(double d) { putSp(8).putDouble(d); return this; }
  /** Write a Freezable prefixed by its (compressed) TypeMap id; null writes
   *  just the TypeMap.NULL id.  Returns this for chaining. */
  public AutoBuffer put(Freezable f) {
    if( f == null ) return putInt(TypeMap.NULL);
    assert f.frozenType() > 0 : "No TypeMap for "+f.getClass().getName();
    putInt(f.frozenType());
    return f.write(this);
  }
  /** Read a Freezable written by put(): a TypeMap id then the object payload.
   *  When reading from a persisted stream (_is != null) the on-disk type id
   *  is remapped through _typeMap to this runtime's id. */
  public <T extends Freezable> T get() {
    int id = getInt();
    if( id == TypeMap.NULL ) return null;
    if( _is!=null ) id = _typeMap[id];
    return (T)TypeMap.newFreezable(id).read(this);
  }
  /** Typed variant of get(); asserts the decoded object is an instance of tc. */
  public <T extends Freezable> T get(Class<T> tc) {
    int id = getInt();
    if( id == TypeMap.NULL ) return null;
    if( _is!=null ) id = _typeMap[id];
    assert tc.isInstance(TypeMap.theFreezable(id)):tc.getName() + " != " + TypeMap.theFreezable(id).getClass().getName() + ", id = " + id;
    return (T)TypeMap.newFreezable(id).read(this);
  }
  // Write Key's target IFF the Key is not null; target can be null.
  // Fetches the Keyed value from the DKV and serializes it (plus everything
  // it owns, via writeAll_impl).
  public AutoBuffer putKey(Key k) {
    if( k==null ) return this; // Key is null ==> write nothing
    Keyed kd = DKV.getGet(k);
    put(kd);
    return kd == null ? this : kd.writeAll_impl(this);
  }
  // Read back what putKey wrote; a null Key means nothing was written.
  public Keyed getKey(Key k, Futures fs) {
    return k==null ? null : getKey(fs); // Key is null ==> read nothing
  }
  // Deserialize a Keyed value, install it in the DKV, then read its owned
  // sub-objects (readAll_impl mirrors writeAll_impl on the writer side).
  public Keyed getKey(Futures fs) {
    Keyed kd = get(Keyed.class);
    if( kd == null ) return null;
    DKV.put(kd,fs);
    return kd.readAll_impl(this,fs);
  }
  // Put a (compressed) integer. Specifically values in the range -1 to ~250
  // will take 1 byte, values near a Short will take 1+2 bytes, values near an
  // Int will take 1+4 bytes, and bigger values 1+8 bytes. This compression is
  // optimized for small integers (including -1 which is often used as a "array
  // is null" flag when passing the array length).
  // Wire format: first byte 0..253 encodes x+1 directly; 255 prefixes a
  // 2-byte short; 254 prefixes a full 4-byte int.
  public AutoBuffer putInt(int x) {
    if( 0 <= (x+1)&& (x+1) <= 253 ) return put1(x+1);
    if( Short.MIN_VALUE <= x && x <= Short.MAX_VALUE ) return put1(255).put2((short)x);
    return put1(254).put4(x);
  }
  // Get a (compressed) integer. See above for the compression strategy and reasoning.
  int getInt( ) {
    int x = get1U();
    if( x <= 253 ) return x-1;       // Inline 1-byte encoding
    if( x==255 ) return (short)get2(); // 2-byte short follows (sign-extended)
    assert x==254;
    return get4();                   // Full 4-byte int follows
  }
  // Put a zero-compressed array. Compression is:
  //  If null : putInt(-1)
  //  Else
  //    putInt(# of leading nulls)
  //    putInt(# of non-nulls)
  //    If # of non-nulls is > 0, putInt( # of trailing nulls)
  // The caller is expected to then write the non-null middle section itself.
  long putZA( Object[] A ) {
    if( A==null ) { putInt(-1); return 0; }
    int x=0; for( ; x<A.length; x++ ) if( A[x ]!=null ) break;
    int y=A.length; for( ; y>x; y-- ) if( A[y-1]!=null ) break;
    putInt(x); // Leading zeros to skip
    putInt(y-x); // Mixed non-zero guts in middle
    if( y > x ) // If any trailing nulls
      putInt(A.length-y); // Trailing zeros
    return ((long)x<<32)|(y-x); // Return both leading zeros, and middle non-zeros
  }
  // Get the lengths of a zero-compressed array.
  // Returns -1 if null.
  // Returns a long of (leading zeros | middle non-zeros).
  // If there are non-zeros, caller has to read the trailing zero-length.
  long getZA( ) {
    int x=getInt(); // Length of leading zeros
    if( x == -1 ) return -1; // or a null
    int nz=getInt(); // Non-zero in the middle
    return ((long)x<<32)|(long)nz; // Return both ints
  }
  // TODO: untested. . .
  // Write an enum array using the zero-compressed layout of putZA; each
  // non-null element is one ordinal byte (see putEnum).
  @SuppressWarnings("unused")
  public AutoBuffer putAEnum(Enum[] enums) {
    //_arys++;
    long xy = putZA(enums);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);   // Leading nulls (skipped on the wire)
    int y=(int)xy;         // Non-null middle section length
    for( int i=x; i<x+y; i++ ) putEnum(enums[i]);
    return this;
  }
  // Read back an enum array written by putAEnum; 'values' supplies the enum's
  // constants for ordinal lookup and the component type of the result array.
  @SuppressWarnings("unused")
  public <E extends Enum> E[] getAEnum(E[] values) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    E[] ts = (E[]) Array.newInstance(values.getClass().getComponentType(), x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getEnum(values);
    return ts;
  }
@SuppressWarnings("unused")
public AutoBuffer putA(Freezable[] fs) {
//_arys++;
long xy = putZA(fs);
if( xy == -1 ) return this;
int x=(int)(xy>>32);
int y=(int)xy;
for( int i=x; i<x+y; i++ ) put(fs[i]);
return this;
}
public AutoBuffer putAA(Freezable[][] fs) {
//_arys++;
long xy = putZA(fs);
if( xy == -1 ) return this;
int x=(int)(xy>>32);
int y=(int)xy;
for( int i=x; i<x+y; i++ ) putA(fs[i]);
return this;
}
@SuppressWarnings("unused") public AutoBuffer putAAA(Freezable[][][] fs) {
//_arys++;
long xy = putZA(fs);
if( xy == -1 ) return this;
int x=(int)(xy>>32);
int y=(int)xy;
for( int i=x; i<x+y; i++ ) putAA(fs[i]);
return this;
}
  /** Read a Freezable[] written by putA(): zero-compressed header, then each
   *  non-null middle element via get(tc).  Result arrays are built
   *  reflectively so the runtime component type matches tc. */
  public <T extends Freezable> T[] getA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    T[] ts = (T[]) Array.newInstance(tc, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = get(tc);
    return ts;
  }
  /** 2-D variant; builds the T[] component class first so the outer array has
   *  the correct runtime type. */
  public <T extends Freezable> T[][] getAA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    Class<T[]> tcA = (Class<T[]>) Array.newInstance(tc, 0).getClass();
    T[][] ts = (T[][]) Array.newInstance(tcA, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getA(tc);
    return ts;
  }
  /** 3-D variant of getA(). */
  @SuppressWarnings("unused") public <T extends Freezable> T[][][] getAAA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    Class<T[] > tcA = (Class<T[] >) Array.newInstance(tc , 0).getClass();
    Class<T[][]> tcAA = (Class<T[][]>) Array.newInstance(tcA, 0).getClass();
    T[][][] ts = (T[][][]) Array.newInstance(tcAA, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getAA(tc);
    return ts;
  }
  /** Write a String[] with the zero-compressed header of putZA; each non-null
   *  element via putStr() (length-prefixed UTF-8 bytes). */
  public AutoBuffer putAStr(String[] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putStr(fs[i]);
    return this;
  }
  /** Read back a String[] written by putAStr(). */
  public String[] getAStr() {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    String[] ts = new String[x+y+z];
    for( int i = x; i < x+y; ++i ) ts[i] = getStr();
    return ts;
  }
  /** 2-D String array writer; rows via putAStr(). */
  @SuppressWarnings("unused") public AutoBuffer putAAStr(String[][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAStr(fs[i]);
    return this;
  }
  /** 2-D String array reader; rows via getAStr(). */
  @SuppressWarnings("unused") public String[][] getAAStr() {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    String[][] ts = new String[x+y+z][];
    for( int i = x; i < x+y; ++i ) ts[i] = getAStr();
    return ts;
  }
// Read the smaller of _bb.remaining() and len into buf.
// Return bytes read, which could be zero.
int read( byte[] buf, int off, int len ) {
int sz = Math.min(_bb.remaining(),len);
_bb.get(buf,off,sz);
return sz;
}
  // -----------------------------------------------
  // Utility functions to handle common UDP packet tasks.
  // These peek the fixed UDP header layout at absolute offsets:
  //   byte 0: ctrl, bytes 1-2: sender port, bytes 3-6: task#, byte 7: flag.
  // Get the 1st control byte
  int getCtrl( ) { return getSz(1).get(0)&0xFF; }
  // Get the port in next 2 bytes
  int getPort( ) { return getSz(1+2).getChar(1); }
  // Get the task# in the next 4 bytes
  int getTask( ) { return getSz(1+2+4).getInt(1+2); }
  // Get the flag in the next 1 byte
  int getFlag( ) { return getSz(1+2+4+1).get(1+2+4); }
  /**
   * Write the UDP header into the ByteBuffer with a custom sender's port number.
   *
   * This method writes the ctrl byte and the sender's port.  (The task# is
   * NOT written here; see {@link #putTask}.)  Ready to write more bytes
   * afterwards.
   *
   * @param type type of the UDP datagram
   * @param senderPort port of the sender of the datagram
   */
  AutoBuffer putUdp(UDP.udp type, int senderPort){
    assert _bb.position() == 0;
    putSp(_bb.position()+1+2);
    _bb.put ((byte)type.ordinal());
    _bb.putChar((char)senderPort );
    return this;
  }
  /**
   * Write the UDP header into the ByteBuffer with the current node as the sender.
   *
   * This method writes the ctrl byte and this node's port.
   * Ready to write more bytes afterwards.
   *
   * @param type type of the UDP datagram
   */
  AutoBuffer putUdp (UDP.udp type) {
    return putUdp(type, H2O.H2O_PORT); // Outgoing port is always the sender's (me) port
  }
  // Write the full ctrl/port/task header for a tasked message.
  AutoBuffer putTask(UDP.udp type, int tasknum) {
    return putUdp(type).put4(tasknum);
  }
  // Same as above but with a raw ctrl byte instead of a UDP enum.
  AutoBuffer putTask(int ctrl, int tasknum) {
    assert _bb.position() == 0;
    putSp(_bb.position()+1+2+4);
    _bb.put((byte)ctrl).putChar((char)H2O.H2O_PORT).putInt(tasknum);
    return this;
  }
// -----------------------------------------------
// Utility functions to read & write arrays
public boolean[] getAZ() {
int len = getInt();
if (len == -1) return null;
boolean[] r = new boolean[len];
for (int i=0;i<len;++i) r[i] = getZ();
return r;
}
  /** Read a length-prefixed byte[]; a -1 length decodes as null. */
  public byte[] getA1( ) {
    //_arys++;
    int len = getInt();
    return len == -1 ? null : getA1(len);
  }
  /** Read exactly len raw bytes, draining the buffer in chunks and refilling
   *  via getSp() between chunks when len exceeds one buffer-full. */
  public byte[] getA1( int len ) {
    byte[] buf = MemoryManager.malloc1(len);
    int sofar = 0;
    while( sofar < len ) {
      int more = Math.min(_bb.remaining(), len - sofar);
      _bb.get(buf, sofar, more);
      sofar += more;
      if( sofar < len ) getSp(Math.min(_bb.capacity(), len-sofar));
    }
    return buf;
  }
  // Bulk readers for length-prefixed primitive arrays.  Each reads the element
  // count (-1 decodes as null), then drains the wire through a typed
  // ByteBuffer view in chunks, manually advancing _bb by view-position *
  // element-size and refilling via getSp() between chunks.  The capacity-N
  // cap on the refill request keeps the request a whole number of elements.
  public short[] getA2( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    short[] buf = MemoryManager.malloc2(len);
    int sofar = 0;
    while( sofar < buf.length ) {
      ShortBuffer as = _bb.asShortBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*2);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-1, (len-sofar)*2));
    }
    return buf;
  }
  public int[] getA4( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    int[] buf = MemoryManager.malloc4(len);
    int sofar = 0;
    while( sofar < buf.length ) {
      IntBuffer as = _bb.asIntBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*4);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-3, (len-sofar)*4));
    }
    return buf;
  }
  public float[] getA4f( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    float[] buf = MemoryManager.malloc4f(len);
    int sofar = 0;
    while( sofar < buf.length ) {
      FloatBuffer as = _bb.asFloatBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*4);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-3, (len-sofar)*4));
    }
    return buf;
  }
  /** Read a long[] written by putA8: zero-compressed (leading / middle /
   *  trailing lengths), then a 1-byte width tag (1, 2, 4 or 8) saying how the
   *  non-zero middle section was size-trimmed on the wire. */
  public long[] getA8( ) {
    //_arys++;
    // Get the lengths of lead & trailing zero sections, and the non-zero
    // middle section.
    int x = getInt(); if( x == -1 ) return null;
    int y = getInt(); // Non-zero in the middle
    int z = y==0 ? 0 : getInt();// Trailing zeros
    long[] buf = MemoryManager.malloc8(x+y+z);
    switch( get1U() ) { // 1,2,4 or 8 for how the middle section is passed
    case 1: for( int i=x; i<x+y; i++ ) buf[i] = get1U(); return buf;
    case 2: for( int i=x; i<x+y; i++ ) buf[i] = (short)get2(); return buf;
    case 4: for( int i=x; i<x+y; i++ ) buf[i] = get4(); return buf;
    case 8: break;
    default: throw H2O.fail();
    }
    // Full 8-byte elements: bulk-drain through a LongBuffer view in chunks
    int sofar = x;
    while( sofar < x+y ) {
      LongBuffer as = _bb.asLongBuffer();
      int more = Math.min(as.remaining(), x+y - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*8);
      if( sofar < x+y ) getSp(Math.min(_bb.capacity()-7, (x+y-sofar)*8));
    }
    return buf;
  }
  /** Read a length-prefixed double[]; -1 length decodes as null. */
  public double[] getA8d( ) {
    //_arys++;
    int len = getInt(); if( len == -1 ) return null;
    double[] buf = MemoryManager.malloc8d(len);
    int sofar = 0;
    while( sofar < len ) {
      DoubleBuffer as = _bb.asDoubleBuffer();
      int more = Math.min(as.remaining(), len - sofar);
      as.get(buf, sofar, more);
      sofar += more;
      _bb.position(_bb.position() + as.position()*8);
      if( sofar < len ) getSp(Math.min(_bb.capacity()-7, (len-sofar)*8));
    }
    return buf;
  }
  // Nested-array readers.  Each decodes the zero-compressed outer dimension
  // (leading nulls / non-null middle / trailing nulls) via getZA, then reads
  // each non-null row with the matching 1-D reader.
  @SuppressWarnings("unused")
  public byte[][] getAA1( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    byte[][] ary = new byte[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA1();
    return ary;
  }
  @SuppressWarnings("unused")
  public short[][] getAA2( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    short[][] ary = new short[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA2();
    return ary;
  }
  public int[][] getAA4( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    int[][] ary = new int[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA4();
    return ary;
  }
  @SuppressWarnings("unused") public float[][] getAA4f( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    float[][] ary = new float[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA4f();
    return ary;
  }
  public long[][] getAA8( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    long[][] ary = new long[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA8();
    return ary;
  }
  // More nested-array readers; same zero-compressed outer layout as above.
  @SuppressWarnings("unused") public double[][] getAA8d( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    double[][] ary = new double[x+y+z][];
    for( int i=x; i<x+y; i++ ) ary[i] = getA8d();
    return ary;
  }
  @SuppressWarnings("unused") public int[][][] getAAA4( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    int[][][] ary = new int[x+y+z][][];
    for( int i=x; i<x+y; i++ ) ary[i] = getAA4();
    return ary;
  }
  @SuppressWarnings("unused") public long[][][] getAAA8( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    long[][][] ary = new long[x+y+z][][];
    for( int i=x; i<x+y; i++ ) ary[i] = getAA8();
    return ary;
  }
  public double[][][] getAAA8d( ) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    double[][][] ary = new double[x+y+z][][];
    for( int i=x; i<x+y; i++ ) ary[i] = getAA8d();
    return ary;
  }
  /** Read a length-prefixed UTF-8 string; a -1 length decodes as null. */
  public String getStr( ) {
    int len = getInt();
    return len == -1 ? null : new String(getA1(len), UTF_8);
  }
  /** Read an enum by ordinal (single signed byte); -1 decodes as null.
   *  NOTE(review): since the ordinal travels as one signed byte, enums with
   *  more than 127 constants cannot round-trip through putEnum/getEnum --
   *  confirm all serialized enums stay small. */
  public <E extends Enum> E getEnum(E[] values ) {
    int idx = get1();
    return idx == -1 ? null : values[idx];
  }
public AutoBuffer putAZ( boolean[] ary ) {
if( ary == null ) return putInt(-1);
putInt(ary.length);
for (boolean anAry : ary) putZ(anAry);
return this;
}
  /** Write a byte[] as a length prefix plus raw bytes; null encodes as -1. */
  public AutoBuffer putA1( byte[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    return putA1(ary,ary.length);
  }
  /** Write ary[0..length) raw (no length prefix). */
  public AutoBuffer putA1( byte[] ary, int length ) { return putA1(ary,0,length); }
  /** Write ary[sofar..length) raw, growing the buffer or flushing partial
   *  chunks via sendPartial() as needed. */
  public AutoBuffer putA1( byte[] ary, int sofar, int length ) {
    if (length - sofar > _bb.remaining()) expandByteBuffer(length-sofar);
    while( sofar < length ) {
      int len = Math.min(length - sofar, _bb.remaining());
      _bb.put(ary, sofar, len);
      sofar += len;
      if( sofar < length ) sendPartial();
    }
    return this;
  }
  // Bulk writers for length-prefixed primitive arrays.  Each writes the count
  // (-1 for null), then copies elements through a typed ByteBuffer view in
  // chunks, manually advancing _bb by view-position * element-size and
  // flushing via sendPartial() between chunks.
  AutoBuffer putA2( short[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    if (ary.length*2 > _bb.remaining()) expandByteBuffer(ary.length*2);
    int sofar = 0;
    while( sofar < ary.length ) {
      ShortBuffer sb = _bb.asShortBuffer();
      int len = Math.min(ary.length - sofar, sb.remaining());
      sb.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + sb.position()*2);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  public AutoBuffer putA4( int[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    // Note: based on Brandon commit this should improve performance during parse (7d950d622ee3037555ecbab0e39404f8f0917652)
    if (ary.length*4 > _bb.remaining()) {
      expandByteBuffer(ary.length*4); // Try to expand BB buffer to fit input array
    }
    int sofar = 0;
    while( sofar < ary.length ) {
      IntBuffer ib = _bb.asIntBuffer();
      int len = Math.min(ary.length - sofar, ib.remaining());
      ib.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + ib.position()*4);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  /** Write a long[] zero-compressed AND width-compressed: leading/trailing
   *  zero runs are sent as counts only, and the non-zero middle section is
   *  shipped as unsigned bytes, shorts, ints or full longs depending on its
   *  value range (a 1-byte width tag precedes the data; see getA8).
   *  NOTE: the {@code max < MAX_VALUE} comparisons are strict, so a max value
   *  exactly equal to e.g. Short.MAX_VALUE ships in the next wider format --
   *  conservative but correct. */
  public AutoBuffer putA8( long[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    // Trim leading & trailing zeros. Pass along the length of leading &
    // trailing zero sections, and the non-zero section in the middle.
    int x=0; for( ; x<ary.length; x++ ) if( ary[x ]!=0 ) break;
    int y=ary.length; for( ; y>x; y-- ) if( ary[y-1]!=0 ) break;
    int nzlen = y-x;
    putInt(x);
    putInt(nzlen);
    if( nzlen > 0 ) // If any trailing nulls
      putInt(ary.length-y); // Trailing zeros
    // Size trim the NZ section: pass as bytes or shorts if possible.
    long min=Long.MAX_VALUE, max=Long.MIN_VALUE;
    for( int i=x; i<y; i++ ) { if( ary[i]<min ) min=ary[i]; if( ary[i]>max ) max=ary[i]; }
    if( 0 <= min && max < 256 ) { // Ship as unsigned bytes
      put1(1); for( int i=x; i<y; i++ ) put1((int)ary[i]);
      return this;
    }
    if( Short.MIN_VALUE <= min && max < Short.MAX_VALUE ) { // Ship as shorts
      put1(2); for( int i=x; i<y; i++ ) put2((short)ary[i]);
      return this;
    }
    if( Integer.MIN_VALUE <= min && max < Integer.MAX_VALUE ) { // Ship as ints
      put1(4); for( int i=x; i<y; i++ ) put4((int)ary[i]);
      return this;
    }
    put1(8); // Ship as full longs
    int sofar = x;
    if ((y-sofar)*8 > _bb.remaining()) expandByteBuffer(ary.length*8);
    while( sofar < y ) {
      LongBuffer lb = _bb.asLongBuffer();
      int len = Math.min(y - sofar, lb.remaining());
      lb.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + lb.position() * 8);
      if( sofar < y ) sendPartial();
    }
    return this;
  }
  /** Write a length-prefixed float[] through a FloatBuffer view in chunks;
   *  null encodes as a -1 length. */
  public AutoBuffer putA4f( float[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    if (ary.length*4 > _bb.remaining()) expandByteBuffer(ary.length*4);
    int sofar = 0;
    while( sofar < ary.length ) {
      FloatBuffer fb = _bb.asFloatBuffer();
      int len = Math.min(ary.length - sofar, fb.remaining());
      fb.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + fb.position()*4);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  /** Write a length-prefixed double[] through a DoubleBuffer view in chunks;
   *  null encodes as a -1 length. */
  public AutoBuffer putA8d( double[] ary ) {
    //_arys++;
    if( ary == null ) return putInt(-1);
    putInt(ary.length);
    if (ary.length*8 > _bb.remaining()) expandByteBuffer(ary.length*8);
    int sofar = 0;
    while( sofar < ary.length ) {
      DoubleBuffer db = _bb.asDoubleBuffer();
      int len = Math.min(ary.length - sofar, db.remaining());
      db.put(ary, sofar, len);
      sofar += len;
      _bb.position(_bb.position() + db.position()*8);
      if( sofar < ary.length ) sendPartial();
    }
    return this;
  }
  // Nested-array writers.  Each writes the zero-compressed outer dimension
  // via putZA, then each non-null row with the matching 1-D writer.
  public AutoBuffer putAA1( byte[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA1(ary[i]);
    return this;
  }
  @SuppressWarnings("unused") AutoBuffer putAA2( short[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA2(ary[i]);
    return this;
  }
  public AutoBuffer putAA4( int[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA4(ary[i]);
    return this;
  }
  @SuppressWarnings("unused")
  public AutoBuffer putAA4f( float[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA4f(ary[i]);
    return this;
  }
  // More nested-array writers; same zero-compressed outer layout as above.
  public AutoBuffer putAA8( long[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA8(ary[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAA8d( double[][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA8d(ary[i]);
    return this;
  }
  public AutoBuffer putAAA4( int[][][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA4(ary[i]);
    return this;
  }
  public AutoBuffer putAAA8( long[][][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA8(ary[i]);
    return this;
  }
  public AutoBuffer putAAA8d( double[][][] ary ) {
    //_arys++;
    long xy = putZA(ary);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA8d(ary[i]);
    return this;
  }
  // Put a String as bytes (not chars!)
  // Written as a length-prefixed UTF-8 byte array; null encodes as -1.
  public AutoBuffer putStr( String s ) {
    if( s==null ) return putInt(-1);
    return putA1(StringUtils.bytesOf(s));
  }
  // Write an enum as a single signed ordinal byte; null encodes as -1.
  // See getEnum for the (127-constant) round-trip limitation.
  @SuppressWarnings("unused") public AutoBuffer putEnum( Enum x ) {
    return put1(x==null ? -1 : x.ordinal());
  }
public static byte[] javaSerializeWritePojo(Object o) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ObjectOutputStream out = null;
try {
out = new ObjectOutputStream(bos);
out.writeObject(o);
out.close();
return bos.toByteArray();
} catch (IOException e) {
throw Log.throwErr(e);
}
}
public static Object javaSerializeReadPojo(byte [] bytes) {
try {
final ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
Object o = ois.readObject();
return o;
} catch (IOException e) {
String className = nameOfClass(bytes);
throw Log.throwErr(new RuntimeException("Failed to deserialize " + className, e));
} catch (ClassNotFoundException e) {
throw Log.throwErr(e);
}
}
static String nameOfClass(byte[] bytes) {
if (bytes == null) return "(null)";
if (bytes.length < 11) return "(no name)";
int nameSize = Math.min(40, Math.max(3, bytes[7]));
return new String(bytes, 8, Math.min(nameSize, bytes.length - 8));
}
// ==========================================================================
// Java Serializable objects
// Note: These are heck-a-lot more expensive than their Freezable equivalents.
@SuppressWarnings("unused") public AutoBuffer putSer( Object obj ) {
if (obj == null) return putA1(null);
return putA1(javaSerializeWritePojo(obj));
}
  // Java-serialized array writers: zero-compressed outer layout via putZA,
  // each non-null element written with putSer (expensive; prefer Freezables).
  @SuppressWarnings("unused") public AutoBuffer putASer(Object[] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putSer(fs[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAASer(Object[][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putASer(fs[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAAASer(Object[][][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAASer(fs[i]);
    return this;
  }
@SuppressWarnings("unused") public Object getSer() {
byte[] ba = getA1();
return ba == null ? null : javaSerializeReadPojo(ba);
}
@SuppressWarnings("unused") public <T> T getSer(Class<T> tc) {
return (T)getSer();
}
  /** Read back an Object[] written by putASer: zero-compressed outer layout,
   *  elements deserialized via getSer(tc); the result array is built
   *  reflectively with component type tc. */
  @SuppressWarnings("unused") public <T> T[] getASer(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32); // Leading nulls
    int y=(int)xy; // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    T[] ts = (T[]) Array.newInstance(tc, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getSer(tc);
    return ts;
  }
@SuppressWarnings("unused") public <T> T[][] getAASer(Class<T> tc) {
//_arys++;
long xy = getZA();
if( xy == -1 ) return null;
int x=(int)(xy>>32); // Leading nulls
int y=(int)xy; // Middle non-zeros
int z = y==0 ? 0 : getInt(); // Trailing nulls
T[][] ts = (T[][]) Array.newInstance(tc, x+y+z);
for( int i = x; i < x+y; ++i ) ts[i] = getASer(tc);
return ts;
}
@SuppressWarnings("unused") public <T> T[][][] getAAASer(Class<T> tc) {
//_arys++;
long xy = getZA();
if( xy == -1 ) return null;
int x=(int)(xy>>32); // Leading nulls
int y=(int)xy; // Middle non-zeros
int z = y==0 ? 0 : getInt(); // Trailing nulls
T[][][] ts = (T[][][]) Array.newInstance(tc, x+y+z);
for( int i = x; i < x+y; ++i ) ts[i] = getAASer(tc);
return ts;
}
  // ==========================================================================
  // JSON AutoBuffer printers
  // Write the literal JSON token 'null' (four raw bytes).
  public AutoBuffer putJNULL( ) { return put1('n').put1('u').put1('l').put1('l'); }
// Escaped JSON string
private AutoBuffer putJStr( String s ) {
byte[] b = StringUtils.bytesOf(s);
int off=0;
for( int i=0; i<b.length; i++ ) {
if( b[i] == '\\' || b[i] == '"') { // Double up backslashes, escape quotes
putA1(b,off,i); // Everything so far (no backslashes)
put1('\\'); // The extra backslash
off=i; // Advance the "so far" variable
}
// Handle remaining special cases in JSON
// if( b[i] == '/' ) { putA1(b,off,i); put1('\\'); put1('/'); off=i+1; continue;}
if( b[i] == '\b' ) { putA1(b,off,i); put1('\\'); put1('b'); off=i+1; continue;}
if( b[i] == '\f' ) { putA1(b,off,i); put1('\\'); put1('f'); off=i+1; continue;}
if( b[i] == '\n' ) { putA1(b,off,i); put1('\\'); put1('n'); off=i+1; continue;}
if( b[i] == '\r' ) { putA1(b,off,i); put1('\\'); put1('r'); off=i+1; continue;}
if( b[i] == '\t' ) { putA1(b,off,i); put1('\\'); put1('t'); off=i+1; continue;}
// ASCII Control characters
if( b[i] == 127 ) { putA1(b,off,i); put1('\\'); put1('u'); put1('0'); put1('0'); put1('7'); put1('f'); off=i+1; continue;}
if( b[i] >= 0 && b[i] < 32 ) {
String hexStr = Integer.toHexString(b[i]);
putA1(b, off, i); put1('\\'); put1('u');
for (int j = 0; j < 4 - hexStr.length(); j++) put1('0');
for (int j = 0; j < hexStr.length(); j++) put1(hexStr.charAt(hexStr.length()-j-1));
off=i+1;
}
}
return putA1(b,off,b.length);
}
public AutoBuffer putJSONStrUnquoted ( String s ) { return s==null ? putJNULL() : putJStr(s); }
public AutoBuffer putJSONStrUnquoted ( String name, String s ) { return s==null ? putJSONStr(name).put1(':').putJNULL() : putJSONStr(name).put1(':').putJStr(s); }
public AutoBuffer putJSONName( String s ) { return put1('"').putJStr(s).put1('"'); }
public AutoBuffer putJSONStr ( String s ) { return s==null ? putJNULL() : putJSONName(s); }
public AutoBuffer putJSONAStr(String[] ss) {
if( ss == null ) return putJNULL();
put1('[');
for( int i=0; i<ss.length; i++ ) {
if( i>0 ) put1(',');
putJSONStr(ss[i]);
}
return put1(']');
}
private AutoBuffer putJSONAAStr( String[][] sss) {
if( sss == null ) return putJNULL();
put1('[');
for( int i=0; i<sss.length; i++ ) {
if( i>0 ) put1(',');
putJSONAStr(sss[i]);
}
return put1(']');
}
@SuppressWarnings("unused") public AutoBuffer putJSONStr (String name, String s ) { return putJSONStr(name).put1(':').putJSONStr(s); }
@SuppressWarnings("unused") public AutoBuffer putJSONAStr (String name, String[] ss ) { return putJSONStr(name).put1(':').putJSONAStr(ss); }
@SuppressWarnings("unused") public AutoBuffer putJSONAAStr(String name, String[][]sss) { return putJSONStr(name).put1(':').putJSONAAStr(sss); }
@SuppressWarnings("unused") public AutoBuffer putJSONSer (String name, Object o ) { return putJSONStr(name).put1(':').putJNULL(); }
@SuppressWarnings("unused") public AutoBuffer putJSONASer (String name, Object[] oo ) { return putJSONStr(name).put1(':').putJNULL(); }
@SuppressWarnings("unused") public AutoBuffer putJSONAASer (String name, Object[][] ooo ) { return putJSONStr(name).put1(':').putJNULL(); }
@SuppressWarnings("unused") public AutoBuffer putJSONAAASer(String name, Object[][][] oooo) { return putJSONStr(name).put1(':').putJNULL(); }
public AutoBuffer putJSONAZ( String name, boolean[] f) { return putJSONStr(name).put1(':').putJSONAZ(f); }
public AutoBuffer putJSON(Freezable ice) { return ice == null ? putJNULL() : ice.writeJSON(this); }
public AutoBuffer putJSONA( Freezable fs[] ) {
if( fs == null ) return putJNULL();
put1('[');
for( int i=0; i<fs.length; i++ ) {
if( i>0 ) put1(',');
putJSON(fs[i]);
}
return put1(']');
}
public AutoBuffer putJSONAA( Freezable fs[][]) {
if( fs == null ) return putJNULL();
put1('[');
for( int i=0; i<fs.length; i++ ) {
if( i>0 ) put1(',');
putJSONA(fs[i]);
}
return put1(']');
}
public AutoBuffer putJSONAAA( Freezable fs[][][]) {
if( fs == null ) return putJNULL();
put1('[');
for( int i=0; i<fs.length; i++ ) {
if( i>0 ) put1(',');
putJSONAA(fs[i]);
}
return put1(']');
}
@SuppressWarnings("unused") public AutoBuffer putJSON ( String name, Freezable f ) { return putJSONStr(name).put1(':').putJSON (f); }
public AutoBuffer putJSONA ( String name, Freezable f[] ) { return putJSONStr(name).put1(':').putJSONA (f); }
@SuppressWarnings("unused") public AutoBuffer putJSONAA( String name, Freezable f[][]){ return putJSONStr(name).put1(':').putJSONAA(f); }
@SuppressWarnings("unused") public AutoBuffer putJSONAAA( String name, Freezable f[][][]){ return putJSONStr(name).put1(':').putJSONAAA(f); }
@SuppressWarnings("unused") public AutoBuffer putJSONZ( String name, boolean value ) { return putJSONStr(name).put1(':').putJStr("" + value); }
private AutoBuffer putJSONAZ(boolean [] b) {
if (b == null) return putJNULL();
put1('[');
for( int i = 0; i < b.length; ++i) {
if (i > 0) put1(',');
putJStr(""+b[i]);
}
return put1(']');
}
// Most simple integers
private AutoBuffer putJInt( int i ) {
byte b[] = StringUtils.toBytes(i);
return putA1(b,b.length);
}
public AutoBuffer putJSON1( byte b ) { return putJInt(b); }
public AutoBuffer putJSONA1( byte ary[] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSON1(ary[i]);
}
return put1(']');
}
private AutoBuffer putJSONAA1(byte ary[][]) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSONA1(ary[i]);
}
return put1(']');
}
@SuppressWarnings("unused") public AutoBuffer putJSON1 (String name, byte b ) { return putJSONStr(name).put1(':').putJSON1(b); }
@SuppressWarnings("unused") public AutoBuffer putJSONA1 (String name, byte b[] ) { return putJSONStr(name).put1(':').putJSONA1(b); }
@SuppressWarnings("unused") public AutoBuffer putJSONAA1(String name, byte b[][]) { return putJSONStr(name).put1(':').putJSONAA1(b); }
public AutoBuffer putJSONAEnum(String name, Enum[] enums) {
return putJSONStr(name).put1(':').putJSONAEnum(enums);
}
public AutoBuffer putJSONAEnum( Enum[] enums ) {
if( enums == null ) return putJNULL();
put1('[');
for( int i=0; i<enums.length; i++ ) {
if( i>0 ) put1(',');
putJSONEnum(enums[i]);
}
return put1(']');
}
AutoBuffer putJSON2( char c ) { return putJSON4(c); }
AutoBuffer putJSON2( String name, char c ) { return putJSONStr(name).put1(':').putJSON2(c); }
AutoBuffer putJSON2( short c ) { return putJSON4(c); }
AutoBuffer putJSON2( String name, short c ) { return putJSONStr(name).put1(':').putJSON2(c); }
public AutoBuffer putJSONA2( String name, short ary[] ) { return putJSONStr(name).put1(':').putJSONA2(ary); }
AutoBuffer putJSONA2( short ary[] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSON2(ary[i]);
}
return put1(']');
}
AutoBuffer putJSON8 ( long l ) { return putJStr(Long.toString(l)); }
AutoBuffer putJSONA8( long ary[] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSON8(ary[i]);
}
return put1(']');
}
AutoBuffer putJSONAA8( long ary[][] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSONA8(ary[i]);
}
return put1(']');
}
AutoBuffer putJSONAAA8( long ary[][][] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSONAA8(ary[i]);
}
return put1(']');
}
AutoBuffer putJSONEnum( Enum e ) {
return e==null ? putJNULL() : put1('"').putJStr(e.toString()).put1('"');
}
public AutoBuffer putJSON8 ( String name, long l ) { return putJSONStr(name).put1(':').putJSON8(l); }
public AutoBuffer putJSONEnum( String name, Enum e ) { return putJSONStr(name).put1(':').putJSONEnum(e); }
public AutoBuffer putJSONA8( String name, long ary[] ) { return putJSONStr(name).put1(':').putJSONA8(ary); }
public AutoBuffer putJSONAA8( String name, long ary[][] ) { return putJSONStr(name).put1(':').putJSONAA8(ary); }
public AutoBuffer putJSONAAA8( String name, long ary[][][] ) { return putJSONStr(name).put1(':').putJSONAAA8(ary); }
public AutoBuffer putJSON4(int i) { return putJStr(Integer.toString(i)); }
AutoBuffer putJSONA4( int[] a) {
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSON4(a[i]);
}
return put1(']');
}
AutoBuffer putJSONAA4( int[][] a ) {
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSONA4(a[i]);
}
return put1(']');
}
AutoBuffer putJSONAAA4( int[][][] a ) {
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSONAA4(a[i]);
}
return put1(']');
}
public AutoBuffer putJSON4 ( String name, int i ) { return putJSONStr(name).put1(':').putJSON4(i); }
public AutoBuffer putJSONA4( String name, int[] a) { return putJSONStr(name).put1(':').putJSONA4(a); }
public AutoBuffer putJSONAA4( String name, int[][] a ) { return putJSONStr(name).put1(':').putJSONAA4(a); }
public AutoBuffer putJSONAAA4( String name, int[][][] a ) { return putJSONStr(name).put1(':').putJSONAAA4(a); }
AutoBuffer putJSON4f ( float f ) { return f==Float.POSITIVE_INFINITY?putJSONStr(JSON_POS_INF):(f==Float.NEGATIVE_INFINITY?putJSONStr(JSON_NEG_INF):(Float.isNaN(f)?putJSONStr(JSON_NAN):putJStr(Float .toString(f)))); }
public AutoBuffer putJSON4f ( String name, float f ) { return putJSONStr(name).put1(':').putJSON4f(f); }
AutoBuffer putJSONA4f( float[] a ) {
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSON4f(a[i]);
}
return put1(']');
}
public AutoBuffer putJSONA4f(String name, float[] a) {
putJSONStr(name).put1(':');
return putJSONA4f(a);
}
AutoBuffer putJSONAA4f(String name, float[][] a) {
putJSONStr(name).put1(':');
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSONA4f(a[i]);
}
return put1(']');
}
AutoBuffer putJSON8d( double d ) {
if (TwoDimTable.isEmpty(d)) return putJNULL();
return d==Double.POSITIVE_INFINITY?putJSONStr(JSON_POS_INF):(d==Double.NEGATIVE_INFINITY?putJSONStr(JSON_NEG_INF):(Double.isNaN(d)?putJSONStr(JSON_NAN):putJStr(Double.toString(d))));
}
public AutoBuffer putJSON8d( String name, double d ) { return putJSONStr(name).put1(':').putJSON8d(d); }
public AutoBuffer putJSONA8d( String name, double[] a ) {
return putJSONStr(name).put1(':').putJSONA8d(a);
}
public AutoBuffer putJSONAA8d( String name, double[][] a) {
return putJSONStr(name).put1(':').putJSONAA8d(a);
}
public AutoBuffer putJSONAAA8d( String name, double[][][] a) { return putJSONStr(name).put1(':').putJSONAAA8d(a); }
public AutoBuffer putJSONA8d( double[] a ) {
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSON8d(a[i]);
}
return put1(']');
}
public AutoBuffer putJSONAA8d( double[][] a ) {
if( a == null ) return putJNULL();
put1('[');
for( int i=0; i<a.length; i++ ) {
if( i>0 ) put1(',');
putJSONA8d(a[i]);
}
return put1(']');
}
AutoBuffer putJSONAAA8d( double ary[][][] ) {
if( ary == null ) return putJNULL();
put1('[');
for( int i=0; i<ary.length; i++ ) {
if( i>0 ) put1(',');
putJSONAA8d(ary[i]);
}
return put1(']');
}
static final String JSON_NAN = "NaN";
static final String JSON_POS_INF = "Infinity";
static final String JSON_NEG_INF = "-Infinity";
}
| Add a system property to ignore version of deserialized objects (models)
| h2o-core/src/main/java/water/AutoBuffer.java | Add a system property to ignore version of deserialized objects (models) | <ide><path>2o-core/src/main/java/water/AutoBuffer.java
<ide> import water.util.Log;
<ide> import water.util.StringUtils;
<ide> import water.util.TwoDimTable;
<add>
<add>import static water.H2O.OptArgs.SYSTEM_PROP_PREFIX;
<ide>
<ide> /** A ByteBuffer backed mixed Input/Output streaming class, using Iced serialization.
<ide> *
<ide> // to mimic the behavior of OpenJDK libraries)
<ide> private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
<ide>
<add> private static String H2O_SYSTEM_SERIALIZATION_IGNORE_VERSION = SYSTEM_PROP_PREFIX + "serialization.ignore.version";
<add>
<ide> // The direct ByteBuffer for schlorping data about.
<ide> // Set to null to indicate the AutoBuffer is closed.
<ide> ByteBuffer _bb;
<ide> if( b==0 ) return; // No persistence info
<ide> int magic = get1U();
<ide> if( b!=0x1C || magic != 0xED ) throw new IllegalArgumentException("Missing magic number 0x1CED at stream start");
<del> String version = getStr();
<del> if( !version.equals(H2O.ABV.projectVersion()) )
<del> throw new IllegalArgumentException("Found version "+version+", but running version "+H2O.ABV.projectVersion());
<add> checkVersion(getStr());
<ide> String[] typeMap = getAStr();
<ide> _typeMap = new short[typeMap.length];
<ide> for( int i=0; i<typeMap.length; i++ )
<ide> _typeMap[i] = (short)(typeMap[i]==null ? 0 : TypeMap.onIce(typeMap[i]));
<ide> }
<ide>
<add> private void checkVersion(String version) {
<add> final boolean ignoreVersion = Boolean.getBoolean(H2O_SYSTEM_SERIALIZATION_IGNORE_VERSION);
<add> if (! version.equals(H2O.ABV.projectVersion())) {
<add> String msg = "Found version "+version+", but running version "+H2O.ABV.projectVersion();
<add> if (ignoreVersion)
<add> Log.warn("Loading data from a different version! " + msg);
<add> else
<add> throw new IllegalArgumentException(msg);
<add> }
<add> }
<ide>
<ide> @Override public String toString() {
<ide> StringBuilder sb = new StringBuilder(); |
|
JavaScript | mit | 7e2588171e5bb73a8993975e7951eb7a520a51fa | 0 | iuap-design/neoui-kero,iuap-design/neoui-kero | /**
* Module : Kero webpack entry index
* Author : Kvkens([email protected])
* Date : 2016-08-09 09:52:13
*/
import {BaseAdapter} from '../core/baseAdapter';
import {ValueMixin} from '../core/valueMixin';
import {EnableMixin} from '../core/enableMixin';
import {RequiredMixin} from '../core/requiredMixin';
import {ValidateMixin} from '../core/validateMixin';
import {getJSObject} from 'neoui-sparrow/js/util';
import {Combo} from 'neoui/js/neoui-combo';
import {env} from 'neoui-sparrow/js/env';
import {on,off,stopEvent} from 'neoui-sparrow/js/event';
import {addClass,removeClass} from 'neoui-sparrow/js/dom';
import {compMgr} from 'neoui-sparrow/js/compMgr';
var ComboboxAdapter = BaseAdapter.extend({
mixins:[ValueMixin,EnableMixin, RequiredMixin, ValidateMixin],
init: function () {
var self = this;
//ComboboxAdapter.superclass.initialize.apply(this, arguments);
this.datasource = getJSObject(this.viewModel, this.options['datasource']);
this.mutil = this.options.mutil || false;
this.onlySelect = this.options.onlySelect || false;
this.showFix = this.options.showFix || false;
this.validType = 'combobox';
this.isAutoTip = this.options.isAutoTip || false;
if(!this.element['u.Combo']) {
this.comp = new u.Combo({el:this.element,mutilSelect:this.mutil,onlySelect:this.onlySelect,showFix:this.showFix,isAutoTip:this.isAutoTip});
this.element['u.Combo'] = this.comp;
} else {
this.comp = this.element['u.Combo']
}
var isDsObservable = ko.isObservable(this.datasource);
if (this.datasource){
this.comp.setComboData(isDsObservable ? ko.toJS(this.datasource) : this.datasource);
}else{
if(u.isIE8 || u.isIE9)
alert("IE8/IE9必须设置datasource");
}
if(isDsObservable) {
// datasource 发生变化时改变控件
this.datasource.subscribe(function(value) {
self.comp.setComboData(value);
});
}
////TODO 后续支持多选
//if (this.mutil) {
// //$(this.comboEle).on("mutilSelect", function (event, value) {
// // self.setValue(value)
// //})
//}
this.comp.on('select', function(event){
// self.slice = true;
// if(self.dataModel)
// self.dataModel.setValue(self.field, event.value);
// self.slice = false;
self.setValue(event.value);
});
//if(this.dataModel){
// this.dataModel.ref(this.field).subscribe(function(value) {
// self.modelValueChange(value)
// })
//}
},
modelValueChange: function (value) {
if (this.slice) return;
//this.trueValue = value;
if (value === null || typeof value == "undefined")
value = "";
this.comp.setValue(value);
// this.trueValue = this.formater ? this.formater.format(value) : value;
// this.element.trueValue = this.trueValue;
//下面两句会在校验中用到
this.trueValue = this.formater ? this.formater.format(value) : value;
this.element.trueValue = this.trueValue;
// this.showValue = this.masker ? this.masker.format(this.trueValue).value : this.trueValue;
// this.setShowValue(this.showValue);
},
//setValue: function (value) {
// this.trueValue = value;
// this.slice = true;
// this.setModelValue(this.trueValue);
// this.slice = false;
//},
//getValue: function () {
// return this.trueValue
//},
setEnable: function (enable) {
var self = this;
if (enable === true || enable === 'true') {
this.enable = true;
this.element.removeAttribute('readonly');
this.comp._input.removeAttribute('readonly');
removeClass(this.element.parentNode,'disablecover');
on(this.comp._input, 'focus', function (e) {
self.comp.show(e);
stopEvent(e);
})
if (this.comp.iconBtn){
on(this.comp.iconBtn, 'click', function(e){
self.comp.show(e);
stopEvent(e);
})
}
} else if (enable === false || enable === 'false') {
this.enable = false;
this.element.setAttribute('readonly', 'readonly');
this.comp._input.setAttribute('readonly', 'readonly');
addClass(this.element.parentNode,'disablecover');
off(this.comp._input, 'focus')
if (this.comp.iconBtn){
off(this.comp.iconBtn, 'click')
}
}
}
});
compMgr.addDataAdapter(
{
adapter: ComboboxAdapter,
name: 'u-combobox'
});
export {ComboboxAdapter};
| js/component/keroa-combo.js | /**
* Module : Kero webpack entry index
* Author : Kvkens([email protected])
* Date : 2016-08-09 09:52:13
*/
import {BaseAdapter} from '../core/baseAdapter';
import {ValueMixin} from '../core/valueMixin';
import {EnableMixin} from '../core/enableMixin';
import {RequiredMixin} from '../core/requiredMixin';
import {ValidateMixin} from '../core/validateMixin';
import {getJSObject} from 'neoui-sparrow/js/util';
import {Combo} from 'neoui/js/neoui-combo';
import {env} from 'neoui-sparrow/js/env';
import {on,off,stopEvent} from 'neoui-sparrow/js/event';
import {addClass,removeClass} from 'neoui-sparrow/js/dom';
import {compMgr} from 'neoui-sparrow/js/compMgr';
var ComboboxAdapter = BaseAdapter.extend({
mixins:[ValueMixin,EnableMixin, RequiredMixin, ValidateMixin],
init: function () {
var self = this;
//ComboboxAdapter.superclass.initialize.apply(this, arguments);
this.datasource = getJSObject(this.viewModel, this.options['datasource']);
this.mutil = this.options.mutil || false;
this.onlySelect = this.options.onlySelect || false;
this.showFix = this.options.showFix || false;
this.validType = 'combobox';
this.isAutoTip = this.options.isAutoTip || false;
if(!this.element['u.Combo']) {
this.comp = new u.Combo({el:this.element,mutilSelect:this.mutil,onlySelect:this.onlySelect,showFix:this.showFix,isAutoTip:this.isAutoTip});
this.element['u.Combo'] = this.comp;
} else {
this.comp = this.element['u.Combo']
}
var isDsObservable = ko.isObservable(this.datasource);
if (this.datasource){
this.comp.setComboData(isDsObservable ? ko.toJS(this.datasource) : this.datasource);
}else{
if(u.isIE8 || u.isIE9)
alert("IE8/IE9必须设置datasource");
}
if(isDsObservable) {
// datasource 发生变化时改变控件
this.datasource.subscribe(function(value) {
self.comp.setComboData(value);
});
}
////TODO 后续支持多选
//if (this.mutil) {
// //$(this.comboEle).on("mutilSelect", function (event, value) {
// // self.setValue(value)
// //})
//}
this.comp.on('select', function(event){
// self.slice = true;
// if(self.dataModel)
// self.dataModel.setValue(self.field, event.value);
// self.slice = false;
self.setValue(event.name);
});
//if(this.dataModel){
// this.dataModel.ref(this.field).subscribe(function(value) {
// self.modelValueChange(value)
// })
//}
},
modelValueChange: function (value) {
if (this.slice) return;
//this.trueValue = value;
if (value === null || typeof value == "undefined")
value = "";
this.comp.setValue(value);
// this.trueValue = this.formater ? this.formater.format(value) : value;
// this.element.trueValue = this.trueValue;
//下面两句会在校验中用到
this.trueValue = this.formater ? this.formater.format(value) : value;
this.element.trueValue = this.trueValue;
// this.showValue = this.masker ? this.masker.format(this.trueValue).value : this.trueValue;
// this.setShowValue(this.showValue);
},
//setValue: function (value) {
// this.trueValue = value;
// this.slice = true;
// this.setModelValue(this.trueValue);
// this.slice = false;
//},
//getValue: function () {
// return this.trueValue
//},
setEnable: function (enable) {
var self = this;
if (enable === true || enable === 'true') {
this.enable = true;
this.element.removeAttribute('readonly');
this.comp._input.removeAttribute('readonly');
removeClass(this.element.parentNode,'disablecover');
on(this.comp._input, 'focus', function (e) {
self.comp.show(e);
stopEvent(e);
})
if (this.comp.iconBtn){
on(this.comp.iconBtn, 'click', function(e){
self.comp.show(e);
stopEvent(e);
})
}
} else if (enable === false || enable === 'false') {
this.enable = false;
this.element.setAttribute('readonly', 'readonly');
this.comp._input.setAttribute('readonly', 'readonly');
addClass(this.element.parentNode,'disablecover');
off(this.comp._input, 'focus')
if (this.comp.iconBtn){
off(this.comp.iconBtn, 'click')
}
}
}
});
compMgr.addDataAdapter(
{
adapter: ComboboxAdapter,
name: 'u-combobox'
});
export {ComboboxAdapter};
| fixed:优化grid中下拉多选
| js/component/keroa-combo.js | fixed:优化grid中下拉多选 | <ide><path>s/component/keroa-combo.js
<ide> // if(self.dataModel)
<ide> // self.dataModel.setValue(self.field, event.value);
<ide> // self.slice = false;
<del> self.setValue(event.name);
<add> self.setValue(event.value);
<ide> });
<ide> //if(this.dataModel){
<ide> // this.dataModel.ref(this.field).subscribe(function(value) { |
|
JavaScript | mit | 042a70ea5abeae2e0baf23bebe265d64a382fa20 | 0 | heshamsafi/leetcode-cli,skygragon/leetcode-cli,skygragon/leetcode-cli,heshamsafi/leetcode-cli | var fs = require('fs');
var _ = require('underscore');
var log = require('loglevel');
var sprintf = require('sprintf-js').sprintf;
var chalk = require('../chalk');
var core = require('../core');
var h = require('../helper');
var queue = require('../queue');
var cmd = {
command: 'submission [keyword]',
desc: 'retrieve earlier submission by name or index',
builder: {
all: {
alias: 'a',
type: 'boolean',
default: false,
describe: 'Retrieve for all problems'
},
outdir: {
alias: 'o',
type: 'string',
describe: 'Where to save the submissions',
default: '.'
},
extra: {
alias: 'x',
type: 'boolean',
default: false,
describe: 'Provide extra problem details in submission file'
},
lang: {
alias: 'l',
type: 'string',
default: 'all',
describe: 'Programming language used for previous submission'
}
}
};
function onTaskDone(e, msg, problem, cb) {
// NOTE: msg color means different purpose:
// - red: error
// - green: accepted, fresh download
// - yellow: not ac-ed, fresh download
// - white: existed already, skip download
log.info(sprintf('[%3d] %-60s %s',
problem.id,
problem.name,
(e ? chalk.red('ERROR: ' + (e.msg || e)) : msg)
));
if (cb) cb(e);
}
function onTaskRun(argv, problem, cb) {
var done = _.partial(onTaskDone, _, _, problem, cb);
if (argv.extra) {
// have to get problem details, e.g. problem description.
core.getProblem(problem.id, function(e, problem) {
if (e) return done(e);
exportSubmission(argv, problem, done);
});
} else {
exportSubmission(argv, problem, done);
}
}
function exportSubmission(argv, problem, cb) {
core.getSubmissions(problem, function(e, submissions) {
if (e) return cb(e);
if (submissions.length === 0) return cb('no submissions?');
// get obj list contain required filetype
var submissionInTargetType = _.filter(submissions, function(x) {
return argv.lang === 'all' || argv.lang === x.lang;
});
if (submissionInTargetType.length === 0) {
return cb("No previous submission in required language.");
}
var submission = _.find(submissionInTargetType, function(x) {
return x.status_display === 'Accepted';
});
var submissionState = submission === undefined ? 'notac' : 'ac';
// if no accepted, use the latest non-accepted one
submission = submission || submissionInTargetType[0];
var filename = sprintf('%s/%d.%s.%s.%s%s',
argv.outdir,
problem.id,
problem.key,
submission.id,
submissionState,
h.langToExt(submission.lang));
// skip the existing cached submissions
if (fs.existsSync(filename)) {
return cb(null, chalk.underline(filename));
}
core.getSubmission(submission, function(e, submission) {
if (e) return cb(e);
problem.code = submission.code;
core.exportProblem(problem, filename, !argv.extra);
if (submission.status_display === 'Accepted')
cb(null, chalk.green.underline(filename));
else
cb(null, chalk.yellow.underline(filename));
});
});
}
cmd.handler = function(argv) {
var doTask = _.partial(onTaskRun, argv, _, _);
if (argv.all) {
core.getProblems(function(e, problems) {
if (e) return log.fail(e);
problems = problems.filter(function(q) {
return q.state === 'ac' || q.state === 'notac';
});
queue.run(problems, doTask);
});
return;
}
if (!argv.keyword)
return log.fail('missing keyword?');
core.getProblem(argv.keyword, function(e, problem) {
if (e) return log.fail(e);
queue.run([problem], doTask);
});
};
module.exports = cmd;
| lib/commands/submission.js | var fs = require('fs');
var _ = require('underscore');
var log = require('loglevel');
var sprintf = require('sprintf-js').sprintf;
var chalk = require('../chalk');
var core = require('../core');
var h = require('../helper');
var queue = require('../queue');
var cmd = {
command: 'submission [keyword]',
desc: 'retrieve earlier submission by name or index',
builder: {
all: {
alias: 'a',
type: 'boolean',
default: false,
describe: 'Retrieve for all problems'
},
outdir: {
alias: 'o',
type: 'string',
describe: 'Where to save the submissions',
default: '.'
},
extra: {
alias: 'x',
type: 'boolean',
default: false,
describe: 'Provide extra problem details in submission file'
}
}
};
function onTaskDone(e, msg, problem, cb) {
// NOTE: msg color means different purpose:
// - red: error
// - green: accepted, fresh download
// - yellow: not ac-ed, fresh download
// - white: existed already, skip download
log.info(sprintf('[%3d] %-60s %s',
problem.id,
problem.name,
(e ? chalk.red('ERROR: ' + (e.msg || e)) : msg)
));
if (cb) cb(e);
}
function onTaskRun(argv, problem, cb) {
var done = _.partial(onTaskDone, _, _, problem, cb);
if (argv.extra) {
// have to get problem details, e.g. problem description.
core.getProblem(problem.id, function(e, problem) {
if (e) return done(e);
exportSubmission(argv, problem, done);
});
} else {
exportSubmission(argv, problem, done);
}
}
function exportSubmission(argv, problem, cb) {
core.getSubmissions(problem, function(e, submissions) {
if (e) return cb(e);
if (submissions.length === 0) return cb('no submissions?');
// find the latest accepted one
var submission = _.find(submissions, function(x) {
// TODO: select by lang?
return x.status_display === 'Accepted';
});
// if no accepted, use the latest non-accepted one
submission = submission || submissions[0];
var filename = sprintf('%s/%d.%s.%s.%s%s',
argv.outdir,
problem.id,
problem.key,
submission.id,
problem.state,
h.langToExt(submission.lang));
// skip the existing cached submissions
if (fs.existsSync(filename)) {
return cb(null, chalk.underline(filename));
}
core.getSubmission(submission, function(e, submission) {
if (e) return cb(e);
problem.code = submission.code;
core.exportProblem(problem, filename, !argv.extra);
if (submission.status_display === 'Accepted')
cb(null, chalk.green.underline(filename));
else
cb(null, chalk.yellow.underline(filename));
});
});
}
cmd.handler = function(argv) {
var doTask = _.partial(onTaskRun, argv, _, _);
if (argv.all) {
core.getProblems(function(e, problems) {
if (e) return log.fail(e);
problems = problems.filter(function(q) {
return q.state === 'ac' || q.state === 'notac';
});
queue.run(problems, doTask);
});
return;
}
if (!argv.keyword)
return log.fail('missing keyword?');
core.getProblem(argv.keyword, function(e, problem) {
if (e) return log.fail(e);
queue.run([problem], doTask);
});
};
module.exports = cmd;
| 1.Add an option -l for submission command which allows user to retrive recent submission in requried programming language, if no submissino in required language, an error will be printed. If no language is specified, program will retrive retrive most recent accepted submission. 2. change behavior: when user specify a language and there are no accepted submission, program will return the most recent non accepted submission in this language 3. bug-fix: problem state is related to the latest submission regardless which language we use. When user specified a language, the status should be related to the latest submission in this language rather than problem
| lib/commands/submission.js | 1.Add an option -l for submission command which allows user to retrive recent submission in requried programming language, if no submissino in required language, an error will be printed. If no language is specified, program will retrive retrive most recent accepted submission. 2. change behavior: when user specify a language and there are no accepted submission, program will return the most recent non accepted submission in this language 3. bug-fix: problem state is related to the latest submission regardless which language we use. When user specified a language, the status should be related to the latest submission in this language rather than problem | <ide><path>ib/commands/submission.js
<ide> type: 'boolean',
<ide> default: false,
<ide> describe: 'Provide extra problem details in submission file'
<add> },
<add> lang: {
<add> alias: 'l',
<add> type: 'string',
<add> default: 'all',
<add> describe: 'Programming language used for previous submission'
<ide> }
<ide> }
<ide> };
<ide> if (e) return cb(e);
<ide> if (submissions.length === 0) return cb('no submissions?');
<ide>
<del> // find the latest accepted one
<del> var submission = _.find(submissions, function(x) {
<del> // TODO: select by lang?
<add> // get obj list contain required filetype
<add> var submissionInTargetType = _.filter(submissions, function(x) {
<add> return argv.lang === 'all' || argv.lang === x.lang;
<add> });
<add> if (submissionInTargetType.length === 0) {
<add> return cb("No previous submission in required language.");
<add> }
<add> var submission = _.find(submissionInTargetType, function(x) {
<ide> return x.status_display === 'Accepted';
<ide> });
<ide>
<add> var submissionState = submission === undefined ? 'notac' : 'ac';
<add>
<ide> // if no accepted, use the latest non-accepted one
<del> submission = submission || submissions[0];
<add> submission = submission || submissionInTargetType[0];
<ide>
<ide> var filename = sprintf('%s/%d.%s.%s.%s%s',
<ide> argv.outdir,
<ide> problem.id,
<ide> problem.key,
<ide> submission.id,
<del> problem.state,
<add> submissionState,
<ide> h.langToExt(submission.lang));
<ide>
<ide> // skip the existing cached submissions |
|
Java | apache-2.0 | f37cc45c16fdde294c1219e4c4d48101b33c689f | 0 | xixifeng/fastquery | /*
* Copyright (c) 2016-2016, fastquery.org and/or its affiliates. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information, please see http://www.fastquery.org/.
*
*/
package org.fastquery.util;
import static org.junit.Assert.*;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.fastquery.core.Placeholder;
import org.fastquery.core.Query;
import org.fastquery.core.QueryRepository;
import org.fastquery.core.Repository;
import org.fastquery.filter.BeforeFilter;
import org.fastquery.util.TypeUtil;
import org.fastquery.where.COperator;
import org.fastquery.where.Condition;
import org.fastquery.where.Operator;
import org.junit.Test;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import static org.hamcrest.Matchers.*;
/**
*
* @author xixifeng ([email protected])
*/
public class TypeUtilTest implements Opcodes {
@Test
public void testGetTypeInfo() {
Object[] strs = TypeUtil.getTypeInfo("I");
assertThat(strs[0], equalTo("java/lang/Integer"));
assertThat(strs[1], equalTo("intValue"));
assertThat(strs[2], equalTo("()I"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("Z");
assertThat(strs[0], equalTo("java/lang/Boolean"));
assertThat(strs[1], equalTo("booleanValue"));
assertThat(strs[2], equalTo("()Z"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("B");
assertThat(strs[0], equalTo("java/lang/Byte"));
assertThat(strs[1], equalTo("byteValue"));
assertThat(strs[2], equalTo("()B"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("C");
assertThat(strs[0], equalTo("java/lang/Character"));
assertThat(strs[1], equalTo("charValue"));
assertThat(strs[2], equalTo("()C"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("D");
assertThat(strs[0], equalTo("java/lang/Double"));
assertThat(strs[1], equalTo("doubleValue"));
assertThat(strs[2], equalTo("()D"));
assertThat(strs[3], equalTo(DRETURN));
strs = TypeUtil.getTypeInfo("F");
assertThat(strs[0], equalTo("java/lang/Float"));
assertThat(strs[1], equalTo("floatValue"));
assertThat(strs[2], equalTo("()F"));
assertThat(strs[3], equalTo(FRETURN));
strs = TypeUtil.getTypeInfo("J");
assertThat(strs[0], equalTo("java/lang/Long"));
assertThat(strs[1], equalTo("longValue"));
assertThat(strs[2], equalTo("()J"));
assertThat(strs[3], equalTo(LRETURN));
strs = TypeUtil.getTypeInfo("S");
assertThat(strs[0], equalTo("java/lang/Short"));
assertThat(strs[1], equalTo("shortValue"));
assertThat(strs[2], equalTo("()S"));
assertThat(strs[3], equalTo(IRETURN));
}
@Test
// 测试 TypeUtil.getMethod
public void testGetMethod() {
List<Class<?>> clazzs = JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib");
clazzs.addAll( JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib"));
long start = System.currentTimeMillis();
for (Class<?> clazz : clazzs) {
Method[] methods = clazz.getMethods();
for (Method method : methods) {
String methodName = method.getName();
String methodDescriptor = Type.getType(method).getDescriptor();
Method m2 = TypeUtil.getMethod(clazz, methodName, methodDescriptor);
assertThat(method.getParameterCount(), equalTo(m2.getParameterCount()));
Parameter[] parameters = method.getParameters();
Parameter[] parameters2 = m2.getParameters();
for (int i = 0; i < parameters.length; i++) {
assertThat((parameters[i].getType() == parameters2[i].getType()), is(true));
}
assertThat(methodName, equalTo(m2.getName()));
assertThat(methodDescriptor, equalTo(Type.getType(m2).getDescriptor()));
}
}
System.out.println( "testGetMethod,共测试了"+clazzs.size()+"个类, 用时: " + (System.currentTimeMillis() - start) +" 毫秒!");
}
@Test
public void testMatches() throws ClassNotFoundException {
}
@Test
public void testMatchesNotrepeat() {
}
@Test
public void testMatcheAll() {
}
@Test
public void testGetSQLParameter() {
}
@Test
public void testContainsIgnoreCase() {
}
@Test
public void testHasDefaultConstructor() {
}
@Test
public void testFindId() {
}
// 别删除用做测试用
@Query("select * from Student #{#where} order by desc")
// 增加一些条件
@Condition(l = "field1", o = Operator.EQ, r = "?1") // ?1的值,如果是null,
// 该行条件将不参与运算
@Condition(c = COperator.AND, l = "field2", o = Operator.EQ, r = "?2")
@Condition(c = COperator.AND, l = "field3", o = Operator.EQ, r = "?3", ignoreNull = false) // ?3的值是null,该条件也参与运算.
@Condition(c = COperator.OR, l = "age", o = Operator.IN, r = "(?3,?7,?8)") // age
// in(?3,?7?8)
@Condition(c = COperator.AND, l = "name", o = { Operator.NOT, Operator.LIKE }, r = "?7") // 等效于
// name
// not
// like
// ?7
@Condition(c = COperator.OR, l = "info", o = Operator.BETWEEN, r = "?8 and ?9") // 等效于
// info
// between
// ?8
// and
// ?9
public void method01() {
}
@Test
public void placeholder() {
// 匹配 (?4,?5,?6)的正则(允许有首尾空格)
String reg1 = Placeholder.INV_REG;
assertThat(Pattern.matches(reg1, "(?3,?7,?8)"), is(true));
assertThat(Pattern.matches(reg1, "( ?3,?7 ,?8 ) "), is(true));
assertThat(Pattern.matches(reg1, " ( ?3 ,?7 , ?8 )"), is(true));
assertThat(Pattern.matches(reg1, " (?3, ?7, ?8 )"), is(true));
assertThat(Pattern.matches(reg1, " (?3, ?7, ?8)"), is(true));
assertThat(Pattern.matches(reg1, "( ?3, ?7,?8)"), is(true));
assertThat(Pattern.matches(reg1, "( ?3,?7, ?8 ) "), is(true));
assertThat(Pattern.matches(reg1, "( ?3, ?7, ?8) "), is(true));
assertThat(Pattern.matches(reg1, "( ?3, ?7 ?8) "), is(false));
assertThat(Pattern.matches(reg1, "( ?3?7?8) "), is(false));
assertThat(Pattern.matches(reg1, "( ?s,?7, ?8) "), is(false));
assertThat(Pattern.matches(reg1, "( ?3, ?7, ?8)s "), is(false));
assertThat(Pattern.matches(reg1, "(?3, ?7, ?8)12 "), is(false));
assertThat(Pattern.matches(reg1, "(?3?7?8)"), is(false));
assertThat(Pattern.matches(reg1, "( ?3666,?7 ?8 ) "), is(false));
assertThat(Pattern.matches(reg1, " ( ?3777 32?7 , ?8 )"), is(false));
assertThat(Pattern.matches(reg1, " (?3xx, ?7, ?8 )"), is(false));
assertThat(Pattern.matches(reg1, " (?3a, ?7, ?8)"), is(false));
assertThat(Pattern.matches(reg1, "( ?3, 263, ?7,?8)"), is(false));
assertThat(Pattern.matches(reg1, "( ?3,?7, ?8,? ) "), is(false));
assertThat(Pattern.matches(reg1, "( ?3, ?x5, ?8) "), is(false));
// 不区分大小写匹配格式 "?8 and ?9"
reg1 = Placeholder.ANDV_REG;
assertThat(Pattern.matches(reg1, "?12 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, "?1 AnD ?45"), is(true));
assertThat(Pattern.matches(reg1, "?3 AnD ?6"), is(true));
assertThat(Pattern.matches(reg1, "?3 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, " ?123 AnD ?456 "), is(true));
assertThat(Pattern.matches(reg1, " ?123 AnD ?456 "), is(true));
assertThat(Pattern.matches(reg1, " ?123 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, "?123 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, "?123 AnD ?456 "), is(true));
assertThat(Pattern.matches(reg1, "?12AnD ?456"), is(false));
assertThat(Pattern.matches(reg1, "?1 AnD?45"), is(false));
assertThat(Pattern.matches(reg1, "?3 AndD ?6"), is(false));
assertThat(Pattern.matches(reg1, "?3 AnD ?45x"), is(false));
assertThat(Pattern.matches(reg1, " ?123 AAnD ?456 "), is(false));
assertThat(Pattern.matches(reg1, " ? AnD ?456 "), is(false));
assertThat(Pattern.matches(reg1, " ?123 AnD ?"), is(false));
assertThat(Pattern.matches(reg1, "?123 AnND ?456"), is(false));
assertThat(Pattern.matches(reg1, "? 123 AnD ?456 "), is(false));
// 匹配格式 "?2"(允许首尾空格)
reg1 = Placeholder.SP2_REG;
assertThat(Pattern.matches(reg1, "?1"), is(true));
assertThat(Pattern.matches(reg1, "?12"), is(true));
assertThat(Pattern.matches(reg1, "?13"), is(true));
assertThat(Pattern.matches(reg1, " ?1 "), is(true));
assertThat(Pattern.matches(reg1, "?12 "), is(true));
assertThat(Pattern.matches(reg1, " ?123"), is(true));
assertThat(Pattern.matches(reg1, "?1 "), is(true));
assertThat(Pattern.matches(reg1, " ?1234242"), is(true));
assertThat(Pattern.matches(reg1, "?1 "), is(true));
assertThat(Pattern.matches(reg1, " ?1365 "), is(true));
assertThat(Pattern.matches(reg1, " ? 1"), is(false));
assertThat(Pattern.matches(reg1, " ?1x"), is(false));
assertThat(Pattern.matches(reg1, " ?S"), is(false));
assertThat(Pattern.matches(reg1, " ?a"), is(false));
assertThat(Pattern.matches(reg1, " ?1 1"), is(false));
assertThat(Pattern.matches(reg1, " ?3,3"), is(false));
assertThat(Pattern.matches(reg1, "%?1"), is(false));
}
@Test
public void filterComments(){
assertThat(TypeUtil.filterComments("/* \n abc */123\n /* 123 */"), equalTo("123\n "));
assertThat(TypeUtil.filterComments("/*** * 111*/abc/*111*/222/*** *333*/"),equalTo("abc222"));
}
public Map<String, String > todo1(){
return null;
}
public Map<String, Integer > todo2(){
return null;
}
public Map<String, Object > todo3(){
return null;
}
public List<Map<String, Object >> todo4(){
return null;
}
@Test
public void isMapSO() throws NoSuchMethodException, SecurityException{
java.lang.reflect.Type type1 = TypeUtilTest.class.getMethod("todo1").getGenericReturnType();
assertThat(TypeUtil.isMapSO(type1), is(false));
java.lang.reflect.Type type2 = TypeUtilTest.class.getMethod("todo2").getGenericReturnType();
assertThat(TypeUtil.isMapSO(type2), is(false));
java.lang.reflect.Type type3 = TypeUtilTest.class.getMethod("todo3").getGenericReturnType();
assertThat(TypeUtil.isMapSO(type3), is(true));
}
@Test
public void isListMapSO() throws NoSuchMethodException, SecurityException{
java.lang.reflect.Type type1 = TypeUtilTest.class.getMethod("todo1").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type1), is(false));
java.lang.reflect.Type type2 = TypeUtilTest.class.getMethod("todo2").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type2), is(false));
java.lang.reflect.Type type3 = TypeUtilTest.class.getMethod("todo3").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type3), is(false));
java.lang.reflect.Type type4 = TypeUtilTest.class.getMethod("todo4").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type4), is(true));
}
// 这些内部类,用做测试
class BeforeFilter0 extends BeforeFilter<Repository> {
@Override
protected void doFilter(Repository repository, Method method, Object[] args) {
}
}
class BeforeFilter1 extends BeforeFilter<DB1> {
@Override
protected void doFilter(DB1 db1, Method method, Object[] args) {
}
}
class BeforeFilter2 extends BeforeFilter<DB2> {
@Override
protected void doFilter(DB2 db2, Method method, Object[] args) {
}
}
class BeforeFilter3 extends BeforeFilter<DB3> {
@Override
protected void doFilter(DB3 db3, Method method, Object[] args) {
}
}
class BeforeFilter4 extends BeforeFilter<DB4> {
@Override
protected void doFilter(DB4 db4, Method method, Object[] args) {
}
}
class BeforeFilter5 extends BeforeFilter<DB5> {
@Override
protected void doFilter(DB5 db5, Method method, Object[] args) {
}
}
class BeforeFilter6 extends BeforeFilter<DB6> {
@Override
protected void doFilter(DB6 db6, Method method, Object[] args) {
}
}
// 这些内部类,用做测试
class DB1 implements Repository {
}
class DB2 implements QueryRepository {
}
class DB3 implements Repository {
}
class DB4 implements QueryRepository {
}
class DB5 implements Repository {
}
class DB6 implements QueryRepository {
}
@Test
public void compareType(){
assertThat(TypeUtil.compareType(BeforeFilter1.class, DB1.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter2.class, DB2.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter3.class, DB3.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter4.class, DB4.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter5.class, DB5.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter6.class, DB6.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter1.class, DB6.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter2.class, DB5.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter3.class, DB4.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter4.class, DB3.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter5.class, DB2.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter6.class, DB1.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter1.class, DB2.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter2.class, DB1.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter3.class, DB4.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter4.class, DB3.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter5.class, DB6.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter6.class, DB5.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB1.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB2.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB3.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB4.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB5.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB6.class), is(true));
}
}
| src/test/java/org/fastquery/util/TypeUtilTest.java | /*
* Copyright (c) 2016-2016, fastquery.org and/or its affiliates. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information, please see http://www.fastquery.org/.
*
*/
package org.fastquery.util;
import static org.junit.Assert.*;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.fastquery.core.Placeholder;
import org.fastquery.core.Query;
import org.fastquery.core.QueryRepository;
import org.fastquery.core.Repository;
import org.fastquery.filter.BeforeFilter;
import org.fastquery.util.TypeUtil;
import org.fastquery.where.COperator;
import org.fastquery.where.Condition;
import org.fastquery.where.Operator;
import org.junit.Test;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import static org.hamcrest.Matchers.*;
/**
*
* @author xixifeng ([email protected])
*/
public class TypeUtilTest implements Opcodes {
@Test
public void testGetTypeInfo() {
Object[] strs = TypeUtil.getTypeInfo("I");
assertThat(strs[0], equalTo("java/lang/Integer"));
assertThat(strs[1], equalTo("intValue"));
assertThat(strs[2], equalTo("()I"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("Z");
assertThat(strs[0], equalTo("java/lang/Boolean"));
assertThat(strs[1], equalTo("booleanValue"));
assertThat(strs[2], equalTo("()Z"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("B");
assertThat(strs[0], equalTo("java/lang/Byte"));
assertThat(strs[1], equalTo("byteValue"));
assertThat(strs[2], equalTo("()B"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("C");
assertThat(strs[0], equalTo("java/lang/Character"));
assertThat(strs[1], equalTo("charValue"));
assertThat(strs[2], equalTo("()C"));
assertThat(strs[3], equalTo(IRETURN));
strs = TypeUtil.getTypeInfo("D");
assertThat(strs[0], equalTo("java/lang/Double"));
assertThat(strs[1], equalTo("doubleValue"));
assertThat(strs[2], equalTo("()D"));
assertThat(strs[3], equalTo(DRETURN));
strs = TypeUtil.getTypeInfo("F");
assertThat(strs[0], equalTo("java/lang/Float"));
assertThat(strs[1], equalTo("floatValue"));
assertThat(strs[2], equalTo("()F"));
assertThat(strs[3], equalTo(FRETURN));
strs = TypeUtil.getTypeInfo("J");
assertThat(strs[0], equalTo("java/lang/Long"));
assertThat(strs[1], equalTo("longValue"));
assertThat(strs[2], equalTo("()J"));
assertThat(strs[3], equalTo(LRETURN));
strs = TypeUtil.getTypeInfo("S");
assertThat(strs[0], equalTo("java/lang/Short"));
assertThat(strs[1], equalTo("shortValue"));
assertThat(strs[2], equalTo("()S"));
assertThat(strs[3], equalTo(IRETURN));
}
@Test
public void testGetMethod() {
List<Class<?>> clazzs = JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib");
clazzs.addAll( JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib"));
long start = System.currentTimeMillis();
for (Class<?> clazz : clazzs) {
Method[] methods = clazz.getMethods();
for (Method method : methods) {
String methodName = method.getName();
String methodDescriptor = Type.getType(method).getDescriptor();
Method m2 = TypeUtil.getMethod(clazz, methodName, methodDescriptor);
assertThat(method.getParameterCount(), equalTo(m2.getParameterCount()));
Parameter[] parameters = method.getParameters();
Parameter[] parameters2 = m2.getParameters();
for (int i = 0; i < parameters.length; i++) {
assertThat((parameters[i].getType() == parameters2[i].getType()), is(true));
}
assertThat(methodName, equalTo(m2.getName()));
assertThat(methodDescriptor, equalTo(Type.getType(m2).getDescriptor()));
}
}
System.out.println( "testGetMethod,共测试了"+clazzs.size()+"个类, 用时: " + (System.currentTimeMillis() - start) +" 毫秒!");
}
// 重大发现
@Test
public void testClass(){
List<Class<?>> clazzs = JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib");
clazzs.addAll( JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib"));
for (Class<?> clazz : clazzs) {
Method[] methods = clazz.getDeclaredMethods();
for (Method method : methods) {
if(method.getDeclaringClass()!=clazz){
System.out.println("当前方法的声明类为:" + method.getDeclaringClass());
System.out.println("当前clazz为:" + clazz);
System.exit(2);
}
}
}
}
@Test
public void testMatches() throws ClassNotFoundException {
}
@Test
public void testMatchesNotrepeat() {
}
@Test
public void testMatcheAll() {
}
@Test
public void testGetSQLParameter() {
}
@Test
public void testContainsIgnoreCase() {
}
@Test
public void testHasDefaultConstructor() {
}
@Test
public void testFindId() {
}
// 别删除用做测试用
@Query("select * from Student #{#where} order by desc")
// 增加一些条件
@Condition(l = "field1", o = Operator.EQ, r = "?1") // ?1的值,如果是null,
// 该行条件将不参与运算
@Condition(c = COperator.AND, l = "field2", o = Operator.EQ, r = "?2")
@Condition(c = COperator.AND, l = "field3", o = Operator.EQ, r = "?3", ignoreNull = false) // ?3的值是null,该条件也参与运算.
@Condition(c = COperator.OR, l = "age", o = Operator.IN, r = "(?3,?7,?8)") // age
// in(?3,?7?8)
@Condition(c = COperator.AND, l = "name", o = { Operator.NOT, Operator.LIKE }, r = "?7") // 等效于
// name
// not
// like
// ?7
@Condition(c = COperator.OR, l = "info", o = Operator.BETWEEN, r = "?8 and ?9") // 等效于
// info
// between
// ?8
// and
// ?9
public void method01() {
}
@Test
public void placeholder() {
// 匹配 (?4,?5,?6)的正则(允许有首尾空格)
String reg1 = Placeholder.INV_REG;
assertThat(Pattern.matches(reg1, "(?3,?7,?8)"), is(true));
assertThat(Pattern.matches(reg1, "( ?3,?7 ,?8 ) "), is(true));
assertThat(Pattern.matches(reg1, " ( ?3 ,?7 , ?8 )"), is(true));
assertThat(Pattern.matches(reg1, " (?3, ?7, ?8 )"), is(true));
assertThat(Pattern.matches(reg1, " (?3, ?7, ?8)"), is(true));
assertThat(Pattern.matches(reg1, "( ?3, ?7,?8)"), is(true));
assertThat(Pattern.matches(reg1, "( ?3,?7, ?8 ) "), is(true));
assertThat(Pattern.matches(reg1, "( ?3, ?7, ?8) "), is(true));
assertThat(Pattern.matches(reg1, "( ?3, ?7 ?8) "), is(false));
assertThat(Pattern.matches(reg1, "( ?3?7?8) "), is(false));
assertThat(Pattern.matches(reg1, "( ?s,?7, ?8) "), is(false));
assertThat(Pattern.matches(reg1, "( ?3, ?7, ?8)s "), is(false));
assertThat(Pattern.matches(reg1, "(?3, ?7, ?8)12 "), is(false));
assertThat(Pattern.matches(reg1, "(?3?7?8)"), is(false));
assertThat(Pattern.matches(reg1, "( ?3666,?7 ?8 ) "), is(false));
assertThat(Pattern.matches(reg1, " ( ?3777 32?7 , ?8 )"), is(false));
assertThat(Pattern.matches(reg1, " (?3xx, ?7, ?8 )"), is(false));
assertThat(Pattern.matches(reg1, " (?3a, ?7, ?8)"), is(false));
assertThat(Pattern.matches(reg1, "( ?3, 263, ?7,?8)"), is(false));
assertThat(Pattern.matches(reg1, "( ?3,?7, ?8,? ) "), is(false));
assertThat(Pattern.matches(reg1, "( ?3, ?x5, ?8) "), is(false));
// 不区分大小写匹配格式 "?8 and ?9"
reg1 = Placeholder.ANDV_REG;
assertThat(Pattern.matches(reg1, "?12 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, "?1 AnD ?45"), is(true));
assertThat(Pattern.matches(reg1, "?3 AnD ?6"), is(true));
assertThat(Pattern.matches(reg1, "?3 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, " ?123 AnD ?456 "), is(true));
assertThat(Pattern.matches(reg1, " ?123 AnD ?456 "), is(true));
assertThat(Pattern.matches(reg1, " ?123 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, "?123 AnD ?456"), is(true));
assertThat(Pattern.matches(reg1, "?123 AnD ?456 "), is(true));
assertThat(Pattern.matches(reg1, "?12AnD ?456"), is(false));
assertThat(Pattern.matches(reg1, "?1 AnD?45"), is(false));
assertThat(Pattern.matches(reg1, "?3 AndD ?6"), is(false));
assertThat(Pattern.matches(reg1, "?3 AnD ?45x"), is(false));
assertThat(Pattern.matches(reg1, " ?123 AAnD ?456 "), is(false));
assertThat(Pattern.matches(reg1, " ? AnD ?456 "), is(false));
assertThat(Pattern.matches(reg1, " ?123 AnD ?"), is(false));
assertThat(Pattern.matches(reg1, "?123 AnND ?456"), is(false));
assertThat(Pattern.matches(reg1, "? 123 AnD ?456 "), is(false));
// 匹配格式 "?2"(允许首尾空格)
reg1 = Placeholder.SP2_REG;
assertThat(Pattern.matches(reg1, "?1"), is(true));
assertThat(Pattern.matches(reg1, "?12"), is(true));
assertThat(Pattern.matches(reg1, "?13"), is(true));
assertThat(Pattern.matches(reg1, " ?1 "), is(true));
assertThat(Pattern.matches(reg1, "?12 "), is(true));
assertThat(Pattern.matches(reg1, " ?123"), is(true));
assertThat(Pattern.matches(reg1, "?1 "), is(true));
assertThat(Pattern.matches(reg1, " ?1234242"), is(true));
assertThat(Pattern.matches(reg1, "?1 "), is(true));
assertThat(Pattern.matches(reg1, " ?1365 "), is(true));
assertThat(Pattern.matches(reg1, " ? 1"), is(false));
assertThat(Pattern.matches(reg1, " ?1x"), is(false));
assertThat(Pattern.matches(reg1, " ?S"), is(false));
assertThat(Pattern.matches(reg1, " ?a"), is(false));
assertThat(Pattern.matches(reg1, " ?1 1"), is(false));
assertThat(Pattern.matches(reg1, " ?3,3"), is(false));
assertThat(Pattern.matches(reg1, "%?1"), is(false));
}
@Test
public void filterComments(){
assertThat(TypeUtil.filterComments("/* \n abc */123\n /* 123 */"), equalTo("123\n "));
assertThat(TypeUtil.filterComments("/*** * 111*/abc/*111*/222/*** *333*/"),equalTo("abc222"));
}
public Map<String, String > todo1(){
return null;
}
public Map<String, Integer > todo2(){
return null;
}
public Map<String, Object > todo3(){
return null;
}
public List<Map<String, Object >> todo4(){
return null;
}
@Test
public void isMapSO() throws NoSuchMethodException, SecurityException{
java.lang.reflect.Type type1 = TypeUtilTest.class.getMethod("todo1").getGenericReturnType();
assertThat(TypeUtil.isMapSO(type1), is(false));
java.lang.reflect.Type type2 = TypeUtilTest.class.getMethod("todo2").getGenericReturnType();
assertThat(TypeUtil.isMapSO(type2), is(false));
java.lang.reflect.Type type3 = TypeUtilTest.class.getMethod("todo3").getGenericReturnType();
assertThat(TypeUtil.isMapSO(type3), is(true));
}
@Test
public void isListMapSO() throws NoSuchMethodException, SecurityException{
java.lang.reflect.Type type1 = TypeUtilTest.class.getMethod("todo1").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type1), is(false));
java.lang.reflect.Type type2 = TypeUtilTest.class.getMethod("todo2").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type2), is(false));
java.lang.reflect.Type type3 = TypeUtilTest.class.getMethod("todo3").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type3), is(false));
java.lang.reflect.Type type4 = TypeUtilTest.class.getMethod("todo4").getGenericReturnType();
assertThat(TypeUtil.isListMapSO(type4), is(true));
}
// 这些内部类,用做测试
class BeforeFilter0 extends BeforeFilter<Repository> {
@Override
protected void doFilter(Repository repository, Method method, Object[] args) {
}
}
class BeforeFilter1 extends BeforeFilter<DB1> {
@Override
protected void doFilter(DB1 db1, Method method, Object[] args) {
}
}
class BeforeFilter2 extends BeforeFilter<DB2> {
@Override
protected void doFilter(DB2 db2, Method method, Object[] args) {
}
}
class BeforeFilter3 extends BeforeFilter<DB3> {
@Override
protected void doFilter(DB3 db3, Method method, Object[] args) {
}
}
class BeforeFilter4 extends BeforeFilter<DB4> {
@Override
protected void doFilter(DB4 db4, Method method, Object[] args) {
}
}
class BeforeFilter5 extends BeforeFilter<DB5> {
@Override
protected void doFilter(DB5 db5, Method method, Object[] args) {
}
}
class BeforeFilter6 extends BeforeFilter<DB6> {
@Override
protected void doFilter(DB6 db6, Method method, Object[] args) {
}
}
// 这些内部类,用做测试
class DB1 implements Repository {
}
class DB2 implements QueryRepository {
}
class DB3 implements Repository {
}
class DB4 implements QueryRepository {
}
class DB5 implements Repository {
}
class DB6 implements QueryRepository {
}
@Test
public void compareType(){
assertThat(TypeUtil.compareType(BeforeFilter1.class, DB1.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter2.class, DB2.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter3.class, DB3.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter4.class, DB4.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter5.class, DB5.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter6.class, DB6.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter1.class, DB6.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter2.class, DB5.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter3.class, DB4.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter4.class, DB3.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter5.class, DB2.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter6.class, DB1.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter1.class, DB2.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter2.class, DB1.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter3.class, DB4.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter4.class, DB3.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter5.class, DB6.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter6.class, DB5.class), is(false));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB1.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB2.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB3.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB4.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB5.class), is(true));
assertThat(TypeUtil.compareType(BeforeFilter0.class, DB6.class), is(true));
}
}
| update test case
| src/test/java/org/fastquery/util/TypeUtilTest.java | update test case | <ide><path>rc/test/java/org/fastquery/util/TypeUtilTest.java
<ide> }
<ide>
<ide> @Test
<add> // 测试 TypeUtil.getMethod
<ide> public void testGetMethod() {
<ide> List<Class<?>> clazzs = JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib");
<ide> clazzs.addAll( JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib"));
<ide> }
<ide> }
<ide> System.out.println( "testGetMethod,共测试了"+clazzs.size()+"个类, 用时: " + (System.currentTimeMillis() - start) +" 毫秒!");
<del> }
<del>
<del> // 重大发现
<del> @Test
<del> public void testClass(){
<del> List<Class<?>> clazzs = JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib");
<del> clazzs.addAll( JarListClass.jarClasses("/web/progm/java/jdk1.8.0_45/jre/lib"));
<del> for (Class<?> clazz : clazzs) {
<del> Method[] methods = clazz.getDeclaredMethods();
<del> for (Method method : methods) {
<del> if(method.getDeclaringClass()!=clazz){
<del> System.out.println("当前方法的声明类为:" + method.getDeclaringClass());
<del> System.out.println("当前clazz为:" + clazz);
<del> System.exit(2);
<del> }
<del> }
<del> }
<ide> }
<ide>
<ide> @Test |
|
JavaScript | agpl-3.0 | fa07a5452f67dcb2cafc16dc33c1fec7450a75ed | 0 | scriptotek/bibduck | /*****************************************************************************
* <stikksedler.js>
* Modul for å skrive ut stikksedler ved hjelp av Excel-maler
* Av: Bård S. Tuseth (c) 2009
* Fredrik Hovind Juell (c) 2010
* Dan Michael O. Heggø (c) 2013
*
* Nye kommandoer:
* stikk! : Skriver stikkseddel
*****************************************************************************/
$.bibduck.stikksedler = {
// Settes under Innstillinger i brukergrensesnittet
beststed: '',
load_xls: function (filename) {
var printerStr = window.bibduck.printerName + ' on ' + window.bibduck.printerPort;
this.excel = new ActiveXObject('Excel.Application');
this.excel.Visible = false;
this.excel.Workbooks.Open(getCurrentDir() + filename);
this.excel.Application.ActivePrinter = printerStr;
return this.excel;
},
print_and_close: function() {
this.excel.ActiveWorkbook.PrintOut();
this.excel.ActiveWorkbook.Close(0);
this.excel.Quit();
delete this.excel;
this.excel = undefined;
$.bibduck.log('OK', {timestamp: false});
},
current_date: function() {
var today = new Date(),
dd = today.getDate(),
mm = today.getMonth() + 1, //January is 0!
yyyy = today.getFullYear();
if (dd < 10) {
dd = '0' + dd;
}
if(mm < 10) {
mm = '0' + mm;
}
return yyyy + '-' + mm + '-' + dd;
}
};
(function() {
var worker,
client,
dok = {},
laaner = {},
lib = {},
excel,
hjemmebibliotek = '',
current_date = '',
config,
seddel,
callback;
function les_dokstat_skjerm() {
if (client.get(2, 1, 28) !== 'Utlånsstatus for et dokument') {
alert('Vi er ikke på DOKST-skjermen :(');
return;
}
// Sjekker hvilken linje tittelen står på:
if (client.get(7, 2, 7) == 'Tittel') {
// Lån fra egen samling
dok.tittel = client.get(7, 14, 80).trim();
} else if (client.get(8, 2, 7) == 'Tittel') {
// ik...
dok.tittel = client.get(8, 13, 80).trim();
} else {
// Relativt sjelden case? Linje 7-10 er fritekst, og
// tittel og forfatter bytter typisk mellom linje 7 og 8.
// En enkel test, som sikkert vil feile i flere tilfeller:
var tittel1 = client.get(7, 2, 80).trim(),
tittel2 = client.get(8, 2, 80).trim();
if (tittel1.length > tittel2.length) {
dok.tittel = tittel1;
} else {
dok.tittel = tittel2;
}
}
dok.dokid = client.get( 6, 31, 39);
if (dok.dokid === '') {
alert('Har du husket å trykke enter?');
return;
}
laaner.ltid = client.get(14, 11, 20);
dok.utlaansdato = client.get(18, 18, 27); // Utlånsdato
dok.forfvres = client.get(20, 18, 27); // Forfall v./res
dok.forfallsdato = client.get(21, 18, 27); // Forfallsdato
dok.utlstatus = client.get( 3, 46, 65); // AVH, RES, UTL, UTL/RES, ...
dok.purretype = client.get(17, 68, 68);
dok.kommentar = client.get(23, 17, 80).trim();
// Dokument til avhenting?
if (dok.utlstatus === 'AVH') {
dok.hentenr = client.get(1, 44, 50);
dok.hentefrist = client.get(1, 26, 35);
} else {
//Tester om låntaker er et bibliotek:
if (laaner.ltid.substr(0,3) == 'lib') {
laaner.kind = 'bibliotek';
laaner.navn = client.get(14, 22, 79).trim();
} else {
laaner.kind = 'person';
}
}
// DEBUG:
/*
$.bibduck.log('Info om lånet:');
$.each(dok, function(k,v) {
$.bibduck.log(' ' + k + ': ' + v);
});
*/
worker.resetPointer();
// Hva gjør vi ift. UTL/RES?
// Skriver ut stikkseddel for det utlånet eks. eller det reserverte?
if (dok.utlstatus === 'AVH') {
// Vi trenger ikke mer informasjon.
// La oss kjøre i gang Excel-helvetet, joho!!
emitComplete();
seddel.avh(dok, laaner, lib);
} else if (dok.utlstatus === 'RES') {
// Dokument som *kun* er reservert
// Finn låneren i reservasjonslista:
worker.send('rlist,\n');
worker.wait_for('Hentefrist:', [6,5], function() {
if (worker.get(3, 63, 71) === dok.dokid) {
$.bibduck.log(' Bruker reservasjon nummer 1 på RLIST-skjermen');
laaner.ltid = worker.get(3, 15, 24);
} else if (worker.get(10, 63, 71) === dok.dokid) {
$.bibduck.log(' Bruker reservasjon nummer 2 på RLIST-skjermen');
laaner.ltid = worker.get(10, 15, 24);
} else if (worker.get(17, 63, 71) === dok.dokid) {
$.bibduck.log(' Bruker reservasjon nummer 3 på RLIST-skjermen');
laaner.ltid = worker.get(17, 15, 24);
}
$.bibduck.sendSpecialKey('F12');
worker.wait_for('DOkstat', [2,31], function() {
worker.resetPointer();
// Vi trenger mer info om låneren:
worker.send('ltsø,' + laaner.ltid + '\n');
worker.wait_for('Fyll ut:', [5,1], function() {
// Vi sender enter på nytt
worker.send('\n');
worker.wait_for('Sist aktiv dato', [22,1], les_ltst_skjerm);
});
});
});
} else if (laaner.kind === 'person') {
// Vi trenger mer info om låneren:
worker.send('ltsø,' + laaner.ltid + '\n');
worker.wait_for('Fyll ut:', [5,1], function() {
// Vi sender enter på nytt
worker.send('\n');
worker.wait_for('Sist aktiv dato', [22,1], les_ltst_skjerm);
});
} else {
// Vi trenger ikke mer informasjon.
// La oss kjøre i gang Excel-helvetet, joho!!
emitComplete();
seddel.reg(dok, laaner, lib);
}
}
// Notifies the registered callback (if any) with everything collected so
// far: the patron, the owning library, the document and our pickup location.
// Delivery is deferred slightly so the BIBSYS screen settles first.
function emitComplete() {
    // Nothing to do when no callback was registered.
    if (callback === undefined) {
        return;
    }
    setTimeout(function() { // a slight delay never hurts
        callback({
            patron: laaner,
            library: lib,
            document: dok,
            beststed: seddel.beststed
        });
    }, 200);
}
// Reads patron details from the LTSØ ("Opplysninger om låntaker") screen
// into `laaner`, maps the patron's ordering location (beststed) to a
// library number/name via config, and finally steers BIBSYS onward before
// the slip is printed. All field positions are fixed screen coordinates
// (row, first column, last column).
function les_ltst_skjerm() {
    // Guard: the worker instance must actually be showing the LTSØ screen.
    if (worker.get(2, 1, 24) !== 'Opplysninger om låntaker') {
        alert("Vi er ikke på LTSØ-skjermen :(");
        return;
    }
    // Patron fields at fixed coordinates on the LTSØ screen.
    laaner.beststed = worker.get( 7, 71, 80).trim();
    laaner.etternavn = worker.get( 5, 18, 58).trim();
    laaner.fornavn = worker.get( 6, 18, 58).trim();
    laaner.spraak = worker.get(19, 41, 44).trim();
    // DEBUG:
    /*
    $.bibduck.log('Info om låner:');
    $.each(laaner, function(k,v) {
        $.bibduck.log('  ' + k + ': ' + v);
    });*/
    lib.ltid = 'ukjent';
    lib.navn = 'ukjent';
    if (laaner.beststed in config.bestillingssteder) {
        lib.ltid = config.bestillingssteder[laaner.beststed];
    } else {
        // @TODO: Check how slips turn out for users with linked borrower
        //        cards. A user with a card from e.g. ubbrb that we link
        //        keeps beststed ubbrb.
        $.bibduck.log("Kjenner ikke libnr for bestillingssted: " + laaner.beststed, 'warn');
        return;
    }
    if (lib.ltid in config.biblnavn) {
        lib.navn = config.biblnavn[lib.ltid];
    } else if (lib.ltid !== 'ukjent') {
        $.bibduck.log("Kjenner ikke navn for libnr: " + lib.ltid, 'warn');
    }
    // DEBUG:
    /*
    $.bibduck.log('Info om bibliotek:');
    $.each(lib, function(k,v) {
        $.bibduck.log('  ' + k + ': ' + v);
    });*/
    if (worker !== client) {
        // A background instance did the lookup; park it back on the menu.
        worker.resetPointer();
        worker.send('men,\n');
    } else {
        if (dok.utlstatus === 'RES') {
            if (laaner.beststed == seddel.beststed) {
                alert('Obs! Låner har bestillingssted ' + laaner.beststed + ', så det burde ikke være behov for å sende det.');
                return;
            }
        } else {
            // Warn when the book is about to leave the building.
            if (laaner.kind === 'person' && laaner.beststed !== seddel.beststed) {
                alert('Obs! Låner har bestillingssted: ' + laaner.beststed);
                // If the book is to be shipped, go to the loan-comment field.
                client.send('en,' + dok.dokid + '\n');
                client.wait_for('Utlmkomm:', [8,1], function() {
                    client.send('\t\t\t');
                    emitComplete();
                });
            // Otherwise go back to the DOKST screen:
            } else {
                //result = snt.MessageBox("Vil du gå til REG for å låne ut flere bøker?", "Error", ICON_QUESTION Or BUTTON_YESNO Or DEFBUTTON2)
                //if (result == IDYES) {
                //    // ... back to the loan screen to register more loans.
                //    snt.Send("reg,"+ltid)
                //    snt.QuickButton("^M")
                //Else
                // ... back to DOKST, to send a pickup notification
                client.send('dokst,' + dok.dokid + '\n');
                client.wait_for('DOkstat', [2,31], function() {
                    // FINITO, emit
                    emitComplete();
                });
                //}
            }
        }
    }
    // We now have the information we need. Fire up the Excel inferno, joho!!
    // @TODO: What about UTL/RES ?
    if (dok.utlstatus === 'RES') {
        seddel.res(dok, laaner, lib);
    } else {
        seddel.reg(dok, laaner, lib);
    }
}
// Builds the slip data from the RES (reserve) screen. Sample screen layout:
function start_from_res() {
    /*
     * Reservere (RES)                          BIBSYS UTLÅN
     * Gi kommando:  :                          2013-06-27
     *
     * LTID:  : DOKID/REFID/HEFTID/INNID: 96nf00169 :
     * Reskomm:  :
     * Resreferanse:  :
     * Volum:  År:  Hefte:  :
     * ----------------------------- 96nf00169 ---------------------------------------
     *  Forfatter : Auyang, Sunny Y.
     *  Tittel    : How is quantum field theory possible? / Sunny Y. Auyang.
     *  Trykt     : New York : Oxford University Press, 1995.Finnes også som:
     *
     *  Signatur  : UREAL Fys. 0.2 AUY eks. 2
     *
     *
     *
     * -------------------------------------------------------------------------------
     *  ubo0292451 Dan Michael Olsen Heggø
     *  Nr. 1 på reserveringslista.
     */
    laaner = { kind: 'person' };
    lib = {};
    dok = { utlstatus: 'RES' };
    if (client.get(2, 1, 15) !== 'Reservere (RES)') {
        $.bibduck.log('Ikke på reserveringsskjermen', 'error');
        return;
    }
    // A pickup notification in line 1 means the document is ready (AVH).
    if (client.get(1, 1, 12) === 'Hentebeskjed') {
        dok.utlstatus = 'AVH';
    }
    // No notification and no "Nr." marker => no reservation was made yet.
    if (client.get(1, 1, 12) !== 'Hentebeskjed' && client.get(20, 19, 21) !== 'Nr.') {
        $.bibduck.log('Ingen reservering gjennomført, kan ikke skrive ut stikkseddel', 'error');
        alert('Du må gjennomføre en reservering før du kan skrive ut stikkseddel');
        return;
    }
    dok.tittel = '';
    if (dok.utlstatus === 'AVH') {
        laaner.ltid = client.get(5, 12, 22);
        dok.dokid = client.get(5, 53, 61);
        // Go to DOKST to read the remaining loan details:
        $.bibduck.sendSpecialKey('F12');
        client.wait_for('DOkstat', [2,31], function() {
            if (client.get(23,1,12) === 'Utlkommentar') {
                les_dokstat_skjerm();
            } else {
                client.send(dok.dokid + '\n');
                client.wait_for('Utlkommentar', [23,1], les_dokstat_skjerm);
            }
        });
    } else {
        laaner.ltid = client.get(19, 19, 28);
        dok.dokid = client.get(9, 31, 39);
        // The "Tittel" line floats between rows 10-13 depending on the record.
        if (client.get(10, 2, 7) == 'Tittel') {
            dok.tittel = client.get(10, 14, 79);
        } else if (client.get(11, 2, 7) == 'Tittel') {
            dok.tittel = client.get(11, 14, 79);
        } else if (client.get(12, 2, 7) == 'Tittel') {
            dok.tittel = client.get(12, 14, 79);
        } else if (client.get(13, 2, 7) == 'Tittel') {
            dok.tittel = client.get(13, 14, 79);
        }
        // We need more info about the patron:
        worker.resetPointer();
        worker.send('ltsø,' + laaner.ltid + '\n');
        worker.wait_for('Fyll ut:', [5,1], function() {
            // Send enter once more
            worker.send('\n');
            worker.wait_for('Sist aktiv dato', [22,1], les_ltst_skjerm);
        });
    }
}
// Builds the slip data from the RLIST (reservation list) screen.
// The screen shows up to three reservations on rows 3, 10 and 17 (7 rows
// apart). We first determine which reservation to use (resno), then read
// the patron/document fields from the corresponding row.
//
// Fixes vs. previous version: the three per-row read branches were
// triplicated copies differing only in the row number (now computed), and
// a non-matching "på sms til ... merket" line would throw a TypeError on
// `tilhvem[1]` (now falls through to the "stå i et ref.-felt" alert).
function start_from_rlist() {
    laaner = { kind: 'person' };
    lib = {};
    dok = {};
    var resno = -1;
    if (client.get(2, 1, 25) !== 'Reserveringsliste (RLIST)') {
        $.bibduck.log('Ikke på rlist-skjerm', 'error');
        return;
    }
    var firstline = client.get(1);
    if (firstline.indexOf('Hentebeskjed er sendt') !== -1) {
        // A pickup notification was just sent; find the recipient's row.
        var tilhvem = firstline.match(/på sms til (.+) merket/);
        if (tilhvem !== null) {
            $.bibduck.log('Til hvem? ' + tilhvem[1]);
            if (client.get(4).match(tilhvem[1])) {
                resno = 1;
            } else if (client.get(11).match(tilhvem[1])) {
                resno = 2;
            } else if (client.get(18).match(tilhvem[1])) {
                resno = 3;
            }
        }
    } else {
        // No notification: use the reservation whose ref. field the cursor is in.
        var lineno = client.getCurrentLineNumber();
        $.bibduck.log(lineno);
        if (lineno === 8) {
            resno = 1;
        } else if (lineno === 15) {
            resno = 2;
        } else if (lineno === 22) {
            resno = 3;
        } else {
            alert("Du må stå i et ref.-felt");
            return;
        }
    }
    $.bibduck.log('Bruker reservasjon nummer ' + resno + ' på skjermen');
    if (resno === -1) {
        alert("Du må stå i et ref.-felt");
        return;
    }
    // Reservation n sits on screen row 3 + 7*(n-1): rows 3, 10, 17.
    var row = 3 + 7 * (resno - 1);
    // An 'A' in column 1 marks a document ready for pickup (AVH).
    if (client.get(row, 1, 1) === 'A') {
        dok.utlstatus = 'AVH';
    } else {
        dok.utlstatus = 'RES';
    }
    laaner.ltid = client.get(row, 15, 24);
    laaner.beststed = client.get(row, 47, 54);
    dok.dokid = client.get(row, 63, 71);
    dok.tittel = '';
    if (dok.utlstatus === 'AVH') {
        // Go to DOKST to read the remaining loan details:
        $.bibduck.sendSpecialKey('F12');
        client.wait_for('DOkstat', [2,31], function() {
            if (client.get(23,1,12) === 'Utlkommentar') {
                les_dokstat_skjerm();
            } else {
                client.send(dok.dokid + '\n');
                client.wait_for('Utlkommentar', [23,1], les_dokstat_skjerm);
            }
        });
    } else {
        $.bibduck.log('Sender F12');
        $.bibduck.sendSpecialKey('F12');
        client.wait_for('Utlkommentar', [23,1], les_dokstat_skjerm);
        //emitComplete();
        //seddel.res(dok, laaner, lib);
    }
}
// Prints a loan slip. From the REG screen only the dokid is visible, so a
// worker instance navigates to DOKST to read the full loan details; on the
// DOKST screen itself they can be read directly.
function utlaan() {
    laaner = {};
    lib = {};
    dok = {};
    if (client.get(2, 1, 22) == 'Registrere utlån (REG)') {
        var dokid = client.get(10, 7, 15);
        // Go to the DOKST screen:
        worker.resetPointer();
        worker.send('dokst\n');
        // Can't use "dokst," (with comma) because then dokid is chosen
        // automatically and never refid; send it separately instead.
        worker.wait_for('Utlånsstatus for et dokument', [2,1], function() {
            worker.send(dokid + '\n');
            worker.wait_for('Utlkommentar', [23,1], function() {
                // NOTE(review): les_dokstat_skjerm takes no parameters and
                // reads from `client`, so this `worker` argument is ignored —
                // confirm that is intended when worker !== client.
                les_dokstat_skjerm(worker);
            });
        });
    } else if (client.get(2, 1, 28) == 'Utlånsstatus for et dokument') {
        les_dokstat_skjerm();
    }
}
// Prints a return slip. Handles three screens:
//  - LTSØk: manual return slip for a library borrower (useful e.g. when
//    IRET produced no slip),
//  - IRETur: return of an inter-library loan,
//  - otherwise: return to another branch within the organisation, with the
//    owning library resolved from the shelf signature via config.sigs.
function retur() {
    worker.resetPointer();
    laaner = {};
    lib = {};
    dok = {};
    if (client.get(2, 27, 31) === 'LTSØk') {
        // We print a return slip. Useful e.g. when no slip
        // comes out of IRET.
        if (client.get(18, 18, 20) !== 'lib') {
            alert("Feil: Låntakeren er ikke et bibliotek!");
            $.bibduck.log("Feil: Låntakeren er ikke et bibliotek!");
            return;
        }
        laaner.ltid = client.get(18, 18, 27);
        laaner.navn = client.get(10, 18, 50);
        laaner.kind = 'bibliotek';
        lib.ltid = laaner.ltid;
        lib.navn = laaner.navn;
        seddel.ret(dok, laaner, lib);
        return;
    } else if (client.get(2).indexOf('IRETur') !== -1) {
        dok.dokid = client.get(1, 1, 9);
        dok.bestnr = client.get(4, 49, 57);
        laaner.ltid = client.get(6, 15, 24);
        laaner.navn = client.get(7, 20, 50);
        laaner.kind = 'bibliotek';
        lib.ltid = client.get(6, 15, 24);
        lib.navn = client.get(7, 20, 50);
        // 'xxx' is a placeholder name; blank out the fields.
        if (laaner.navn === 'xxx') {
            laaner.navn = '';
            // NOTE(review): laaner.navn is cleared twice — one of these was
            // probably meant to be laaner.ltid; confirm before changing.
            laaner.navn = '';
            lib.ltid = '';
            lib.navn = '';
        }
    } else {
        // Return to another branch within the organisation
        var sig = client.get(11, 14, 40).split(' ')[0];
        dok.dokid = client.get(6, 31, 39);
        dok.bestnr = '';
        if (sig in config.sigs) {
            lib.ltid = config.sigs[sig];
            lib.navn = config.biblnavn[lib.ltid];
        } else {
            alert('Beklager, BIBDUCK kjenner ikke igjen signaturen "' + sig + '".');
            return;
        }
    }
    dok.tittel = '';
    // The "Tittel" line floats between rows 7-10 depending on the record.
    if (client.get(7, 2, 7) == 'Tittel') {
        dok.tittel = client.get(7, 14, 79);
    } else if (client.get(8, 2, 7) == 'Tittel') {
        dok.tittel = client.get(8, 14, 79);
    } else if (client.get(9, 2, 7) == 'Tittel') {
        dok.tittel = client.get(9, 14, 79);
    } else if (client.get(10, 2, 7) == 'Tittel') {
        dok.tittel = client.get(10, 14, 79);
    }
    if (hjemmebibliotek === '') {
        alert('Libnr. er ikke satt. Dette setter du under Innstillinger.');
    }
    // The book belongs here — no return slip is needed.
    if (lib.ltid === 'lib'+hjemmebibliotek) {
        alert('Boka hører til her. Returseddel trengs ikke.');
        client.bringToFront();
        return;
    }
    seddel.ret(dok, laaner, lib);
}
// Ensures the unit-specific formatter script matching $.bibduck.libnr is
// loaded, then runs start(). The script is (re)loaded only when the library
// number changed since the last run.
function checkFormatter() {
    if (hjemmebibliotek === $.bibduck.libnr) {
        // Right formatter already loaded; go straight on.
        start();
        return;
    }
    hjemmebibliotek = $.bibduck.libnr;
    var scriptPath = 'plugins/stikksedler/' + config.formatters['lib' + hjemmebibliotek];
    $.bibduck.log('Load: ' + scriptPath);
    $.getScript(scriptPath, start);
}
// Dispatcher: determines which BIBSYS screen is showing (by fixed markers
// at fixed coordinates) and delegates to the matching handler
// (loan, return, reservation or reservation list).
function start() {
    $.bibduck.log('Skriver ut stikkseddel... ', {linebreak: false});
    seddel = $.bibduck.stikksedler;
    seddel.libnr = 'lib' + $.bibduck.libnr;
    seddel.beststed = '';
    // Reverse lookup: find the ordering location mapped to our library number.
    for (var key in config.bestillingssteder) {
        if (config.bestillingssteder[key] == seddel.libnr) {
            seddel.beststed = key;
        }
    }
    if (seddel.libnr === 'lib') {
        alert('Obs! Libnr. er ikke satt enda. Dette setter du under Innstillinger i Bibduck.');
        return;
    } else if (seddel.beststed === '') {
        alert('Fant ikke et bestillingssted for biblioteksnummeret ' + seddel.libnr + ' i config.json!');
        return;
    }
    if (client.get(2, 1, 22) === 'Registrere utlån (REG)') {
        utlaan();
    } else if (client.get(14, 1, 8) === 'Låntaker') { // DOkstat
        utlaan();
    } else if (client.get(15, 2, 13) === 'Returnert av') {
        retur();
    } else if (client.get(1).indexOf('er returnert') !== -1 && client.get(2).indexOf('IRETur') !== -1) { // Retur innlån (IRETur)
        retur();
    } else if (client.get(2, 1, 32) === 'Opplysninger om låntaker (LTSØk)') {
        retur();
    } else if (client.get(2, 1, 15) === 'Reservere (RES)') {
        start_from_res();
    } else if (client.get(2, 1, 25) === 'Reserveringsliste (RLIST)') {
        start_from_rlist();
    } else {
        $.bibduck.log('ikke støttet ', {timestamp: false});
        alert('Stikkseddel fra denne skjermen er ikke støttet (enda). Ta DOKST og prøv igjen');
        client.bringToFront();
    }
}
// Registers the plugin with bibduck.
// Fix: the config-load log line called `bibduck.log(...)` while every other
// call site in this file uses the `$.bibduck.log(...)` API — made consistent.
$.bibduck.plugins.push({
    name: 'Stikkseddel-tillegg',
    // Entry point: collects data from the current BIBSYS screen and prints
    // a routing slip. `bibsys` is the foreground client instance; `cb` is an
    // optional callback invoked with the collected data (see emitComplete).
    lag_stikkseddel: function(bibsys, cb) {
        callback = cb;
        client = bibsys;
        current_date = client.get(3, 70, 79);
        //$.bibduck.log(current_date);
        if ($.bibduck.printerPort === '') {
            alert('Sett opp stikkseddelskriver ved å trykke på knappen «Innstillinger» først.');
            return;
        }
        // Prefer a background instance for extra lookups so the user's
        // screen is left alone; fall back to the foreground client.
        if ($.bibduck.getBackgroundInstance() !== null) {
            worker = $.bibduck.getBackgroundInstance();
        } else {
            worker = client;
        }
        // Load config if not yet loaded
        if (config === undefined) {
            $.bibduck.log('Load: plugins/stikksedler/config.json');
            $.getJSON('plugins/stikksedler/config.json', function(json) {
                config = json;
                checkFormatter();
            });
        } else {
            checkFormatter();
        }
    },
    // Guards against re-triggering while a slip is being produced.
    waiting: false,
    // Polled on every screen update; auto-prints a slip when one of the
    // trigger conditions appears, and re-arms once they are all gone.
    update: function(bibsys) {
        var trigger1 = (bibsys.get(1).indexOf('Hentebeskjed er sendt') !== -1 && (bibsys.get(2, 1, 17) === 'Reserveringsliste' || bibsys.get(2, 1, 15) === 'Reservere (RES)')),
            trigger2 = (bibsys.get(1).indexOf('er returnert') !== -1 && bibsys.get(2).indexOf('IRETur') !== -1),
            trigger3 = (bibsys.getCurrentLine().indexOf('stikk!') !== -1);
        if (this.waiting === false && (trigger1 || trigger2 || trigger3)) {
            this.waiting = true;
            if (trigger3) bibsys.clearInput();
            $.bibduck.log('stikksedler.js: Lager stikkseddel automatisk', 'info');
            this.lag_stikkseddel(bibsys);
        } else if (this.waiting === true && !trigger1 && !trigger2 && !trigger3) {
            this.waiting = false;
        }
    }
});
})(); | plugins/stikksedler.js | /*****************************************************************************
* <stikksedler.js>
* Modul for å skrive ut stikksedler ved hjelp av Excel-maler
* Av: Bård S. Tuseth (c) 2009
* Fredrik Hovind Juell (c) 2010
* Dan Michael O. Heggø (c) 2013
*
* Nye kommandoer:
* stikk! : Skriver stikkseddel
*****************************************************************************/
$.bibduck.stikksedler = {
// Settes under Innstillinger i brukergrensesnittet
beststed: '',
load_xls: function (filename) {
var printerStr = window.bibduck.printerName + ' on ' + window.bibduck.printerPort;
this.excel = new ActiveXObject('Excel.Application');
this.excel.Visible = false;
this.excel.Workbooks.Open(getCurrentDir() + filename);
this.excel.Application.ActivePrinter = printerStr;
return this.excel;
},
print_and_close: function() {
this.excel.ActiveWorkbook.PrintOut();
this.excel.ActiveWorkbook.Close(0);
this.excel.Quit();
delete this.excel;
this.excel = undefined;
$.bibduck.log('OK', {timestamp: false});
},
current_date: function() {
var today = new Date(),
dd = today.getDate(),
mm = today.getMonth() + 1, //January is 0!
yyyy = today.getFullYear();
if (dd < 10) {
dd = '0' + dd;
}
if(mm < 10) {
mm = '0' + mm;
}
return yyyy + '-' + mm + '-' + dd;
}
};
(function() {
var worker,
client,
dok = {},
laaner = {},
lib = {},
excel,
hjemmebibliotek = '',
current_date = '',
config,
seddel,
callback;
function les_dokstat_skjerm() {
if (client.get(2, 1, 28) !== 'Utlånsstatus for et dokument') {
alert('Vi er ikke på DOKST-skjermen :(');
return;
}
// Sjekker hvilken linje tittelen står på:
if (client.get(7, 2, 7) == 'Tittel') {
// Lån fra egen samling
dok.tittel = client.get(7, 14, 80).trim();
} else if (client.get(8, 2, 7) == 'Tittel') {
// ik...
dok.tittel = client.get(8, 13, 80).trim();
} else {
// Relativt sjelden case? Linje 7-10 er fritekst, og
// tittel og forfatter bytter typisk mellom linje 7 og 8.
// En enkel test, som sikkert vil feile i flere tilfeller:
var tittel1 = client.get(7, 2, 80).trim(),
tittel2 = client.get(8, 2, 80).trim();
if (tittel1.length > tittel2.length) {
dok.tittel = tittel1;
} else {
dok.tittel = tittel2;
}
}
dok.dokid = client.get( 6, 31, 39);
if (dok.dokid === '') {
alert('Har du husket å trykke enter?');
return;
}
laaner.ltid = client.get(14, 11, 20);
dok.utlaansdato = client.get(18, 18, 27); // Utlånsdato
dok.forfvres = client.get(20, 18, 27); // Forfall v./res
dok.forfallsdato = client.get(21, 18, 27); // Forfallsdato
dok.utlstatus = client.get( 3, 46, 65); // AVH, RES, UTL, UTL/RES, ...
dok.purretype = client.get(17, 68, 68);
dok.kommentar = client.get(23, 17, 80).trim();
// Dokument til avhenting?
if (dok.utlstatus === 'AVH') {
dok.hentenr = client.get(1, 44, 50);
dok.hentefrist = client.get(1, 26, 35);
} else {
//Tester om låntaker er et bibliotek:
if (laaner.ltid.substr(0,3) == 'lib') {
laaner.kind = 'bibliotek';
laaner.navn = client.get(14, 22, 79).trim();
} else {
laaner.kind = 'person';
}
}
// DEBUG:
/*
$.bibduck.log('Info om lånet:');
$.each(dok, function(k,v) {
$.bibduck.log(' ' + k + ': ' + v);
});
*/
worker.resetPointer();
// Hva gjør vi ift. UTL/RES?
// Skriver ut stikkseddel for det utlånet eks. eller det reserverte?
if (dok.utlstatus === 'AVH') {
// Vi trenger ikke mer informasjon.
// La oss kjøre i gang Excel-helvetet, joho!!
emitComplete();
seddel.avh(dok, laaner, lib);
} else if (dok.utlstatus === 'RES') {
// Dokument som *kun* er reservert
// Finn låneren i reservasjonslista:
worker.send('rlist,\n');
worker.wait_for('Hentefrist:', [6,5], function() {
laaner.ltid = worker.get(3, 15, 24);
$.bibduck.sendSpecialKey('F12');
worker.wait_for('DOkstat', [2,31], function() {
worker.resetPointer();
// Vi trenger mer info om låneren:
worker.send('ltsø,' + laaner.ltid + '\n');
worker.wait_for('Fyll ut:', [5,1], function() {
// Vi sender enter på nytt
worker.send('\n');
worker.wait_for('Sist aktiv dato', [22,1], les_ltst_skjerm);
});
});
});
} else if (laaner.kind === 'person') {
// Vi trenger mer info om låneren:
worker.send('ltsø,' + laaner.ltid + '\n');
worker.wait_for('Fyll ut:', [5,1], function() {
// Vi sender enter på nytt
worker.send('\n');
worker.wait_for('Sist aktiv dato', [22,1], les_ltst_skjerm);
});
} else {
// Vi trenger ikke mer informasjon.
// La oss kjøre i gang Excel-helvetet, joho!!
emitComplete();
seddel.reg(dok, laaner, lib);
}
}
function emitComplete() {
if (callback !== undefined) {
setTimeout(function() { // a slight delay never hurts
callback({
patron: laaner,
library: lib,
document: dok,
beststed: seddel.beststed
});
}, 200);
}
}
function les_ltst_skjerm() {
if (worker.get(2, 1, 24) !== 'Opplysninger om låntaker') {
alert("Vi er ikke på LTSØ-skjermen :(");
return;
}
laaner.beststed = worker.get( 7, 71, 80).trim();
laaner.etternavn = worker.get( 5, 18, 58).trim();
laaner.fornavn = worker.get( 6, 18, 58).trim();
laaner.spraak = worker.get(19, 41, 44).trim();
// DEBUG:
/*
$.bibduck.log('Info om låner:');
$.each(laaner, function(k,v) {
$.bibduck.log(' ' + k + ': ' + v);
});*/
if (laaner.beststed in config.bestillingssteder) {
lib.ltid = config.bestillingssteder[laaner.beststed];
} else {
alert("Ukjent bestillingssted: " + laaner.beststed);
return;
}
if (lib.ltid in config.biblnavn) {
lib.navn = config.biblnavn[lib.ltid];
} else {
alert("Ukjent bibliotek: " + lib.ltid);
return;
}
// DEBUG:
/*
$.bibduck.log('Info om bibliotek:');
$.each(lib, function(k,v) {
$.bibduck.log(' ' + k + ': ' + v);
});*/
if (worker !== client) {
worker.resetPointer();
worker.send('men,\n');
} else {
if (dok.utlstatus === 'RES') {
if (laaner.beststed == seddel.beststed) {
alert('Obs! Låner har bestillingssted ' + laaner.beststed + ', så det burde ikke være behov for å sende det.');
return;
}
} else {
// Gi beskjed hvis boka skal ut av huset
if (laaner.kind === 'person' && laaner.beststed !== seddel.beststed) {
alert('Obs! Låner har bestillingssted: ' + laaner.beststed);
// Hvis boken skal sendes, så gå til utlånskommentarfeltet.
client.send('en,' + dok.dokid + '\n');
client.wait_for('Utlmkomm:', [8,1], function() {
client.send('\t\t\t');
emitComplete();
});
// Hvis ikke går vi tilbake til dokst-skjermen:
} else {
//result = snt.MessageBox("Vil du gå til REG for å låne ut flere bøker?", "Error", ICON_QUESTION Or BUTTON_YESNO Or DEFBUTTON2)
//if (result == IDYES) {
// // ... tilbake til utlånsskjerm for å registrere flere utlån.
// snt.Send("reg,"+ltid)
// snt.QuickButton("^M")
//Else
// ... tilbake til dokst, for å sende hentebeskjed
client.send('dokst,' + dok.dokid + '\n');
client.wait_for('DOkstat', [2,31], function() {
// FINITO, emit
emitComplete();
});
//}
}
}
}
// Nå har vi informasjonen vi trenger. La oss kjøre i gang Excel-helvetet, joho!!
// @TODO: Hva med UTL/RES ?
if (dok.utlstatus === 'RES') {
seddel.res(dok, laaner, lib);
} else {
seddel.reg(dok, laaner, lib);
}
}
function start_from_res() {
/*
* Reservere (RES) BIBSYS UTLÅN
* Gi kommando: : 2013-06-27
*
* LTID: : DOKID/REFID/HEFTID/INNID: 96nf00169 :
* Reskomm: :
* Resreferanse: :
* Volum: År: Hefte: :
* ----------------------------- 96nf00169 ---------------------------------------
* Forfatter : Auyang, Sunny Y.
* Tittel : How is quantum field theory possible? / Sunny Y. Auyang.
* Trykt : New York : Oxford University Press, 1995.Finnes også som:
*
* Signatur : UREAL Fys. 0.2 AUY eks. 2
*
*
*
* -------------------------------------------------------------------------------
* ubo0292451 Dan Michael Olsen Heggø
* Nr. 1 på reserveringslista.
*/
laaner = { kind: 'person' };
lib = {};
dok = { utlstatus: 'RES' };
if (client.get(2, 1, 15) !== 'Reservere (RES)') {
$.bibduck.log('Ikke på reserveringsskjermen', 'error');
return;
}
if (client.get(1, 1, 12) === 'Hentebeskjed') {
dok.utlstatus = 'AVH';
}
if (client.get(1, 1, 12) !== 'Hentebeskjed' && client.get(20, 19, 21) !== 'Nr.') {
$.bibduck.log('Ingen reservering gjennomført, kan ikke skrive ut stikkseddel', 'error');
alert('Du må gjennomføre en reservering før du kan skrive ut stikkseddel');
return;
}
dok.tittel = '';
if (dok.utlstatus === 'AVH') {
laaner.ltid = client.get(5, 12, 22);
dok.dokid = client.get(5, 53, 61);
// Gå til dokst:
$.bibduck.sendSpecialKey('F12');
client.wait_for('DOkstat', [2,31], function() {
if (client.get(23,1,12) === 'Utlkommentar') {
les_dokstat_skjerm();
} else {
client.send(dok.dokid + '\n');
client.wait_for('Utlkommentar', [23,1], les_dokstat_skjerm);
}
});
} else {
laaner.ltid = client.get(19, 19, 28);
dok.dokid = client.get(9, 31, 39);
if (client.get(10, 2, 7) == 'Tittel') {
dok.tittel = client.get(10, 14, 79);
} else if (client.get(11, 2, 7) == 'Tittel') {
dok.tittel = client.get(11, 14, 79);
} else if (client.get(12, 2, 7) == 'Tittel') {
dok.tittel = client.get(12, 14, 79);
} else if (client.get(13, 2, 7) == 'Tittel') {
dok.tittel = client.get(13, 14, 79);
}
// Vi trenger mer info om låneren:
worker.resetPointer();
worker.send('ltsø,' + laaner.ltid + '\n');
worker.wait_for('Fyll ut:', [5,1], function() {
// Vi sender enter på nytt
worker.send('\n');
worker.wait_for('Sist aktiv dato', [22,1], les_ltst_skjerm);
});
}
}
function start_from_rlist() {
laaner = { kind: 'person' };
lib = {};
dok = {};
if (client.get(2, 1, 25) !== 'Reserveringsliste (RLIST)') {
$.bibduck.log('Ikke på rlist-skjerm', 'error');
return;
}
if (client.get(3,1,1) === 'A') {
dok.utlstatus = 'AVH';
} else {
dok.utlstatus = 'RES';
}
laaner.ltid = client.get(3, 15, 24);
laaner.beststed = client.get(3, 47, 54);
dok.dokid = client.get(3, 63, 71);
dok.tittel = '';
if (dok.utlstatus === 'AVH') {
// Gå til dokst:
$.bibduck.sendSpecialKey('F12');
client.wait_for('DOkstat', [2,31], function() {
if (client.get(23,1,12) === 'Utlkommentar') {
les_dokstat_skjerm();
} else {
client.send(dok.dokid + '\n');
client.wait_for('Utlkommentar', [23,1], les_dokstat_skjerm);
}
});
} else {
$.bibduck.sendSpecialKey('F12');
client.wait_for('Utlkommentar', [23,1], les_dokstat_skjerm);
//emitComplete();
//seddel.res(dok, laaner, lib);
}
}
function utlaan() {
laaner = {};
lib = {};
dok = {};
if (client.get(2, 1, 22) == 'Registrere utlån (REG)') {
var dokid = client.get(10, 7, 15);
// Gå til DOKST-skjerm:
worker.resetPointer();
worker.send('dokst\n');
//Kan ikke ta dokst, (med komma) for da blir dokid automatisk valgt og aldri refid, sender separat
worker.wait_for('Utlånsstatus for et dokument', [2,1], function() {
worker.send(dokid + '\n');
worker.wait_for('Utlkommentar', [23,1], function() {
les_dokstat_skjerm(worker);
});
});
} else if (client.get(2, 1, 28) == 'Utlånsstatus for et dokument') {
les_dokstat_skjerm();
}
}
function retur() {
worker.resetPointer();
laaner = {};
lib = {};
dok = {};
if (client.get(2, 27, 31) === 'LTSØk') {
// Vi skriver ut en retur-seddel. Nyttig f.eks. hvis
// man ikke får stikkseddel fra IRET
if (client.get(18, 18, 20) !== 'lib') {
alert("Feil: Låntakeren er ikke et bibliotek!");
return;
}
laaner.ltid = client.get(18, 18, 27);
laaner.navn = client.get(10, 18, 50);
laaner.kind = 'bibliotek';
lib.ltid = laaner.ltid;
lib.navn = laaner.navn;
seddel.ret(dok, laaner, lib);
return;
} else if (client.get(2).indexOf('IRETur') !== -1) {
dok.dokid = client.get(1, 1, 9);
dok.bestnr = client.get(4, 49, 57);
laaner.ltid = client.get(6, 15, 24);
laaner.navn = client.get(7, 20, 50);
laaner.kind = 'bibliotek';
lib.ltid = client.get(6, 15, 24);
lib.navn = client.get(7, 20, 50);
if (laaner.navn === 'xxx') {
laaner.navn = '';
laaner.navn = '';
lib.ltid = '';
lib.navn = '';
}
} else {
// Retur til annet bibliotek innad i organisasjonen
var sig = client.get(11, 14, 40).split(' ')[0];
dok.dokid = client.get(6, 31, 39);
dok.bestnr = '';
if (sig in config.sigs) {
lib.ltid = config.sigs[sig];
lib.navn = config.biblnavn[lib.ltid];
} else {
alert('Beklager, BIBDUCK kjenner ikke igjen signaturen "' + sig + '".');
return;
}
}
dok.tittel = '';
if (client.get(7, 2, 7) == 'Tittel') {
dok.tittel = client.get(7, 14, 79);
} else if (client.get(8, 2, 7) == 'Tittel') {
dok.tittel = client.get(8, 14, 79);
} else if (client.get(9, 2, 7) == 'Tittel') {
dok.tittel = client.get(9, 14, 79);
} else if (client.get(10, 2, 7) == 'Tittel') {
dok.tittel = client.get(10, 14, 79);
}
if (hjemmebibliotek === '') {
alert('Libnr. er ikke satt. Dette setter du under Innstillinger.');
}
if (lib.ltid === 'lib'+hjemmebibliotek) {
alert('Boka hører til her. Returseddel trengs ikke.');
client.bringToFront();
return;
}
seddel.ret(dok, laaner, lib);
}
function checkFormatter() {
// Last inn enhetsspesifikt script
if (hjemmebibliotek !== $.bibduck.libnr) {
hjemmebibliotek = $.bibduck.libnr;
var f = config.formatters['lib' + hjemmebibliotek];
$.bibduck.log('Load: plugins/stikksedler/' + f);
$.getScript('plugins/stikksedler/' + f, function() {
start();
});
} else {
start();
}
}
function start() {
$.bibduck.log('Skriver ut stikkseddel... ', {linebreak: false});
seddel = $.bibduck.stikksedler;
seddel.libnr = 'lib' + $.bibduck.libnr;
seddel.beststed = '';
for (var key in config.bestillingssteder) {
if (config.bestillingssteder[key] == seddel.libnr) {
seddel.beststed = key;
}
}
if (seddel.libnr === 'lib') {
alert('Obs! Libnr. er ikke satt enda. Dette setter du under Innstillinger i Bibduck.');
return;
} else if (seddel.beststed === '') {
alert('Fant ikke et bestillingssted for biblioteksnummeret ' + seddel.libnr + ' i config.json!');
return;
}
if (client.get(2, 1, 22) === 'Registrere utlån (REG)') {
utlaan();
} else if (client.get(14, 1, 8) === 'Låntaker') {
utlaan();
} else if (client.get(15, 2, 13) === 'Returnert av') {
retur();
} else if (client.get(1).indexOf('er returnert') !== -1 && client.get(2).indexOf('IRETur') !== -1) { // Retur innlån (IRETur)
retur();
} else if (client.get(2, 1, 32) === 'Opplysninger om låntaker (LTSØk)') {
retur();
} else if (client.get(2, 1, 15) === 'Reservere (RES)') {
start_from_res();
} else if (client.get(2, 1, 25) === 'Reserveringsliste (RLIST)') {
start_from_rlist();
} else {
$.bibduck.log('ikke støttet ', {timestamp: false});
alert('Stikkseddel fra denne skjermen er ikke støttet (enda). Ta DOKST og prøv igjen');
client.bringToFront();
}
}
$.bibduck.plugins.push({
name: 'Stikkseddel-tillegg',
lag_stikkseddel: function(bibsys, cb) {
callback = cb;
client = bibsys;
current_date = client.get(3, 70, 79);
//$.bibduck.log(current_date);
if ($.bibduck.printerPort === '') {
alert('Sett opp stikkseddelskriver ved å trykke på knappen «Innstillinger» først.');
return;
}
if ($.bibduck.getBackgroundInstance() !== null) {
worker = $.bibduck.getBackgroundInstance();
} else {
worker = client;
}
// Load config if not yet loaded
if (config === undefined) {
bibduck.log('Load: plugins/stikksedler/config.json');
$.getJSON('plugins/stikksedler/config.json', function(json) {
config = json;
checkFormatter();
});
} else {
checkFormatter();
}
},
waiting: false,
update: function(bibsys) {
var trigger1 = (bibsys.get(1).indexOf('Hentebeskjed er sendt') !== -1 && (bibsys.get(2, 1, 17) === 'Reserveringsliste' || bibsys.get(2, 1, 15) === 'Reservere (RES)')),
trigger2 = (bibsys.get(1).indexOf('er returnert') !== -1 && bibsys.get(2).indexOf('IRETur') !== -1),
trigger3 = (bibsys.getCurrentLine().indexOf('stikk!') !== -1);
if (this.waiting === false && (trigger1 || trigger2 || trigger3)) {
this.waiting = true;
if (trigger3) bibsys.clearInput();
$.bibduck.log('stikksedler.js: Lager stikkseddel automatisk', 'info');
this.lag_stikkseddel(bibsys);
} else if (this.waiting === true && !trigger1 && !trigger2 && !trigger3) {
this.waiting = false;
}
}
});
})(); | bedre håndtering av stikksedler for dokumenter med status RES/AVH
| plugins/stikksedler.js | bedre håndtering av stikksedler for dokumenter med status RES/AVH | <ide><path>lugins/stikksedler.js
<ide> }
<ide> }
<ide>
<del> dok.dokid = client.get( 6, 31, 39);
<add> dok.dokid = client.get( 6, 31, 39);
<ide>
<ide> if (dok.dokid === '') {
<ide> alert('Har du husket å trykke enter?');
<ide> // Finn låneren i reservasjonslista:
<ide> worker.send('rlist,\n');
<ide> worker.wait_for('Hentefrist:', [6,5], function() {
<del> laaner.ltid = worker.get(3, 15, 24);
<del> $.bibduck.sendSpecialKey('F12');
<add> if (worker.get(3, 63, 71) === dok.dokid) {
<add> $.bibduck.log(' Bruker reservasjon nummer 1 på RLIST-skjermen');
<add> laaner.ltid = worker.get(3, 15, 24);
<add> } else if (worker.get(10, 63, 71) === dok.dokid) {
<add> $.bibduck.log(' Bruker reservasjon nummer 2 på RLIST-skjermen');
<add> laaner.ltid = worker.get(10, 15, 24);
<add> } else if (worker.get(17, 63, 71) === dok.dokid) {
<add> $.bibduck.log(' Bruker reservasjon nummer 3 på RLIST-skjermen');
<add> laaner.ltid = worker.get(17, 15, 24);
<add> }
<add> $.bibduck.sendSpecialKey('F12');
<ide> worker.wait_for('DOkstat', [2,31], function() {
<ide> worker.resetPointer();
<ide>
<ide> $.bibduck.log(' ' + k + ': ' + v);
<ide> });*/
<ide>
<add> lib.ltid = 'ukjent';
<add> lib.navn = 'ukjent';
<ide> if (laaner.beststed in config.bestillingssteder) {
<ide> lib.ltid = config.bestillingssteder[laaner.beststed];
<ide> } else {
<del> alert("Ukjent bestillingssted: " + laaner.beststed);
<add> // @TODO: Sjekk hvordan stikksedler blir seende ut for brukere
<add> // med koblede lånekort. En bruker med lånekort fra f.eks.
<add> // ubbrb som vi kobler, beholder beststed ubbrb.
<add> $.bibduck.log("Kjenner ikke libnr for bestillingssted: " + laaner.beststed, 'warn');
<ide> return;
<ide> }
<ide> if (lib.ltid in config.biblnavn) {
<ide> lib.navn = config.biblnavn[lib.ltid];
<del> } else {
<del> alert("Ukjent bibliotek: " + lib.ltid);
<del> return;
<add> } else if (lib.ltid !== 'ukjent') {
<add> $.bibduck.log("Kjenner ikke navn for libnr: " + lib.ltid, 'warn');
<ide> }
<ide>
<ide> // DEBUG:
<ide> laaner = { kind: 'person' };
<ide> lib = {};
<ide> dok = {};
<add> var resno = -1;
<ide>
<ide> if (client.get(2, 1, 25) !== 'Reserveringsliste (RLIST)') {
<ide> $.bibduck.log('Ikke på rlist-skjerm', 'error');
<ide> return;
<ide> }
<del>
<del> if (client.get(3,1,1) === 'A') {
<del> dok.utlstatus = 'AVH';
<del> } else {
<del> dok.utlstatus = 'RES';
<del> }
<del>
<del> laaner.ltid = client.get(3, 15, 24);
<del> laaner.beststed = client.get(3, 47, 54);
<del> dok.dokid = client.get(3, 63, 71);
<add>
<add> var firstline = client.get(1);
<add> if (firstline.indexOf('Hentebeskjed er sendt') !== -1) {
<add> var tilhvem = firstline.match(/på sms til (.+) merket/);
<add> $.bibduck.log('Til hvem? ' + tilhvem[1]);
<add> if (client.get(4).match(tilhvem[1])) {
<add> resno = 1;
<add> } else if (client.get(11).match(tilhvem[1])) {
<add> resno = 2;
<add> } else if (client.get(18).match(tilhvem[1])) {
<add> resno = 3;
<add> }
<add> } else {
<add> var lineno = client.getCurrentLineNumber();
<add> $.bibduck.log(lineno);
<add> if (lineno === 8) {
<add> resno = 1;
<add> } else if (lineno === 15) {
<add> resno = 2;
<add> } else if (lineno === 22) {
<add> resno = 3;
<add> } else {
<add> alert("Du må stå i et ref.-felt");
<add> return;
<add> }
<add> }
<add> $.bibduck.log('Bruker reservasjon nummer ' + resno + ' på skjermen');
<add> if (resno === 1) {
<add> if (client.get(3,1,1) === 'A') {
<add> dok.utlstatus = 'AVH';
<add> } else {
<add> dok.utlstatus = 'RES';
<add> }
<add> laaner.ltid = client.get(3, 15, 24);
<add> laaner.beststed = client.get(3, 47, 54);
<add> dok.dokid = client.get(3, 63, 71);
<add> } else if (resno === 2) {
<add> if (client.get(10,1,1) === 'A') {
<add> dok.utlstatus = 'AVH';
<add> } else {
<add> dok.utlstatus = 'RES';
<add> }
<add> laaner.ltid = client.get(10, 15, 24);
<add> laaner.beststed = client.get(10, 47, 54);
<add> dok.dokid = client.get(10, 63, 71);
<add>
<add> } else if (resno === 3) {
<add> if (client.get(17,1,1) === 'A') {
<add> dok.utlstatus = 'AVH';
<add> } else {
<add> dok.utlstatus = 'RES';
<add> }
<add> laaner.ltid = client.get(17, 15, 24);
<add> laaner.beststed = client.get(17, 47, 54);
<add> dok.dokid = client.get(17, 63, 71);
<add>
<add> } else {
<add> alert("Du må stå i et ref.-felt");
<add> return;
<add> }
<add>
<ide> dok.tittel = '';
<ide>
<ide> if (dok.utlstatus === 'AVH') {
<ide> });
<ide>
<ide> } else {
<del> $.bibduck.sendSpecialKey('F12');
<add> $.bibduck.log('Sender F12');
<add> $.bibduck.sendSpecialKey('F12');
<ide> client.wait_for('Utlkommentar', [23,1], les_dokstat_skjerm);
<ide> //emitComplete();
<ide> //seddel.res(dok, laaner, lib);
<ide>
<ide> if (client.get(18, 18, 20) !== 'lib') {
<ide> alert("Feil: Låntakeren er ikke et bibliotek!");
<add> $.bibduck.log("Feil: Låntakeren er ikke et bibliotek!");
<ide> return;
<ide> }
<ide>
<ide>
<ide> if (client.get(2, 1, 22) === 'Registrere utlån (REG)') {
<ide> utlaan();
<del> } else if (client.get(14, 1, 8) === 'Låntaker') {
<add> } else if (client.get(14, 1, 8) === 'Låntaker') { // DOkstat
<ide> utlaan();
<ide> } else if (client.get(15, 2, 13) === 'Returnert av') {
<ide> retur(); |
|
Java | apache-2.0 | ef090f23f925bdd76f61d0f394a4847c3746d8cc | 0 | jitsi/libjitsi,jitsi/libjitsi,jitsi/libjitsi,jitsi/libjitsi | /*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.neomedia.rtp.sendsidebandwidthestimation;
import org.jitsi.impl.neomedia.rtcp.*;
import org.jitsi.impl.neomedia.rtp.*;
import org.jitsi.impl.neomedia.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.libjitsi.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.service.neomedia.rtp.*;
import org.jitsi.util.*;
import java.util.*;
/**
* Implements the send-side bandwidth estimation described in
* https://tools.ietf.org/html/draft-ietf-rmcat-gcc-01
* Heavily based on code from webrtc.org (send_side_bandwidth_estimation.cc,
* commit ID 7ad9e661f8a035d49d049ccdb87c77ae8ecdfa35).
*
* @author Boris Grozev
*/
class SendSideBandwidthEstimation
extends RTCPPacketListenerAdapter
implements BandwidthEstimator
{
/**
* The name of the property that specifies the low-loss threshold
* (expressed as a proportion of lost packets).
* See {@link #low_loss_threshold_}.
*/
public final static String LOW_LOSS_THRESHOLD_PNAME
= SendSideBandwidthEstimation.class.getName() + ".lowLossThreshold";
/**
* The name of the property that specifies the high-loss threshold
* (expressed as a proportion of lost packets).
* See {@link #high_loss_threshold_}.
*/
public final static String HIGH_LOSS_THRESHOLD_PNAME
= SendSideBandwidthEstimation.class.getName() + ".highLossThreshold";
/**
* The name of the property that specifies the bitrate threshold (in kbps).
* See {@link #bitrate_threshold_bps_}.
*/
public final static String BITRATE_THRESHOLD_KBPS_PNAME
= SendSideBandwidthEstimation.class.getName() + ".bitrateThresholdKbps";
/**
* The name of the property that specifies the probability of enabling the
* loss-based experiment.
*/
public final static String LOSS_EXPERIMENT_PROBABILITY_PNAME
= SendSideBandwidthEstimation.class.getName()
+ ".lossExperimentProbability";
/**
* The name of the property that specifies the probability of enabling the
* timeout experiment.
*/
public final static String TIMEOUT_EXPERIMENT_PROBABILITY_PNAME
= SendSideBandwidthEstimation.class.getName()
+ ".timeoutExperimentProbability";
/**
* The ConfigurationService to get config values from.
*/
private static final ConfigurationService
cfg = LibJitsi.getConfigurationService();
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kBweIncreaseIntervalMs = 1000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final long kBweDecreaseIntervalMs = 300;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kDefaultMinBitrateBps = 10000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kDefaultMaxBitrateBps = 1000000000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kStartPhaseMs = 2000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kLimitNumPackets = 20;
// Expecting that RTCP feedback is sent uniformly within [0.5, 1.5]s
// intervals.
private static final long kFeedbackIntervalMs = 1500;
private static final long kFeedbackTimeoutIntervals = 3;
private static final long kTimeoutIntervalMs = 1000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final float kDefaultLowLossThreshold = 0.02f;
/**
* send_side_bandwidth_estimation.cc
*/
private static final float kDefaultHighLossThreshold = 0.1f;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kDefaultBitrateThresholdKbps = 0;
/**
* Disable the loss experiment by default.
*/
private static final float kDefaultLossExperimentProbability = 0;
/**
* Disable the timeout experiment by default.
*/
private static final float kDefaultTimeoutExperimentProbability = 0;
/**
* The random number generator for all instances of this class.
*/
private static final Random kRandom = new Random();
/**
* The <tt>Logger</tt> used by the {@link SendSideBandwidthEstimation} class
* and its instances for logging output.
*/
private static final Logger logger
= Logger.getLogger(SendSideBandwidthEstimation.class);
/**
* The {@link TimeSeriesLogger} to be used by this instance to print time
* series.
*/
private static final TimeSeriesLogger timeSeriesLogger
= TimeSeriesLogger.getTimeSeriesLogger(
SendSideBandwidthEstimation.class);
/**
* send_side_bandwidth_estimation.h
*/
private final float low_loss_threshold_;
/**
* send_side_bandwidth_estimation.h
*/
private final float high_loss_threshold_;
/**
* send_side_bandwidth_estimation.h
*/
private final int bitrate_threshold_bps_;
/**
* send_side_bandwidth_estimation.h
*/
private long first_report_time_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private int lost_packets_since_last_loss_update_Q8_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private int expected_packets_since_last_loss_update_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private boolean has_decreased_since_last_fraction_loss_ = false;
/**
* send_side_bandwidth_estimation.h
*
* uint8_t last_fraction_loss_;
*/
private int last_fraction_loss_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private long last_feedback_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private long last_packet_report_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private long last_timeout_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private final boolean in_timeout_experiment_;
/**
* send_side_bandwidth_estimation.h
*/
private int min_bitrate_configured_ = kDefaultMinBitrateBps;
/**
* send_side_bandwidth_estimation.h
*/
private int max_bitrate_configured_ = kDefaultMaxBitrateBps;
/**
* send_side_bandwidth_estimation.h
*/
private long time_last_decrease_ms_= 0;
/**
* send_side_bandwidth_estimation.h
*/
private long bwe_incoming_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private long bitrate_;
/**
* send_side_bandwidth_estimation.h
*/
private Deque<Pair<Long>> min_bitrate_history_ = new LinkedList<>();
/**
* The {@link DiagnosticContext} of this instance.
*/
private final DiagnosticContext diagnosticContext;
private final List<BandwidthEstimator.Listener> listeners
= new LinkedList<>();
/**
* The {@link MediaStream} for this {@link SendSideBandwidthEstimation}.
*/
private final MediaStream mediaStream;
/**
* The instance that holds stats for this instance.
*/
private final StatisticsImpl statistics = new StatisticsImpl();
    /**
     * Initializes a new {@link SendSideBandwidthEstimation} instance for the
     * given stream, seeding the estimate with the given start bitrate.
     *
     * @param stream the {@link MediaStreamImpl} whose outgoing bandwidth is
     * being estimated; also supplies the {@link DiagnosticContext}.
     * @param startBitrate the initial bitrate estimate in bps.
     */
    SendSideBandwidthEstimation(MediaStreamImpl stream, long startBitrate)
    {
        mediaStream = stream;
        diagnosticContext = stream.getDiagnosticContext();
        // Each instance independently draws whether it participates in the
        // loss-threshold experiment; participants read the thresholds from
        // configuration, non-participants use the compiled-in defaults.
        float lossExperimentProbability = (float) cfg.getDouble(
            LOSS_EXPERIMENT_PROBABILITY_PNAME,
            kDefaultLossExperimentProbability);
        if (kRandom.nextFloat() < lossExperimentProbability)
        {
            low_loss_threshold_ = (float) cfg.getDouble(
                LOW_LOSS_THRESHOLD_PNAME, kDefaultLowLossThreshold);
            high_loss_threshold_ = (float) cfg.getDouble(
                HIGH_LOSS_THRESHOLD_PNAME, kDefaultHighLossThreshold);
            // Configured in kbps, stored in bps.
            bitrate_threshold_bps_ = 1000 * cfg.getInt(
                BITRATE_THRESHOLD_KBPS_PNAME, kDefaultBitrateThresholdKbps);
        }
        else
        {
            low_loss_threshold_ = kDefaultLowLossThreshold;
            high_loss_threshold_ = kDefaultHighLossThreshold;
            bitrate_threshold_bps_ = 1000 * kDefaultBitrateThresholdKbps;
        }
        // A second, independent draw decides participation in the
        // feedback-timeout experiment (see updateEstimate()).
        float timeoutExperimentProbability = (float) cfg.getDouble(
            TIMEOUT_EXPERIMENT_PROBABILITY_PNAME,
            kDefaultTimeoutExperimentProbability);
        in_timeout_experiment_
            = kRandom.nextFloat() < timeoutExperimentProbability;
        // Listeners cannot be registered yet, so this only seeds bitrate_.
        setBitrate(startBitrate);
    }
/**
* bool SendSideBandwidthEstimation::IsInStartPhase(int64_t now_ms)
*/
private synchronized boolean isInStartPhase(long now)
{
return first_report_time_ms_ == -1 ||
now - first_report_time_ms_ < kStartPhaseMs;
}
/**
* int SendSideBandwidthEstimation::CapBitrateToThresholds
*/
private synchronized long capBitrateToThresholds(long bitrate)
{
if (bwe_incoming_ > 0 && bitrate > bwe_incoming_)
{
bitrate = bwe_incoming_;
}
if (bitrate > max_bitrate_configured_)
{
bitrate = max_bitrate_configured_;
}
if (bitrate < min_bitrate_configured_)
{
bitrate = min_bitrate_configured_;
}
return bitrate;
}
/**
* void SendSideBandwidthEstimation::UpdateEstimate(int64_t now_ms)
*/
protected synchronized void updateEstimate(long now)
{
long bitrate = bitrate_;
// We trust the REMB during the first 2 seconds if we haven't had any
// packet loss reported, to allow startup bitrate probing.
if (last_fraction_loss_ == 0 && isInStartPhase(now) &&
bwe_incoming_ > bitrate)
{
setBitrate(capBitrateToThresholds(bwe_incoming_));
min_bitrate_history_.clear();
min_bitrate_history_.addLast(new Pair<>(now, bitrate_));
return;
}
updateMinHistory(now);
if (last_packet_report_ms_ == -1)
{
// No feedback received.
bitrate_ = capBitrateToThresholds(bitrate_);
return;
}
long time_since_packet_report_ms = now - last_packet_report_ms_;
long time_since_feedback_ms = now - last_feedback_ms_;
if (time_since_packet_report_ms < 1.2 * kFeedbackIntervalMs)
{
// We only care about loss above a given bitrate threshold.
float loss = last_fraction_loss_ / 256.0f;
// We only make decisions based on loss when the bitrate is above a
// threshold. This is a crude way of handling loss which is
// uncorrelated to congestion.
if (bitrate_ < bitrate_threshold_bps_ || loss <= low_loss_threshold_)
{
// Loss < 2%: Increase rate by 8% of the min bitrate in the last
// kBweIncreaseIntervalMs.
// Note that by remembering the bitrate over the last second one can
// rampup up one second faster than if only allowed to start ramping
// at 8% per second rate now. E.g.:
// If sending a constant 100kbps it can rampup immediatly to 108kbps
// whenever a receiver report is received with lower packet loss.
// If instead one would do: bitrate_ *= 1.08^(delta time), it would
// take over one second since the lower packet loss to achieve 108kbps.
bitrate = (long) (min_bitrate_history_.getFirst().second * 1.08 + 0.5);
// Add 1 kbps extra, just to make sure that we do not get stuck
// (gives a little extra increase at low rates, negligible at higher
// rates).
bitrate += 1000;
statistics.update(now, false, LossRegion.LossFree);
}
else if (bitrate_ > bitrate_threshold_bps_)
{
if (loss <= high_loss_threshold_)
{
// Loss between 2% - 10%: Do nothing.
statistics.update(now, false, LossRegion.LossLimited);
}
else
{
// Loss > 10%: Limit the rate decreases to once a kBweDecreaseIntervalMs +
// rtt.
if (!has_decreased_since_last_fraction_loss_ &&
(now - time_last_decrease_ms_) >=
(kBweDecreaseIntervalMs + getRtt()))
{
time_last_decrease_ms_ = now;
// Reduce rate:
// newRate = rate * (1 - 0.5*lossRate);
// where packetLoss = 256*lossRate;
bitrate = (long) (
(bitrate * (512 - last_fraction_loss_)) / 512.0);
has_decreased_since_last_fraction_loss_ = true;
statistics.update(now, false, LossRegion.LossDegraded);
}
}
}
}
else
{
statistics.update(now, true, null);
if (time_since_feedback_ms >
kFeedbackTimeoutIntervals * kFeedbackIntervalMs
&& (last_timeout_ms_ == -1
|| now - last_timeout_ms_ > kTimeoutIntervalMs))
{
if (in_timeout_experiment_)
{
bitrate_ *= 0.8;
// Reset accumulators since we've already acted on missing
// feedback and shouldn't to act again on these old lost
// packets.
lost_packets_since_last_loss_update_Q8_ = 0;
expected_packets_since_last_loss_update_ = 0;
last_timeout_ms_ = now;
}
}
}
setBitrate(capBitrateToThresholds(bitrate));
}
    /**
     * Feeds one RTCP receiver report block into the loss estimator. Loss is
     * accumulated in Q8 fixed point (fraction_lost is 0..255 = lost/256) and a
     * new {@link #last_fraction_loss_} is only produced — and a re-estimate
     * triggered — once at least {@link #kLimitNumPackets} packets have been
     * covered by the accumulated reports.
     * Port of void SendSideBandwidthEstimation::UpdateReceiverBlock.
     *
     * @param fraction_lost the Q8 loss fraction from the report block (0-255).
     * @param number_of_packets the number of packets the report covers.
     * @param now the current time in milliseconds.
     */
    synchronized void updateReceiverBlock(
        long fraction_lost, long number_of_packets, long now)
    {
        last_feedback_ms_ = now;
        if (first_report_time_ms_ == -1)
        {
            first_report_time_ms_ = now;
        }
        // Check sequence number diff and weight loss report
        if (number_of_packets > 0)
        {
            // Calculate number of lost packets (still in Q8: the per-report
            // loss fraction weighted by the packets it covers).
            long num_lost_packets_Q8 = fraction_lost * number_of_packets;
            // Accumulate reports.
            lost_packets_since_last_loss_update_Q8_ += num_lost_packets_Q8;
            expected_packets_since_last_loss_update_ += number_of_packets;
            // Don't generate a loss rate until it can be based on enough packets.
            if (expected_packets_since_last_loss_update_ < kLimitNumPackets)
                return;
            has_decreased_since_last_fraction_loss_ = false;
            // Integer division yields the packet-weighted average Q8 loss
            // fraction over the accumulated reports.
            last_fraction_loss_ =
                lost_packets_since_last_loss_update_Q8_ /
                    expected_packets_since_last_loss_update_;
            // Reset accumulators.
            lost_packets_since_last_loss_update_Q8_ = 0;
            expected_packets_since_last_loss_update_ = 0;
            last_packet_report_ms_ = now;
            updateEstimate(now);
        }
    }
/**
* void SendSideBandwidthEstimation::UpdateMinHistory(int64_t now_ms)
*/
private synchronized void updateMinHistory(long now_ms)
{
// Remove old data points from history.
// Since history precision is in ms, add one so it is able to increase
// bitrate if it is off by as little as 0.5ms.
while (!min_bitrate_history_.isEmpty() &&
now_ms - min_bitrate_history_.getFirst().first + 1 >
kBweIncreaseIntervalMs)
{
min_bitrate_history_.removeFirst();
}
// Typical minimum sliding-window algorithm: Pop values higher than current
// bitrate before pushing it.
while (!min_bitrate_history_.isEmpty() &&
bitrate_ <= min_bitrate_history_.getLast().second)
{
min_bitrate_history_.removeLast();
}
min_bitrate_history_.addLast(new Pair<>(now_ms, bitrate_));
}
    /**
     * {@inheritDoc}
     *
     * Records the receiver-side (REMB) bandwidth estimate and immediately
     * re-clamps the current bitrate, since the REMB acts as an upper bound in
     * {@link #capBitrateToThresholds(long)}.
     */
    @Override
    public synchronized void updateReceiverEstimate(long bandwidth)
    {
        bwe_incoming_ = bandwidth;
        setBitrate(capBitrateToThresholds(bitrate_));
    }
/**
* void SendSideBandwidthEstimation::SetMinMaxBitrate
*/
synchronized void setMinMaxBitrate(int min_bitrate, int max_bitrate)
{
min_bitrate_configured_ = Math.max(min_bitrate, kDefaultMinBitrateBps);
if (max_bitrate > 0)
{
max_bitrate_configured_ =
Math.max(min_bitrate_configured_, max_bitrate);
}
else
{
max_bitrate_configured_ = kDefaultMaxBitrateBps;
}
}
/**
* Sets the value of {@link #bitrate_}.
* @param newValue the value to set
*/
private synchronized void setBitrate(long newValue)
{
long oldValue = bitrate_;
bitrate_ = newValue;
if (oldValue != bitrate_)
{
fireBandwidthEstimationChanged(oldValue, newValue);
}
}
/**
* {@inheritDoc}
*/
@Override
public long getLatestEstimate()
{
return bitrate_;
}
/**
* {@inheritDoc}
*/
@Override
public long getLatestREMB()
{
return bwe_incoming_;
}
/**
* {@inheritDoc}
*/
@Override
public int getLatestFractionLoss()
{
return last_fraction_loss_;
}
    /**
     * {@inheritDoc}
     *
     * Registers a listener to be notified (synchronously, from
     * {@link #fireBandwidthEstimationChanged(long, long)}, while the instance
     * lock is held) whenever the estimate changes.
     */
    @Override
    public synchronized void addListener(Listener listener)
    {
        listeners.add(listener);
    }
    /**
     * {@inheritDoc}
     *
     * Removes a previously registered listener; a no-op if the listener was
     * never added.
     */
    @Override
    public synchronized void removeListener(Listener listener)
    {
        listeners.remove(listener);
    }
    /**
     * {@inheritDoc}
     *
     * Forwards the bitrate carried by an incoming RTCP REMB packet to
     * {@link #updateReceiverEstimate(long)}.
     */
    @Override
    public void rembReceived(RTCPREMBPacket remb)
    {
        updateReceiverEstimate(remb.getBitrate());
    }
    /**
     * {@inheritDoc}
     *
     * @return the {@link StatisticsImpl} instance tracking time spent in the
     * different loss regions.
     */
    @Override
    public StatisticsImpl getStatistics()
    {
        return statistics;
    }
/**
* Returns the last calculated RTT to the endpoint.
* @return the last calculated RTT to the endpoint.
*/
private synchronized long getRtt()
{
long rtt = mediaStream.getMediaStreamStats().getSendStats().getRtt();
if (rtt < 0 || rtt > 1000)
{
logger.warn("RTT not calculated, or has a suspiciously high value ("
+ rtt + "). Using the default of 100ms.");
rtt = 100;
}
return rtt;
}
/**
* Notifies registered listeners that the estimation of the available
* bandwidth has changed.
* @param oldValue the old value (in bps).
* @param newValue the new value (in bps).
*/
private synchronized void fireBandwidthEstimationChanged(
long oldValue, long newValue)
{
for (BandwidthEstimator.Listener listener : listeners)
{
listener.bandwidthEstimationChanged(newValue);
}
}
private class Pair<T>
{
T first;
T second;
Pair(T a, T b)
{
first = a;
second = b;
}
}
/**
* This class records statistics information about how much time we spend
* in different loss-states (loss-free, loss-limited and loss-degraded).
*/
public class StatisticsImpl implements Statistics
{
/**
* The current state {@link LossRegion}.
*/
private LossRegion currentState = null;
/**
* Keeps the time (in millis) of the last transition (including a loop).
*/
private long lastTransitionTimestampMs = -1;
/**
* The cumulative duration (in millis) of the current state
* {@link #currentState} after having looped
* {@link #currentStateConsecutiveVisits} times.
*/
private long currentStateCumulativeDurationMs;
/**
* The number of loops over the current state {@link #currentState}.
*/
private int currentStateConsecutiveVisits;
/**
* The bitrate when we entered the current state {@link #currentState}.
*/
private long currentStateStartBitrateBps;
/**
* Computes the min/max/avg/sd of the bitrate while in
* {@link #currentState}.
*/
private LongSummaryStatistics currentStateBitrateStatistics
= new LongSummaryStatistics();
/**
* Computes the min/max/avg/sd of the loss while in
* {@link #currentState}.
*/
private IntSummaryStatistics currentStateLossStatistics
= new IntSummaryStatistics();
private boolean isDirty = false;
/**
* Computes the sum of the duration of the different states.
*/
private final LongSummaryStatistics
lossFreeMsStats = new LongSummaryStatistics(),
lossDegradedMsStats = new LongSummaryStatistics(),
lossLimitedMsStats = new LongSummaryStatistics();
@Override
public void update(long nowMs)
{
synchronized (SendSideBandwidthEstimation.this)
{
long time_since_packet_report_ms
= nowMs - last_packet_report_ms_;
boolean currentStateHasTimedOut
= time_since_packet_report_ms < 1.2 * kFeedbackIntervalMs;
update(nowMs, currentStateHasTimedOut, null);
}
}
/**
* Records a state transition and updates the statistics information.
*
* @param nowMs the time (in millis) of the transition.
* @param currentStateHasTimedOut true if the current state has timed
* out, i.e. we haven't received receiver reports "in a while".
* @param nextState the that the bwe is transitioning to.
*/
void update(
long nowMs, boolean currentStateHasTimedOut, LossRegion nextState)
{
synchronized (SendSideBandwidthEstimation.this)
{
if (lastTransitionTimestampMs > -1 && !currentStateHasTimedOut)
{
isDirty = true;
currentStateCumulativeDurationMs
+= nowMs - lastTransitionTimestampMs;
}
lastTransitionTimestampMs = nowMs;
if (!currentStateHasTimedOut)
{
isDirty = true;
// If the current state has not timed out, then update the
// stats that we gather.
currentStateLossStatistics.accept(last_fraction_loss_);
currentStateConsecutiveVisits++; // we start counting from 0.
if (this.currentState == nextState)
{
currentStateBitrateStatistics.accept(bitrate_);
return;
}
}
if (this.currentState != null)
{
// This is not a loop, we're transitioning to another state.
// Record how much time we've spent on this state, how many
// times we've looped through it and what was the impact on
// the bitrate.
switch (this.currentState)
{
case LossDegraded:
lossDegradedMsStats.accept(
currentStateCumulativeDurationMs);
break;
case LossFree:
lossFreeMsStats.accept(currentStateCumulativeDurationMs);
break;
case LossLimited:
lossLimitedMsStats.accept(
currentStateCumulativeDurationMs);
break;
}
if (timeSeriesLogger.isTraceEnabled())
{
timeSeriesLogger.trace(diagnosticContext
.makeTimeSeriesPoint("loss_estimate")
.addField("state", currentState.name())
.addField("max_loss",
currentStateLossStatistics.getMax() / 256.0f)
.addField("min_loss",
currentStateLossStatistics.getMin() / 256.0f)
.addField("avg_loss",
currentStateLossStatistics.getAverage()/256.0f)
.addField("max_bps",
currentStateBitrateStatistics.getMax())
.addField("min_bps",
currentStateBitrateStatistics.getMin())
.addField("avg_bps",
currentStateBitrateStatistics.getAverage())
.addField("duration_ms",
currentStateCumulativeDurationMs)
.addField("consecutive_visits",
currentStateConsecutiveVisits)
.addField("bitrate_threshold",
bitrate_threshold_bps_)
.addField("low_loss_threshold",
low_loss_threshold_)
.addField("high_loss_threshold",
high_loss_threshold_)
.addField("delta_bps",
bitrate_ - currentStateStartBitrateBps));
}
}
currentState = nextState;
currentStateStartBitrateBps = bitrate_;
if (isDirty)
{
currentStateLossStatistics = new IntSummaryStatistics();
currentStateBitrateStatistics = new LongSummaryStatistics();
currentStateConsecutiveVisits = 0;
currentStateCumulativeDurationMs = 0;
isDirty = false;
}
currentStateBitrateStatistics.accept(bitrate_);
}
}
@Override
public long getLossLimitedMs()
{
synchronized (SendSideBandwidthEstimation.this)
{
return lossLimitedMsStats.getSum();
}
}
@Override
public long getLossDegradedMs()
{
synchronized (SendSideBandwidthEstimation.this)
{
return lossDegradedMsStats.getSum();
}
}
@Override
public long getLossFreeMs()
{
synchronized (SendSideBandwidthEstimation.this)
{
return lossFreeMsStats.getSum();
}
}
}
    /**
     * Represents the loss-based controller states, as classified against
     * {@link #low_loss_threshold_} and {@link #high_loss_threshold_}.
     */
    private enum LossRegion
    {
        /**
         * Loss is between 2% and 10%.
         */
        LossLimited,
        /**
         * Loss is above 10%.
         */
        LossDegraded,
        /**
         * Loss is below 2%.
         */
        LossFree
    }
}
| src/org/jitsi/impl/neomedia/rtp/sendsidebandwidthestimation/SendSideBandwidthEstimation.java | /*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.neomedia.rtp.sendsidebandwidthestimation;
import org.jitsi.impl.neomedia.rtcp.*;
import org.jitsi.impl.neomedia.rtp.*;
import org.jitsi.impl.neomedia.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.libjitsi.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.service.neomedia.rtp.*;
import org.jitsi.util.*;
import java.util.*;
/**
* Implements the send-side bandwidth estimation described in
* https://tools.ietf.org/html/draft-ietf-rmcat-gcc-01
* Heavily based on code from webrtc.org (send_side_bandwidth_estimation.cc,
* commit ID 7ad9e661f8a035d49d049ccdb87c77ae8ecdfa35).
*
* @author Boris Grozev
*/
class SendSideBandwidthEstimation
extends RTCPPacketListenerAdapter
implements BandwidthEstimator
{
/**
* The name of the property that specifies the low-loss threshold
* (expressed as a proportion of lost packets).
* See {@link #low_loss_threshold_}.
*/
public final static String LOW_LOSS_THRESHOLD_PNAME
= SendSideBandwidthEstimation.class.getName() + ".lowLossThreshold";
/**
* The name of the property that specifies the high-loss threshold
* (expressed as a proportion of lost packets).
* See {@link #high_loss_threshold_}.
*/
public final static String HIGH_LOSS_THRESHOLD_PNAME
= SendSideBandwidthEstimation.class.getName() + ".highLossThreshold";
/**
* The name of the property that specifies the bitrate threshold (in kbps).
* See {@link #bitrate_threshold_bps_}.
*/
public final static String BITRATE_THRESHOLD_KBPS_PNAME
= SendSideBandwidthEstimation.class.getName() + ".bitrateThresholdKbps";
/**
* The name of the property that specifies the probability of enabling the
* loss-based experiment.
*/
public final static String LOSS_EXPERIMENT_PROBABILITY_PNAME
= SendSideBandwidthEstimation.class.getName()
+ ".lossExperimentProbability";
/**
* The name of the property that specifies the probability of enabling the
* timeout experiment.
*/
public final static String TIMEOUT_EXPERIMENT_PROBABILITY_PNAME
= SendSideBandwidthEstimation.class.getName()
+ ".timeoutExperimentProbability";
/**
* The ConfigurationService to get config values from.
*/
private static final ConfigurationService
cfg = LibJitsi.getConfigurationService();
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kBweIncreaseIntervalMs = 1000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final long kBweDecreaseIntervalMs = 300;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kDefaultMinBitrateBps = 10000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kDefaultMaxBitrateBps = 1000000000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kStartPhaseMs = 2000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kLimitNumPackets = 20;
// Expecting that RTCP feedback is sent uniformly within [0.5, 1.5]s
// intervals.
private static final long kFeedbackIntervalMs = 1500;
private static final long kFeedbackTimeoutIntervals = 3;
private static final long kTimeoutIntervalMs = 1000;
/**
* send_side_bandwidth_estimation.cc
*/
private static final float kDefaultLowLossThreshold = 0.02f;
/**
* send_side_bandwidth_estimation.cc
*/
private static final float kDefaultHighLossThreshold = 0.1f;
/**
* send_side_bandwidth_estimation.cc
*/
private static final int kDefaultBitrateThresholdKbps = 0;
/**
* Disable the loss experiment by default.
*/
private static final float kDefaultLossExperimentProbability = 0;
/**
* Disable the timeout experiment by default.
*/
private static final float kDefaultTimeoutExperimentProbability = 0;
/**
* The random number generator for all instances of this class.
*/
private static final Random kRandom = new Random();
/**
* The <tt>Logger</tt> used by the {@link SendSideBandwidthEstimation} class
* and its instances for logging output.
*/
private static final Logger logger
= Logger.getLogger(SendSideBandwidthEstimation.class);
/**
* The {@link TimeSeriesLogger} to be used by this instance to print time
* series.
*/
private static final TimeSeriesLogger timeSeriesLogger
= TimeSeriesLogger.getTimeSeriesLogger(
SendSideBandwidthEstimation.class);
/**
* send_side_bandwidth_estimation.h
*/
private final float low_loss_threshold_;
/**
* send_side_bandwidth_estimation.h
*/
private final float high_loss_threshold_;
/**
* send_side_bandwidth_estimation.h
*/
private final int bitrate_threshold_bps_;
/**
* send_side_bandwidth_estimation.h
*/
private long first_report_time_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private int lost_packets_since_last_loss_update_Q8_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private int expected_packets_since_last_loss_update_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private boolean has_decreased_since_last_fraction_loss_ = false;
/**
* send_side_bandwidth_estimation.h
*
* uint8_t last_fraction_loss_;
*/
private int last_fraction_loss_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private long last_feedback_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private long last_packet_report_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private long last_timeout_ms_ = -1;
/**
* send_side_bandwidth_estimation.h
*/
private final boolean in_timeout_experiment_;
/**
* send_side_bandwidth_estimation.h
*/
private int min_bitrate_configured_ = kDefaultMinBitrateBps;
/**
* send_side_bandwidth_estimation.h
*/
private int max_bitrate_configured_ = kDefaultMaxBitrateBps;
/**
* send_side_bandwidth_estimation.h
*/
private long time_last_decrease_ms_= 0;
/**
* send_side_bandwidth_estimation.h
*/
private long bwe_incoming_ = 0;
/**
* send_side_bandwidth_estimation.h
*/
private long bitrate_;
/**
* send_side_bandwidth_estimation.h
*/
private Deque<Pair<Long>> min_bitrate_history_ = new LinkedList<>();
/**
* The {@link DiagnosticContext} of this instance.
*/
private final DiagnosticContext diagnosticContext;
private final List<BandwidthEstimator.Listener> listeners
= new LinkedList<>();
/**
* The {@link MediaStream} for this {@link SendSideBandwidthEstimation}.
*/
private final MediaStream mediaStream;
/**
* The instance that holds stats for this instance.
*/
private final StatisticsImpl statistics = new StatisticsImpl();
SendSideBandwidthEstimation(MediaStreamImpl stream, long startBitrate)
{
mediaStream = stream;
diagnosticContext = stream.getDiagnosticContext();
float lossExperimentProbability = (float) cfg.getDouble(
LOSS_EXPERIMENT_PROBABILITY_PNAME,
kDefaultLossExperimentProbability);
if (kRandom.nextFloat() < lossExperimentProbability)
{
low_loss_threshold_ = (float) cfg.getDouble(
LOW_LOSS_THRESHOLD_PNAME, kDefaultLowLossThreshold);
high_loss_threshold_ = (float) cfg.getDouble(
HIGH_LOSS_THRESHOLD_PNAME, kDefaultHighLossThreshold);
bitrate_threshold_bps_ = 1000 * cfg.getInt(
BITRATE_THRESHOLD_KBPS_PNAME, kDefaultBitrateThresholdKbps);
}
else
{
low_loss_threshold_ = kDefaultLowLossThreshold;
high_loss_threshold_ = kDefaultHighLossThreshold;
bitrate_threshold_bps_ = 1000 * kDefaultBitrateThresholdKbps;
}
float timeoutExperimentProbability = (float) cfg.getDouble(
TIMEOUT_EXPERIMENT_PROBABILITY_PNAME,
kDefaultTimeoutExperimentProbability);
in_timeout_experiment_
= kRandom.nextFloat() < timeoutExperimentProbability;
setBitrate(startBitrate);
}
/**
* bool SendSideBandwidthEstimation::IsInStartPhase(int64_t now_ms)
*/
private synchronized boolean isInStartPhase(long now)
{
return first_report_time_ms_ == -1 ||
now - first_report_time_ms_ < kStartPhaseMs;
}
/**
* int SendSideBandwidthEstimation::CapBitrateToThresholds
*/
private synchronized long capBitrateToThresholds(long bitrate)
{
if (bwe_incoming_ > 0 && bitrate > bwe_incoming_)
{
bitrate = bwe_incoming_;
}
if (bitrate > max_bitrate_configured_)
{
bitrate = max_bitrate_configured_;
}
if (bitrate < min_bitrate_configured_)
{
bitrate = min_bitrate_configured_;
}
return bitrate;
}
/**
* void SendSideBandwidthEstimation::UpdateEstimate(int64_t now_ms)
*/
protected synchronized void updateEstimate(long now)
{
long bitrate = bitrate_;
// We trust the REMB during the first 2 seconds if we haven't had any
// packet loss reported, to allow startup bitrate probing.
if (last_fraction_loss_ == 0 && isInStartPhase(now) &&
bwe_incoming_ > bitrate)
{
setBitrate(capBitrateToThresholds(bwe_incoming_));
min_bitrate_history_.clear();
min_bitrate_history_.addLast(new Pair<>(now, bitrate_));
return;
}
updateMinHistory(now);
if (last_packet_report_ms_ == -1)
{
// No feedback received.
bitrate_ = capBitrateToThresholds(bitrate_);
return;
}
long time_since_packet_report_ms = now - last_packet_report_ms_;
long time_since_feedback_ms = now - last_feedback_ms_;
if (time_since_packet_report_ms < 1.2 * kFeedbackIntervalMs)
{
// We only care about loss above a given bitrate threshold.
float loss = last_fraction_loss_ / 256.0f;
// We only make decisions based on loss when the bitrate is above a
// threshold. This is a crude way of handling loss which is
// uncorrelated to congestion.
if (bitrate_ < bitrate_threshold_bps_ || loss <= low_loss_threshold_)
{
// Loss < 2%: Increase rate by 8% of the min bitrate in the last
// kBweIncreaseIntervalMs.
// Note that by remembering the bitrate over the last second one can
// rampup up one second faster than if only allowed to start ramping
// at 8% per second rate now. E.g.:
// If sending a constant 100kbps it can rampup immediatly to 108kbps
// whenever a receiver report is received with lower packet loss.
// If instead one would do: bitrate_ *= 1.08^(delta time), it would
// take over one second since the lower packet loss to achieve 108kbps.
bitrate = (long) (min_bitrate_history_.getFirst().second * 1.08 + 0.5);
// Add 1 kbps extra, just to make sure that we do not get stuck
// (gives a little extra increase at low rates, negligible at higher
// rates).
bitrate += 1000;
statistics.update(now, LossRegion.LossFree);
}
else if (bitrate_ > bitrate_threshold_bps_)
{
if (loss <= high_loss_threshold_)
{
// Loss between 2% - 10%: Do nothing.
statistics.update(now, LossRegion.LossLimited);
}
else
{
// Loss > 10%: Limit the rate decreases to once a kBweDecreaseIntervalMs +
// rtt.
if (!has_decreased_since_last_fraction_loss_ &&
(now - time_last_decrease_ms_) >=
(kBweDecreaseIntervalMs + getRtt()))
{
time_last_decrease_ms_ = now;
// Reduce rate:
// newRate = rate * (1 - 0.5*lossRate);
// where packetLoss = 256*lossRate;
bitrate = (long) (
(bitrate * (512 - last_fraction_loss_)) / 512.0);
has_decreased_since_last_fraction_loss_ = true;
statistics.update(now, LossRegion.LossDegraded);
}
}
}
}
else if (time_since_feedback_ms >
kFeedbackTimeoutIntervals * kFeedbackIntervalMs &&
(last_timeout_ms_ == -1 ||
now - last_timeout_ms_ > kTimeoutIntervalMs))
{
if (in_timeout_experiment_)
{
bitrate_ *= 0.8;
// Reset accumulators since we've already acted on missing
// feedback and shouldn't to act again on these old lost
// packets.
lost_packets_since_last_loss_update_Q8_ = 0;
expected_packets_since_last_loss_update_ = 0;
last_timeout_ms_ = now;
}
}
setBitrate(capBitrateToThresholds(bitrate));
}
/**
* void SendSideBandwidthEstimation::UpdateReceiverBlock
*/
synchronized void updateReceiverBlock(
long fraction_lost, long number_of_packets, long now)
{
last_feedback_ms_ = now;
if (first_report_time_ms_ == -1)
{
first_report_time_ms_ = now;
}
// Check sequence number diff and weight loss report
if (number_of_packets > 0)
{
// Calculate number of lost packets.
long num_lost_packets_Q8 = fraction_lost * number_of_packets;
// Accumulate reports.
lost_packets_since_last_loss_update_Q8_ += num_lost_packets_Q8;
expected_packets_since_last_loss_update_ += number_of_packets;
// Don't generate a loss rate until it can be based on enough packets.
if (expected_packets_since_last_loss_update_ < kLimitNumPackets)
return;
has_decreased_since_last_fraction_loss_ = false;
last_fraction_loss_ =
lost_packets_since_last_loss_update_Q8_ /
expected_packets_since_last_loss_update_;
// Reset accumulators.
lost_packets_since_last_loss_update_Q8_ = 0;
expected_packets_since_last_loss_update_ = 0;
last_packet_report_ms_ = now;
updateEstimate(now);
}
}
/**
* void SendSideBandwidthEstimation::UpdateMinHistory(int64_t now_ms)
*/
private synchronized void updateMinHistory(long now_ms)
{
// Remove old data points from history.
// Since history precision is in ms, add one so it is able to increase
// bitrate if it is off by as little as 0.5ms.
while (!min_bitrate_history_.isEmpty() &&
now_ms - min_bitrate_history_.getFirst().first + 1 >
kBweIncreaseIntervalMs)
{
min_bitrate_history_.removeFirst();
}
// Typical minimum sliding-window algorithm: Pop values higher than current
// bitrate before pushing it.
while (!min_bitrate_history_.isEmpty() &&
bitrate_ <= min_bitrate_history_.getLast().second)
{
min_bitrate_history_.removeLast();
}
min_bitrate_history_.addLast(new Pair<>(now_ms, bitrate_));
}
/**
* {@inheritDoc}
*/
@Override
public synchronized void updateReceiverEstimate(long bandwidth)
{
bwe_incoming_ = bandwidth;
setBitrate(capBitrateToThresholds(bitrate_));
}
/**
* void SendSideBandwidthEstimation::SetMinMaxBitrate
*/
synchronized void setMinMaxBitrate(int min_bitrate, int max_bitrate)
{
min_bitrate_configured_ = Math.max(min_bitrate, kDefaultMinBitrateBps);
if (max_bitrate > 0)
{
max_bitrate_configured_ =
Math.max(min_bitrate_configured_, max_bitrate);
}
else
{
max_bitrate_configured_ = kDefaultMaxBitrateBps;
}
}
/**
* Sets the value of {@link #bitrate_}.
* @param newValue the value to set
*/
private synchronized void setBitrate(long newValue)
{
long oldValue = bitrate_;
bitrate_ = newValue;
if (oldValue != bitrate_)
{
fireBandwidthEstimationChanged(oldValue, newValue);
}
}
/**
* {@inheritDoc}
*/
@Override
public long getLatestEstimate()
{
return bitrate_;
}
/**
* {@inheritDoc}
*/
@Override
public long getLatestREMB()
{
return bwe_incoming_;
}
/**
* {@inheritDoc}
*/
@Override
public int getLatestFractionLoss()
{
return last_fraction_loss_;
}
/**
* {@inheritDoc}
*/
@Override
public synchronized void addListener(Listener listener)
{
listeners.add(listener);
}
/**
* {@inheritDoc}
*/
@Override
public synchronized void removeListener(Listener listener)
{
listeners.remove(listener);
}
/**
* {@inheritDoc}
*/
@Override
public void rembReceived(RTCPREMBPacket remb)
{
updateReceiverEstimate(remb.getBitrate());
}
@Override
public StatisticsImpl getStatistics()
{
return statistics;
}
/**
* Returns the last calculated RTT to the endpoint.
* @return the last calculated RTT to the endpoint.
*/
private synchronized long getRtt()
{
long rtt = mediaStream.getMediaStreamStats().getSendStats().getRtt();
if (rtt < 0 || rtt > 1000)
{
logger.warn("RTT not calculated, or has a suspiciously high value ("
+ rtt + "). Using the default of 100ms.");
rtt = 100;
}
return rtt;
}
/**
* Notifies registered listeners that the estimation of the available
* bandwidth has changed.
* @param oldValue the old value (in bps).
* @param newValue the new value (in bps).
*/
private synchronized void fireBandwidthEstimationChanged(
long oldValue, long newValue)
{
for (BandwidthEstimator.Listener listener : listeners)
{
listener.bandwidthEstimationChanged(newValue);
}
}
private class Pair<T>
{
T first;
T second;
Pair(T a, T b)
{
first = a;
second = b;
}
}
/**
* This class records statistics information about how much time we spend
* in different loss-states (loss-free, loss-limited and loss-degraded).
*/
public class StatisticsImpl implements Statistics
{
/**
* The current state {@link LossRegion}.
*/
private LossRegion currentState = null;
/**
* Keeps the time (in millis) of the last transition (including a loop).
*/
private long lastTransitionTimestampMs = -1;
/**
* The cumulative duration (in millis) of the current state
* {@link #currentState} after having looped
* {@link #currentStateConsecutiveVisits} times.
*/
private long currentStateCumulativeDurationMs;
/**
* The number of loops over the current state {@link #currentState}.
*/
private int currentStateConsecutiveVisits;
/**
* The bitrate when we entered the current state {@link #currentState}.
*/
private long currentStateStartBitrateBps;
/**
* Computes the min/max/avg/sd of the bitrate while in
* {@link #currentState}.
*/
private LongSummaryStatistics currentStateBitrateStatistics
= new LongSummaryStatistics();
/**
* Computes the min/max/avg/sd of the loss while in
* {@link #currentState}.
*/
private IntSummaryStatistics currentStateLossStatistics
= new IntSummaryStatistics();
/**
* Computes the sum of the duration of the different states.
*/
private final LongSummaryStatistics
lossFreeMsStats = new LongSummaryStatistics(),
lossDegradedMsStats = new LongSummaryStatistics(),
lossLimitedMsStats = new LongSummaryStatistics();
@Override
public void update(long nowMs)
{
synchronized (SendSideBandwidthEstimation.this)
{
update(nowMs, null);
}
}
/**
* Records a state transition and updates the statistics information.
*
* @param nowMs the time (in millis) of the transition.
* @param nextState the that the bwe is transitioning to.
*/
void update(long nowMs, LossRegion nextState)
{
synchronized (SendSideBandwidthEstimation.this)
{
if (lastTransitionTimestampMs > -1)
{
currentStateCumulativeDurationMs
+= nowMs - lastTransitionTimestampMs;
}
lastTransitionTimestampMs = nowMs;
currentStateLossStatistics.accept(last_fraction_loss_);
currentStateConsecutiveVisits++; // we start counting from 0.
if (this.currentState == nextState)
{
currentStateBitrateStatistics.accept(bitrate_);
return;
}
if (this.currentState != null)
{
// This is not a loop, we're transitioning to another state.
// Record how much time we've spent on this state, how many
// times we've looped through it and what was the impact on
// the bitrate.
switch (this.currentState)
{
case LossDegraded:
lossDegradedMsStats.accept(
currentStateCumulativeDurationMs);
break;
case LossFree:
lossFreeMsStats.accept(currentStateCumulativeDurationMs);
break;
case LossLimited:
lossLimitedMsStats.accept(
currentStateCumulativeDurationMs);
break;
}
if (timeSeriesLogger.isTraceEnabled())
{
timeSeriesLogger.trace(diagnosticContext
.makeTimeSeriesPoint("loss_estimate")
.addField("state", currentState.name())
.addField("max_loss",
currentStateLossStatistics.getMax() / 256.0f)
.addField("min_loss",
currentStateLossStatistics.getMin() / 256.0f)
.addField("avg_loss",
currentStateLossStatistics.getAverage()/256.0f)
.addField("max_bps",
currentStateBitrateStatistics.getMax())
.addField("min_bps",
currentStateBitrateStatistics.getMin())
.addField("avg_bps",
currentStateBitrateStatistics.getAverage())
.addField("duration_ms",
currentStateCumulativeDurationMs)
.addField("consecutive_visits",
currentStateConsecutiveVisits)
.addField("bitrate_threshold",
bitrate_threshold_bps_)
.addField("low_loss_threshold",
low_loss_threshold_)
.addField("high_loss_threshold",
high_loss_threshold_)
.addField("delta_bps",
bitrate_ - currentStateStartBitrateBps));
}
}
currentState = nextState;
currentStateLossStatistics = new IntSummaryStatistics();
currentStateConsecutiveVisits = 0;
currentStateCumulativeDurationMs = 0;
currentStateStartBitrateBps = bitrate_;
currentStateBitrateStatistics.accept(bitrate_);
}
}
@Override
public long getLossLimitedMs()
{
synchronized (SendSideBandwidthEstimation.this)
{
return lossLimitedMsStats.getSum();
}
}
@Override
public long getLossDegradedMs()
{
synchronized (SendSideBandwidthEstimation.this)
{
return lossDegradedMsStats.getSum();
}
}
@Override
public long getLossFreeMs()
{
synchronized (SendSideBandwidthEstimation.this)
{
return lossFreeMsStats.getSum();
}
}
}
/**
* Represents the loss-based controller states.
*/
private enum LossRegion
{
/**
* Loss is between 2% and 10%.
*/
LossLimited,
/**
* Loss is above 10%.
*/
LossDegraded,
/**
* Loss is bellow 2%.
*/
LossFree
}
}
| Considers that a stream may timeout when logging the loss-region stats.
| src/org/jitsi/impl/neomedia/rtp/sendsidebandwidthestimation/SendSideBandwidthEstimation.java | Considers that a stream may timeout when logging the loss-region stats. | <ide><path>rc/org/jitsi/impl/neomedia/rtp/sendsidebandwidthestimation/SendSideBandwidthEstimation.java
<ide> // rates).
<ide> bitrate += 1000;
<ide>
<del> statistics.update(now, LossRegion.LossFree);
<add> statistics.update(now, false, LossRegion.LossFree);
<ide>
<ide> }
<ide> else if (bitrate_ > bitrate_threshold_bps_)
<ide> {
<ide> // Loss between 2% - 10%: Do nothing.
<ide>
<del> statistics.update(now, LossRegion.LossLimited);
<add> statistics.update(now, false, LossRegion.LossLimited);
<ide> }
<ide> else
<ide> {
<ide> (bitrate * (512 - last_fraction_loss_)) / 512.0);
<ide> has_decreased_since_last_fraction_loss_ = true;
<ide>
<del> statistics.update(now, LossRegion.LossDegraded);
<add> statistics.update(now, false, LossRegion.LossDegraded);
<ide> }
<ide> }
<ide> }
<ide> }
<del> else if (time_since_feedback_ms >
<del> kFeedbackTimeoutIntervals * kFeedbackIntervalMs &&
<del> (last_timeout_ms_ == -1 ||
<del> now - last_timeout_ms_ > kTimeoutIntervalMs))
<del> {
<del> if (in_timeout_experiment_)
<add> else
<add> {
<add> statistics.update(now, true, null);
<add>
<add> if (time_since_feedback_ms >
<add> kFeedbackTimeoutIntervals * kFeedbackIntervalMs
<add> && (last_timeout_ms_ == -1
<add> || now - last_timeout_ms_ > kTimeoutIntervalMs))
<ide> {
<del> bitrate_ *= 0.8;
<del> // Reset accumulators since we've already acted on missing
<del> // feedback and shouldn't to act again on these old lost
<del> // packets.
<del> lost_packets_since_last_loss_update_Q8_ = 0;
<del> expected_packets_since_last_loss_update_ = 0;
<del> last_timeout_ms_ = now;
<add> if (in_timeout_experiment_)
<add> {
<add> bitrate_ *= 0.8;
<add> // Reset accumulators since we've already acted on missing
<add> // feedback and shouldn't to act again on these old lost
<add> // packets.
<add> lost_packets_since_last_loss_update_Q8_ = 0;
<add> expected_packets_since_last_loss_update_ = 0;
<add> last_timeout_ms_ = now;
<add> }
<ide> }
<ide> }
<ide>
<ide> private IntSummaryStatistics currentStateLossStatistics
<ide> = new IntSummaryStatistics();
<ide>
<add> private boolean isDirty = false;
<add>
<ide> /**
<ide> * Computes the sum of the duration of the different states.
<ide> */
<ide> {
<ide> synchronized (SendSideBandwidthEstimation.this)
<ide> {
<del> update(nowMs, null);
<add> long time_since_packet_report_ms
<add> = nowMs - last_packet_report_ms_;
<add>
<add> boolean currentStateHasTimedOut
<add> = time_since_packet_report_ms < 1.2 * kFeedbackIntervalMs;
<add>
<add> update(nowMs, currentStateHasTimedOut, null);
<ide> }
<ide> }
<ide>
<ide> * Records a state transition and updates the statistics information.
<ide> *
<ide> * @param nowMs the time (in millis) of the transition.
<add> * @param currentStateHasTimedOut true if the current state has timed
<add> * out, i.e. we haven't received receiver reports "in a while".
<ide> * @param nextState the that the bwe is transitioning to.
<ide> */
<del> void update(long nowMs, LossRegion nextState)
<add> void update(
<add> long nowMs, boolean currentStateHasTimedOut, LossRegion nextState)
<ide> {
<ide> synchronized (SendSideBandwidthEstimation.this)
<ide> {
<del> if (lastTransitionTimestampMs > -1)
<add> if (lastTransitionTimestampMs > -1 && !currentStateHasTimedOut)
<ide> {
<add> isDirty = true;
<ide> currentStateCumulativeDurationMs
<ide> += nowMs - lastTransitionTimestampMs;
<ide> }
<ide>
<ide> lastTransitionTimestampMs = nowMs;
<del> currentStateLossStatistics.accept(last_fraction_loss_);
<del> currentStateConsecutiveVisits++; // we start counting from 0.
<del>
<del> if (this.currentState == nextState)
<add> if (!currentStateHasTimedOut)
<ide> {
<del> currentStateBitrateStatistics.accept(bitrate_);
<del> return;
<add> isDirty = true;
<add> // If the current state has not timed out, then update the
<add> // stats that we gather.
<add> currentStateLossStatistics.accept(last_fraction_loss_);
<add> currentStateConsecutiveVisits++; // we start counting from 0.
<add> if (this.currentState == nextState)
<add> {
<add> currentStateBitrateStatistics.accept(bitrate_);
<add> return;
<add> }
<ide> }
<ide>
<ide> if (this.currentState != null)
<ide> }
<ide>
<ide> currentState = nextState;
<del> currentStateLossStatistics = new IntSummaryStatistics();
<del> currentStateConsecutiveVisits = 0;
<del> currentStateCumulativeDurationMs = 0;
<ide> currentStateStartBitrateBps = bitrate_;
<add>
<add> if (isDirty)
<add> {
<add> currentStateLossStatistics = new IntSummaryStatistics();
<add> currentStateBitrateStatistics = new LongSummaryStatistics();
<add> currentStateConsecutiveVisits = 0;
<add> currentStateCumulativeDurationMs = 0;
<add> isDirty = false;
<add> }
<add>
<ide> currentStateBitrateStatistics.accept(bitrate_);
<ide> }
<ide> } |
|
Java | lgpl-2.1 | f02684f2884d342e88f121d97e0afb0a9e9a4cb4 | 0 | jfree/jfreechart-fse,oskopek/jfreechart-fse,oskopek/jfreechart-fse,jfree/jfreechart-fse | /* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2014, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Oracle and Java are registered trademarks of Oracle and/or its affiliates.
* Other names may be trademarks of their respective owners.]
*
* --------------
* PlotTests.java
* --------------
* (C) Copyright 2005-2014, by Object Refinery Limited and Contributors.
*
* Original Author: David Gilbert (for Object Refinery Limited);
* Contributor(s): -;
*
* Changes
* -------
* 06-Jun-2005 : Version 1 (DG);
* 30-Jun-2006 : Extended equals() test to cover new field (DG);
* 11-May-2007 : Another new field in testEquals() (DG);
* 17-Jun-2012 : Remove JCommon dependencies (DG);
*
*/
package org.jfree.chart.plot;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import org.junit.Test;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.GradientPaint;
import java.awt.Image;
import java.awt.Paint;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.Stroke;
import java.net.URL;
import javax.swing.ImageIcon;
import org.jfree.chart.drawable.BorderPainter;
import org.jfree.chart.drawable.ColorPainter;
import org.jfree.chart.ui.Align;
import org.jfree.chart.ui.RectangleInsets;
/**
* Some tests for the {@link Plot} class.
*/
public class PlotTest {
private Image testImage;
private Image getTestImage() {
if (testImage == null) {
URL imageURL = getClass().getClassLoader().getResource(
"org/jfree/chart/gorilla.jpg");
if (imageURL != null) {
ImageIcon temp = new ImageIcon(imageURL);
// use ImageIcon because it waits for the image to load...
testImage = temp.getImage();
}
}
return testImage;
}
/**
* Check that the equals() method can distinguish all fields (note that
* the dataset is NOT considered in the equals() method).
*/
@Test
public void testEquals() {
PiePlot plot1 = new PiePlot();
PiePlot plot2 = new PiePlot();
assertEquals(plot1, plot2);
assertEquals(plot2, plot1);
// noDataMessage
plot1.setNoDataMessage("No data XYZ");
assertFalse(plot1.equals(plot2));
plot2.setNoDataMessage("No data XYZ");
assertEquals(plot1, plot2);
// noDataMessageFont
plot1.setNoDataMessageFont(new Font("SansSerif", Font.PLAIN, 13));
assertFalse(plot1.equals(plot2));
plot2.setNoDataMessageFont(new Font("SansSerif", Font.PLAIN, 13));
assertEquals(plot1, plot2);
// noDataMessagePaint
plot1.setNoDataMessagePaint(new GradientPaint(1.0f, 2.0f, Color.RED,
3.0f, 4.0f, Color.BLUE));
assertFalse(plot1.equals(plot2));
plot2.setNoDataMessagePaint(new GradientPaint(1.0f, 2.0f, Color.RED,
3.0f, 4.0f, Color.BLUE));
assertEquals(plot1, plot2);
// insets
plot1.setInsets(new RectangleInsets(1.0, 2.0, 3.0, 4.0));
assertFalse(plot1.equals(plot2));
plot2.setInsets(new RectangleInsets(1.0, 2.0, 3.0, 4.0));
assertEquals(plot1, plot2);
plot1.setBorderPainter(new BorderPainter(Color.RED, new BasicStroke(1.0f)));
assertFalse(plot1.equals(plot2));
plot2.setBorderPainter(new BorderPainter(Color.RED, new BasicStroke(1.0f)));
assertEquals(plot1, plot2);
// backgroundPainter
plot1.setBackgroundPainter(new ColorPainter(Color.RED));
assertFalse(plot1.equals(plot2));
plot2.setBackgroundPainter(new ColorPainter(Color.RED));
assertEquals(plot1, plot2);
// backgroundImage
plot1.setBackgroundImage(getTestImage());
assertFalse(plot1.equals(plot2));
plot2.setBackgroundImage(getTestImage());
assertEquals(plot1, plot2);
// backgroundImageAlignment
plot1.setBackgroundImageAlignment(Align.BOTTOM_RIGHT);
assertFalse(plot1.equals(plot2));
plot2.setBackgroundImageAlignment(Align.BOTTOM_RIGHT);
assertEquals(plot1, plot2);
// backgroundImageAlpha
plot1.setBackgroundImageAlpha(0.77f);
assertFalse(plot1.equals(plot2));
plot2.setBackgroundImageAlpha(0.77f);
assertEquals(plot1, plot2);
// foregroundAlpha
plot1.setForegroundAlpha(0.99f);
assertFalse(plot1.equals(plot2));
plot2.setForegroundAlpha(0.99f);
assertEquals(plot1, plot2);
// backgroundAlpha
plot1.setBackgroundAlpha(0.99f);
assertFalse(plot1.equals(plot2));
plot2.setBackgroundAlpha(0.99f);
assertEquals(plot1, plot2);
// drawingSupplier
plot1.setDrawingSupplier(new DefaultDrawingSupplier(
new Paint[] {Color.BLUE}, new Paint[] {Color.RED},
new Stroke[] {new BasicStroke(1.1f)},
new Stroke[] {new BasicStroke(9.9f)},
new Shape[] {new Rectangle(1, 2, 3, 4)}));
assertFalse(plot1.equals(plot2));
plot2.setDrawingSupplier(new DefaultDrawingSupplier(
new Paint[] {Color.BLUE}, new Paint[] {Color.RED},
new Stroke[] {new BasicStroke(1.1f)},
new Stroke[] {new BasicStroke(9.9f)},
new Shape[] {new Rectangle(1, 2, 3, 4)}));
assertEquals(plot1, plot2);
}
}
| src/test/java/org/jfree/chart/plot/PlotTest.java | /* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2014, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Oracle and Java are registered trademarks of Oracle and/or its affiliates.
* Other names may be trademarks of their respective owners.]
*
* --------------
* PlotTests.java
* --------------
* (C) Copyright 2005-2014, by Object Refinery Limited and Contributors.
*
* Original Author: David Gilbert (for Object Refinery Limited);
* Contributor(s): -;
*
* Changes
* -------
* 06-Jun-2005 : Version 1 (DG);
* 30-Jun-2006 : Extended equals() test to cover new field (DG);
* 11-May-2007 : Another new field in testEquals() (DG);
* 17-Jun-2012 : Remove JCommon dependencies (DG);
*
*/
package org.jfree.chart.plot;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import org.junit.Test;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.GradientPaint;
import java.awt.Image;
import java.awt.Paint;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.Stroke;
import java.net.URL;
import javax.swing.ImageIcon;
import org.jfree.chart.drawable.ColorPainter;
import org.jfree.chart.ui.Align;
import org.jfree.chart.ui.RectangleInsets;
/**
* Some tests for the {@link Plot} class.
*/
public class PlotTest {
private Image testImage;
private Image getTestImage() {
if (testImage == null) {
URL imageURL = getClass().getClassLoader().getResource(
"org/jfree/chart/gorilla.jpg");
if (imageURL != null) {
ImageIcon temp = new ImageIcon(imageURL);
// use ImageIcon because it waits for the image to load...
testImage = temp.getImage();
}
}
return testImage;
}
/**
* Check that the equals() method can distinguish all fields (note that
* the dataset is NOT considered in the equals() method).
*/
@Test
public void testEquals() {
PiePlot plot1 = new PiePlot();
PiePlot plot2 = new PiePlot();
assertEquals(plot1, plot2);
assertEquals(plot2, plot1);
// noDataMessage
plot1.setNoDataMessage("No data XYZ");
assertFalse(plot1.equals(plot2));
plot2.setNoDataMessage("No data XYZ");
assertEquals(plot1, plot2);
// noDataMessageFont
plot1.setNoDataMessageFont(new Font("SansSerif", Font.PLAIN, 13));
assertFalse(plot1.equals(plot2));
plot2.setNoDataMessageFont(new Font("SansSerif", Font.PLAIN, 13));
assertEquals(plot1, plot2);
// noDataMessagePaint
plot1.setNoDataMessagePaint(new GradientPaint(1.0f, 2.0f, Color.RED,
3.0f, 4.0f, Color.BLUE));
assertFalse(plot1.equals(plot2));
plot2.setNoDataMessagePaint(new GradientPaint(1.0f, 2.0f, Color.RED,
3.0f, 4.0f, Color.BLUE));
assertEquals(plot1, plot2);
// insets
plot1.setInsets(new RectangleInsets(1.0, 2.0, 3.0, 4.0));
assertFalse(plot1.equals(plot2));
plot2.setInsets(new RectangleInsets(1.0, 2.0, 3.0, 4.0));
assertEquals(plot1, plot2);
// outlineVisible
plot1.setOutlineVisible(false);
assertFalse(plot1.equals(plot2));
plot2.setOutlineVisible(false);
assertEquals(plot1, plot2);
// outlineStroke
BasicStroke s = new BasicStroke(1.23f);
plot1.setOutlineStroke(s);
assertFalse(plot1.equals(plot2));
plot2.setOutlineStroke(s);
assertEquals(plot1, plot2);
// outlinePaint
plot1.setOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.yellow,
3.0f, 4.0f, Color.green));
assertFalse(plot1.equals(plot2));
plot2.setOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.yellow,
3.0f, 4.0f, Color.green));
assertEquals(plot1, plot2);
// backgroundPainter
plot1.setBackgroundPainter(new ColorPainter(Color.RED));
assertFalse(plot1.equals(plot2));
plot2.setBackgroundPainter(new ColorPainter(Color.RED));
assertEquals(plot1, plot2);
// backgroundImage
plot1.setBackgroundImage(getTestImage());
assertFalse(plot1.equals(plot2));
plot2.setBackgroundImage(getTestImage());
assertEquals(plot1, plot2);
// backgroundImageAlignment
plot1.setBackgroundImageAlignment(Align.BOTTOM_RIGHT);
assertFalse(plot1.equals(plot2));
plot2.setBackgroundImageAlignment(Align.BOTTOM_RIGHT);
assertEquals(plot1, plot2);
// backgroundImageAlpha
plot1.setBackgroundImageAlpha(0.77f);
assertFalse(plot1.equals(plot2));
plot2.setBackgroundImageAlpha(0.77f);
assertEquals(plot1, plot2);
// foregroundAlpha
plot1.setForegroundAlpha(0.99f);
assertFalse(plot1.equals(plot2));
plot2.setForegroundAlpha(0.99f);
assertEquals(plot1, plot2);
// backgroundAlpha
plot1.setBackgroundAlpha(0.99f);
assertFalse(plot1.equals(plot2));
plot2.setBackgroundAlpha(0.99f);
assertEquals(plot1, plot2);
// drawingSupplier
plot1.setDrawingSupplier(new DefaultDrawingSupplier(
new Paint[] {Color.BLUE}, new Paint[] {Color.RED},
new Stroke[] {new BasicStroke(1.1f)},
new Stroke[] {new BasicStroke(9.9f)},
new Shape[] {new Rectangle(1, 2, 3, 4)}));
assertFalse(plot1.equals(plot2));
plot2.setDrawingSupplier(new DefaultDrawingSupplier(
new Paint[] {Color.BLUE}, new Paint[] {Color.RED},
new Stroke[] {new BasicStroke(1.1f)},
new Stroke[] {new BasicStroke(9.9f)},
new Shape[] {new Rectangle(1, 2, 3, 4)}));
assertEquals(plot1, plot2);
}
}
| Update test for borderPainter. | src/test/java/org/jfree/chart/plot/PlotTest.java | Update test for borderPainter. | <ide><path>rc/test/java/org/jfree/chart/plot/PlotTest.java
<ide> import java.awt.Stroke;
<ide> import java.net.URL;
<ide> import javax.swing.ImageIcon;
<add>import org.jfree.chart.drawable.BorderPainter;
<ide> import org.jfree.chart.drawable.ColorPainter;
<ide>
<ide> import org.jfree.chart.ui.Align;
<ide> plot2.setInsets(new RectangleInsets(1.0, 2.0, 3.0, 4.0));
<ide> assertEquals(plot1, plot2);
<ide>
<del> // outlineVisible
<del> plot1.setOutlineVisible(false);
<add> plot1.setBorderPainter(new BorderPainter(Color.RED, new BasicStroke(1.0f)));
<ide> assertFalse(plot1.equals(plot2));
<del> plot2.setOutlineVisible(false);
<del> assertEquals(plot1, plot2);
<del>
<del> // outlineStroke
<del> BasicStroke s = new BasicStroke(1.23f);
<del> plot1.setOutlineStroke(s);
<del> assertFalse(plot1.equals(plot2));
<del> plot2.setOutlineStroke(s);
<del> assertEquals(plot1, plot2);
<del>
<del> // outlinePaint
<del> plot1.setOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.yellow,
<del> 3.0f, 4.0f, Color.green));
<del> assertFalse(plot1.equals(plot2));
<del> plot2.setOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.yellow,
<del> 3.0f, 4.0f, Color.green));
<add> plot2.setBorderPainter(new BorderPainter(Color.RED, new BasicStroke(1.0f)));
<ide> assertEquals(plot1, plot2);
<ide>
<ide> // backgroundPainter |
|
JavaScript | mit | 1134137e1a72fa6cea3135968bcea4d4ef2ab93c | 0 | debiasej/react-credit-card-blocker,debiasej/react-credit-card-blocker | import {appStep} from '../config/index'
import React, { Component } from 'react';
import AppBar from 'material-ui/AppBar';
import { deepOrange500 } from 'material-ui/styles/colors';
import getMuiTheme from 'material-ui/styles/getMuiTheme';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
import BankCardContainer from './bank-card-container';
import appHttp from '../requests/http-requests';
const muiTheme = getMuiTheme({
palette: {
accent1Color: deepOrange500,
},
});
class Main extends Component {
constructor(props, context) {
super(props, context);
this.state = {
step: appStep.INIT,
cards: [],
currentCard: -1,
isCurrentCardBlocked: null,
showPasswordField: false,
typedPassword: ""
}
}
componentDidMount() {
this._getCards();
}
// TODO: Change eval in producction
selectorOnChangeHandler = (selectorValue) => {
let cardId = {identificadorTarjeta: this.state.cards[selectorValue].identificador};
this.setState({ step: appStep.INIT });
appHttp.postCheckIfCardIsBlockedOrUnBlocked( JSON.stringify( cardId ), (data) => {
this.setState({
step: appStep.READY,
currentCard: selectorValue,
isCurrentCardBlocked: eval(data.isBlocked)
});
});
}
buttonClickedHandler = () => {
switch ( this.state.step ) {
case appStep.READY:
this.setState({ step: appStep.SIGNATURE }, () => {
this._blockOrUnblockCard();
});
break;
default:
console.err("The operation is not allowed");
}
}
backButtonClickedHandler = () => {
this.setState({
step: appStep.INIT,
currentCard: -1,
isCurrentCardBlocked: null,
showPasswordField: false,
typedPassword: ""
}, () => {
this._getCards();
});
}
onInputChangeHandler = (event) => {
this.setState({ typedPassword: event.target.value });
}
render() {
const isPasswordEmpty = (this.state.typedPassword).length == 0 ? true : false;
return (
<MuiThemeProvider muiTheme={ muiTheme }>
<div>
<AppBar title="Block or unblock" iconClassNameRight="muidocs-icon-navigation-expand-more" />
<BankCardContainer
step={ this.state.step }
cards={ this.state.cards }
showPasswordField ={ this.state.showPasswordField }
isCurrentCardBlocked= { this.state.isCurrentCardBlocked }
selectorOnChange={ this.selectorOnChangeHandler }
buttonClicked={ this.buttonClickedHandler }
backButtonClicked={ this.backButtonClickedHandler }
onInputChange={ this.onInputChangeHandler }
isPasswordEmpty={ isPasswordEmpty } />
</div>
</MuiThemeProvider>
);
}
_getCards() {
appHttp.getCards( data => {
let initCurrentCard = data.cuentasOrigen.length > 0 ? 0 : -1;
this.setState({ cards: data.cuentasOrigen, currentCard: initCurrentCard });
this.selectorOnChangeHandler(initCurrentCard);
});
}
_blockOrUnblockCard() {
let cardRequestData = {
identificadorTarjeta: this.state.cards[this.state.currentCard].identificador,
tipoAccionBloque: ""
};
let setShowPasswordField = () => {
this.setState({ showPasswordField: true });
}
if (this.state.isCurrentCardBlocked) {
cardRequestData.tipoAccionBloque = "ENCENDER";
appHttp.postUnblockCard( cardRequestData, cardRequestData => {
console.log(cardRequestData);
setShowPasswordField();
});
} else {
cardRequestData.tipoAccionBloque = "APAGAR";
appHttp.postBlockCard( cardRequestData, cardRequestData => {
console.log(cardRequestData);
setShowPasswordField();
});
}
}
}
export default Main;
| src/app/main.js | import {appStep} from '../config/index'
import React, { Component } from 'react';
import AppBar from 'material-ui/AppBar';
import { deepOrange500 } from 'material-ui/styles/colors';
import getMuiTheme from 'material-ui/styles/getMuiTheme';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
import BankCardContainer from './bank-card-container';
import appHttp from '../requests/http-requests';
const muiTheme = getMuiTheme({
palette: {
accent1Color: deepOrange500,
},
});
class Main extends Component {
constructor(props, context) {
super(props, context);
this.state = {
step: appStep.INIT,
cards: [],
currentCard: -1,
isCurrentCardBlocked: null,
showPasswordField: false,
typedPassword: ""
}
}
componentDidMount() {
appHttp.getCards( data => {
let initCurrentCard = data.cuentasOrigen.length > 0 ? 0 : -1;
this.setState({ cards: data.cuentasOrigen, currentCard: initCurrentCard });
this.selectorOnChangeHandler(initCurrentCard);
});
}
render() {
const isPasswordEmpty = (this.state.typedPassword).length == 0 ? true : false;
return (
<MuiThemeProvider muiTheme={ muiTheme }>
<div>
<AppBar title="Block or unblock" iconClassNameRight="muidocs-icon-navigation-expand-more" />
<BankCardContainer
step={ this.state.step }
cards={ this.state.cards }
showPasswordField ={ this.state.showPasswordField }
isCurrentCardBlocked= { this.state.isCurrentCardBlocked }
selectorOnChange={ this.selectorOnChangeHandler }
buttonClicked={ this.buttonClickedHandler }
backButtonClicked={ this.backButtonClickedHandler }
onInputChange={ this.onInputChangeHandler }
isPasswordEmpty={ isPasswordEmpty } />
</div>
</MuiThemeProvider>
);
}
// TODO: Change eval in producction
selectorOnChangeHandler = (selectorValue) => {
let cardId = {identificadorTarjeta: this.state.cards[selectorValue].identificador};
this.setState({ step: appStep.INIT });
appHttp.postCheckIfCardIsBlockedOrUnBlocked( JSON.stringify( cardId ), (data) => {
this.setState({
step: appStep.READY,
currentCard: selectorValue,
isCurrentCardBlocked: eval(data.isBlocked)
});
});
}
buttonClickedHandler = () => {
switch ( this.state.step ) {
case appStep.READY:
this.setState({ step: appStep.SIGNATURE }, () => {
this._blockOrUnblockCard();
});
break;
default:
console.err("The operation is not allowed");
}
}
_blockOrUnblockCard() {
let cardRequestData = {
identificadorTarjeta: this.state.cards[this.state.currentCard].identificador,
tipoAccionBloque: ""
};
let setShowPasswordField = () => {
this.setState({ showPasswordField: true });
}
if (this.state.isCurrentCardBlocked) {
cardRequestData.tipoAccionBloque = "ENCENDER";
appHttp.postUnblockCard( cardRequestData, cardRequestData => {
console.log(cardRequestData);
setShowPasswordField();
});
} else {
cardRequestData.tipoAccionBloque = "APAGAR";
appHttp.postBlockCard( cardRequestData, cardRequestData => {
console.log(cardRequestData);
setShowPasswordField();
});
}
}
backButtonClickedHandler = () => {
this.setState({
step: appStep.INIT,
cards: [],
currentCard: -1,
isCurrentCardBlocked: null,
typedPassword: ""
});
}
onInputChangeHandler = (event) => {
this.setState({ typedPassword: event.target.value });
}
}
export default Main;
| Add back button functionality
| src/app/main.js | Add back button functionality | <ide><path>rc/app/main.js
<ide> }
<ide>
<ide> componentDidMount() {
<del>
<del> appHttp.getCards( data => {
<del> let initCurrentCard = data.cuentasOrigen.length > 0 ? 0 : -1;
<del> this.setState({ cards: data.cuentasOrigen, currentCard: initCurrentCard });
<del> this.selectorOnChangeHandler(initCurrentCard);
<del> });
<del> }
<del>
<del> render() {
<del>
<del> const isPasswordEmpty = (this.state.typedPassword).length == 0 ? true : false;
<del>
<del> return (
<del> <MuiThemeProvider muiTheme={ muiTheme }>
<del> <div>
<del> <AppBar title="Block or unblock" iconClassNameRight="muidocs-icon-navigation-expand-more" />
<del> <BankCardContainer
<del> step={ this.state.step }
<del> cards={ this.state.cards }
<del> showPasswordField ={ this.state.showPasswordField }
<del> isCurrentCardBlocked= { this.state.isCurrentCardBlocked }
<del> selectorOnChange={ this.selectorOnChangeHandler }
<del> buttonClicked={ this.buttonClickedHandler }
<del> backButtonClicked={ this.backButtonClickedHandler }
<del> onInputChange={ this.onInputChangeHandler }
<del> isPasswordEmpty={ isPasswordEmpty } />
<del> </div>
<del> </MuiThemeProvider>
<del> );
<add> this._getCards();
<ide> }
<ide>
<ide> // TODO: Change eval in producction
<ide> default:
<ide> console.err("The operation is not allowed");
<ide> }
<add> }
<add>
<add> backButtonClickedHandler = () => {
<add> this.setState({
<add> step: appStep.INIT,
<add> currentCard: -1,
<add> isCurrentCardBlocked: null,
<add> showPasswordField: false,
<add> typedPassword: ""
<add> }, () => {
<add> this._getCards();
<add> });
<add> }
<add>
<add> onInputChangeHandler = (event) => {
<add> this.setState({ typedPassword: event.target.value });
<add> }
<add>
<add> render() {
<add>
<add> const isPasswordEmpty = (this.state.typedPassword).length == 0 ? true : false;
<add>
<add> return (
<add> <MuiThemeProvider muiTheme={ muiTheme }>
<add> <div>
<add> <AppBar title="Block or unblock" iconClassNameRight="muidocs-icon-navigation-expand-more" />
<add> <BankCardContainer
<add> step={ this.state.step }
<add> cards={ this.state.cards }
<add> showPasswordField ={ this.state.showPasswordField }
<add> isCurrentCardBlocked= { this.state.isCurrentCardBlocked }
<add> selectorOnChange={ this.selectorOnChangeHandler }
<add> buttonClicked={ this.buttonClickedHandler }
<add> backButtonClicked={ this.backButtonClickedHandler }
<add> onInputChange={ this.onInputChangeHandler }
<add> isPasswordEmpty={ isPasswordEmpty } />
<add> </div>
<add> </MuiThemeProvider>
<add> );
<add> }
<add>
<add> _getCards() {
<add> appHttp.getCards( data => {
<add> let initCurrentCard = data.cuentasOrigen.length > 0 ? 0 : -1;
<add> this.setState({ cards: data.cuentasOrigen, currentCard: initCurrentCard });
<add> this.selectorOnChangeHandler(initCurrentCard);
<add> });
<ide> }
<ide>
<ide> _blockOrUnblockCard() {
<ide> });
<ide> }
<ide> }
<del>
<del> backButtonClickedHandler = () => {
<del> this.setState({
<del> step: appStep.INIT,
<del> cards: [],
<del> currentCard: -1,
<del> isCurrentCardBlocked: null,
<del> typedPassword: ""
<del> });
<del> }
<del>
<del> onInputChangeHandler = (event) => {
<del> this.setState({ typedPassword: event.target.value });
<del> }
<ide> }
<ide>
<ide> export default Main; |
|
Java | apache-2.0 | becd47cc309facd40ae934a0a336079ab5794eec | 0 | codeaudit/OG-Platform,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,jerome79/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,nssales/OG-Platform | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.interpolation;
import static com.opengamma.analytics.math.matrix.MatrixAlgebraFactory.OG_ALGEBRA;
import static org.testng.Assert.assertEquals;
import java.util.Random;
import org.testng.annotations.Test;
import com.opengamma.analytics.math.function.PiecewisePolynomialFunction1D;
import com.opengamma.analytics.math.function.PiecewisePolynomialFunction2D;
import com.opengamma.analytics.math.matrix.DoubleMatrix2D;
/**
*
*/
public class BicubicSplineInterpolatorTest {
private static final double EPS = 1e-12;
private static final double INF = 1. / 0.;
/**
*
*/
@Test
public void linearTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = (x0Values[i] + 2.) * (x1Values[j] + 5.);
}
}
// System.out.println(new DoubleMatrix2D(yValues));
CubicSplineInterpolator method = new CubicSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(new CubicSplineInterpolator[] {method, method });
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
DoubleMatrix2D[][] coefsExp = new DoubleMatrix2D[n0Data - 1][n1Data - 1];
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
coefsExp[i][j] = new DoubleMatrix2D(new double[][] { {0., 0., 0., 0., }, {0., 0., 0., 0., }, {0., 0., 1., (5. + x1Values[j]) },
{0., 0., (2. + x0Values[i]), (2. + x0Values[i]) * (5. + x1Values[j]) } });
}
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
// PiecewisePolynomialFunction2D func = new PiecewisePolynomialFunction2D();
// final double[][] values = func.evaluate(result, x0Keys, x1Keys).getData();
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
for (int k = 0; k < orderExp; ++k) {
for (int l = 0; l < orderExp; ++l) {
final double ref = Math.abs(coefsExp[i][j].getData()[k][l]) == 0. ? 1. : Math.abs(coefsExp[i][j].getData()[k][l]);
assertEquals(result.getCoefs()[i][j].getData()[k][l], coefsExp[i][j].getData()[k][l], ref * EPS);
}
}
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Keys, x1Keys).getData();
for (int i = 0; i < n0Keys; ++i) {
for (int j = 0; j < n1Keys; ++j) {
final double expVal = (x0Keys[i] + 2.) * (x1Keys[j] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
// final PiecewisePolynomialFunction2D func = new PiecewisePolynomialFunction2D();
for (int i = 0; i < n0Keys; ++i) {
for (int j = 0; j < n1Keys; ++j) {
final double expVal = (x0Keys[i] + 2.) * (x1Keys[j] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
// assertEquals(resValues[i][j], func.evaluate(result, x0Keys[i], x1Keys[j]), ref * EPS);
}
}
{
final double expVal = (x0Keys[1] + 2.) * (x1Keys[2] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(interp.interpolate(x0Values, x1Values, yValues, x0Keys[1], x1Keys[2]), expVal, ref * EPS);
}
{
final double expVal = (x0Keys[23] + 2.) * (x1Keys[20] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(interp.interpolate(x0Values, x1Values, yValues, x0Keys[23], x1Keys[20]), expVal, ref * EPS);
}
// for (int i = 0; i < n0Keys; ++i) {
// System.out.print("\t" + x0Keys[i]);
// }
// System.out.print("\n");
// for (int j = 0; j < n1Keys; ++j) {
// System.out.print(x1Keys[j]);
// for (int i = 0; i < n0Keys; ++i) {
// System.out.print("\t" + values[i][j]);
// }
// System.out.print("\n");
// }
//
// System.out.print("\n");
}
/**
* f(x0,x1) = ( x0 - 1.5)^2 * (x1 - 2.)^2
*/
@Test
public void quadraticTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = (x0Values[i] - 1.5) * (x0Values[i] - 1.5) * (x1Values[j] - 2.) * (x1Values[j] - 2.);
}
}
CubicSplineInterpolator method = new CubicSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
final double ref = Math.abs(yValues[i][j]) == 0. ? 1. : Math.abs(yValues[i][j]);
assertEquals(result.getCoefs()[i][j].getData()[orderExp - 1][orderExp - 1], yValues[i][j], ref * EPS);
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Values, x1Values).getData();
final PiecewisePolynomialFunction2D func2D = new PiecewisePolynomialFunction2D();
double[][] resDiffX0 = func2D.differentiateX0(result, x0Values, x1Values).getData();
double[][] resDiffX1 = func2D.differentiateX1(result, x0Values, x1Values).getData();
final PiecewisePolynomialFunction1D func1D = new PiecewisePolynomialFunction1D();
double[][] expDiffX0 = func1D.differentiate(method.interpolate(x0Values, OG_ALGEBRA.getTranspose(new DoubleMatrix2D(yValues)).getData()), x0Values).getData();
double[][] expDiffX1 = func1D.differentiate(method.interpolate(x1Values, yValues), x1Values).getData();
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX1[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX1[i][j], expVal, ref * EPS);
}
}
// System.out.println(new DoubleMatrix2D(expDiffX0));
// System.out.println(new DoubleMatrix2D(resDiffX0));
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX0[j][i];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX0[i][j], expVal, ref * EPS);
}
}
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = yValues[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
}
/**
* f(x0,x1) = ( x0 - 1.)^3 * (x1 + 14./13.)^3
*/
@Test
public void cubicTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = (x0Values[i] - 1.) * (x0Values[i] - 1.) * (x0Values[i] - 1.) * (x1Values[j] + 14. / 13.) * (x1Values[j] + 14. / 13.) * (x1Values[j] + 14. / 13.);
}
}
CubicSplineInterpolator method = new CubicSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
final double ref = Math.abs(yValues[i][j]) == 0. ? 1. : Math.abs(yValues[i][j]);
assertEquals(result.getCoefs()[i][j].getData()[orderExp - 1][orderExp - 1], yValues[i][j], ref * EPS);
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Values, x1Values).getData();
final PiecewisePolynomialFunction2D func2D = new PiecewisePolynomialFunction2D();
double[][] resDiffX0 = func2D.differentiateX0(result, x0Values, x1Values).getData();
double[][] resDiffX1 = func2D.differentiateX1(result, x0Values, x1Values).getData();
final PiecewisePolynomialFunction1D func1D = new PiecewisePolynomialFunction1D();
double[][] expDiffX0 = func1D.differentiate(method.interpolate(x0Values, OG_ALGEBRA.getTranspose(new DoubleMatrix2D(yValues)).getData()), x0Values).getData();
double[][] expDiffX1 = func1D.differentiate(method.interpolate(x1Values, yValues), x1Values).getData();
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX1[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX1[i][j], expVal, ref * EPS);
}
}
// System.out.println(new DoubleMatrix2D(expDiffX0));
// System.out.println(new DoubleMatrix2D(resDiffX0));
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX0[j][i];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX0[i][j], expVal, ref * EPS);
}
}
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = yValues[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
}
/**
*
*/
@Test
public void crossDerivativeTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[][] { {1.0, -1.0, 0.0, 1.0, 0.0, }, {1.0, -1.0, 0.0, 1.0, -2.0 }, {1.0, -2.0, 0.0, -2.0, -2.0 }, {-1.0, -1.0, -2.0, -2.0, -1.0 } };
NaturalSplineInterpolator method = new NaturalSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
final double ref = Math.abs(yValues[i][j]) == 0. ? 1. : Math.abs(yValues[i][j]);
assertEquals(result.getCoefs()[i][j].getData()[orderExp - 1][orderExp - 1], yValues[i][j], ref * EPS);
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Values, x1Values).getData();
final PiecewisePolynomialFunction2D func2D = new PiecewisePolynomialFunction2D();
double[][] resDiffX0 = func2D.differentiateX0(result, x0Values, x1Values).getData();
double[][] resDiffX1 = func2D.differentiateX1(result, x0Values, x1Values).getData();
final PiecewisePolynomialFunction1D func1D = new PiecewisePolynomialFunction1D();
double[][] expDiffX0 = func1D.differentiate(method.interpolate(x0Values, OG_ALGEBRA.getTranspose(new DoubleMatrix2D(yValues)).getData()), x0Values).getData();
double[][] expDiffX1 = func1D.differentiate(method.interpolate(x1Values, yValues), x1Values).getData();
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX1[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX1[i][j], expVal, ref * EPS);
}
}
// System.out.println(new DoubleMatrix2D(expDiffX0));
// System.out.println(new DoubleMatrix2D(resDiffX0));
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX0[j][i];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX0[i][j], expVal, ref * EPS);
}
}
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = yValues[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nullx0Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
x0Values = null;
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nullx1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
x1Values = null;
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nullyTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
yValues = null;
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void wrongLengthx0Test() {
double[] x0Values = new double[] {0., 1., 2. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void wrongLengthx1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2., 3. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void shortx0Test() {
double[] x0Values = new double[] {1. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] {{1., 2., 4. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void shortx1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0. };
double[][] yValues = new double[][] { {1. }, {-1. }, {2. }, {5. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void infX0Test() {
double[] x0Values = new double[] {0., 1., 2., INF };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nanX0Test() {
double[] x0Values = new double[] {0., 1., 2., Double.NaN };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void infX1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., INF };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nanX1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., Double.NaN };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void infYTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., INF }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nanYTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., Double.NaN } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void coincideX0Test() {
double[] x0Values = new double[] {0., 1., 1., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void coincideX1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 1. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void notTwoMethodsTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new PiecewisePolynomialInterpolator[] {new CubicSplineInterpolator() });
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void notKnotRevoveredTests() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1.e-20, 3.e-120, 5.e120 }, {2.e-20, 3.e-120, 4.e-120 }, {1.e-20, 1.e-120, 1.e-20 }, {4.e-120, 3.e-20, 2.e-20 } };
BicubicSplineInterpolator intp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
intp.interpolate(x0Values, x1Values, yValues);
}
/**
* Tests below for debugging
*/
@Test
(enabled = false)
public void printTest() {
// double[] x0Values = new double[] {0., 1., 2., 3. };
// double[] x1Values = new double[] {0., 0.000000000001, 2. };
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1.e-20, 3.e-120, 5.e-20 }, {2.e-20, 3.e-120, 4.e-120 }, {1.e-20, 1.e-120, 1.e-20 }, {4.e-120, 3.e-20, 2.e-20 } };
// double[] x0Values = new double[] {0., 1., 2. };
// double[] x1Values = new double[] {0., 1., 2., 3. };
// double[][] yValues = new double[][] { {1., 3., 5., 7. }, {2., 3., 4., 5. }, {1., 1., 1., 1. } };
BicubicSplineInterpolator intp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
PiecewisePolynomialResult2D result2D = intp.interpolate(x0Values, x1Values, yValues);
System.out.println(result2D.getCoefs()[0][0]);
System.out.println(result2D.getCoefs()[2][1]);
final int n0Keys = 31;
final int n1Keys = 21;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 3. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = 0. + 2. * i / (n1Keys - 1);
}
// final int n0Keys = 61;
// final int n1Keys = 101;
// double[] x0Keys = new double[n0Keys];
// double[] x1Keys = new double[n1Keys];
// for (int i = 0; i < n0Keys; ++i) {
// x0Keys[i] = -1. + 4. * i / (n0Keys - 1);
// }
// for (int i = 0; i < n1Keys; ++i) {
// x1Keys[i] = -1. + 5. * i / (n1Keys - 1);
// }
PiecewisePolynomialFunction2D func = new PiecewisePolynomialFunction2D();
final double[][] values = func.evaluate(result2D, x0Keys, x1Keys).getData();
for (int i = 0; i < n0Keys; ++i) {
System.out.print("\t" + x0Keys[i]);
}
System.out.print("\n");
for (int j = 0; j < n1Keys; ++j) {
System.out.print(x1Keys[j]);
for (int i = 0; i < n0Keys; ++i) {
System.out.print("\t" + values[i][j]);
}
System.out.print("\n");
}
System.out.print("\n");
for (int i = 0; i < x0Values.length; ++i) {
System.out.print("\t" + x0Values[i]);
}
System.out.print("\n");
for (int j = 0; j < x1Values.length; ++j) {
System.out.print(x1Values[j]);
for (int i = 0; i < x0Values.length; ++i) {
System.out.print("\t" + yValues[i][j]);
}
System.out.print("\n");
}
System.out.print("\n");
}
/**
*
*/
@Test
(enabled = false)
public void randomTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
final Random randObj = new Random();
int k = 0;
while (k < 100000) {
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = randObj.nextInt(4) - 2.;
}
}
System.out.println(new DoubleMatrix2D(yValues));
NaturalSplineInterpolator method = new NaturalSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
interp.interpolate(x0Values, x1Values, yValues);
++k;
}
}
}
| projects/OG-Analytics/src/test/java/com/opengamma/analytics/math/interpolation/BicubicSplineInterpolatorTest.java | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.interpolation;
import static com.opengamma.analytics.math.matrix.MatrixAlgebraFactory.OG_ALGEBRA;
import static org.testng.Assert.assertEquals;
import java.util.Random;
import org.testng.annotations.Test;
import com.opengamma.analytics.math.function.PiecewisePolynomialFunction1D;
import com.opengamma.analytics.math.function.PiecewisePolynomialFunction2D;
import com.opengamma.analytics.math.matrix.DoubleMatrix2D;
/**
*
*/
public class BicubicSplineInterpolatorTest {
private static final double EPS = 1e-12;
private static final double INF = 1. / 0.;
/**
*
*/
@Test
public void linearTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = (x0Values[i] + 2.) * (x1Values[j] + 5.);
}
}
// System.out.println(new DoubleMatrix2D(yValues));
CubicSplineInterpolator method = new CubicSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(new CubicSplineInterpolator[] {method, method });
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
DoubleMatrix2D[][] coefsExp = new DoubleMatrix2D[n0Data - 1][n1Data - 1];
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
coefsExp[i][j] = new DoubleMatrix2D(new double[][] { {0., 0., 0., 0., }, {0., 0., 0., 0., }, {0., 0., 1., (5. + x1Values[j]) },
{0., 0., (2. + x0Values[i]), (2. + x0Values[i]) * (5. + x1Values[j]) } });
}
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
// PiecewisePolynomialFunction2D func = new PiecewisePolynomialFunction2D();
// final double[][] values = func.evaluate(result, x0Keys, x1Keys).getData();
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
for (int k = 0; k < orderExp; ++k) {
for (int l = 0; l < orderExp; ++l) {
final double ref = Math.abs(coefsExp[i][j].getData()[k][l]) == 0. ? 1. : Math.abs(coefsExp[i][j].getData()[k][l]);
assertEquals(result.getCoefs()[i][j].getData()[k][l], coefsExp[i][j].getData()[k][l], ref * EPS);
}
}
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Keys, x1Keys).getData();
for (int i = 0; i < n0Keys; ++i) {
for (int j = 0; j < n1Keys; ++j) {
final double expVal = (x0Keys[i] + 2.) * (x1Keys[j] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
// final PiecewisePolynomialFunction2D func = new PiecewisePolynomialFunction2D();
for (int i = 0; i < n0Keys; ++i) {
for (int j = 0; j < n1Keys; ++j) {
final double expVal = (x0Keys[i] + 2.) * (x1Keys[j] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
// assertEquals(resValues[i][j], func.evaluate(result, x0Keys[i], x1Keys[j]), ref * EPS);
}
}
{
final double expVal = (x0Keys[1] + 2.) * (x1Keys[2] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(interp.interpolate(x0Values, x1Values, yValues, x0Keys[1], x1Keys[2]), expVal, ref * EPS);
}
{
final double expVal = (x0Keys[23] + 2.) * (x1Keys[20] + 5.);
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(interp.interpolate(x0Values, x1Values, yValues, x0Keys[23], x1Keys[20]), expVal, ref * EPS);
}
// for (int i = 0; i < n0Keys; ++i) {
// System.out.print("\t" + x0Keys[i]);
// }
// System.out.print("\n");
// for (int j = 0; j < n1Keys; ++j) {
// System.out.print(x1Keys[j]);
// for (int i = 0; i < n0Keys; ++i) {
// System.out.print("\t" + values[i][j]);
// }
// System.out.print("\n");
// }
//
// System.out.print("\n");
}
/**
* f(x0,x1) = ( x0 - 1.5)^2 * (x1 - 2.)^2
*/
@Test
public void quadraticTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = (x0Values[i] - 1.5) * (x0Values[i] - 1.5) * (x1Values[j] - 2.) * (x1Values[j] - 2.);
}
}
CubicSplineInterpolator method = new CubicSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
final double ref = Math.abs(yValues[i][j]) == 0. ? 1. : Math.abs(yValues[i][j]);
assertEquals(result.getCoefs()[i][j].getData()[orderExp - 1][orderExp - 1], yValues[i][j], ref * EPS);
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Values, x1Values).getData();
final PiecewisePolynomialFunction2D func2D = new PiecewisePolynomialFunction2D();
double[][] resDiffX0 = func2D.differentiateX0(result, x0Values, x1Values).getData();
double[][] resDiffX1 = func2D.differentiateX1(result, x0Values, x1Values).getData();
final PiecewisePolynomialFunction1D func1D = new PiecewisePolynomialFunction1D();
double[][] expDiffX0 = func1D.differentiate(method.interpolate(x0Values, OG_ALGEBRA.getTranspose(new DoubleMatrix2D(yValues)).getData()), x0Values).getData();
double[][] expDiffX1 = func1D.differentiate(method.interpolate(x1Values, yValues), x1Values).getData();
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX1[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX1[i][j], expVal, ref * EPS);
}
}
// System.out.println(new DoubleMatrix2D(expDiffX0));
// System.out.println(new DoubleMatrix2D(resDiffX0));
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX0[j][i];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX0[i][j], expVal, ref * EPS);
}
}
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = yValues[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
}
/**
* f(x0,x1) = ( x0 - 1.)^3 * (x1 + 14./13.)^3
*/
@Test
public void cubicTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = (x0Values[i] - 1.) * (x0Values[i] - 1.) * (x0Values[i] - 1.) * (x1Values[j] + 14. / 13.) * (x1Values[j] + 14. / 13.) * (x1Values[j] + 14. / 13.);
}
}
CubicSplineInterpolator method = new CubicSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
final double ref = Math.abs(yValues[i][j]) == 0. ? 1. : Math.abs(yValues[i][j]);
assertEquals(result.getCoefs()[i][j].getData()[orderExp - 1][orderExp - 1], yValues[i][j], ref * EPS);
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Values, x1Values).getData();
final PiecewisePolynomialFunction2D func2D = new PiecewisePolynomialFunction2D();
double[][] resDiffX0 = func2D.differentiateX0(result, x0Values, x1Values).getData();
double[][] resDiffX1 = func2D.differentiateX1(result, x0Values, x1Values).getData();
final PiecewisePolynomialFunction1D func1D = new PiecewisePolynomialFunction1D();
double[][] expDiffX0 = func1D.differentiate(method.interpolate(x0Values, OG_ALGEBRA.getTranspose(new DoubleMatrix2D(yValues)).getData()), x0Values).getData();
double[][] expDiffX1 = func1D.differentiate(method.interpolate(x1Values, yValues), x1Values).getData();
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX1[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX1[i][j], expVal, ref * EPS);
}
}
// System.out.println(new DoubleMatrix2D(expDiffX0));
// System.out.println(new DoubleMatrix2D(resDiffX0));
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX0[j][i];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX0[i][j], expVal, ref * EPS);
}
}
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = yValues[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
}
/**
*
*/
@Test
public void crossDerivativeTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[][] { {1.0, -1.0, 0.0, 1.0, 0.0, }, {1.0, -1.0, 0.0, 1.0, -2.0 }, {1.0, -2.0, 0.0, -2.0, -2.0 }, {-1.0, -1.0, -2.0, -2.0, -1.0 } };
NaturalSplineInterpolator method = new NaturalSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
PiecewisePolynomialResult2D result = interp.interpolate(x0Values, x1Values, yValues);
final int n0IntExp = n0Data - 1;
final int n1IntExp = n1Data - 1;
final int orderExp = 4;
final int n0Keys = 51;
final int n1Keys = 61;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 5. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = -2. + 6. * i / (n1Keys - 1);
}
assertEquals(result.getNumberOfIntervals()[0], n0IntExp);
assertEquals(result.getNumberOfIntervals()[1], n1IntExp);
assertEquals(result.getOrder()[0], orderExp);
assertEquals(result.getOrder()[1], orderExp);
for (int i = 0; i < n0Data; ++i) {
final double ref = Math.abs(x0Values[i]) == 0. ? 1. : Math.abs(x0Values[i]);
assertEquals(result.getKnots0().getData()[i], x0Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(0).getData()[i], x0Values[i], ref * EPS);
}
for (int i = 0; i < n1Data; ++i) {
final double ref = Math.abs(x1Values[i]) == 0. ? 1. : Math.abs(x1Values[i]);
assertEquals(result.getKnots1().getData()[i], x1Values[i], ref * EPS);
assertEquals(result.getKnots2D().get(1).getData()[i], x1Values[i], ref * EPS);
}
for (int i = 0; i < n0Data - 1; ++i) {
for (int j = 0; j < n1Data - 1; ++j) {
final double ref = Math.abs(yValues[i][j]) == 0. ? 1. : Math.abs(yValues[i][j]);
assertEquals(result.getCoefs()[i][j].getData()[orderExp - 1][orderExp - 1], yValues[i][j], ref * EPS);
}
}
double[][] resValues = interp.interpolate(x0Values, x1Values, yValues, x0Values, x1Values).getData();
final PiecewisePolynomialFunction2D func2D = new PiecewisePolynomialFunction2D();
double[][] resDiffX0 = func2D.differentiateX0(result, x0Values, x1Values).getData();
double[][] resDiffX1 = func2D.differentiateX1(result, x0Values, x1Values).getData();
final PiecewisePolynomialFunction1D func1D = new PiecewisePolynomialFunction1D();
double[][] expDiffX0 = func1D.differentiate(method.interpolate(x0Values, OG_ALGEBRA.getTranspose(new DoubleMatrix2D(yValues)).getData()), x0Values).getData();
double[][] expDiffX1 = func1D.differentiate(method.interpolate(x1Values, yValues), x1Values).getData();
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX1[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX1[i][j], expVal, ref * EPS);
}
}
// System.out.println(new DoubleMatrix2D(expDiffX0));
// System.out.println(new DoubleMatrix2D(resDiffX0));
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = expDiffX0[j][i];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resDiffX0[i][j], expVal, ref * EPS);
}
}
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
final double expVal = yValues[i][j];
final double ref = Math.abs(expVal) == 0. ? 1. : Math.abs(expVal);
assertEquals(resValues[i][j], expVal, ref * EPS);
}
}
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nullx0Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
x0Values = null;
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nullx1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
x1Values = null;
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nullyTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
yValues = null;
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void wrongLengthx0Test() {
double[] x0Values = new double[] {0., 1., 2. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void wrongLengthx1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2., 3. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void shortx0Test() {
double[] x0Values = new double[] {1. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] {{1., 2., 4. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void shortx1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0. };
double[][] yValues = new double[][] { {1. }, {-1. }, {2. }, {5. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void infX0Test() {
double[] x0Values = new double[] {0., 1., 2., INF };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nanX0Test() {
double[] x0Values = new double[] {0., 1., 2., Double.NaN };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void infX1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., INF };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nanX1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., Double.NaN };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void infYTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., INF }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void nanYTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., Double.NaN } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void coincideX0Test() {
double[] x0Values = new double[] {0., 1., 1., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void coincideX1Test() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 1. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void notTwoMethodsTest() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1., 2., 4. }, {-1., 2., -4. }, {2., 3., 4. }, {5., 2., 1. } };
BicubicSplineInterpolator interp = new BicubicSplineInterpolator(new PiecewisePolynomialInterpolator[] {new CubicSplineInterpolator() });
interp.interpolate(x0Values, x1Values, yValues);
}
/**
*
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void notKnotRevoveredTests() {
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1.e-20, 3.e-120, 5.e-20 }, {2.e-20, 3.e-120, 4.e-120 }, {1.e-20, 1.e-120, 1.e-20 }, {4.e-120, 3.e-20, 2.e-20 } };
BicubicSplineInterpolator intp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
intp.interpolate(x0Values, x1Values, yValues);
}
/**
* Tests below for debugging
*/
@Test
(enabled = false)
public void printTest() {
// double[] x0Values = new double[] {0., 1., 2., 3. };
// double[] x1Values = new double[] {0., 0.000000000001, 2. };
double[] x0Values = new double[] {0., 1., 2., 3. };
double[] x1Values = new double[] {0., 1., 2. };
double[][] yValues = new double[][] { {1.e-20, 3.e-120, 5.e-20 }, {2.e-20, 3.e-120, 4.e-120 }, {1.e-20, 1.e-120, 1.e-20 }, {4.e-120, 3.e-20, 2.e-20 } };
// double[] x0Values = new double[] {0., 1., 2. };
// double[] x1Values = new double[] {0., 1., 2., 3. };
// double[][] yValues = new double[][] { {1., 3., 5., 7. }, {2., 3., 4., 5. }, {1., 1., 1., 1. } };
BicubicSplineInterpolator intp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
PiecewisePolynomialResult2D result2D = intp.interpolate(x0Values, x1Values, yValues);
System.out.println(result2D.getCoefs()[0][0]);
System.out.println(result2D.getCoefs()[2][1]);
final int n0Keys = 31;
final int n1Keys = 21;
double[] x0Keys = new double[n0Keys];
double[] x1Keys = new double[n1Keys];
for (int i = 0; i < n0Keys; ++i) {
x0Keys[i] = 0. + 3. * i / (n0Keys - 1);
}
for (int i = 0; i < n1Keys; ++i) {
x1Keys[i] = 0. + 2. * i / (n1Keys - 1);
}
// final int n0Keys = 61;
// final int n1Keys = 101;
// double[] x0Keys = new double[n0Keys];
// double[] x1Keys = new double[n1Keys];
// for (int i = 0; i < n0Keys; ++i) {
// x0Keys[i] = -1. + 4. * i / (n0Keys - 1);
// }
// for (int i = 0; i < n1Keys; ++i) {
// x1Keys[i] = -1. + 5. * i / (n1Keys - 1);
// }
PiecewisePolynomialFunction2D func = new PiecewisePolynomialFunction2D();
final double[][] values = func.evaluate(result2D, x0Keys, x1Keys).getData();
for (int i = 0; i < n0Keys; ++i) {
System.out.print("\t" + x0Keys[i]);
}
System.out.print("\n");
for (int j = 0; j < n1Keys; ++j) {
System.out.print(x1Keys[j]);
for (int i = 0; i < n0Keys; ++i) {
System.out.print("\t" + values[i][j]);
}
System.out.print("\n");
}
System.out.print("\n");
for (int i = 0; i < x0Values.length; ++i) {
System.out.print("\t" + x0Values[i]);
}
System.out.print("\n");
for (int j = 0; j < x1Values.length; ++j) {
System.out.print(x1Values[j]);
for (int i = 0; i < x0Values.length; ++i) {
System.out.print("\t" + yValues[i][j]);
}
System.out.print("\n");
}
System.out.print("\n");
}
/**
*
*/
@Test
(enabled = false)
public void randomTest() {
double[] x0Values = new double[] {1., 2., 3., 4. };
double[] x1Values = new double[] {-1., 0., 1., 2., 3. };
final int n0Data = x0Values.length;
final int n1Data = x1Values.length;
double[][] yValues = new double[n0Data][n1Data];
final Random randObj = new Random();
int k = 0;
while (k < 100000) {
for (int i = 0; i < n0Data; ++i) {
for (int j = 0; j < n1Data; ++j) {
yValues[i][j] = randObj.nextInt(4) - 2.;
}
}
System.out.println(new DoubleMatrix2D(yValues));
NaturalSplineInterpolator method = new NaturalSplineInterpolator();
PiecewisePolynomialInterpolator2D interp = new BicubicSplineInterpolator(method);
interp.interpolate(x0Values, x1Values, yValues);
++k;
}
}
}
| [PLAT-3314] Bicubic Spline Interpolation
| projects/OG-Analytics/src/test/java/com/opengamma/analytics/math/interpolation/BicubicSplineInterpolatorTest.java | [PLAT-3314] Bicubic Spline Interpolation | <ide><path>rojects/OG-Analytics/src/test/java/com/opengamma/analytics/math/interpolation/BicubicSplineInterpolatorTest.java
<ide> public void notKnotRevoveredTests() {
<ide> double[] x0Values = new double[] {0., 1., 2., 3. };
<ide> double[] x1Values = new double[] {0., 1., 2. };
<del> double[][] yValues = new double[][] { {1.e-20, 3.e-120, 5.e-20 }, {2.e-20, 3.e-120, 4.e-120 }, {1.e-20, 1.e-120, 1.e-20 }, {4.e-120, 3.e-20, 2.e-20 } };
<add> double[][] yValues = new double[][] { {1.e-20, 3.e-120, 5.e120 }, {2.e-20, 3.e-120, 4.e-120 }, {1.e-20, 1.e-120, 1.e-20 }, {4.e-120, 3.e-20, 2.e-20 } };
<ide>
<ide> BicubicSplineInterpolator intp = new BicubicSplineInterpolator(new CubicSplineInterpolator());
<ide> intp.interpolate(x0Values, x1Values, yValues); |
|
JavaScript | bsd-3-clause | 72667cd046800607f06c93b5ad9f385ef485fd35 | 0 | simonlindholm/fireclosure | /* See license.txt for terms of usage */
define([
"firebug/lib/object",
"firebug/firebug",
"firebug/chrome/reps",
"firebug/lib/locale",
"firebug/lib/events",
"firebug/lib/wrapper",
"firebug/lib/dom",
"firebug/lib/string",
"firebug/lib/array",
"firebug/console/autoCompleter",
],
function(Obj, Firebug, FirebugReps, Locale, Events, Wrapper, Dom, Str, Arr) {
// ********************************************************************************************* //
// Constants
const reOpenBracket = /[\[\(\{]/;
const reCloseBracket = /[\]\)\}]/;
const reJSChar = /[a-zA-Z0-9$_]/;
const reLiteralExpr = /^[ "0-9,]*$/;
// ********************************************************************************************* //
// JavaScript auto-completion
var OldJSAutoCompleter = Firebug.JSAutoCompleter;
Firebug.JSAutoCompleter = function(textBox, completionBox, options)
{
OldJSAutoCompleter.apply(this, arguments);
this.shouldIncludeHint = function()
{
return (this.completions &&
this.completionBase.hasScope &&
!this.completions.prefix &&
!/\[['"]|\.%/.test(this.completionBase.expr.slice(-2)));
}
/* Modified to use the right parsing/evaluation functions, and pass scope
* data to them. */
this.createCandidates = function(context)
{
var offset = this.textBox.selectionStart;
if (offset !== this.textBox.value.length)
{
this.hide();
return;
}
var value = this.textBox.value;
// Create a simplified expression by redacting contents/normalizing
// delimiters of strings and regexes, to make parsing easier.
// Give up if the syntax is too weird.
var svalue = simplifyExpr(value);
if (svalue === null)
{
this.hide();
return;
}
if (killCompletions(svalue, value))
{
this.hide();
return;
}
// Find the expression to be completed.
var parseStart = getExpressionOffset(svalue);
var parsed = value.substr(parseStart);
var sparsed = svalue.substr(parseStart);
// Find which part of it represents the property access.
var propertyStart = getPropertyOffset(sparsed);
var prop = parsed.substring(propertyStart);
var spreExpr = sparsed.substr(0, propertyStart);
var preExpr = parsed.substr(0, propertyStart);
this.completionBase.pre = value.substr(0, parseStart);
if (FBTrace.DBG_COMMANDLINE)
{
var sep = (parsed.indexOf("|") > -1) ? "^" : "|";
FBTrace.sysout("Completing: " + this.completionBase.pre + sep + preExpr + sep + prop);
}
// We only need to calculate a new candidate list if the expression has
// changed (we can ignore this.completionBase.pre since completions do not
// depend upon that).
if (preExpr !== this.completionBase.expr)
{
this.completionBase.expr = preExpr;
this.completionBase.candidates = [];
this.completionBase.hasScope = false;
autoCompleteEval(this.completionBase, context, preExpr, spreExpr,
this.options.includeCurrentScope);
}
this.createCompletions(prop);
};
/* Hacked to include the .% hint in the count */
var oldShowCompletions = this.showCompletions;
this.showCompletions = function()
{
if (this.completions && this.shouldIncludeHint())
{
// Add a sentinel (removed further down, and in popupCandidates) to
// make the count right in the real showCompletions, without having
// to duplicate logic.
this.completions.list.push(undefined);
this.completions.hasHintElement = true;
}
oldShowCompletions.apply(this, arguments);
if (this.completions && this.completions.hasHintElement)
{
this.completions.list.pop();
delete this.completions.hasHintElement;
}
};
/* Edited to include the .% hint.
   Rebuilds the completion popup: removes the sentinel added by the patched
   showCompletions, renders up to commandCompletionLineLimit rows (scrolling
   the window so the selected row stays visible), and appends an italic
   ".%"-hint row when scope completions are available. */
this.popupCandidates = function()
{
    if (this.completions.hasHintElement)
    {
        // Strip the sentinel pushed by showCompletions before rendering.
        this.completions.list.pop();
        delete this.completions.hasHintElement;
    }

    var commandCompletionLineLimit = 40;

    Dom.eraseNode(this.completionPopup);
    this.selectedPopupElement = null;
    var vbox = this.completionPopup.ownerDocument.createElement("vbox");
    this.completionPopup.appendChild(vbox);
    vbox.classList.add("fbCommandLineCompletions");

    var title = this.completionPopup.ownerDocument.
        createElementNS("http://www.w3.org/1999/xhtml","div");
    title.innerHTML = Locale.$STR("console.Use Arrow keys or Enter");
    title.classList.add("fbPopupTitle");
    vbox.appendChild(title);

    var escPrefix = Str.escapeForTextNode(this.textBox.value);

    // Total rows = real completions, plus one for the hint when shown.
    var listSize = this.completions.list.length;
    if (this.shouldIncludeHint())
        ++listSize;

    var showTop = 0;
    var showBottom = listSize;
    if (listSize > commandCompletionLineLimit)
    {
        // Implement manual scrolling: keep the selection within the window.
        if (this.completions.index <= (commandCompletionLineLimit - 3))
        {
            // We are in the top part of the list.
            showBottom = commandCompletionLineLimit;
        }
        else
        {
            if (this.completions.index > listSize - 3)
                showBottom = listSize;
            else
                showBottom = this.completions.index + 3;
        }
        showTop = showBottom - commandCompletionLineLimit;
    }

    for (var i = showTop; i < showBottom; i++)
    {
        var hbox = this.completionPopup.ownerDocument.
            createElementNS("http://www.w3.org/1999/xhtml","div");
        if (i == this.completions.list.length) {
            // Virtual row past the end of the list: the ".%" hint.
            var text = this.completionPopup.ownerDocument.
                createElementNS("http://www.w3.org/1999/xhtml","span");
            text.innerHTML = "% for scope members...";
            text.style.fontStyle = "italic";
            text.style.paddingLeft = "3px";
            text.style.fontSize = "90%";
            text.style.color = "#777";
            hbox.appendChild(text);
        }
        else {
            // Regular completion row: typed prefix + completed suffix.
            hbox.completionIndex = i;

            var pre = this.completionPopup.ownerDocument.
                createElementNS("http://www.w3.org/1999/xhtml","span");
            pre.innerHTML = escPrefix;
            pre.classList.add("userTypedText");

            var completion = this.completions.list[i].substr(this.completions.prefix.length);
            var post = this.completionPopup.ownerDocument.
                createElementNS("http://www.w3.org/1999/xhtml","span");
            post.innerHTML = Str.escapeForTextNode(completion);
            post.classList.add("completionText");

            if (i === this.completions.index)
                this.selectedPopupElement = hbox;

            hbox.appendChild(pre);
            hbox.appendChild(post);
        }
        vbox.appendChild(hbox);
    }

    if (this.selectedPopupElement)
        this.selectedPopupElement.setAttribute("selected", "true");

    this.completionPopup.openPopup(this.textBox, "before_start", 0, 0, false, false);
};
};
/**
 * Transform an expression from using .% into something JavaScript-friendly, which
 * delegates to _FirebugCommandLine.
 * Used only in module.js, but autoCompleter.js has so many nice helper functions.
 *
 * Each occurrence of "<subexpr>.%" is rewritten to "<fname>(<subexpr>).",
 * so scope-member access goes through the injected helper.
 *
 * @param {string} expr  the user-typed expression
 * @param {string} fname name of the helper function to delegate to
 * @return {string} the transformed expression, or the input unchanged when it
 *     cannot be simplified (malformed syntax)
 */
Firebug.JSAutoCompleter.transformScopeExpr = function(expr, fname)
{
    // Work on a simplified copy (string/regex contents redacted) so ".%"
    // inside literals is never mistaken for the scope operator.
    var sexpr = simplifyExpr(expr);
    if (!sexpr) return expr;

    var search = 0;
    for (;;) {
        var end = sexpr.indexOf(".%", search);
        if (end === -1) break;
        var start = getExpressionOffset(sexpr, end);
        expr = expr.substr(0, start) + fname + "(" +
            expr.substring(start, end) + ")." +
            expr.substr(end+2);
        sexpr = sexpr.substr(0, start) + fname + "(" +
            sexpr.substring(start, end) + ")." +
            sexpr.substr(end+2);
        // Resume the scan just past the inserted "fname(...).".
        // Bug fix: the old code computed `end + fname + "().".length`, which
        // concatenates a number with a string; indexOf coerced that to NaN
        // and restarted from index 0 on every iteration.
        search = end + fname.length + "().".length;
    }
    return expr;
};
// ********************************************************************************************* //
// Auto-completion helpers
/**
 * Try to find the position at which the expression to be completed starts.
 *
 * Scans backwards from 'start' (default: end of string) over identifier
 * characters, '.' / '.%' chains, bracketed sub-expressions and (redacted)
 * string contents, stopping at the first character that cannot belong to
 * the expression. Expects input already normalized by simplifyExpr.
 *
 * @param {string} command  simplified command-line text
 * @param {number=} start   index to scan backwards from (defaults to end)
 * @return {number} index of the first character of the trailing expression
 */
function getExpressionOffset(command, start)
{
    if (typeof start === 'undefined')
        start = command.length;

    var bracketCount = 0, instr = false;

    // When completing []-accessed properties, start instead from the last [.
    var lastBr = command.lastIndexOf("[", start);
    if (lastBr !== -1 && /^" *$/.test(command.substring(lastBr+1, start)))
        start = lastBr;

    for (var i = start-1; i >= 0; --i)
    {
        var c = command[i];
        if (reOpenBracket.test(c))
        {
            // An opening bracket either matches a closing one we already
            // passed, or terminates the expression.
            if (bracketCount)
                --bracketCount;
            else
                break;
        }
        else if (reCloseBracket.test(c))
        {
            // A closing bracket only continues the expression when followed
            // by further property access ('.' or '[').
            var next = command[i + 1];
            if (bracketCount === 0 && next !== "." && next !== "[")
                break;
            else
                ++bracketCount;
        }
        else if (bracketCount === 0)
        {
            if (c === '"') instr = !instr;
            else if (instr || reJSChar.test(c) || c === "." ||
                (c === "%" && command[i-1] === "."))
                ;   // still part of the expression (identifier, '.', '.%')
            else
                break;
        }
    }
    ++i;

    // The 'new' operator has higher precedence than function calls, so, if
    // present, it should be included if the expression contains a parenthesis.
    if (i-4 >= 0 && command.indexOf("(", i) !== -1 && command.substr(i-4, 4) === "new ")
    {
        i -= 4;
    }

    return i;
}
/**
 * Try to find the position at which the property name of the final property
 * access in an expression starts (for example, 2 in 'a.b').
 * Handles '["'-style indexing and the '.%' scope operator as well.
 */
function getPropertyOffset(expr)
{
    // '["'-style access: the property text starts right after the quote.
    var bracketPos = expr.lastIndexOf("[");
    if (bracketPos !== -1 && /^" *$/.test(expr.substr(bracketPos+1)))
        return bracketPos+2;

    // '.' or '.%' access: property starts after the separator. With no dot
    // at all this yields 0 (the whole expression is the property).
    var dotPos = expr.lastIndexOf(".");
    var isScopeAccess = (dotPos !== -1 && expr.charAt(dotPos+1) === "%");
    return dotPos + (isScopeAccess ? 2 : 1);
}
/**
 * Get the index of the last non-whitespace character in the range [0, from)
 * in str, or -1 if there is none. Only the space character counts as
 * whitespace here (input is pre-normalized).
 */
function prevNonWs(str, from)
{
    var i = from;
    while (--i >= 0)
    {
        if (str.charAt(i) !== " ")
            return i;
    }
    return -1;
}
/**
 * Find the start of a word consisting of characters matching reJSChar, if
 * str[from] is the last character in the word. (This can be used together
 * with prevNonWs to traverse words backwards from a position.)
 */
function prevWord(str, from)
{
    var i = from;
    while (--i >= 0)
    {
        if (!reJSChar.test(str.charAt(i)))
            return i+1;
    }
    return 0;
}
// True when the token starting at 'pos' is the name in a function
// definition, i.e. is directly preceded by the keyword "function " that is
// itself not part of a longer identifier.
function isFunctionName(expr, pos)
{
    var kwStart = pos - 9;
    if (kwStart < 0 || expr.substr(kwStart, 9) !== "function ")
        return false;
    return (kwStart === 0 || !reJSChar.test(expr.charAt(kwStart-1)));
}
// Scan backwards from 'from' (assumed to sit just past a closing bracket)
// and return the index of the matching opening bracket, or -1 if unmatched.
function bwFindMatchingParen(expr, from)
{
    var depth = 1;
    for (var i = from-1; i >= 0; --i)
    {
        var ch = expr.charAt(i);
        if (reCloseBracket.test(ch))
        {
            ++depth;
        }
        else if (reOpenBracket.test(ch))
        {
            if (--depth === 0)
                return i;
        }
    }
    return -1;
}
/**
 * Check if a '/' at the end of 'expr' would be a regex or a division.
 * May also return null if the expression seems invalid.
 *
 * Works on simplified text (see simplifyExpr) and decides from the last
 * non-space token: after a keyword like 'return' a regex follows; after a
 * value (identifier, ']' ...) a division follows; after ')' it depends on
 * whether the parenthesis belongs to a statement head or function header.
 *
 * @param {string} expr  simplified expression preceding the '/'
 * @return {?boolean} true → regex, false → division, null → invalid input
 */
function endingDivIsRegex(expr)
{
    // Keywords that can be directly followed by an expression (→ regex).
    var kwActions = ["throw", "return", "in", "instanceof", "delete", "new",
        "do", "else", "typeof", "void", "yield"];
    // Statement keywords whose '(...)' head can be followed by a regex.
    var kwCont = ["function", "if", "while", "for", "switch", "catch", "with"];

    var ind = prevNonWs(expr, expr.length), ch = (ind === -1 ? "{" : expr.charAt(ind));
    if (reJSChar.test(ch))
    {
        // Test if the previous word is a keyword usable like 'kw <expr>'.
        // If so, we have a regex, otherwise, we have a division (a variable
        // or literal being divided by something).
        var w = expr.substring(prevWord(expr, ind), ind+1);
        return (kwActions.indexOf(w) !== -1);
    }
    else if (ch === ")")
    {
        // We have a regex in the cases 'if (...) /blah/' and 'function name(...) /blah/'.
        ind = bwFindMatchingParen(expr, ind);
        if (ind === -1)
            return null;
        ind = prevNonWs(expr, ind);
        if (ind === -1)
            return false;
        if (!reJSChar.test(expr.charAt(ind)))
            return false;
        var wind = prevWord(expr, ind);
        if (kwCont.indexOf(expr.substring(wind, ind+1)) !== -1)
            return true;
        return isFunctionName(expr, wind);
    }
    else if (ch === "]")
    {
        // Array indexing / literal yields a value → division.
        return false;
    }
    // Any other preceding character (operator, '{', start of input): regex.
    return true;
}
// Check if a "{" in an expression is an object declaration, as opposed to a
// statement block, by inspecting the character that precedes it.
function isObjectDecl(expr, pos)
{
    var ind = prevNonWs(expr, pos);
    if (ind === -1)
        return false;
    var prev = expr.charAt(ind);
    // Blocks follow ')', '{', '}' or ';'; anything else means object literal.
    return (prev !== ")" && prev !== "{" && prev !== "}" && prev !== ";");
}
// Check whether the text after the last comma (or after 'start', whichever
// is later) is a valid, unquoted property name, ignoring leading spaces.
function isCommaProp(expr, start)
{
    var beg = Math.max(expr.lastIndexOf(",")+1, start);
    while (expr.charAt(beg) === " ")
        ++beg;
    return isValidProperty(expr.substr(beg));
}
/**
 * Produce a simplified copy of 'expr' where the contents of string and
 * regex literals are replaced with spaces and their delimiters normalized
 * to '"', so later parsing can work character-by-character. Also verifies
 * bracket nesting.
 *
 * @param {string} expr  raw command-line text
 * @return {?string} the simplified text (same length as the input), or null
 *     when the expression is malformed (mismatched brackets, or a '/' whose
 *     regex/division role cannot be decided).
 */
function simplifyExpr(expr)
{
    var ret = "", len = expr.length, instr = false, strend, inreg = false, inclass, brackets = [];

    for (var i = 0; i < len; ++i)
    {
        var ch = expr.charAt(i);
        if (instr)
        {
            // Inside a string literal: blank out contents; honor escapes.
            if (ch === strend)
            {
                ret += '"';
                instr = false;
            }
            else
            {
                if (ch === "\\" && i+1 !== len)
                {
                    ret += " ";
                    ++i;
                }
                ret += " ";
            }
        }
        else if (inreg)
        {
            // Inside a regex literal: '/' only terminates outside a [...] class.
            if (inclass && ch === "]")
                inclass = false;
            else if (!inclass && ch === "[")
                inclass = true;
            else if (!inclass && ch === "/")
            {
                // End of regex, eat regex flags
                inreg = false;
                while (i+1 !== len && reJSChar.test(expr.charAt(i+1)))
                {
                    ret += " ";
                    ++i;
                }
                ret += '"';
            }

            if (inreg)
            {
                if (ch === "\\" && i+1 !== len)
                {
                    ret += " ";
                    ++i;
                }
                ret += " ";
            }
        }
        else
        {
            if (ch === "'" || ch === '"')
            {
                // String start: normalize the delimiter to '"'.
                instr = true;
                strend = ch;
                ret += '"';
            }
            else if (ch === "/")
            {
                // Disambiguate regex start vs. division from prior context.
                var re = endingDivIsRegex(ret);
                if (re === null)
                    return null;
                if (re)
                {
                    inreg = true;
                    ret += '"';
                }
                else
                    ret += "/";
            }
            else
            {
                if (reOpenBracket.test(ch))
                    brackets.push(ch);
                else if (reCloseBracket.test(ch))
                {
                    // Check for mismatched brackets
                    if (!brackets.length)
                        return null;
                    var br = brackets.pop();
                    if (br === "(" && ch !== ")")
                        return null;
                    if (br === "[" && ch !== "]")
                        return null;
                    if (br === "{" && ch !== "}")
                        return null;
                }
                ret += ch;
            }
        }
    }
    return ret;
}
/**
 * Check if auto-completion should be killed (suppressed) for the current
 * input. Works on the simplified expression 'expr' (see simplifyExpr);
 * 'origExpr' is the raw text, used only to distinguish regexes from
 * strings. Returns true when there is nothing completable at the end, or
 * when the caret sits in a position where a *new* name is being defined
 * (function name, function parameter, object-literal key, var list).
 */
function killCompletions(expr, origExpr)
{
    // Make sure there is actually something to complete at the end.
    if (expr.length === 0)
        return true;

    if (reJSChar.test(expr[expr.length-1]) ||
        expr.slice(-1) === "." ||
        expr.slice(-2) === ".%")
    {
        // An expression at the end - we're fine.
    }
    else
    {
        var lastBr = expr.lastIndexOf("[");
        if (lastBr !== -1 && /^" *$/.test(expr.substr(lastBr+1)) &&
            origExpr.charAt(lastBr+1) !== "/")
        {
            // Array completions - we're fine.
        }
        else {
            return true;
        }
    }

    // Check for 'function i'.
    var ind = expr.lastIndexOf(" ");
    if (isValidProperty(expr.substr(ind+1)) && isFunctionName(expr, ind+1))
        return true;

    // Check for '{prop: ..., i'.
    var bwp = bwFindMatchingParen(expr, expr.length);
    if (bwp !== -1 && expr.charAt(bwp) === "{" &&
        isObjectDecl(expr, bwp) && isCommaProp(expr, bwp+1))
    {
        return true;
    }

    // Check for 'var prop..., i'.
    var vind = expr.lastIndexOf("var ");
    if (bwp < vind && isCommaProp(expr, vind+4))
    {
        // Note: This doesn't strictly work, because it kills completions even
        // when we have started a new expression and used the comma operator
        // in it (ie. 'var a; a, i'). This happens very seldom though, so it's
        // not really a problem.
        return true;
    }

    // Check for 'function f(i' (walk outwards to the innermost '(').
    while (bwp !== -1 && expr.charAt(bwp) !== "(")
    {
        bwp = bwFindMatchingParen(expr, bwp);
    }
    if (bwp !== -1)
    {
        // Check if the word before the '(' (or the word before that, to
        // cover 'function name(') is the 'function' keyword.
        var ind = prevNonWs(expr, bwp);
        if (ind !== -1)
        {
            var stw = prevWord(expr, ind);
            if (expr.substring(stw, ind+1) === "function")
                return true;

            ind = prevNonWs(expr, stw);
            if (ind !== -1 && expr.substring(prevWord(expr, ind), ind+1) === "function")
                return true;
        }
    }
    return false;
}
// Types the autocompletion knows about, some of their non-enumerable properties,
// and the return types of some member functions, included in the Firebug.CommandLine
// object to make it more easily extensible.
// Entry format: "name": "Type" for value members, "name": "|Type" for
// callable members returning Type; "void" is the catch-all/unknown type.
var AutoCompletionKnownTypes = {
    "void": {
        "_fb_ignorePrototype": true
    },
    "Array": {
        "pop": "|void",
        "push": "|void",
        "shift": "|void",
        "unshift": "|void",
        "reverse": "|Array",
        "sort": "|Array",
        "splice": "|Array",
        "concat": "|Array",
        "slice": "|Array",
        "join": "|String",
        "indexOf": "|Number",
        "lastIndexOf": "|Number",
        "filter": "|Array",
        "map": "|Array",
        "reduce": "|void",
        "reduceRight": "|void",
        "every": "|void",
        "forEach": "|void",
        "some": "|void",
        "length": "Number"
    },
    "String": {
        "_fb_contType": "String",
        "split": "|Array",
        "substr": "|String",
        "substring": "|String",
        "charAt": "|String",
        // Bug fix: charCodeAt returns a character *code* (a Number), not a
        // String — was previously listed as "|String".
        "charCodeAt": "|Number",
        "concat": "|String",
        "indexOf": "|Number",
        "lastIndexOf": "|Number",
        "localeCompare": "|Number",
        "match": "|Array",
        "search": "|Number",
        "slice": "|String",
        "replace": "|String",
        "toLowerCase": "|String",
        "toLocaleLowerCase": "|String",
        "toUpperCase": "|String",
        "toLocaleUpperCase": "|String",
        "trim": "|String",
        "length": "Number"
    },
    "RegExp": {
        "test": "|void",
        "exec": "|Array",
        "lastIndex": "Number",
        "ignoreCase": "void",
        "global": "void",
        "multiline": "void",
        "source": "String"
    },
    "Date": {
        "getTime": "|Number",
        "getYear": "|Number",
        "getFullYear": "|Number",
        "getMonth": "|Number",
        "getDate": "|Number",
        "getDay": "|Number",
        "getHours": "|Number",
        "getMinutes": "|Number",
        "getSeconds": "|Number",
        "getMilliseconds": "|Number",
        "getUTCFullYear": "|Number",
        "getUTCMonth": "|Number",
        "getUTCDate": "|Number",
        "getUTCDay": "|Number",
        "getUTCHours": "|Number",
        "getUTCMinutes": "|Number",
        "getUTCSeconds": "|Number",
        "getUTCMilliseconds": "|Number",
        "setTime": "|void",
        "setYear": "|void",
        "setFullYear": "|void",
        "setMonth": "|void",
        "setDate": "|void",
        "setHours": "|void",
        "setMinutes": "|void",
        "setSeconds": "|void",
        "setMilliseconds": "|void",
        "setUTCFullYear": "|void",
        "setUTCMonth": "|void",
        "setUTCDate": "|void",
        "setUTCHours": "|void",
        "setUTCMinutes": "|void",
        "setUTCSeconds": "|void",
        "setUTCMilliseconds": "|void",
        "toUTCString": "|String",
        "toLocaleDateString": "|String",
        "toLocaleTimeString": "|String",
        "toLocaleFormat": "|String",
        "toDateString": "|String",
        "toTimeString": "|String",
        "toISOString": "|String",
        "toGMTString": "|String",
        "toJSON": "|String",
        "toString": "|String",
        "toLocaleString": "|String",
        "getTimezoneOffset": "|Number"
    },
    "Function": {
        "call": "|void",
        "apply": "|void",
        "length": "Number",
        "prototype": "void"
    },
    "HTMLElement": {
        "getElementsByClassName": "|NodeList",
        "getElementsByTagName": "|NodeList",
        "getElementsByTagNameNS": "|NodeList",
        "querySelector": "|HTMLElement",
        "querySelectorAll": "|NodeList",
        "firstChild": "HTMLElement",
        "lastChild": "HTMLElement",
        "firstElementChild": "HTMLElement",
        "lastElementChild": "HTMLElement",
        "parentNode": "HTMLElement",
        "previousSibling": "HTMLElement",
        "nextSibling": "HTMLElement",
        "previousElementSibling": "HTMLElement",
        "nextElementSibling": "HTMLElement",
        "children": "NodeList",
        "childNodes": "NodeList"
    },
    "NodeList": {
        "_fb_contType": "HTMLElement",
        "length": "Number",
        "item": "|HTMLElement",
        "namedItem": "|HTMLElement"
    },
    "Window": {
        "encodeURI": "|String",
        "encodeURIComponent": "|String",
        "decodeURI": "|String",
        "decodeURIComponent": "|String",
        "eval": "|void",
        "parseInt": "|Number",
        "parseFloat": "|Number",
        "isNaN": "|void",
        "isFinite": "|void",
        "NaN": "Number",
        "Math": "Math",
        "undefined": "void",
        "Infinity": "Number"
    },
    "HTMLDocument": {
        "querySelector": "|HTMLElement",
        "querySelectorAll": "|NodeList"
    },
    "Math": {
        "E": "Number",
        "LN2": "Number",
        "LN10": "Number",
        "LOG2E": "Number",
        "LOG10E": "Number",
        "PI": "Number",
        "SQRT1_2": "Number",
        "SQRT2": "Number",
        "abs": "|Number",
        "acos": "|Number",
        "asin": "|Number",
        "atan": "|Number",
        "atan2": "|Number",
        "ceil": "|Number",
        "cos": "|Number",
        "exp": "|Number",
        "floor": "|Number",
        "log": "|Number",
        "max": "|Number",
        "min": "|Number",
        "pow": "|Number",
        "random": "|Number",
        "round": "|Number",
        "sin": "|Number",
        "sqrt": "|Number",
        "tan": "|Number"
    },
    "Number": {
        // There are also toFixed and valueOf, but they are left out because
        // they steal focus from toString by being shorter (in the case of
        // toFixed), and because they are used very seldom.
        "toExponential": "|String",
        "toPrecision": "|String",
        "toLocaleString": "|String",
        "toString": "|String"
    }
};
// Kinds of links in a parsed property-access chain (see evalPropChain).
var LinkType = {
    "PROPERTY": 0,          // plain '.name' access
    "SCOPED_VARS": 1,       // '.%name' scoped-variable access
    "INDEX": 2,             // '[...]' indexing
    "CALL": 3,              // 'name(...)' call, resolved via known types
    "SAFECALL": 4,          // call with literal-only args, safe to evaluate
    "RETVAL_HEURISTIC": 5   // call resolved by inspecting function source
};
// Look up 't' in the table of known types. Returns the type's member
// descriptor object, or null when the type is not modelled.
function getKnownType(t)
{
    var types = AutoCompletionKnownTypes;
    return (types.hasOwnProperty(t) ? types[t] : null);
}
// Parse a member descriptor string: a leading "|" marks a callable member
// whose remainder is its return type; otherwise the string is a value type.
function getKnownTypeInfo(r)
{
    if (r.charAt(0) !== "|")
        return {"val": r};
    return {"val": "Function", "ret": r.substr(1)};
}
// List the member names of a known (faked) type, skipping the internal
// "_fb_"-prefixed bookkeeping entries. Unknown types yield an empty list.
function getFakeCompleteKeys(name)
{
    var type = getKnownType(name);
    if (!type)
        return [];
    var keys = [];
    for (var prop in type)
    {
        if (prop.substr(0, 4) !== "_fb_")
            keys.push(prop);
    }
    return keys;
}
// Advance from 'start' past a run of identifier characters; returns the
// index of the first character that is not part of the property name.
function eatProp(expr, start)
{
    var i = start;
    while (i < expr.length && reJSChar.test(expr.charAt(i)))
        ++i;
    return i;
}
// Find the index of the bracket that closes the opening bracket at 'start',
// or -1 when the expression ends before it is balanced.
function matchingBracket(expr, start)
{
    var depth = 1;
    for (var i = start + 1; i < expr.length; ++i)
    {
        var ch = expr.charAt(i);
        if (reOpenBracket.test(ch))
            ++depth;
        else if (reCloseBracket.test(ch) && --depth === 0)
            return i;
    }
    return -1;
}
/**
 * Return a JavaScript expression for determining the type / [[Class]] of
 * an object given by another JavaScript expression. For DOM nodes, return
 * HTMLElement instead of HTML[node type]Element, for simplicity.
 */
function getTypeExtractionExpression(command)
{
    return "(function() { var v = " + command + "; " +
        "if (window.HTMLElement && v instanceof HTMLElement) return 'HTMLElement'; " +
        "return Object.prototype.toString.call(v).slice(8, -1);})()";
}
// Return a sorted copy of 'ar' with duplicate (adjacent after sorting)
// entries removed; the input array is left untouched.
function sortUnique(ar)
{
    var sorted = ar.slice().sort();
    var out = [];
    for (var i = 0; i < sorted.length; ++i)
    {
        if (i === 0 || sorted[i-1] !== sorted[i])
            out.push(sorted[i]);
    }
    return out;
}
/**
 * Best-effort check whether 'obj' has any closed-over (scoped) variables,
 * as reported by FireClosure. Returns false for primitives and on any
 * failure (e.g. security wrappers), tracing the error when enabled.
 */
function hasScopedVariables(context, obj)
{
    try {
        // Only objects and functions can carry scopes.
        if (typeof obj !== "object" && typeof obj !== "function")
            return false;
        var w = context.window.wrappedJSObject;
        var parts = Firebug.FireClosure.getScopedVariables(w, obj);
        // 'parts' is a list of scopes, each a list of variable names.
        return parts.some(function(part) { return part.length > 0; });
    }
    catch (e) {
        if (FBTrace.DBG_FIRECLOSURE)
            FBTrace.sysout("FireClosure; failed to check for closed over variables", e);
        return false;
    }
}
/**
 * Final step of property-chain evaluation: fill out.complete with the
 * candidate property names for the evaluated expression.
 *
 * For scope completion (.%) the candidates come from FireClosure's scoped
 * variables of the real 'result' object. For "fake" known types the static
 * member table is used, plus (asynchronously) the type's prototype keys.
 * For real objects the enumerable keys are used, and the object's
 * type / [[Class]] is looked up asynchronously to merge in non-enumerable
 * known-type members.
 *
 * @param {Object} out       output record (complete/hasScope/index flags)
 * @param {Object} context   Firebug context used for evaluation
 * @param {Object} tempExpr  chain state ({fake, value} or {command, ...})
 * @param {*=} result        evaluated value (unused when tempExpr.fake)
 */
function propChainBuildComplete(out, context, tempExpr, result)
{
    var complete = null, command = null;
    if (out.scopeCompletion)
    {
        // '.%' completion: list closed-over variables of the real object.
        if (tempExpr.fake)
            return;
        if (typeof result !== "object" && typeof result !== "function")
            return;
        var w = context.window.wrappedJSObject;
        var parts = Firebug.FireClosure.getScopedVariables(w, result);
        complete = Array.prototype.concat.apply([], parts);
        out.complete = sortUnique(complete);
        return;
    }

    if (tempExpr.fake)
    {
        // Known-type pseudo-object: static member list (+ prototype keys,
        // fetched below, unless the type opts out).
        var name = tempExpr.value.val;
        complete = getFakeCompleteKeys(name);
        if (!getKnownType(name)._fb_ignorePrototype)
            command = name + ".prototype";
    }
    else
    {
        if (typeof result === "string")
        {
            // Strings only have indices as properties, use the fake object
            // completions instead.
            tempExpr.fake = true;
            tempExpr.value = getKnownTypeInfo("String");
            propChainBuildComplete(out, context, tempExpr);
            return;
        }
        else if (FirebugReps.Arr.isArray(result, context.window))
            complete = nonNumericKeys(result);
        else
            complete = Arr.keys(result);
        command = getTypeExtractionExpression(tempExpr.command);
        out.hasScope = hasScopedVariables(context, result);
    }

    // Finalize the candidate list (possibly after the async lookup below).
    var done = function()
    {
        if (out.indexCompletion)
        {
            // '["'-style completion: emit quoted/escaped keys ending in '"]'.
            complete = complete.map(function(x)
            {
                x = (out.indexQuoteType === '"') ? Str.escapeJS(x): Str.escapeSingleQuoteJS(x);
                return x + out.indexQuoteType + "]";
            });
        }

        // Properties may be taken from several sources, so filter out duplicates.
        out.complete = sortUnique(complete);
    };

    if (command === null)
    {
        done();
    }
    else
    {
        Firebug.CommandLine.evaluate(command, context, context.thisValue, null,
            function found(result, context)
            {
                if (tempExpr.fake)
                {
                    // 'result' is the type's prototype object here.
                    complete = complete.concat(Arr.keys(result));
                }
                else
                {
                    // 'result' is the [[Class]] name; merge its known members.
                    if (typeof result === "string" && getKnownType(result))
                    {
                        complete = complete.concat(getFakeCompleteKeys(result));
                    }
                }
                done();
            },
            function failed(result, context)
            {
                done();
            }
        );
    }
}
/**
 * Evaluate one link of a parsed property-access chain (see evalPropChain),
 * recursing (possibly asynchronously, via CommandLine.evaluate callbacks)
 * until the chain is exhausted, then calling propChainBuildComplete.
 *
 * Two modes:
 *  - tempExpr.fake: the value is tracked purely through the known-type
 *    tables (AutoCompletionKnownTypes); no evaluation happens.
 *  - real: tempExpr.command is a JavaScript expression built up link by
 *    link; calls are resolved via known types or a source-text heuristic.
 *
 * @param {number} step       index into evalChain of the link to process
 * @param {Object} tempExpr   running state ({fake, value} / {command, thisCommand})
 * @param {Array}  evalChain  parsed links (see LinkType)
 * @param {Object} out        output record handed to propChainBuildComplete
 * @param {Object} context    Firebug context used for evaluation
 */
function evalPropChainStep(step, tempExpr, evalChain, out, context)
{
    if (tempExpr.fake)
    {
        if (step === evalChain.length)
        {
            propChainBuildComplete(out, context, tempExpr);
            return;
        }

        var link = evalChain[step], type = link.type;
        if (type === LinkType.PROPERTY || type === LinkType.INDEX)
        {
            // Use the accessed property if it exists, otherwise abort. It
            // would be possible to continue with a 'real' expression of
            // `tempExpr.value.val`.prototype, but since prototypes seldom
            // contain actual values of things this doesn't work very well.
            var mem = (type === LinkType.INDEX ? "_fb_contType" : link.name);
            var t = getKnownType(tempExpr.value.val);
            if (t.hasOwnProperty(mem))
                tempExpr.value = getKnownTypeInfo(t[mem]);
            else
                return;
        }
        else if (type === LinkType.CALL)
        {
            // Calling a known member: continue with its declared return type.
            if (tempExpr.value.ret)
                tempExpr.value = getKnownTypeInfo(tempExpr.value.ret);
            else
                return;
        }
        else
        {
            // Scoped-variable access is not modelled for fake values.
            return;
        }
        evalPropChainStep(step+1, tempExpr, evalChain, out, context);
    }
    else
    {
        // Real mode: extend tempExpr.command link by link until a call
        // forces us to evaluate something to learn the receiver's type.
        var funcCommand = null, link, type;
        while (step !== evalChain.length)
        {
            link = evalChain[step];
            type = link.type;
            if (type === LinkType.PROPERTY)
            {
                tempExpr.thisCommand = tempExpr.command;
                tempExpr.command += "." + link.name;
            }
            else if (type === LinkType.SCOPED_VARS)
            {
                tempExpr.thisCommand = "window";
                tempExpr.command += ".%" + link.name;
            }
            else if (type === LinkType.INDEX)
            {
                tempExpr.thisCommand = "window";
                tempExpr.command += "[" + link.cont + "]";
            }
            else if (type === LinkType.SAFECALL)
            {
                tempExpr.thisCommand = "window";
                tempExpr.command += "(" + link.origCont + ")";
            }
            else if (type === LinkType.CALL)
            {
                if (link.name === "")
                {
                    // We cannot know about functions without name; try the
                    // heuristic directly.
                    link.type = LinkType.RETVAL_HEURISTIC;
                    evalPropChainStep(step, tempExpr, evalChain, out, context);
                    return;
                }
                // Need the receiver's type to look up the member's return type.
                funcCommand = getTypeExtractionExpression(tempExpr.thisCommand);
                break;
            }
            else if (type === LinkType.RETVAL_HEURISTIC)
            {
                if (link.origCont !== null &&
                    (link.name.substr(0, 3) === "get" ||
                        (link.name.charAt(0) === "$" && link.cont.indexOf(",") === -1)))
                {
                    // Names beginning with get or $ are almost always getters, so
                    // assume it is a safecall and start over.
                    link.type = LinkType.SAFECALL;
                    evalPropChainStep(step, tempExpr, evalChain, out, context);
                    return;
                }
                // Fetch the function's source text for the heuristics below.
                funcCommand = "Function.prototype.toString.call(" + tempExpr.command + ")";
                break;
            }
            ++step;
        }

        var func = (funcCommand !== null), command = (func ? funcCommand : tempExpr.command);
        Firebug.CommandLine.evaluate(command, context, context.thisValue, null,
            function found(result, context)
            {
                if (func)
                {
                    if (type === LinkType.CALL)
                    {
                        // 'result' is the receiver's [[Class]] name.
                        if (typeof result !== "string")
                            return;
                        var t = getKnownType(result);
                        if (t && t.hasOwnProperty(link.name))
                        {
                            var propVal = getKnownTypeInfo(t[link.name]);

                            // Make sure the property is a callable function
                            if (!propVal.ret)
                                return;

                            tempExpr.fake = true;
                            tempExpr.value = getKnownTypeInfo(propVal.ret);
                            evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                        }
                        else
                        {
                            // Unknown 'this' type or function name, use
                            // heuristics on the function instead.
                            link.type = LinkType.RETVAL_HEURISTIC;
                            evalPropChainStep(step, tempExpr, evalChain, out, context);
                        }
                    }
                    else if (type === LinkType.RETVAL_HEURISTIC)
                    {
                        // 'result' is the function's source text.
                        if (typeof result !== "string")
                            return;

                        // Perform some crude heuristics for figuring out the
                        // return value of a function based on its contents.
                        // It's certainly not perfect, and it's easily fooled
                        // into giving wrong results,  but it might work in
                        // some common cases.

                        // Check for chaining functions. This is done before
                        // checking for nested functions, because completing
                        // results of member functions containing nested
                        // functions that use 'return this' seems uncommon,
                        // and being wrong is not a huge problem.
                        if (result.indexOf("return this;") !== -1)
                        {
                            tempExpr.command = tempExpr.thisCommand;
                            tempExpr.thisCommand = "window";
                            evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                            return;
                        }

                        // Don't support nested functions.
                        if (result.lastIndexOf("function") !== 0)
                            return;

                        // Check for arrays.
                        if (result.indexOf("return [") !== -1)
                        {
                            tempExpr.fake = true;
                            tempExpr.value = getKnownTypeInfo("Array");
                            evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                            return;
                        }

                        // Check for 'return new Type(...);', and use the
                        // prototype as a pseudo-object for those (since it
                        // is probably not a known type that we can fake).
                        var newPos = result.indexOf("return new ");
                        if (newPos !== -1)
                        {
                            var rest = result.substr(newPos + 11),
                                epos = rest.search(/[^a-zA-Z0-9_$.]/);
                            if (epos !== -1)
                            {
                                rest = rest.substring(0, epos);
                                tempExpr.command = rest + ".prototype";
                                evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                                return;
                            }
                        }
                    }
                }
                else
                {
                    // Whole chain evaluated: build the completion list.
                    propChainBuildComplete(out, context, tempExpr, result);
                }
            },
            function failed(result, context) { }
        );
    }
}
/**
 * Parse the simplified expression 'preExpr' into a chain of links
 * (property accesses, scoped-variable accesses, indexings, calls — see
 * LinkType), then kick off evaluation via evalPropChainStep.
 *
 * @param {Object} out       output record handed through to completion
 * @param {string} preExpr   simplified expression (see simplifyExpr)
 * @param {string} origExpr  raw expression, used for literal contents
 * @param {Object} context   Firebug context used for evaluation
 * @return {boolean} false when the expression is too complicated or
 *     syntactically invalid to parse; true if evaluation was started
 */
function evalPropChain(out, preExpr, origExpr, context)
{
    var evalChain = [], linkStart = 0, len = preExpr.length, lastProp = "";
    var tempExpr = {"fake": false, "command": "window", "thisCommand": "window"};
    while (linkStart !== len)
    {
        var ch = preExpr.charAt(linkStart);
        if (linkStart === 0)
        {
            // First segment: determine the base value of the chain.
            if (preExpr.substr(0, 4) === "new ")
            {
                // 'new Type(...)': treat the result as Type.prototype.
                var parInd = preExpr.indexOf("(");
                tempExpr.command = preExpr.substring(4, parInd) + ".prototype";
                linkStart = matchingBracket(preExpr, parInd) + 1;
            }
            else if (ch === "[")
            {
                // Array literal: known fake type.
                tempExpr.fake = true;
                tempExpr.value = getKnownTypeInfo("Array");
                linkStart = matchingBracket(preExpr, linkStart) + 1;
            }
            else if (ch === '"')
            {
                // Normalized literal: the raw text tells string from regex.
                var isRegex = (origExpr.charAt(0) === "/");
                tempExpr.fake = true;
                tempExpr.value = getKnownTypeInfo(isRegex ? "RegExp" : "String");
                linkStart = preExpr.indexOf('"', 1) + 1;
            }
            else if (!isNaN(ch))
            {
                // The expression is really a decimal number.
                return false;
            }
            else if (reJSChar.test(ch))
            {
                // The expression begins with a regular property name
                var nextLink = eatProp(preExpr, linkStart);
                lastProp = preExpr.substring(linkStart, nextLink);
                linkStart = nextLink;
                tempExpr.command = lastProp;
            }

            // Syntax error (like '.') or a too complicated expression.
            if (linkStart === 0)
                return false;
        }
        else
        {
            if (ch === ".")
            {
                // Property access
                var scope = (preExpr.charAt(linkStart+1) === "%");
                linkStart += (scope ? 2 : 1);
                var nextLink = eatProp(preExpr, linkStart);
                lastProp = preExpr.substring(linkStart, nextLink);
                linkStart = nextLink;
                evalChain.push({
                    "type": (scope ? LinkType.SCOPED_VARS : LinkType.PROPERTY),
                    "name": lastProp
                });
            }
            else if (ch === "(")
            {
                // Function call. Save the function name and the arguments if
                // they are safe to evaluate.
                var endCont = matchingBracket(preExpr, linkStart);
                var cont = preExpr.substring(linkStart+1, endCont), origCont = null;
                if (reLiteralExpr.test(cont))
                    origCont = origExpr.substring(linkStart+1, endCont);
                linkStart = endCont + 1;
                evalChain.push({
                    "type": LinkType.CALL,
                    "name": lastProp,
                    "origCont": origCont,
                    "cont": cont
                });

                lastProp = "";
            }
            else if (ch === "[")
            {
                // Index. Use the supplied index if it is a literal; otherwise
                // it is probably a loop index with a variable not yet defined
                // (like 'for(var i = 0; i < ar.length; ++i) ar[i].prop'), and
                // '0' seems like a reasonably good guess at a valid index.
                var endInd = matchingBracket(preExpr, linkStart);
                var ind = preExpr.substring(linkStart+1, endInd);
                if (reLiteralExpr.test(ind))
                    ind = origExpr.substring(linkStart+1, endInd);
                else
                    ind = "0";
                linkStart = endInd+1;
                evalChain.push({"type": LinkType.INDEX, "cont": ind});
                lastProp = "";
            }
            else
            {
                // Syntax error
                return false;
            }
        }
    }

    evalPropChainStep(0, tempExpr, evalChain, out, context);
    return true;
}
/**
 * Compute completion candidates for 'preExpr' and store them on 'base'
 * (base.candidates, base.hasScope). With a non-empty simplified expression
 * 'spreExpr', completes members of that expression (property / index /
 * scope completion); otherwise completes variables visible in the current
 * scope (debugger frame, content window, or sandbox global).
 *
 * Note: candidates may be filled in asynchronously by the evaluation
 * machinery; 'out.complete' is shared with the callbacks.
 *
 * @param {Object}  base                 completion base to populate
 * @param {Object}  context              Firebug context
 * @param {string}  preExpr              raw expression before the property
 * @param {string}  spreExpr             simplified version of preExpr
 * @param {boolean} includeCurrentScope  complete from the paused frame
 */
function autoCompleteEval(base, context, preExpr, spreExpr, includeCurrentScope)
{
    var out = {};
    out.complete = [];
    out.hasScope = false;

    try
    {
        if (spreExpr)
        {
            // Complete member variables of some .-chained expression
            // In case of array indexing, remove the bracket and set a flag to
            // escape completions.
            out.indexCompletion = false;
            out.scopeCompletion = false;
            var len = spreExpr.length;
            if (len >= 2 && spreExpr[len-2] === "[" && spreExpr[len-1] === '"')
            {
                out.indexCompletion = true;
                // Take the quote character from the raw text (simplified
                // text normalizes both quote styles to '"').
                out.indexQuoteType = preExpr[len-1];
                len -= 2;
            }
            else if (spreExpr.slice(-2) === ".%")
            {
                out.scopeCompletion = true;
                len -= 2;
            }
            else
            {
                len -= 1;
            }
            spreExpr = spreExpr.substr(0, len);
            preExpr = preExpr.substr(0, len);

            if (FBTrace.DBG_COMMANDLINE)
                FBTrace.sysout("commandLine.autoCompleteEval pre:'" + preExpr +
                    "' spre:'" + spreExpr + "'.");

            // Don't auto-complete '.'.
            if (spreExpr === "")
                return;

            evalPropChain(out, spreExpr, preExpr, context);
        }
        else
        {
            // Complete variables from the local scope

            var contentView = Wrapper.getContentView(context.window);
            if (context.stopped && includeCurrentScope)
            {
                out.complete = Firebug.Debugger.getCurrentFrameKeys(context);
            }
            else if (contentView && contentView.Window &&
                contentView.constructor.toString() === contentView.Window.toString())
                // Cross window type pseudo-comparison
            {
                out.complete = Arr.keys(contentView); // return is safe

                // Add some known window properties
                out.complete = out.complete.concat(getFakeCompleteKeys("Window"));
            }
            else // hopefully sandbox in Chromebug
            {
                out.complete = Arr.keys(context.global);
            }

            // Sort the completions, and avoid duplicates.
            out.complete = sortUnique(out.complete);
        }
    }
    catch (exc)
    {
        if (FBTrace.DBG_ERRORS && FBTrace.DBG_COMMANDLINE)
            FBTrace.sysout("commandLine.autoCompleteEval FAILED", exc);
    }

    base.candidates = out.complete;
    base.hasScope = out.hasScope;
}
// A valid unquoted JavaScript identifier: letter/$/_ followed by
// letters, digits, '$' or '_'.
var reValidJSToken = /^[A-Za-z_$][A-Za-z_$0-9]*$/;

// A property can be offered/displayed unquoted only when it is a string
// forming a valid identifier (e.g. object["my prop"] — note the space —
// would need quotation and is rejected here).
function isValidProperty(value)
{
    return (typeof value === "string") && reValidJSToken.test(value);
}
const rePositiveNumber = /^[1-9][0-9]*$/;

// Enumerate the keys of 'map', leaving out array-like numeric indices
// ("0", "1", ...). Keys will be on user-level window objects, so the
// enumeration itself may throw; in that case the keys gathered so far
// are returned.
function nonNumericKeys(map)
{
    var keys = [];
    try
    {
        for (var name in map) // enumeration is safe
        {
            var isIndex = (name === "0" || rePositiveNumber.test(name));
            if (!isIndex)
                keys.push(name);
        }
    }
    catch (exc)
    {
        // Sometimes we get exceptions trying to iterate properties.
    }
    return keys; // return is safe
}
// ********************************************************************************************* //
// Registration
return Firebug.JSAutoCompleter;
// ********************************************************************************************* //
});
| content/autoCompleter.js | /* See license.txt for terms of usage */
define([
"firebug/lib/object",
"firebug/firebug",
"firebug/chrome/reps",
"firebug/lib/locale",
"firebug/lib/events",
"firebug/lib/wrapper",
"firebug/lib/dom",
"firebug/lib/string",
"firebug/lib/array",
"firebug/console/autoCompleter",
],
function(Obj, Firebug, FirebugReps, Locale, Events, Wrapper, Dom, Str, Arr) {
// ********************************************************************************************* //
// Constants
// Matches any opening bracket: '[', '(' or '{'.
const reOpenBracket = /[\[\(\{]/;
// Matches any closing bracket: ']', ')' or '}'.
const reCloseBracket = /[\]\)\}]/;
// A character that may appear in a JavaScript identifier.
const reJSChar = /[a-zA-Z0-9$_]/;
// An argument list that is side-effect-free to evaluate (numbers, strings,
// commas and spaces only).
const reLiteralExpr = /^[ "0-9,]*$/;
// ********************************************************************************************* //
// JavaScript auto-completion
Firebug.JSAutoCompleter = function(textBox, completionBox, options)
{
this.textBox = textBox;
this.completionBox = completionBox;
this.options = options;
this.showCompletionPopup = options.completionPopup;
this.completionBase = {
pre: null,
expr: null,
candidates: []
};
this.completions = null;
this.revertValue = null;
this.completionPopup = Firebug.chrome.$("fbCommandLineCompletionList");
this.selectedPopupElement = null;
/**
 * If a completion was just performed, revert it. Otherwise do nothing.
 * Returns true iff the completion was reverted.
 */
this.revert = function(context)
{
    if (this.revertValue === null)
        return false;

    this.textBox.value = this.revertValue;
    // (Removed a dead local that computed the value length without using it.)
    setCursorToEOL(this.textBox);

    this.complete(context);
    return true;
};
/**
 * Hide completions temporarily, so they show up again on the next key press.
 */
this.hide = function()
{
    // Reset the completion base to an empty state and clear the current
    // completion list, then redraw (which hides the box/popup).
    this.completionBase = {pre: null, expr: null, candidates: []};
    this.completions = null;
    this.showCompletions();
};
/**
 * Hide completions for this expression (/completion base). Appending further
 * characters to the variable name will not make completions appear, but
 * adding, say, a semicolon and typing something else will.
 */
this.hideForExpression = function()
{
    this.completions = null;
    this.completionBase.candidates = [];
    this.showCompletions();
};
/**
 * Check whether it would be acceptable for the return key to evaluate the
 * expression instead of completing things.
 */
this.acceptReturn = function()
{
    // With no completions at all, return should always evaluate.
    if (!this.completions)
        return true;

    // If completing would not visibly change the input (e.g. you typed
    // 'alert' and pressed enter, regardless of other candidates), the
    // user would see no difference — let return evaluate.
    return (this.getCompletionBoxValue() === this.textBox.value);
};
/**
 * Show completions for the current contents of the text box. Either this or
 * hide() must be called when the contents change.
 */
this.complete = function(context)
{
    // Recomputing candidates invalidates any pending revert state.
    this.revertValue = null;
    this.createCandidates(context);
    this.showCompletions();
};
/**
 * Update the completion base and create completion candidates for the
 * current value of the text box.
 *
 * Splits the input into (pre, expression, property-prefix), re-evaluates
 * candidates only when the expression part changed, and finally filters
 * candidates by the property prefix via createCompletions.
 */
this.createCandidates = function(context)
{
    // Completion only makes sense with the caret at the very end.
    var offset = this.textBox.selectionStart;
    if (offset !== this.textBox.value.length)
    {
        this.hide();
        return;
    }

    var value = this.textBox.value;

    // Create a simplified expression by redacting contents/normalizing
    // delimiters of strings and regexes, to make parsing easier.
    // Give up if the syntax is too weird.
    var svalue = simplifyExpr(value);
    if (svalue === null)
    {
        this.hide();
        return;
    }

    if (killCompletions(svalue, value))
    {
        this.hide();
        return;
    }

    // Find the expression to be completed.
    var parseStart = getExpressionOffset(svalue);
    var parsed = value.substr(parseStart);
    var sparsed = svalue.substr(parseStart);

    // Find which part of it represents the property access.
    var propertyStart = getPropertyOffset(sparsed);
    var prop = parsed.substring(propertyStart);
    var spreExpr = sparsed.substr(0, propertyStart);
    var preExpr = parsed.substr(0, propertyStart);

    this.completionBase.pre = value.substr(0, parseStart);

    if (FBTrace.DBG_COMMANDLINE)
    {
        var sep = (parsed.indexOf("|") > -1) ? "^" : "|";
        FBTrace.sysout("Completing: " + this.completionBase.pre + sep + preExpr + sep + prop);
    }

    // We only need to calculate a new candidate list if the expression has
    // changed (we can ignore this.completionBase.pre since completions do not
    // depend upon that).
    if (preExpr !== this.completionBase.expr)
    {
        this.completionBase.expr = preExpr;
        this.completionBase.candidates = [];
        this.completionBase.hasScope = false;
        autoCompleteEval(this.completionBase, context, preExpr, spreExpr,
            this.options.includeCurrentScope);
    }

    this.createCompletions(prop);
};
/**
 * From a valid completion base, create a list of completions (containing
 * those completion candidates that share a prefix with the user's input)
 * and a default completion.
 */
this.createCompletions = function(prefix)
{
    var matches = [];

    // Never complete the empty input "".
    if (this.completionBase.expr || prefix)
    {
        var candidates = this.completionBase.candidates;
        for (var i = 0; i < candidates.length; ++i)
        {
            if (Str.hasPrefix(candidates[i], prefix))
                matches.push(candidates[i]);
        }
    }

    if (!matches.length)
    {
        this.completions = null;
        return;
    }

    this.completions = {
        list: matches,
        prefix: prefix
    };
    this.pickDefaultCandidate();
};
/**
 * Choose a default candidate from the list of completions. The shortest
 * completion is used, so that completions disappear when typing a
 * variable name that is also the prefix of another.
 */
this.pickDefaultCandidate = function()
{
    var list = this.completions.list;
    var best = 0;
    for (var i = 1; i < list.length; ++i)
    {
        if (list[i].length < list[best].length)
            best = i;
    }
    this.completions.index = best;
};
/**
 * Go backward or forward one step in the list of completions, wrapping
 * around at either end. dir is the relative movement; -1 means backward
 * and 1 forward.
 */
this.cycle = function(dir)
{
    var len = this.completions.list.length;
    var ind = this.completions.index + dir;
    if (ind >= len)
        ind = 0;
    else if (ind < 0)
        ind = len - 1;
    this.completions.index = ind;
    this.showCompletions();
};
/**
 * Get the property name that is currently selected as a completion (or
 * null if there is none).
 */
this.getCurrentCompletion = function()
{
    if (!this.completions)
        return null;
    return this.completions.list[this.completions.index];
};
/**
 * Tell whether any completions are currently available.
 */
this.hasCompletions = function()
{
    return Boolean(this.completions);
};
/**
 * Get the value the completion box should have for some value of the
 * text box and a selected completion. Returns "" when no completion is
 * selected.
 */
this.getCompletionBoxValue = function()
{
    var completion = this.getCurrentCompletion();
    if (completion === null)
        return "";

    return this.completionBase.pre + this.completionBase.expr + completion;
};
/**
 * Update the completion box and popup to be consistent with the current
 * state of the auto-completer.
 */
this.showCompletions = function()
{
    this.completionBox.value = this.getCompletionBoxValue();

    // Count the rows the popup would show (the scope-members hint line
    // counts as one); only open it when there is an actual choice.
    var count = 0;
    if (this.completions)
    {
        count = this.completions.list.length;
        if (this.completionBase.hasScope)
            ++count;
    }

    if (this.showCompletionPopup && count > 1)
        this.popupCandidates();
    else
        this.closePopup();
};
/**
 * Handle a keypress event. Returns true if the auto-completer used up
 * the event and does not want it to propagate further.
 */
this.handleKeyPress = function(event, context)
{
    // Any key press dismisses a pending tab warning; remember whether one
    // was actually cleared so a second tab can fall through below.
    var clearedTabWarning = this.clearTabWarning();

    if (Events.isAlt(event))
        return false;

    if (event.keyCode === KeyEvent.DOM_VK_TAB &&
        !Events.isControl(event) && this.textBox.value !== "")
    {
        if (this.completions)
        {
            this.acceptCompletion();
            Events.cancelEvent(event);
            return true;
        }
        else if (this.options.tabWarnings)
        {
            if (clearedTabWarning)
            {
                // Send tab along if the user was warned.
                return false;
            }
            this.setTabWarning();
            Events.cancelEvent(event);
            return true;
        }
    }
    else if (event.keyCode === KeyEvent.DOM_VK_RETURN && !this.acceptReturn())
    {
        // Completion on return, when one is user-visible.
        this.acceptCompletion();
        Events.cancelEvent(event);
        return true;
    }
    else if (event.keyCode === KeyEvent.DOM_VK_RIGHT && this.completions &&
        this.textBox.selectionStart === this.textBox.value.length)
    {
        // Complete on right arrow at end of line.
        this.acceptCompletion();
        Events.cancelEvent(event);
        return true;
    }
    else if (event.keyCode === KeyEvent.DOM_VK_ESCAPE)
    {
        if (this.completions)
        {
            this.hideForExpression();
            Events.cancelEvent(event);
            return true;
        }
        else
        {
            // There are no visible completions, but we might still be able to
            // revert a recently performed completion.
            if (this.revert(context))
            {
                Events.cancelEvent(event);
                return true;
            }
        }
    }
    else if (event.keyCode === KeyEvent.DOM_VK_UP || event.keyCode === KeyEvent.DOM_VK_DOWN)
    {
        if (this.completions)
        {
            // Arrow keys cycle through the candidate list.
            this.cycle((event.keyCode === KeyEvent.DOM_VK_UP ? -1 : 1));
            Events.cancelEvent(event);
            return true;
        }
    }
    return false;
};
/**
 * Handle a keydown event.
 */
this.handleKeyDown = function(event, context)
{
    // Close the completion popup on escape in keydown, so that the popup
    // does not close itself and prevent event propagation on keypress.
    if (event.keyCode === KeyEvent.DOM_VK_ESCAPE && this.completions)
        this.closePopup();
};
/**
 * Remove a previously shown tab warning, if any. Returns true when a
 * warning was actually cleared.
 */
this.clearTabWarning = function()
{
    if (!this.tabWarning)
        return false;

    this.completionBox.value = "";
    delete this.tabWarning;
    return true;
};
/**
 * Show a warning in the completion box telling the user there is nothing
 * to complete (displayed on the first un-completable tab press).
 */
this.setTabWarning = function()
{
    var warning = Locale.$STR("firebug.completion.empty");
    this.completionBox.value = this.textBox.value + " " + warning;
    this.tabWarning = true;
};
/**
 * Accept the currently shown completion in the text box, remembering the
 * previous contents so the completion can be reverted.
 */
this.acceptCompletion = function()
{
    var previous = this.textBox.value;

    var completion = adjustCompletionOnAccept(this.completionBase.pre,
        this.completionBase.expr, this.getCurrentCompletion());

    this.textBox.value = completion;
    setCursorToEOL(this.textBox);
    this.hide();
    this.revertValue = previous;
};
/**
 * (Re)build and open the candidate popup next to the text box, showing at
 * most 40 rows, scrolled manually so the selected candidate stays visible.
 */
this.popupCandidates = function()
{
    var commandCompletionLineLimit = 40;

    // Rebuild the popup contents from scratch.
    Dom.eraseNode(this.completionPopup);
    this.selectedPopupElement = null;

    var vbox = this.completionPopup.ownerDocument.createElement("vbox");
    this.completionPopup.appendChild(vbox);
    vbox.classList.add("fbCommandLineCompletions");

    var title = this.completionPopup.ownerDocument.
        createElementNS("http://www.w3.org/1999/xhtml","div");
    title.innerHTML = Locale.$STR("console.Use Arrow keys or Enter");
    title.classList.add("fbPopupTitle");
    vbox.appendChild(title);

    var escPrefix = Str.escapeForTextNode(this.textBox.value);

    // When completing an unprefixed plain property (not an index or ".%"
    // access) of an object that has closed-over variables, reserve one
    // extra row for the "% for scope members" hint line.
    var listSize = this.completions.list.length;
    if (this.completions.prefix == '' &&
        !/\[['"]/.test(this.completionBase.expr.slice(-2)) &&
        this.completionBase.expr.slice(-2) !== ".%" &&
        this.completionBase.hasScope)
    {
        ++listSize;
    }

    var showTop = 0;
    var showBottom = listSize;
    if (listSize > commandCompletionLineLimit)
    {
        if (this.completions.index <= (commandCompletionLineLimit - 3))
        {
            // We are in the top part of the list.
            showBottom = commandCompletionLineLimit;
        }
        else
        {
            // Implement manual scrolling.
            if (this.completions.index > listSize - 3)
                showBottom = listSize;
            else
                showBottom = this.completions.index + 3;
        }
        showTop = showBottom - commandCompletionLineLimit;
    }

    for (var i = showTop; i < showBottom; i++)
    {
        var hbox = this.completionPopup.ownerDocument.
            createElementNS("http://www.w3.org/1999/xhtml","div");

        if (i == this.completions.list.length) {
            // The extra hint row appended after the real candidates.
            var text = this.completionPopup.ownerDocument.
                createElementNS("http://www.w3.org/1999/xhtml","span");
            text.innerHTML = "% for scope members...";
            text.style.fontStyle = "italic";
            text.style.paddingLeft = "3px";
            text.style.fontSize = "90%";
            text.style.color = "#777";
            hbox.appendChild(text);
        }
        else {
            hbox.completionIndex = i;

            // Each row shows the (escaped) typed prefix followed by the
            // remainder of the candidate.
            var pre = this.completionPopup.ownerDocument.
                createElementNS("http://www.w3.org/1999/xhtml","span");
            pre.innerHTML = escPrefix;
            pre.classList.add("userTypedText");

            var completion = this.completions.list[i].substr(this.completions.prefix.length);
            var post = this.completionPopup.ownerDocument.
                createElementNS("http://www.w3.org/1999/xhtml","span");
            post.innerHTML = Str.escapeForTextNode(completion);
            post.classList.add("completionText");

            if (i === this.completions.index)
                this.selectedPopupElement = hbox;

            hbox.appendChild(pre);
            hbox.appendChild(post);
        }

        vbox.appendChild(hbox);
    }

    if (this.selectedPopupElement)
        this.selectedPopupElement.setAttribute("selected", "true");

    this.completionPopup.openPopup(this.textBox, "before_start", 0, 0, false, false);
};
/**
 * Close the completion popup if it is open. Exceptions from hidePopup are
 * traced rather than propagated.
 */
this.closePopup = function()
{
    // Nothing to do when the popup is already closed.
    if (this.completionPopup.state == "closed")
        return;

    try
    {
        this.completionPopup.hidePopup();
    }
    catch (err)
    {
        // hidePopup can throw while the popup is being torn down.
        if (FBTrace.DBG_ERRORS)
            FBTrace.sysout("Firebug.JSAutoCompleter.closePopup; EXCEPTION " + err, err);
    }
};
/**
 * Map an event on the completion popup to the enclosing candidate row (a
 * div carrying a completionIndex), or null if the event did not hit one.
 */
this.getCompletionPopupElementFromEvent = function(event)
{
    var node = event.target;
    while (node && node.localName !== "div")
        node = node.parentNode;

    if (node && typeof node.completionIndex !== "undefined")
        return node;
    return null;
};
/**
 * Select the candidate row under the mouse and preview it in the
 * completion box, without accepting it yet.
 */
this.popupMousedown = function(event)
{
    var row = this.getCompletionPopupElementFromEvent(event);
    if (!row)
        return;

    if (this.selectedPopupElement)
        this.selectedPopupElement.removeAttribute("selected");

    this.selectedPopupElement = row;
    row.setAttribute("selected", "true");

    this.completions.index = row.completionIndex;
    this.completionBox.value = this.getCompletionBoxValue();
};
/**
 * Accept the clicked candidate row as the completion.
 */
this.popupClick = function(event)
{
    var row = this.getCompletionPopupElementFromEvent(event);
    if (!row)
        return;

    this.completions.index = row.completionIndex;
    this.acceptCompletion();
};
// Rebind the popup handlers to this object so that the same function
// references can be passed to both addEventListener (below) and
// removeEventListener (in shutdown).
this.popupMousedown = Obj.bind(this.popupMousedown, this);
this.popupClick = Obj.bind(this.popupClick, this);

/**
 * A destructor function, to be called when the auto-completer is destroyed.
 */
this.shutdown = function()
{
    this.completionBox.value = "";

    Events.removeEventListener(this.completionPopup, "mousedown", this.popupMousedown, true);
    Events.removeEventListener(this.completionPopup, "click", this.popupClick, true);
};

Events.addEventListener(this.completionPopup, "mousedown", this.popupMousedown, true);
Events.addEventListener(this.completionPopup, "click", this.popupClick, true);
};
/**
 * Transform an expression from using .% into something JavaScript-friendly, which
 * delegates to _FirebugCommandLine.
 * Used only in module.js, but autoCompleter.js has so many nice helper functions.
 */
Firebug.JSAutoCompleter.transformScopeExpr = function(expr, fname)
{
    // Work on a simplified copy so string/regex contents cannot confuse the
    // ".%" search; if simplification fails, return the input unchanged.
    var sexpr = simplifyExpr(expr);
    if (!sexpr) return expr;

    var search = 0;
    for (;;) {
        // Find the next ".%" (scope access) at or after 'search'.
        var end = sexpr.indexOf(".%", search);
        if (end === -1) break;

        // Rewrite '<base>.%rest' into '<fname>(<base>).rest', keeping the
        // real and the simplified expression in sync.
        var start = getExpressionOffset(sexpr, end);
        expr = expr.substr(0, start) + fname + "(" +
            expr.substring(start, end) + ")." +
            expr.substr(end+2);
        sexpr = sexpr.substr(0, start) + fname + "(" +
            sexpr.substring(start, end) + ")." +
            sexpr.substr(end+2);

        // Continue searching after the text that was just inserted.
        search = end + fname + "().".length;
    }
    return expr;
};
// ********************************************************************************************* //
// Auto-completion helpers
/**
 * Try to find the position at which the expression to be completed starts,
 * scanning backwards from 'start' (default: end of 'command'). 'command'
 * is expected to be a simplified expression (see simplifyExpr).
 */
function getExpressionOffset(command, start)
{
    if (typeof start === 'undefined')
        start = command.length;

    var bracketCount = 0, instr = false;

    // When completing []-accessed properties, start instead from the last [.
    var lastBr = command.lastIndexOf("[", start);
    if (lastBr !== -1 && /^" *$/.test(command.substring(lastBr+1, start)))
        start = lastBr;

    // Walk backwards, tracking bracket nesting, until reaching a character
    // that cannot belong to the expression.
    for (var i = start-1; i >= 0; --i)
    {
        var c = command[i];
        if (reOpenBracket.test(c))
        {
            if (bracketCount)
                --bracketCount;
            else
                break;
        }
        else if (reCloseBracket.test(c))
        {
            // A closing bracket only continues the expression when it is
            // followed by a property or index access.
            var next = command[i + 1];
            if (bracketCount === 0 && next !== "." && next !== "[")
                break;
            else
                ++bracketCount;
        }
        else if (bracketCount === 0)
        {
            if (c === '"') instr = !instr;
            else if (instr || reJSChar.test(c) || c === "." ||
                (c === "%" && command[i-1] === "."))
                ;
            else
                break;
        }
    }
    // 'i' survives the loop (var hoisting); move it to the first character
    // that is part of the expression.
    ++i;

    // The 'new' operator has higher precedence than function calls, so, if
    // present, it should be included if the expression contains a parenthesis.
    if (i-4 >= 0 && command.indexOf("(", i) !== -1 && command.substr(i-4, 4) === "new ")
    {
        i -= 4;
    }

    return i;
}
/**
 * Try to find the position at which the property name of the final property
 * access in an expression starts (for example, 2 in 'a.b').
 */
function getPropertyOffset(expr)
{
    // Bracket access 'a["...': the property text starts after the quote.
    var brPos = expr.lastIndexOf("[");
    if (brPos !== -1 && /^" *$/.test(expr.substr(brPos + 1)))
        return brPos + 2;

    var dotPos = expr.lastIndexOf(".");

    // Scope access 'a.%b': skip both the dot and the percent sign.
    if (dotPos !== -1 && expr.charAt(dotPos + 1) === "%")
        return dotPos + 2;

    // Plain '.b' access, or 0 when there is no property access at all.
    return dotPos + 1;
}
/**
 * Get the index of the last non-whitespace character in the range [0, from)
 * in str, or -1 if there is none. (Only ' ' counts as whitespace here,
 * which is all simplified expressions contain.)
 */
function prevNonWs(str, from)
{
    var i = from;
    while (--i >= 0)
    {
        if (str.charAt(i) !== " ")
            return i;
    }
    return -1;
}
/**
 * Find the start of a word consisting of characters matching reJSChar, if
 * str[from] is the last character in the word. (This can be used together
 * with prevNonWs to traverse words backwards from a position.)
 */
function prevWord(str, from)
{
    var i = from;
    while (--i >= 0)
    {
        if (!reJSChar.test(str.charAt(i)))
            return i + 1;
    }
    return 0;
}
// Tell whether the identifier starting at 'pos' in 'expr' is being declared
// as a function name, i.e. is directly preceded by "function " that does
// not itself end another identifier.
function isFunctionName(expr, pos)
{
    var start = pos - 9;
    if (start < 0 || expr.substr(start, 9) !== "function ")
        return false;
    return (start === 0 || !reJSChar.test(expr.charAt(start - 1)));
}
// Search backwards from expr[from-1] for the bracket opening the bracket
// group that contains position 'from'; return its index, or -1 if none.
function bwFindMatchingParen(expr, from)
{
    var depth = 1;
    for (var i = from - 1; i >= 0; --i)
    {
        var ch = expr.charAt(i);
        if (reCloseBracket.test(ch))
        {
            ++depth;
        }
        else if (reOpenBracket.test(ch))
        {
            if (--depth === 0)
                return i;
        }
    }
    return -1;
}
/**
 * Check if a '/' at the end of 'expr' would be a regex or a division.
 * May also return null if the expression seems invalid.
 */
function endingDivIsRegex(expr)
{
    // Keywords after which an expression (and thus a regex) may follow...
    var kwActions = ["throw", "return", "in", "instanceof", "delete", "new",
        "do", "else", "typeof", "void", "yield"];
    // ...and keywords whose parenthesized head may be followed by a regex.
    var kwCont = ["function", "if", "while", "for", "switch", "catch", "with"];

    var ind = prevNonWs(expr, expr.length), ch = (ind === -1 ? "{" : expr.charAt(ind));
    if (reJSChar.test(ch))
    {
        // Test if the previous word is a keyword usable like 'kw <expr>'.
        // If so, we have a regex, otherwise, we have a division (a variable
        // or literal being divided by something).
        var w = expr.substring(prevWord(expr, ind), ind+1);
        return (kwActions.indexOf(w) !== -1);
    }
    else if (ch === ")")
    {
        // We have a regex in the cases 'if (...) /blah/' and 'function name(...) /blah/'.
        ind = bwFindMatchingParen(expr, ind);
        if (ind === -1)
            return null;
        ind = prevNonWs(expr, ind);
        if (ind === -1)
            return false;
        if (!reJSChar.test(expr.charAt(ind)))
            return false;
        var wind = prevWord(expr, ind);
        if (kwCont.indexOf(expr.substring(wind, ind+1)) !== -1)
            return true;
        return isFunctionName(expr, wind);
    }
    else if (ch === "]")
    {
        // After an index/array expression, '/' must be a division.
        return false;
    }
    return true;
}
// Check if a "{" at position 'pos' in an expression opens an object
// literal rather than a block: it does unless the preceding
// non-whitespace character ends a statement or block (or there is none).
function isObjectDecl(expr, pos)
{
    var ind = prevNonWs(expr, pos);
    if (ind === -1)
        return false;
    var prev = expr.charAt(ind);
    return (prev !== ")" && prev !== "{" && prev !== "}" && prev !== ";");
}
// Check whether the text following the last comma in 'expr' (but no
// earlier than 'start'), with leading spaces skipped, is a valid property
// name.
function isCommaProp(expr, start)
{
    var beg = Math.max(expr.lastIndexOf(",") + 1, start);
    while (expr.charAt(beg) === " ")
        ++beg;
    return isValidProperty(expr.substr(beg));
}
/**
 * Produce a simplified copy of 'expr': the contents of string and regex
 * literals are blanked out with spaces and their delimiters normalized to
 * '"', so that later parsing does not have to deal with quoting or
 * escapes. Returns null when the expression contains mismatched brackets
 * or is otherwise too strange to handle.
 */
function simplifyExpr(expr)
{
    var ret = "", len = expr.length, instr = false, strend, inreg = false, inclass, brackets = [];
    for (var i = 0; i < len; ++i)
    {
        var ch = expr.charAt(i);
        if (instr)
        {
            // Inside a string literal: blank everything except the closing
            // quote; an escape consumes two characters.
            if (ch === strend)
            {
                ret += '"';
                instr = false;
            }
            else
            {
                if (ch === "\\" && i+1 !== len)
                {
                    ret += " ";
                    ++i;
                }
                ret += " ";
            }
        }
        else if (inreg)
        {
            // Inside a regex literal: a '/' terminates it only outside a
            // character class.
            if (inclass && ch === "]")
                inclass = false;
            else if (!inclass && ch === "[")
                inclass = true;
            else if (!inclass && ch === "/")
            {
                // End of regex, eat regex flags
                inreg = false;
                while (i+1 !== len && reJSChar.test(expr.charAt(i+1)))
                {
                    ret += " ";
                    ++i;
                }
                ret += '"';
            }
            if (inreg)
            {
                if (ch === "\\" && i+1 !== len)
                {
                    ret += " ";
                    ++i;
                }
                ret += " ";
            }
        }
        else
        {
            if (ch === "'" || ch === '"')
            {
                instr = true;
                strend = ch;
                ret += '"';
            }
            else if (ch === "/")
            {
                // Disambiguate regex start from division; null means the
                // expression is too weird to tell.
                var re = endingDivIsRegex(ret);
                if (re === null)
                    return null;
                if (re)
                {
                    inreg = true;
                    ret += '"';
                }
                else
                    ret += "/";
            }
            else
            {
                if (reOpenBracket.test(ch))
                    brackets.push(ch);
                else if (reCloseBracket.test(ch))
                {
                    // Check for mismatched brackets
                    if (!brackets.length)
                        return null;
                    var br = brackets.pop();
                    if (br === "(" && ch !== ")")
                        return null;
                    if (br === "[" && ch !== "]")
                        return null;
                    if (br === "{" && ch !== "}")
                        return null;
                }
                ret += ch;
            }
        }
    }
    return ret;
}
/**
 * Check if auto-completion should be killed: true when the (simplified)
 * expression 'expr' ends in a context where completing would be wrong -
 * nothing completable at the end, a declared function name, an object
 * literal key, a 'var' declaration list, or a function parameter list.
 * 'origExpr' is the unsimplified text, used to tell regexes from strings.
 */
function killCompletions(expr, origExpr)
{
    // Make sure there is actually something to complete at the end.
    if (expr.length === 0)
        return true;

    if (reJSChar.test(expr[expr.length-1]) ||
        expr.slice(-1) === "." ||
        expr.slice(-2) === ".%")
    {
        // An expression at the end - we're fine.
    }
    else
    {
        var lastBr = expr.lastIndexOf("[");
        if (lastBr !== -1 && /^" *$/.test(expr.substr(lastBr+1)) &&
            origExpr.charAt(lastBr+1) !== "/")
        {
            // Array completions - we're fine.
        }
        else {
            return true;
        }
    }

    // Check for 'function i'.
    var ind = expr.lastIndexOf(" ");
    if (isValidProperty(expr.substr(ind+1)) && isFunctionName(expr, ind+1))
        return true;

    // Check for '{prop: ..., i'.
    var bwp = bwFindMatchingParen(expr, expr.length);
    if (bwp !== -1 && expr.charAt(bwp) === "{" &&
        isObjectDecl(expr, bwp) && isCommaProp(expr, bwp+1))
    {
        return true;
    }

    // Check for 'var prop..., i'.
    var vind = expr.lastIndexOf("var ");
    if (bwp < vind && isCommaProp(expr, vind+4))
    {
        // Note: This doesn't strictly work, because it kills completions even
        // when we have started a new expression and used the comma operator
        // in it (ie. 'var a; a, i'). This happens very seldom though, so it's
        // not really a problem.
        return true;
    }

    // Check for 'function f(i' - walk outwards to the nearest '(' group.
    while (bwp !== -1 && expr.charAt(bwp) !== "(")
    {
        bwp = bwFindMatchingParen(expr, bwp);
    }
    if (bwp !== -1)
    {
        var ind = prevNonWs(expr, bwp);
        if (ind !== -1)
        {
            var stw = prevWord(expr, ind);
            if (expr.substring(stw, ind+1) === "function")
                return true;

            // The same check with a function name in between.
            ind = prevNonWs(expr, stw);
            if (ind !== -1 && expr.substring(prevWord(expr, ind), ind+1) === "function")
                return true;
        }
    }
    return false;
}
/**
 * Combine 'preParsed + preExpr + property' into the text the text box
 * should contain after accepting the completion 'property', switching to
 * ["..."] bracket syntax when the property name is not a valid free-form
 * identifier.
 */
function adjustCompletionOnAccept(preParsed, preExpr, property)
{
    var res = preParsed + preExpr + property;

    // Don't adjust index completions.
    if (/^\[['"]$/.test(preExpr.slice(-2)))
        return res;

    // Nor completions of scoped variables.
    if (preExpr.slice(-2) === ".%")
        return res;

    if (!isValidProperty(property))
    {
        // The property name is actually invalid in free form, so replace
        // it with array syntax.

        if (preExpr)
        {
            // Drop the trailing '.' of the pre-expression.
            res = preParsed + preExpr.slice(0, -1);
        }
        else
        {
            // Global variable access - assume the variable is a member of 'window'.
            res = preParsed + "window";
        }
        res += '["' + Str.escapeJS(property) + '"]';
    }
    return res;
}
// Types the autocompletion knows about, some of their non-enumerable properties,
// and the return types of some member functions, included in the Firebug.CommandLine
// object to make it more easily extensible.
// Entry format (see getKnownTypeInfo): "Type" is a plain member of that
// type; "|Type" is a function returning that type ("void" stands in for
// "unknown/unmodeled"). Keys starting with "_fb_" are metadata, not
// members: _fb_ignorePrototype suppresses prototype-based augmentation,
// _fb_contType gives the element type for indexed access.
var AutoCompletionKnownTypes = {
    "void": {
        "_fb_ignorePrototype": true
    },
    "Array": {
        "pop": "|void",
        "push": "|void",
        "shift": "|void",
        "unshift": "|void",
        "reverse": "|Array",
        "sort": "|Array",
        "splice": "|Array",
        "concat": "|Array",
        "slice": "|Array",
        "join": "|String",
        "indexOf": "|Number",
        "lastIndexOf": "|Number",
        "filter": "|Array",
        "map": "|Array",
        "reduce": "|void",
        "reduceRight": "|void",
        "every": "|void",
        "forEach": "|void",
        "some": "|void",
        "length": "Number"
    },
    "String": {
        "_fb_contType": "String",
        "split": "|Array",
        "substr": "|String",
        "substring": "|String",
        "charAt": "|String",
        // charCodeAt returns the numeric code unit, not a string.
        "charCodeAt": "|Number",
        "concat": "|String",
        "indexOf": "|Number",
        "lastIndexOf": "|Number",
        "localeCompare": "|Number",
        "match": "|Array",
        "search": "|Number",
        "slice": "|String",
        "replace": "|String",
        "toLowerCase": "|String",
        "toLocaleLowerCase": "|String",
        "toUpperCase": "|String",
        "toLocaleUpperCase": "|String",
        "trim": "|String",
        "length": "Number"
    },
    "RegExp": {
        "test": "|void",
        "exec": "|Array",
        "lastIndex": "Number",
        "ignoreCase": "void",
        "global": "void",
        "multiline": "void",
        "source": "String"
    },
    "Date": {
        "getTime": "|Number",
        "getYear": "|Number",
        "getFullYear": "|Number",
        "getMonth": "|Number",
        "getDate": "|Number",
        "getDay": "|Number",
        "getHours": "|Number",
        "getMinutes": "|Number",
        "getSeconds": "|Number",
        "getMilliseconds": "|Number",
        "getUTCFullYear": "|Number",
        "getUTCMonth": "|Number",
        "getUTCDate": "|Number",
        "getUTCDay": "|Number",
        "getUTCHours": "|Number",
        "getUTCMinutes": "|Number",
        "getUTCSeconds": "|Number",
        "getUTCMilliseconds": "|Number",
        "setTime": "|void",
        "setYear": "|void",
        "setFullYear": "|void",
        "setMonth": "|void",
        "setDate": "|void",
        "setHours": "|void",
        "setMinutes": "|void",
        "setSeconds": "|void",
        "setMilliseconds": "|void",
        "setUTCFullYear": "|void",
        "setUTCMonth": "|void",
        "setUTCDate": "|void",
        "setUTCHours": "|void",
        "setUTCMinutes": "|void",
        "setUTCSeconds": "|void",
        "setUTCMilliseconds": "|void",
        "toUTCString": "|String",
        "toLocaleDateString": "|String",
        "toLocaleTimeString": "|String",
        "toLocaleFormat": "|String",
        "toDateString": "|String",
        "toTimeString": "|String",
        "toISOString": "|String",
        "toGMTString": "|String",
        "toJSON": "|String",
        "toString": "|String",
        "toLocaleString": "|String",
        "getTimezoneOffset": "|Number"
    },
    "Function": {
        "call": "|void",
        "apply": "|void",
        "length": "Number",
        "prototype": "void"
    },
    "HTMLElement": {
        "getElementsByClassName": "|NodeList",
        "getElementsByTagName": "|NodeList",
        "getElementsByTagNameNS": "|NodeList",
        "querySelector": "|HTMLElement",
        "querySelectorAll": "|NodeList",
        "firstChild": "HTMLElement",
        "lastChild": "HTMLElement",
        "firstElementChild": "HTMLElement",
        "lastElementChild": "HTMLElement",
        "parentNode": "HTMLElement",
        "previousSibling": "HTMLElement",
        "nextSibling": "HTMLElement",
        "previousElementSibling": "HTMLElement",
        "nextElementSibling": "HTMLElement",
        "children": "NodeList",
        "childNodes": "NodeList"
    },
    "NodeList": {
        "_fb_contType": "HTMLElement",
        "length": "Number",
        "item": "|HTMLElement",
        "namedItem": "|HTMLElement"
    },
    "Window": {
        "encodeURI": "|String",
        "encodeURIComponent": "|String",
        "decodeURI": "|String",
        "decodeURIComponent": "|String",
        "eval": "|void",
        "parseInt": "|Number",
        "parseFloat": "|Number",
        "isNaN": "|void",
        "isFinite": "|void",
        "NaN": "Number",
        "Math": "Math",
        "undefined": "void",
        "Infinity": "Number"
    },
    "HTMLDocument": {
        "querySelector": "|HTMLElement",
        "querySelectorAll": "|NodeList"
    },
    "Math": {
        "E": "Number",
        "LN2": "Number",
        "LN10": "Number",
        "LOG2E": "Number",
        "LOG10E": "Number",
        "PI": "Number",
        "SQRT1_2": "Number",
        "SQRT2": "Number",
        "abs": "|Number",
        "acos": "|Number",
        "asin": "|Number",
        "atan": "|Number",
        "atan2": "|Number",
        "ceil": "|Number",
        "cos": "|Number",
        "exp": "|Number",
        "floor": "|Number",
        "log": "|Number",
        "max": "|Number",
        "min": "|Number",
        "pow": "|Number",
        "random": "|Number",
        "round": "|Number",
        "sin": "|Number",
        "sqrt": "|Number",
        "tan": "|Number"
    },
    "Number": {
        // There are also toFixed and valueOf, but they are left out because
        // they steal focus from toString by being shorter (in the case of
        // toFixed), and because they are used very seldom.
        "toExponential": "|String",
        "toPrecision": "|String",
        "toLocaleString": "|String",
        "toString": "|String"
    }
};
// Kinds of links in a parsed property-access chain (see evalPropChainStep
// for how each kind is interpreted).
var LinkType = {
    "PROPERTY": 0,          // plain access: a.b
    "SCOPED_VARS": 1,       // closure-scope access: a.%b
    "INDEX": 2,             // bracket access: a[i]
    "CALL": 3,              // function call: f(...)
    "SAFECALL": 4,          // call whose arguments are side-effect-free literals
    "RETVAL_HEURISTIC": 5   // call whose return type is guessed from its source
};
// Look up the pseudo-type descriptor for the type name 't', or null when
// the auto-completion does not know about it.
function getKnownType(t)
{
    var known = AutoCompletionKnownTypes;
    return (known.hasOwnProperty(t) ? known[t] : null);
}
// Parse a member descriptor from AutoCompletionKnownTypes: a leading "|"
// marks a function and the rest is its return type (e.g. "|String");
// anything else is a plain value type.
function getKnownTypeInfo(r)
{
    if (r.charAt(0) !== "|")
        return {"val": r};
    return {"val": "Function", "ret": r.substr(1)};
}
// List the completable member names of a known pseudo-type, skipping the
// internal "_fb_"-prefixed metadata keys. Returns [] for unknown types.
function getFakeCompleteKeys(name)
{
    var ret = [], type = getKnownType(name);
    if (!type)
        return ret;
    for (var prop in type) {
        // Guard against enumerable properties inherited via the prototype
        // chain (e.g. something added to Object.prototype by page code).
        if (type.hasOwnProperty(prop) && prop.substr(0, 4) !== "_fb_")
            ret.push(prop);
    }
    return ret;
}
// Starting at 'start', consume identifier characters (reJSChar) and return
// the index just past the end of the property name.
function eatProp(expr, start)
{
    var i = start;
    while (i < expr.length && reJSChar.test(expr.charAt(i)))
        ++i;
    return i;
}
// Find the index of the bracket that closes the bracket at 'start', or -1
// when it is unmatched.
function matchingBracket(expr, start)
{
    var depth = 1;
    for (var i = start + 1; i < expr.length; ++i) {
        var ch = expr.charAt(i);
        if (reOpenBracket.test(ch))
        {
            ++depth;
        }
        else if (reCloseBracket.test(ch))
        {
            if (--depth === 0)
                return i;
        }
    }
    return -1;
}
// Return a JavaScript expression for determining the type / [[Class]] of
// an object given by another JavaScript expression. For DOM nodes, return
// HTMLElement instead of HTML[node type]Element, for simplicity.
function getTypeExtractionExpression(command)
{
    return "(function() { var v = " + command + "; " +
        "if (window.HTMLElement && v instanceof HTMLElement) return 'HTMLElement'; " +
        "return Object.prototype.toString.call(v).slice(8, -1);})()";
}
// Return a sorted copy of 'ar' with duplicate elements removed (adjacent
// after sorting). The input array is not modified.
function sortUnique(ar)
{
    var sorted = ar.slice();
    sorted.sort();

    var ret = [];
    for (var i = 0; i < sorted.length; ++i)
    {
        if (!i || sorted[i-1] !== sorted[i])
            ret.push(sorted[i]);
    }
    return ret;
}
/**
 * Tell whether 'obj' has any closed-over (scoped) variables that could be
 * offered as completions. Never throws; failures are traced and treated
 * as "no".
 */
function hasScopedVariables(context, obj)
{
    try {
        // Only objects and functions can carry scopes.
        if (typeof obj !== "object" && typeof obj !== "function")
            return false;
        var w = context.window.wrappedJSObject;
        var parts = Firebug.FireClosure.getScopedVariables(w, obj);
        return parts.some(function(part) { return part.length > 0; });
    }
    catch (e) {
        if (FBTrace.DBG_FIRECLOSURE)
            FBTrace.sysout("FireClosure; failed to check for closed over variables", e);
        return false;
    }
}
/**
 * Finish a property-chain evaluation: derive the candidate completion
 * list from the evaluation result (or from the fake known-type tables)
 * and store it, sorted and de-duplicated, in out.complete. 'result' is
 * unused when tempExpr.fake is set.
 */
function propChainBuildComplete(out, context, tempExpr, result)
{
    var complete = null, command = null;

    // Scope completion ('.%') takes its candidates from FireClosure.
    if (out.scopeCompletion)
    {
        if (tempExpr.fake)
            return;
        if (typeof result !== "object" && typeof result !== "function")
            return;
        var w = context.window.wrappedJSObject;
        var parts = Firebug.FireClosure.getScopedVariables(w, result);
        complete = Array.prototype.concat.apply([], parts);
        out.complete = sortUnique(complete);
        return;
    }

    if (tempExpr.fake)
    {
        // Fake expression: use the static member tables, optionally
        // augmented below with the real prototype's keys.
        var name = tempExpr.value.val;
        complete = getFakeCompleteKeys(name);
        if (!getKnownType(name)._fb_ignorePrototype)
            command = name + ".prototype";
    }
    else
    {
        if (typeof result === "string")
        {
            // Strings only have indices as properties, use the fake object
            // completions instead.
            tempExpr.fake = true;
            tempExpr.value = getKnownTypeInfo("String");
            propChainBuildComplete(out, context, tempExpr);
            return;
        }
        else if (FirebugReps.Arr.isArray(result, context.window))
            complete = nonNumericKeys(result);
        else
            complete = Arr.keys(result);
        command = getTypeExtractionExpression(tempExpr.command);
        out.hasScope = hasScopedVariables(context, result);
    }

    var done = function()
    {
        if (out.indexCompletion)
        {
            // Index completion: quote/escape each name and close the bracket.
            complete = complete.map(function(x)
            {
                x = (out.indexQuoteType === '"') ? Str.escapeJS(x): Str.escapeSingleQuoteJS(x);
                return x + out.indexQuoteType + "]";
            });
        }

        // Properties may be taken from several sources, so filter out duplicates.
        out.complete = sortUnique(complete);
    };

    if (command === null)
    {
        done();
    }
    else
    {
        // Fetch additional keys asynchronously before finishing.
        Firebug.CommandLine.evaluate(command, context, context.thisValue, null,
            function found(result, context)
            {
                if (tempExpr.fake)
                {
                    complete = complete.concat(Arr.keys(result));
                }
                else
                {
                    if (typeof result === "string" && getKnownType(result))
                    {
                        complete = complete.concat(getFakeCompleteKeys(result));
                    }
                }
                done();
            },
            function failed(result, context)
            {
                done();
            }
        );
    }
}
/**
 * Advance through the parsed property chain 'evalChain' starting at link
 * index 'step'. While tempExpr.fake is set, types are tracked purely via
 * the AutoCompletionKnownTypes tables; otherwise a real JavaScript command
 * is accumulated and evaluated (asynchronously) to discover member names.
 * Ends by calling propChainBuildComplete, or silently giving up.
 */
function evalPropChainStep(step, tempExpr, evalChain, out, context)
{
    if (tempExpr.fake)
    {
        if (step === evalChain.length)
        {
            propChainBuildComplete(out, context, tempExpr);
            return;
        }

        var link = evalChain[step], type = link.type;
        if (type === LinkType.PROPERTY || type === LinkType.INDEX)
        {
            // Use the accessed property if it exists, otherwise abort. It
            // would be possible to continue with a 'real' expression of
            // `tempExpr.value.val`.prototype, but since prototypes seldom
            // contain actual values of things this doesn't work very well.
            var mem = (type === LinkType.INDEX ? "_fb_contType" : link.name);
            var t = getKnownType(tempExpr.value.val);
            if (t.hasOwnProperty(mem))
                tempExpr.value = getKnownTypeInfo(t[mem]);
            else
                return;
        }
        else if (type === LinkType.CALL)
        {
            // Calling a known member: continue with its return type.
            if (tempExpr.value.ret)
                tempExpr.value = getKnownTypeInfo(tempExpr.value.ret);
            else
                return;
        }
        else
        {
            return;
        }
        evalPropChainStep(step+1, tempExpr, evalChain, out, context);
    }
    else
    {
        // Accumulate as many links as possible into a single evaluatable
        // command, stopping at calls, which need special handling.
        var funcCommand = null, link, type;
        while (step !== evalChain.length)
        {
            link = evalChain[step];
            type = link.type;
            if (type === LinkType.PROPERTY)
            {
                tempExpr.thisCommand = tempExpr.command;
                tempExpr.command += "." + link.name;
            }
            else if (type === LinkType.SCOPED_VARS)
            {
                tempExpr.thisCommand = "window";
                tempExpr.command += ".%" + link.name;
            }
            else if (type === LinkType.INDEX)
            {
                tempExpr.thisCommand = "window";
                tempExpr.command += "[" + link.cont + "]";
            }
            else if (type === LinkType.SAFECALL)
            {
                tempExpr.thisCommand = "window";
                tempExpr.command += "(" + link.origCont + ")";
            }
            else if (type === LinkType.CALL)
            {
                if (link.name === "")
                {
                    // We cannot know about functions without name; try the
                    // heuristic directly.
                    link.type = LinkType.RETVAL_HEURISTIC;
                    evalPropChainStep(step, tempExpr, evalChain, out, context);
                    return;
                }
                funcCommand = getTypeExtractionExpression(tempExpr.thisCommand);
                break;
            }
            else if (type === LinkType.RETVAL_HEURISTIC)
            {
                if (link.origCont !== null &&
                    (link.name.substr(0, 3) === "get" ||
                     (link.name.charAt(0) === "$" && link.cont.indexOf(",") === -1)))
                {
                    // Names beginning with get or $ are almost always getters, so
                    // assume it is a safecall and start over.
                    link.type = LinkType.SAFECALL;
                    evalPropChainStep(step, tempExpr, evalChain, out, context);
                    return;
                }
                funcCommand = "Function.prototype.toString.call(" + tempExpr.command + ")";
                break;
            }
            ++step;
        }

        var func = (funcCommand !== null), command = (func ? funcCommand : tempExpr.command);
        Firebug.CommandLine.evaluate(command, context, context.thisValue, null,
            function found(result, context)
            {
                if (func)
                {
                    if (type === LinkType.CALL)
                    {
                        // 'result' is the [[Class]] of the call target's 'this'.
                        if (typeof result !== "string")
                            return;
                        var t = getKnownType(result);
                        if (t && t.hasOwnProperty(link.name))
                        {
                            var propVal = getKnownTypeInfo(t[link.name]);

                            // Make sure the property is a callable function
                            if (!propVal.ret)
                                return;

                            tempExpr.fake = true;
                            tempExpr.value = getKnownTypeInfo(propVal.ret);
                            evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                        }
                        else
                        {
                            // Unknown 'this' type or function name, use
                            // heuristics on the function instead.
                            link.type = LinkType.RETVAL_HEURISTIC;
                            evalPropChainStep(step, tempExpr, evalChain, out, context);
                        }
                    }
                    else if (type === LinkType.RETVAL_HEURISTIC)
                    {
                        // 'result' is the function's source text.
                        if (typeof result !== "string")
                            return;

                        // Perform some crude heuristics for figuring out the
                        // return value of a function based on its contents.
                        // It's certainly not perfect, and it's easily fooled
                        // into giving wrong results, but it might work in
                        // some common cases.

                        // Check for chaining functions. This is done before
                        // checking for nested functions, because completing
                        // results of member functions containing nested
                        // functions that use 'return this' seems uncommon,
                        // and being wrong is not a huge problem.
                        if (result.indexOf("return this;") !== -1)
                        {
                            tempExpr.command = tempExpr.thisCommand;
                            tempExpr.thisCommand = "window";
                            evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                            return;
                        }

                        // Don't support nested functions.
                        if (result.lastIndexOf("function") !== 0)
                            return;

                        // Check for arrays.
                        if (result.indexOf("return [") !== -1)
                        {
                            tempExpr.fake = true;
                            tempExpr.value = getKnownTypeInfo("Array");
                            evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                            return;
                        }

                        // Check for 'return new Type(...);', and use the
                        // prototype as a pseudo-object for those (since it
                        // is probably not a known type that we can fake).
                        var newPos = result.indexOf("return new ");
                        if (newPos !== -1)
                        {
                            var rest = result.substr(newPos + 11),
                                epos = rest.search(/[^a-zA-Z0-9_$.]/);
                            if (epos !== -1)
                            {
                                rest = rest.substring(0, epos);
                                tempExpr.command = rest + ".prototype";
                                evalPropChainStep(step+1, tempExpr, evalChain, out, context);
                                return;
                            }
                        }
                    }
                }
                else
                {
                    propChainBuildComplete(out, context, tempExpr, result);
                }
            },
            function failed(result, context) { }
        );
    }
}
/**
 * Parse a dot/bracket/call-chained expression (e.g. "foo.bar(1)[2].baz")
 * into a list of evaluation links, then start asynchronous evaluation of
 * the chain via evalPropChainStep.
 *
 * @param out       accumulator object; completions are eventually written
 *                  into out.complete by the chain-evaluation steps
 * @param preExpr   simplified/sanitized form of the expression (string and
 *                  regex literals replaced — see the '"' branch below)
 * @param origExpr  the original, unsimplified expression text (used to
 *                  recover literal arguments/indices safely)
 * @param context   Firebug context passed through to evaluation
 * @returns false on syntax errors or expressions too complicated to
 *          handle; true when chain evaluation was started
 */
function evalPropChain(out, preExpr, origExpr, context)
{
    var evalChain = [], linkStart = 0, len = preExpr.length, lastProp = "";

    // tempExpr describes the chain's base object. "fake" means we only know
    // its type (tempExpr.value), not a real evaluatable command string.
    var tempExpr = {"fake": false, "command": "window", "thisCommand": "window"};
    while (linkStart !== len)
    {
        var ch = preExpr.charAt(linkStart);
        if (linkStart === 0)
        {
            // First link: determine the base of the chain.
            if (preExpr.substr(0, 4) === "new ")
            {
                // 'new Type(...)' — use Type.prototype as a stand-in base.
                var parInd = preExpr.indexOf("(");
                tempExpr.command = preExpr.substring(4, parInd) + ".prototype";
                linkStart = matchingBracket(preExpr, parInd) + 1;
            }
            else if (ch === "[")
            {
                // Array literal base.
                tempExpr.fake = true;
                tempExpr.value = getKnownTypeInfo("Array");
                linkStart = matchingBracket(preExpr, linkStart) + 1;
            }
            else if (ch === '"')
            {
                // In preExpr both string and regex literals appear as '"';
                // origExpr distinguishes them by its first character.
                var isRegex = (origExpr.charAt(0) === "/");
                tempExpr.fake = true;
                tempExpr.value = getKnownTypeInfo(isRegex ? "RegExp" : "String");
                linkStart = preExpr.indexOf('"', 1) + 1;
            }
            else if (!isNaN(ch))
            {
                // The expression is really a decimal number.
                return false;
            }
            else if (reJSChar.test(ch))
            {
                // The expression begins with a regular property name
                var nextLink = eatProp(preExpr, linkStart);
                lastProp = preExpr.substring(linkStart, nextLink);
                linkStart = nextLink;
                tempExpr.command = lastProp;
            }

            // Syntax error (like '.') or a too complicated expression.
            if (linkStart === 0)
                return false;
        }
        else
        {
            if (ch === ".")
            {
                // Property access
                var scope = (preExpr.charAt(linkStart+1) === "%");
                linkStart += (scope ? 2 : 1);
                var nextLink = eatProp(preExpr, linkStart);
                lastProp = preExpr.substring(linkStart, nextLink);
                linkStart = nextLink;
                evalChain.push({
                    "type": (scope ? LinkType.SCOPED_VARS : LinkType.PROPERTY),
                    "name": lastProp
                });
            }
            else if (ch === "(")
            {
                // Function call. Save the function name and the arguments if
                // they are safe to evaluate.
                var endCont = matchingBracket(preExpr, linkStart);
                var cont = preExpr.substring(linkStart+1, endCont), origCont = null;
                if (reLiteralExpr.test(cont))
                    origCont = origExpr.substring(linkStart+1, endCont);
                linkStart = endCont + 1;
                evalChain.push({
                    "type": LinkType.CALL,
                    "name": lastProp,
                    "origCont": origCont,
                    "cont": cont
                });

                lastProp = "";
            }
            else if (ch === "[")
            {
                // Index. Use the supplied index if it is a literal; otherwise
                // it is probably a loop index with a variable not yet defined
                // (like 'for(var i = 0; i < ar.length; ++i) ar[i].prop'), and
                // '0' seems like a reasonably good guess at a valid index.
                var endInd = matchingBracket(preExpr, linkStart);
                var ind = preExpr.substring(linkStart+1, endInd);
                if (reLiteralExpr.test(ind))
                    ind = origExpr.substring(linkStart+1, endInd);
                else
                    ind = "0";
                linkStart = endInd+1;
                evalChain.push({"type": LinkType.INDEX, "cont": ind});
                lastProp = "";
            }
            else
            {
                // Syntax error
                return false;
            }
        }
    }

    // Base determined and chain parsed; evaluate it link by link.
    evalPropChainStep(0, tempExpr, evalChain, out, context);
    return true;
}
/**
 * Compute auto-completion candidates for the command line.
 *
 * When spreExpr is non-empty, completes member properties of a chained
 * expression (via evalPropChain); otherwise completes variables from the
 * current scope. Results are written onto |base| as base.candidates and
 * base.hasScope.
 *
 * @param base      output object receiving .candidates and .hasScope
 * @param context   Firebug context (window, debugger state, ...)
 * @param preExpr   the expression preceding the property being completed
 * @param spreExpr  simplified form of preExpr (literals collapsed); falsy
 *                  when completing from the local scope
 * @param includeCurrentScope  if true and the debugger is stopped, complete
 *                  from the current stack frame's variables
 */
function autoCompleteEval(base, context, preExpr, spreExpr, includeCurrentScope)
{
    var out = {};
    out.complete = [];
    out.hasScope = false;

    try
    {
        if (spreExpr)
        {
            // Complete member variables of some .-chained expression
            // In case of array indexing, remove the bracket and set a flag to
            // escape completions.
            out.indexCompletion = false;
            out.scopeCompletion = false;
            var len = spreExpr.length;
            if (len >= 2 && spreExpr[len-2] === "[" && spreExpr[len-1] === '"')
            {
                out.indexCompletion = true;
                // NOTE(review): preExpr is indexed with spreExpr's length here;
                // this assumes both strings have equal length up to this point
                // (the simplification appears to be length-preserving) — the
                // quote character is taken from the original text.
                out.indexQuoteType = preExpr[len-1];
                len -= 2;
            }
            else if (spreExpr.slice(-2) === ".%")
            {
                // Scoped-variable completion ("obj.%" syntax).
                out.scopeCompletion = true;
                len -= 2;
            }
            else
            {
                // Drop the trailing "." of the chained expression.
                len -= 1;
            }
            spreExpr = spreExpr.substr(0, len);
            preExpr = preExpr.substr(0, len);

            if (FBTrace.DBG_COMMANDLINE)
                FBTrace.sysout("commandLine.autoCompleteEval pre:'" + preExpr +
                    "' spre:'" + spreExpr + "'.");

            // Don't auto-complete '.'.
            if (spreExpr === "")
                return;

            evalPropChain(out, spreExpr, preExpr, context);
        }
        else
        {
            // Complete variables from the local scope

            var contentView = Wrapper.getContentView(context.window);
            if (context.stopped && includeCurrentScope)
            {
                out.complete = Firebug.Debugger.getCurrentFrameKeys(context);
            }
            else if (contentView && contentView.Window &&
                contentView.constructor.toString() === contentView.Window.toString())
                // Cross window type pseudo-comparison
            {
                out.complete = Arr.keys(contentView); // return is safe

                // Add some known window properties
                out.complete = out.complete.concat(getFakeCompleteKeys("Window"));
            }
            else // hopefully sandbox in Chromebug
            {
                out.complete = Arr.keys(context.global);
            }

            // Sort the completions, and avoid duplicates.
            out.complete = sortUnique(out.complete);
        }
    }
    catch (exc)
    {
        if (FBTrace.DBG_ERRORS && FBTrace.DBG_COMMANDLINE)
            FBTrace.sysout("commandLine.autoCompleteEval FAILED", exc);
    }

    base.candidates = out.complete;
    base.hasScope = out.hasScope;
}
var reValidJSToken = /^[A-Za-z_$][A-Za-z_$0-9]*$/;
function isValidProperty(value)
{
// Use only string props
if (typeof(value) != "string")
return false;
// Use only those props that don't contain unsafe charactes and so need
// quotation (e.g. object["my prop"] notice the space character).
// Following expression checks that the name starts with a letter or $_,
// and there are only letters, numbers or $_ character in the string (no spaces).
return reValidJSToken.test(value);
}
const rePositiveNumber = /^[1-9][0-9]*$/;
function nonNumericKeys(map) // keys will be on user-level window objects
{
var keys = [];
try
{
for (var name in map) // enumeration is safe
{
if (! (name === "0" || rePositiveNumber.test(name)) )
keys.push(name);
}
}
catch (exc)
{
// Sometimes we get exceptions trying to iterate properties
}
return keys; // return is safe
}
function setCursorToEOL(input)
{
// textbox version, https://developer.mozilla.org/en/XUL/Property/inputField
// input.inputField.setSelectionRange(len, len);
input.setSelectionRange(input.value.length, input.value.length);
}
// ********************************************************************************************* //
// Registration
return Firebug.JSAutoCompleter;
// ********************************************************************************************* //
});
| Depend less on autoCompleter internals.
| content/autoCompleter.js | Depend less on autoCompleter internals. | <ide><path>ontent/autoCompleter.js
<ide> // ********************************************************************************************* //
<ide> // JavaScript auto-completion
<ide>
<add>var OldJSAutoCompleter = Firebug.JSAutoCompleter;
<add>
<ide> Firebug.JSAutoCompleter = function(textBox, completionBox, options)
<ide> {
<del> this.textBox = textBox;
<del> this.completionBox = completionBox;
<del> this.options = options;
<del> this.showCompletionPopup = options.completionPopup;
<del>
<del> this.completionBase = {
<del> pre: null,
<del> expr: null,
<del> candidates: []
<del> };
<del> this.completions = null;
<del>
<del> this.revertValue = null;
<del>
<del> this.completionPopup = Firebug.chrome.$("fbCommandLineCompletionList");
<del> this.selectedPopupElement = null;
<del>
<del> /**
<del> * If a completion was just performed, revert it. Otherwise do nothing.
<del> * Returns true iff the completion was reverted.
<del> */
<del> this.revert = function(context)
<del> {
<del> if (this.revertValue === null)
<del> return false;
<del>
<del> this.textBox.value = this.revertValue;
<del> var len = this.textBox.value.length;
<del> setCursorToEOL(this.textBox);
<del>
<del> this.complete(context);
<del> return true;
<del> };
<del>
<del> /**
<del> * Hide completions temporarily, so they show up again on the next key press.
<del> */
<del> this.hide = function()
<del> {
<del> this.completionBase = {
<del> pre: null,
<del> expr: null,
<del> candidates: []
<del> };
<del> this.completions = null;
<del>
<del> this.showCompletions();
<del> };
<del>
<del> /**
<del> * Hide completions for this expression (/completion base). Appending further
<del> * characters to the variable name will not make completions appear, but
<del> * adding, say, a semicolon and typing something else will.
<del> */
<del> this.hideForExpression = function()
<del> {
<del> this.completionBase.candidates = [];
<del> this.completions = null;
<del>
<del> this.showCompletions();
<del> };
<del>
<del> /**
<del> * Check whether it would be acceptable for the return key to evaluate the
<del> * expression instead of completing things.
<del> */
<del> this.acceptReturn = function()
<del> {
<del> if (!this.completions)
<del> return true;
<del>
<del> if (this.getCompletionBoxValue() === this.textBox.value)
<del> {
<del> // The user wouldn't see a difference if we completed. This can
<del> // happen for example if you type 'alert' and press enter,
<del> // regardless of whether or not there exist other completions.
<del> return true;
<del> }
<del>
<del> return false;
<del> };
<del>
<del> /**
<del> * Show completions for the current contents of the text box. Either this or
<del> * hide() must be called when the contents change.
<del> */
<del> this.complete = function(context)
<del> {
<del> this.revertValue = null;
<del> this.createCandidates(context);
<del> this.showCompletions();
<del> };
<del>
<del> /**
<del> * Update the completion base and create completion candidates for the
<del> * current value of the text box.
<del> */
<add> OldJSAutoCompleter.apply(this, arguments);
<add>
<add> this.shouldIncludeHint = function()
<add> {
<add> return (this.completions &&
<add> this.completionBase.hasScope &&
<add> !this.completions.prefix &&
<add> !/\[['"]|\.%/.test(this.completionBase.expr.slice(-2)));
<add> }
<add>
<add> /* Modified to use the right parsing/evaluation functions, and pass scope
<add> * data to them. */
<ide> this.createCandidates = function(context)
<ide> {
<ide> var offset = this.textBox.selectionStart;
<ide> this.createCompletions(prop);
<ide> };
<ide>
<del> /**
<del> * From a valid completion base, create a list of completions (containing
<del> * those completion candidates that share a prefix with the user's input)
<del> * and a default completion.
<del> */
<del> this.createCompletions = function(prefix)
<del> {
<del> var candidates = this.completionBase.candidates;
<del> var valid = [];
<del>
<del> if (!this.completionBase.expr && !prefix)
<del> {
<del> // Don't complete "".
<del> }
<del> else
<del> {
<del> for (var i = 0; i < candidates.length; ++i)
<del> {
<del> var name = candidates[i];
<del> if (Str.hasPrefix(name, prefix))
<del> valid.push(name);
<del> }
<del> }
<del>
<del> if (valid.length > 0)
<del> {
<del> this.completions = {
<del> list: valid,
<del> prefix: prefix
<del> };
<del> this.pickDefaultCandidate();
<del> }
<del> else
<del> {
<del> this.completions = null;
<add> /* Hacked to include the .% hint in the count */
<add> var oldShowCompletions = this.showCompletions;
<add> this.showCompletions = function()
<add> {
<add> if (this.completions && this.shouldIncludeHint())
<add> {
<add> // Add a sentinel (removed further down, and in popupCandidates) to
<add> // make the count right in the real showCompletions, without having
<add> // to duplicate logic.
<add> this.completions.list.push(undefined);
<add> this.completions.hasHintElement = true;
<add> }
<add>
<add> oldShowCompletions.apply(this, arguments);
<add>
<add> if (this.completions && this.completions.hasHintElement)
<add> {
<add> this.completions.list.pop();
<add> delete this.completions.hasHintElement;
<ide> }
<ide> };
<ide>
<del> /**
<del> * Chose a default candidate from the list of completions. This is currently
<del> * selected as the shortest completion, to make completions disappear when
<del> * typing a variable name that is also the prefix of another.
<del> */
<del> this.pickDefaultCandidate = function()
<del> {
<del> var pick = 0;
<del> var ar = this.completions.list;
<del> for (var i = 1; i < ar.length; i++)
<del> {
<del> if (ar[i].length < ar[pick].length)
<del> pick = i;
<del> }
<del> this.completions.index = pick;
<del> };
<del>
<del> /**
<del> * Go backward or forward one step in the list of completions.
<del> * dir is the relative movement in the list; -1 means backward and 1 forward.
<del> */
<del> this.cycle = function(dir)
<del> {
<del> this.completions.index += dir;
<del> if (this.completions.index >= this.completions.list.length)
<del> this.completions.index = 0;
<del> else if (this.completions.index < 0)
<del> this.completions.index = this.completions.list.length - 1;
<del> this.showCompletions();
<del> };
<del>
<del> /**
<del> * Get the property name that is currently selected as a completion (or
<del> * null if there is none).
<del> */
<del> this.getCurrentCompletion = function()
<del> {
<del> return (this.completions ? this.completions.list[this.completions.index] : null);
<del> };
<del>
<del> /**
<del> * See if we have any completions.
<del> */
<del> this.hasCompletions = function()
<del> {
<del> return !!this.completions;
<del> };
<del>
<del> /**
<del> * Get the value the completion box should have for some value of the
<del> * text box and a selected completion.
<del> */
<del> this.getCompletionBoxValue = function()
<del> {
<del> var completion = this.getCurrentCompletion();
<del> if (completion === null)
<del> return "";
<del> return this.completionBase.pre + this.completionBase.expr + completion;
<del> };
<del>
<del> /**
<del> * Update the completion box and popup to be consistent with the current
<del> * state of the auto-completer.
<del> */
<del> this.showCompletions = function()
<del> {
<del> this.completionBox.value = this.getCompletionBoxValue();
<del>
<del> var nc = (this.completions ?
<del> (this.completions.list.length + (this.completionBase.hasScope ? 1 : 0)) :
<del> 0);
<del> if (this.showCompletionPopup && nc > 1)
<del> this.popupCandidates();
<del> else
<del> this.closePopup();
<del> };
<del>
<del> /**
<del> * Handle a keypress event. Returns true if the auto-completer used up
<del> * the event and does not want it to propagate further.
<del> */
<del> this.handleKeyPress = function(event, context)
<del> {
<del> var clearedTabWarning = this.clearTabWarning();
<del>
<del> if (Events.isAlt(event))
<del> return false;
<del>
<del> if (event.keyCode === KeyEvent.DOM_VK_TAB &&
<del> !Events.isControl(event) && this.textBox.value !== "")
<del> {
<del> if (this.completions)
<del> {
<del> this.acceptCompletion();
<del> Events.cancelEvent(event);
<del> return true;
<del> }
<del> else if (this.options.tabWarnings)
<del> {
<del> if (clearedTabWarning)
<del> {
<del> // Send tab along if the user was warned.
<del> return false;
<del> }
<del>
<del> this.setTabWarning();
<del> Events.cancelEvent(event);
<del> return true;
<del> }
<del> }
<del> else if (event.keyCode === KeyEvent.DOM_VK_RETURN && !this.acceptReturn())
<del> {
<del> // Completion on return, when one is user-visible.
<del> this.acceptCompletion();
<del> Events.cancelEvent(event);
<del> return true;
<del> }
<del> else if (event.keyCode === KeyEvent.DOM_VK_RIGHT && this.completions &&
<del> this.textBox.selectionStart === this.textBox.value.length)
<del> {
<del> // Complete on right arrow at end of line.
<del> this.acceptCompletion();
<del> Events.cancelEvent(event);
<del> return true;
<del> }
<del> else if (event.keyCode === KeyEvent.DOM_VK_ESCAPE)
<del> {
<del> if (this.completions)
<del> {
<del> this.hideForExpression();
<del> Events.cancelEvent(event);
<del> return true;
<del> }
<del> else
<del> {
<del> // There are no visible completions, but we might still be able to
<del> // revert a recently performed completion.
<del> if (this.revert(context))
<del> {
<del> Events.cancelEvent(event);
<del> return true;
<del> }
<del> }
<del> }
<del> else if (event.keyCode === KeyEvent.DOM_VK_UP || event.keyCode === KeyEvent.DOM_VK_DOWN)
<del> {
<del> if (this.completions)
<del> {
<del> this.cycle((event.keyCode === KeyEvent.DOM_VK_UP ? -1 : 1));
<del> Events.cancelEvent(event);
<del> return true;
<del> }
<del> }
<del> return false;
<del> };
<del>
<del> /**
<del> * Handle a keydown event.
<del> */
<del> this.handleKeyDown = function(event, context)
<del> {
<del> if (event.keyCode === KeyEvent.DOM_VK_ESCAPE && this.completions)
<del> {
<del> // Close the completion popup on escape in keydown, so that the popup
<del> // does not close itself and prevent event propagation on keypress.
<del> this.closePopup();
<del> }
<del> };
<del>
<del> this.clearTabWarning = function()
<del> {
<del> if (this.tabWarning)
<del> {
<del> this.completionBox.value = "";
<del> delete this.tabWarning;
<del> return true;
<del> }
<del> return false;
<del> };
<del>
<del> this.setTabWarning = function()
<del> {
<del> this.completionBox.value = this.textBox.value + " " +
<del> Locale.$STR("firebug.completion.empty");
<del>
<del> this.tabWarning = true;
<del> };
<del>
<del> /**
<del> * Accept the currently shown completion in the text box.
<del> */
<del> this.acceptCompletion = function()
<del> {
<del> var completion = this.getCurrentCompletion();
<del> completion = adjustCompletionOnAccept(this.completionBase.pre,
<del> this.completionBase.expr, completion);
<del>
<del> var originalValue = this.textBox.value;
<del> this.textBox.value = completion;
<del> setCursorToEOL(this.textBox);
<del>
<del> this.hide();
<del> this.revertValue = originalValue;
<del> };
<del>
<add> /* Edited to include the .% hint. */
<ide> this.popupCandidates = function()
<ide> {
<add> if (this.completions.hasHintElement)
<add> {
<add> this.completions.list.pop();
<add> delete this.completions.hasHintElement;
<add> }
<add>
<ide> var commandCompletionLineLimit = 40;
<ide>
<ide> Dom.eraseNode(this.completionPopup);
<ide> var escPrefix = Str.escapeForTextNode(this.textBox.value);
<ide>
<ide> var listSize = this.completions.list.length;
<del> if (this.completions.prefix == '' &&
<del> !/\[['"]/.test(this.completionBase.expr.slice(-2)) &&
<del> this.completionBase.expr.slice(-2) !== ".%" &&
<del> this.completionBase.hasScope)
<del> {
<add> if (this.shouldIncludeHint())
<ide> ++listSize;
<del> }
<ide>
<ide> var showTop = 0;
<ide> var showBottom = listSize;
<ide>
<ide> this.completionPopup.openPopup(this.textBox, "before_start", 0, 0, false, false);
<ide> };
<del>
<del> this.closePopup = function()
<del> {
<del> if (this.completionPopup.state == "closed")
<del> return;
<del>
<del> try
<del> {
<del> this.completionPopup.hidePopup();
<del> }
<del> catch (err)
<del> {
<del> if (FBTrace.DBG_ERRORS)
<del> FBTrace.sysout("Firebug.JSAutoCompleter.closePopup; EXCEPTION " + err, err);
<del> }
<del> };
<del>
<del> this.getCompletionPopupElementFromEvent = function(event)
<del> {
<del> var selected = event.target;
<del> while (selected && selected.localName !== "div")
<del> selected = selected.parentNode;
<del>
<del> return (selected && typeof selected.completionIndex !== "undefined" ? selected : null);
<del> };
<del>
<del> this.popupMousedown = function(event)
<del> {
<del> var el = this.getCompletionPopupElementFromEvent(event);
<del> if (!el)
<del> return;
<del>
<del> if (this.selectedPopupElement)
<del> this.selectedPopupElement.removeAttribute("selected");
<del>
<del> this.selectedPopupElement = el;
<del> this.selectedPopupElement.setAttribute("selected", "true");
<del> this.completions.index = el.completionIndex;
<del> this.completionBox.value = this.getCompletionBoxValue();
<del> };
<del>
<del> this.popupClick = function(event)
<del> {
<del> var el = this.getCompletionPopupElementFromEvent(event);
<del> if (!el)
<del> return;
<del>
<del> this.completions.index = el.completionIndex;
<del> this.acceptCompletion();
<del> };
<del>
<del> this.popupMousedown = Obj.bind(this.popupMousedown, this);
<del> this.popupClick = Obj.bind(this.popupClick, this);
<del>
<del> /**
<del> * A destructor function, to be called when the auto-completer is destroyed.
<del> */
<del> this.shutdown = function()
<del> {
<del> this.completionBox.value = "";
<del>
<del> Events.removeEventListener(this.completionPopup, "mousedown", this.popupMousedown, true);
<del> Events.removeEventListener(this.completionPopup, "click", this.popupClick, true);
<del> };
<del>
<del> Events.addEventListener(this.completionPopup, "mousedown", this.popupMousedown, true);
<del> Events.addEventListener(this.completionPopup, "click", this.popupClick, true);
<ide> };
<ide>
<ide> /**
<ide> }
<ide> }
<ide> return false;
<del>}
<del>
<del>function adjustCompletionOnAccept(preParsed, preExpr, property)
<del>{
<del> var res = preParsed + preExpr + property;
<del>
<del> // Don't adjust index completions.
<del> if (/^\[['"]$/.test(preExpr.slice(-2)))
<del> return res;
<del>
<del> // Nor completions of scoped variables.
<del> if (preExpr.slice(-2) === ".%")
<del> return res;
<del>
<del> if (!isValidProperty(property))
<del> {
<del> // The property name is actually invalid in free form, so replace
<del> // it with array syntax.
<del>
<del> if (preExpr)
<del> {
<del> res = preParsed + preExpr.slice(0, -1);
<del> }
<del> else
<del> {
<del> // Global variable access - assume the variable is a member of 'window'.
<del> res = preParsed + "window";
<del> }
<del> res += '["' + Str.escapeJS(property) + '"]';
<del> }
<del> return res;
<ide> }
<ide>
<ide> // Types the autocompletion knows about, some of their non-enumerable properties,
<ide> return keys; // return is safe
<ide> }
<ide>
<del>function setCursorToEOL(input)
<del>{
<del> // textbox version, https://developer.mozilla.org/en/XUL/Property/inputField
<del> // input.inputField.setSelectionRange(len, len);
<del> input.setSelectionRange(input.value.length, input.value.length);
<del>}
<del>
<ide> // ********************************************************************************************* //
<ide> // Registration
<ide> |
|
Java | apache-2.0 | e2196f7bd683b03ad968f8d09c21a15e07963736 | 0 | phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida | package ca.corefacility.bioinformatics.irida.model.workflow.submission;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Date;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.EntityListeners;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyColumn;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.UniqueConstraint;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.hibernate.annotations.Type;
import org.hibernate.envers.Audited;
import org.hibernate.envers.NotAudited;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import ca.corefacility.bioinformatics.irida.exceptions.AnalysisAlreadySetException;
import ca.corefacility.bioinformatics.irida.model.IridaResourceSupport;
import ca.corefacility.bioinformatics.irida.model.IridaThing;
import ca.corefacility.bioinformatics.irida.model.enums.AnalysisCleanedState;
import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState;
import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.RemoteSequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.RemoteSequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.user.User;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* Defines a submission to an AnalysisService for executing a remote workflow.
*/
@Entity
@Table(name = "analysis_submission")
@Inheritance(strategy = InheritanceType.JOINED)
@Audited
@EntityListeners(AuditingEntityListener.class)
public class AnalysisSubmission extends IridaResourceSupport implements IridaThing, Comparable<AnalysisSubmission> {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id")
private Long id;
@NotNull
@Size(min = 3)
@Column(name = "name")
private String name;
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.DETACH, optional = false)
@JoinColumn(name = "submitter", nullable = false)
private User submitter;
/**
* Defines the id of an installed workflow in IRIDA for performing this
* analysis.
*/
@NotNull
@Column(name = "workflow_id")
@Type(type = "uuid-char")
private UUID workflowId;
/**
* Defines the remote id for the location where an analysis was run. With
* Galaxy this represents the History id.
*/
@Column(name = "remote_analysis_id")
private String remoteAnalysisId;
/**
* Defines the remote id for a location where input data can be uploaded to
* for an analysis.
*/
@Column(name = "remote_input_data_id")
private String remoteInputDataId;
/**
* Defines the remote id of the workflow being executed. With Galaxy this
* represents the Workflow id.
*/
@Column(name = "remote_workflow_id")
private String remoteWorkflowId;
@ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinTable(name = "analysis_submission_sequence_file_single", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "sequence_file_id", nullable = false))
private Set<SequenceFile> inputFilesSingle;
@ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinTable(name = "analysis_submission_sequence_file_pair", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "sequence_file_pair_id", nullable = false))
private Set<SequenceFilePair> inputFilesPaired;
@ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinTable(name = "analysis_submission_remote_file_single", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "remote_file_id", nullable = false))
private Set<RemoteSequenceFile> remoteFilesSingle;
@ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinTable(name = "analysis_submission_remote_file_pair", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "remote_file_pair_id", nullable = false))
private Set<RemoteSequenceFilePair> remoteFilesPaired;
@ElementCollection(fetch = FetchType.EAGER)
@MapKeyColumn(name = "name", nullable = false)
@Column(name = "value", nullable = false)
@CollectionTable(name = "analysis_submission_parameters", joinColumns = @JoinColumn(name = "id"), uniqueConstraints = @UniqueConstraint(columnNames = {
"id", "name" }, name = "UK_ANALYSIS_SUBMISSION_PARAMETER_NAME"))
private Map<String, String> inputParameters;
@CreatedDate
@NotNull
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "created_date", nullable = false)
private final Date createdDate;
@LastModifiedDate
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "modified_date")
private Date modifiedDate;
@NotNull
@Enumerated(EnumType.STRING)
@Column(name = "analysis_state")
private AnalysisState analysisState;
@NotNull
@Enumerated(EnumType.STRING)
@Column(name = "analysis_cleaned_state")
private AnalysisCleanedState analysisCleanedState;
// Analysis entity for this analysis submission. Cascading everything except
// removals
@OneToOne(fetch = FetchType.EAGER, cascade = { CascadeType.DETACH, CascadeType.MERGE, CascadeType.PERSIST,
CascadeType.REFRESH })
@JoinColumn(name = "analysis_id")
@NotAudited
private Analysis analysis;
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinColumn(name = "reference_file_id")
private ReferenceFile referenceFile;
@NotAudited
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinColumn(name = "named_parameters_id")
private IridaWorkflowNamedParameters namedParameters;
/**
 * Creates a new {@link AnalysisSubmission} with the created date set to
 * now and the state fields initialized to {@link AnalysisState#NEW} and
 * {@link AnalysisCleanedState#NOT_CLEANED}. Protected for use by JPA and
 * the {@link Builder} constructor.
 */
protected AnalysisSubmission() {
	this.createdDate = new Date();
	this.analysisState = AnalysisState.NEW;
	this.analysisCleanedState = AnalysisCleanedState.NOT_CLEANED;
}
/**
 * Builds a new {@link AnalysisSubmission} with the given {@link Builder}.
 * Requires a workflow id and at least one of the single-end or paired-end
 * local input file sets; all unset collections default to empty.
 * 
 * @param builder
 *            The {@link Builder} to build the {@link AnalysisSubmission}.
 */
public AnalysisSubmission(Builder builder) {
	this();
	checkNotNull(builder.workflowId, "workflowId is null");
	checkArgument(builder.inputFilesSingle != null || builder.inputFilesPaired != null,
			"both inputFilesSingle and inputFilesPaired are null. You must supply at least one set of input files");

	this.name = (builder.name != null) ? builder.name : "Unknown";
	this.inputFilesSingle = (builder.inputFilesSingle != null) ? builder.inputFilesSingle : Sets.newHashSet();
	this.inputFilesPaired = (builder.inputFilesPaired != null) ? builder.inputFilesPaired : Sets.newHashSet();
	this.inputParameters = (builder.inputParameters != null) ? ImmutableMap.copyOf(builder.inputParameters)
			: ImmutableMap.of();
	this.referenceFile = builder.referenceFile;
	this.workflowId = builder.workflowId;
	this.namedParameters = builder.namedParameters;
	// Default the remote file sets to empty collections when unset, for
	// consistency with the local input file sets above; this prevents
	// NullPointerExceptions when callers iterate the remote file sets.
	this.remoteFilesSingle = (builder.remoteFilesSingle != null) ? builder.remoteFilesSingle : Sets.newHashSet();
	this.remoteFilesPaired = (builder.remoteFilesPaired != null) ? builder.remoteFilesPaired : Sets.newHashSet();
}
/**
 * Sets the reference file.
 * 
 * @param referenceFile
 *            The reference file, or {@code null} to clear it.
 */
public void setReferenceFile(ReferenceFile referenceFile) {
	this.referenceFile = referenceFile;
}

/**
 * Gets the ReferenceFile.
 * 
 * @return An {@link Optional} containing the ReferenceFile, or
 *         {@link Optional#empty()} if no reference file has been set.
 */
@JsonIgnore
public Optional<ReferenceFile> getReferenceFile() {
	return (referenceFile != null) ? Optional.of(referenceFile) : Optional.empty();
}

/**
 * Gets an analysis id for this workflow. With Galaxy this represents the
 * History id (see the field documentation); may be {@code null} if not yet
 * assigned.
 * 
 * @return An analysis id for this workflow.
 */
@JsonIgnore
public String getRemoteAnalysisId() {
	return remoteAnalysisId;
}
/**
 * Gets the set of single-end input sequence files.
 * 
 * @return The set of single-end input sequence files.
 */
@JsonIgnore
public Set<SequenceFile> getSingleInputFiles() {
	return inputFilesSingle;
}

/**
 * Gets the set of paired-end input sequence files.
 * 
 * @return The set of paired-end input sequence files.
 */
@JsonIgnore
public Set<SequenceFilePair> getPairedInputFiles() {
	return inputFilesPaired;
}

/**
 * Gets the id of a remote location to store input files.
 * 
 * @return The id of a remote location to store input files, or
 *         {@code null} if none has been assigned yet.
 */
public String getRemoteInputDataId() {
	return remoteInputDataId;
}
/**
 * Sets the id of a remote location to store input files.
 * 
 * @param remoteInputDataId
 *            The id of a remote location to store input files.
 */
public void setRemoteInputDataId(String remoteInputDataId) {
	this.remoteInputDataId = remoteInputDataId;
}

/**
 * Sets the remote analysis id (the Galaxy History id — see the field
 * documentation).
 * 
 * @param remoteAnalysisId
 *            The remote analysis id to set.
 */
public void setRemoteAnalysisId(String remoteAnalysisId) {
	this.remoteAnalysisId = remoteAnalysisId;
}

/**
 * Gets the remote workflow id (the Galaxy Workflow id — see the field
 * documentation).
 * 
 * @return The remote workflow id.
 */
public String getRemoteWorkflowId() {
	return remoteWorkflowId;
}

/**
 * Sets the remote workflow id.
 * 
 * @param remoteWorkflowId
 *            The remote workflow id.
 */
public void setRemoteWorkflowId(String remoteWorkflowId) {
	this.remoteWorkflowId = remoteWorkflowId;
}
/**
 * Gets the state of this analysis.
 * 
 * @return The state of this analysis.
 */
public AnalysisState getAnalysisState() {
	return analysisState;
}

/**
 * Sets the state of this analysis.
 * 
 * @param analysisState
 *            The state of this analysis.
 */
public void setAnalysisState(AnalysisState analysisState) {
	this.analysisState = analysisState;
}

/**
 * {@inheritDoc} The created date is assigned once at construction time.
 */
@Override
public Date getCreatedDate() {
	return createdDate;
}

/**
 * {@inheritDoc}
 */
@Override
public Date getModifiedDate() {
	return modifiedDate;
}

/**
 * {@inheritDoc} Normally maintained automatically via
 * {@link AuditingEntityListener}.
 */
@Override
public void setModifiedDate(Date modifiedDate) {
	this.modifiedDate = modifiedDate;
}

/**
 * {@inheritDoc} The label for a submission is its name.
 */
@Override
public String getLabel() {
	return name;
}

/**
 * {@inheritDoc}
 */
@Override
public Long getId() {
	return id;
}

/**
 * Sets the database identifier for this submission.
 * 
 * @param id
 *            the identifier to set.
 */
public void setId(Long id) {
	this.id = id;
}
/**
 * Gets the {@link Analysis} produced by this submission.
 * 
 * @return the analysis, or {@code null} if it has not been set yet.
 */
@JsonIgnore
public Analysis getAnalysis() {
	return analysis;
}

/**
 * Gets the {@link User} who submitted this analysis.
 * 
 * @return the submitting {@link User}.
 */
@JsonIgnore
public User getSubmitter() {
	return submitter;
}

/**
 * Sets the {@link User} who is submitting this analysis.
 * 
 * @param submitter
 *            The {@link User} who is submitting this analysis; must not be
 *            {@code null}.
 */
public void setSubmitter(User submitter) {
	checkNotNull(submitter, "the submitter is null");
	this.submitter = submitter;
}
/**
* Set the {@link Analysis} generated as a result of this submission. Note:
* {@link AnalysisSubmission#setAnalysis(Analysis)} can only be set
* **once**; if the current {@link Analysis} is non-null, then this method
* will throw a {@link AnalysisAlreadySetException}.
*
* @param analysis
* the analysis to set
* @throws AnalysisAlreadySetException
* if the {@link Analysis} reference has already been created
* for this submission.
*/
public void setAnalysis(Analysis analysis) throws AnalysisAlreadySetException {
if (this.analysis == null) {
this.analysis = analysis;
} else {
throw new AnalysisAlreadySetException("The analysis has already been set for this submission.");
}
}
@Override
public String toString() {
String userName = (submitter == null) ? "null" : submitter.getUsername();
return "AnalysisSubmission [id=" + id + ", name=" + name + ", submitter=" + userName + ", workflowId="
+ workflowId + ", analysisState=" + analysisState + ", analysisCleanedState=" + analysisCleanedState
+ "]";
}
/**
* @return The {@link AnalysisCleanedState}.
*/
public AnalysisCleanedState getAnalysisCleanedState() {
return analysisCleanedState;
}
/**
* Sets the {@link AnalysisCleanedState}.
*
* @param analysisCleanedState
* The {@link AnalysisCleanedState}.
*/
public void setAnalysisCleanedState(AnalysisCleanedState analysisCleanedState) {
this.analysisCleanedState = analysisCleanedState;
}
/**
* @return the name
*/
public String getName() {
return name;
}
/**
* @param name
* the name to set
*/
public void setName(String name) {
this.name = name;
}
/**
* Gets the id of the implementing workflow for this analysis.
*
* @return The id of the implementing workflow for this analysis.
*/
public UUID getWorkflowId() {
return workflowId;
}
/**
* Sets the id of the workflow for this analysis.
*
* @param workflowId
* The id of the workflow for this analysis.
*/
public void setWorkflowId(UUID workflowId) {
this.workflowId = workflowId;
}
/**
* Gets the input parameters for this submission.
*
* @return The input parameters for this submission.
*/
public Map<String, String> getInputParameters() {
if (this.namedParameters != null) {
return this.namedParameters.getInputParameters();
} else {
return inputParameters;
}
}
/**
* Get the named parameters object used to build this submission.
*
* @return The {@link IridaWorkflowNamedParameters} for this submission.
*/
@JsonIgnore
public final IridaWorkflowNamedParameters getNamedParameters() {
return namedParameters;
}
/**
* Used to build up an {@link AnalysisSubmission}.
*
*/
public static class Builder {
private String name;
private Set<SequenceFile> inputFilesSingle;
private Set<SequenceFilePair> inputFilesPaired;
private Set<RemoteSequenceFile> remoteFilesSingle;
private Set<RemoteSequenceFilePair> remoteFilesPaired;
private ReferenceFile referenceFile;
private UUID workflowId;
private Map<String, String> inputParameters;
private IridaWorkflowNamedParameters namedParameters;
/**
* Creates a new {@link Builder} with a workflow id.
*
* @param workflowId
* The workflow id for this submission.
*/
public Builder(UUID workflowId) {
checkNotNull(workflowId, "workflowId is null");
this.workflowId = workflowId;
this.inputParameters = Maps.newHashMap();
}
/**
* Sets a name for this submission.
*
* @param name
* A name for this submission.
* @return A {@link Builder}.
*/
public Builder name(String name) {
checkNotNull(name, "name is null");
this.name = name;
return this;
}
/**
* Sets the inputFilesSingle for this submission.
*
* @param inputFilesSingle
* The inputFilesSingle for this submission.
* @return A {@link Builder}.
*/
public Builder inputFilesSingle(Set<SequenceFile> inputFilesSingle) {
checkNotNull(inputFilesSingle, "inputFilesSingle is null");
checkArgument(!inputFilesSingle.isEmpty(), "inputFilesSingle is empty");
this.inputFilesSingle = inputFilesSingle;
return this;
}
/**
* Sets the inputFilesPaired for this submission.
*
* @param inputFilesPaired
* The inputFilesPaired for this submission.
* @return A {@link Builder}.
*/
public Builder inputFilesPaired(Set<SequenceFilePair> inputFilesPaired) {
checkNotNull(inputFilesPaired, "inputFilesPaired is null");
checkArgument(!inputFilesPaired.isEmpty(), "inputFilesPaired is empty");
this.inputFilesPaired = inputFilesPaired;
return this;
}
public Builder remoteFilesSingle(Set<RemoteSequenceFile> remoteFilesSingle){
checkNotNull(remoteFilesSingle, "remoteFilesSingle is null");
checkArgument(!remoteFilesSingle.isEmpty(), "remoteFilesSingle is empty");
this.remoteFilesSingle = remoteFilesSingle;
return this;
}
public Builder remoteFilesPaired(Set<RemoteSequenceFilePair> remoteFilesPaired){
checkNotNull(remoteFilesPaired, "remoteFilesPaired is null");
checkArgument(!remoteFilesPaired.isEmpty(), "remoteFilesPaired is empty");
this.remoteFilesPaired = remoteFilesPaired;
return this;
}
/**
* Sets the referenceFile for this submission.
*
* @param referenceFile
* The referenceFile for this submission.
* @return A {@link Builder}.
*/
public Builder referenceFile(ReferenceFile referenceFile) {
checkNotNull(referenceFile, "referenceFile is null");
this.referenceFile = referenceFile;
return this;
}
/**
* Sets the input parameters for this submission.
*
* @param inputParameters
* A map of parameters for this submission.
* @return A {@link Builder}.
*/
public Builder inputParameters(Map<String, String> inputParameters) {
checkNotNull(inputParameters, "inputParameters is null");
checkArgument(!inputParameters.isEmpty(), "inputParameters is empty");
if (namedParameters != null) {
throw new UnsupportedOperationException("You cannot change named parameters once set.");
}
this.inputParameters.clear();
this.inputParameters.putAll(inputParameters);
return this;
}
/**
* Adds an individual input parameter.
*
* @param name
* The name of the parameter.
* @param value
* The value of the parameter.
* @return A {@link Builder}.
*/
public Builder inputParameter(final String name, final String value) {
checkNotNull(name, "key is null");
checkNotNull(value, "value is null");
checkArgument(!inputParameters.containsKey(name), "key=" + name + " already exists as a parameter");
if (namedParameters != null) {
throw new UnsupportedOperationException("You cannot change named parameters once set.");
}
inputParameters.put(name, value);
return this;
}
/**
* Use the specified set of named parameters to run this workflow.
*
* @param parameters
* the named parameters to use.
* @return A {@link Builder}.
*/
public Builder withNamedParameters(final IridaWorkflowNamedParameters parameters) {
checkNotNull(parameters, "named parameters cannot be null.");
this.namedParameters = parameters;
return this;
}
public AnalysisSubmission build() {
checkArgument(inputFilesSingle != null || inputFilesPaired != null,
"both inputFilesSingle and inputFilesPaired are null. You must supply at least one set of input files");
return new AnalysisSubmission(this);
}
}
/**
* Gets a {@link Builder}.
*
* @param workflowId
* The id of the workflow to submit.
*
* @return A {@link Builder}.
*/
public static Builder builder(UUID workflowId) {
return new AnalysisSubmission.Builder(workflowId);
}
/**
* Whether or not a remoteAnalysisId exists for this submission.
*
* @return True if a remoteAnalysisId exists for this submission, false
* otherwise.
*/
public boolean hasRemoteAnalysisId() {
return remoteAnalysisId != null;
}
/**
* Whether or not a remoteWorkflowId exists for this submission.
*
* @return True if a remoteWorkflowId exists for this submission, false
* otherwise.
*/
public boolean hasRemoteWorkflowId() {
return remoteWorkflowId != null;
}
/**
* Whether or not a remoteInputDataId exists for this submission.
*
* @return True if a remoteInputDataId exists for this submission, false
* otherwise.
*/
public boolean hasRemoteInputDataId() {
return remoteInputDataId != null;
}
@Override
public int hashCode() {
return Objects.hash(name, workflowId, remoteAnalysisId, remoteInputDataId, remoteWorkflowId, inputFilesSingle,
inputFilesPaired, createdDate, modifiedDate, analysisState, analysisCleanedState, analysis,
referenceFile, namedParameters, submitter);
}
@Override
public boolean equals(Object other) {
if (other instanceof AnalysisSubmission) {
AnalysisSubmission p = (AnalysisSubmission) other;
return Objects.equals(createdDate, p.createdDate) && Objects.equals(modifiedDate, p.modifiedDate)
&& Objects.equals(name, p.name) && Objects.equals(workflowId, p.workflowId)
&& Objects.equals(remoteAnalysisId, p.remoteAnalysisId)
&& Objects.equals(remoteInputDataId, p.remoteInputDataId)
&& Objects.equals(remoteWorkflowId, p.remoteWorkflowId)
&& Objects.equals(inputFilesSingle, p.inputFilesSingle)
&& Objects.equals(inputFilesPaired, p.inputFilesPaired)
&& Objects.equals(analysisState, p.analysisState)
&& Objects.equals(analysisCleanedState, p.analysisCleanedState)
&& Objects.equals(referenceFile, p.referenceFile)
&& Objects.equals(namedParameters, p.namedParameters) && Objects.equals(submitter, p.submitter);
}
return false;
}
@Override
public int compareTo(AnalysisSubmission o) {
return modifiedDate.compareTo(o.modifiedDate);
}
}
| src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java | package ca.corefacility.bioinformatics.irida.model.workflow.submission;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Date;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.EntityListeners;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyColumn;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.UniqueConstraint;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.hibernate.annotations.Type;
import org.hibernate.envers.Audited;
import org.hibernate.envers.NotAudited;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import ca.corefacility.bioinformatics.irida.exceptions.AnalysisAlreadySetException;
import ca.corefacility.bioinformatics.irida.model.IridaResourceSupport;
import ca.corefacility.bioinformatics.irida.model.IridaThing;
import ca.corefacility.bioinformatics.irida.model.enums.AnalysisCleanedState;
import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState;
import ca.corefacility.bioinformatics.irida.model.project.Project;
import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.user.User;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* Defines a submission to an AnalysisService for executing a remote workflow.
*/
@Entity
@Table(name = "analysis_submission")
@Inheritance(strategy = InheritanceType.JOINED)
@Audited
@EntityListeners(AuditingEntityListener.class)
public class AnalysisSubmission extends IridaResourceSupport implements IridaThing, Comparable<AnalysisSubmission> {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id")
private Long id;
@NotNull
@Size(min = 3)
@Column(name = "name")
private String name;
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.DETACH, optional = false)
@JoinColumn(name = "submitter", nullable = false)
private User submitter;
/**
* Defines the id of an installed workflow in IRIDA for performing this
* analysis.
*/
@NotNull
@Column(name = "workflow_id")
@Type(type = "uuid-char")
private UUID workflowId;
/**
* Defines the remote id for the location where an analysis was run. With
* Galaxy this represents the History id.
*/
@Column(name = "remote_analysis_id")
private String remoteAnalysisId;
/**
* Defines the remote id for a location where input data can be uploaded to
* for an analysis.
*/
@Column(name = "remote_input_data_id")
private String remoteInputDataId;
/**
* Defines the remote id of the workflow being executed. With Galaxy this
* represents the Workflow id.
*/
@Column(name = "remote_workflow_id")
private String remoteWorkflowId;
@ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinTable(name = "analysis_submission_sequence_file_single", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "sequence_file_id", nullable = false))
private Set<SequenceFile> inputFilesSingle;
@ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinTable(name = "analysis_submission_sequence_file_pair", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "sequence_file_pair_id", nullable = false))
private Set<SequenceFilePair> inputFilesPaired;
@ElementCollection(fetch = FetchType.EAGER)
@MapKeyColumn(name = "name", nullable = false)
@Column(name = "value", nullable = false)
@CollectionTable(name = "analysis_submission_parameters", joinColumns = @JoinColumn(name = "id"), uniqueConstraints = @UniqueConstraint(columnNames = {
"id", "name" }, name = "UK_ANALYSIS_SUBMISSION_PARAMETER_NAME"))
private Map<String, String> inputParameters;
@CreatedDate
@NotNull
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "created_date", nullable = false)
private final Date createdDate;
@LastModifiedDate
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "modified_date")
private Date modifiedDate;
@NotNull
@Enumerated(EnumType.STRING)
@Column(name = "analysis_state")
private AnalysisState analysisState;
@NotNull
@Enumerated(EnumType.STRING)
@Column(name = "analysis_cleaned_state")
private AnalysisCleanedState analysisCleanedState;
// Analysis entity for this analysis submission. Cascading everything except
// removals
@OneToOne(fetch = FetchType.EAGER, cascade = { CascadeType.DETACH, CascadeType.MERGE, CascadeType.PERSIST,
CascadeType.REFRESH })
@JoinColumn(name = "analysis_id")
@NotAudited
private Analysis analysis;
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinColumn(name = "reference_file_id")
private ReferenceFile referenceFile;
@NotAudited
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinColumn(name = "named_parameters_id")
private IridaWorkflowNamedParameters namedParameters;
protected AnalysisSubmission() {
this.createdDate = new Date();
this.analysisState = AnalysisState.NEW;
this.analysisCleanedState = AnalysisCleanedState.NOT_CLEANED;
}
/**
* Builds a new {@link AnalysisSubmission} with the given {@link Builder}.
*
* @param builder
* The {@link Builder} to build the {@link AnalysisSubmission}.
*/
public AnalysisSubmission(Builder builder) {
this();
checkNotNull(builder.workflowId, "workflowId is null");
checkArgument(builder.inputFilesSingle != null || builder.inputFilesPaired != null,
"both inputFilesSingle and inputFilesPaired are null. You must supply at least one set of input files");
this.name = (builder.name != null) ? builder.name : "Unknown";
this.inputFilesSingle = (builder.inputFilesSingle != null) ? builder.inputFilesSingle : Sets.newHashSet();
this.inputFilesPaired = (builder.inputFilesPaired != null) ? builder.inputFilesPaired : Sets.newHashSet();
this.inputParameters = (builder.inputParameters != null) ? ImmutableMap.copyOf(builder.inputParameters)
: ImmutableMap.of();
this.referenceFile = builder.referenceFile;
this.workflowId = builder.workflowId;
this.namedParameters = builder.namedParameters;
}
/**
* Sets the reference file.
*
* @param referenceFile
* The reference file.
*/
public void setReferenceFile(ReferenceFile referenceFile) {
this.referenceFile = referenceFile;
}
/**
* Gets the ReferenceFile.
*
* @return The ReferenceFile.
*/
@JsonIgnore
public Optional<ReferenceFile> getReferenceFile() {
return (referenceFile != null) ? Optional.of(referenceFile) : Optional.empty();
}
/**
* Gets an analysis id for this workflow
*
* @return An analysis id for this workflow.
*/
@JsonIgnore
public String getRemoteAnalysisId() {
return remoteAnalysisId;
}
/**
* Gets the set of single-end input sequence files.
*
* @return The set of single-end input sequence files.
*/
@JsonIgnore
public Set<SequenceFile> getSingleInputFiles() {
return inputFilesSingle;
}
/**
* Gets the set of paired-end input sequence files.
*
* @return The set of paired-end input sequence files.
*/
@JsonIgnore
public Set<SequenceFilePair> getPairedInputFiles() {
return inputFilesPaired;
}
/**
* Gets the id of a remote location to store input files.
*
* @return The id of a remote location to store input files.
*/
public String getRemoteInputDataId() {
return remoteInputDataId;
}
/**
* Sets the id of a remote location to store input files.
*
* @param remoteInputDataId
* The id of a remote location to store input files.
*/
public void setRemoteInputDataId(String remoteInputDataId) {
this.remoteInputDataId = remoteInputDataId;
}
/**
* Sets the remote analysis id.
*
* @param remoteAnalysisId
* The remote analysis id to set.
*/
public void setRemoteAnalysisId(String remoteAnalysisId) {
this.remoteAnalysisId = remoteAnalysisId;
}
/**
* Gets the remote workflow id.
*
* @return The remote workflow id.
*/
public String getRemoteWorkflowId() {
return remoteWorkflowId;
}
/**
* Sets the remote workflow id.
*
* @param remoteWorkflowId
* The remote workflow id.
*/
public void setRemoteWorkflowId(String remoteWorkflowId) {
this.remoteWorkflowId = remoteWorkflowId;
}
/**
* Gets the state of this analysis.
*
* @return The state of this analysis.
*/
public AnalysisState getAnalysisState() {
return analysisState;
}
/**
* Sets the state of this analysis.
*
* @param analysisState
* The state of this analysis.
*/
public void setAnalysisState(AnalysisState analysisState) {
this.analysisState = analysisState;
}
@Override
public Date getCreatedDate() {
return createdDate;
}
@Override
public Date getModifiedDate() {
return modifiedDate;
}
@Override
public void setModifiedDate(Date modifiedDate) {
this.modifiedDate = modifiedDate;
}
@Override
public String getLabel() {
return name;
}
@Override
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
/**
* @return the analysis
*/
@JsonIgnore
public Analysis getAnalysis() {
return analysis;
}
@JsonIgnore
public User getSubmitter() {
return submitter;
}
/**
* Sets the {@link User} who is submitting this analysis.
*
* @param submitter
* The {@link User} who is submitting this analysis.
*/
public void setSubmitter(User submitter) {
checkNotNull(submitter, "the submitter is null");
this.submitter = submitter;
}
/**
* Set the {@link Analysis} generated as a result of this submission. Note:
* {@link AnalysisSubmission#setAnalysis(Analysis)} can only be set
* **once**; if the current {@link Analysis} is non-null, then this method
* will throw a {@link AnalysisAlreadySetException}.
*
* @param analysis
* the analysis to set
* @throws AnalysisAlreadySetException
* if the {@link Analysis} reference has already been created
* for this submission.
*/
public void setAnalysis(Analysis analysis) throws AnalysisAlreadySetException {
if (this.analysis == null) {
this.analysis = analysis;
} else {
throw new AnalysisAlreadySetException("The analysis has already been set for this submission.");
}
}
@Override
public String toString() {
String userName = (submitter == null) ? "null" : submitter.getUsername();
return "AnalysisSubmission [id=" + id + ", name=" + name + ", submitter=" + userName + ", workflowId="
+ workflowId + ", analysisState=" + analysisState + ", analysisCleanedState=" + analysisCleanedState
+ "]";
}
/**
* @return The {@link AnalysisCleanedState}.
*/
public AnalysisCleanedState getAnalysisCleanedState() {
return analysisCleanedState;
}
/**
* Sets the {@link AnalysisCleanedState}.
*
* @param analysisCleanedState
* The {@link AnalysisCleanedState}.
*/
public void setAnalysisCleanedState(AnalysisCleanedState analysisCleanedState) {
this.analysisCleanedState = analysisCleanedState;
}
/**
* @return the name
*/
public String getName() {
return name;
}
/**
* @param name
* the name to set
*/
public void setName(String name) {
this.name = name;
}
/**
* Gets the id of the implementing workflow for this analysis.
*
* @return The id of the implementing workflow for this analysis.
*/
public UUID getWorkflowId() {
return workflowId;
}
/**
* Sets the id of the workflow for this analysis.
*
* @param workflowId
* The id of the workflow for this analysis.
*/
public void setWorkflowId(UUID workflowId) {
this.workflowId = workflowId;
}
/**
* Gets the input parameters for this submission.
*
* @return The input parameters for this submission.
*/
public Map<String, String> getInputParameters() {
if (this.namedParameters != null) {
return this.namedParameters.getInputParameters();
} else {
return inputParameters;
}
}
/**
* Get the named parameters object used to build this submission.
*
* @return The {@link IridaWorkflowNamedParameters} for this submission.
*/
@JsonIgnore
public final IridaWorkflowNamedParameters getNamedParameters() {
return namedParameters;
}
/**
* Used to build up an {@link AnalysisSubmission}.
*
*/
public static class Builder {
private String name;
private Set<SequenceFile> inputFilesSingle;
private Set<SequenceFilePair> inputFilesPaired;
private ReferenceFile referenceFile;
private UUID workflowId;
private Map<String, String> inputParameters;
private IridaWorkflowNamedParameters namedParameters;
/**
* Creates a new {@link Builder} with a workflow id.
*
* @param workflowId
* The workflow id for this submission.
*/
public Builder(UUID workflowId) {
checkNotNull(workflowId, "workflowId is null");
this.workflowId = workflowId;
this.inputParameters = Maps.newHashMap();
}
/**
* Sets a name for this submission.
*
* @param name
* A name for this submission.
* @return A {@link Builder}.
*/
public Builder name(String name) {
checkNotNull(name, "name is null");
this.name = name;
return this;
}
/**
* Sets the inputFilesSingle for this submission.
*
* @param inputFilesSingle
* The inputFilesSingle for this submission.
* @return A {@link Builder}.
*/
public Builder inputFilesSingle(Set<SequenceFile> inputFilesSingle) {
checkNotNull(inputFilesSingle, "inputFilesSingle is null");
checkArgument(!inputFilesSingle.isEmpty(), "inputFilesSingle is empty");
this.inputFilesSingle = inputFilesSingle;
return this;
}
/**
* Sets the inputFilesPaired for this submission.
*
* @param inputFilesPaired
* The inputFilesPaired for this submission.
* @return A {@link Builder}.
*/
public Builder inputFilesPaired(Set<SequenceFilePair> inputFilesPaired) {
checkNotNull(inputFilesPaired, "inputFilesPaired is null");
checkArgument(!inputFilesPaired.isEmpty(), "inputFilesPaired is empty");
this.inputFilesPaired = inputFilesPaired;
return this;
}
/**
* Sets the referenceFile for this submission.
*
* @param referenceFile
* The referenceFile for this submission.
* @return A {@link Builder}.
*/
public Builder referenceFile(ReferenceFile referenceFile) {
checkNotNull(referenceFile, "referenceFile is null");
this.referenceFile = referenceFile;
return this;
}
/**
* Sets the input parameters for this submission.
*
* @param inputParameters
* A map of parameters for this submission.
* @return A {@link Builder}.
*/
public Builder inputParameters(Map<String, String> inputParameters) {
checkNotNull(inputParameters, "inputParameters is null");
checkArgument(!inputParameters.isEmpty(), "inputParameters is empty");
if (namedParameters != null) {
throw new UnsupportedOperationException("You cannot change named parameters once set.");
}
this.inputParameters.clear();
this.inputParameters.putAll(inputParameters);
return this;
}
/**
* Adds an individual input parameter.
*
* @param name
* The name of the parameter.
* @param value
* The value of the parameter.
* @return A {@link Builder}.
*/
public Builder inputParameter(final String name, final String value) {
checkNotNull(name, "key is null");
checkNotNull(value, "value is null");
checkArgument(!inputParameters.containsKey(name), "key=" + name + " already exists as a parameter");
if (namedParameters != null) {
throw new UnsupportedOperationException("You cannot change named parameters once set.");
}
inputParameters.put(name, value);
return this;
}
/**
* Use the specified set of named parameters to run this workflow.
*
* @param parameters
* the named parameters to use.
* @return A {@link Builder}.
*/
public Builder withNamedParameters(final IridaWorkflowNamedParameters parameters) {
checkNotNull(parameters, "named parameters cannot be null.");
this.namedParameters = parameters;
return this;
}
public AnalysisSubmission build() {
checkArgument(inputFilesSingle != null || inputFilesPaired != null,
"both inputFilesSingle and inputFilesPaired are null. You must supply at least one set of input files");
return new AnalysisSubmission(this);
}
}
/**
* Gets a {@link Builder}.
*
* @param workflowId
* The id of the workflow to submit.
*
* @return A {@link Builder}.
*/
public static Builder builder(UUID workflowId) {
return new AnalysisSubmission.Builder(workflowId);
}
/**
* Whether or not a remoteAnalysisId exists for this submission.
*
* @return True if a remoteAnalysisId exists for this submission, false
* otherwise.
*/
public boolean hasRemoteAnalysisId() {
return remoteAnalysisId != null;
}
/**
* Whether or not a remoteWorkflowId exists for this submission.
*
* @return True if a remoteWorkflowId exists for this submission, false
* otherwise.
*/
public boolean hasRemoteWorkflowId() {
return remoteWorkflowId != null;
}
/**
* Whether or not a remoteInputDataId exists for this submission.
*
* @return True if a remoteInputDataId exists for this submission, false
* otherwise.
*/
public boolean hasRemoteInputDataId() {
return remoteInputDataId != null;
}
@Override
public int hashCode() {
return Objects.hash(name, workflowId, remoteAnalysisId, remoteInputDataId, remoteWorkflowId, inputFilesSingle,
inputFilesPaired, createdDate, modifiedDate, analysisState, analysisCleanedState, analysis,
referenceFile, namedParameters, submitter);
}
@Override
public boolean equals(Object other) {
if (other instanceof AnalysisSubmission) {
AnalysisSubmission p = (AnalysisSubmission) other;
return Objects.equals(createdDate, p.createdDate) && Objects.equals(modifiedDate, p.modifiedDate)
&& Objects.equals(name, p.name) && Objects.equals(workflowId, p.workflowId)
&& Objects.equals(remoteAnalysisId, p.remoteAnalysisId)
&& Objects.equals(remoteInputDataId, p.remoteInputDataId)
&& Objects.equals(remoteWorkflowId, p.remoteWorkflowId)
&& Objects.equals(inputFilesSingle, p.inputFilesSingle)
&& Objects.equals(inputFilesPaired, p.inputFilesPaired)
&& Objects.equals(analysisState, p.analysisState)
&& Objects.equals(analysisCleanedState, p.analysisCleanedState)
&& Objects.equals(referenceFile, p.referenceFile)
&& Objects.equals(namedParameters, p.namedParameters) && Objects.equals(submitter, p.submitter);
}
return false;
}
@Override
public int compareTo(AnalysisSubmission o) {
return modifiedDate.compareTo(o.modifiedDate);
}
}
| added remote data to src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java
| src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java | added remote data to src/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java | <ide><path>rc/main/java/ca/corefacility/bioinformatics/irida/model/workflow/submission/AnalysisSubmission.java
<ide> import ca.corefacility.bioinformatics.irida.model.IridaThing;
<ide> import ca.corefacility.bioinformatics.irida.model.enums.AnalysisCleanedState;
<ide> import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState;
<del>import ca.corefacility.bioinformatics.irida.model.project.Project;
<ide> import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile;
<add>import ca.corefacility.bioinformatics.irida.model.sequenceFile.RemoteSequenceFile;
<add>import ca.corefacility.bioinformatics.irida.model.sequenceFile.RemoteSequenceFilePair;
<ide> import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
<ide> import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
<ide> import ca.corefacility.bioinformatics.irida.model.user.User;
<ide> @ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
<ide> @JoinTable(name = "analysis_submission_sequence_file_pair", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "sequence_file_pair_id", nullable = false))
<ide> private Set<SequenceFilePair> inputFilesPaired;
<add>
<add> @ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
<add> @JoinTable(name = "analysis_submission_remote_file_single", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "remote_file_id", nullable = false))
<add> private Set<RemoteSequenceFile> remoteFilesSingle;
<add>
<add> @ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
<add> @JoinTable(name = "analysis_submission_remote_file_pair", joinColumns = @JoinColumn(name = "analysis_submission_id", nullable = false), inverseJoinColumns = @JoinColumn(name = "remote_file_pair_id", nullable = false))
<add> private Set<RemoteSequenceFilePair> remoteFilesPaired;
<ide>
<ide> @ElementCollection(fetch = FetchType.EAGER)
<ide> @MapKeyColumn(name = "name", nullable = false)
<ide> this.referenceFile = builder.referenceFile;
<ide> this.workflowId = builder.workflowId;
<ide> this.namedParameters = builder.namedParameters;
<add> this.remoteFilesSingle = builder.remoteFilesSingle;
<add> this.remoteFilesPaired = builder.remoteFilesPaired;
<ide> }
<ide>
<ide> /**
<ide> private String name;
<ide> private Set<SequenceFile> inputFilesSingle;
<ide> private Set<SequenceFilePair> inputFilesPaired;
<add> private Set<RemoteSequenceFile> remoteFilesSingle;
<add> private Set<RemoteSequenceFilePair> remoteFilesPaired;
<ide> private ReferenceFile referenceFile;
<ide> private UUID workflowId;
<ide> private Map<String, String> inputParameters;
<ide> checkArgument(!inputFilesPaired.isEmpty(), "inputFilesPaired is empty");
<ide>
<ide> this.inputFilesPaired = inputFilesPaired;
<add> return this;
<add> }
<add>
<add> public Builder remoteFilesSingle(Set<RemoteSequenceFile> remoteFilesSingle){
<add> checkNotNull(remoteFilesSingle, "remoteFilesSingle is null");
<add> checkArgument(!remoteFilesSingle.isEmpty(), "remoteFilesSingle is empty");
<add>
<add> this.remoteFilesSingle = remoteFilesSingle;
<add> return this;
<add> }
<add>
<add> public Builder remoteFilesPaired(Set<RemoteSequenceFilePair> remoteFilesPaired){
<add> checkNotNull(remoteFilesPaired, "remoteFilesPaired is null");
<add> checkArgument(!remoteFilesPaired.isEmpty(), "remoteFilesPaired is empty");
<add>
<add> this.remoteFilesPaired = remoteFilesPaired;
<ide> return this;
<ide> }
<ide> |
|
Java | mit | 644802f27de178ec6b054d80ad5f4b00e43d13cf | 0 | EcoGame/Eco,EcoGame/Eco | package eco;
import org.lwjgl.opengl.GL11;
import org.newdawn.slick.Color;
/**
* This class displays graphs of various things
*
* @author nate
*
*/
public class Graphs {
private static int size = 75;
private static int[] pops = new int[size];
private static int[] wheats = new int[size];
private static float[] moneys = new float[size];
public static void draw(int year, int population, int wheat, float money) {
int y = 740;
int x = 1225;
int shift = 100;
int perGraphShift = 80;
int labelDistance = 60;
int num = 0;
int height = 90;
int maxOne = 0;
int maxTwo = 0;
int maxThree = 0;
for (int i = 0; i < pops.length; i++) {
maxOne = (int) Math.max(maxOne, pops[i]);
}
for (int i = 0; i < wheats.length; i++) {
maxTwo = (int) Math.max(maxTwo, wheats[i]);
}
for (int i = 0; i < moneys.length; i++) {
maxThree = (int) Math.max(maxThree, moneys[i]);
}
// World.messages.add(new Message(String.valueOf(maxOne), (x -
// (perGraphShift * num) - shift + size + 20), y - height - 100, 1));
// World.messages.add(new Message(String.valueOf(maxOne/2), (x -
// (perGraphShift * num) - shift + size + 20), y - height - 50, 1));
Render.font.drawString(
(x - (perGraphShift * num) - shift + size - 270), y - height
- 105, String.valueOf(maxThree),
new Color(41, 152, 104));
Render.font.drawString(
(x - (perGraphShift * num) - shift + size - 270), y - height
- 55, String.valueOf(maxThree / 2), new Color(41, 152,
104));
Render.font.drawString(
(x - (perGraphShift * num) - shift + size - 270), y - height
- 5, "0", new Color(41, 152, 104));
Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
y - height - 105, String.valueOf(maxOne),
new Color(1, 169, 212));
Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
y - height - 55, String.valueOf(maxOne / 2), new Color(1, 169,
212));
Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
y - height - 5, "0", new Color(1, 169, 212));
pops[74] = population;
GL11.glDisable(GL11.GL_TEXTURE_2D);
GL11.glEnable(GL11.GL_LINE_SMOOTH);
GL11.glLineWidth(3f);
GL11.glBegin(GL11.GL_LINE_STRIP);
for (int i = 0; i < pops.length - 1; i++) {
// World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
// num) - shift) + i, (y - shift) - (int) (prices[i] *
// ((float)height/maxOne)), 1));
//Render.font.drawString((x - (perGraphShift * num) - shift) + i,
// (y - shift) - (int) (pops[i] * ((float) height / maxOne)),
//"\u25A0 .", new Color(1, 169, 212));
GL11.glVertex2f((float)(x - (perGraphShift * num) - shift) + i,
(y - shift) - (pops[i] * ((float) height / maxOne)));
System.out.println((x - (perGraphShift * num) - shift) + i);
}
GL11.glEnd();
for (int i = 0; i < pops.length - 1; i++){
pops[i] = pops[i + 1];
}
num++;
wheats[74] = wheat;
GL11.glColor3f(238 / 255f,187 / 255f,66 / 255f);
for (int i = 0; i < wheats.length - 1; i++) {
// World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
// num) - shift) + i, (y - shift) - (int) (pop[i] *
// ((float)height/maxTwo)), 1));
GL11.glBegin(GL11.GL_LINES);
GL11.glVertex2f((x - (perGraphShift * num) - shift) + i,
(y - shift)
- (wheats[i] * ((float) height / maxTwo)));
GL11.glVertex2f((x - (perGraphShift * num) - shift) + i + 1,
(y - shift)
- (wheats[i + 1] * ((float) height / maxTwo)));
GL11.glEnd();
}
for (int i = 0; i < wheats.length - 1; i++) {
wheats[i] = wheats[i + 1];
}
num++;
moneys[74] = money;
GL11.glColor3f(42 / 255f, 152 / 255f, 104 / 255f);
for (int i = 0; i < moneys.length - 1; i++) {
// World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
// num) - shift) + i, (y - shift) - (int) (displaced[i] *
// ((float)height/maxThree)), 1));
GL11.glBegin(GL11.GL_LINES);
GL11.glVertex2f((x - (perGraphShift * num) - shift) + i,
(y - shift)
- (moneys[i] * ((float) height / maxThree)));
GL11.glVertex2f((x - (perGraphShift * num) - shift) + i + 1,
(y - shift)
- (moneys[i + 1] * ((float) height / maxThree)));
GL11.glEnd();
}
for (int i = 0; i < moneys.length - 1; i++) {
moneys[i] = moneys[i + 1];
}
num++;
GL11.glColor3f(1f, 1f, 1f);
GL11.glEnable(GL11.GL_TEXTURE_2D);
Render.font.drawString((x - (perGraphShift * (num - 1)) - shift), y
- labelDistance, "Money");
Render.font.drawString((x - (perGraphShift * (num - 2)) - shift), y
- labelDistance, "Wheat");
Render.font.drawString((x - (perGraphShift * (num - 3)) - shift), y
- labelDistance, "People");
GL11.glColor4f(1f, 1f, 1f, 1f);
}
}
| src/eco/Graphs.java | package eco;
import org.lwjgl.opengl.GL11;
import org.newdawn.slick.Color;
/**
* This class displays graphs of various things
*
* @author nate
*
*/
public class Graphs {
private static int size = 75;
private static int[] prices = new int[size];
private static int[] pop = new int[size];
private static int[] displaced = new int[size];
public static void draw(int year, int wheatPrice, int tPop, int taxRevenue) {
int y = 740;
int x = 1225;
int shift = 100;
int perGraphShift = 80;
int labelDistance = 60;
int num = 0;
int height = 90;
int maxOne = wheatPrice + 100;
int maxTwo = 200;
int maxThree = 200;
// World.messages.add(new Message(String.valueOf(maxOne), (x -
// (perGraphShift * num) - shift + size + 20), y - height - 100, 1));
// World.messages.add(new Message(String.valueOf(maxOne/2), (x -
// (perGraphShift * num) - shift + size + 20), y - height - 50, 1));
Render.font.drawString(
(x - (perGraphShift * num) - shift + size - 270), y - height
- 105, String.valueOf(maxTwo), Color.orange);
Render.font.drawString(
(x - (perGraphShift * num) - shift + size - 270), y - height
- 55, String.valueOf(maxTwo / 2), Color.orange);
Render.font.drawString(
(x - (perGraphShift * num) - shift + size - 270), y - height
- 5, "0", Color.orange);
Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
y - height - 105, String.valueOf(maxOne), Color.pink);
Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
y - height - 55, String.valueOf(maxOne / 2), Color.pink);
Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
y - height - 5, "0", Color.pink);
prices[74] = wheatPrice;
for (int i = 0; i < prices.length; i++) {
// World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
// num) - shift) + i, (y - shift) - (int) (prices[i] *
// ((float)height/maxOne)), 1));
Render.font
.drawString(
(x - (perGraphShift * num) - shift) + i,
(y - shift)
- (int) (prices[i] * ((float) height / maxOne)),
"\u25A0 .", Color.pink);
}
for (int i = 0; i < prices.length - 1; i++) {
prices[i] = prices[i + 1];
}
Message.addMessage(new Message("Price",
(x - (perGraphShift * num) - shift), y - labelDistance, 1));
num++;
pop[74] = tPop;
for (int i = 0; i < pop.length; i++) {
// World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
// num) - shift) + i, (y - shift) - (int) (pop[i] *
// ((float)height/maxTwo)), 1));
Render.font.drawString((x - (perGraphShift * num) - shift) + i,
(y - shift) - (int) (pop[i] * ((float) height / maxTwo)),
"\u25A0 .", Color.orange);
}
for (int i = 0; i < pop.length - 1; i++) {
pop[i] = pop[i + 1];
}
Message.addMessage(new Message("Pop",
(x - (perGraphShift * num) - shift), y - labelDistance, 1));
num++;
displaced[74] = World.displacedPeople;
for (int i = 0; i < displaced.length; i++) {
// World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
// num) - shift) + i, (y - shift) - (int) (displaced[i] *
// ((float)height/maxThree)), 1));
Render.font
.drawString(
(x - (perGraphShift * num) - shift) + i,
(y - shift)
- (int) (displaced[i] * ((float) height / maxThree)),
"\u25A0 .", Color.orange);
}
for (int i = 0; i < displaced.length - 1; i++) {
displaced[i] = displaced[i + 1];
}
Message.addMessage(new Message("(????)",
(x - (perGraphShift * num) - shift), y - labelDistance, 1));
num++;
GL11.glColor4f(1f, 1f, 1f, 1f);
}
}
| Update Graphs.java | src/eco/Graphs.java | Update Graphs.java | <ide><path>rc/eco/Graphs.java
<ide> public class Graphs {
<ide>
<ide> private static int size = 75;
<del> private static int[] prices = new int[size];
<del> private static int[] pop = new int[size];
<del> private static int[] displaced = new int[size];
<add> private static int[] pops = new int[size];
<add> private static int[] wheats = new int[size];
<add> private static float[] moneys = new float[size];
<ide>
<del> public static void draw(int year, int wheatPrice, int tPop, int taxRevenue) {
<add> public static void draw(int year, int population, int wheat, float money) {
<ide> int y = 740;
<ide> int x = 1225;
<ide> int shift = 100;
<ide> int labelDistance = 60;
<ide> int num = 0;
<ide> int height = 90;
<del> int maxOne = wheatPrice + 100;
<del> int maxTwo = 200;
<del> int maxThree = 200;
<add> int maxOne = 0;
<add> int maxTwo = 0;
<add> int maxThree = 0;
<add>
<add> for (int i = 0; i < pops.length; i++) {
<add> maxOne = (int) Math.max(maxOne, pops[i]);
<add> }
<add>
<add> for (int i = 0; i < wheats.length; i++) {
<add> maxTwo = (int) Math.max(maxTwo, wheats[i]);
<add> }
<add>
<add> for (int i = 0; i < moneys.length; i++) {
<add> maxThree = (int) Math.max(maxThree, moneys[i]);
<add> }
<ide>
<ide> // World.messages.add(new Message(String.valueOf(maxOne), (x -
<ide> // (perGraphShift * num) - shift + size + 20), y - height - 100, 1));
<ide> // (perGraphShift * num) - shift + size + 20), y - height - 50, 1));
<ide> Render.font.drawString(
<ide> (x - (perGraphShift * num) - shift + size - 270), y - height
<del> - 105, String.valueOf(maxTwo), Color.orange);
<add> - 105, String.valueOf(maxThree),
<add> new Color(41, 152, 104));
<ide> Render.font.drawString(
<ide> (x - (perGraphShift * num) - shift + size - 270), y - height
<del> - 55, String.valueOf(maxTwo / 2), Color.orange);
<add> - 55, String.valueOf(maxThree / 2), new Color(41, 152,
<add> 104));
<ide> Render.font.drawString(
<ide> (x - (perGraphShift * num) - shift + size - 270), y - height
<del> - 5, "0", Color.orange);
<add> - 5, "0", new Color(41, 152, 104));
<ide>
<ide> Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
<del> y - height - 105, String.valueOf(maxOne), Color.pink);
<add> y - height - 105, String.valueOf(maxOne),
<add> new Color(1, 169, 212));
<ide> Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
<del> y - height - 55, String.valueOf(maxOne / 2), Color.pink);
<add> y - height - 55, String.valueOf(maxOne / 2), new Color(1, 169,
<add> 212));
<ide> Render.font.drawString((x - (perGraphShift * num) - shift + size + 20),
<del> y - height - 5, "0", Color.pink);
<add> y - height - 5, "0", new Color(1, 169, 212));
<ide>
<del> prices[74] = wheatPrice;
<del> for (int i = 0; i < prices.length; i++) {
<add> pops[74] = population;
<add> GL11.glDisable(GL11.GL_TEXTURE_2D);
<add> GL11.glEnable(GL11.GL_LINE_SMOOTH);
<add> GL11.glLineWidth(3f);
<add> GL11.glBegin(GL11.GL_LINE_STRIP);
<add> for (int i = 0; i < pops.length - 1; i++) {
<ide> // World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
<ide> // num) - shift) + i, (y - shift) - (int) (prices[i] *
<ide> // ((float)height/maxOne)), 1));
<del> Render.font
<del> .drawString(
<del> (x - (perGraphShift * num) - shift) + i,
<del> (y - shift)
<del> - (int) (prices[i] * ((float) height / maxOne)),
<del> "\u25A0 .", Color.pink);
<add> //Render.font.drawString((x - (perGraphShift * num) - shift) + i,
<add> // (y - shift) - (int) (pops[i] * ((float) height / maxOne)),
<add> //"\u25A0 .", new Color(1, 169, 212));
<add> GL11.glVertex2f((float)(x - (perGraphShift * num) - shift) + i,
<add> (y - shift) - (pops[i] * ((float) height / maxOne)));
<add> System.out.println((x - (perGraphShift * num) - shift) + i);
<add> }
<add> GL11.glEnd();
<add> for (int i = 0; i < pops.length - 1; i++){
<add> pops[i] = pops[i + 1];
<add> }
<ide>
<del> }
<del> for (int i = 0; i < prices.length - 1; i++) {
<del> prices[i] = prices[i + 1];
<del> }
<del> Message.addMessage(new Message("Price",
<del> (x - (perGraphShift * num) - shift), y - labelDistance, 1));
<ide>
<ide> num++;
<ide>
<del> pop[74] = tPop;
<del> for (int i = 0; i < pop.length; i++) {
<add> wheats[74] = wheat;
<add> GL11.glColor3f(238 / 255f,187 / 255f,66 / 255f);
<add> for (int i = 0; i < wheats.length - 1; i++) {
<ide> // World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
<ide> // num) - shift) + i, (y - shift) - (int) (pop[i] *
<ide> // ((float)height/maxTwo)), 1));
<del> Render.font.drawString((x - (perGraphShift * num) - shift) + i,
<del> (y - shift) - (int) (pop[i] * ((float) height / maxTwo)),
<del> "\u25A0 .", Color.orange);
<add> GL11.glBegin(GL11.GL_LINES);
<add> GL11.glVertex2f((x - (perGraphShift * num) - shift) + i,
<add> (y - shift)
<add> - (wheats[i] * ((float) height / maxTwo)));
<add> GL11.glVertex2f((x - (perGraphShift * num) - shift) + i + 1,
<add> (y - shift)
<add> - (wheats[i + 1] * ((float) height / maxTwo)));
<add> GL11.glEnd();
<ide>
<ide> }
<del> for (int i = 0; i < pop.length - 1; i++) {
<del> pop[i] = pop[i + 1];
<add> for (int i = 0; i < wheats.length - 1; i++) {
<add> wheats[i] = wheats[i + 1];
<ide> }
<del> Message.addMessage(new Message("Pop",
<del> (x - (perGraphShift * num) - shift), y - labelDistance, 1));
<add>
<ide> num++;
<ide>
<del> displaced[74] = World.displacedPeople;
<del> for (int i = 0; i < displaced.length; i++) {
<add> moneys[74] = money;
<add> GL11.glColor3f(42 / 255f, 152 / 255f, 104 / 255f);
<add> for (int i = 0; i < moneys.length - 1; i++) {
<ide> // World.messages.add(new Message("\u25A0 .", (x - (perGraphShift *
<ide> // num) - shift) + i, (y - shift) - (int) (displaced[i] *
<ide> // ((float)height/maxThree)), 1));
<del> Render.font
<del> .drawString(
<del> (x - (perGraphShift * num) - shift) + i,
<del> (y - shift)
<del> - (int) (displaced[i] * ((float) height / maxThree)),
<del> "\u25A0 .", Color.orange);
<add>
<add> GL11.glBegin(GL11.GL_LINES);
<add> GL11.glVertex2f((x - (perGraphShift * num) - shift) + i,
<add> (y - shift)
<add> - (moneys[i] * ((float) height / maxThree)));
<add> GL11.glVertex2f((x - (perGraphShift * num) - shift) + i + 1,
<add> (y - shift)
<add> - (moneys[i + 1] * ((float) height / maxThree)));
<add> GL11.glEnd();
<ide>
<ide> }
<del> for (int i = 0; i < displaced.length - 1; i++) {
<del> displaced[i] = displaced[i + 1];
<add> for (int i = 0; i < moneys.length - 1; i++) {
<add> moneys[i] = moneys[i + 1];
<ide> }
<del> Message.addMessage(new Message("(????)",
<del> (x - (perGraphShift * num) - shift), y - labelDistance, 1));
<add> num++;
<add> GL11.glColor3f(1f, 1f, 1f);
<ide>
<del> num++;
<add>
<add> GL11.glEnable(GL11.GL_TEXTURE_2D);
<add> Render.font.drawString((x - (perGraphShift * (num - 1)) - shift), y
<add> - labelDistance, "Money");
<add> Render.font.drawString((x - (perGraphShift * (num - 2)) - shift), y
<add> - labelDistance, "Wheat");
<add> Render.font.drawString((x - (perGraphShift * (num - 3)) - shift), y
<add> - labelDistance, "People");
<add>
<ide>
<ide> GL11.glColor4f(1f, 1f, 1f, 1f);
<ide> } |
|
Java | apache-2.0 | c342be769e63056b3a10ef433f2dc997ce2c78ef | 0 | lexs/webimageloader | package se.alexanderblom.imageloader.loader;
import java.io.InputStream;
import java.util.Iterator;
import java.util.Set;
import java.util.WeakHashMap;
import se.alexanderblom.imageloader.Request;
import se.alexanderblom.imageloader.util.BitmapUtils;
import android.graphics.Bitmap;
import android.util.Log;
public class PendingRequests {
private static final String TAG = "PendingRequests";
private WeakHashMap<Object, Request> pendingsTags;
private WeakHashMap<Request, PendingListeners> pendingsRequests;
private MemoryCache memoryCache;
public PendingRequests(MemoryCache memoryCache) {
this.memoryCache = memoryCache;
pendingsTags = new WeakHashMap<Object, Request>();
pendingsRequests = new WeakHashMap<Request, PendingListeners>();
}
public synchronized Loader.Listener addRequest(Object tag, Request request, LoaderManager.Listener listener) {
if (stillPending(tag, request)) {
return null;
}
cancelPotentialWork(tag);
pendingsTags.put(tag, request);
PendingListeners listeners = pendingsRequests.get(request);
if (listeners == null) {
listeners = new PendingListeners(tag, listener);
pendingsRequests.put(request, listeners);
return new RequestListener(request);
} else {
Log.v(TAG, "Reusing request: " + request);
listeners.add(tag, listener);
return null;
}
}
protected synchronized void deliverResult(Request request, Bitmap b) {
PendingListeners listeners = pendingsRequests.remove(request);
if (listeners == null) {
Log.v(TAG, "Request no longer pending: " + request);
return;
}
saveToMemoryCache(request, b);
filterTagsForRequest(listeners, request);
listeners.deliverResult(b);
pendingsTags.keySet().removeAll(listeners.getTags());
}
protected synchronized void deliverError(Request request, Throwable t) {
PendingListeners listeners = pendingsRequests.get(request);
if (listeners == null) {
Log.v(TAG, "Request no longer pending: " + request);
return;
}
filterTagsForRequest(listeners, request);
listeners.deliverError(t);
pendingsTags.keySet().removeAll(listeners.getTags());
}
private void cancelPotentialWork(Object tag) {
Request request = pendingsTags.remove(tag);
if (request == null) {
return;
}
PendingListeners listeners = pendingsRequests.get(request);
if (!listeners.remove(tag)) {
pendingsRequests.remove(request);
// TODO: Actually cancel request
}
}
/**
* Remove tags not pending for this request
*/
private void filterTagsForRequest(PendingListeners listeners, Request request) {
// Tags pending for this request
Set<Object> tags = listeners.getTags();
for (Iterator<Object> it = tags.iterator(); it.hasNext(); ) {
Object tag = it.next();
// Check if tag is still pending
if (!stillPending(tag, request)) {
it.remove();
}
}
}
private void saveToMemoryCache(Request request, Bitmap b) {
if (memoryCache != null) {
memoryCache.set(request, b);
}
}
private boolean stillPending(Object tag, Request request) {
return request.equals(pendingsTags.get(tag));
}
private class RequestListener implements Loader.Listener {
private Request request;
public RequestListener(Request request) {
this.request = request;
}
@Override
public void onStreamLoaded(InputStream is) {
Bitmap b = BitmapUtils.decodeStream(is);
onBitmapLoaded(b);
}
@Override
public void onBitmapLoaded(Bitmap b) {
deliverResult(request, b);
}
@Override
public void onError(Throwable t) {
deliverError(request, t);
}
}
private static class PendingListeners {
private WeakHashMap<Object, LoaderManager.Listener> listeners;
public PendingListeners(Object tag, LoaderManager.Listener listener) {
listeners = new WeakHashMap<Object, LoaderManager.Listener>();
add(tag, listener);
}
public void add(Object tag, LoaderManager.Listener listener) {
listeners.put(tag, listener);
}
/**
* Remove a listener
* @return true if this task is still pending
*/
public boolean remove(Object tag) {
listeners.remove(tag);
if (listeners.isEmpty()) {
return false;
} else {
return true;
}
}
public Set<Object> getTags() {
return listeners.keySet();
}
public void deliverResult(Bitmap b) {
for (LoaderManager.Listener listener : listeners.values()) {
listener.onLoaded(b);
}
}
public void deliverError(Throwable t) {
for (LoaderManager.Listener listener : listeners.values()) {
listener.onError(t);
}
}
}
}
| imageloader/src/se/alexanderblom/imageloader/loader/PendingRequests.java | package se.alexanderblom.imageloader.loader;
import java.io.InputStream;
import java.util.Iterator;
import java.util.Set;
import java.util.WeakHashMap;
import se.alexanderblom.imageloader.Request;
import se.alexanderblom.imageloader.util.BitmapUtils;
import android.graphics.Bitmap;
import android.util.Log;
public class PendingRequests {
private static final String TAG = "PendingRequests";
private WeakHashMap<Object, Request> pendingsTags;
private WeakHashMap<Request, PendingListeners> pendingsRequests;
private MemoryCache memoryCache;
public PendingRequests(MemoryCache memoryCache) {
this.memoryCache = memoryCache;
pendingsTags = new WeakHashMap<Object, Request>();
pendingsRequests = new WeakHashMap<Request, PendingListeners>();
}
public synchronized Loader.Listener addRequest(Object tag, Request request, LoaderManager.Listener listener) {
if (stillPending(tag, request)) {
return null;
}
cancelPotentialWork(tag);
pendingsTags.put(tag, request);
PendingListeners listeners = pendingsRequests.get(request);
if (listeners == null) {
listeners = new PendingListeners(tag, listener);
pendingsRequests.put(request, listeners);
return new RequestListener(request);
} else {
Log.v(TAG, "Reusing request: " + request);
listeners.add(tag, listener);
return null;
}
}
private void cancelPotentialWork(Object tag) {
Request request = pendingsTags.remove(tag);
if (request == null) {
return;
}
PendingListeners listeners = pendingsRequests.get(request);
if (!listeners.remove(tag)) {
pendingsRequests.remove(request);
// TODO: Actually cancel request
}
}
private synchronized void deliverResult(Request request, Bitmap b) {
PendingListeners listeners = pendingsRequests.remove(request);
if (listeners == null) {
Log.v(TAG, "Request no longer pending: " + request);
return;
}
saveToMemoryCache(request, b);
filterTagsForRequest(listeners, request);
listeners.deliverResult(b);
pendingsTags.keySet().removeAll(listeners.getTags());
}
private synchronized void deliverError(Request request, Throwable t) {
PendingListeners listeners = pendingsRequests.get(request);
if (listeners == null) {
Log.v(TAG, "Request no longer pending: " + request);
return;
}
filterTagsForRequest(listeners, request);
listeners.deliverError(t);
pendingsTags.keySet().removeAll(listeners.getTags());
}
/**
* Remove tags not pending for this request
*/
private void filterTagsForRequest(PendingListeners listeners, Request request) {
// Tags pending for this request
Set<Object> tags = listeners.getTags();
for (Iterator<Object> it = tags.iterator(); it.hasNext(); ) {
Object tag = it.next();
// Check if tag is still pending
if (!stillPending(tag, request)) {
it.remove();
}
}
}
private void saveToMemoryCache(Request request, Bitmap b) {
if (memoryCache != null) {
memoryCache.set(request, b);
}
}
private boolean stillPending(Object tag, Request request) {
return request.equals(pendingsTags.get(tag));
}
private class RequestListener implements Loader.Listener {
private Request request;
public RequestListener(Request request) {
this.request = request;
}
@Override
public void onStreamLoaded(InputStream is) {
Bitmap b = BitmapUtils.decodeStream(is);
onBitmapLoaded(b);
}
@Override
public void onBitmapLoaded(Bitmap b) {
deliverResult(request, b);
}
@Override
public void onError(Throwable t) {
deliverError(request, t);
}
}
private static class PendingListeners {
private WeakHashMap<Object, LoaderManager.Listener> listeners;
public PendingListeners(Object tag, LoaderManager.Listener listener) {
listeners = new WeakHashMap<Object, LoaderManager.Listener>();
add(tag, listener);
}
public void add(Object tag, LoaderManager.Listener listener) {
listeners.put(tag, listener);
}
/**
* Remove a listener
* @return true if this task is still pending
*/
public boolean remove(Object tag) {
listeners.remove(tag);
if (listeners.isEmpty()) {
return false;
} else {
return true;
}
}
public Set<Object> getTags() {
return listeners.keySet();
}
public void deliverResult(Bitmap b) {
for (LoaderManager.Listener listener : listeners.values()) {
listener.onLoaded(b);
}
}
public void deliverError(Throwable t) {
for (LoaderManager.Listener listener : listeners.values()) {
listener.onError(t);
}
}
}
}
| Make it clearer which methods need to be synchronized
| imageloader/src/se/alexanderblom/imageloader/loader/PendingRequests.java | Make it clearer which methods need to be synchronized | <ide><path>mageloader/src/se/alexanderblom/imageloader/loader/PendingRequests.java
<ide> }
<ide> }
<ide>
<del> private void cancelPotentialWork(Object tag) {
<del> Request request = pendingsTags.remove(tag);
<del> if (request == null) {
<del> return;
<del> }
<del>
<del> PendingListeners listeners = pendingsRequests.get(request);
<del> if (!listeners.remove(tag)) {
<del> pendingsRequests.remove(request);
<del> // TODO: Actually cancel request
<del> }
<del> }
<del>
<del> private synchronized void deliverResult(Request request, Bitmap b) {
<add> protected synchronized void deliverResult(Request request, Bitmap b) {
<ide> PendingListeners listeners = pendingsRequests.remove(request);
<ide> if (listeners == null) {
<ide> Log.v(TAG, "Request no longer pending: " + request);
<ide> pendingsTags.keySet().removeAll(listeners.getTags());
<ide> }
<ide>
<del> private synchronized void deliverError(Request request, Throwable t) {
<add> protected synchronized void deliverError(Request request, Throwable t) {
<ide> PendingListeners listeners = pendingsRequests.get(request);
<ide> if (listeners == null) {
<ide> Log.v(TAG, "Request no longer pending: " + request);
<ide> filterTagsForRequest(listeners, request);
<ide> listeners.deliverError(t);
<ide> pendingsTags.keySet().removeAll(listeners.getTags());
<add> }
<add>
<add> private void cancelPotentialWork(Object tag) {
<add> Request request = pendingsTags.remove(tag);
<add> if (request == null) {
<add> return;
<add> }
<add>
<add> PendingListeners listeners = pendingsRequests.get(request);
<add> if (!listeners.remove(tag)) {
<add> pendingsRequests.remove(request);
<add> // TODO: Actually cancel request
<add> }
<ide> }
<ide>
<ide> /** |
|
Java | apache-2.0 | 4b1d40f160636ff02281b34aa2aeffa8d8569362 | 0 | brianfrankcooper/YCSB,zyguan/ycsb,leschekhomann/YCSB,leschekhomann/YCSB,leschekhomann/YCSB,leschekhomann/YCSB,jaemyoun/YCSB,cricket007/YCSB,cricket007/YCSB,zyguan/ycsb,madhurihn/YCSB_ToyDB,jaemyoun/YCSB,madhurihn/YCSB_ToyDB,cricket007/YCSB,manolama/YCSB,zyguan/ycsb,ChristianNavolskyi/YCSB,madhurihn/YCSB_ToyDB,ChristianNavolskyi/YCSB,brianfrankcooper/YCSB,ChristianNavolskyi/YCSB,ChristianNavolskyi/YCSB,manolama/YCSB,jaemyoun/YCSB,brianfrankcooper/YCSB,zyguan/ycsb,cricket007/YCSB,jaemyoun/YCSB,manolama/YCSB,manolama/YCSB,madhurihn/YCSB_ToyDB,brianfrankcooper/YCSB | /**
* Copyright (c) 2010 Yahoo! Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.yahoo.ycsb.db;
import com.yahoo.ycsb.DB;
import com.yahoo.ycsb.DBException;
import com.yahoo.ycsb.ByteIterator;
import com.yahoo.ycsb.Status;
import com.yahoo.ycsb.StringByteIterator;
import java.sql.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* A class that wraps a JDBC compliant database to allow it to be interfaced with YCSB.
* This class extends {@link DB} and implements the database interface used by YCSB client.
*
* <br> Each client will have its own instance of this class. This client is
* not thread safe.
*
* <br> This interface expects a schema <key> <field1> <field2> <field3> ...
* All attributes are of type VARCHAR. All accesses are through the primary key. Therefore,
* only one index on the primary key is needed.
*
* <p> The following options must be passed when using this database client.
*
* <ul>
* <li><b>db.driver</b> The JDBC driver class to use.</li>
* <li><b>db.url</b> The Database connection URL.</li>
* <li><b>db.user</b> User name for the connection.</li>
* <li><b>db.passwd</b> Password for the connection.</li>
* </ul>
*
* @author sudipto
*
*/
public class JdbcDBClient extends DB implements JdbcDBClientConstants {
private ArrayList<Connection> conns;           // one connection per shard, indexed by getShardIndexByKey()
private boolean initialized = false;           // guards against a second init() on the same client instance
private Properties props;                      // workload/driver properties captured in init()
private Integer jdbcFetchSize;                 // optional JDBC fetch size; null when the property is unset
private static final String DEFAULT_PROP = ""; // default for absent connection properties
private ConcurrentMap<StatementType, PreparedStatement> cachedStatements; // statement cache keyed by statement shape
/**
 * Cache key describing the shape of a prepared statement: the operation
 * kind, target table, number of fields, and the shard the statement was
 * prepared on. Instances are used as keys in {@code cachedStatements}, so
 * {@link #hashCode()} and {@link #equals(Object)} must stay in agreement.
 */
private static class StatementType {

  enum Type {
    INSERT(1),
    DELETE(2),
    READ(3),
    UPDATE(4),
    SCAN(5),
    ;
    int internalType; // stable numeric id used for hashing (independent of ordinal())

    private Type(int type) {
      internalType = type;
    }

    int getHashCode() {
      // Mirrors the conventional generated-hashCode recipe (prime 31).
      final int prime = 31;
      int result = 1;
      result = prime * result + internalType;
      return result;
    }
  }

  Type type;        // which SQL operation this statement performs
  int shardIndex;   // shard (connection index) the statement belongs to
  int numFields;    // number of value fields (shapes the generated SQL)
  String tableName; // table the statement targets

  StatementType(Type type, String tableName, int numFields, int _shardIndex) {
    this.type = type;
    this.tableName = tableName;
    this.numFields = numFields;
    this.shardIndex = _shardIndex;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    // numFields and shardIndex are folded into one term; the 100 * shardIndex
    // offset keeps typical (small) field counts from colliding across shards.
    result = prime * result + numFields + 100 * shardIndex;
    result = prime * result
        + ((tableName == null) ? 0 : tableName.hashCode());
    result = prime * result + ((type == null) ? 0 : type.getHashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    StatementType other = (StatementType) obj;
    if (numFields != other.numFields)
      return false;
    if (shardIndex != other.shardIndex)
      return false;
    if (tableName == null) {
      if (other.tableName != null)
        return false;
    } else if (!tableName.equals(other.tableName))
      return false;
    if (type != other.type)
      return false;
    return true;
  }
}
/**
 * Maps a record key to the index of the shard (connection) that stores it.
 *
 * <p>The key's hash is masked to a non-negative value before the modulo.
 * The previous {@code Math.abs(key.hashCode())} form is broken for keys
 * whose hash is {@code Integer.MIN_VALUE}: {@code Math.abs} returns
 * {@code Integer.MIN_VALUE} unchanged, producing a negative list index.
 *
 * @param key Data key to do operation on
 * @return shard index in the range {@code [0, conns.size())}
 */
private int getShardIndexByKey(String key) {
  // Mask off the sign bit instead of Math.abs(): abs(Integer.MIN_VALUE) < 0.
  int ret = (key.hashCode() & 0x7fffffff) % conns.size();
  //System.out.println(conns.size() + ": Shard instance for "+ key + " (hash " + key.hashCode()+ " ) " + " is " + ret);
  return ret;
}
/**
 * Resolves the JDBC connection responsible for the shard holding {@code key}.
 *
 * @param key Data key to get information for
 * @return Connection object for the key's shard
 */
private Connection getShardConnectionByKey(String key) {
  int shardIndex = getShardIndexByKey(key);
  return conns.get(shardIndex);
}
/**
 * Closes every shard connection.
 *
 * <p>All connections are attempted even if an earlier {@code close()} fails;
 * previously the first failure aborted the loop and leaked every remaining
 * connection. The first {@link SQLException} encountered is rethrown after
 * the loop completes, preserving the method's error contract.
 *
 * @throws SQLException the first error raised while closing a connection
 */
private void cleanupAllConnections() throws SQLException {
  SQLException firstError = null;
  for (Connection conn : conns) {
    try {
      conn.close();
    } catch (SQLException e) {
      // Keep closing the remaining shards; remember only the first failure.
      if (firstError == null) {
        firstError = e;
      }
    }
  }
  if (firstError != null) {
    throw firstError;
  }
}
/**
 * Initialize the database connection and set it up for sending requests to the database.
 * This must be called once per client.
 * @throws DBException if the driver class cannot be loaded, a numeric
 *         property is malformed, or a connection to any shard fails
 */
@Override
public void init() throws DBException {
  if (initialized) {
    // init() may be invoked again by the harness; warn and keep existing state.
    System.err.println("Client connection already initialized.");
    return;
  }
  props = getProperties();
  // Connection settings; missing values default to the empty string.
  String urls = props.getProperty(CONNECTION_URL, DEFAULT_PROP);
  String user = props.getProperty(CONNECTION_USER, DEFAULT_PROP);
  String passwd = props.getProperty(CONNECTION_PASSWD, DEFAULT_PROP);
  String driver = props.getProperty(DRIVER_CLASS);

  // Optional fetch size; only parsed (and validated) when supplied.
  String jdbcFetchSizeStr = props.getProperty(JDBC_FETCH_SIZE);
  if (jdbcFetchSizeStr != null) {
    try {
      this.jdbcFetchSize = Integer.parseInt(jdbcFetchSizeStr);
    } catch (NumberFormatException nfe) {
      System.err.println("Invalid JDBC fetch size specified: " + jdbcFetchSizeStr);
      throw new DBException(nfe);
    }
  }

  // Auto-commit defaults to true; parseBoolean never throws.
  String autoCommitStr = props.getProperty(JDBC_AUTO_COMMIT, Boolean.TRUE.toString());
  Boolean autoCommit = Boolean.parseBoolean(autoCommitStr);

  try {
    if (driver != null) {
      // Explicitly load the driver class; needed for pre-JDBC-4 drivers
      // that do not self-register with DriverManager.
      Class.forName(driver);
    }
    int shardCount = 0;
    conns = new ArrayList<Connection>(3);
    // The URL property may be a comma-separated list; each entry becomes
    // one shard with its own dedicated connection.
    for (String url: urls.split(",")) {
      System.out.println("Adding shard node URL: " + url);
      Connection conn = DriverManager.getConnection(url, user, passwd);

      // Since there is no explicit commit method in the DB interface, all
      // operations should auto commit, except when explicitly told not to
      // (this is necessary in cases such as for PostgreSQL when running a
      // scan workload with fetchSize)
      conn.setAutoCommit(autoCommit);

      shardCount++;
      conns.add(conn);
    }

    System.out.println("Using " + shardCount + " shards");

    cachedStatements = new ConcurrentHashMap<StatementType, PreparedStatement>();
  } catch (ClassNotFoundException e) {
    System.err.println("Error in initializing the JDBS driver: " + e);
    throw new DBException(e);
  } catch (SQLException e) {
    System.err.println("Error in database operation: " + e);
    throw new DBException(e);
  } catch (NumberFormatException e) {
    // NOTE(review): nothing inside this try block parses numbers, so this
    // catch (and its stale "fieldcount" message) appears to be dead code
    // from an earlier revision — confirm before removing.
    System.err.println("Invalid value for fieldcount property. " + e);
    throw new DBException(e);
  }
  initialized = true;
}
@Override
public void cleanup() throws DBException {
try {
cleanupAllConnections();
} catch (SQLException e) {
System.err.println("Error in closing the connection. " + e);
throw new DBException(e);
}
}
private PreparedStatement createAndCacheInsertStatement(StatementType insertType, String key)
throws SQLException {
StringBuilder insert = new StringBuilder("INSERT INTO ");
insert.append(insertType.tableName);
insert.append(" VALUES(?");
for (int i = 0; i < insertType.numFields; i++) {
insert.append(",?");
}
insert.append(")");
PreparedStatement insertStatement = getShardConnectionByKey(key).prepareStatement(insert.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(insertType, insertStatement);
if (stmt == null) return insertStatement;
else return stmt;
}
private PreparedStatement createAndCacheReadStatement(StatementType readType, String key)
throws SQLException {
StringBuilder read = new StringBuilder("SELECT * FROM ");
read.append(readType.tableName);
read.append(" WHERE ");
read.append(PRIMARY_KEY);
read.append(" = ");
read.append("?");
PreparedStatement readStatement = getShardConnectionByKey(key).prepareStatement(read.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(readType, readStatement);
if (stmt == null) return readStatement;
else return stmt;
}
private PreparedStatement createAndCacheDeleteStatement(StatementType deleteType, String key)
throws SQLException {
StringBuilder delete = new StringBuilder("DELETE FROM ");
delete.append(deleteType.tableName);
delete.append(" WHERE ");
delete.append(PRIMARY_KEY);
delete.append(" = ?");
PreparedStatement deleteStatement = getShardConnectionByKey(key).prepareStatement(delete.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(deleteType, deleteStatement);
if (stmt == null) return deleteStatement;
else return stmt;
}
private PreparedStatement createAndCacheUpdateStatement(StatementType updateType, String key)
throws SQLException {
StringBuilder update = new StringBuilder("UPDATE ");
update.append(updateType.tableName);
update.append(" SET ");
for (int i = 0; i < updateType.numFields; i++) {
update.append(COLUMN_PREFIX);
update.append(i);
update.append("=?");
if (i < updateType.numFields - 1) update.append(", ");
}
update.append(" WHERE ");
update.append(PRIMARY_KEY);
update.append(" = ?");
PreparedStatement insertStatement = getShardConnectionByKey(key).prepareStatement(update.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(updateType, insertStatement);
if (stmt == null) return insertStatement;
else return stmt;
}
private PreparedStatement createAndCacheScanStatement(StatementType scanType, String key)
throws SQLException {
StringBuilder select = new StringBuilder("SELECT * FROM ");
select.append(scanType.tableName);
select.append(" WHERE ");
select.append(PRIMARY_KEY);
select.append(" >= ?");
select.append(" ORDER BY ");
select.append(PRIMARY_KEY);
select.append(" LIMIT ?");
PreparedStatement scanStatement = getShardConnectionByKey(key).prepareStatement(select.toString());
if (this.jdbcFetchSize != null) scanStatement.setFetchSize(this.jdbcFetchSize);
PreparedStatement stmt = cachedStatements.putIfAbsent(scanType, scanStatement);
if (stmt == null) return scanStatement;
else return stmt;
}
@Override
public Status read(String tableName, String key, Set<String> fields,
HashMap<String, ByteIterator> result) {
try {
StatementType type = new StatementType(StatementType.Type.READ, tableName, 1, getShardIndexByKey(key));
PreparedStatement readStatement = cachedStatements.get(type);
if (readStatement == null) {
readStatement = createAndCacheReadStatement(type, key);
}
readStatement.setString(1, key);
ResultSet resultSet = readStatement.executeQuery();
if (!resultSet.next()) {
resultSet.close();
return Status.NOT_FOUND;
}
if (result != null && fields != null) {
for (String field : fields) {
String value = resultSet.getString(field);
result.put(field, new StringByteIterator(value));
}
}
resultSet.close();
return Status.OK;
} catch (SQLException e) {
System.err.println("Error in processing read of table " + tableName + ": "+e);
return Status.ERROR;
}
}
@Override
public Status scan(String tableName, String startKey, int recordcount,
Set<String> fields, Vector<HashMap<String, ByteIterator>> result) {
try {
StatementType type = new StatementType(StatementType.Type.SCAN, tableName, 1, getShardIndexByKey(startKey));
PreparedStatement scanStatement = cachedStatements.get(type);
if (scanStatement == null) {
scanStatement = createAndCacheScanStatement(type, startKey);
}
scanStatement.setString(1, startKey);
scanStatement.setInt(2, recordcount);
ResultSet resultSet = scanStatement.executeQuery();
for (int i = 0; i < recordcount && resultSet.next(); i++) {
if (result != null && fields != null) {
HashMap<String, ByteIterator> values = new HashMap<String, ByteIterator>();
for (String field : fields) {
String value = resultSet.getString(field);
values.put(field, new StringByteIterator(value));
}
result.add(values);
}
}
resultSet.close();
return Status.OK;
} catch (SQLException e) {
System.err.println("Error in processing scan of table: " + tableName + e);
return Status.ERROR;
}
}
  @Override
  public Status update(String tableName, String key, HashMap<String, ByteIterator> values) {
    try {
      int numFields = values.size();
      StatementType type = new StatementType(StatementType.Type.UPDATE, tableName, numFields, getShardIndexByKey(key));
      PreparedStatement updateStatement = cachedStatements.get(type);
      if (updateStatement == null) {
        updateStatement = createAndCacheUpdateStatement(type, key);
      }
      // Bind one value per SET column, then the key for the WHERE clause.
      // NOTE(review): values are bound in HashMap iteration order while the
      // cached SQL names columns field0..fieldN-1 in index order; this assumes
      // the two orders agree -- TODO confirm against the workload's field naming.
      int index = 1;
      for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
        updateStatement.setString(index++, entry.getValue().toString());
      }
      updateStatement.setString(index, key);
      int result = updateStatement.executeUpdate();
      // Exactly one row is expected to change for a keyed update.
      if (result == 1) return Status.OK;
      else return Status.UNEXPECTED_STATE;
    } catch (SQLException e) {
      System.err.println("Error in processing update to table: " + tableName + e);
      return Status.ERROR;
    }
  }
@Override
public Status insert(String tableName, String key, HashMap<String, ByteIterator> values) {
try {
int numFields = values.size();
StatementType type = new StatementType(StatementType.Type.INSERT, tableName, numFields, getShardIndexByKey(key));
PreparedStatement insertStatement = cachedStatements.get(type);
if (insertStatement == null) {
insertStatement = createAndCacheInsertStatement(type, key);
}
insertStatement.setString(1, key);
int index = 2;
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
String field = entry.getValue().toString();
insertStatement.setString(index++, field);
}
int result = insertStatement.executeUpdate();
if (result == 1) return Status.OK;
else return Status.UNEXPECTED_STATE;
} catch (SQLException e) {
System.err.println("Error in processing insert to table: " + tableName + e);
return Status.ERROR;
}
}
@Override
public Status delete(String tableName, String key) {
try {
StatementType type = new StatementType(StatementType.Type.DELETE, tableName, 1, getShardIndexByKey(key));
PreparedStatement deleteStatement = cachedStatements.get(type);
if (deleteStatement == null) {
deleteStatement = createAndCacheDeleteStatement(type, key);
}
deleteStatement.setString(1, key);
int result = deleteStatement.executeUpdate();
if (result == 1) return Status.OK;
else return Status.UNEXPECTED_STATE;
} catch (SQLException e) {
System.err.println("Error in processing delete to table: " + tableName + e);
return Status.ERROR;
}
}
}
| jdbc/src/main/java/com/yahoo/ycsb/db/JdbcDBClient.java | /**
* Copyright (c) 2010 Yahoo! Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.yahoo.ycsb.db;
import com.yahoo.ycsb.DB;
import com.yahoo.ycsb.DBException;
import com.yahoo.ycsb.ByteIterator;
import com.yahoo.ycsb.Status;
import com.yahoo.ycsb.StringByteIterator;
import java.sql.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* A class that wraps a JDBC compliant database to allow it to be interfaced with YCSB.
* This class extends {@link DB} and implements the database interface used by YCSB client.
*
* <br> Each client will have its own instance of this class. This client is
* not thread safe.
*
* <br> This interface expects a schema <key> <field1> <field2> <field3> ...
* All attributes are of type VARCHAR. All accesses are through the primary key. Therefore,
* only one index on the primary key is needed.
*
* <p> The following options must be passed when using this database client.
*
* <ul>
* <li><b>db.driver</b> The JDBC driver class to use.</li>
* <li><b>db.url</b> The Database connection URL.</li>
* <li><b>db.user</b> User name for the connection.</li>
* <li><b>db.passwd</b> Password for the connection.</li>
* </ul>
*
* @author sudipto
*
*/
public class JdbcDBClient extends DB implements JdbcDBClientConstants {
  // One JDBC connection per shard, indexed by getShardIndexByKey().
  private ArrayList<Connection> conns;
  // Set by init(); guards against double initialization.
  private boolean initialized = false;
  private Properties props;
  // Optional fetch size applied to scan statements; null when unset.
  private Integer jdbcFetchSize;
  private static final String DEFAULT_PROP = "";
  // Prepared statements keyed by (operation, table, field count, shard).
  private ConcurrentMap<StatementType, PreparedStatement> cachedStatements;
/**
* The statement type for the prepared statements.
*/
  private static class StatementType {
    // Operation kind; each constant carries a small stable integer used for
    // hashing instead of Enum.hashCode() (which varies between JVM runs).
    enum Type {
      INSERT(1),
      DELETE(2),
      READ(3),
      UPDATE(4),
      SCAN(5),
      ;
      int internalType;
      private Type(int type) {
        internalType = type;
      }
      // Hash contribution of this constant, derived from internalType only.
      int getHashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + internalType;
        return result;
      }
    }
    Type type;
    int shardIndex;
    int numFields;
    String tableName;
    StatementType(Type type, String tableName, int numFields, int _shardIndex) {
      this.type = type;
      this.tableName = tableName;
      this.numFields = numFields;
      this.shardIndex = _shardIndex;
    }
    // hashCode/equals make instances usable as ConcurrentHashMap cache keys;
    // both cover all four components.
    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = prime * result + numFields + 100 * shardIndex;
      result = prime * result
          + ((tableName == null) ? 0 : tableName.hashCode());
      result = prime * result + ((type == null) ? 0 : type.getHashCode());
      return result;
    }
    @Override
    public boolean equals(Object obj) {
      if (this == obj)
        return true;
      if (obj == null)
        return false;
      if (getClass() != obj.getClass())
        return false;
      StatementType other = (StatementType) obj;
      if (numFields != other.numFields)
        return false;
      if (shardIndex != other.shardIndex)
        return false;
      if (tableName == null) {
        if (other.tableName != null)
          return false;
      } else if (!tableName.equals(other.tableName))
        return false;
      if (type != other.type)
        return false;
      return true;
    }
  }
/**
* For the given key, returns what shard contains data for this key
*
* @param key Data key to do operation on
* @return Shard index
*/
private int getShardIndexByKey(String key) {
int ret = Math.abs(key.hashCode()) % conns.size();
//System.out.println(conns.size() + ": Shard instance for "+ key + " (hash " + key.hashCode()+ " ) " + " is " + ret);
return ret;
}
/**
* For the given key, returns Connection object that holds connection
* to the shard that contains this key
*
* @param key Data key to get information for
* @return Connection object
*/
  private Connection getShardConnectionByKey(String key) {
    // Map the key to a shard index and return that shard's connection.
    return conns.get(getShardIndexByKey(key));
  }
private void cleanupAllConnections() throws SQLException {
for(Connection conn: conns) {
conn.close();
}
}
/**
* Initialize the database connection and set it up for sending requests to the database.
* This must be called once per client.
   * @throws DBException if the JDBC driver cannot be loaded or a connection cannot be established.
*/
  @Override
  public void init() throws DBException {
    // Guard against double initialization: a repeat call is ignored with a warning.
    if (initialized) {
      System.err.println("Client connection already initialized.");
      return;
    }
    props = getProperties();
    // Connection settings come from the workload properties. The URL property
    // may hold a comma-separated list, one entry per shard.
    String urls = props.getProperty(CONNECTION_URL, DEFAULT_PROP);
    String user = props.getProperty(CONNECTION_USER, DEFAULT_PROP);
    String passwd = props.getProperty(CONNECTION_PASSWD, DEFAULT_PROP);
    String driver = props.getProperty(DRIVER_CLASS);
    String jdbcFetchSizeStr = props.getProperty(JDBC_FETCH_SIZE);
    if (jdbcFetchSizeStr != null) {
      try {
        this.jdbcFetchSize = Integer.parseInt(jdbcFetchSizeStr);
      } catch (NumberFormatException nfe) {
        System.err.println("Invalid JDBC fetch size specified: " + jdbcFetchSizeStr);
        throw new DBException(nfe);
      }
    }
    String autoCommitStr = props.getProperty(JDBC_AUTO_COMMIT, Boolean.TRUE.toString());
    Boolean autoCommit = Boolean.parseBoolean(autoCommitStr);
    try {
      // Explicitly loading the driver class is only needed for pre-JDBC-4
      // drivers, hence the null check.
      if (driver != null) {
        Class.forName(driver);
      }
      int shardCount = 0;
      conns = new ArrayList<Connection>(3);
      // Open one connection per shard URL.
      for (String url: urls.split(",")) {
        System.out.println("Adding shard node URL: " + url);
        Connection conn = DriverManager.getConnection(url, user, passwd);
        // Since there is no explicit commit method in the DB interface, all
        // operations should auto commit, except when explicitly told not to
        // (this is necessary in cases such as for PostgreSQL when running a
        // scan workload with fetchSize)
        conn.setAutoCommit(autoCommit);
        shardCount++;
        conns.add(conn);
      }
      System.out.println("Using " + shardCount + " shards");
      cachedStatements = new ConcurrentHashMap<StatementType, PreparedStatement>();
    } catch (ClassNotFoundException e) {
      System.err.println("Error in initializing the JDBS driver: " + e);
      throw new DBException(e);
    } catch (SQLException e) {
      System.err.println("Error in database operation: " + e);
      throw new DBException(e);
    } catch (NumberFormatException e) {
      System.err.println("Invalid value for fieldcount property. " + e);
      throw new DBException(e);
    }
    initialized = true;
  }
@Override
public void cleanup() throws DBException {
try {
cleanupAllConnections();
} catch (SQLException e) {
System.err.println("Error in closing the connection. " + e);
throw new DBException(e);
}
}
private PreparedStatement createAndCacheInsertStatement(StatementType insertType, String key)
throws SQLException {
StringBuilder insert = new StringBuilder("INSERT INTO ");
insert.append(insertType.tableName);
insert.append(" VALUES(?");
for (int i = 0; i < insertType.numFields; i++) {
insert.append(",?");
}
insert.append(");");
PreparedStatement insertStatement = getShardConnectionByKey(key).prepareStatement(insert.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(insertType, insertStatement);
if (stmt == null) return insertStatement;
else return stmt;
}
private PreparedStatement createAndCacheReadStatement(StatementType readType, String key)
throws SQLException {
StringBuilder read = new StringBuilder("SELECT * FROM ");
read.append(readType.tableName);
read.append(" WHERE ");
read.append(PRIMARY_KEY);
read.append(" = ");
read.append("?;");
PreparedStatement readStatement = getShardConnectionByKey(key).prepareStatement(read.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(readType, readStatement);
if (stmt == null) return readStatement;
else return stmt;
}
private PreparedStatement createAndCacheDeleteStatement(StatementType deleteType, String key)
throws SQLException {
StringBuilder delete = new StringBuilder("DELETE FROM ");
delete.append(deleteType.tableName);
delete.append(" WHERE ");
delete.append(PRIMARY_KEY);
delete.append(" = ?;");
PreparedStatement deleteStatement = getShardConnectionByKey(key).prepareStatement(delete.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(deleteType, deleteStatement);
if (stmt == null) return deleteStatement;
else return stmt;
}
private PreparedStatement createAndCacheUpdateStatement(StatementType updateType, String key)
throws SQLException {
StringBuilder update = new StringBuilder("UPDATE ");
update.append(updateType.tableName);
update.append(" SET ");
for (int i = 0; i < updateType.numFields; i++) {
update.append(COLUMN_PREFIX);
update.append(i);
update.append("=?");
if (i < updateType.numFields - 1) update.append(", ");
}
update.append(" WHERE ");
update.append(PRIMARY_KEY);
update.append(" = ?;");
PreparedStatement insertStatement = getShardConnectionByKey(key).prepareStatement(update.toString());
PreparedStatement stmt = cachedStatements.putIfAbsent(updateType, insertStatement);
if (stmt == null) return insertStatement;
else return stmt;
}
private PreparedStatement createAndCacheScanStatement(StatementType scanType, String key)
throws SQLException {
StringBuilder select = new StringBuilder("SELECT * FROM ");
select.append(scanType.tableName);
select.append(" WHERE ");
select.append(PRIMARY_KEY);
select.append(" >= ?");
select.append(" ORDER BY ");
select.append(PRIMARY_KEY);
select.append(" LIMIT ?;");
PreparedStatement scanStatement = getShardConnectionByKey(key).prepareStatement(select.toString());
if (this.jdbcFetchSize != null) scanStatement.setFetchSize(this.jdbcFetchSize);
PreparedStatement stmt = cachedStatements.putIfAbsent(scanType, scanStatement);
if (stmt == null) return scanStatement;
else return stmt;
}
@Override
public Status read(String tableName, String key, Set<String> fields,
HashMap<String, ByteIterator> result) {
try {
StatementType type = new StatementType(StatementType.Type.READ, tableName, 1, getShardIndexByKey(key));
PreparedStatement readStatement = cachedStatements.get(type);
if (readStatement == null) {
readStatement = createAndCacheReadStatement(type, key);
}
readStatement.setString(1, key);
ResultSet resultSet = readStatement.executeQuery();
if (!resultSet.next()) {
resultSet.close();
return Status.NOT_FOUND;
}
if (result != null && fields != null) {
for (String field : fields) {
String value = resultSet.getString(field);
result.put(field, new StringByteIterator(value));
}
}
resultSet.close();
return Status.OK;
} catch (SQLException e) {
System.err.println("Error in processing read of table " + tableName + ": "+e);
return Status.ERROR;
}
}
@Override
public Status scan(String tableName, String startKey, int recordcount,
Set<String> fields, Vector<HashMap<String, ByteIterator>> result) {
try {
StatementType type = new StatementType(StatementType.Type.SCAN, tableName, 1, getShardIndexByKey(startKey));
PreparedStatement scanStatement = cachedStatements.get(type);
if (scanStatement == null) {
scanStatement = createAndCacheScanStatement(type, startKey);
}
scanStatement.setString(1, startKey);
scanStatement.setInt(2, recordcount);
ResultSet resultSet = scanStatement.executeQuery();
for (int i = 0; i < recordcount && resultSet.next(); i++) {
if (result != null && fields != null) {
HashMap<String, ByteIterator> values = new HashMap<String, ByteIterator>();
for (String field : fields) {
String value = resultSet.getString(field);
values.put(field, new StringByteIterator(value));
}
result.add(values);
}
}
resultSet.close();
return Status.OK;
} catch (SQLException e) {
System.err.println("Error in processing scan of table: " + tableName + e);
return Status.ERROR;
}
}
  @Override
  public Status update(String tableName, String key, HashMap<String, ByteIterator> values) {
    try {
      int numFields = values.size();
      StatementType type = new StatementType(StatementType.Type.UPDATE, tableName, numFields, getShardIndexByKey(key));
      PreparedStatement updateStatement = cachedStatements.get(type);
      if (updateStatement == null) {
        updateStatement = createAndCacheUpdateStatement(type, key);
      }
      // Bind one value per SET column, then the key for the WHERE clause.
      // NOTE(review): values are bound in HashMap iteration order while the
      // cached SQL names columns field0..fieldN-1 in index order; this assumes
      // the two orders agree -- TODO confirm against the workload's field naming.
      int index = 1;
      for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
        updateStatement.setString(index++, entry.getValue().toString());
      }
      updateStatement.setString(index, key);
      int result = updateStatement.executeUpdate();
      // Exactly one row is expected to change for a keyed update.
      if (result == 1) return Status.OK;
      else return Status.UNEXPECTED_STATE;
    } catch (SQLException e) {
      System.err.println("Error in processing update to table: " + tableName + e);
      return Status.ERROR;
    }
  }
@Override
public Status insert(String tableName, String key, HashMap<String, ByteIterator> values) {
try {
int numFields = values.size();
StatementType type = new StatementType(StatementType.Type.INSERT, tableName, numFields, getShardIndexByKey(key));
PreparedStatement insertStatement = cachedStatements.get(type);
if (insertStatement == null) {
insertStatement = createAndCacheInsertStatement(type, key);
}
insertStatement.setString(1, key);
int index = 2;
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
String field = entry.getValue().toString();
insertStatement.setString(index++, field);
}
int result = insertStatement.executeUpdate();
if (result == 1) return Status.OK;
else return Status.UNEXPECTED_STATE;
} catch (SQLException e) {
System.err.println("Error in processing insert to table: " + tableName + e);
return Status.ERROR;
}
}
@Override
public Status delete(String tableName, String key) {
try {
StatementType type = new StatementType(StatementType.Type.DELETE, tableName, 1, getShardIndexByKey(key));
PreparedStatement deleteStatement = cachedStatements.get(type);
if (deleteStatement == null) {
deleteStatement = createAndCacheDeleteStatement(type, key);
}
deleteStatement.setString(1, key);
int result = deleteStatement.executeUpdate();
if (result == 1) return Status.OK;
else return Status.UNEXPECTED_STATE;
} catch (SQLException e) {
System.err.println("Error in processing delete to table: " + tableName + e);
return Status.ERROR;
}
}
}
| [jdbc] removed semicolons from cached statements
| jdbc/src/main/java/com/yahoo/ycsb/db/JdbcDBClient.java | [jdbc] removed semicolons from cached statements | <ide><path>dbc/src/main/java/com/yahoo/ycsb/db/JdbcDBClient.java
<ide> for (int i = 0; i < insertType.numFields; i++) {
<ide> insert.append(",?");
<ide> }
<del> insert.append(");");
<add> insert.append(")");
<ide> PreparedStatement insertStatement = getShardConnectionByKey(key).prepareStatement(insert.toString());
<ide> PreparedStatement stmt = cachedStatements.putIfAbsent(insertType, insertStatement);
<ide> if (stmt == null) return insertStatement;
<ide> read.append(" WHERE ");
<ide> read.append(PRIMARY_KEY);
<ide> read.append(" = ");
<del> read.append("?;");
<add> read.append("?");
<ide> PreparedStatement readStatement = getShardConnectionByKey(key).prepareStatement(read.toString());
<ide> PreparedStatement stmt = cachedStatements.putIfAbsent(readType, readStatement);
<ide> if (stmt == null) return readStatement;
<ide> delete.append(deleteType.tableName);
<ide> delete.append(" WHERE ");
<ide> delete.append(PRIMARY_KEY);
<del> delete.append(" = ?;");
<add> delete.append(" = ?");
<ide> PreparedStatement deleteStatement = getShardConnectionByKey(key).prepareStatement(delete.toString());
<ide> PreparedStatement stmt = cachedStatements.putIfAbsent(deleteType, deleteStatement);
<ide> if (stmt == null) return deleteStatement;
<ide> }
<ide> update.append(" WHERE ");
<ide> update.append(PRIMARY_KEY);
<del> update.append(" = ?;");
<add> update.append(" = ?");
<ide> PreparedStatement insertStatement = getShardConnectionByKey(key).prepareStatement(update.toString());
<ide> PreparedStatement stmt = cachedStatements.putIfAbsent(updateType, insertStatement);
<ide> if (stmt == null) return insertStatement;
<ide> select.append(" >= ?");
<ide> select.append(" ORDER BY ");
<ide> select.append(PRIMARY_KEY);
<del> select.append(" LIMIT ?;");
<add> select.append(" LIMIT ?");
<ide> PreparedStatement scanStatement = getShardConnectionByKey(key).prepareStatement(select.toString());
<ide> if (this.jdbcFetchSize != null) scanStatement.setFetchSize(this.jdbcFetchSize);
<ide> PreparedStatement stmt = cachedStatements.putIfAbsent(scanType, scanStatement); |
|
Java | apache-2.0 | 94dff97c3927d40e862d1f61e0da51ce7de65f33 | 0 | JavaSaBr/jME3-SpaceShift-Editor | package com.ss.editor.file.converter.impl;
import com.ss.editor.FileExtensions;
import com.ss.editor.Messages;
import com.ss.editor.file.converter.FileConverterDescription;
import org.jetbrains.annotations.NotNull;
import rlib.util.array.Array;
import rlib.util.array.ArrayFactory;
/**
* The implementation of {@link AbstractFileConverter} for converting .xbuf file to .j3o.
*
* @author JavaSaBr
*/
public class XBufToJ3oFileConverter extends AbstractFileConverter {
    /** File extensions accepted by this converter (only .xbuf). */
    private static final Array<String> EXTENSIONS = ArrayFactory.newArray(String.class);
    static {
        EXTENSIONS.add(FileExtensions.MODEL_XBUF);
        // Shrink the backing array to its final size; the list never grows again.
        EXTENSIONS.asUnsafe().trimToSize();
    }
    /** Descriptor registered with the converter framework: label, factory, extensions. */
    public static final FileConverterDescription DESCRIPTION = new FileConverterDescription();
    static {
        DESCRIPTION.setDescription(Messages.XBUF_TO_J3O_FILE_CONVERTER_DESCRIPTION);
        DESCRIPTION.setConstructor(XBufToJ3oFileConverter::new);
        DESCRIPTION.setExtensions(EXTENSIONS);
    }
    /** Instances are created only through {@link #DESCRIPTION}'s constructor reference. */
    private XBufToJ3oFileConverter() {
    }
    @NotNull
    @Override
    protected Array<String> getAvailableExtensions() {
        return EXTENSIONS;
    }
    /** @return the extension of converted files (.j3o). */
    @NotNull
    @Override
    public String getTargetExtension() {
        return FileExtensions.JME_OBJECT;
    }
}
| src/com/ss/editor/file/converter/impl/XBufToJ3oFileConverter.java | package com.ss.editor.file.converter.impl;
import com.ss.editor.FileExtensions;
import com.ss.editor.Messages;
import com.ss.editor.file.converter.FileConverterDescription;
import org.jetbrains.annotations.NotNull;
import rlib.util.array.Array;
import rlib.util.array.ArrayFactory;
/**
* The implementation of {@link AbstractFileConverter} for converting .xbuf file to .j3o.
*
* @author JavaSaBr
*/
public class XBufToJ3oFileConverter extends AbstractFileConverter {
    /** File extensions accepted by this converter (only .xbuf). */
    private static final Array<String> EXTENSIONS = ArrayFactory.newArray(String.class);
    static {
        EXTENSIONS.add(FileExtensions.MODEL_XBUF);
        EXTENSIONS.asUnsafe().trimToSize();
    }
    /** Descriptor registered with the converter framework: label, factory, extensions. */
    public static final FileConverterDescription DESCRIPTION = new FileConverterDescription();
    static {
        // Fixed: this converter handles .xbuf models, so it must advertise the
        // XBUF description (it previously used OBJ_TO_J3O_FILE_CONVERTER_DESCRIPTION,
        // copied from the OBJ converter).
        DESCRIPTION.setDescription(Messages.XBUF_TO_J3O_FILE_CONVERTER_DESCRIPTION);
        DESCRIPTION.setConstructor(XBufToJ3oFileConverter::new);
        DESCRIPTION.setExtensions(EXTENSIONS);
    }
    /** Instances are created only through {@link #DESCRIPTION}'s constructor reference. */
    private XBufToJ3oFileConverter() {
    }
    @NotNull
    @Override
    protected Array<String> getAvailableExtensions() {
        return EXTENSIONS;
    }
    /** @return the extension of converted files (.j3o). */
    @NotNull
    @Override
    public String getTargetExtension() {
        return FileExtensions.JME_OBJECT;
    }
}
| added supporting xbuf models.
| src/com/ss/editor/file/converter/impl/XBufToJ3oFileConverter.java | added supporting xbuf models. | <ide><path>rc/com/ss/editor/file/converter/impl/XBufToJ3oFileConverter.java
<ide> public static final FileConverterDescription DESCRIPTION = new FileConverterDescription();
<ide>
<ide> static {
<del> DESCRIPTION.setDescription(Messages.OBJ_TO_J3O_FILE_CONVERTER_DESCRIPTION);
<add> DESCRIPTION.setDescription(Messages.XBUF_TO_J3O_FILE_CONVERTER_DESCRIPTION);
<ide> DESCRIPTION.setConstructor(XBufToJ3oFileConverter::new);
<ide> DESCRIPTION.setExtensions(EXTENSIONS);
<ide> } |
|
Java | apache-2.0 | aa5a38b7d601c2b8d5b8ee091ca8840f31d083fd | 0 | PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr | package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.ServerSocket;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteResultHandler;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.embedded.JettyConfig;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tests the SolrCLI.RunExampleTool implementation that supports bin/solr -e [example]
*/
@LuceneTestCase.Slow
@SolrTestCaseJ4.SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
public class TestSolrCLIRunExample extends SolrTestCaseJ4 {
protected static final transient Logger log = LoggerFactory.getLogger(TestSolrCLIRunExample.class);
/**
* Overrides the call to exec bin/solr to start Solr nodes to start them using the Solr test-framework
* instead of the script, since the script depends on a full build.
*/
private class RunExampleExecutor extends DefaultExecutor implements Closeable {
    private PrintStream stdout;
    // Every command handed to this executor, kept for post-run inspection.
    private List<org.apache.commons.exec.CommandLine> commandsExecuted = new ArrayList<>();
    // At most one of these is non-null: either the mini SolrCloud cluster or
    // the single standalone Jetty-based Solr instance started by this executor.
    private MiniSolrCloudCluster solrCloudCluster;
    private JettySolrRunner standaloneSolr;
    RunExampleExecutor(PrintStream stdout) {
      super();
      // Output stream supplied by the test.
      this.stdout = stdout;
    }
/**
* Override the call to execute a command asynchronously to occur synchronously during a unit test.
*/
    @Override
    public void execute(org.apache.commons.exec.CommandLine cmd, Map<String,String> env, ExecuteResultHandler erh) throws IOException {
      // Run synchronously and fail fast instead of reporting failures through
      // the ExecuteResultHandler; the env map is intentionally ignored here.
      int code = execute(cmd);
      if (code != 0) throw new RuntimeException("Failed to execute cmd: "+joinArgs(cmd.getArguments()));
    }
    @Override
    public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException {
      // collect the commands as they are executed for analysis by the test
      commandsExecuted.add(cmd);
      String exe = cmd.getExecutable();
      // Intercept bin/solr invocations and service them with in-process test
      // infrastructure instead of forking the real script.
      if (exe.endsWith("solr")) {
        String[] args = cmd.getArguments();
        if ("start".equals(args[0])) {
          // Without -cloud/-c this is a standalone (non-cloud) start.
          if (!hasFlag("-cloud", args) && !hasFlag("-c", args))
            return startStandaloneSolr(args);
          File baseDir = createTempDir().toFile();
          File solrHomeDir = new File(getArg("-s", args));
          int port = Integer.parseInt(getArg("-p", args));
          JettyConfig jettyConfig =
              JettyConfig.builder().setContext("/solr").setPort(port).build();
          try {
            if (solrCloudCluster == null) {
              System.setProperty("host", "localhost");
              System.setProperty("jetty.port", String.valueOf(port));
              solrCloudCluster =
                  new MiniSolrCloudCluster(1, baseDir, new File(solrHomeDir, "solr.xml"), jettyConfig);
            } else {
              // another member of this cluster -- not supported yet, due to how MiniSolrCloudCluster works
              throw new IllegalArgumentException("Only launching one SolrCloud node is supported by this test!");
            }
          } catch (Exception e) {
            if (e instanceof RuntimeException) {
              throw (RuntimeException)e;
            } else {
              throw new RuntimeException(e);
            }
          }
        } else if ("stop".equals(args[0])) {
          int port = Integer.parseInt(getArg("-p", args));
          // stop the requested node
          if (standaloneSolr != null) {
            int localPort = standaloneSolr.getLocalPort();
            if (port == localPort) {
              try {
                standaloneSolr.stop();
                log.info("Stopped standalone Solr instance running on port "+port);
              } catch (Exception e) {
                if (e instanceof RuntimeException) {
                  throw (RuntimeException)e;
                } else {
                  throw new RuntimeException(e);
                }
              }
            } else {
              throw new IllegalArgumentException("No Solr is running on port "+port);
            }
          } else {
            // No standalone node -- fall back to shutting down the cloud cluster.
            if (solrCloudCluster != null) {
              try {
                solrCloudCluster.shutdown();
                log.info("Stopped SolrCloud test cluster");
              } catch (Exception e) {
                if (e instanceof RuntimeException) {
                  throw (RuntimeException)e;
                } else {
                  throw new RuntimeException(e);
                }
              }
            } else {
              throw new IllegalArgumentException("No Solr nodes found to stop!");
            }
          }
        }
      } else {
        String cmdLine = joinArgs(cmd.getArguments());
        if (cmdLine.indexOf("post.jar") != -1) {
          // invocation of the post.jar file ... we'll just hit the SimplePostTool directly vs. trying to invoke another JVM
          // -D arguments become system properties; everything after the jar
          // path is forwarded to SimplePostTool as its argument list.
          List<String> argsToSimplePostTool = new ArrayList<String>();
          boolean afterPostJarArg = false;
          for (String arg : cmd.getArguments()) {
            if (arg.startsWith("-D")) {
              arg = arg.substring(2);
              int eqPos = arg.indexOf("=");
              System.setProperty(arg.substring(0,eqPos), arg.substring(eqPos+1));
            } else {
              if (arg.endsWith("post.jar")) {
                afterPostJarArg = true;
              } else {
                if (afterPostJarArg) {
                  argsToSimplePostTool.add(arg);
                }
              }
            }
          }
          SimplePostTool.main(argsToSimplePostTool.toArray(new String[0]));
        } else {
          // Anything else is executed for real through commons-exec.
          log.info("Executing command: "+cmdLine);
          try {
            return super.execute(cmd);
          } catch (Exception exc) {
            log.error("Execute command ["+cmdLine+"] failed due to: "+exc, exc);
            throw exc;
          }
        }
      }
      return 0;
    }
protected String joinArgs(String[] args) {
if (args == null || args.length == 0)
return "";
StringBuilder sb = new StringBuilder();
for (int a=0; a < args.length; a++) {
if (a > 0) sb.append(' ');
sb.append(args[a]);
}
return sb.toString();
}
protected int startStandaloneSolr(String[] args) {
if (standaloneSolr != null) {
throw new IllegalStateException("Test is already running a standalone Solr instance "+
standaloneSolr.getBaseUrl()+"! This indicates a bug in the unit test logic.");
}
if (solrCloudCluster != null) {
throw new IllegalStateException("Test is already running a mini SolrCloud cluster! "+
"This indicates a bug in the unit test logic.");
}
int port = Integer.parseInt(getArg("-p", args));
File solrHomeDir = new File(getArg("-s", args));
System.setProperty("host", "localhost");
System.setProperty("jetty.port", String.valueOf(port));
standaloneSolr = new JettySolrRunner(solrHomeDir.getAbsolutePath(), "/solr", port);
Thread bg = new Thread() {
public void run() {
try {
standaloneSolr.start();
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new RuntimeException(e);
}
}
}
};
bg.start();
return 0;
}
protected String getArg(String arg, String[] args) {
for (int a=0; a < args.length; a++) {
if (arg.equals(args[a])) {
if (a+1 >= args.length)
throw new IllegalArgumentException("Missing required value for the "+arg+" option!");
return args[a + 1];
}
}
throw new IllegalArgumentException("Missing required arg "+arg+
" needed to execute command: "+commandsExecuted.get(commandsExecuted.size()-1));
}
protected boolean hasFlag(String flag, String[] args) {
for (String arg : args) {
if (flag.equals(arg))
return true;
}
return false;
}
@Override
public void close() throws IOException {
if (solrCloudCluster != null) {
try {
solrCloudCluster.shutdown();
} catch (Exception e) {
log.warn("Failed to shutdown MiniSolrCloudCluster due to: " + e);
}
}
if (standaloneSolr != null) {
try {
standaloneSolr.stop();
} catch (Exception exc) {
log.warn("Failed to shutdown standalone Solr due to: " + exc);
}
standaloneSolr = null;
}
}
}
protected List<Closeable> closeables = new ArrayList<>();
@After
public void tearDown() throws Exception {
super.tearDown();
if (closeables != null) {
for (Closeable toClose : closeables) {
try {
toClose.close();
} catch (Exception ignore) {}
}
closeables.clear();
closeables = null;
}
}
@Ignore
@Test
public void testTechproductsExample() throws Exception {
testExample("techproducts");
}
@Test
public void testSchemalessExample() throws Exception {
testExample("schemaless");
}
protected void testExample(String exampleName) throws Exception {
File solrHomeDir = new File(ExternalPaths.SERVER_HOME);
if (!solrHomeDir.isDirectory())
fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!");
Path tmpDir = createTempDir();
File solrExampleDir = tmpDir.toFile();
File solrServerDir = solrHomeDir.getParentFile();
// need a port to start the example server on
int bindPort = -1;
try (ServerSocket socket = new ServerSocket(0)) {
bindPort = socket.getLocalPort();
}
log.info("Selected port "+bindPort+" to start "+exampleName+" example Solr instance on ...");
String[] toolArgs = new String[] {
"-e", exampleName,
"-serverDir", solrServerDir.getAbsolutePath(),
"-exampleDir", solrExampleDir.getAbsolutePath(),
"-p", String.valueOf(bindPort)
};
// capture tool output to stdout
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name());
RunExampleExecutor executor = new RunExampleExecutor(stdoutSim);
closeables.add(executor);
SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, System.in, stdoutSim);
try {
tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs));
} catch (Exception e) {
log.error("RunExampleTool failed due to: " + e +
"; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name()));
throw e;
}
String toolOutput = baos.toString(StandardCharsets.UTF_8.name());
// dump all the output written by the SolrCLI commands to stdout
//System.out.println("\n\n"+toolOutput+"\n\n");
File exampleSolrHomeDir = new File(solrExampleDir, exampleName+"/solr");
assertTrue(exampleSolrHomeDir.getAbsolutePath() + " not found! run " +
exampleName + " example failed; output: " + toolOutput,
exampleSolrHomeDir.isDirectory());
if ("techproducts".equals(exampleName)) {
HttpSolrClient solrClient = new HttpSolrClient("http://localhost:" + bindPort + "/solr/" + exampleName);
SolrQuery query = new SolrQuery("*:*");
QueryResponse qr = solrClient.query(query);
long numFound = qr.getResults().getNumFound();
assertTrue("expected 32 docs in the "+exampleName+" example but found " + numFound + ", output: " + toolOutput,
numFound == 32);
}
// stop the test instance
executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p " + bindPort));
}
/**
* Tests the interactive SolrCloud example; we cannot test the non-interactive because we need control over
* the port and can only test with one node since the test relies on setting the host and jetty.port system
* properties, i.e. there is no test coverage for the -noprompt option.
*/
@Test
public void testInteractiveSolrCloudExample() throws Exception {
File solrHomeDir = new File(ExternalPaths.SERVER_HOME);
if (!solrHomeDir.isDirectory())
fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!");
Path tmpDir = createTempDir();
File solrExampleDir = tmpDir.toFile();
File solrServerDir = solrHomeDir.getParentFile();
String[] toolArgs = new String[] {
"-example", "cloud",
"-serverDir", solrServerDir.getAbsolutePath(),
"-exampleDir", solrExampleDir.getAbsolutePath()
};
int bindPort = -1;
try (ServerSocket socket = new ServerSocket(0)) {
bindPort = socket.getLocalPort();
}
String collectionName = "testCloudExamplePrompt";
// sthis test only support launching one SolrCloud node due to how MiniSolrCloudCluster works
// and the need for setting the host and port system properties ...
String userInput = "1\n"+bindPort+"\n"+collectionName+"\n2\n2\ndata_driven_schema_configs\n";
// simulate user input from stdin
InputStream userInputSim = new ByteArrayInputStream(userInput.getBytes(StandardCharsets.UTF_8));
// capture tool output to stdout
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name());
RunExampleExecutor executor = new RunExampleExecutor(stdoutSim);
closeables.add(executor);
SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, userInputSim, stdoutSim);
try {
tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs));
} catch (Exception e) {
System.err.println("RunExampleTool failed due to: " + e +
"; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name()));
throw e;
}
String toolOutput = baos.toString(StandardCharsets.UTF_8.name());
// verify Solr is running on the expected port and verify the collection exists
String solrUrl = "http://localhost:"+bindPort+"/solr";
String collectionListUrl = solrUrl+"/admin/collections?action=list";
if (!SolrCLI.safeCheckCollectionExists(collectionListUrl, collectionName)) {
fail("After running Solr cloud example, test collection '"+collectionName+
"' not found in Solr at: "+solrUrl+"; tool output: "+toolOutput);
}
// index some docs - to verify all is good for both shards
CloudSolrClient cloudClient = null;
try {
cloudClient = new CloudSolrClient(executor.solrCloudCluster.getZkServer().getZkAddress());
cloudClient.connect();
cloudClient.setDefaultCollection(collectionName);
int numDocs = 10;
for (int d=0; d < numDocs; d++) {
SolrInputDocument doc = new SolrInputDocument();
doc.setField("id", "doc"+d);
doc.setField("str_s", "a");
cloudClient.add(doc);
}
cloudClient.commit();
QueryResponse qr = cloudClient.query(new SolrQuery("str_s:a"));
if (qr.getResults().getNumFound() != numDocs) {
fail("Expected "+numDocs+" to be found in the "+collectionName+
" collection but only found "+qr.getResults().getNumFound());
}
} finally {
if (cloudClient != null) {
try {
cloudClient.close();
} catch (Exception ignore){}
}
}
File node1SolrHome = new File(solrExampleDir, "cloud/node1/solr");
if (!node1SolrHome.isDirectory()) {
fail(node1SolrHome.getAbsolutePath()+" not found! run cloud example failed; tool output: "+toolOutput);
}
// delete the collection
SolrCLI.DeleteTool deleteTool = new SolrCLI.DeleteTool(stdoutSim);
String[] deleteArgs = new String[] { "-name", collectionName, "-solrUrl", solrUrl };
deleteTool.runTool(
SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(deleteTool.getOptions()), deleteArgs));
// dump all the output written by the SolrCLI commands to stdout
//System.out.println(toolOutput);
// stop the test instance
executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p "+bindPort));
}
}
| solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java | package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.ServerSocket;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteResultHandler;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.embedded.JettyConfig;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tests the SolrCLI.RunExampleTool implementation that supports bin/solr -e [example]
*/
@LuceneTestCase.Slow
@SolrTestCaseJ4.SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
public class TestSolrCLIRunExample extends SolrTestCaseJ4 {
protected static final transient Logger log = LoggerFactory.getLogger(TestSolrCLIRunExample.class);
/**
* Overrides the call to exec bin/solr to start Solr nodes to start them using the Solr test-framework
* instead of the script, since the script depends on a full build.
*/
private class RunExampleExecutor extends DefaultExecutor implements Closeable {
private PrintStream stdout;
private List<org.apache.commons.exec.CommandLine> commandsExecuted = new ArrayList<>();
private MiniSolrCloudCluster solrCloudCluster;
private JettySolrRunner standaloneSolr;
RunExampleExecutor(PrintStream stdout) {
super();
this.stdout = stdout;
}
/**
* Override the call to execute a command asynchronously to occur synchronously during a unit test.
*/
@Override
public void execute(org.apache.commons.exec.CommandLine cmd, Map<String,String> env, ExecuteResultHandler erh) throws IOException {
int code = execute(cmd);
if (code != 0) throw new RuntimeException("Failed to execute cmd: "+joinArgs(cmd.getArguments()));
}
@Override
public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException {
// collect the commands as they are executed for analysis by the test
commandsExecuted.add(cmd);
String exe = cmd.getExecutable();
if (exe.endsWith("solr")) {
String[] args = cmd.getArguments();
if ("start".equals(args[0])) {
if (!hasFlag("-cloud", args) && !hasFlag("-c", args))
return startStandaloneSolr(args);
File baseDir = createTempDir().toFile();
File solrHomeDir = new File(getArg("-s", args));
int port = Integer.parseInt(getArg("-p", args));
JettyConfig jettyConfig =
JettyConfig.builder().setContext("/solr").setPort(port).build();
try {
if (solrCloudCluster == null) {
System.setProperty("host", "localhost");
System.setProperty("jetty.port", String.valueOf(port));
solrCloudCluster =
new MiniSolrCloudCluster(1, baseDir, new File(solrHomeDir, "solr.xml"), jettyConfig);
} else {
// another member of this cluster -- not supported yet, due to how MiniSolrCloudCluster works
throw new IllegalArgumentException("Only launching one SolrCloud node is supported by this test!");
}
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new RuntimeException(e);
}
}
} else if ("stop".equals(args[0])) {
int port = Integer.parseInt(getArg("-p", args));
// stop the requested node
if (standaloneSolr != null) {
int localPort = standaloneSolr.getLocalPort();
if (port == localPort) {
try {
standaloneSolr.stop();
log.info("Stopped standalone Solr instance running on port "+port);
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new RuntimeException(e);
}
}
} else {
throw new IllegalArgumentException("No Solr is running on port "+port);
}
} else {
if (solrCloudCluster != null) {
try {
solrCloudCluster.shutdown();
log.info("Stopped SolrCloud test cluster");
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new RuntimeException(e);
}
}
} else {
throw new IllegalArgumentException("No Solr nodes found to stop!");
}
}
}
} else {
String cmdLine = joinArgs(cmd.getArguments());
if (cmdLine.indexOf("post.jar") != -1) {
// invocation of the post.jar file ... we'll just hit the SimplePostTool directly vs. trying to invoke another JVM
List<String> argsToSimplePostTool = new ArrayList<String>();
boolean afterPostJarArg = false;
for (String arg : cmd.getArguments()) {
if (arg.startsWith("-D")) {
arg = arg.substring(2);
int eqPos = arg.indexOf("=");
System.setProperty(arg.substring(0,eqPos), arg.substring(eqPos+1));
} else {
if (arg.endsWith("post.jar")) {
afterPostJarArg = true;
} else {
if (afterPostJarArg) {
argsToSimplePostTool.add(arg);
}
}
}
}
SimplePostTool.main(argsToSimplePostTool.toArray(new String[0]));
} else {
log.info("Executing command: "+cmdLine);
try {
return super.execute(cmd);
} catch (Exception exc) {
log.error("Execute command ["+cmdLine+"] failed due to: "+exc, exc);
throw exc;
}
}
}
return 0;
}
protected String joinArgs(String[] args) {
if (args == null || args.length == 0)
return "";
StringBuilder sb = new StringBuilder();
for (int a=0; a < args.length; a++) {
if (a > 0) sb.append(' ');
sb.append(args[a]);
}
return sb.toString();
}
protected int startStandaloneSolr(String[] args) {
if (standaloneSolr != null) {
throw new IllegalStateException("Test is already running a standalone Solr instance "+
standaloneSolr.getBaseUrl()+"! This indicates a bug in the unit test logic.");
}
if (solrCloudCluster != null) {
throw new IllegalStateException("Test is already running a mini SolrCloud cluster! "+
"This indicates a bug in the unit test logic.");
}
int port = Integer.parseInt(getArg("-p", args));
File solrHomeDir = new File(getArg("-s", args));
System.setProperty("host", "localhost");
System.setProperty("jetty.port", String.valueOf(port));
standaloneSolr = new JettySolrRunner(solrHomeDir.getAbsolutePath(), "/solr", port);
Thread bg = new Thread() {
public void run() {
try {
standaloneSolr.start();
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new RuntimeException(e);
}
}
}
};
bg.start();
return 0;
}
protected String getArg(String arg, String[] args) {
for (int a=0; a < args.length; a++) {
if (arg.equals(args[a])) {
if (a+1 >= args.length)
throw new IllegalArgumentException("Missing required value for the "+arg+" option!");
return args[a + 1];
}
}
throw new IllegalArgumentException("Missing required arg "+arg+
" needed to execute command: "+commandsExecuted.get(commandsExecuted.size()-1));
}
protected boolean hasFlag(String flag, String[] args) {
for (String arg : args) {
if (flag.equals(arg))
return true;
}
return false;
}
@Override
public void close() throws IOException {
if (solrCloudCluster != null) {
try {
solrCloudCluster.shutdown();
} catch (Exception e) {
log.warn("Failed to shutdown MiniSolrCloudCluster due to: " + e);
}
}
if (standaloneSolr != null) {
try {
standaloneSolr.stop();
} catch (Exception exc) {
log.warn("Failed to shutdown standalone Solr due to: " + exc);
}
standaloneSolr = null;
}
}
}
protected List<Closeable> closeables = new ArrayList<>();
@After
public void tearDown() throws Exception {
super.tearDown();
if (closeables != null) {
for (Closeable toClose : closeables) {
try {
toClose.close();
} catch (Exception ignore) {}
}
closeables.clear();
closeables = null;
}
}
@Test
public void testTechproductsExample() throws Exception {
testExample("techproducts");
}
@Test
public void testSchemalessExample() throws Exception {
testExample("schemaless");
}
protected void testExample(String exampleName) throws Exception {
File solrHomeDir = new File(ExternalPaths.SERVER_HOME);
if (!solrHomeDir.isDirectory())
fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!");
Path tmpDir = createTempDir();
File solrExampleDir = tmpDir.toFile();
File solrServerDir = solrHomeDir.getParentFile();
// need a port to start the example server on
int bindPort = -1;
try (ServerSocket socket = new ServerSocket(0)) {
bindPort = socket.getLocalPort();
}
log.info("Selected port "+bindPort+" to start "+exampleName+" example Solr instance on ...");
String[] toolArgs = new String[] {
"-e", exampleName,
"-serverDir", solrServerDir.getAbsolutePath(),
"-exampleDir", solrExampleDir.getAbsolutePath(),
"-p", String.valueOf(bindPort)
};
// capture tool output to stdout
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name());
RunExampleExecutor executor = new RunExampleExecutor(stdoutSim);
closeables.add(executor);
SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, System.in, stdoutSim);
try {
tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs));
} catch (Exception e) {
log.error("RunExampleTool failed due to: " + e +
"; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name()));
throw e;
}
String toolOutput = baos.toString(StandardCharsets.UTF_8.name());
// dump all the output written by the SolrCLI commands to stdout
//System.out.println("\n\n"+toolOutput+"\n\n");
File exampleSolrHomeDir = new File(solrExampleDir, exampleName+"/solr");
assertTrue(exampleSolrHomeDir.getAbsolutePath() + " not found! run " +
exampleName + " example failed; output: " + toolOutput,
exampleSolrHomeDir.isDirectory());
if ("techproducts".equals(exampleName)) {
HttpSolrClient solrClient = new HttpSolrClient("http://localhost:" + bindPort + "/solr/" + exampleName);
SolrQuery query = new SolrQuery("*:*");
QueryResponse qr = solrClient.query(query);
long numFound = qr.getResults().getNumFound();
assertTrue("expected 32 docs in the "+exampleName+" example but found " + numFound + ", output: " + toolOutput,
numFound == 32);
}
// stop the test instance
executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p " + bindPort));
}
/**
* Tests the interactive SolrCloud example; we cannot test the non-interactive because we need control over
* the port and can only test with one node since the test relies on setting the host and jetty.port system
* properties, i.e. there is no test coverage for the -noprompt option.
*/
@Test
public void testInteractiveSolrCloudExample() throws Exception {
File solrHomeDir = new File(ExternalPaths.SERVER_HOME);
if (!solrHomeDir.isDirectory())
fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!");
Path tmpDir = createTempDir();
File solrExampleDir = tmpDir.toFile();
File solrServerDir = solrHomeDir.getParentFile();
String[] toolArgs = new String[] {
"-example", "cloud",
"-serverDir", solrServerDir.getAbsolutePath(),
"-exampleDir", solrExampleDir.getAbsolutePath()
};
int bindPort = -1;
try (ServerSocket socket = new ServerSocket(0)) {
bindPort = socket.getLocalPort();
}
String collectionName = "testCloudExamplePrompt";
// sthis test only support launching one SolrCloud node due to how MiniSolrCloudCluster works
// and the need for setting the host and port system properties ...
String userInput = "1\n"+bindPort+"\n"+collectionName+"\n2\n2\ndata_driven_schema_configs\n";
// simulate user input from stdin
InputStream userInputSim = new ByteArrayInputStream(userInput.getBytes(StandardCharsets.UTF_8));
// capture tool output to stdout
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name());
RunExampleExecutor executor = new RunExampleExecutor(stdoutSim);
closeables.add(executor);
SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, userInputSim, stdoutSim);
try {
tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs));
} catch (Exception e) {
System.err.println("RunExampleTool failed due to: " + e +
"; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name()));
throw e;
}
String toolOutput = baos.toString(StandardCharsets.UTF_8.name());
// verify Solr is running on the expected port and verify the collection exists
String solrUrl = "http://localhost:"+bindPort+"/solr";
String collectionListUrl = solrUrl+"/admin/collections?action=list";
if (!SolrCLI.safeCheckCollectionExists(collectionListUrl, collectionName)) {
fail("After running Solr cloud example, test collection '"+collectionName+
"' not found in Solr at: "+solrUrl+"; tool output: "+toolOutput);
}
// index some docs - to verify all is good for both shards
CloudSolrClient cloudClient = null;
try {
cloudClient = new CloudSolrClient(executor.solrCloudCluster.getZkServer().getZkAddress());
cloudClient.connect();
cloudClient.setDefaultCollection(collectionName);
int numDocs = 10;
for (int d=0; d < numDocs; d++) {
SolrInputDocument doc = new SolrInputDocument();
doc.setField("id", "doc"+d);
doc.setField("str_s", "a");
cloudClient.add(doc);
}
cloudClient.commit();
QueryResponse qr = cloudClient.query(new SolrQuery("str_s:a"));
if (qr.getResults().getNumFound() != numDocs) {
fail("Expected "+numDocs+" to be found in the "+collectionName+
" collection but only found "+qr.getResults().getNumFound());
}
} finally {
if (cloudClient != null) {
try {
cloudClient.close();
} catch (Exception ignore){}
}
}
File node1SolrHome = new File(solrExampleDir, "cloud/node1/solr");
if (!node1SolrHome.isDirectory()) {
fail(node1SolrHome.getAbsolutePath()+" not found! run cloud example failed; tool output: "+toolOutput);
}
// delete the collection
SolrCLI.DeleteTool deleteTool = new SolrCLI.DeleteTool(stdoutSim);
String[] deleteArgs = new String[] { "-name", collectionName, "-solrUrl", solrUrl };
deleteTool.runTool(
SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(deleteTool.getOptions()), deleteArgs));
// dump all the output written by the SolrCLI commands to stdout
//System.out.println(toolOutput);
// stop the test instance
executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p "+bindPort));
}
}
| SOLR-7847: disable the testTechproductsExample test that is failing on Jenkins
git-svn-id: e799fff536a883f3c27ef733fe770373e8a19271@1694178 13f79535-47bb-0310-9956-ffa450edef68
| solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java | SOLR-7847: disable the testTechproductsExample test that is failing on Jenkins | <ide><path>olr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
<ide> import org.apache.solr.cloud.MiniSolrCloudCluster;
<ide> import org.apache.solr.common.SolrInputDocument;
<ide> import org.junit.After;
<add>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide> }
<ide> }
<ide>
<add> @Ignore
<ide> @Test
<ide> public void testTechproductsExample() throws Exception {
<ide> testExample("techproducts"); |
|
Java | apache-2.0 | 511403002b6d57ab4ece1b8327be69fa94070d76 | 0 | zhaozw/meeting | package com.meetisan.meetisan;
import com.meetisan.meetisan.signup.InsertEmailActivity;
import com.meetisan.meetisan.utils.FormatUtils;
import com.meetisan.meetisan.utils.ToastHelper;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
public class LoginActivity extends Activity implements OnClickListener {
// private static final String TAG = LoginActivity.class.getSimpleName();
private EditText mEmailTxt, mPwdTxt;
private TextView mForgotPwdTxt, mSignUpTxt;
private Button mLoginBtn;
private String email, pwd;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
initView();
}
private void initView() {
TextView mTitleTxt = (TextView) findViewById(R.id.txt_title);
mTitleTxt.setText(R.string.login);
mTitleTxt.setVisibility(View.VISIBLE);
mEmailTxt = (EditText) findViewById(R.id.email);
mPwdTxt = (EditText) findViewById(R.id.password);
mForgotPwdTxt = (TextView) findViewById(R.id.txt_forget_pwd);
mForgotPwdTxt.setOnClickListener(this);
mSignUpTxt = (TextView) findViewById(R.id.txt_sign_up);
mSignUpTxt.setOnClickListener(this);
mLoginBtn = (Button) findViewById(R.id.btn_login);
mLoginBtn.setOnClickListener(this);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.txt_forget_pwd:
Intent intent1 = new Intent(this, InsertEmailActivity.class);
startActivity(intent1);
break;
case R.id.txt_sign_up:
Intent intent2 = new Intent(this, InsertEmailActivity.class);
startActivity(intent2);
break;
case R.id.btn_login:
attemptLogin();
break;
default:
break;
}
}
private void attemptLogin() {
email = mEmailTxt.getText().toString();
pwd = mPwdTxt.getText().toString();
if (TextUtils.isEmpty(email)) {
ToastHelper.showToast(R.string.empty_email_tips);
return;
}
if (!FormatUtils.checkEmailAvailable(email)) {
ToastHelper.showToast(R.string.error_invalid_email);
return;
}
if (TextUtils.isEmpty(pwd)) {
ToastHelper.showToast(R.string.empty_pwd_tips);
return;
}
doLogin();
}
private void doLogin() {
// assume login result
boolean loginResult = true;
if (loginResult) {
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
this.finish();
} else {
ToastHelper.showToast(R.string.error_incorrect_email_or_pwd);
}
}
}
| Meetisan/src/com/meetisan/meetisan/LoginActivity.java | package com.meetisan.meetisan;
import com.meetisan.meetisan.signup.InsertEmailActivity;
import com.meetisan.meetisan.utils.FormatUtils;
import com.meetisan.meetisan.utils.ToastHelper;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
public class LoginActivity extends Activity implements OnClickListener {
// private static final String TAG = LoginActivity.class.getSimpleName();
private EditText mEmailTxt, mPwdTxt;
private TextView mForgotPwdTxt, mSignUpTxt;
private Button mLoginBtn;
private String email, pwd;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
initView();
}
private void initView() {
TextView mTitleTxt = (TextView) findViewById(R.id.txt_title);
mTitleTxt.setText(R.string.login);
mTitleTxt.setVisibility(View.VISIBLE);
mEmailTxt = (EditText) findViewById(R.id.email);
mPwdTxt = (EditText) findViewById(R.id.password);
mForgotPwdTxt = (TextView) findViewById(R.id.txt_forget_pwd);
mForgotPwdTxt.setOnClickListener(this);
mSignUpTxt = (TextView) findViewById(R.id.txt_sign_up);
mSignUpTxt.setOnClickListener(this);
mLoginBtn = (Button) findViewById(R.id.btn_login);
mLoginBtn.setOnClickListener(this);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.txt_forget_pwd:
Intent intent1 = new Intent(this, InsertEmailActivity.class);
startActivity(intent1);
break;
case R.id.txt_sign_up:
Intent intent2 = new Intent(this, InsertEmailActivity.class);
startActivity(intent2);
break;
case R.id.btn_login:
attemptLogin();
break;
default:
break;
}
}
private void attemptLogin() {
email = mEmailTxt.getText().toString();
pwd = mPwdTxt.getText().toString();
if (TextUtils.isEmpty(email)) {
ToastHelper.showToast(R.string.empty_email_tips);
return;
}
if (!FormatUtils.checkEmailAvailable(email)) {
ToastHelper.showToast(R.string.error_invalid_email);
return;
}
if (TextUtils.isEmpty(pwd)) {
ToastHelper.showToast(R.string.empty_pwd_tips);
return;
}
doLogin();
}
private void doLogin() {
// assume login result
boolean loginResult = false;
if (loginResult) {
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
this.finish();
} else {
ToastHelper.showToast(R.string.error_incorrect_email_or_pwd);
}
}
}
| assume login success
| Meetisan/src/com/meetisan/meetisan/LoginActivity.java | assume login success | <ide><path>eetisan/src/com/meetisan/meetisan/LoginActivity.java
<ide>
<ide> private void doLogin() {
<ide> // assume login result
<del> boolean loginResult = false;
<add> boolean loginResult = true;
<ide> if (loginResult) {
<ide> Intent intent = new Intent(this, MainActivity.class);
<ide> startActivity(intent); |
|
Java | epl-1.0 | a8ac5bcb4dc0eb8f528fd466b944ab9a7445f258 | 0 | stzilli/kapua,cbaerikebc/kapua,cbaerikebc/kapua,stzilli/kapua,LeoNerdoG/kapua,cbaerikebc/kapua,LeoNerdoG/kapua,stzilli/kapua,LeoNerdoG/kapua,cbaerikebc/kapua,LeoNerdoG/kapua,stzilli/kapua,stzilli/kapua,LeoNerdoG/kapua | /*******************************************************************************
* Copyright (c) 2011, 2016 Eurotech and/or its affiliates and others
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech - initial API and implementation
*
*******************************************************************************/
package org.eclipse.kapua.service.device.call.message.kura;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.message.internal.MessageErrorCodes;
import org.eclipse.kapua.message.internal.MessageException;
import org.eclipse.kapua.service.device.call.message.DevicePayload;
import org.eclipse.kapua.service.device.call.message.DevicePosition;
import org.eclipse.kapua.service.device.call.message.kura.proto.KuraPayloadProto;
import org.eclipse.kapua.service.device.call.message.kura.utils.GZIPUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Kura device payload implementation.
*
* @since 1.0
*
*/
public class KuraPayload implements DevicePayload
{
private static final Logger s_logger = LoggerFactory.getLogger(KuraPayload.class);
protected Date timestamp;
protected DevicePosition position;
protected Map<String, Object> metrics;
protected byte[] body;
/**
* Constructor
*/
public KuraPayload()
{
metrics = new HashMap<>();
}
@Override
public Date getTimestamp()
{
return timestamp;
}
/**
* Get the message timestamp
*
* @param timestamp
*/
public void setTimestamp(Date timestamp)
{
this.timestamp = timestamp;
}
@Override
public DevicePosition getPosition()
{
return position;
}
/**
* Set the device position
*
* @param position
*/
public void setPosition(DevicePosition position)
{
this.position = position;
}
@Override
public Map<String, Object> getMetrics()
{
return metrics;
}
@Override
public byte[] getBody()
{
return body;
}
/**
* Set the message body
*
* @param body
*/
public void setBody(byte[] body)
{
this.body = body;
}
@Override
public byte[] toByteArray()
{
// Build the message
KuraPayloadProto.KuraPayload.Builder protoMsg = KuraPayloadProto.KuraPayload.newBuilder();
// set the timestamp
if (getTimestamp() != null) {
protoMsg.setTimestamp(getTimestamp().getTime());
}
// set the position
if (getPosition() != null) {
protoMsg.setPosition(buildPositionProtoBuf(getPosition()));
}
// set the metrics
for (String name : getMetrics().keySet()) {
// build a metric
Object value = getMetrics().get(name);
if (value == null) {
continue;
}
try {
KuraPayloadProto.KuraPayload.KuraMetric.Builder metricB = KuraPayloadProto.KuraPayload.KuraMetric.newBuilder();
metricB.setName(name);
setProtoKuraMetricValue(metricB, value);
metricB.build();
// add it to the message
protoMsg.addMetric(metricB);
}
catch (MessageException eihte) {
s_logger.error("During serialization, ignoring metric named: {}. Unrecognized value type: {}.", name, value.getClass().getName());
throw new RuntimeException(eihte);
}
}
// set the body
if (getBody() != null) {
protoMsg.setBody(ByteString.copyFrom(getBody()));
}
return protoMsg.build().toByteArray();
}
@Override
public void readFromByteArray(byte[] bytes)
throws KapuaException
{
if (GZIPUtils.isCompressed(bytes)) {
try {
bytes = GZIPUtils.decompress(bytes);
}
catch (IOException e) {
// throw new KapuaDeviceCallException(KapuaDeviceCallErrorCodes.)
// FIXME: manage!
throw KapuaException.internalError(e);
}
}
KuraPayloadProto.KuraPayload protoMsg = null;
try {
protoMsg = KuraPayloadProto.KuraPayload.parseFrom(bytes);
}
catch (InvalidProtocolBufferException ipbe) {
throw new MessageException(MessageErrorCodes.INVALID_MESSAGE, ipbe, (Object[]) null);
}
//
// Add timestamp
if (protoMsg.hasTimestamp()) {
timestamp = (new Date(protoMsg.getTimestamp()));
}
//
// Add position
if (protoMsg.hasPosition()) {
position = (buildFromProtoBuf(protoMsg.getPosition()));
}
//
// Add metrics
for (int i = 0; i < protoMsg.getMetricCount(); i++) {
String name = protoMsg.getMetric(i).getName();
try {
Object value = getProtoKuraMetricValue(protoMsg.getMetric(i), protoMsg.getMetric(i).getType());
metrics.put(name, value);
}
catch (MessageException ihte) {
s_logger.warn("During deserialization, ignoring metric named: " + name + ". Unrecognized value type: " + protoMsg.getMetric(i).getType(), ihte);
}
}
// set the body
if (protoMsg.hasBody()) {
body = (protoMsg.getBody().toByteArray());
}
}
//
// Private methods
//
private Object getProtoKuraMetricValue(KuraPayloadProto.KuraPayload.KuraMetric metric,
KuraPayloadProto.KuraPayload.KuraMetric.ValueType type)
throws MessageException
{
switch (type) {
case DOUBLE:
return metric.getDoubleValue();
case FLOAT:
return metric.getFloatValue();
case INT64:
return metric.getLongValue();
case INT32:
return metric.getIntValue();
case BOOL:
return metric.getBoolValue();
case STRING:
return metric.getStringValue();
case BYTES:
ByteString bs = metric.getBytesValue();
return bs.toByteArray();
default:
throw new MessageException(MessageErrorCodes.INVALID_METRIC_TYPE, null, type);
}
}
private static void setProtoKuraMetricValue(KuraPayloadProto.KuraPayload.KuraMetric.Builder metric, Object o)
throws MessageException
{
if (o instanceof String) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.STRING);
metric.setStringValue((String) o);
}
else if (o instanceof Double) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.DOUBLE);
metric.setDoubleValue((Double) o);
}
else if (o instanceof Integer) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.INT32);
metric.setIntValue((Integer) o);
}
else if (o instanceof Float) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.FLOAT);
metric.setFloatValue((Float) o);
}
else if (o instanceof Long) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.INT64);
metric.setLongValue((Long) o);
}
else if (o instanceof Boolean) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.BOOL);
metric.setBoolValue((Boolean) o);
}
else if (o instanceof byte[]) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.BYTES);
metric.setBytesValue(ByteString.copyFrom((byte[]) o));
}
else if (o == null) {
throw new MessageException(MessageErrorCodes.INVALID_METRIC_VALUE, null, "null");
}
else {
throw new MessageException(MessageErrorCodes.INVALID_METRIC_TYPE, null, o.getClass().getName());
}
}
private KuraPayloadProto.KuraPayload.KuraPosition buildPositionProtoBuf(DevicePosition position)
{
KuraPayloadProto.KuraPayload.KuraPosition.Builder protoPos = null;
protoPos = KuraPayloadProto.KuraPayload.KuraPosition.newBuilder();
if (position.getLatitude() != null) {
protoPos.setLatitude(position.getLatitude());
}
if (position.getLongitude() != null) {
protoPos.setLongitude(position.getLongitude());
}
if (position.getAltitude() != null) {
protoPos.setAltitude(position.getAltitude());
}
if (position.getPrecision() != null) {
protoPos.setPrecision(position.getPrecision());
}
if (position.getHeading() != null) {
protoPos.setHeading(position.getHeading());
}
if (position.getSpeed() != null) {
protoPos.setSpeed(position.getSpeed());
}
if (position.getTimestamp() != null) {
protoPos.setTimestamp(position.getTimestamp().getTime());
}
if (position.getSatellites() != null) {
protoPos.setSatellites(position.getSatellites());
}
if (position.getStatus() != null) {
protoPos.setStatus(position.getStatus());
}
return protoPos.build();
}
private DevicePosition buildFromProtoBuf(KuraPayloadProto.KuraPayload.KuraPosition protoPosition)
{
DevicePosition position = getPosition();
// for performance reason check the position before
if (position == null) {
if (protoPosition.hasLatitude() || protoPosition.hasLatitude() ||
protoPosition.hasLongitude() || protoPosition.hasAltitude() ||
protoPosition.hasPrecision() || protoPosition.hasHeading() ||
protoPosition.hasHeading() || protoPosition.hasSpeed() ||
protoPosition.hasSatellites() || protoPosition.hasStatus() ||
protoPosition.hasTimestamp()) {
position = new KuraPosition();
}
}
if (protoPosition.hasLatitude()) {
position.setLatitude(protoPosition.getLatitude());
}
if (protoPosition.hasLongitude()) {
position.setLongitude(protoPosition.getLongitude());
}
if (protoPosition.hasAltitude()) {
position.setAltitude(protoPosition.getAltitude());
}
if (protoPosition.hasPrecision()) {
position.setPrecision(protoPosition.getPrecision());
}
if (protoPosition.hasHeading()) {
position.setHeading(protoPosition.getHeading());
}
if (protoPosition.hasSpeed()) {
position.setSpeed(protoPosition.getSpeed());
}
if (protoPosition.hasSatellites()) {
position.setSatellites(protoPosition.getSatellites());
}
if (protoPosition.hasStatus()) {
position.setStatus(protoPosition.getStatus());
}
if (protoPosition.hasTimestamp()) {
position.setTimestamp(new Date(protoPosition.getTimestamp()));
}
return position;
}
}
| service/device/call/kura/src/main/java/org/eclipse/kapua/service/device/call/message/kura/KuraPayload.java | /*******************************************************************************
* Copyright (c) 2011, 2016 Eurotech and/or its affiliates and others
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech - initial API and implementation
*
*******************************************************************************/
package org.eclipse.kapua.service.device.call.message.kura;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.message.internal.MessageErrorCodes;
import org.eclipse.kapua.message.internal.MessageException;
import org.eclipse.kapua.service.device.call.message.DevicePayload;
import org.eclipse.kapua.service.device.call.message.DevicePosition;
import org.eclipse.kapua.service.device.call.message.kura.proto.KuraPayloadProto;
import org.eclipse.kapua.service.device.call.message.kura.utils.GZIPUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Kura device payload implementation.
*
* @since 1.0
*
*/
public class KuraPayload implements DevicePayload
{
private static final Logger s_logger = LoggerFactory.getLogger(KuraPayload.class);
protected Date timestamp;
protected DevicePosition position;
protected Map<String, Object> metrics;
protected byte[] body;
/**
* Constructor
*/
public KuraPayload()
{
metrics = new HashMap<>();
}
@Override
public Date getTimestamp()
{
return timestamp;
}
/**
* Get the message timestamp
*
* @param timestamp
*/
public void setTimestamp(Date timestamp)
{
this.timestamp = timestamp;
}
@Override
public DevicePosition getPosition()
{
return position;
}
/**
* Set the device position
*
* @param position
*/
public void setPosition(DevicePosition position)
{
this.position = position;
}
@Override
public Map<String, Object> getMetrics()
{
return metrics;
}
@Override
public byte[] getBody()
{
return body;
}
/**
* Set the message body
*
* @param body
*/
public void setBody(byte[] body)
{
this.body = body;
}
@Override
public byte[] toByteArray()
{
// Build the message
KuraPayloadProto.KuraPayload.Builder protoMsg = KuraPayloadProto.KuraPayload.newBuilder();
// set the timestamp
if (getTimestamp() != null) {
protoMsg.setTimestamp(getTimestamp().getTime());
}
// set the position
if (getPosition() != null) {
protoMsg.setPosition(buildPositionProtoBuf(getPosition()));
}
// set the metrics
for (String name : getMetrics().keySet()) {
// build a metric
Object value = getMetrics().get(name);
if (value == null) {
continue;
}
try {
KuraPayloadProto.KuraPayload.KuraMetric.Builder metricB = KuraPayloadProto.KuraPayload.KuraMetric.newBuilder();
metricB.setName(name);
setProtoKuraMetricValue(metricB, value);
metricB.build();
// add it to the message
protoMsg.addMetric(metricB);
}
catch (MessageException eihte) {
try {
s_logger.error("During serialization, ignoring metric named: {}. Unrecognized value type: {}.", name, value.getClass().getName());
}
catch (NullPointerException npe) {
s_logger.error("During serialization, ignoring metric named: {}. The value is null.", name);
}
throw new RuntimeException(eihte);
}
}
// set the body
if (getBody() != null) {
protoMsg.setBody(ByteString.copyFrom(getBody()));
}
return protoMsg.build().toByteArray();
}
@Override
public void readFromByteArray(byte[] bytes)
throws KapuaException
{
if (GZIPUtils.isCompressed(bytes)) {
try {
bytes = GZIPUtils.decompress(bytes);
}
catch (IOException e) {
// throw new KapuaDeviceCallException(KapuaDeviceCallErrorCodes.)
// FIXME: manage!
throw KapuaException.internalError(e);
}
}
KuraPayloadProto.KuraPayload protoMsg = null;
try {
protoMsg = KuraPayloadProto.KuraPayload.parseFrom(bytes);
}
catch (InvalidProtocolBufferException ipbe) {
throw new MessageException(MessageErrorCodes.INVALID_MESSAGE, ipbe, (Object[]) null);
}
//
// Add timestamp
if (protoMsg.hasTimestamp()) {
timestamp = (new Date(protoMsg.getTimestamp()));
}
//
// Add position
if (protoMsg.hasPosition()) {
position = (buildFromProtoBuf(protoMsg.getPosition()));
}
//
// Add metrics
for (int i = 0; i < protoMsg.getMetricCount(); i++) {
String name = protoMsg.getMetric(i).getName();
try {
Object value = getProtoKuraMetricValue(protoMsg.getMetric(i), protoMsg.getMetric(i).getType());
metrics.put(name, value);
}
catch (MessageException ihte) {
s_logger.warn("During deserialization, ignoring metric named: " + name + ". Unrecognized value type: " + protoMsg.getMetric(i).getType(), ihte);
}
}
// set the body
if (protoMsg.hasBody()) {
body = (protoMsg.getBody().toByteArray());
}
}
//
// Private methods
//
private Object getProtoKuraMetricValue(KuraPayloadProto.KuraPayload.KuraMetric metric,
KuraPayloadProto.KuraPayload.KuraMetric.ValueType type)
throws MessageException
{
switch (type) {
case DOUBLE:
return metric.getDoubleValue();
case FLOAT:
return metric.getFloatValue();
case INT64:
return metric.getLongValue();
case INT32:
return metric.getIntValue();
case BOOL:
return metric.getBoolValue();
case STRING:
return metric.getStringValue();
case BYTES:
ByteString bs = metric.getBytesValue();
return bs.toByteArray();
default:
throw new MessageException(MessageErrorCodes.INVALID_METRIC_TYPE, null, type);
}
}
private static void setProtoKuraMetricValue(KuraPayloadProto.KuraPayload.KuraMetric.Builder metric, Object o)
throws MessageException
{
if (o instanceof String) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.STRING);
metric.setStringValue((String) o);
}
else if (o instanceof Double) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.DOUBLE);
metric.setDoubleValue((Double) o);
}
else if (o instanceof Integer) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.INT32);
metric.setIntValue((Integer) o);
}
else if (o instanceof Float) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.FLOAT);
metric.setFloatValue((Float) o);
}
else if (o instanceof Long) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.INT64);
metric.setLongValue((Long) o);
}
else if (o instanceof Boolean) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.BOOL);
metric.setBoolValue((Boolean) o);
}
else if (o instanceof byte[]) {
metric.setType(KuraPayloadProto.KuraPayload.KuraMetric.ValueType.BYTES);
metric.setBytesValue(ByteString.copyFrom((byte[]) o));
}
else if (o == null) {
throw new MessageException(MessageErrorCodes.INVALID_METRIC_VALUE, null, "null");
}
else {
throw new MessageException(MessageErrorCodes.INVALID_METRIC_TYPE, null, o.getClass().getName());
}
}
private KuraPayloadProto.KuraPayload.KuraPosition buildPositionProtoBuf(DevicePosition position)
{
KuraPayloadProto.KuraPayload.KuraPosition.Builder protoPos = null;
protoPos = KuraPayloadProto.KuraPayload.KuraPosition.newBuilder();
if (position.getLatitude() != null) {
protoPos.setLatitude(position.getLatitude());
}
if (position.getLongitude() != null) {
protoPos.setLongitude(position.getLongitude());
}
if (position.getAltitude() != null) {
protoPos.setAltitude(position.getAltitude());
}
if (position.getPrecision() != null) {
protoPos.setPrecision(position.getPrecision());
}
if (position.getHeading() != null) {
protoPos.setHeading(position.getHeading());
}
if (position.getSpeed() != null) {
protoPos.setSpeed(position.getSpeed());
}
if (position.getTimestamp() != null) {
protoPos.setTimestamp(position.getTimestamp().getTime());
}
if (position.getSatellites() != null) {
protoPos.setSatellites(position.getSatellites());
}
if (position.getStatus() != null) {
protoPos.setStatus(position.getStatus());
}
return protoPos.build();
}
private DevicePosition buildFromProtoBuf(KuraPayloadProto.KuraPayload.KuraPosition protoPosition)
{
DevicePosition position = getPosition();
// for performance reason check the position before
if (position == null) {
if (protoPosition.hasLatitude() || protoPosition.hasLatitude() ||
protoPosition.hasLongitude() || protoPosition.hasAltitude() ||
protoPosition.hasPrecision() || protoPosition.hasHeading() ||
protoPosition.hasHeading() || protoPosition.hasSpeed() ||
protoPosition.hasSatellites() || protoPosition.hasStatus() ||
protoPosition.hasTimestamp()) {
position = new KuraPosition();
}
}
if (protoPosition.hasLatitude()) {
position.setLatitude(protoPosition.getLatitude());
}
if (protoPosition.hasLongitude()) {
position.setLongitude(protoPosition.getLongitude());
}
if (protoPosition.hasAltitude()) {
position.setAltitude(protoPosition.getAltitude());
}
if (protoPosition.hasPrecision()) {
position.setPrecision(protoPosition.getPrecision());
}
if (protoPosition.hasHeading()) {
position.setHeading(protoPosition.getHeading());
}
if (protoPosition.hasSpeed()) {
position.setSpeed(protoPosition.getSpeed());
}
if (protoPosition.hasSatellites()) {
position.setSatellites(protoPosition.getSatellites());
}
if (protoPosition.hasStatus()) {
position.setStatus(protoPosition.getStatus());
}
if (protoPosition.hasTimestamp()) {
position.setTimestamp(new Date(protoPosition.getTimestamp()));
}
return position;
}
}
| Remove code which can never be called
The value is tested before for null and so it can never become null
afterwards as there is no assignment to it.
Signed-off-by: Jens Reimann <[email protected]> | service/device/call/kura/src/main/java/org/eclipse/kapua/service/device/call/message/kura/KuraPayload.java | Remove code which can never be called | <ide><path>ervice/device/call/kura/src/main/java/org/eclipse/kapua/service/device/call/message/kura/KuraPayload.java
<ide> protoMsg.addMetric(metricB);
<ide> }
<ide> catch (MessageException eihte) {
<del> try {
<del> s_logger.error("During serialization, ignoring metric named: {}. Unrecognized value type: {}.", name, value.getClass().getName());
<del> }
<del> catch (NullPointerException npe) {
<del> s_logger.error("During serialization, ignoring metric named: {}. The value is null.", name);
<del> }
<add> s_logger.error("During serialization, ignoring metric named: {}. Unrecognized value type: {}.", name, value.getClass().getName());
<ide> throw new RuntimeException(eihte);
<ide> }
<ide> } |
|
Java | apache-2.0 | 5e451fd012c50f5d9e0815fb4825cce3f50414a3 | 0 | uweschaefer/factcast,uweschaefer/factcast,uweschaefer/factcast | /*
* Copyright © 2018 Mercateo AG (http://www.mercateo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.factcast.store.pgsql;
import java.util.*;
import java.util.stream.*;
import org.springframework.beans.factory.annotation.*;
import org.springframework.boot.context.event.*;
import org.springframework.boot.context.properties.*;
import org.springframework.context.*;
import org.springframework.core.env.*;
import lombok.*;
import lombok.experimental.*;
import lombok.extern.slf4j.*;
@SuppressWarnings("DefaultAnnotationParam")
@ConfigurationProperties(
prefix = "factcast.store.pgsql",
ignoreInvalidFields = false,
ignoreUnknownFields = false)
@Data
@Slf4j
@Accessors(fluent = false)
public class PgConfigurationProperties implements ApplicationListener<ApplicationReadyEvent> {
private static final String LEGACY_PREFIX = "factcast.pg";
@Autowired
Environment env;
/**
* defines the number of Facts being retrieved with one Page Query for
* PageStrategy.PAGED
*/
int pageSize = 1000;
/**
* The capacity of the queue for PageStrategy.QUEUED
*/
int queueSize = 1000;
/**
* The factor to apply, when fetching/queuing Ids rather than Facts (assuming,
* that needs just a fraction of Heap and is way fater to flush to the client)
*/
int idOnlyFactor = 100;
/**
* Defines the Strategy used for Paging in the Catchup Phase.
*/
CatchupStrategy catchupStrategy = CatchupStrategy.getDefault();
/**
* Fetch Size used when filling the Queue, defaults to 4 (25% of the queue-size)
*/
int queueFetchRatio = 4;
public int getPageSizeForIds() {
return pageSize * idOnlyFactor;
}
public int getQueueSizeForIds() {
return queueSize * idOnlyFactor;
}
public int getFetchSizeForIds() {
return getQueueSizeForIds() / queueFetchRatio;
}
public int getFetchSize() {
return getQueueSize() / queueFetchRatio;
}
@Override
public void onApplicationEvent(ApplicationReadyEvent event) {
Map<String, Object> map = new HashMap();
MutablePropertySources propertySources = ((AbstractEnvironment) env).getPropertySources();
for (Iterator it = propertySources.iterator(); it.hasNext();) {
PropertySource propertySource = (PropertySource) it.next();
if (propertySource instanceof MapPropertySource) {
Map<String, Object> source = ((MapPropertySource) propertySource).getSource();
source.entrySet().forEach(e -> {
map.put(e.getKey(), propertySource.toString());
});
}
}
List<Map.Entry<String, Object>> legacyPrperties = map.entrySet()
.stream()
.filter(e -> e.getKey().startsWith(LEGACY_PREFIX))
.collect(Collectors.toList());
if (!legacyPrperties.isEmpty()) {
log.error(
"There are legacy properties detected. Property namespace has been renamed from '"
+ LEGACY_PREFIX + "' to 'factcast.store.pgsql'");
legacyPrperties.forEach(p -> {
log.error("Property {} found in {}", p.getKey(), p.getValue());
});
System.exit(1);
}
}
}
| factcast-store-pgsql/src/main/java/org/factcast/store/pgsql/PgConfigurationProperties.java | /*
* Copyright © 2018 Mercateo AG (http://www.mercateo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.factcast.store.pgsql;
import java.util.*;
import java.util.stream.*;
import org.springframework.beans.factory.annotation.*;
import org.springframework.boot.context.event.*;
import org.springframework.boot.context.properties.*;
import org.springframework.context.*;
import org.springframework.core.env.*;
import lombok.*;
import lombok.experimental.*;
import lombok.extern.slf4j.*;
@SuppressWarnings("DefaultAnnotationParam")
@ConfigurationProperties(
prefix = "factcast.store.pgsql",
ignoreInvalidFields = false,
ignoreUnknownFields = false)
@Data
@Slf4j
@Accessors(fluent = false)
public class PgConfigurationProperties implements ApplicationListener<ApplicationReadyEvent> {
private static final String LEGACY_PREFIX = "factcast.pg";
@Autowired
Environment env;
/**
* defines the number of Facts being retrieved with one Page Query for
* PageStrategy.PAGED
*/
int pageSize = 1000;
/**
* The capacity of the queue for PageStrategy.QUEUED
*/
int queueSize = 1000;
/**
* The factor to apply, when fetching/queuing Ids rather than Facts (assuming,
* that needs just a fraction of Heap and is way fater to flush to the client)
*/
int idOnlyFactor = 100;
/**
* Defines the Strategy used for Paging in the Catchup Phase.
*/
CatchupStrategy catchupStrategy = CatchupStrategy.getDefault();
/**
* Fetch Size used when filling the Queue, defaults to 4 (25% of the queue-size)
*/
int queueFetchRatio = 4;
public int getPageSizeForIds() {
return pageSize * idOnlyFactor;
}
public int getQueueSizeForIds() {
return queueSize * idOnlyFactor;
}
public int getFetchSizeForIds() {
return getQueueSizeForIds() / queueFetchRatio;
}
public int getFetchSize() {
return getQueueSize() / queueFetchRatio;
}
@Override
public void onApplicationEvent(ApplicationReadyEvent event) {
Map<String, Object> map = new HashMap();
MutablePropertySources propertySources = ((AbstractEnvironment) env).getPropertySources();
for (Iterator it = propertySources.iterator(); it.hasNext(); ) {
PropertySource propertySource = (PropertySource) it.next();
if (propertySource instanceof MapPropertySource) {
Map<String, Object> source = ((MapPropertySource) propertySource).getSource();
source.entrySet().forEach(e -> {
map.put(e.getKey(), propertySource.toString());
});
}
}
List<Map.Entry<String, Object>> legacyPrperties = map.entrySet()
.stream()
.filter(e -> e.getKey().startsWith(LEGACY_PREFIX))
.collect(Collectors.toList());
if (!legacyPrperties.isEmpty()) {
log.error(
"There are legacy properties detected. Property namespace has been renamed from '"
+ LEGACY_PREFIX + "' to 'factcast.store.pgsql'");
legacyPrperties.forEach(p -> {
log.error("Property {} found in {}", p.getKey(), p.getValue());
});
System.exit(1);
}
}
}
| #212: formatting
| factcast-store-pgsql/src/main/java/org/factcast/store/pgsql/PgConfigurationProperties.java | #212: formatting | <ide><path>actcast-store-pgsql/src/main/java/org/factcast/store/pgsql/PgConfigurationProperties.java
<ide> public void onApplicationEvent(ApplicationReadyEvent event) {
<ide> Map<String, Object> map = new HashMap();
<ide> MutablePropertySources propertySources = ((AbstractEnvironment) env).getPropertySources();
<del> for (Iterator it = propertySources.iterator(); it.hasNext(); ) {
<add> for (Iterator it = propertySources.iterator(); it.hasNext();) {
<ide> PropertySource propertySource = (PropertySource) it.next();
<ide> if (propertySource instanceof MapPropertySource) {
<ide> Map<String, Object> source = ((MapPropertySource) propertySource).getSource(); |
|
Java | mit | 1b0f204d8422716373bb929cb0be2c2e308c6212 | 0 | menacher/java-game-server,chongtianfeiyu/java-game-server,xiexingguang/java-game-server,menacher/java-game-server,cancobanoglu/java-game-server,niuqinghua/java-game-server,cancobanoglu/java-game-server,wuzhenda/java-game-server,niuqinghua/java-game-server,wuzhenda/java-game-server,niuqinghua/java-game-server,wuzhenda/java-game-server,rayue/java-game-server,rayue/java-game-server,chongtianfeiyu/java-game-server,rayue/java-game-server,xiexingguang/java-game-server,feamorx86/java-game-server,cancobanoglu/java-game-server,xiexingguang/java-game-server,feamorx86/java-game-server,feamorx86/java-game-server,menacher/java-game-server | package org.menacheri.protocols.impl;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.handler.codec.base64.Base64Decoder;
import org.jboss.netty.handler.codec.base64.Base64Encoder;
import org.jboss.netty.handler.codec.frame.DelimiterBasedFrameDecoder;
import org.jboss.netty.handler.codec.frame.Delimiters;
import org.jboss.netty.handler.codec.frame.TooLongFrameException;
import org.menacheri.app.IPlayerSession;
import org.menacheri.handlers.netty.AMF3ToJavaObjectDecoder;
import org.menacheri.handlers.netty.JavaObjectToAMF3Encoder;
import org.menacheri.handlers.netty.NulEncoder;
import org.menacheri.protocols.AbstractNettyProtocol;
import org.menacheri.util.NettyUtils;
/**
* This protocol defines AMF3 that is base 64 and String encoded sent over the
* wire. Used by XMLSocket flash clients to send AMF3 data.
*
* @author Abraham Menacherry
*
*/
public class AMF3StringProtocol extends AbstractNettyProtocol
{
/**
* The maximum size of the incoming message in bytes. The
* {@link DelimiterBasedFrameDecoder} will use this value in order to throw
* a {@link TooLongFrameException}.
*/
int maxFrameSize;
/**
* The flash client would encode the AMF3 bytes into a base 64 encoded
* string, this decoder is used to decode it back.
*/
private Base64Decoder base64Decoder;
/**
* This decoder will do the actual serialization to java object. Any game
* handlers need to be added after this in the pipeline so that they can
* operate on the java object.
*/
private AMF3ToJavaObjectDecoder amf3ToJavaObjectDecoder;
/**
* Once the game handler is done with its operations, it writes back the
* java object to the client. When writing back to flash client, it needs to
* use this encoder to encode it to AMF3 format.
*/
private JavaObjectToAMF3Encoder javaObjectToAMF3Encoder;
/**
* The flash client expects a AMF3 bytes to be passed in as base 64 encoded
* string. This encoder will encode the bytes accordingly.
*/
private Base64Encoder base64Encoder;
/**
* Flash client expects a nul byte 0x00 to be added as the end byte of any
* communication with it. This encoder will add this nul byte to the end of
* the message. Could be considered as a message "footer".
*/
private NulEncoder nulEncoder;
public AMF3StringProtocol()
{
super("AMF3_STRING");
}
@Override
public void applyProtocol(IPlayerSession playerSession)
{
ChannelPipeline pipeline = NettyUtils
.getPipeLineOfConnection(playerSession);
// Upstream handlers or encoders (i.e towards server) are added to
// pipeline now.
pipeline.addLast("framer", new DelimiterBasedFrameDecoder(maxFrameSize,
Delimiters.nulDelimiter()));
pipeline.addLast("base64Decoder", base64Decoder);
pipeline.addLast("amf3ToJavaObjectDecoder", amf3ToJavaObjectDecoder);
// Downstream handlers - Filter for data which flows from server to
// client. Note that the last handler added is actually the first
// handler for outgoing data.
pipeline.addLast("nulEncoder", nulEncoder);
pipeline.addLast("base64Encoder", base64Encoder);
pipeline.addLast("javaObjectToAMF3Encoder", javaObjectToAMF3Encoder);
}
public int getMaxFrameSize()
{
return maxFrameSize;
}
public void setMaxFrameSize(int frameSize)
{
this.maxFrameSize = frameSize;
}
public Base64Decoder getBase64Decoder()
{
return base64Decoder;
}
public void setBase64Decoder(Base64Decoder base64Decoder)
{
this.base64Decoder = base64Decoder;
}
public AMF3ToJavaObjectDecoder getAmf3ToJavaObjectDecoder()
{
return amf3ToJavaObjectDecoder;
}
public void setAmf3ToJavaObjectDecoder(
AMF3ToJavaObjectDecoder amf3ToJavaObjectDecoder)
{
this.amf3ToJavaObjectDecoder = amf3ToJavaObjectDecoder;
}
public JavaObjectToAMF3Encoder getJavaObjectToAMF3Encoder()
{
return javaObjectToAMF3Encoder;
}
public void setJavaObjectToAMF3Encoder(
JavaObjectToAMF3Encoder javaObjectToAMF3Encoder)
{
this.javaObjectToAMF3Encoder = javaObjectToAMF3Encoder;
}
public Base64Encoder getBase64Encoder()
{
return base64Encoder;
}
public void setBase64Encoder(Base64Encoder base64Encoder)
{
this.base64Encoder = base64Encoder;
}
public NulEncoder getNulEncoder()
{
return nulEncoder;
}
public void setNulEncoder(NulEncoder nulEncoder)
{
this.nulEncoder = nulEncoder;
}
}
| jetserver/src/main/java/org/menacheri/protocols/impl/AMF3StringProtocol.java | package org.menacheri.protocols.impl;
import java.io.ByteArrayInputStream;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.handler.codec.base64.Base64Decoder;
import org.jboss.netty.handler.codec.base64.Base64Encoder;
import org.jboss.netty.handler.codec.frame.DelimiterBasedFrameDecoder;
import org.jboss.netty.handler.codec.frame.Delimiters;
import org.jboss.netty.handler.codec.frame.TooLongFrameException;
import org.menacheri.app.IPlayerSession;
import org.menacheri.handlers.netty.AMF3ToJavaObjectDecoder;
import org.menacheri.handlers.netty.ByteArrayStreamDecoder;
import org.menacheri.handlers.netty.JavaObjectToAMF3Encoder;
import org.menacheri.handlers.netty.NulEncoder;
import org.menacheri.protocols.AbstractNettyProtocol;
import org.menacheri.util.NettyUtils;
import org.springframework.beans.factory.annotation.Required;
/**
* This protocol defines AMF3 that is base 64 and String encoded sent over the
* wire. Used by XMLSocket flash clients to send AMF3 data.
*
* @author Abraham Menacherry
*
*/
public class AMF3StringProtocol extends AbstractNettyProtocol
{
/**
* The maximum size of the incoming message in bytes. The
* {@link DelimiterBasedFrameDecoder} will use this value in order to throw
* a {@link TooLongFrameException}.
*/
int frameSize;
/**
* The flash client would encode the AMF3 bytes into a base 64 encoded
* string, this decoder is used to decode it back.
*/
private Base64Decoder base64Decoder;
/**
* After the frame decoder retrieves the bytes from the incoming stream,
* this decoder will convert it to a {@link ByteArrayInputStream} object
* which is provided as input to the {@link AMF3ToJavaObjectDecoder}. The
* game can add more handlers at this point to do business logic and write
* back to the pipeline.
*/
private ByteArrayStreamDecoder byteArrayStreamDecoder;
/**
* This decoder will do the actual serialization to java object. Any game
* handlers need to be added after this in the pipeline so that they can
* operate on the java object.
*/
private AMF3ToJavaObjectDecoder amf3ToJavaObjectDecoder;
/**
* Once the game handler is done with its operations, it writes back the
* java object to the client. When writing back to flash client, it needs to
* use this encoder to encode it to AMF3 format.
*/
private JavaObjectToAMF3Encoder javaObjectToAMF3Encoder;
/**
* The flash client expects a AMF3 bytes to be passed in as base 64 encoded
* string. This encoder will encode the bytes accordingly.
*/
private Base64Encoder base64Encoder;
/**
* Flash client expects a nul byte 0x00 to be added as the end byte of any
* communication with it. This encoder will add this nul byte to the end of
* the message. Could be considered as a message "footer".
*/
private NulEncoder nulEncoder;
public AMF3StringProtocol()
{
super("AMF3_STRING");
}
public AMF3StringProtocol(int frameSize, Base64Decoder base64Decoder,
ByteArrayStreamDecoder byteArrayStreamDecoder,
AMF3ToJavaObjectDecoder amf3ToJavaObjectDecoder,
JavaObjectToAMF3Encoder javaObjectToAMF3Encoder,
Base64Encoder base64Encoder, NulEncoder nulEncoder)
{
super("AMF3_STRING");
this.frameSize = frameSize;
this.base64Decoder = base64Decoder;
this.byteArrayStreamDecoder = byteArrayStreamDecoder;
this.amf3ToJavaObjectDecoder = amf3ToJavaObjectDecoder;
this.javaObjectToAMF3Encoder = javaObjectToAMF3Encoder;
this.base64Encoder = base64Encoder;
this.nulEncoder = nulEncoder;
}
@Override
public void applyProtocol(IPlayerSession playerSession)
{
ChannelPipeline pipeline = NettyUtils
.getPipeLineOfConnection(playerSession);
// Upstream handlers or encoders (i.e towards server) are added to
// pipeline now.
pipeline.addLast("framer", new DelimiterBasedFrameDecoder(frameSize,
Delimiters.nulDelimiter()));
pipeline.addLast("base64Decoder", base64Decoder);
pipeline.addLast("byteArrayStreamDecoder", byteArrayStreamDecoder);
pipeline.addLast("amf3ToJavaObjectDecoder", amf3ToJavaObjectDecoder);
// Downstream handlers - Filter for data which flows from server to
// client. Note that the last handler added is actually the first
// handler for outgoing data.
pipeline.addLast("nulEncoder", nulEncoder);
pipeline.addLast("base64Encoder", base64Encoder);
pipeline.addLast("javaObjectToAMF3Encoder", javaObjectToAMF3Encoder);
}
public int getFrameSize()
{
return frameSize;
}
@Required
public void setFrameSize(int frameSize)
{
this.frameSize = frameSize;
}
public Base64Decoder getBase64Decoder()
{
return base64Decoder;
}
@Required
public void setBase64Decoder(Base64Decoder base64Decoder)
{
this.base64Decoder = base64Decoder;
}
public ByteArrayStreamDecoder getByteArrayStreamDecoder()
{
return byteArrayStreamDecoder;
}
@Required
public void setByteArrayStreamDecoder(
ByteArrayStreamDecoder byteArrayStreamDecoder)
{
this.byteArrayStreamDecoder = byteArrayStreamDecoder;
}
public AMF3ToJavaObjectDecoder getAmf3ToJavaObjectDecoder()
{
return amf3ToJavaObjectDecoder;
}
@Required
public void setAmf3ToJavaObjectDecoder(
AMF3ToJavaObjectDecoder amf3ToJavaObjectDecoder)
{
this.amf3ToJavaObjectDecoder = amf3ToJavaObjectDecoder;
}
public JavaObjectToAMF3Encoder getJavaObjectToAMF3Encoder()
{
return javaObjectToAMF3Encoder;
}
@Required
public void setJavaObjectToAMF3Encoder(
JavaObjectToAMF3Encoder javaObjectToAMF3Encoder)
{
this.javaObjectToAMF3Encoder = javaObjectToAMF3Encoder;
}
public Base64Encoder getBase64Encoder()
{
return base64Encoder;
}
@Required
public void setBase64Encoder(Base64Encoder base64Encoder)
{
this.base64Encoder = base64Encoder;
}
public NulEncoder getNulEncoder()
{
return nulEncoder;
}
@Required
public void setNulEncoder(NulEncoder nulEncoder)
{
this.nulEncoder = nulEncoder;
}
}
| Removed bytearraystreamdecoder and renamed frameSize to maxFrameSize | jetserver/src/main/java/org/menacheri/protocols/impl/AMF3StringProtocol.java | Removed bytearraystreamdecoder and renamed frameSize to maxFrameSize | <ide><path>etserver/src/main/java/org/menacheri/protocols/impl/AMF3StringProtocol.java
<ide> package org.menacheri.protocols.impl;
<del>
<del>import java.io.ByteArrayInputStream;
<ide>
<ide> import org.jboss.netty.channel.ChannelPipeline;
<ide> import org.jboss.netty.handler.codec.base64.Base64Decoder;
<ide> import org.jboss.netty.handler.codec.frame.TooLongFrameException;
<ide> import org.menacheri.app.IPlayerSession;
<ide> import org.menacheri.handlers.netty.AMF3ToJavaObjectDecoder;
<del>import org.menacheri.handlers.netty.ByteArrayStreamDecoder;
<ide> import org.menacheri.handlers.netty.JavaObjectToAMF3Encoder;
<ide> import org.menacheri.handlers.netty.NulEncoder;
<ide> import org.menacheri.protocols.AbstractNettyProtocol;
<ide> import org.menacheri.util.NettyUtils;
<del>import org.springframework.beans.factory.annotation.Required;
<ide>
<ide>
<ide> /**
<ide> * {@link DelimiterBasedFrameDecoder} will use this value in order to throw
<ide> * a {@link TooLongFrameException}.
<ide> */
<del> int frameSize;
<add> int maxFrameSize;
<ide> /**
<ide> * The flash client would encode the AMF3 bytes into a base 64 encoded
<ide> * string, this decoder is used to decode it back.
<ide> */
<ide> private Base64Decoder base64Decoder;
<del> /**
<del> * After the frame decoder retrieves the bytes from the incoming stream,
<del> * this decoder will convert it to a {@link ByteArrayInputStream} object
<del> * which is provided as input to the {@link AMF3ToJavaObjectDecoder}. The
<del> * game can add more handlers at this point to do business logic and write
<del> * back to the pipeline.
<del> */
<del> private ByteArrayStreamDecoder byteArrayStreamDecoder;
<ide> /**
<ide> * This decoder will do the actual serialization to java object. Any game
<ide> * handlers need to be added after this in the pipeline so that they can
<ide> super("AMF3_STRING");
<ide> }
<ide>
<del> public AMF3StringProtocol(int frameSize, Base64Decoder base64Decoder,
<del> ByteArrayStreamDecoder byteArrayStreamDecoder,
<del> AMF3ToJavaObjectDecoder amf3ToJavaObjectDecoder,
<del> JavaObjectToAMF3Encoder javaObjectToAMF3Encoder,
<del> Base64Encoder base64Encoder, NulEncoder nulEncoder)
<del> {
<del> super("AMF3_STRING");
<del> this.frameSize = frameSize;
<del> this.base64Decoder = base64Decoder;
<del> this.byteArrayStreamDecoder = byteArrayStreamDecoder;
<del> this.amf3ToJavaObjectDecoder = amf3ToJavaObjectDecoder;
<del> this.javaObjectToAMF3Encoder = javaObjectToAMF3Encoder;
<del> this.base64Encoder = base64Encoder;
<del> this.nulEncoder = nulEncoder;
<del> }
<del>
<ide> @Override
<ide> public void applyProtocol(IPlayerSession playerSession)
<ide> {
<ide>
<ide> // Upstream handlers or encoders (i.e towards server) are added to
<ide> // pipeline now.
<del> pipeline.addLast("framer", new DelimiterBasedFrameDecoder(frameSize,
<add> pipeline.addLast("framer", new DelimiterBasedFrameDecoder(maxFrameSize,
<ide> Delimiters.nulDelimiter()));
<ide> pipeline.addLast("base64Decoder", base64Decoder);
<del> pipeline.addLast("byteArrayStreamDecoder", byteArrayStreamDecoder);
<ide> pipeline.addLast("amf3ToJavaObjectDecoder", amf3ToJavaObjectDecoder);
<ide>
<ide> // Downstream handlers - Filter for data which flows from server to
<ide> pipeline.addLast("javaObjectToAMF3Encoder", javaObjectToAMF3Encoder);
<ide> }
<ide>
<del> public int getFrameSize()
<add> public int getMaxFrameSize()
<ide> {
<del> return frameSize;
<add> return maxFrameSize;
<ide> }
<ide>
<del> @Required
<del> public void setFrameSize(int frameSize)
<add> public void setMaxFrameSize(int frameSize)
<ide> {
<del> this.frameSize = frameSize;
<add> this.maxFrameSize = frameSize;
<ide> }
<ide>
<ide> public Base64Decoder getBase64Decoder()
<ide> return base64Decoder;
<ide> }
<ide>
<del> @Required
<ide> public void setBase64Decoder(Base64Decoder base64Decoder)
<ide> {
<ide> this.base64Decoder = base64Decoder;
<del> }
<del>
<del> public ByteArrayStreamDecoder getByteArrayStreamDecoder()
<del> {
<del> return byteArrayStreamDecoder;
<del> }
<del>
<del> @Required
<del> public void setByteArrayStreamDecoder(
<del> ByteArrayStreamDecoder byteArrayStreamDecoder)
<del> {
<del> this.byteArrayStreamDecoder = byteArrayStreamDecoder;
<ide> }
<ide>
<ide> public AMF3ToJavaObjectDecoder getAmf3ToJavaObjectDecoder()
<ide> return amf3ToJavaObjectDecoder;
<ide> }
<ide>
<del> @Required
<ide> public void setAmf3ToJavaObjectDecoder(
<ide> AMF3ToJavaObjectDecoder amf3ToJavaObjectDecoder)
<ide> {
<ide> return javaObjectToAMF3Encoder;
<ide> }
<ide>
<del> @Required
<ide> public void setJavaObjectToAMF3Encoder(
<ide> JavaObjectToAMF3Encoder javaObjectToAMF3Encoder)
<ide> {
<ide> return base64Encoder;
<ide> }
<ide>
<del> @Required
<ide> public void setBase64Encoder(Base64Encoder base64Encoder)
<ide> {
<ide> this.base64Encoder = base64Encoder;
<ide> return nulEncoder;
<ide> }
<ide>
<del> @Required
<ide> public void setNulEncoder(NulEncoder nulEncoder)
<ide> {
<ide> this.nulEncoder = nulEncoder; |
|
Java | apache-2.0 | 43594b416843ac37552af4a1ba63a066a0cc3819 | 0 | pinterest/secor,HenryCaiHaiying/secor,HenryCaiHaiying/secor,pinterest/secor | package com.pinterest.secor.uploader;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.media.MediaHttpUploader;
import com.google.api.client.googleapis.media.MediaHttpUploaderProgressListener;
import com.google.api.client.http.FileContent;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.ExponentialBackOff;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.StorageScopes;
import com.google.api.services.storage.model.StorageObject;
import com.pinterest.secor.common.LogFilePath;
import com.pinterest.secor.common.SecorConfig;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.Date;
import java.text.SimpleDateFormat;
/**
* Manages uploads to Google Cloud Storage using the Storage class from the Google API SDK.
* <p>
* It will use Service Account credential (json file) that can be generated from the Google Developers Console.
* By default it will look up configured credential path in secor.gs.credentials.path or fallback to the default
* credential in the environment variable GOOGLE_APPLICATION_CREDENTIALS.
* <p>
* Application credentials documentation
* https://developers.google.com/identity/protocols/application-default-credentials
*
* @author Jerome Gagnon ([email protected])
*/
public class GsUploadManager extends UploadManager {
private static final Logger LOG = LoggerFactory.getLogger(GsUploadManager.class);
private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();
private static final ExecutorService executor = Executors.newFixedThreadPool(256);
/**
* Global instance of the Storage. The best practice is to make it a single
* globally shared instance across your application.
*/
private static Storage mStorageService;
private Storage mClient;
public GsUploadManager(SecorConfig config) throws Exception {
super(config);
mClient = getService(mConfig.getGsCredentialsPath(),
mConfig.getGsConnectTimeoutInMs(), mConfig.getGsReadTimeoutInMs());
}
@Override
public Handle<?> upload(LogFilePath localPath) throws Exception {
final String gsBucket = mConfig.getGsBucket();
final String gsKey = localPath.withPrefix(mConfig.getGsPath()).getLogFilePath();
final File localFile = new File(localPath.getLogFilePath());
final boolean directUpload = mConfig.getGsDirectUpload();
LOG.info("uploading file {} to gs://{}/{}", localFile, gsBucket, gsKey);
final StorageObject storageObject = new StorageObject().setName(gsKey);
final FileContent storageContent = new FileContent(Files.probeContentType(localFile.toPath()), localFile);
final Future<?> f = executor.submit(new Runnable() {
@Override
public void run() {
try {
Storage.Objects.Insert request = mClient.objects().insert(gsBucket, storageObject, storageContent);
if (directUpload) {
request.getMediaHttpUploader().setDirectUploadEnabled(true);
}
request.getMediaHttpUploader().setProgressListener(new MediaHttpUploaderProgressListener() {
@Override
public void progressChanged(MediaHttpUploader uploader) throws IOException {
LOG.debug("[{} %] upload file {} to gs://{}/{}",
(int) uploader.getProgress() * 100, localFile, gsBucket, gsKey);
}
});
request.execute();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
});
return new FutureHandle(f);
}
private static Storage getService(String credentialsPath, int connectTimeoutMs, int readTimeoutMs) throws Exception {
if (mStorageService == null) {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential;
try {
// Lookup if configured path from the properties; otherwise fallback to Google Application default
if (credentialsPath != null && !credentialsPath.isEmpty()) {
credential = GoogleCredential
.fromStream(new FileInputStream(credentialsPath), httpTransport, JSON_FACTORY)
.createScoped(Collections.singleton(StorageScopes.CLOUD_PLATFORM));
} else {
credential = GoogleCredential.getApplicationDefault(httpTransport, JSON_FACTORY);
}
} catch (IOException e) {
throw new RuntimeException("Failed to load Google credentials : " + credentialsPath, e);
}
// Depending on the environment that provides the default credentials (e.g. Compute Engine, App
// Engine), the credentials may require us to specify the scopes we need explicitly.
// Check for this case, and inject the scope if required.
if (credential.createScopedRequired()) {
credential = credential.createScoped(StorageScopes.all());
}
mStorageService = new Storage.Builder(httpTransport, JSON_FACTORY,
setHttpBackoffTimeout(credential, connectTimeoutMs, readTimeoutMs))
.setApplicationName("com.pinterest.secor")
.build();
}
return mStorageService;
}
private static HttpRequestInitializer setHttpBackoffTimeout(final HttpRequestInitializer requestInitializer,
final int connectTimeoutMs, final int readTimeoutMs) {
return new HttpRequestInitializer() {
@Override
public void initialize(HttpRequest httpRequest) throws IOException {
requestInitializer.initialize(httpRequest);
// Configure exponential backoff on error
// https://developers.google.com/api-client-library/java/google-http-java-client/backoff
ExponentialBackOff backoff = new ExponentialBackOff();
HttpUnsuccessfulResponseHandler backoffHandler = new HttpBackOffUnsuccessfulResponseHandler(backoff)
.setBackOffRequired(HttpBackOffUnsuccessfulResponseHandler.BackOffRequired.ALWAYS);
httpRequest.setUnsuccessfulResponseHandler(backoffHandler);
httpRequest.setConnectTimeout(connectTimeoutMs);
httpRequest.setReadTimeout(readTimeoutMs);
}
};
}
}
| src/main/java/com/pinterest/secor/uploader/GsUploadManager.java | package com.pinterest.secor.uploader;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.media.MediaHttpUploader;
import com.google.api.client.googleapis.media.MediaHttpUploaderProgressListener;
import com.google.api.client.http.FileContent;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.ExponentialBackOff;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.StorageScopes;
import com.google.api.services.storage.model.StorageObject;
import com.pinterest.secor.common.LogFilePath;
import com.pinterest.secor.common.SecorConfig;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.Date;
import java.text.SimpleDateFormat;
/**
* Manages uploads to Google Cloud Storage using the Storage class from the Google API SDK.
* <p>
* It will use Service Account credential (json file) that can be generated from the Google Developers Console.
* By default it will look up configured credential path in secor.gs.credentials.path or fallback to the default
* credential in the environment variable GOOGLE_APPLICATION_CREDENTIALS.
* <p>
* Application credentials documentation
* https://developers.google.com/identity/protocols/application-default-credentials
*
* @author Jerome Gagnon ([email protected])
*/
public class GsUploadManager extends UploadManager {
private static final Logger LOG = LoggerFactory.getLogger(GsUploadManager.class);
private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();
private static final ExecutorService executor = Executors.newFixedThreadPool(256);
/**
* Global instance of the Storage. The best practice is to make it a single
* globally shared instance across your application.
*/
private static Storage mStorageService;
private Storage mClient;
public GsUploadManager(SecorConfig config) throws Exception {
super(config);
mClient = getService(mConfig.getGsCredentialsPath(),
mConfig.getGsConnectTimeoutInMs(), mConfig.getGsReadTimeoutInMs());
}
@Override
public Handle<?> upload(LogFilePath localPath) throws Exception {
final String gsBucket = mConfig.getGsBucket();
final String gsKey = localPath.withPrefix(mConfig.getGsPath()).getLogFilePath();
final File localFile = new File(localPath.getLogFilePath());
final boolean directUpload = mConfig.getGsDirectUpload();
LOG.info("uploading file {} to gs://{}/{}", localFile, gsBucket, gsKey);
final StorageObject storageObject = new StorageObject().setName(gsKey);
final FileContent storageContent = new FileContent(Files.probeContentType(localFile.toPath()), localFile);
final Future<?> f = executor.submit(new Runnable() {
@Override
public void run() {
try {
Storage.Objects.Insert request = mClient.objects().insert(gsBucket, storageObject, storageContent);
if (directUpload) {
request.getMediaHttpUploader().setDirectUploadEnabled(true);
}
request.getMediaHttpUploader().setProgressListener(new MediaHttpUploaderProgressListener() {
@Override
public void progressChanged(MediaHttpUploader uploader) throws IOException {
LOG.debug("[{} %] upload file {} to gs://{}/{}",
(int) uploader.getProgress() * 100, localFile, gsBucket, gsKey);
}
});
request.execute();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
});
return new FutureHandle(f);
}
private static Storage getService(String credentialsPath, int connectTimeoutMs, int readTimeoutMs) throws Exception {
if (mStorageService == null) {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential;
try {
// Lookup if configured path from the properties; otherwise fallback to Google Application default
if (credentialsPath != null && !credentialsPath.isEmpty()) {
credential = GoogleCredential
.fromStream(new FileInputStream(credentialsPath), httpTransport, JSON_FACTORY)
.createScoped(Collections.singleton(StorageScopes.CLOUD_PLATFORM));
} else {
credential = GoogleCredential.getApplicationDefault(httpTransport, JSON_FACTORY);
}
} catch (IOException e) {
throw new RuntimeException("Failed to load Google credentials : " + credentialsPath, e);
}
mStorageService = new Storage.Builder(httpTransport, JSON_FACTORY,
setHttpBackoffTimeout(credential, connectTimeoutMs, readTimeoutMs))
.setApplicationName("com.pinterest.secor")
.build();
}
return mStorageService;
}
private static HttpRequestInitializer setHttpBackoffTimeout(final HttpRequestInitializer requestInitializer,
final int connectTimeoutMs, final int readTimeoutMs) {
return new HttpRequestInitializer() {
@Override
public void initialize(HttpRequest httpRequest) throws IOException {
requestInitializer.initialize(httpRequest);
// Configure exponential backoff on error
// https://developers.google.com/api-client-library/java/google-http-java-client/backoff
ExponentialBackOff backoff = new ExponentialBackOff();
HttpUnsuccessfulResponseHandler backoffHandler = new HttpBackOffUnsuccessfulResponseHandler(backoff)
.setBackOffRequired(HttpBackOffUnsuccessfulResponseHandler.BackOffRequired.ALWAYS);
httpRequest.setUnsuccessfulResponseHandler(backoffHandler);
httpRequest.setConnectTimeout(connectTimeoutMs);
httpRequest.setReadTimeout(readTimeoutMs);
}
};
}
}
| Inject scopes if required
Depending on the environment that provides the default credentials (e.g.
Compute Engine, App Engine), the credentials may require us to specify the
scopes we need explicitly. Check for this case, and inject the scope if
required.
https://github.com/GoogleCloudPlatform/java-docs-samples/blob/master/kms/src/main/java/com/example/Quickstart.java#L41
| src/main/java/com/pinterest/secor/uploader/GsUploadManager.java | Inject scopes if required | <ide><path>rc/main/java/com/pinterest/secor/uploader/GsUploadManager.java
<ide> throw new RuntimeException("Failed to load Google credentials : " + credentialsPath, e);
<ide> }
<ide>
<add> // Depending on the environment that provides the default credentials (e.g. Compute Engine, App
<add> // Engine), the credentials may require us to specify the scopes we need explicitly.
<add> // Check for this case, and inject the scope if required.
<add> if (credential.createScopedRequired()) {
<add> credential = credential.createScoped(StorageScopes.all());
<add> }
<add>
<ide> mStorageService = new Storage.Builder(httpTransport, JSON_FACTORY,
<ide> setHttpBackoffTimeout(credential, connectTimeoutMs, readTimeoutMs))
<ide> .setApplicationName("com.pinterest.secor") |
|
Java | apache-2.0 | e7be1f4e896cbe5f11924377771cdc85411354f8 | 0 | jjj117/airavata,machristie/airavata,machristie/airavata,anujbhan/airavata,apache/airavata,anujbhan/airavata,hasinitg/airavata,apache/airavata,machristie/airavata,glahiru/airavata,hasinitg/airavata,apache/airavata,gouravshenoy/airavata,gouravshenoy/airavata,hasinitg/airavata,gouravshenoy/airavata,gouravshenoy/airavata,apache/airavata,apache/airavata,apache/airavata,jjj117/airavata,hasinitg/airavata,jjj117/airavata,machristie/airavata,dogless/airavata,anujbhan/airavata,anujbhan/airavata,glahiru/airavata,anujbhan/airavata,gouravshenoy/airavata,hasinitg/airavata,dogless/airavata,jjj117/airavata,machristie/airavata,glahiru/airavata,jjj117/airavata,machristie/airavata,dogless/airavata,dogless/airavata,dogless/airavata,jjj117/airavata,machristie/airavata,apache/airavata,glahiru/airavata,dogless/airavata,hasinitg/airavata,anujbhan/airavata,anujbhan/airavata,glahiru/airavata,apache/airavata,gouravshenoy/airavata,gouravshenoy/airavata | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.api.server.handler;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.airavata.appcatalog.cpi.AppCatalog;
import org.airavata.appcatalog.cpi.AppCatalogException;
import org.airavata.appcatalog.cpi.ApplicationDeployment;
import org.airavata.appcatalog.cpi.ComputeResource;
import org.airavata.appcatalog.cpi.GwyResourceProfile;
import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
import org.apache.aiaravata.application.catalog.data.resources.*;
import org.apache.aiaravata.application.catalog.data.util.AppCatalogThriftConversion;
import org.apache.airavata.api.Airavata;
import org.apache.airavata.api.airavataAPIConstants;
import org.apache.airavata.api.server.util.DataModelUtils;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.logger.AiravataLogger;
import org.apache.airavata.common.logger.AiravataLoggerFactory;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.common.utils.AiravataZKUtils;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
import org.apache.airavata.model.appcatalog.computeresource.*;
import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
import org.apache.airavata.model.error.AiravataClientException;
import org.apache.airavata.model.error.AiravataErrorType;
import org.apache.airavata.model.error.AiravataSystemException;
import org.apache.airavata.model.error.ExperimentNotFoundException;
import org.apache.airavata.model.error.InvalidRequestException;
import org.apache.airavata.model.error.ProjectNotFoundException;
import org.apache.airavata.model.util.ExecutionType;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.model.workspace.experiment.*;
import org.apache.airavata.orchestrator.client.OrchestratorClientFactory;
import org.apache.airavata.orchestrator.cpi.OrchestratorService;
import org.apache.airavata.orchestrator.cpi.OrchestratorService.Client;
import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
import org.apache.airavata.registry.cpi.ChildDataType;
import org.apache.airavata.registry.cpi.ParentDataType;
import org.apache.airavata.registry.cpi.Registry;
import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.registry.cpi.RegistryModelType;
import org.apache.airavata.registry.cpi.utils.Constants;
import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.TaskDetailConstants;
import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.WorkflowNodeConstants;
import org.apache.airavata.workflow.engine.WorkflowEngine;
import org.apache.airavata.workflow.engine.WorkflowEngineException;
import org.apache.airavata.workflow.engine.WorkflowEngineFactory;
import org.apache.thrift.TException;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
public class AiravataServerHandler implements Airavata.Iface, Watcher {
private static final AiravataLogger logger = AiravataLoggerFactory.getLogger(AiravataServerHandler.class);
private Registry registry;
private AppCatalog appCatalog;
private ZooKeeper zk;
private static Integer mutex = -1;
public AiravataServerHandler() {
try {
String zkhostPort = AiravataZKUtils.getZKhostPort();
String airavataServerHostPort = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.API_SERVER_HOST)
+ ":" + ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.API_SERVER_PORT);
try {
zk = new ZooKeeper(zkhostPort, 6000, this); // no watcher is required, this will only use to store some data
String apiServer = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_API_SERVER_NODE,"/airavata-server");
String OrchServer = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_ORCHESTRATOR_SERVER_NODE,"/orchestrator-server");
String gfacServer = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_GFAC_SERVER_NODE,"/gfac-server");
String gfacExperiments = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_GFAC_EXPERIMENT_NODE,"/gfac-experiments");
synchronized (mutex) {
mutex.wait(); // waiting for the syncConnected event
}
Stat zkStat = zk.exists(apiServer, false);
if (zkStat == null) {
zk.create(apiServer, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
}
String instantNode = apiServer + File.separator + String.valueOf(new Random().nextInt(Integer.MAX_VALUE));
zkStat = zk.exists(instantNode, false);
if (zkStat == null) {
zk.create(instantNode,
airavataServerHostPort.getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.EPHEMERAL); // other component will watch these childeren creation deletion to monitor the status of the node
logger.info("Successfully created airavata-server node");
}
zkStat = zk.exists(OrchServer, false);
if (zkStat == null) {
zk.create(OrchServer, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
logger.info("Successfully created orchestrator-server node");
}
zkStat = zk.exists(gfacServer, false);
if (zkStat == null) {
zk.create(gfacServer, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
logger.info("Successfully created gfac-server node");
}
zkStat = zk.exists(gfacServer, false);
if (zkStat == null) {
zk.create(gfacExperiments, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
logger.info("Successfully created gfac-server node");
}
logger.info("Finished starting ZK: " + zk);
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (KeeperException e) {
e.printStackTrace();
}
} catch (ApplicationSettingsException e) {
e.printStackTrace();
}
}
synchronized public void process(WatchedEvent watchedEvent) {
synchronized (mutex) {
mutex.notify();
}
}
    /**
     * Query Airavata to fetch the API version.
     *
     * @return the Thrift-generated constant
     *         {@code airavataAPIConstants.AIRAVATA_API_VERSION}
     */
    @Override
    public String getAPIVersion() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        return airavataAPIConstants.AIRAVATA_API_VERSION;
    }
/**
* Create a Project
*
* @param project
*/
@Override
public String createProject(Project project) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!validateString(project.getName()) || !validateString(project.getOwner())){
logger.error("Project name and owner cannot be empty...");
throw new AiravataSystemException(AiravataErrorType.INTERNAL_ERROR);
}
return (String)registry.add(ParentDataType.PROJECT, project);
} catch (RegistryException e) {
logger.error("Error while creating the project", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while creating the project. More info : " + e.getMessage());
throw exception;
}
}
public void updateProject(String projectId, Project updatedProject) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
ProjectNotFoundException,
TException {
if (!validateString(projectId) || !validateString(projectId)){
logger.error("Project id cannot be empty...");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Project id cannot be empty...");
throw exception;
}
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.PROJECT, projectId)){
logger.error("Project does not exist in the system. Please provide a valid project ID...");
ProjectNotFoundException exception = new ProjectNotFoundException();
exception.setMessage("Project does not exist in the system. Please provide a valid project ID...");
throw exception;
}
registry.update(RegistryModelType.PROJECT, updatedProject, projectId);
} catch (RegistryException e) {
logger.error("Error while updating the project", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating the project. More info : " + e.getMessage());
throw exception;
}
}
private boolean validateString(String name){
boolean valid = true;
if (name == null || name.equals("") || name.trim().length() == 0){
valid = false;
}
return valid;
}
/**
* Get a Project by ID
*
* @param projectId
*/
@Override
public Project getProject(String projectId) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
ProjectNotFoundException,
TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.PROJECT, projectId)){
logger.error("Project does not exist in the system. Please provide a valid project ID...");
ProjectNotFoundException exception = new ProjectNotFoundException();
exception.setMessage("Project does not exist in the system. Please provide a valid project ID...");
throw exception;
}
return (Project)registry.get(RegistryModelType.PROJECT, projectId);
} catch (RegistryException e) {
logger.error("Error while updating the project", e);
ProjectNotFoundException exception = new ProjectNotFoundException();
exception.setMessage("Error while updating the project. More info : " + e.getMessage());
throw exception;
}
}
/**
* Get all Project by user
*
* @param userName
*/
@Override
public List<Project> getAllUserProjects(String userName) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
List<Project> projects = new ArrayList<Project>();
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
registry = RegistryFactory.getDefaultRegistry();
List<Object> list = registry.get(RegistryModelType.PROJECT, Constants.FieldConstants.ProjectConstants.OWNER, userName);
if (list != null && !list.isEmpty()){
for (Object o : list){
projects.add((Project) o);
}
}
return projects;
} catch (RegistryException e) {
logger.error("Error while retrieving projects", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving projects. More info : " + e.getMessage());
throw exception;
}
}
public List<Project> searchProjectsByProjectName(String userName, String projectName) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<Project> projects = new ArrayList<Project>();
registry = RegistryFactory.getDefaultRegistry();
Map<String, String> filters = new HashMap<String, String>();
filters.put(Constants.FieldConstants.ProjectConstants.OWNER, userName);
filters.put(Constants.FieldConstants.ProjectConstants.PROJECT_NAME, projectName);
List<Object> results = registry.search(RegistryModelType.PROJECT, filters);
for (Object object : results) {
projects.add((Project)object);
}
return projects;
}catch (Exception e) {
logger.error("Error while retrieving projects", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving projects. More info : " + e.getMessage());
throw exception;
}
}
public List<Project> searchProjectsByProjectDesc(String userName, String description) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<Project> projects = new ArrayList<Project>();
registry = RegistryFactory.getDefaultRegistry();
Map<String, String> filters = new HashMap<String, String>();
filters.put(Constants.FieldConstants.ProjectConstants.OWNER, userName);
filters.put(Constants.FieldConstants.ProjectConstants.DESCRIPTION, description);
List<Object> results = registry.search(RegistryModelType.PROJECT, filters);
for (Object object : results) {
projects.add((Project)object);
}
return projects;
}catch (Exception e) {
logger.error("Error while retrieving projects", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving projects. More info : " + e.getMessage());
throw exception;
}
}
public List<ExperimentSummary> searchExperimentsByName(String userName, String expName) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
registry = RegistryFactory.getDefaultRegistry();
Map<String, String> filters = new HashMap<String, String>();
filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
filters.put(Constants.FieldConstants.ExperimentConstants.EXPERIMENT_NAME, expName);
List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
for (Object object : results) {
summaries.add((ExperimentSummary) object);
}
return summaries;
}catch (Exception e) {
logger.error("Error while retrieving experiments", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
throw exception;
}
}
public List<ExperimentSummary> searchExperimentsByDesc(String userName, String description) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
registry = RegistryFactory.getDefaultRegistry();
Map<String, String> filters = new HashMap<String, String>();
filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
filters.put(Constants.FieldConstants.ExperimentConstants.EXPERIMENT_DESC, description);
List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
for (Object object : results) {
summaries.add((ExperimentSummary) object);
}
return summaries;
}catch (Exception e) {
logger.error("Error while retrieving experiments", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
throw exception;
}
}
public List<ExperimentSummary> searchExperimentsByApplication(String userName, String applicationId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
registry = RegistryFactory.getDefaultRegistry();
Map<String, String> filters = new HashMap<String, String>();
filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
filters.put(Constants.FieldConstants.ExperimentConstants.APPLICATION_ID, applicationId);
List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
for (Object object : results) {
summaries.add((ExperimentSummary) object);
}
return summaries;
}catch (Exception e) {
logger.error("Error while retrieving experiments", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
throw exception;
}
}
@Override
public List<ExperimentSummary> searchExperimentsByStatus(String userName, ExperimentState experimentState) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
registry = RegistryFactory.getDefaultRegistry();
Map<String, String> filters = new HashMap<String, String>();
filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
filters.put(Constants.FieldConstants.ExperimentConstants.EXPERIMENT_STATUS, experimentState.toString());
List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
for (Object object : results) {
summaries.add((ExperimentSummary) object);
}
return summaries;
}catch (Exception e) {
logger.error("Error while retrieving experiments", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
throw exception;
}
}
@Override
public List<ExperimentSummary> searchExperimentsByCreationTime(String userName, long fromTime, long toTime) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
registry = RegistryFactory.getDefaultRegistry();
Map<String, String> filters = new HashMap<String, String>();
filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
filters.put(Constants.FieldConstants.ExperimentConstants.FROM_DATE, String.valueOf(fromTime));
filters.put(Constants.FieldConstants.ExperimentConstants.TO_DATE, String.valueOf(toTime));
List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
for (Object object : results) {
summaries.add((ExperimentSummary) object);
}
return summaries;
}catch (Exception e) {
logger.error("Error while retrieving experiments", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
throw exception;
}
}
/**
* Get all Experiments within a Project
*
* @param projectId
*/
@Override
public List<Experiment> getAllExperimentsInProject(String projectId) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
ProjectNotFoundException,
TException {
if (!validateString(projectId)){
logger.error("Project id cannot be empty. Please provide a valid project ID...");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Project id cannot be empty. Please provide a valid project ID...");
throw exception;
}
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.PROJECT, projectId)){
logger.error("Project does not exist in the system. Please provide a valid project ID...");
ProjectNotFoundException exception = new ProjectNotFoundException();
exception.setMessage("Project does not exist in the system. Please provide a valid project ID...");
throw exception;
}
List<Experiment> experiments = new ArrayList<Experiment>();
List<Object> list = registry.get(RegistryModelType.EXPERIMENT, Constants.FieldConstants.ExperimentConstants.PROJECT_ID, projectId);
if (list != null && !list.isEmpty()) {
for (Object o : list) {
experiments.add((Experiment) o);
}
}
return experiments;
} catch (Exception e) {
logger.error("Error while retrieving the experiments", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the experiments. More info : " + e.getMessage());
throw exception;
}
}
/**
* Get all Experiments by user
*
* @param userName
*/
@Override
public List<Experiment> getAllUserExperiments(String userName) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
List<Experiment> experiments = new ArrayList<Experiment>();
registry = RegistryFactory.getDefaultRegistry();
List<Object> list = registry.get(RegistryModelType.EXPERIMENT, Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
if (list != null && !list.isEmpty()){
for (Object o : list){
experiments.add((Experiment)o);
}
}
return experiments;
} catch (Exception e) {
logger.error("Error while retrieving the experiments", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the experiments. More info : " + e.getMessage());
throw exception;
}
}
/**
* Create an experiment for the specified user belonging to the gateway. The gateway identity is not explicitly passed
* but inferred from the authentication header. This experiment is just a persistent place holder. The client
* has to subsequently configure and launch the created experiment. No action is taken on Airavata Server except
* registering the experiment in a persistent store.
*
* @param experiment@return The server-side generated airavata experiment globally unique identifier.
* @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public String createExperiment(Experiment experiment) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!validateString(experiment.getName())){
logger.error("Cannot create experiments with empty experiment name");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Cannot create experiments with empty experiment name");
throw exception;
}
String experimentId = (String)registry.add(ParentDataType.EXPERIMENT, experiment);
logger.infoId(experimentId, "Created new experiment with experiment name {}", experiment.getName());
return experimentId;
} catch (Exception e) {
logger.error("Error while creating the experiment with experiment name {}", experiment.getName());
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while creating the experiment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch previously created experiment metadata.
*
* @param airavataExperimentId The identifier for the requested experiment. This is returned during the create experiment step.
* @return experimentMetada
* This method will return the previously stored experiment metadata.
* @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.ExperimentNotFoundException If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
* @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public Experiment getExperiment(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
return (Experiment)registry.get(RegistryModelType.EXPERIMENT, airavataExperimentId);
} catch (Exception e) {
logger.error("Error while retrieving the experiment", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the experiment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Configure a previously created experiment with required inputs, scheduling and other quality of service
* parameters. This method only updates the experiment object within the registry. The experiment has to be launched
* to make it actionable by the server.
*
* @param airavataExperimentId The identifier for the requested experiment. This is returned during the create experiment step.
* @param experiment
* @return This method call does not have a return value.
* @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.ExperimentNotFoundException If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
* @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public void updateExperiment(String airavataExperimentId, Experiment experiment) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
logger.errorId(airavataExperimentId, "Update request failed, Experiment {} doesn't exist.", airavataExperimentId);
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
ExperimentStatus experimentStatus = getExperimentStatus(airavataExperimentId);
if (experimentStatus != null){
ExperimentState experimentState = experimentStatus.getExperimentState();
switch (experimentState){
case CREATED: case VALIDATED: case CANCELED: case FAILED: case UNKNOWN:
registry.update(RegistryModelType.EXPERIMENT, experiment, airavataExperimentId);
logger.infoId(airavataExperimentId, "Successfully updated experiment {} ", experiment.getName());
break;
default:
logger.errorId(airavataExperimentId, "Error while updating experiment. Update experiment is only valid for experiments " +
"with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
"experiment is in one of above statuses... ");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating experiment. Update experiment is only valid for experiments " +
"with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
"experiment is in one of above statuses... ");
throw exception;
}
}
} catch (Exception e) {
logger.errorId(airavataExperimentId, "Error while updating experiment", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating experiment. More info : " + e.getMessage());
throw exception;
}
}
@Override
public void updateExperimentConfiguration(String airavataExperimentId, UserConfigurationData userConfiguration) throws TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
logger.errorId(airavataExperimentId, "Update experiment configuration failed, experiment {} doesn't exist.", airavataExperimentId);
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
ExperimentStatus experimentStatus = getExperimentStatus(airavataExperimentId);
if (experimentStatus != null){
ExperimentState experimentState = experimentStatus.getExperimentState();
switch (experimentState){
case CREATED: case VALIDATED: case CANCELED: case FAILED: case UNKNOWN:
registry.add(ChildDataType.EXPERIMENT_CONFIGURATION_DATA, userConfiguration, airavataExperimentId);
logger.infoId(airavataExperimentId, "Successfully updated experiment configuration for experiment {}.", airavataExperimentId);
break;
default:
logger.errorId(airavataExperimentId, "Error while updating experiment {}. Update experiment is only valid for experiments " +
"with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
"experiment is in one of above statuses... ", airavataExperimentId);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating experiment. Update experiment is only valid for experiments " +
"with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
"experiment is in one of above statuses... ");
throw exception;
}
}
} catch (Exception e) {
logger.errorId(airavataExperimentId, "Error while updating user configuration", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating user configuration. " +
"Update experiment is only valid for experiments " +
"with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
"experiment is in one of above statuses... " + e.getMessage());
throw exception;
}
}
/**
 * Update the computational resource scheduling information of an existing experiment.
 * The update is only permitted while the experiment is in CREATED, VALIDATED, CANCELED,
 * FAILED or UNKNOWN state.
 *
 * @param airavataExperimentId identifier of the experiment to update
 * @param resourceScheduling   new scheduling configuration to persist
 * @throws TException ExperimentNotFoundException when no experiment exists for the id;
 *                    AiravataSystemException for invalid state or registry failures
 */
@Override
public void updateResourceScheduleing(String airavataExperimentId, ComputationalResourceScheduling resourceScheduling) throws TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
            // error level for consistency with the sibling update/lookup methods
            logger.errorId(airavataExperimentId, "Update resource scheduling failed, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        ExperimentStatus experimentStatus = getExperimentStatus(airavataExperimentId);
        if (experimentStatus != null){
            ExperimentState experimentState = experimentStatus.getExperimentState();
            switch (experimentState){
                case CREATED: case VALIDATED: case CANCELED: case FAILED: case UNKNOWN:
                    registry.add(ChildDataType.COMPUTATIONAL_RESOURCE_SCHEDULING, resourceScheduling, airavataExperimentId);
                    logger.infoId(airavataExperimentId, "Successfully updated resource scheduling for the experiment {}.", airavataExperimentId);
                    break;
                default:
                    logger.errorId(airavataExperimentId, "Error while updating scheduling info. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ");
                    AiravataSystemException exception = new AiravataSystemException();
                    exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
                    exception.setMessage("Error while updating experiment. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ");
                    throw exception;
            }
        }
    } catch (ExperimentNotFoundException e) {
        // propagate the not-found condition instead of masking it as an internal error
        throw e;
    } catch (AiravataSystemException e) {
        // already logged and typed correctly at the throw site; avoid double-wrapping
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while updating scheduling info", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while updating scheduling info. " +
                "Update experiment is only valid for experiments " +
                "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                "experiment is in one of above statuses... " + e.getMessage());
        throw exception;
    }
}
/**
 * Validate an experiment's configuration. A true result in general indicates the
 * experiment is ready to be launched.
 *
 * @param airavataExperimentId experiment identifier returned during the create step
 * @return true when the orchestrator reports the experiment as valid, false otherwise
 * @throws ExperimentNotFoundException if the experiment does not exist in the registry
 * @throws AiravataSystemException for registry access failures
 */
@Override
public boolean validateExperiment(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            logger.errorId(airavataExperimentId, "Experiment validation failed , experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
    } catch (RegistryException e1) {
        logger.errorId(airavataExperimentId, "Error while retrieving projects", e1);
        AiravataSystemException ase = new AiravataSystemException();
        ase.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        ase.setMessage("Error while retrieving projects. More info : " + e1.getMessage());
        throw ase;
    }
    // delegate the actual validation to the orchestrator service
    boolean valid = getOrchestratorClient().validateExperiment(airavataExperimentId);
    if (valid) {
        logger.infoId(airavataExperimentId, "Experiment validation succeed.");
    } else {
        logger.infoId(airavataExperimentId, "Experiment validation failed.");
    }
    return valid;
}
/**
 * Fetch the current status of a previously created experiment.
 *
 * @param airavataExperimentId experiment identifier returned during the create experiment step
 * @return the persisted ExperimentStatus for the experiment
 * @throws ExperimentNotFoundException if the specified experiment does not exist
 * @throws AiravataSystemException for any registry/system failure while reading the status
 */
@Override
public ExperimentStatus getExperimentStatus(String airavataExperimentId) throws InvalidRequestException,
                                                                                ExperimentNotFoundException,
                                                                                AiravataClientException,
                                                                                AiravataSystemException,
                                                                                TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
            logger.errorId(airavataExperimentId, "Error while retrieving experiment status, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId +
                    " does not exist in the system..");
        }
        return (ExperimentStatus)registry.get(RegistryModelType.EXPERIMENT_STATUS, airavataExperimentId);
    } catch (ExperimentNotFoundException e) {
        // rethrow as declared: previously the catch-all below swallowed this and
        // re-wrapped it as an internal error, hiding the not-found condition from callers
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while retrieving the experiment status", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the experiment status. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Fetch the output data objects produced by an experiment.
 *
 * @param airavataExperimentId experiment identifier returned during the create experiment step
 * @return list of output data objects recorded for the experiment
 * @throws TException ExperimentNotFoundException when the experiment does not exist;
 *                    AiravataSystemException for registry failures
 */
@Override
public List<DataObjectType> getExperimentOutputs(String airavataExperimentId) throws TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
            logger.errorId(airavataExperimentId, "Get experiment outputs failed, experiment {} doesn't exit.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        return (List<DataObjectType>)registry.get(RegistryModelType.EXPERIMENT_OUTPUT, airavataExperimentId);
    } catch (ExperimentNotFoundException e) {
        // propagate the not-found condition; the catch-all previously masked it
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while retrieving the experiment outputs", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the experiment outputs. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Collect the status of every job belonging to an experiment by walking the
 * experiment -> workflow node -> task -> job hierarchy in the registry.
 *
 * @param airavataExperimentId experiment identifier
 * @return map of jobID to JobStatus; empty when the experiment has no jobs yet
 * @throws TException ExperimentNotFoundException when the experiment does not exist;
 *                    AiravataSystemException for registry failures
 */
public Map<String, JobStatus> getJobStatuses(String airavataExperimentId) throws TException {
    Map<String, JobStatus> jobStatus = new HashMap<String, JobStatus>();
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
            logger.errorId(airavataExperimentId, "Error while retrieving job status, the experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        List<Object> workflowNodes = registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, Constants.FieldConstants.WorkflowNodeConstants.EXPERIMENT_ID, airavataExperimentId);
        if (workflowNodes != null && !workflowNodes.isEmpty()){
            for (Object wf : workflowNodes){
                String nodeInstanceId = ((WorkflowNodeDetails) wf).getNodeInstanceId();
                List<Object> taskDetails = registry.get(RegistryModelType.TASK_DETAIL, Constants.FieldConstants.TaskDetailConstants.NODE_ID, nodeInstanceId);
                if (taskDetails != null && !taskDetails.isEmpty()){
                    for (Object ts : taskDetails){
                        String taskID = ((TaskDetails) ts).getTaskID();
                        List<Object> jobDetails = registry.get(RegistryModelType.JOB_DETAIL, Constants.FieldConstants.JobDetaisConstants.TASK_ID, taskID);
                        if (jobDetails != null && !jobDetails.isEmpty()){
                            for (Object job : jobDetails){
                                String jobID = ((JobDetails) job).getJobID();
                                jobStatus.put(jobID, ((JobDetails) job).getJobStatus());
                            }
                        }
                    }
                }
            }
        }
    } catch (ExperimentNotFoundException e) {
        // propagate the not-found condition; the catch-all previously masked it
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while retrieving the job statuses", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the job statuses. More info : " + e.getMessage());
        throw exception;
    }
    return jobStatus;
}
/**
 * Collect the full job detail records belonging to an experiment by walking the
 * experiment -> workflow node -> task -> job hierarchy in the registry.
 *
 * @param airavataExperimentId experiment identifier
 * @return list of JobDetails; empty when the experiment has no jobs yet
 * @throws ExperimentNotFoundException if the experiment does not exist
 * @throws AiravataSystemException for registry failures
 */
@Override
public List<JobDetails> getJobDetails(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    List<JobDetails> jobDetailsList = new ArrayList<JobDetails>();
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
            logger.errorId(airavataExperimentId, "Error while retrieving job details, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        List<Object> workflowNodes = registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, Constants.FieldConstants.WorkflowNodeConstants.EXPERIMENT_ID, airavataExperimentId);
        if (workflowNodes != null && !workflowNodes.isEmpty()){
            for (Object wf : workflowNodes){
                String nodeInstanceId = ((WorkflowNodeDetails) wf).getNodeInstanceId();
                List<Object> taskDetails = registry.get(RegistryModelType.TASK_DETAIL, Constants.FieldConstants.TaskDetailConstants.NODE_ID, nodeInstanceId);
                if (taskDetails != null && !taskDetails.isEmpty()){
                    for (Object ts : taskDetails){
                        String taskID = ((TaskDetails) ts).getTaskID();
                        List<Object> jobDetails = registry.get(RegistryModelType.JOB_DETAIL, Constants.FieldConstants.JobDetaisConstants.TASK_ID, taskID);
                        if (jobDetails != null && !jobDetails.isEmpty()){
                            for (Object job : jobDetails){
                                jobDetailsList.add((JobDetails) job);
                            }
                        }
                    }
                }
            }
        }
    } catch (ExperimentNotFoundException e) {
        // rethrow as declared: the catch-all previously re-wrapped this as INTERNAL_ERROR,
        // hiding the not-found condition from callers
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while retrieving the job details", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the job details. More info : " + e.getMessage());
        throw exception;
    }
    return jobDetailsList;
}
/**
 * Collect every data-transfer record belonging to an experiment by walking the
 * experiment -> workflow node -> task hierarchy in the registry.
 *
 * @param airavataExperimentId experiment identifier
 * @return list of DataTransferDetails; empty when none are recorded yet
 * @throws ExperimentNotFoundException if the experiment does not exist
 * @throws AiravataSystemException for registry failures
 */
@Override
public List<DataTransferDetails> getDataTransferDetails(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    List<DataTransferDetails> dataTransferDetailList = new ArrayList<DataTransferDetails>();
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            logger.errorId(airavataExperimentId, "Error while retrieving data transfer details, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        List<Object> workflowNodes = registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, Constants.FieldConstants.WorkflowNodeConstants.EXPERIMENT_ID, airavataExperimentId);
        if (workflowNodes != null && !workflowNodes.isEmpty()){
            for (Object wf : workflowNodes){
                String nodeInstanceId = ((WorkflowNodeDetails) wf).getNodeInstanceId();
                List<Object> taskDetails = registry.get(RegistryModelType.TASK_DETAIL, Constants.FieldConstants.TaskDetailConstants.NODE_ID, nodeInstanceId);
                if (taskDetails != null && !taskDetails.isEmpty()){
                    for (Object ts : taskDetails){
                        String taskID = ((TaskDetails) ts).getTaskID();
                        // NOTE(review): reuses JobDetaisConstants.TASK_ID as the filter key for
                        // DATA_TRANSFER_DETAIL — presumably the key value is identical to the
                        // data-transfer task-id constant; verify against the registry constants.
                        List<Object> dataTransferDetails = registry.get(RegistryModelType.DATA_TRANSFER_DETAIL, Constants.FieldConstants.JobDetaisConstants.TASK_ID, taskID);
                        if (dataTransferDetails != null && !dataTransferDetails.isEmpty()){
                            for (Object dataTransfer : dataTransferDetails){
                                dataTransferDetailList.add((DataTransferDetails) dataTransfer);
                            }
                        }
                    }
                }
            }
        }
    } catch (ExperimentNotFoundException e) {
        // propagate the not-found condition; the catch-all previously masked it
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while retrieving the data transfer details", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the data transfer details. More info : " + e.getMessage());
        throw exception;
    }
    return dataTransferDetailList;
}
/**
 * Launch a previously created and configured experiment. Airavata Server will then start
 * processing the request; notifications and intermediate/output data become available
 * subsequently. Single-application experiments are validated by the orchestrator first;
 * workflow experiments are handed to the workflow engine. The actual launch runs on a
 * background thread so this call returns once the launch has been dispatched.
 *
 * @param airavataExperimentId  identifier returned during the create experiment step
 * @param airavataCredStoreToken credential-store token for the targeted remote account;
 *                               not persisted in the registry for security reasons
 * @throws TException ExperimentNotFoundException when the experiment does not exist;
 *                    InvalidRequestException when validation fails or the execution type
 *                    cannot be determined; AiravataSystemException for registry failures
 */
@Override
public void launchExperiment(final String airavataExperimentId, String airavataCredStoreToken) throws TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            logger.errorId(airavataExperimentId, "Error while launching experiment, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
    } catch (RegistryException e1) {
        logger.errorId(airavataExperimentId, "Error while retrieving projects", e1);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving projects. More info : " + e1.getMessage());
        throw exception;
    }
    final String expID = airavataExperimentId;
    final String token = airavataCredStoreToken;
    synchronized (this) {
        Experiment experiment = getExperiment(expID);
        ExecutionType executionType = DataModelUtils.getExecutionType(experiment);
        Thread thread = null;
        if (executionType == ExecutionType.SINGLE_APP) {
            // single application execution experiment
            logger.debugId(airavataExperimentId, "Launching single application experiment {}.", airavataExperimentId);
            final OrchestratorService.Client orchestratorClient = getOrchestratorClient();
            // NOTE(review): orchestratorClient's transport is closed inside
            // launchSingleAppExperiment's finally block, but NOT on the
            // validation-failure path below — confirm whether that leaks a connection.
            if (orchestratorClient.validateExperiment(expID)) {
                thread = new Thread() {
                    public void run() {
                        try {
                            launchSingleAppExperiment(expID, token, orchestratorClient);
                        } catch (TException e) {
                            // throwing from a detached thread is useless, just log the error
                            logger.errorId(airavataExperimentId, "Error while launching single application experiment.", e);
                        }
                    }
                };
            } else {
                logger.errorId(airavataExperimentId, "Experiment validation failed. Please check the configurations.");
                throw new InvalidRequestException("Experiment Validation Failed, please check the configuration");
            }
        } else if (executionType == ExecutionType.WORKFLOW){
            // workflow execution experiment
            logger.debugId(airavataExperimentId, "Launching workflow experiment {}.", airavataExperimentId);
            thread = new Thread() {
                public void run() {
                    try {
                        launchWorkflowExperiment(expID, token);
                    } catch (TException e) {
                        // was e.printStackTrace(): route the failure through the server
                        // log like the single-app path does
                        logger.errorId(expID, "Error while launching workflow experiment.", e);
                    }
                }
            };
        } else {
            logger.errorId(airavataExperimentId, "Couldn't identify experiment type, experiment {} is neither single application nor workflow.", airavataExperimentId);
            throw new InvalidRequestException("Experiment '" + expID + "' launch failed. Unable to figureout execution type for application " + experiment.getApplicationId());
        }
        thread.start();
    }
}
/**
 * Hand a workflow experiment over to the workflow engine for execution.
 * Engine failures are logged only — the caller is not notified (best-effort launch).
 *
 * @param experimentId           experiment to launch
 * @param airavataCredStoreToken credential-store token passed through to the engine
 */
private void launchWorkflowExperiment(String experimentId, String airavataCredStoreToken) throws TException {
    try {
        WorkflowEngineFactory.getWorkflowEngine().launchExperiment(experimentId, airavataCredStoreToken);
    } catch (WorkflowEngineException e) {
        logger.errorId(experimentId, "Error while launching experiment.", e);
    }
}
/**
 * Launch every task of a single-application experiment through the orchestrator.
 * Marks the experiment LAUNCHED before submitting each task; on any failure the
 * experiment status is set to FAILED. The orchestrator client transport is always
 * closed on exit.
 *
 * @param experimentId           experiment to launch
 * @param airavataCredStoreToken credential-store token passed to the orchestrator
 * @param orchestratorClient     connected orchestrator client (closed here)
 * @return true when all tasks were submitted
 * @throws TException wrapping any registry/launch failure
 */
private boolean launchSingleAppExperiment(String experimentId, String airavataCredStoreToken, OrchestratorService.Client orchestratorClient) throws TException {
    Experiment experiment = null;
    try {
        List<String> ids = registry.getIds(RegistryModelType.WORKFLOW_NODE_DETAIL, WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
        for (String workflowNodeId : ids) {
            List<Object> taskDetailList = registry.get(RegistryModelType.TASK_DETAIL, TaskDetailConstants.NODE_ID, workflowNodeId);
            for (Object o : taskDetailList) {
                TaskDetails taskData = (TaskDetails) o;
                // iterate through all the generated tasks and perform the job submission+monitoring
                experiment = (Experiment) registry.get(RegistryModelType.EXPERIMENT, experimentId);
                if (experiment == null) {
                    logger.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}", experimentId);
                    return false;
                }
                ExperimentStatus status = new ExperimentStatus();
                status.setExperimentState(ExperimentState.LAUNCHED);
                status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
                experiment.setExperimentStatus(status);
                registry.update(RegistryModelType.EXPERIMENT_STATUS, status, experimentId);
                registry.update(RegistryModelType.TASK_DETAIL, taskData, taskData.getTaskID());
                // launching the experiment
                orchestratorClient.launchTask(taskData.getTaskID(), airavataCredStoreToken);
            }
        }
    } catch (Exception e) {
        // Only potential failures here are registry access problems before the jobs
        // reach gfac (gfac manages experiment/task/job statuses once submitted), so
        // record the experiment as FAILED.
        ExperimentStatus status = new ExperimentStatus();
        status.setExperimentState(ExperimentState.FAILED);
        status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
        if (experiment != null) {
            // guard: experiment is still null when the failure happened before the
            // first registry.get above; the old code NPE'd here and masked the cause
            experiment.setExperimentStatus(status);
        }
        try {
            registry.update(RegistryModelType.EXPERIMENT_STATUS, status, experimentId);
        } catch (RegistryException e1) {
            logger.errorId(experimentId, "Error while updating experiment status to " + status.toString(), e);
            throw new TException(e);
        }
        logger.errorId(experimentId, "Error while updating task status, hence updated experiment status to " + status.toString(), e);
        throw new TException(e);
    } finally {
        orchestratorClient.getInputProtocol().getTransport().close();
    }
    return true;
}
/**
 * Build a Thrift client for the orchestrator service from the configured
 * host and port (port falls back to 8940 when not set).
 */
private OrchestratorService.Client getOrchestratorClient() {
    String orchestratorHost = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ORCHESTRATOR_SERVER_HOST, null);
    int orchestratorPort = Integer.parseInt(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ORCHESTRATOR_SERVER_PORT, "8940"));
    return OrchestratorClientFactory.createOrchestratorClient(orchestratorHost, orchestratorPort);
}
/**
 * Clone a specified experiment with a new name. A copy of the experiment configuration
 * is made and persisted with new metadata (fresh creation time; workflow node details
 * and errors from the source run are cleared). The client has to subsequently update
 * this configuration if needed and launch the cloned experiment.
 *
 * @param existingExperimentID experiment identifier whose configuration is copied
 * @param newExperiementName   name for the cloned experiment; ignored when blank
 * @return server-side generated globally unique identifier of the cloned experiment
 * @throws ExperimentNotFoundException if the source experiment does not exist
 * @throws AiravataSystemException for registry failures during the clone
 */
@Override
public String cloneExperiment(String existingExperimentID, String newExperiementName) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, existingExperimentID)){
            logger.errorId(existingExperimentID, "Error while cloning experiment {}, experiment doesn't exist.", existingExperimentID);
            throw new ExperimentNotFoundException("Requested experiment id " + existingExperimentID + " does not exist in the system..");
        }
        Experiment existingExperiment = (Experiment)registry.get(RegistryModelType.EXPERIMENT, existingExperimentID);
        existingExperiment.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
        if (validateString(newExperiementName)){
            existingExperiment.setName(newExperiementName);
        }
        // drop run-specific state so the clone starts clean
        if (existingExperiment.getWorkflowNodeDetailsList() != null){
            existingExperiment.getWorkflowNodeDetailsList().clear();
        }
        if (existingExperiment.getErrors() != null ){
            existingExperiment.getErrors().clear();
        }
        return (String)registry.add(ParentDataType.EXPERIMENT, existingExperiment);
    } catch (ExperimentNotFoundException e) {
        // rethrow as declared: the catch-all previously re-wrapped this as INTERNAL_ERROR
        throw e;
    } catch (Exception e) {
        logger.errorId(existingExperimentID, "Error while cloning the experiment with existing configuration...", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while cloning the experiment with existing configuration. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Terminate a running experiment by delegating to the orchestrator service.
 * NOTE(review): no registry existence check is performed here, so the declared
 * ExperimentNotFoundException depends on the orchestrator's own handling.
 *
 * @param airavataExperimentId identifier returned during the create experiment step
 */
@Override
public void terminateExperiment(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    getOrchestratorClient().terminateExperiment(airavataExperimentId);
}
/**
 * Register an Application Module.
 *
 * @param applicationModule application module object created from the data model
 * @return server-side generated, globally unique application module identifier
 * @throws AiravataSystemException when the app catalog rejects the registration
 */
@Override
public String registerApplicationModule(ApplicationModule applicationModule) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    try {
        appCatalog = AppCatalogFactory.getAppCatalog();
        return appCatalog.getApplicationInterface().addApplicationModule(applicationModule);
    } catch (AppCatalogException e) {
        logger.error("Error while adding application module...", e);
        AiravataSystemException ase = new AiravataSystemException();
        ase.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        ase.setMessage("Error while adding application module. More info : " + e.getMessage());
        throw ase;
    }
}
/**
 * Fetch an Application Module.
 *
 * @param appModuleId identifier of the requested application module
 * @return the application module object
 * @throws AiravataSystemException when the app catalog lookup fails
 */
@Override
public ApplicationModule getApplicationModule(String appModuleId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    try {
        appCatalog = AppCatalogFactory.getAppCatalog();
        return appCatalog.getApplicationInterface().getApplicationModule(appModuleId);
    } catch (AppCatalogException e) {
        logger.errorId(appModuleId, "Error while retrieving application module...", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        // fixed garbled message ("retrieving the adding application module")
        exception.setMessage("Error while retrieving application module. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Update an Application Module.
 *
 * @param appModuleId       identifier of the application module to update
 * @param applicationModule replacement application module object
 * @return true on success (a failure is reported via exception instead)
 * @throws AiravataSystemException when the app catalog update fails
 */
@Override
public boolean updateApplicationModule(String appModuleId, ApplicationModule applicationModule) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    try {
        appCatalog = AppCatalogFactory.getAppCatalog();
        appCatalog.getApplicationInterface().updateApplicationModule(appModuleId, applicationModule);
        return true;
    } catch (AppCatalogException e) {
        logger.errorId(appModuleId, "Error while updating application module...", e);
        AiravataSystemException ase = new AiravataSystemException();
        ase.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        ase.setMessage("Error while updating application module. More info : " + e.getMessage());
        throw ase;
    }
}
/**
 * Delete an Application Module.
 *
 * @param appModuleId identifier of the application module to delete
 * @return success/failure of the deletion as reported by the app catalog
 * @throws AiravataSystemException when the app catalog removal fails
 */
@Override
public boolean deleteApplicationModule(String appModuleId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    try {
        appCatalog = AppCatalogFactory.getAppCatalog();
        return appCatalog.getApplicationInterface().removeApplicationModule(appModuleId);
    } catch (AppCatalogException e) {
        logger.errorId(appModuleId, "Error while deleting application module...", e);
        AiravataSystemException ase = new AiravataSystemException();
        ase.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        ase.setMessage("Error while deleting the application module. More info : " + e.getMessage());
        throw ase;
    }
}
/**
 * Register an Application Deployment.
 *
 * @param applicationDeployment deployment description created from the data model
 * @return server-side generated, globally unique application deployment identifier
 * @throws AiravataSystemException when the app catalog rejects the registration
 */
@Override
public String registerApplicationDeployment(ApplicationDeploymentDescription applicationDeployment) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    try {
        appCatalog = AppCatalogFactory.getAppCatalog();
        return appCatalog.getApplicationDeployment().addApplicationDeployment(applicationDeployment);
    } catch (AppCatalogException e) {
        logger.error("Error while adding application deployment...", e);
        AiravataSystemException ase = new AiravataSystemException();
        ase.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        ase.setMessage("Error while adding application deployment. More info : " + e.getMessage());
        throw ase;
    }
}
/**
 * Fetch an Application Deployment.
 *
 * @param appDeploymentId identifier of the requested application deployment
 * @return the application deployment description
 * @throws AiravataSystemException when the app catalog lookup fails
 */
@Override
public ApplicationDeploymentDescription getApplicationDeployment(String appDeploymentId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    try {
        appCatalog = AppCatalogFactory.getAppCatalog();
        // note: getApplicationDeployement is the (misspelled) app-catalog API name
        return appCatalog.getApplicationDeployment().getApplicationDeployement(appDeploymentId);
    } catch (AppCatalogException e) {
        logger.errorId(appDeploymentId, "Error while retrieving application deployment...", e);
        AiravataSystemException ase = new AiravataSystemException();
        ase.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        ase.setMessage("Error while retrieving application deployment. More info : " + e.getMessage());
        throw ase;
    }
}
/**
* Update a Application Deployment.
*
* @param appDeploymentId The identifier for the requested application deployment to be updated.
* @param applicationDeployment
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateApplicationDeployment(String appDeploymentId, ApplicationDeploymentDescription applicationDeployment) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getApplicationDeployment().updateApplicationDeployment(appDeploymentId, applicationDeployment);
return true;
} catch (AppCatalogException e) {
logger.errorId(appDeploymentId, "Error while updating application deployment...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a Application deployment.
*
* @param appDeploymentId The identifier for the requested application deployment to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteApplicationDeployment(String appDeploymentId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getApplicationDeployment().removeAppDeployment(appDeploymentId);
return true;
} catch (AppCatalogException e) {
logger.errorId(appDeploymentId, "Error while deleting application deployment...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a list of Deployed Compute Hosts.
*
* @param appModuleId The identifier for the requested application module
* @return list<string>
* Returns a list of Deployed Resources.
*/
@Override
public List<String> getAppModuleDeployedResources(String appModuleId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
List<String> appDeployments = new ArrayList<String>();
appCatalog = AppCatalogFactory.getAppCatalog();
Map<String, String> filters = new HashMap<String, String>();
filters.put(AbstractResource.ApplicationDeploymentConstants.APP_MODULE_ID, appModuleId);
List<ApplicationDeploymentDescription> applicationDeployments = appCatalog.getApplicationDeployment().getApplicationDeployements(filters);
for (ApplicationDeploymentDescription description : applicationDeployments){
appDeployments.add(description.getAppDeploymentId());
}
return appDeployments;
} catch (AppCatalogException e) {
logger.errorId(appModuleId, "Error while retrieving application deployments...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Register a Application Interface.
*
* @param applicationInterface@return appInterfaceId
* Returns a server-side generated airavata application interface globally unique identifier.
*/
@Override
public String registerApplicationInterface(ApplicationInterfaceDescription applicationInterface) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().addApplicationInterface(applicationInterface);
} catch (AppCatalogException e) {
logger.error("Error while adding application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a Application Interface.
*
* @param appInterfaceId The identifier for the requested application module
* @return applicationInterface
* Returns a application Interface Object.
*/
@Override
public ApplicationInterfaceDescription getApplicationInterface(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getApplicationInterface(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while retrieving application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Application Interface.
*
* @param appInterfaceId The identifier for the requested application deployment to be updated.
* @param applicationInterface
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateApplicationInterface(String appInterfaceId, ApplicationInterfaceDescription applicationInterface) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getApplicationInterface().updateApplicationInterface(appInterfaceId, applicationInterface);
return true;
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while updating application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a Application Interface.
*
* @param appInterfaceId The identifier for the requested application interface to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteApplicationInterface(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().removeApplicationInterface(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while deleting application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch name and id of Application Interface documents.
*
* @return map<applicationId, applicationInterfaceNames>
* Returns a list of application interfaces with corresponsing id's
*/
@Override
public Map<String, String> getAllApplicationInterfaceNames() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
List<ApplicationInterfaceDescription> allApplicationInterfaces = appCatalog.getApplicationInterface().getAllApplicationInterfaces();
Map<String, String> allApplicationInterfacesMap = new HashMap<String, String>();
if (allApplicationInterfaces != null && !allApplicationInterfaces.isEmpty()){
for (ApplicationInterfaceDescription interfaceDescription : allApplicationInterfaces){
allApplicationInterfacesMap.put(interfaceDescription.getApplicationInterfaceId(), interfaceDescription.getApplicationName());
}
}
return allApplicationInterfacesMap;
} catch (AppCatalogException e) {
logger.error("Error while retrieving application interfaces...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application interfaces. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch all Application Interface documents.
*
* @return map<applicationId, applicationInterfaceNames>
* Returns a list of application interfaces documents
*/
@Override
public List<ApplicationInterfaceDescription> getAllApplicationInterfaces() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getAllApplicationInterfaces();
} catch (AppCatalogException e) {
logger.error("Error while retrieving application interfaces...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application interfaces. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the list of Application Inputs.
*
* @param appInterfaceId The identifier for the requested application interface
* @return list<applicationInterfaceModel.InputDataObjectType>
* Returns a list of application inputs.
*/
@Override
public List<InputDataObjectType> getApplicationInputs(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getApplicationInputs(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while retrieving application inputs...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application inputs. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the list of Application Outputs.
*
* @param appInterfaceId The identifier for the requested application interface
* @return list<applicationInterfaceModel.OutputDataObjectType>
* Returns a list of application outputs.
*/
@Override
public List<OutputDataObjectType> getApplicationOutputs(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getApplicationOutputs(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while retrieving application outputs...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application outputs. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a list of all deployed Compute Hosts for a given application interfaces.
*
* @param appInterfaceId The identifier for the requested application interface
* @return map<computeResourceId, computeResourceName>
* A map of registered compute resource id's and their corresponding hostnames.
* Deployments of each modules listed within the interfaces will be listed.
*/
@Override
public Map<String, String> getAvailableAppInterfaceComputeResources(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ApplicationDeployment applicationDeployment = appCatalog.getApplicationDeployment();
Map<String, String> allComputeResources = appCatalog.getComputeResource().getAllComputeResourceIdList();
Map<String, String> availableComputeResources = new HashMap<String, String>();
ApplicationInterfaceDescription applicationInterface =
appCatalog.getApplicationInterface().getApplicationInterface(appInterfaceId);
HashMap<String, String> filters = new HashMap<String,String>();
List<String> applicationModules = applicationInterface.getApplicationModules();
if (applicationModules != null && !applicationModules.isEmpty()){
for (String moduleId : applicationModules) {
filters.put(AbstractResource.ApplicationDeploymentConstants.APP_MODULE_ID, moduleId);
List<ApplicationDeploymentDescription> applicationDeployments =
applicationDeployment.getApplicationDeployements(filters);
for (ApplicationDeploymentDescription deploymentDescription : applicationDeployments) {
availableComputeResources.put(deploymentDescription.getComputeHostId(),
allComputeResources.get(deploymentDescription.getComputeHostId()));
}
}
}
return availableComputeResources;
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while saving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while saving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Register a Compute Resource.
*
* @param computeResourceDescription Compute Resource Object created from the datamodel.
* @return computeResourceId
* Returns a server-side generated airavata compute resource globally unique identifier.
*/
@Override
public String registerComputeResource(ComputeResourceDescription computeResourceDescription) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getComputeResource().addComputeResource(computeResourceDescription);
} catch (AppCatalogException e) {
logger.error("Error while saving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while saving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the given Compute Resource.
*
* @param computeResourceId The identifier for the requested compute resource
* @return computeResourceDescription
* Compute Resource Object created from the datamodel..
*/
@Override
public ComputeResourceDescription getComputeResource(String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getComputeResource().getComputeResource(computeResourceId);
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while retrieving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch all registered Compute Resources.
*
* @return A map of registered compute resource id's and thier corresponding hostnames.
* Compute Resource Object created from the datamodel..
*/
@Override
public Map<String, String> getAllComputeResourceNames() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getComputeResource().getAllComputeResourceIdList();
} catch (AppCatalogException e) {
logger.error("Error while retrieving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Compute Resource.
*
* @param computeResourceId The identifier for the requested compute resource to be updated.
* @param computeResourceDescription Compute Resource Object created from the datamodel.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateComputeResource(String computeResourceId, ComputeResourceDescription computeResourceDescription) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().updateComputeResource(computeResourceId, computeResourceDescription);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while updating compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updaing compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a Compute Resource.
*
* @param computeResourceId The identifier for the requested compute resource to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteComputeResource(String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().removeComputeResource(computeResourceId);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while deleting compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Local Job Submission details to a compute resource
* App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param localSubmission The LOCALSubmission object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addLocalSubmissionDetails(String computeResourceId, int priorityOrder, LOCALSubmission localSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addJobSubmissionInterface(computeResource, computeResourceId,
computeResource.addLocalJobSubmission(localSubmission), JobSubmissionProtocol.LOCAL, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given Local Job Submission details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param localSubmission The LOCALSubmission object to be updated.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean updateLocalSubmissionDetails(String jobSubmissionInterfaceId, LOCALSubmission localSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
LocalSubmissionResource submission = AppCatalogThriftConversion.getLocalJobSubmission(localSubmission);
submission.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
submission.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
private void addJobSubmissionInterface(ComputeResource computeResource,
String computeResourceId, String jobSubmissionInterfaceId,
JobSubmissionProtocol protocolType, int priorityOrder)
throws AppCatalogException {
JobSubmissionInterface jobSubmissionInterface = new JobSubmissionInterface();
jobSubmissionInterface.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
jobSubmissionInterface.setPriorityOrder(priorityOrder);
jobSubmissionInterface.setJobSubmissionProtocol(protocolType);
computeResource.addJobSubmissionProtocol(computeResourceId,jobSubmissionInterface);
}
/**
* Add a SSH Job Submission details to a compute resource
* App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param sshJobSubmission The SSHJobSubmission object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addSSHJobSubmissionDetails(String computeResourceId, int priorityOrder, SSHJobSubmission sshJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addJobSubmissionInterface(computeResource, computeResourceId,
computeResource.addSSHJobSubmission(sshJobSubmission), JobSubmissionProtocol.SSH, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Cloud Job Submission details to a compute resource
* App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param cloudJobSubmission The SSHJobSubmission object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addCloudJobSubmissionDetails(String computeResourceId, int priorityOrder, CloudJobSubmission cloudJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addJobSubmissionInterface(computeResource, computeResourceId,
computeResource.addCloudJobSubmission(cloudJobSubmission), JobSubmissionProtocol.CLOUD, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given SSH Job Submission details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param sshJobSubmission The SSHJobSubmission object to be updated.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean updateSSHJobSubmissionDetails(String jobSubmissionInterfaceId, SSHJobSubmission sshJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
SshJobSubmissionResource submission = AppCatalogThriftConversion.getSSHJobSubmission(sshJobSubmission);
submission.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
submission.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given SSH Job Submission details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param cloudJobSubmission The SSHJobSubmission object to be updated.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean updateCloudJobSubmissionDetails(String jobSubmissionInterfaceId, CloudJobSubmission cloudJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
CloudSubmissionResource submission = AppCatalogThriftConversion.getCloudJobSubmission(cloudJobSubmission);
submission.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
submission.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Local data moevement details to a compute resource
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param localDataMovement The LOCALDataMovement object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addLocalDataMovementDetails(String computeResourceId, int priorityOrder, LOCALDataMovement localDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addDataMovementInterface(computeResource, computeResourceId,
computeResource.addLocalDataMovement(localDataMovement), DataMovementProtocol.LOCAL, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding data movement interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding data movement interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given Local data movement details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param localDataMovement The LOCALDataMovement object to be updated.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateLocalDataMovementDetails(String jobSubmissionInterfaceId, LOCALDataMovement localDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
LocalDataMovementResource movment = AppCatalogThriftConversion.getLocalDataMovement(localDataMovement);
movment.setDataMovementInterfaceId(jobSubmissionInterfaceId);
movment.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
private void addDataMovementInterface(ComputeResource computeResource,
String computeResourceId, String dataMovementInterfaceId,
DataMovementProtocol protocolType, int priorityOrder)
throws AppCatalogException {
DataMovementInterface dataMovementInterface = new DataMovementInterface();
dataMovementInterface.setDataMovementInterfaceId(dataMovementInterfaceId);
dataMovementInterface.setPriorityOrder(priorityOrder);
dataMovementInterface.setDataMovementProtocol(protocolType);
computeResource.addDataMovementProtocol(computeResourceId,dataMovementInterface);
}
/**
* Add a SCP data moevement details to a compute resource
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param scpDataMovement The SCPDataMovement object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addSCPDataMovementDetails(String computeResourceId, int priorityOrder, SCPDataMovement scpDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addDataMovementInterface(computeResource, computeResourceId,
computeResource.addScpDataMovement(scpDataMovement), DataMovementProtocol.SCP, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding data movement interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding data movement interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given scp data movement details
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param scpDataMovement The SCPDataMovement object to be updated.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateSCPDataMovementDetails(String jobSubmissionInterfaceId, SCPDataMovement scpDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
ScpDataMovementResource movment = AppCatalogThriftConversion.getSCPDataMovementDescription(scpDataMovement);
movment.setDataMovementInterfaceId(jobSubmissionInterfaceId);
movment.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
    /**
     * Add a GridFTP data movement interface to a compute resource.
     * The App Catalog generates a dataMovementInterfaceId which is added to the
     * resource's dataMovementInterfaces.
     *
     * @param computeResourceId The identifier of the compute resource to which the data movement protocol is to be added
     * @param priorityOrder Specify the priority of this data movement interface. If this is the only interface, the priority can be zero.
     * @param gridFTPDataMovement The GridFTPDataMovement object to be added to the resource.
     * @return status
     *   Returns a success/failure of the addition.
     */
    @Override
    public boolean addGridFTPDataMovementDetails(String computeResourceId, int priorityOrder, GridFTPDataMovement gridFTPDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        try {
            appCatalog = AppCatalogFactory.getAppCatalog();
            ComputeResource computeResource = appCatalog.getComputeResource();
            // Persist the GridFTP description first; the returned id is registered on the resource.
            addDataMovementInterface(computeResource, computeResourceId,
                    computeResource.addGridFTPDataMovement(gridFTPDataMovement), DataMovementProtocol.GridFTP, priorityOrder);
            return true;
        } catch (AppCatalogException e) {
            logger.errorId(computeResourceId, "Error while adding data movement interface to resource compute resource...", e);
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("Error while adding data movement interface to resource compute resource. More info : " + e.getMessage());
            throw exception;
        }
    }
/**
* Update the given GridFTP data movement details to a compute resource
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param gridFTPDataMovement The GridFTPDataMovement object to be updated.
* @return status
* Returns a success/failure of the updation.
*/
@Override
public boolean updateGridFTPDataMovementDetails(String jobSubmissionInterfaceId, GridFTPDataMovement gridFTPDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
GridftpDataMovementResource movment = AppCatalogThriftConversion.getGridFTPDataMovementDescription(gridFTPDataMovement);
movment.setDataMovementInterfaceId(jobSubmissionInterfaceId);
movment.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
    /**
     * Change the priority of a given job submission interface.
     *
     * <p>NOTE: not yet implemented — always returns {@code false} without modifying anything.
     *
     * @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be changed
     * @param newPriorityOrder the new priority value for the interface
     * @return status
     *   Returns a success/failure of the change (currently always {@code false}).
     */
    @Override
    public boolean changeJobSubmissionPriority(String jobSubmissionInterfaceId, int newPriorityOrder) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        return false;
    }
    /**
     * Change the priority of a given data movement interface.
     *
     * <p>NOTE: not yet implemented — always returns {@code false} without modifying anything.
     *
     * @param dataMovementInterfaceId The identifier of the DataMovement Interface to be changed
     * @param newPriorityOrder the new priority value for the interface
     * @return status
     *   Returns a success/failure of the change (currently always {@code false}).
     */
    @Override
    public boolean changeDataMovementPriority(String dataMovementInterfaceId, int newPriorityOrder) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        return false;
    }
    /**
     * Change the priorities of a given set of job submission interfaces.
     *
     * <p>NOTE: not yet implemented — always returns {@code false} without modifying anything.
     *
     * @param jobSubmissionPriorityMap A Map of identifiers of the JobSubmission Interfaces and their associated priorities to be set.
     * @return status
     *   Returns a success/failure of the changes (currently always {@code false}).
     */
    @Override
    public boolean changeJobSubmissionPriorities(Map<String, Integer> jobSubmissionPriorityMap) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        return false;
    }
    /**
     * Change the priorities of a given set of data movement interfaces.
     *
     * <p>NOTE: not yet implemented — always returns {@code false} without modifying anything.
     *
     * @param dataMovementPriorityMap A Map of identifiers of the DataMovement Interfaces and their associated priorities to be set.
     * @return status
     *   Returns a success/failure of the changes (currently always {@code false}).
     */
    @Override
    public boolean changeDataMovementPriorities(Map<String, Integer> dataMovementPriorityMap) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        return false;
    }
/**
* Delete a given job submisison interface
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be changed
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteJobSubmissionInterface(String jobSubmissionInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().removeJobSubmissionInterface(jobSubmissionInterfaceId);
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while deleting job submission interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting job submission interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a given data movement interface
*
* @param dataMovementInterfaceId The identifier of the DataMovement Interface to be changed
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteDataMovementInterface(String dataMovementInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().removeDataMovementInterface(dataMovementInterfaceId);
return true;
} catch (AppCatalogException e) {
logger.errorId(dataMovementInterfaceId, "Error while deleting data movement interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting data movement interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Register a Gateway Resource Profile.
*
* @param gatewayResourceProfile Gateway Resource Profile Object.
* The GatewayID should be obtained from Airavata gateway registration and passed to register a corresponding
* resource profile.
* @return status.
* Returns a success/failure of the registration.
*/
@Override
public String registerGatewayResourceProfile(GatewayResourceProfile gatewayResourceProfile) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
return gatewayProfile.addGatewayResourceProfile(gatewayResourceProfile);
} catch (AppCatalogException e) {
logger.error("Error while registering gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while registering gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the given Gateway Resource Profile.
*
* @param gatewayID The identifier for the requested gateway resource
* @return gatewayResourceProfile
* Gateway Resource Profile Object.
*/
@Override
public GatewayResourceProfile getGatewayResourceProfile(String gatewayID) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
return gatewayProfile.getGatewayProfile(gatewayID);
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while retrieving gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Gateway Resource Profile.
*
* @param gatewayID The identifier for the requested gateway resource to be updated.
* @param gatewayResourceProfile Gateway Resource Profile Object.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateGatewayResourceProfile(String gatewayID, GatewayResourceProfile gatewayResourceProfile) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
gatewayProfile.updateGatewayResourceProfile(gatewayID, gatewayResourceProfile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while updating gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete the given Gateway Resource Profile.
*
* @param gatewayID The identifier for the requested gateway resource to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteGatewayResourceProfile(String gatewayID) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
gatewayProfile.removeGatewayResourceProfile(gatewayID);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while removing gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while removing gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Compute Resource Preference to a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be added.
* @param computeResourceId Preferences related to a particular compute resource
* @param computeResourcePreference The ComputeResourcePreference object to be added to the resource profile.
* @return status
* Returns a success/failure of the addition. If a profile already exists, this operation will fail.
* Instead an update should be used.
*/
@Override
public boolean addGatewayComputeResourcePreference(String gatewayID, String computeResourceId, ComputeResourcePreference computeResourcePreference) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
if (!gatewayProfile.isGatewayResourceProfileExists(gatewayID)){
throw new AppCatalogException("Gateway resource profile '"+gatewayID+"' does not exist!!!");
}
GatewayResourceProfile profile = gatewayProfile.getGatewayProfile(gatewayID);
// gatewayProfile.removeGatewayResourceProfile(gatewayID);
profile.addToComputeResourcePreferences(computeResourcePreference);
gatewayProfile.updateGatewayResourceProfile(gatewayID, profile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while registering gateway resource profile preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while registering gateway resource profile preference. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a Compute Resource Preference of a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be requested
* @param computeResourceId Preferences related to a particular compute resource
* @return computeResourcePreference
* Returns the ComputeResourcePreference object.
*/
@Override
public ComputeResourcePreference getGatewayComputeResourcePreference(String gatewayID, String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
ComputeResource computeResource = appCatalog.getComputeResource();
if (!gatewayProfile.isGatewayResourceProfileExists(gatewayID)){
logger.errorId(gatewayID, "Given gateway profile does not exist in the system. Please provide a valid gateway id...");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Given gateway profile does not exist in the system. Please provide a valid gateway id...");
throw exception;
}
if (!computeResource.isComputeResourceExists(computeResourceId)){
logger.errorId(computeResourceId, "Given compute resource does not exist in the system. Please provide a valid compute resource id...");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Given compute resource does not exist in the system. Please provide a valid compute resource id...");
throw exception;
}
return gatewayProfile.getComputeResourcePreference(gatewayID, computeResourceId);
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while reading gateway compute resource preference. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch all Compute Resource Preferences of a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be requested
* @return computeResourcePreference
* Returns the ComputeResourcePreference object.
*/
@Override
public List<ComputeResourcePreference> getAllGatewayComputeResourcePreferences(String gatewayID) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
return gatewayProfile.getGatewayProfile(gatewayID).getComputeResourcePreferences();
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preferences...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while reading gateway compute resource preferences. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Compute Resource Preference to a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be updated.
* @param computeResourceId Preferences related to a particular compute resource
* @param computeResourcePreference The ComputeResourcePreference object to be updated to the resource profile.
* @return status
* Returns a success/failure of the updation.
*/
@Override
public boolean updateGatewayComputeResourcePreference(String gatewayID, String computeResourceId, ComputeResourcePreference computeResourcePreference) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
GatewayResourceProfile profile = gatewayProfile.getGatewayProfile(gatewayID);
List<ComputeResourcePreference> computeResourcePreferences = profile.getComputeResourcePreferences();
ComputeResourcePreference preferenceToRemove = null;
for (ComputeResourcePreference preference : computeResourcePreferences) {
if (preference.getComputeResourceId().equals(computeResourceId)){
preferenceToRemove=preference;
break;
}
}
if (preferenceToRemove!=null) {
profile.getComputeResourcePreferences().remove(
preferenceToRemove);
}
profile.getComputeResourcePreferences().add(computeResourcePreference);
gatewayProfile.updateGatewayResourceProfile(gatewayID, profile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating gateway compute resource preference. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete the Compute Resource Preference of a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be deleted.
* @param computeResourceId Preferences related to a particular compute resource
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteGatewayComputeResourcePreference(String gatewayID, String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
GatewayResourceProfile profile = gatewayProfile.getGatewayProfile(gatewayID);
List<ComputeResourcePreference> computeResourcePreferences = profile.getComputeResourcePreferences();
ComputeResourcePreference preferenceToRemove = null;
for (ComputeResourcePreference preference : computeResourcePreferences) {
if (preference.getComputeResourceId().equals(computeResourceId)){
preferenceToRemove=preference;
break;
}
}
if (preferenceToRemove!=null) {
profile.getComputeResourcePreferences().remove(
preferenceToRemove);
}
gatewayProfile.updateGatewayResourceProfile(gatewayID, profile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating gateway compute resource preference. More info : " + e.getMessage());
throw exception;
}
}
}
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.api.server.handler;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.airavata.appcatalog.cpi.AppCatalog;
import org.airavata.appcatalog.cpi.AppCatalogException;
import org.airavata.appcatalog.cpi.ApplicationDeployment;
import org.airavata.appcatalog.cpi.ComputeResource;
import org.airavata.appcatalog.cpi.GwyResourceProfile;
import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
import org.apache.aiaravata.application.catalog.data.resources.*;
import org.apache.aiaravata.application.catalog.data.util.AppCatalogThriftConversion;
import org.apache.airavata.api.Airavata;
import org.apache.airavata.api.airavataAPIConstants;
import org.apache.airavata.api.server.util.DataModelUtils;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.logger.AiravataLogger;
import org.apache.airavata.common.logger.AiravataLoggerFactory;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.common.utils.AiravataZKUtils;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
import org.apache.airavata.model.appcatalog.computeresource.*;
import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
import org.apache.airavata.model.error.AiravataClientException;
import org.apache.airavata.model.error.AiravataErrorType;
import org.apache.airavata.model.error.AiravataSystemException;
import org.apache.airavata.model.error.ExperimentNotFoundException;
import org.apache.airavata.model.error.InvalidRequestException;
import org.apache.airavata.model.error.ProjectNotFoundException;
import org.apache.airavata.model.util.ExecutionType;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.model.workspace.experiment.*;
import org.apache.airavata.orchestrator.client.OrchestratorClientFactory;
import org.apache.airavata.orchestrator.cpi.OrchestratorService;
import org.apache.airavata.orchestrator.cpi.OrchestratorService.Client;
import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
import org.apache.airavata.registry.cpi.ChildDataType;
import org.apache.airavata.registry.cpi.ParentDataType;
import org.apache.airavata.registry.cpi.Registry;
import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.registry.cpi.RegistryModelType;
import org.apache.airavata.registry.cpi.utils.Constants;
import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.TaskDetailConstants;
import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.WorkflowNodeConstants;
import org.apache.airavata.workflow.engine.WorkflowEngine;
import org.apache.airavata.workflow.engine.WorkflowEngineException;
import org.apache.airavata.workflow.engine.WorkflowEngineFactory;
import org.apache.thrift.TException;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
public class AiravataServerHandler implements Airavata.Iface, Watcher {
private static final AiravataLogger logger = AiravataLoggerFactory.getLogger(AiravataServerHandler.class);
private Registry registry;
private AppCatalog appCatalog;
private ZooKeeper zk;
private static Integer mutex = -1;
public AiravataServerHandler() {
try {
String zkhostPort = AiravataZKUtils.getZKhostPort();
String airavataServerHostPort = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.API_SERVER_HOST)
+ ":" + ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.API_SERVER_PORT);
try {
zk = new ZooKeeper(zkhostPort, 6000, this); // no watcher is required, this will only use to store some data
String apiServer = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_API_SERVER_NODE,"/airavata-server");
String OrchServer = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_ORCHESTRATOR_SERVER_NODE,"/orchestrator-server");
String gfacServer = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_GFAC_SERVER_NODE,"/gfac-server");
String gfacExperiments = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ZOOKEEPER_GFAC_EXPERIMENT_NODE,"/gfac-experiments");
synchronized (mutex) {
mutex.wait(); // waiting for the syncConnected event
}
Stat zkStat = zk.exists(apiServer, false);
if (zkStat == null) {
zk.create(apiServer, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
}
String instantNode = apiServer + File.separator + String.valueOf(new Random().nextInt(Integer.MAX_VALUE));
zkStat = zk.exists(instantNode, false);
if (zkStat == null) {
zk.create(instantNode,
airavataServerHostPort.getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.EPHEMERAL); // other component will watch these childeren creation deletion to monitor the status of the node
logger.info("Successfully created airavata-server node");
}
zkStat = zk.exists(OrchServer, false);
if (zkStat == null) {
zk.create(OrchServer, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
logger.info("Successfully created orchestrator-server node");
}
zkStat = zk.exists(gfacServer, false);
if (zkStat == null) {
zk.create(gfacServer, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
logger.info("Successfully created gfac-server node");
}
zkStat = zk.exists(gfacServer, false);
if (zkStat == null) {
zk.create(gfacExperiments, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT);
logger.info("Successfully created gfac-server node");
}
logger.info("Finished starting ZK: " + zk);
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (KeeperException e) {
e.printStackTrace();
}
} catch (ApplicationSettingsException e) {
e.printStackTrace();
}
}
    /**
     * ZooKeeper watcher callback. The constructor blocks on {@code mutex.wait()} until
     * the client connection is established; any watched event wakes that waiter.
     *
     * <p>NOTE(review): every event type triggers the notify, not only SyncConnected —
     * confirm this is intended.
     */
    synchronized public void process(WatchedEvent watchedEvent) {
        synchronized (mutex) {
            mutex.notify();
        }
    }
    /**
     * Query Airavata to fetch the API version.
     *
     * @return the constant API version string from {@code airavataAPIConstants}
     */
    @Override
    public String getAPIVersion() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        return airavataAPIConstants.AIRAVATA_API_VERSION;
    }
/**
* Create a Project
*
* @param project
*/
@Override
public String createProject(Project project) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!validateString(project.getName()) || !validateString(project.getOwner())){
logger.error("Project name and owner cannot be empty...");
throw new AiravataSystemException(AiravataErrorType.INTERNAL_ERROR);
}
return (String)registry.add(ParentDataType.PROJECT, project);
} catch (RegistryException e) {
logger.error("Error while creating the project", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while creating the project. More info : " + e.getMessage());
throw exception;
}
}
public void updateProject(String projectId, Project updatedProject) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
ProjectNotFoundException,
TException {
if (!validateString(projectId) || !validateString(projectId)){
logger.error("Project id cannot be empty...");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Project id cannot be empty...");
throw exception;
}
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.PROJECT, projectId)){
logger.error("Project does not exist in the system. Please provide a valid project ID...");
ProjectNotFoundException exception = new ProjectNotFoundException();
exception.setMessage("Project does not exist in the system. Please provide a valid project ID...");
throw exception;
}
registry.update(RegistryModelType.PROJECT, updatedProject, projectId);
} catch (RegistryException e) {
logger.error("Error while updating the project", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating the project. More info : " + e.getMessage());
throw exception;
}
}
private boolean validateString(String name){
boolean valid = true;
if (name == null || name.equals("") || name.trim().length() == 0){
valid = false;
}
return valid;
}
/**
* Get a Project by ID
*
* @param projectId
*/
@Override
public Project getProject(String projectId) throws InvalidRequestException,
AiravataClientException,
AiravataSystemException,
ProjectNotFoundException,
TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.PROJECT, projectId)){
logger.error("Project does not exist in the system. Please provide a valid project ID...");
ProjectNotFoundException exception = new ProjectNotFoundException();
exception.setMessage("Project does not exist in the system. Please provide a valid project ID...");
throw exception;
}
return (Project)registry.get(RegistryModelType.PROJECT, projectId);
} catch (RegistryException e) {
logger.error("Error while updating the project", e);
ProjectNotFoundException exception = new ProjectNotFoundException();
exception.setMessage("Error while updating the project. More info : " + e.getMessage());
throw exception;
}
}
/**
* Get all Project by user
*
* @param userName
*/
@Override
public List<Project> getAllUserProjects(String userName) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
if (!validateString(userName)){
logger.error("Username cannot be empty. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Username cannot be empty. Please provide a valid user..");
throw exception;
}
List<Project> projects = new ArrayList<Project>();
try {
if (!ResourceUtils.isUserExist(userName)){
logger.error("User does not exist in the system. Please provide a valid user..");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("User does not exist in the system. Please provide a valid user..");
throw exception;
}
registry = RegistryFactory.getDefaultRegistry();
List<Object> list = registry.get(RegistryModelType.PROJECT, Constants.FieldConstants.ProjectConstants.OWNER, userName);
if (list != null && !list.isEmpty()){
for (Object o : list){
projects.add((Project) o);
}
}
return projects;
} catch (RegistryException e) {
logger.error("Error while retrieving projects", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving projects. More info : " + e.getMessage());
throw exception;
}
}
/**
 * Search the projects owned by {@code userName} whose name matches {@code projectName}.
 *
 * @param userName    owner of the projects to search
 * @param projectName project-name filter handed to the registry search
 * @return matching projects (empty list when nothing matches)
 */
public List<Project> searchProjectsByProjectName(String userName, String projectName) throws InvalidRequestException,
                                                                                             AiravataClientException,
                                                                                             AiravataSystemException,
                                                                                             TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<Project> projects = new ArrayList<Project>();
        registry = RegistryFactory.getDefaultRegistry();
        Map<String, String> filters = new HashMap<String, String>();
        filters.put(Constants.FieldConstants.ProjectConstants.OWNER, userName);
        filters.put(Constants.FieldConstants.ProjectConstants.PROJECT_NAME, projectName);
        List<Object> results = registry.search(RegistryModelType.PROJECT, filters);
        for (Object object : results) {
            projects.add((Project) object);
        }
        return projects;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving projects" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving projects", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving projects. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Search the projects owned by {@code userName} whose description matches {@code description}.
 *
 * @param userName    owner of the projects to search
 * @param description description filter handed to the registry search
 * @return matching projects (empty list when nothing matches)
 */
public List<Project> searchProjectsByProjectDesc(String userName, String description) throws InvalidRequestException,
                                                                                             AiravataClientException,
                                                                                             AiravataSystemException,
                                                                                             TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<Project> projects = new ArrayList<Project>();
        registry = RegistryFactory.getDefaultRegistry();
        Map<String, String> filters = new HashMap<String, String>();
        filters.put(Constants.FieldConstants.ProjectConstants.OWNER, userName);
        filters.put(Constants.FieldConstants.ProjectConstants.DESCRIPTION, description);
        List<Object> results = registry.search(RegistryModelType.PROJECT, filters);
        for (Object object : results) {
            projects.add((Project) object);
        }
        return projects;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving projects" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving projects", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving projects. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Search experiments of {@code userName} whose experiment name matches {@code expName}.
 *
 * @param userName owner of the experiments to search
 * @param expName  experiment-name filter handed to the registry search
 * @return summaries of matching experiments (empty list when nothing matches)
 */
public List<ExperimentSummary> searchExperimentsByName(String userName, String expName) throws InvalidRequestException,
                                                                                               AiravataClientException,
                                                                                               AiravataSystemException,
                                                                                               TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
        registry = RegistryFactory.getDefaultRegistry();
        Map<String, String> filters = new HashMap<String, String>();
        filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
        filters.put(Constants.FieldConstants.ExperimentConstants.EXPERIMENT_NAME, expName);
        List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
        for (Object object : results) {
            summaries.add((ExperimentSummary) object);
        }
        return summaries;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving experiments" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving experiments", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Search experiments of {@code userName} whose description matches {@code description}.
 *
 * @param userName    owner of the experiments to search
 * @param description description filter handed to the registry search
 * @return summaries of matching experiments (empty list when nothing matches)
 */
public List<ExperimentSummary> searchExperimentsByDesc(String userName, String description) throws InvalidRequestException,
                                                                                                   AiravataClientException,
                                                                                                   AiravataSystemException,
                                                                                                   TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
        registry = RegistryFactory.getDefaultRegistry();
        Map<String, String> filters = new HashMap<String, String>();
        filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
        filters.put(Constants.FieldConstants.ExperimentConstants.EXPERIMENT_DESC, description);
        List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
        for (Object object : results) {
            summaries.add((ExperimentSummary) object);
        }
        return summaries;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving experiments" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving experiments", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Search experiments of {@code userName} that used the given application.
 *
 * @param userName      owner of the experiments to search
 * @param applicationId application-id filter handed to the registry search
 * @return summaries of matching experiments (empty list when nothing matches)
 */
public List<ExperimentSummary> searchExperimentsByApplication(String userName, String applicationId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
        registry = RegistryFactory.getDefaultRegistry();
        Map<String, String> filters = new HashMap<String, String>();
        filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
        filters.put(Constants.FieldConstants.ExperimentConstants.APPLICATION_ID, applicationId);
        List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
        for (Object object : results) {
            summaries.add((ExperimentSummary) object);
        }
        return summaries;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving experiments" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving experiments", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Search experiments of {@code userName} currently in the given state.
 *
 * @param userName        owner of the experiments to search
 * @param experimentState experiment state to match; its string form is used as the filter value
 * @return summaries of matching experiments (empty list when nothing matches)
 */
@Override
public List<ExperimentSummary> searchExperimentsByStatus(String userName, ExperimentState experimentState) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
        registry = RegistryFactory.getDefaultRegistry();
        Map<String, String> filters = new HashMap<String, String>();
        filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
        filters.put(Constants.FieldConstants.ExperimentConstants.EXPERIMENT_STATUS, experimentState.toString());
        List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
        for (Object object : results) {
            summaries.add((ExperimentSummary) object);
        }
        return summaries;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving experiments" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving experiments", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Search experiments of {@code userName} created inside the given time window.
 *
 * @param userName owner of the experiments to search
 * @param fromTime window start, epoch milliseconds (passed to the registry as a string)
 * @param toTime   window end, epoch milliseconds (passed to the registry as a string)
 * @return summaries of matching experiments (empty list when nothing matches)
 */
@Override
public List<ExperimentSummary> searchExperimentsByCreationTime(String userName, long fromTime, long toTime) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<ExperimentSummary> summaries = new ArrayList<ExperimentSummary>();
        registry = RegistryFactory.getDefaultRegistry();
        Map<String, String> filters = new HashMap<String, String>();
        filters.put(Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
        filters.put(Constants.FieldConstants.ExperimentConstants.FROM_DATE, String.valueOf(fromTime));
        filters.put(Constants.FieldConstants.ExperimentConstants.TO_DATE, String.valueOf(toTime));
        List<Object> results = registry.search(RegistryModelType.EXPERIMENT, filters);
        for (Object object : results) {
            summaries.add((ExperimentSummary) object);
        }
        return summaries;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving experiments" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving experiments", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving experiments. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Get all Experiments within a Project
 *
 * @param projectId identifier of the project whose experiments are fetched
 * @return all experiments belonging to the project (empty list when none exist)
 * @throws ProjectNotFoundException when no project with the given id exists
 */
@Override
public List<Experiment> getAllExperimentsInProject(String projectId) throws InvalidRequestException,
                                                                            AiravataClientException,
                                                                            AiravataSystemException,
                                                                            ProjectNotFoundException,
                                                                            TException {
    if (!validateString(projectId)) {
        logger.error("Project id cannot be empty. Please provide a valid project ID...");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Project id cannot be empty. Please provide a valid project ID...");
        throw exception;
    }
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.PROJECT, projectId)) {
            logger.error("Project does not exist in the system. Please provide a valid project ID...");
            ProjectNotFoundException exception = new ProjectNotFoundException();
            exception.setMessage("Project does not exist in the system. Please provide a valid project ID...");
            throw exception;
        }
        List<Experiment> experiments = new ArrayList<Experiment>();
        List<Object> list = registry.get(RegistryModelType.EXPERIMENT, Constants.FieldConstants.ExperimentConstants.PROJECT_ID, projectId);
        if (list != null && !list.isEmpty()) {
            for (Object o : list) {
                experiments.add((Experiment) o);
            }
        }
        return experiments;
    } catch (ProjectNotFoundException e) {
        // Preserve the declared exception type: the generic catch below would
        // otherwise re-wrap it as AiravataSystemException, breaking callers
        // that catch ProjectNotFoundException.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving the experiments", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the experiments. More info : " + e.getMessage());
        throw exception;
    }
}
/**
 * Get all Experiments by user
 *
 * @param userName owner whose experiments are fetched
 * @return every experiment owned by the given user (empty list when none exist)
 */
@Override
public List<Experiment> getAllUserExperiments(String userName) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    if (!validateString(userName)) {
        logger.error("Username cannot be empty. Please provide a valid user..");
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Username cannot be empty. Please provide a valid user..");
        throw exception;
    }
    try {
        if (!ResourceUtils.isUserExist(userName)) {
            logger.error("User does not exist in the system. Please provide a valid user..");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("User does not exist in the system. Please provide a valid user..");
            throw exception;
        }
        List<Experiment> experiments = new ArrayList<Experiment>();
        registry = RegistryFactory.getDefaultRegistry();
        List<Object> list = registry.get(RegistryModelType.EXPERIMENT, Constants.FieldConstants.ExperimentConstants.USER_NAME, userName);
        if (list != null && !list.isEmpty()) {
            for (Object o : list) {
                experiments.add((Experiment) o);
            }
        }
        return experiments;
    } catch (AiravataSystemException e) {
        // Rethrow as-is: the user-not-found error raised above must not be
        // re-wrapped into a generic "Error while retrieving the experiments" message.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving the experiments", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the experiments. More info : " + e.getMessage());
        throw exception;
    }
}
/**
* Create an experiment for the specified user belonging to the gateway. The gateway identity is not explicitly passed
* but inferred from the authentication header. This experiment is just a persistent place holder. The client
* has to subsequently configure and launch the created experiment. No action is taken on Airavata Server except
* registering the experiment in a persistent store.
*
* @param experiment@return The server-side generated airavata experiment globally unique identifier.
* @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public String createExperiment(Experiment experiment) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        // An experiment must carry a non-empty name before it can be persisted.
        if (!validateString(experiment.getName())) {
            logger.error("Cannot create experiments with empty experiment name");
            AiravataSystemException exception = new AiravataSystemException();
            exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
            exception.setMessage("Cannot create experiments with empty experiment name");
            throw exception;
        }
        String experimentId = (String) registry.add(ParentDataType.EXPERIMENT, experiment);
        logger.infoId(experimentId, "Created new experiment with experiment name {}", experiment.getName());
        return experimentId;
    } catch (AiravataSystemException e) {
        // Rethrow the validation failure as-is instead of double-wrapping it.
        throw e;
    } catch (Exception e) {
        // Pass the exception to the logger so the stack trace is not lost.
        logger.error("Error while creating the experiment with experiment name " + experiment.getName(), e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while creating the experiment. More info : " + e.getMessage());
        throw exception;
    }
}
/**
* Fetch previously created experiment metadata.
*
* @param airavataExperimentId The identifier for the requested experiment. This is returned during the create experiment step.
* @return experimentMetada
* This method will return the previously stored experiment metadata.
* @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.ExperimentNotFoundException If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
* @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public Experiment getExperiment(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        return (Experiment) registry.get(RegistryModelType.EXPERIMENT, airavataExperimentId);
    } catch (ExperimentNotFoundException e) {
        // Preserve the declared exception type: the generic catch below would
        // otherwise re-wrap it as AiravataSystemException, breaking callers
        // that catch ExperimentNotFoundException.
        throw e;
    } catch (Exception e) {
        logger.error("Error while retrieving the experiment", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the experiment. More info : " + e.getMessage());
        throw exception;
    }
}
/**
* Configure a previously created experiment with required inputs, scheduling and other quality of service
* parameters. This method only updates the experiment object within the registry. The experiment has to be launched
* to make it actionable by the server.
*
* @param airavataExperimentId The identifier for the requested experiment. This is returned during the create experiment step.
* @param experiment
* @return This method call does not have a return value.
* @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.ExperimentNotFoundException If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
* @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public void updateExperiment(String airavataExperimentId, Experiment experiment) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            logger.errorId(airavataExperimentId, "Update request failed, Experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        // Only experiments that have not started executing may be updated.
        ExperimentStatus experimentStatus = getExperimentStatus(airavataExperimentId);
        if (experimentStatus != null) {
            ExperimentState experimentState = experimentStatus.getExperimentState();
            switch (experimentState) {
                case CREATED: case VALIDATED: case CANCELED: case FAILED: case UNKNOWN:
                    registry.update(RegistryModelType.EXPERIMENT, experiment, airavataExperimentId);
                    logger.infoId(airavataExperimentId, "Successfully updated experiment {} ", experiment.getName());
                    break;
                default:
                    logger.errorId(airavataExperimentId, "Error while updating experiment. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ");
                    AiravataSystemException exception = new AiravataSystemException();
                    exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
                    exception.setMessage("Error while updating experiment. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ");
                    throw exception;
            }
        }
    } catch (ExperimentNotFoundException e) {
        // Preserve the declared exception type instead of re-wrapping it below.
        throw e;
    } catch (AiravataSystemException e) {
        // The invalid-state error above already carries the precise message.
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while updating experiment", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while updating experiment. More info : " + e.getMessage());
        throw exception;
    }
}
@Override
public void updateExperimentConfiguration(String airavataExperimentId, UserConfigurationData userConfiguration) throws TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            logger.errorId(airavataExperimentId, "Update experiment configuration failed, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        // Configuration may only be replaced while the experiment has not started executing.
        ExperimentStatus experimentStatus = getExperimentStatus(airavataExperimentId);
        if (experimentStatus != null) {
            ExperimentState experimentState = experimentStatus.getExperimentState();
            switch (experimentState) {
                case CREATED: case VALIDATED: case CANCELED: case FAILED: case UNKNOWN:
                    registry.add(ChildDataType.EXPERIMENT_CONFIGURATION_DATA, userConfiguration, airavataExperimentId);
                    logger.infoId(airavataExperimentId, "Successfully updated experiment configuration for experiment {}.", airavataExperimentId);
                    break;
                default:
                    logger.errorId(airavataExperimentId, "Error while updating experiment {}. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ", airavataExperimentId);
                    AiravataSystemException exception = new AiravataSystemException();
                    exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
                    exception.setMessage("Error while updating experiment. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ");
                    throw exception;
            }
        }
    } catch (ExperimentNotFoundException e) {
        // ExperimentNotFoundException is a TException; surface it unchanged
        // instead of re-wrapping it into a generic system error below.
        throw e;
    } catch (AiravataSystemException e) {
        // The invalid-state error above already carries the precise message.
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while updating user configuration", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while updating user configuration. " +
                "Update experiment is only valid for experiments " +
                "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                "experiment is in one of above statuses... " + e.getMessage());
        throw exception;
    }
}
@Override
public void updateResourceScheduleing(String airavataExperimentId, ComputationalResourceScheduling resourceScheduling) throws TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            // Log at error level: this is a failure, not an informational event.
            logger.errorId(airavataExperimentId, "Update resource scheduling failed, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
        // Scheduling info may only be replaced while the experiment has not started executing.
        ExperimentStatus experimentStatus = getExperimentStatus(airavataExperimentId);
        if (experimentStatus != null) {
            ExperimentState experimentState = experimentStatus.getExperimentState();
            switch (experimentState) {
                case CREATED: case VALIDATED: case CANCELED: case FAILED: case UNKNOWN:
                    registry.add(ChildDataType.COMPUTATIONAL_RESOURCE_SCHEDULING, resourceScheduling, airavataExperimentId);
                    logger.infoId(airavataExperimentId, "Successfully updated resource scheduling for the experiment {}.", airavataExperimentId);
                    break;
                default:
                    logger.errorId(airavataExperimentId, "Error while updating scheduling info. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ");
                    AiravataSystemException exception = new AiravataSystemException();
                    exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
                    exception.setMessage("Error while updating experiment. Update experiment is only valid for experiments " +
                            "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                            "experiment is in one of above statuses... ");
                    throw exception;
            }
        }
    } catch (ExperimentNotFoundException e) {
        // ExperimentNotFoundException is a TException; surface it unchanged
        // instead of re-wrapping it into a generic system error below.
        throw e;
    } catch (AiravataSystemException e) {
        // The invalid-state error above already carries the precise message.
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while updating scheduling info", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while updating scheduling info. " +
                "Update experiment is only valid for experiments " +
                "with status CREATED, VALIDATED, CANCELLED, FAILED and UNKNOWN. Make sure the given " +
                "experiment is in one of above statuses... " + e.getMessage());
        throw exception;
    }
}
/**
 * Validate experiment configuration. A true in general indicates the experiment
 * is ready to be launched.
 *
 * @param airavataExperimentId identifier of the experiment to validate
 * @return whether the orchestrator considers the experiment valid
 * @throws ExperimentNotFoundException when no experiment with the given id exists
 */
@Override
public boolean validateExperiment(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            logger.errorId(airavataExperimentId, "Experiment validation failed , experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
        }
    } catch (RegistryException e1) {
        logger.errorId(airavataExperimentId, "Error while retrieving projects", e1);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving projects. More info : " + e1.getMessage());
        throw exception;
    }
    // Delegate the actual validation to the orchestrator service.
    boolean valid = getOrchestratorClient().validateExperiment(airavataExperimentId);
    if (valid) {
        logger.infoId(airavataExperimentId, "Experiment validation succeed.");
    } else {
        logger.infoId(airavataExperimentId, "Experiment validation failed.");
    }
    return valid;
}
/**
 * Fetch the current status of the previously created experiment.
 *
 * @param airavataExperimentId The identifier for the requested experiment. This is returned during the create experiment step.
 * @return the experiment's current status as stored in the registry
 * @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
 * @throws org.apache.airavata.model.error.ExperimentNotFoundException If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
 * @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
 *<p/>
 *UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
 *step, then Airavata Registry will not have a provenance area setup. The client has to follow
 *gateway registration steps and retry this request.
 *<p/>
 *AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
 *For now this is a place holder.
 *<p/>
 *INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
 *is implemented, the authorization will be more substantial.
 * @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any
 *          Airavata Server side issues and if the problem cannot be corrected by the client
 *          rather an Airavata Administrator will be notified to take corrective action.
 */
@Override
public ExperimentStatus getExperimentStatus(String airavataExperimentId) throws InvalidRequestException,
                                                                                ExperimentNotFoundException,
                                                                                AiravataClientException,
                                                                                AiravataSystemException,
                                                                                TException {
    try {
        registry = RegistryFactory.getDefaultRegistry();
        if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
            logger.errorId(airavataExperimentId, "Error while retrieving experiment status, experiment {} doesn't exist.", airavataExperimentId);
            throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId +
                    " does not exist in the system..");
        }
        return (ExperimentStatus) registry.get(RegistryModelType.EXPERIMENT_STATUS, airavataExperimentId);
    } catch (ExperimentNotFoundException e) {
        // Preserve the declared exception type: the generic catch below would
        // otherwise re-wrap it as AiravataSystemException, breaking callers
        // that catch ExperimentNotFoundException.
        throw e;
    } catch (Exception e) {
        logger.errorId(airavataExperimentId, "Error while retrieving the experiment status", e);
        AiravataSystemException exception = new AiravataSystemException();
        exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
        exception.setMessage("Error while retrieving the experiment status. More info : " + e.getMessage());
        throw exception;
    }
}
@Override
public List<DataObjectType> getExperimentOutputs(String airavataExperimentId) throws TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
logger.errorId(airavataExperimentId, "Get experiment outputs failed, experiment {} doesn't exit.", airavataExperimentId);
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
return (List<DataObjectType>)registry.get(RegistryModelType.EXPERIMENT_OUTPUT, airavataExperimentId);
} catch (Exception e) {
logger.errorId(airavataExperimentId, "Error while retrieving the experiment outputs", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the experiment outputs. More info : " + e.getMessage());
throw exception;
}
}
public Map<String, JobStatus> getJobStatuses(String airavataExperimentId) throws TException {
Map<String, JobStatus> jobStatus = new HashMap<String, JobStatus>();
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
logger.errorId(airavataExperimentId, "Error while retrieving job status, the experiment {} doesn't exist.", airavataExperimentId);
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
List<Object> workflowNodes = registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, Constants.FieldConstants.WorkflowNodeConstants.EXPERIMENT_ID, airavataExperimentId);
if (workflowNodes != null && !workflowNodes.isEmpty()){
for (Object wf : workflowNodes){
String nodeInstanceId = ((WorkflowNodeDetails) wf).getNodeInstanceId();
List<Object> taskDetails = registry.get(RegistryModelType.TASK_DETAIL, Constants.FieldConstants.TaskDetailConstants.NODE_ID, nodeInstanceId);
if (taskDetails != null && !taskDetails.isEmpty()){
for (Object ts : taskDetails){
String taskID = ((TaskDetails) ts).getTaskID();
List<Object> jobDetails = registry.get(RegistryModelType.JOB_DETAIL, Constants.FieldConstants.JobDetaisConstants.TASK_ID, taskID);
if (jobDetails != null && !jobDetails.isEmpty()){
for (Object job : jobDetails){
String jobID = ((JobDetails) job).getJobID();
jobStatus.put(jobID, ((JobDetails) job).getJobStatus());
}
}
}
}
}
}
} catch (Exception e) {
logger.errorId(airavataExperimentId, "Error while retrieving the job statuses", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the job statuses. More info : " + e.getMessage());
throw exception;
}
return jobStatus;
}
@Override
public List<JobDetails> getJobDetails(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
List<JobDetails> jobDetailsList = new ArrayList<JobDetails>();
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
logger.errorId(airavataExperimentId, "Error while retrieving job details, experiment {} doesn't exist.", airavataExperimentId);
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
List<Object> workflowNodes = registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, Constants.FieldConstants.WorkflowNodeConstants.EXPERIMENT_ID, airavataExperimentId);
if (workflowNodes != null && !workflowNodes.isEmpty()){
for (Object wf : workflowNodes){
String nodeInstanceId = ((WorkflowNodeDetails) wf).getNodeInstanceId();
List<Object> taskDetails = registry.get(RegistryModelType.TASK_DETAIL, Constants.FieldConstants.TaskDetailConstants.NODE_ID, nodeInstanceId);
if (taskDetails != null && !taskDetails.isEmpty()){
for (Object ts : taskDetails){
String taskID = ((TaskDetails) ts).getTaskID();
List<Object> jobDetails = registry.get(RegistryModelType.JOB_DETAIL, Constants.FieldConstants.JobDetaisConstants.TASK_ID, taskID);
if (jobDetails != null && !jobDetails.isEmpty()){
for (Object job : jobDetails){
jobDetailsList.add((JobDetails) job);
}
}
}
}
}
}
} catch (Exception e) {
logger.errorId(airavataExperimentId, "Error while retrieving the job details", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the job details. More info : " + e.getMessage());
throw exception;
}
return jobDetailsList;
}
@Override
public List<DataTransferDetails> getDataTransferDetails(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
List<DataTransferDetails> dataTransferDetailList = new ArrayList<DataTransferDetails>();
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
logger.errorId(airavataExperimentId, "Error while retrieving data transfer details, experiment {} doesn't exit.", airavataExperimentId);
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
List<Object> workflowNodes = registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, Constants.FieldConstants.WorkflowNodeConstants.EXPERIMENT_ID, airavataExperimentId);
if (workflowNodes != null && !workflowNodes.isEmpty()){
for (Object wf : workflowNodes){
String nodeInstanceId = ((WorkflowNodeDetails) wf).getNodeInstanceId();
List<Object> taskDetails = registry.get(RegistryModelType.TASK_DETAIL, Constants.FieldConstants.TaskDetailConstants.NODE_ID, nodeInstanceId);
if (taskDetails != null && !taskDetails.isEmpty()){
for (Object ts : taskDetails){
String taskID = ((TaskDetails) ts).getTaskID();
List<Object> dataTransferDetails = registry.get(RegistryModelType.DATA_TRANSFER_DETAIL, Constants.FieldConstants.JobDetaisConstants.TASK_ID, taskID);
if (dataTransferDetails != null && !dataTransferDetails.isEmpty()){
for (Object dataTransfer : dataTransferDetails){
dataTransferDetailList.add((DataTransferDetails) dataTransfer);
}
}
}
}
}
}
} catch (Exception e) {
logger.errorId(airavataExperimentId, "Error while retrieving the data transfer details", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the data transfer details. More info : " + e.getMessage());
throw exception;
}
return dataTransferDetailList;
}
/**
* Launch a previously created and configured experiment. Airavata Server will then start processing the request and appropriate
* notifications and intermediate and output data will be subsequently available for this experiment.
*
*
* @param airavataExperimentId The identifier for the requested experiment. This is returned during the create experiment step.
* @param airavataCredStoreToken :
* A requirement to execute experiments within Airavata is to first register the targeted remote computational account
* credentials with Airavata Credential Store. The administrative API (related to credential store) will return a
* generated token associated with the registered credentials. The client has to security posses this token id and is
* required to pass it to Airavata Server for all execution requests.
* Note: At this point only the credential store token is required so the string is directly passed here. In future if
* if more security credentials are enables, then the structure ExecutionSecurityParameters should be used.
* Note: This parameter is not persisted within Airavata Registry for security reasons.
* @return This method call does not have a return value.
* @throws org.apache.airavata.model.error.InvalidRequestException
* For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.ExperimentNotFoundException
* If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
* @throws org.apache.airavata.model.error.AiravataClientException
* The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException
* This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public void launchExperiment(final String airavataExperimentId, String airavataCredStoreToken) throws TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)) {
logger.errorId(airavataExperimentId, "Error while launching experiment, experiment {} doesn't exist.", airavataExperimentId);
throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
}
} catch (RegistryException e1) {
logger.errorId(airavataExperimentId, "Error while retrieving projects", e1);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving projects. More info : " + e1.getMessage());
throw exception;
}
final String expID = airavataExperimentId;
final String token = airavataCredStoreToken;
synchronized (this) {
Experiment experiment = getExperiment(expID);
ExecutionType executionType = DataModelUtils.getExecutionType(experiment);
Thread thread = null;
if (executionType==ExecutionType.SINGLE_APP) {
//its an single application execution experiment
logger.debugId(airavataExperimentId, "Launching single application experiment {}.", airavataExperimentId);
final OrchestratorService.Client orchestratorClient = getOrchestratorClient();
if (orchestratorClient.validateExperiment(expID)) {
thread = new Thread() {
public void run() {
try {
launchSingleAppExperiment(expID, token, orchestratorClient);
} catch (TException e) {
// throwing exception from here useless, just print the error log
logger.errorId(airavataExperimentId, "Error while launching single application experiment.", e);
}
}
};
} else {
logger.errorId(airavataExperimentId, "Experiment validation failed. Please check the configurations.");
throw new InvalidRequestException("Experiment Validation Failed, please check the configuration");
}
} else if (executionType == ExecutionType.WORKFLOW){
//its a workflow execution experiment
logger.debugId(airavataExperimentId, "Launching workflow experiment {}.", airavataExperimentId);
thread = new Thread() {
public void run() {
try {
launchWorkflowExperiment(expID, token);
} catch (TException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
}
};
} else {
logger.errorId(airavataExperimentId, "Couldn't identify experiment type, experiment {} is neither single application nor workflow.", airavataExperimentId);
throw new InvalidRequestException("Experiment '" + expID + "' launch failed. Unable to figureout execution type for application " + experiment.getApplicationId());
}
thread.start();
}
}
private void launchWorkflowExperiment(String experimentId, String airavataCredStoreToken) throws TException {
try {
WorkflowEngine workflowEngine = WorkflowEngineFactory.getWorkflowEngine();
workflowEngine.launchExperiment(experimentId, airavataCredStoreToken);
} catch (WorkflowEngineException e) {
logger.errorId(experimentId, "Error while launching experiment.", e);
}
}
private boolean launchSingleAppExperiment(String experimentId, String airavataCredStoreToken, OrchestratorService.Client orchestratorClient) throws TException {
Experiment experiment = null;
try {
List<String> ids = registry.getIds(RegistryModelType.WORKFLOW_NODE_DETAIL, WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
for (String workflowNodeId : ids) {
// WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, workflowNodeId);
List<Object> taskDetailList = registry.get(RegistryModelType.TASK_DETAIL, TaskDetailConstants.NODE_ID, workflowNodeId);
for (Object o : taskDetailList) {
TaskDetails taskData = (TaskDetails) o;
//iterate through all the generated tasks and performs the job submisssion+monitoring
experiment = (Experiment) registry.get(RegistryModelType.EXPERIMENT, experimentId);
if (experiment == null) {
logger.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}", experimentId);
return false;
}
ExperimentStatus status = new ExperimentStatus();
status.setExperimentState(ExperimentState.LAUNCHED);
status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
experiment.setExperimentStatus(status);
registry.update(RegistryModelType.EXPERIMENT_STATUS, status, experimentId);
registry.update(RegistryModelType.TASK_DETAIL, taskData, taskData.getTaskID());
//launching the experiment
orchestratorClient.launchTask(taskData.getTaskID(),airavataCredStoreToken);
}
}
} catch (Exception e) {
// Here we really do not have to do much because only potential failure can happen
// is in gfac, if there are errors in gfac, it will handle the experiment/task/job statuses
// We might get failures in registry access before submitting the jobs to gfac, in that case we
// leave the status of these as created.
ExperimentStatus status = new ExperimentStatus();
status.setExperimentState(ExperimentState.FAILED);
status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
experiment.setExperimentStatus(status);
try {
registry.update(RegistryModelType.EXPERIMENT_STATUS, status, experimentId);
} catch (RegistryException e1) {
logger.errorId(experimentId, "Error while updating experiment status to " + status.toString(), e);
throw new TException(e);
}
logger.errorId(experimentId, "Error while updating task status, hence updated experiment status to " + status.toString(), e);
throw new TException(e);
}
return true;
}
private OrchestratorService.Client getOrchestratorClient() {
final int serverPort = Integer.parseInt(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ORCHESTRATOR_SERVER_PORT,"8940"));
final String serverHost = ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.ORCHESTRATOR_SERVER_HOST, null);
return OrchestratorClientFactory.createOrchestratorClient(serverHost, serverPort);
}
/**
* Clone an specified experiment with a new name. A copy of the experiment configuration is made and is persisted with new metadata.
* The client has to subsequently update this configuration if needed and launch the cloned experiment.
*
* @param existingExperimentID
* This is the experiment identifier that already exists in the system. Will use this experimentID to retrieve
* user configuration which is used with the clone experiment.
*
* @param newExperiementName
* experiment name that should be used in the cloned experiment
*
* @return
* The server-side generated airavata experiment globally unique identifier for the newly cloned experiment.
*
* @throws org.apache.airavata.model.error.InvalidRequestException
* For any incorrect forming of the request itself.
*
* @throws org.apache.airavata.model.error.ExperimentNotFoundException
* If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
*
* @throws org.apache.airavata.model.error.AiravataClientException
* The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
*
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
*
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
*
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
*
* @throws org.apache.airavata.model.error.AiravataSystemException
* This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*
*
* @param existingExperimentID
* @param newExperiementName
*/
@Override
public String cloneExperiment(String existingExperimentID, String newExperiementName) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
try {
registry = RegistryFactory.getDefaultRegistry();
if (!registry.isExist(RegistryModelType.EXPERIMENT, existingExperimentID)){
logger.errorId(existingExperimentID, "Error while cloning experiment {}, experiment doesn't exist.", existingExperimentID);
throw new ExperimentNotFoundException("Requested experiment id " + existingExperimentID + " does not exist in the system..");
}
Experiment existingExperiment = (Experiment)registry.get(RegistryModelType.EXPERIMENT, existingExperimentID);
existingExperiment.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
if (validateString(newExperiementName)){
existingExperiment.setName(newExperiementName);
}
if (existingExperiment.getWorkflowNodeDetailsList() != null){
existingExperiment.getWorkflowNodeDetailsList().clear();
}
if (existingExperiment.getErrors() != null ){
existingExperiment.getErrors().clear();
}
return (String)registry.add(ParentDataType.EXPERIMENT, existingExperiment);
} catch (Exception e) {
logger.errorId(existingExperimentID, "Error while cloning the experiment with existing configuration...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while cloning the experiment with existing configuration. More info : " + e.getMessage());
throw exception;
}
}
/**
* Terminate a running experiment.
*
* @param airavataExperimentId The identifier for the requested experiment. This is returned during the create experiment step.
* @return This method call does not have a return value.
* @throws org.apache.airavata.model.error.InvalidRequestException For any incorrect forming of the request itself.
* @throws org.apache.airavata.model.error.ExperimentNotFoundException If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
* @throws org.apache.airavata.model.error.AiravataClientException The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
* <p/>
* UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
* step, then Airavata Registry will not have a provenance area setup. The client has to follow
* gateway registration steps and retry this request.
* <p/>
* AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
* For now this is a place holder.
* <p/>
* INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
* is implemented, the authorization will be more substantial.
* @throws org.apache.airavata.model.error.AiravataSystemException This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
* rather an Airavata Administrator will be notified to take corrective action.
*/
@Override
public void terminateExperiment(String airavataExperimentId) throws InvalidRequestException, ExperimentNotFoundException, AiravataClientException, AiravataSystemException, TException {
Client client = getOrchestratorClient();
client.terminateExperiment(airavataExperimentId);
}
/**
* Register a Application Module.
*
* @param applicationModule Application Module Object created from the datamodel.
* @return appModuleId
* Returns a server-side generated airavata appModule globally unique identifier.
*/
@Override
public String registerApplicationModule(ApplicationModule applicationModule) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().addApplicationModule(applicationModule);
} catch (AppCatalogException e) {
logger.error("Error while adding application module...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding application module. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a Application Module.
*
* @param appModuleId The identifier for the requested application module
* @return applicationModule
* Returns a application Module Object.
*/
@Override
public ApplicationModule getApplicationModule(String appModuleId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getApplicationModule(appModuleId);
} catch (AppCatalogException e) {
logger.errorId(appModuleId, "Error while retrieving application module...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving the adding application module. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Application Module.
*
* @param appModuleId The identifier for the requested application module to be updated.
* @param applicationModule Application Module Object created from the datamodel.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateApplicationModule(String appModuleId, ApplicationModule applicationModule) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getApplicationInterface().updateApplicationModule(appModuleId, applicationModule);
return true;
} catch (AppCatalogException e) {
logger.errorId(appModuleId, "Error while updating application module...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating application module. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a Application Module.
*
* @param appModuleId The identifier for the requested application module to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteApplicationModule(String appModuleId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().removeApplicationModule(appModuleId);
} catch (AppCatalogException e) {
logger.errorId(appModuleId, "Error while deleting application module...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting the application module. More info : " + e.getMessage());
throw exception;
}
}
/**
* Register a Application Deployment.
*
* @param applicationDeployment@return appModuleId
* Returns a server-side generated airavata appModule globally unique identifier.
*/
@Override
public String registerApplicationDeployment(ApplicationDeploymentDescription applicationDeployment) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationDeployment().addApplicationDeployment(applicationDeployment);
} catch (AppCatalogException e) {
logger.error("Error while adding application deployment...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a Application Deployment.
*
* @param appDeploymentId The identifier for the requested application module
* @return applicationDeployment
* Returns a application Deployment Object.
*/
@Override
public ApplicationDeploymentDescription getApplicationDeployment(String appDeploymentId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationDeployment().getApplicationDeployement(appDeploymentId);
} catch (AppCatalogException e) {
logger.errorId(appDeploymentId, "Error while retrieving application deployment...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Application Deployment.
*
* @param appDeploymentId The identifier for the requested application deployment to be updated.
* @param applicationDeployment
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateApplicationDeployment(String appDeploymentId, ApplicationDeploymentDescription applicationDeployment) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getApplicationDeployment().updateApplicationDeployment(appDeploymentId, applicationDeployment);
return true;
} catch (AppCatalogException e) {
logger.errorId(appDeploymentId, "Error while updating application deployment...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a Application deployment.
*
* @param appDeploymentId The identifier for the requested application deployment to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteApplicationDeployment(String appDeploymentId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getApplicationDeployment().removeAppDeployment(appDeploymentId);
return true;
} catch (AppCatalogException e) {
logger.errorId(appDeploymentId, "Error while deleting application deployment...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a list of Deployed Compute Hosts.
*
* @param appModuleId The identifier for the requested application module
* @return list<string>
* Returns a list of Deployed Resources.
*/
@Override
public List<String> getAppModuleDeployedResources(String appModuleId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
List<String> appDeployments = new ArrayList<String>();
appCatalog = AppCatalogFactory.getAppCatalog();
Map<String, String> filters = new HashMap<String, String>();
filters.put(AbstractResource.ApplicationDeploymentConstants.APP_MODULE_ID, appModuleId);
List<ApplicationDeploymentDescription> applicationDeployments = appCatalog.getApplicationDeployment().getApplicationDeployements(filters);
for (ApplicationDeploymentDescription description : applicationDeployments){
appDeployments.add(description.getAppDeploymentId());
}
return appDeployments;
} catch (AppCatalogException e) {
logger.errorId(appModuleId, "Error while retrieving application deployments...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application deployment. More info : " + e.getMessage());
throw exception;
}
}
/**
* Register a Application Interface.
*
* @param applicationInterface@return appInterfaceId
* Returns a server-side generated airavata application interface globally unique identifier.
*/
@Override
public String registerApplicationInterface(ApplicationInterfaceDescription applicationInterface) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().addApplicationInterface(applicationInterface);
} catch (AppCatalogException e) {
logger.error("Error while adding application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a Application Interface.
*
* @param appInterfaceId The identifier for the requested application module
* @return applicationInterface
* Returns a application Interface Object.
*/
@Override
public ApplicationInterfaceDescription getApplicationInterface(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getApplicationInterface(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while retrieving application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Application Interface.
*
* @param appInterfaceId The identifier for the requested application deployment to be updated.
* @param applicationInterface
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateApplicationInterface(String appInterfaceId, ApplicationInterfaceDescription applicationInterface) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getApplicationInterface().updateApplicationInterface(appInterfaceId, applicationInterface);
return true;
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while updating application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a Application Interface.
*
* @param appInterfaceId The identifier for the requested application interface to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteApplicationInterface(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().removeApplicationInterface(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while deleting application interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting application interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch name and id of Application Interface documents.
*
* @return map<applicationId, applicationInterfaceNames>
* Returns a list of application interfaces with corresponsing id's
*/
@Override
public Map<String, String> getAllApplicationInterfaceNames() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
List<ApplicationInterfaceDescription> allApplicationInterfaces = appCatalog.getApplicationInterface().getAllApplicationInterfaces();
Map<String, String> allApplicationInterfacesMap = new HashMap<String, String>();
if (allApplicationInterfaces != null && !allApplicationInterfaces.isEmpty()){
for (ApplicationInterfaceDescription interfaceDescription : allApplicationInterfaces){
allApplicationInterfacesMap.put(interfaceDescription.getApplicationInterfaceId(), interfaceDescription.getApplicationName());
}
}
return allApplicationInterfacesMap;
} catch (AppCatalogException e) {
logger.error("Error while retrieving application interfaces...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application interfaces. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch all Application Interface documents.
*
* @return map<applicationId, applicationInterfaceNames>
* Returns a list of application interfaces documents
*/
@Override
public List<ApplicationInterfaceDescription> getAllApplicationInterfaces() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getAllApplicationInterfaces();
} catch (AppCatalogException e) {
logger.error("Error while retrieving application interfaces...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application interfaces. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the list of Application Inputs.
*
* @param appInterfaceId The identifier for the requested application interface
* @return list<applicationInterfaceModel.InputDataObjectType>
* Returns a list of application inputs.
*/
@Override
public List<InputDataObjectType> getApplicationInputs(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getApplicationInputs(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while retrieving application inputs...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application inputs. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the list of Application Outputs.
*
* @param appInterfaceId The identifier for the requested application interface
* @return list<applicationInterfaceModel.OutputDataObjectType>
* Returns a list of application outputs.
*/
@Override
public List<OutputDataObjectType> getApplicationOutputs(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getApplicationInterface().getApplicationOutputs(appInterfaceId);
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while retrieving application outputs...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving application outputs. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a list of all deployed Compute Hosts for a given application interfaces.
*
* @param appInterfaceId The identifier for the requested application interface
* @return map<computeResourceId, computeResourceName>
* A map of registered compute resource id's and their corresponding hostnames.
* Deployments of each modules listed within the interfaces will be listed.
*/
@Override
public Map<String, String> getAvailableAppInterfaceComputeResources(String appInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ApplicationDeployment applicationDeployment = appCatalog.getApplicationDeployment();
Map<String, String> allComputeResources = appCatalog.getComputeResource().getAllComputeResourceIdList();
Map<String, String> availableComputeResources = new HashMap<String, String>();
ApplicationInterfaceDescription applicationInterface =
appCatalog.getApplicationInterface().getApplicationInterface(appInterfaceId);
HashMap<String, String> filters = new HashMap<String,String>();
List<String> applicationModules = applicationInterface.getApplicationModules();
if (applicationModules != null && !applicationModules.isEmpty()){
for (String moduleId : applicationModules) {
filters.put(AbstractResource.ApplicationDeploymentConstants.APP_MODULE_ID, moduleId);
List<ApplicationDeploymentDescription> applicationDeployments =
applicationDeployment.getApplicationDeployements(filters);
for (ApplicationDeploymentDescription deploymentDescription : applicationDeployments) {
availableComputeResources.put(deploymentDescription.getComputeHostId(),
allComputeResources.get(deploymentDescription.getComputeHostId()));
}
}
}
return availableComputeResources;
} catch (AppCatalogException e) {
logger.errorId(appInterfaceId, "Error while saving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while saving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Register a Compute Resource.
*
* @param computeResourceDescription Compute Resource Object created from the datamodel.
* @return computeResourceId
* Returns a server-side generated airavata compute resource globally unique identifier.
*/
@Override
public String registerComputeResource(ComputeResourceDescription computeResourceDescription) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getComputeResource().addComputeResource(computeResourceDescription);
} catch (AppCatalogException e) {
logger.error("Error while saving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while saving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the given Compute Resource.
*
* @param computeResourceId The identifier for the requested compute resource
* @return computeResourceDescription
* Compute Resource Object created from the datamodel..
*/
@Override
public ComputeResourceDescription getComputeResource(String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getComputeResource().getComputeResource(computeResourceId);
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while retrieving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch all registered Compute Resources.
*
* @return A map of registered compute resource id's and thier corresponding hostnames.
* Compute Resource Object created from the datamodel..
*/
@Override
public Map<String, String> getAllComputeResourceNames() throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
return appCatalog.getComputeResource().getAllComputeResourceIdList();
} catch (AppCatalogException e) {
logger.error("Error while retrieving compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Compute Resource.
*
* @param computeResourceId The identifier for the requested compute resource to be updated.
* @param computeResourceDescription Compute Resource Object created from the datamodel.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateComputeResource(String computeResourceId, ComputeResourceDescription computeResourceDescription) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().updateComputeResource(computeResourceId, computeResourceDescription);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while updating compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updaing compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a Compute Resource.
*
* @param computeResourceId The identifier for the requested compute resource to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteComputeResource(String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().removeComputeResource(computeResourceId);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while deleting compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Local Job Submission details to a compute resource
* App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param localSubmission The LOCALSubmission object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addLocalSubmissionDetails(String computeResourceId, int priorityOrder, LOCALSubmission localSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addJobSubmissionInterface(computeResource, computeResourceId,
computeResource.addLocalJobSubmission(localSubmission), JobSubmissionProtocol.LOCAL, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given Local Job Submission details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param localSubmission The LOCALSubmission object to be updated.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean updateLocalSubmissionDetails(String jobSubmissionInterfaceId, LOCALSubmission localSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
LocalSubmissionResource submission = AppCatalogThriftConversion.getLocalJobSubmission(localSubmission);
submission.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
submission.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
private void addJobSubmissionInterface(ComputeResource computeResource,
String computeResourceId, String jobSubmissionInterfaceId,
JobSubmissionProtocol protocolType, int priorityOrder)
throws AppCatalogException {
JobSubmissionInterface jobSubmissionInterface = new JobSubmissionInterface();
jobSubmissionInterface.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
jobSubmissionInterface.setPriorityOrder(priorityOrder);
jobSubmissionInterface.setJobSubmissionProtocol(protocolType);
computeResource.addJobSubmissionProtocol(computeResourceId,jobSubmissionInterface);
}
/**
* Add a SSH Job Submission details to a compute resource
* App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param sshJobSubmission The SSHJobSubmission object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addSSHJobSubmissionDetails(String computeResourceId, int priorityOrder, SSHJobSubmission sshJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addJobSubmissionInterface(computeResource, computeResourceId,
computeResource.addSSHJobSubmission(sshJobSubmission), JobSubmissionProtocol.SSH, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Cloud Job Submission details to a compute resource
* App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param cloudJobSubmission The SSHJobSubmission object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addCloudJobSubmissionDetails(String computeResourceId, int priorityOrder, CloudJobSubmission cloudJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addJobSubmissionInterface(computeResource, computeResourceId,
computeResource.addCloudJobSubmission(cloudJobSubmission), JobSubmissionProtocol.CLOUD, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given SSH Job Submission details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param sshJobSubmission The SSHJobSubmission object to be updated.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean updateSSHJobSubmissionDetails(String jobSubmissionInterfaceId, SSHJobSubmission sshJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
SshJobSubmissionResource submission = AppCatalogThriftConversion.getSSHJobSubmission(sshJobSubmission);
submission.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
submission.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given SSH Job Submission details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param cloudJobSubmission The SSHJobSubmission object to be updated.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean updateCloudJobSubmissionDetails(String jobSubmissionInterfaceId, CloudJobSubmission cloudJobSubmission) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
CloudSubmissionResource submission = AppCatalogThriftConversion.getCloudJobSubmission(cloudJobSubmission);
submission.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
submission.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Local data moevement details to a compute resource
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param localDataMovement The LOCALDataMovement object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addLocalDataMovementDetails(String computeResourceId, int priorityOrder, LOCALDataMovement localDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addDataMovementInterface(computeResource, computeResourceId,
computeResource.addLocalDataMovement(localDataMovement), DataMovementProtocol.LOCAL, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding data movement interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding data movement interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given Local data movement details
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param localDataMovement The LOCALDataMovement object to be updated.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateLocalDataMovementDetails(String jobSubmissionInterfaceId, LOCALDataMovement localDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
LocalDataMovementResource movment = AppCatalogThriftConversion.getLocalDataMovement(localDataMovement);
movment.setDataMovementInterfaceId(jobSubmissionInterfaceId);
movment.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
private void addDataMovementInterface(ComputeResource computeResource,
String computeResourceId, String dataMovementInterfaceId,
DataMovementProtocol protocolType, int priorityOrder)
throws AppCatalogException {
DataMovementInterface dataMovementInterface = new DataMovementInterface();
dataMovementInterface.setDataMovementInterfaceId(dataMovementInterfaceId);
dataMovementInterface.setPriorityOrder(priorityOrder);
dataMovementInterface.setDataMovementProtocol(protocolType);
computeResource.addDataMovementProtocol(computeResourceId,dataMovementInterface);
}
/**
* Add a SCP data moevement details to a compute resource
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param scpDataMovement The SCPDataMovement object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addSCPDataMovementDetails(String computeResourceId, int priorityOrder, SCPDataMovement scpDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addDataMovementInterface(computeResource, computeResourceId,
computeResource.addScpDataMovement(scpDataMovement), DataMovementProtocol.SCP, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding data movement interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding data movement interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given scp data movement details
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param scpDataMovement The SCPDataMovement object to be updated.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateSCPDataMovementDetails(String jobSubmissionInterfaceId, SCPDataMovement scpDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
ScpDataMovementResource movment = AppCatalogThriftConversion.getSCPDataMovementDescription(scpDataMovement);
movment.setDataMovementInterfaceId(jobSubmissionInterfaceId);
movment.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a GridFTP data moevement details to a compute resource
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param computeResourceId The identifier of the compute resource to which JobSubmission protocol to be added
* @param priorityOrder Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
* @param gridFTPDataMovement The GridFTPDataMovement object to be added to the resource.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean addGridFTPDataMovementDetails(String computeResourceId, int priorityOrder, GridFTPDataMovement gridFTPDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
ComputeResource computeResource = appCatalog.getComputeResource();
addDataMovementInterface(computeResource, computeResourceId,
computeResource.addGridFTPDataMovement(gridFTPDataMovement), DataMovementProtocol.GridFTP, priorityOrder);
return true;
} catch (AppCatalogException e) {
logger.errorId(computeResourceId, "Error while adding data movement interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding data movement interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update the given GridFTP data movement details to a compute resource
* App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be updated.
* @param gridFTPDataMovement The GridFTPDataMovement object to be updated.
* @return status
* Returns a success/failure of the updation.
*/
@Override
public boolean updateGridFTPDataMovementDetails(String jobSubmissionInterfaceId, GridFTPDataMovement gridFTPDataMovement) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
GridftpDataMovementResource movment = AppCatalogThriftConversion.getGridFTPDataMovementDescription(gridFTPDataMovement);
movment.setDataMovementInterfaceId(jobSubmissionInterfaceId);
movment.save();
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while adding job submission interface to resource compute resource...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while adding job submission interface to resource compute resource. More info : " + e.getMessage());
throw exception;
}
}
    /**
     * Change the priority of a given job submission interface.
     *
     * @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be changed
     * @param newPriorityOrder the new priority value to assign
     * @return status
     * Returns a success/failure of the change.
     */
    @Override
    public boolean changeJobSubmissionPriority(String jobSubmissionInterfaceId, int newPriorityOrder) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        // NOTE(review): unimplemented stub — always reports failure without touching the catalog.
        return false;
    }
    /**
     * Change the priority of a given data movement interface.
     *
     * @param dataMovementInterfaceId The identifier of the DataMovement Interface to be changed
     * @param newPriorityOrder the new priority value to assign
     * @return status
     * Returns a success/failure of the change.
     */
    @Override
    public boolean changeDataMovementPriority(String dataMovementInterfaceId, int newPriorityOrder) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        // NOTE(review): unimplemented stub — always reports failure without touching the catalog.
        return false;
    }
    /**
     * Change the priorities of a given set of job submission interfaces.
     *
     * @param jobSubmissionPriorityMap A Map of identifiers of the JobSubmission Interfaces and their associated priorities to be set.
     * @return status
     * Returns a success/failure of the changes.
     */
    @Override
    public boolean changeJobSubmissionPriorities(Map<String, Integer> jobSubmissionPriorityMap) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
        // NOTE(review): unimplemented stub — always reports failure without touching the catalog.
        return false;
    }
/**
* Change the priorities of a given set of data movement interfaces
*
* @param dataMovementPriorityMap A Map of identifiers of the DataMovement Interfaces and thier associated priorities to be set.
* @return status
* Returns a success/failure of the changes.
*/
@Override
public boolean changeDataMovementPriorities(Map<String, Integer> dataMovementPriorityMap) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
return false;
}
/**
* Delete a given job submisison interface
*
* @param jobSubmissionInterfaceId The identifier of the JobSubmission Interface to be changed
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteJobSubmissionInterface(String jobSubmissionInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().removeJobSubmissionInterface(jobSubmissionInterfaceId);
return true;
} catch (AppCatalogException e) {
logger.errorId(jobSubmissionInterfaceId, "Error while deleting job submission interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting job submission interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete a given data movement interface
*
* @param dataMovementInterfaceId The identifier of the DataMovement Interface to be changed
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteDataMovementInterface(String dataMovementInterfaceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
appCatalog.getComputeResource().removeDataMovementInterface(dataMovementInterfaceId);
return true;
} catch (AppCatalogException e) {
logger.errorId(dataMovementInterfaceId, "Error while deleting data movement interface...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while deleting data movement interface. More info : " + e.getMessage());
throw exception;
}
}
/**
* Register a Gateway Resource Profile.
*
* @param gatewayResourceProfile Gateway Resource Profile Object.
* The GatewayID should be obtained from Airavata gateway registration and passed to register a corresponding
* resource profile.
* @return status.
* Returns a success/failure of the registration.
*/
@Override
public String registerGatewayResourceProfile(GatewayResourceProfile gatewayResourceProfile) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
return gatewayProfile.addGatewayResourceProfile(gatewayResourceProfile);
} catch (AppCatalogException e) {
logger.error("Error while registering gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while registering gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch the given Gateway Resource Profile.
*
* @param gatewayID The identifier for the requested gateway resource
* @return gatewayResourceProfile
* Gateway Resource Profile Object.
*/
@Override
public GatewayResourceProfile getGatewayResourceProfile(String gatewayID) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
return gatewayProfile.getGatewayProfile(gatewayID);
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while retrieving gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while retrieving gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Gateway Resource Profile.
*
* @param gatewayID The identifier for the requested gateway resource to be updated.
* @param gatewayResourceProfile Gateway Resource Profile Object.
* @return status
* Returns a success/failure of the update.
*/
@Override
public boolean updateGatewayResourceProfile(String gatewayID, GatewayResourceProfile gatewayResourceProfile) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
gatewayProfile.updateGatewayResourceProfile(gatewayID, gatewayResourceProfile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while updating gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete the given Gateway Resource Profile.
*
* @param gatewayID The identifier for the requested gateway resource to be deleted.
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteGatewayResourceProfile(String gatewayID) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
gatewayProfile.removeGatewayResourceProfile(gatewayID);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while removing gateway resource profile...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while removing gateway resource profile. More info : " + e.getMessage());
throw exception;
}
}
/**
* Add a Compute Resource Preference to a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be added.
* @param computeResourceId Preferences related to a particular compute resource
* @param computeResourcePreference The ComputeResourcePreference object to be added to the resource profile.
* @return status
* Returns a success/failure of the addition. If a profile already exists, this operation will fail.
* Instead an update should be used.
*/
@Override
public boolean addGatewayComputeResourcePreference(String gatewayID, String computeResourceId, ComputeResourcePreference computeResourcePreference) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
if (!gatewayProfile.isGatewayResourceProfileExists(gatewayID)){
throw new AppCatalogException("Gateway resource profile '"+gatewayID+"' does not exist!!!");
}
GatewayResourceProfile profile = gatewayProfile.getGatewayProfile(gatewayID);
// gatewayProfile.removeGatewayResourceProfile(gatewayID);
profile.addToComputeResourcePreferences(computeResourcePreference);
gatewayProfile.updateGatewayResourceProfile(gatewayID, profile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while registering gateway resource profile preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while registering gateway resource profile preference. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch a Compute Resource Preference of a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be requested
* @param computeResourceId Preferences related to a particular compute resource
* @return computeResourcePreference
* Returns the ComputeResourcePreference object.
*/
@Override
public ComputeResourcePreference getGatewayComputeResourcePreference(String gatewayID, String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
ComputeResource computeResource = appCatalog.getComputeResource();
if (!gatewayProfile.isGatewayResourceProfileExists(gatewayID)){
logger.errorId(gatewayID, "Given gateway profile does not exist in the system. Please provide a valid gateway id...");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Given gateway profile does not exist in the system. Please provide a valid gateway id...");
throw exception;
}
if (!computeResource.isComputeResourceExists(computeResourceId)){
logger.errorId(computeResourceId, "Given compute resource does not exist in the system. Please provide a valid compute resource id...");
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Given compute resource does not exist in the system. Please provide a valid compute resource id...");
throw exception;
}
return gatewayProfile.getComputeResourcePreference(gatewayID, computeResourceId);
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while reading gateway compute resource preference. More info : " + e.getMessage());
throw exception;
}
}
/**
* Fetch all Compute Resource Preferences of a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be requested
* @return computeResourcePreference
* Returns the ComputeResourcePreference object.
*/
@Override
public List<ComputeResourcePreference> getAllGatewayComputeResourcePreferences(String gatewayID) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
return gatewayProfile.getGatewayProfile(gatewayID).getComputeResourcePreferences();
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preferences...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while reading gateway compute resource preferences. More info : " + e.getMessage());
throw exception;
}
}
/**
* Update a Compute Resource Preference to a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be updated.
* @param computeResourceId Preferences related to a particular compute resource
* @param computeResourcePreference The ComputeResourcePreference object to be updated to the resource profile.
* @return status
* Returns a success/failure of the updation.
*/
@Override
public boolean updateGatewayComputeResourcePreference(String gatewayID, String computeResourceId, ComputeResourcePreference computeResourcePreference) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
GatewayResourceProfile profile = gatewayProfile.getGatewayProfile(gatewayID);
List<ComputeResourcePreference> computeResourcePreferences = profile.getComputeResourcePreferences();
ComputeResourcePreference preferenceToRemove = null;
for (ComputeResourcePreference preference : computeResourcePreferences) {
if (preference.getComputeResourceId().equals(computeResourceId)){
preferenceToRemove=preference;
break;
}
}
if (preferenceToRemove!=null) {
profile.getComputeResourcePreferences().remove(
preferenceToRemove);
}
profile.getComputeResourcePreferences().add(computeResourcePreference);
gatewayProfile.updateGatewayResourceProfile(gatewayID, profile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating gateway compute resource preference. More info : " + e.getMessage());
throw exception;
}
}
/**
* Delete the Compute Resource Preference of a registered gateway profile.
*
* @param gatewayID The identifier for the gateway profile to be deleted.
* @param computeResourceId Preferences related to a particular compute resource
* @return status
* Returns a success/failure of the deletion.
*/
@Override
public boolean deleteGatewayComputeResourcePreference(String gatewayID, String computeResourceId) throws InvalidRequestException, AiravataClientException, AiravataSystemException, TException {
try {
appCatalog = AppCatalogFactory.getAppCatalog();
GwyResourceProfile gatewayProfile = appCatalog.getGatewayProfile();
GatewayResourceProfile profile = gatewayProfile.getGatewayProfile(gatewayID);
List<ComputeResourcePreference> computeResourcePreferences = profile.getComputeResourcePreferences();
ComputeResourcePreference preferenceToRemove = null;
for (ComputeResourcePreference preference : computeResourcePreferences) {
if (preference.getComputeResourceId().equals(computeResourceId)){
preferenceToRemove=preference;
break;
}
}
if (preferenceToRemove!=null) {
profile.getComputeResourcePreferences().remove(
preferenceToRemove);
}
gatewayProfile.updateGatewayResourceProfile(gatewayID, profile);
return true;
} catch (AppCatalogException e) {
logger.errorId(gatewayID, "Error while reading gateway compute resource preference...", e);
AiravataSystemException exception = new AiravataSystemException();
exception.setAiravataErrorType(AiravataErrorType.INTERNAL_ERROR);
exception.setMessage("Error while updating gateway compute resource preference. More info : " + e.getMessage());
throw exception;
}
}
}
| closing the thrift transport connection during gfac invocation
| airavata-api/airavata-api-server/src/main/java/org/apache/airavata/api/server/handler/AiravataServerHandler.java | closing the thrift transport connection during gfac invocation | <ide><path>iravata-api/airavata-api-server/src/main/java/org/apache/airavata/api/server/handler/AiravataServerHandler.java
<ide> registry.update(RegistryModelType.EXPERIMENT_STATUS, status, experimentId);
<ide> registry.update(RegistryModelType.TASK_DETAIL, taskData, taskData.getTaskID());
<ide> //launching the experiment
<del> orchestratorClient.launchTask(taskData.getTaskID(),airavataCredStoreToken);
<add> orchestratorClient.launchTask(taskData.getTaskID(), airavataCredStoreToken);
<ide> }
<ide> }
<ide>
<ide> }
<ide> logger.errorId(experimentId, "Error while updating task status, hence updated experiment status to " + status.toString(), e);
<ide> throw new TException(e);
<add> }finally {
<add> orchestratorClient.getInputProtocol().getTransport().close();
<ide> }
<ide> return true;
<ide> } |
|
Java | apache-2.0 | error: pathspec 'projects/OG-Financial/src/main/java/com/opengamma/financial/convention/InterestRateFutureConvention.java' did not match any file(s) known to git
| d788e3e5fab075f0e080b6e21c18966107cac952 | 1 | DevStreet/FinanceAnalytics,nssales/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,nssales/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.convention;
import org.joda.beans.BeanDefinition;
import org.joda.beans.PropertyDefinition;
import com.opengamma.id.ExternalIdBundle;
import java.util.Map;
import org.joda.beans.BeanBuilder;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
/**
*
*/
@BeanDefinition
public class InterestRateFutureConvention extends Convention {
/** Serialization version. */
private static final long serialVersionUID = 1L;
/**
* The index convention.
*/
@PropertyDefinition(validate = "notNull")
private String _indexConvention;
/**
* The expiry convention.
*/
@PropertyDefinition(validate = "notNull")
private String _expiryConvention;
/**
* For the builder.
*/
public InterestRateFutureConvention() {
}
public InterestRateFutureConvention(final String name, final ExternalIdBundle externalIdBundle, final String indexConvention,
final String expiryConvention) {
super(name, externalIdBundle);
setIndexConvention(indexConvention);
setExpiryConvention(expiryConvention);
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code InterestRateFutureConvention}.
* @return the meta-bean, not null
*/
public static InterestRateFutureConvention.Meta meta() {
return InterestRateFutureConvention.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(InterestRateFutureConvention.Meta.INSTANCE);
}
@Override
public InterestRateFutureConvention.Meta metaBean() {
return InterestRateFutureConvention.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -668532253: // indexConvention
return getIndexConvention();
case 2143523076: // expiryConvention
return getExpiryConvention();
}
return super.propertyGet(propertyName, quiet);
}
@Override
protected void propertySet(String propertyName, Object newValue, boolean quiet) {
switch (propertyName.hashCode()) {
case -668532253: // indexConvention
setIndexConvention((String) newValue);
return;
case 2143523076: // expiryConvention
setExpiryConvention((String) newValue);
return;
}
super.propertySet(propertyName, newValue, quiet);
}
@Override
protected void validate() {
JodaBeanUtils.notNull(_indexConvention, "indexConvention");
JodaBeanUtils.notNull(_expiryConvention, "expiryConvention");
super.validate();
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
InterestRateFutureConvention other = (InterestRateFutureConvention) obj;
return JodaBeanUtils.equal(getIndexConvention(), other.getIndexConvention()) &&
JodaBeanUtils.equal(getExpiryConvention(), other.getExpiryConvention()) &&
super.equals(obj);
}
return false;
}
@Override
public int hashCode() {
int hash = 7;
hash += hash * 31 + JodaBeanUtils.hashCode(getIndexConvention());
hash += hash * 31 + JodaBeanUtils.hashCode(getExpiryConvention());
return hash ^ super.hashCode();
}
//-----------------------------------------------------------------------
/**
* Gets the index convention.
* @return the value of the property, not null
*/
public String getIndexConvention() {
return _indexConvention;
}
/**
* Sets the index convention.
* @param indexConvention the new value of the property, not null
*/
public void setIndexConvention(String indexConvention) {
JodaBeanUtils.notNull(indexConvention, "indexConvention");
this._indexConvention = indexConvention;
}
/**
* Gets the the {@code indexConvention} property.
* @return the property, not null
*/
public final Property<String> indexConvention() {
return metaBean().indexConvention().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the expiry convention.
* @return the value of the property, not null
*/
public String getExpiryConvention() {
return _expiryConvention;
}
/**
* Sets the expiry convention.
* @param expiryConvention the new value of the property, not null
*/
public void setExpiryConvention(String expiryConvention) {
JodaBeanUtils.notNull(expiryConvention, "expiryConvention");
this._expiryConvention = expiryConvention;
}
/**
* Gets the the {@code expiryConvention} property.
* @return the property, not null
*/
public final Property<String> expiryConvention() {
return metaBean().expiryConvention().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code InterestRateFutureConvention}.
*/
public static class Meta extends Convention.Meta {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code indexConvention} property.
*/
private final MetaProperty<String> _indexConvention = DirectMetaProperty.ofReadWrite(
this, "indexConvention", InterestRateFutureConvention.class, String.class);
/**
* The meta-property for the {@code expiryConvention} property.
*/
private final MetaProperty<String> _expiryConvention = DirectMetaProperty.ofReadWrite(
this, "expiryConvention", InterestRateFutureConvention.class, String.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, (DirectMetaPropertyMap) super.metaPropertyMap(),
"indexConvention",
"expiryConvention");
/**
* Restricted constructor.
*/
protected Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -668532253: // indexConvention
return _indexConvention;
case 2143523076: // expiryConvention
return _expiryConvention;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends InterestRateFutureConvention> builder() {
return new DirectBeanBuilder<InterestRateFutureConvention>(new InterestRateFutureConvention());
}
@Override
public Class<? extends InterestRateFutureConvention> beanType() {
return InterestRateFutureConvention.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code indexConvention} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> indexConvention() {
return _indexConvention;
}
/**
* The meta-property for the {@code expiryConvention} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> expiryConvention() {
return _expiryConvention;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| projects/OG-Financial/src/main/java/com/opengamma/financial/convention/InterestRateFutureConvention.java | [PLAT-3091] Conventions for interest rate futures
| projects/OG-Financial/src/main/java/com/opengamma/financial/convention/InterestRateFutureConvention.java | [PLAT-3091] Conventions for interest rate futures | <ide><path>rojects/OG-Financial/src/main/java/com/opengamma/financial/convention/InterestRateFutureConvention.java
<add>/**
<add> * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
<add> *
<add> * Please see distribution for license.
<add> */
<add>package com.opengamma.financial.convention;
<add>
<add>import org.joda.beans.BeanDefinition;
<add>import org.joda.beans.PropertyDefinition;
<add>
<add>import com.opengamma.id.ExternalIdBundle;
<add>import java.util.Map;
<add>import org.joda.beans.BeanBuilder;
<add>import org.joda.beans.JodaBeanUtils;
<add>import org.joda.beans.MetaProperty;
<add>import org.joda.beans.Property;
<add>import org.joda.beans.impl.direct.DirectBeanBuilder;
<add>import org.joda.beans.impl.direct.DirectMetaProperty;
<add>import org.joda.beans.impl.direct.DirectMetaPropertyMap;
<add>
<add>/**
<add> *
<add> */
<add>@BeanDefinition
<add>public class InterestRateFutureConvention extends Convention {
<add>
<add> /** Serialization version. */
<add> private static final long serialVersionUID = 1L;
<add>
<add> /**
<add> * The index convention.
<add> */
<add> @PropertyDefinition(validate = "notNull")
<add> private String _indexConvention;
<add>
<add> /**
<add> * The expiry convention.
<add> */
<add> @PropertyDefinition(validate = "notNull")
<add> private String _expiryConvention;
<add>
<add> /**
<add> * For the builder.
<add> */
<add> public InterestRateFutureConvention() {
<add> }
<add>
<add> public InterestRateFutureConvention(final String name, final ExternalIdBundle externalIdBundle, final String indexConvention,
<add> final String expiryConvention) {
<add> super(name, externalIdBundle);
<add> setIndexConvention(indexConvention);
<add> setExpiryConvention(expiryConvention);
<add> }
<add>
<add> //------------------------- AUTOGENERATED START -------------------------
<add> ///CLOVER:OFF
<add> /**
<add> * The meta-bean for {@code InterestRateFutureConvention}.
<add> * @return the meta-bean, not null
<add> */
<add> public static InterestRateFutureConvention.Meta meta() {
<add> return InterestRateFutureConvention.Meta.INSTANCE;
<add> }
<add> static {
<add> JodaBeanUtils.registerMetaBean(InterestRateFutureConvention.Meta.INSTANCE);
<add> }
<add>
<add> @Override
<add> public InterestRateFutureConvention.Meta metaBean() {
<add> return InterestRateFutureConvention.Meta.INSTANCE;
<add> }
<add>
<add> @Override
<add> protected Object propertyGet(String propertyName, boolean quiet) {
<add> switch (propertyName.hashCode()) {
<add> case -668532253: // indexConvention
<add> return getIndexConvention();
<add> case 2143523076: // expiryConvention
<add> return getExpiryConvention();
<add> }
<add> return super.propertyGet(propertyName, quiet);
<add> }
<add>
<add> @Override
<add> protected void propertySet(String propertyName, Object newValue, boolean quiet) {
<add> switch (propertyName.hashCode()) {
<add> case -668532253: // indexConvention
<add> setIndexConvention((String) newValue);
<add> return;
<add> case 2143523076: // expiryConvention
<add> setExpiryConvention((String) newValue);
<add> return;
<add> }
<add> super.propertySet(propertyName, newValue, quiet);
<add> }
<add>
<add> @Override
<add> protected void validate() {
<add> JodaBeanUtils.notNull(_indexConvention, "indexConvention");
<add> JodaBeanUtils.notNull(_expiryConvention, "expiryConvention");
<add> super.validate();
<add> }
<add>
<add> @Override
<add> public boolean equals(Object obj) {
<add> if (obj == this) {
<add> return true;
<add> }
<add> if (obj != null && obj.getClass() == this.getClass()) {
<add> InterestRateFutureConvention other = (InterestRateFutureConvention) obj;
<add> return JodaBeanUtils.equal(getIndexConvention(), other.getIndexConvention()) &&
<add> JodaBeanUtils.equal(getExpiryConvention(), other.getExpiryConvention()) &&
<add> super.equals(obj);
<add> }
<add> return false;
<add> }
<add>
<add> @Override
<add> public int hashCode() {
<add> int hash = 7;
<add> hash += hash * 31 + JodaBeanUtils.hashCode(getIndexConvention());
<add> hash += hash * 31 + JodaBeanUtils.hashCode(getExpiryConvention());
<add> return hash ^ super.hashCode();
<add> }
<add>
<add> //-----------------------------------------------------------------------
<add> /**
<add> * Gets the index convention.
<add> * @return the value of the property, not null
<add> */
<add> public String getIndexConvention() {
<add> return _indexConvention;
<add> }
<add>
<add> /**
<add> * Sets the index convention.
<add> * @param indexConvention the new value of the property, not null
<add> */
<add> public void setIndexConvention(String indexConvention) {
<add> JodaBeanUtils.notNull(indexConvention, "indexConvention");
<add> this._indexConvention = indexConvention;
<add> }
<add>
<add> /**
<add> * Gets the the {@code indexConvention} property.
<add> * @return the property, not null
<add> */
<add> public final Property<String> indexConvention() {
<add> return metaBean().indexConvention().createProperty(this);
<add> }
<add>
<add> //-----------------------------------------------------------------------
<add> /**
<add> * Gets the expiry convention.
<add> * @return the value of the property, not null
<add> */
<add> public String getExpiryConvention() {
<add> return _expiryConvention;
<add> }
<add>
<add> /**
<add> * Sets the expiry convention.
<add> * @param expiryConvention the new value of the property, not null
<add> */
<add> public void setExpiryConvention(String expiryConvention) {
<add> JodaBeanUtils.notNull(expiryConvention, "expiryConvention");
<add> this._expiryConvention = expiryConvention;
<add> }
<add>
<add> /**
<add> * Gets the the {@code expiryConvention} property.
<add> * @return the property, not null
<add> */
<add> public final Property<String> expiryConvention() {
<add> return metaBean().expiryConvention().createProperty(this);
<add> }
<add>
<add> //-----------------------------------------------------------------------
<add> /**
<add> * The meta-bean for {@code InterestRateFutureConvention}.
<add> */
<add> public static class Meta extends Convention.Meta {
<add> /**
<add> * The singleton instance of the meta-bean.
<add> */
<add> static final Meta INSTANCE = new Meta();
<add>
<add> /**
<add> * The meta-property for the {@code indexConvention} property.
<add> */
<add> private final MetaProperty<String> _indexConvention = DirectMetaProperty.ofReadWrite(
<add> this, "indexConvention", InterestRateFutureConvention.class, String.class);
<add> /**
<add> * The meta-property for the {@code expiryConvention} property.
<add> */
<add> private final MetaProperty<String> _expiryConvention = DirectMetaProperty.ofReadWrite(
<add> this, "expiryConvention", InterestRateFutureConvention.class, String.class);
<add> /**
<add> * The meta-properties.
<add> */
<add> private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
<add> this, (DirectMetaPropertyMap) super.metaPropertyMap(),
<add> "indexConvention",
<add> "expiryConvention");
<add>
<add> /**
<add> * Restricted constructor.
<add> */
<add> protected Meta() {
<add> }
<add>
<add> @Override
<add> protected MetaProperty<?> metaPropertyGet(String propertyName) {
<add> switch (propertyName.hashCode()) {
<add> case -668532253: // indexConvention
<add> return _indexConvention;
<add> case 2143523076: // expiryConvention
<add> return _expiryConvention;
<add> }
<add> return super.metaPropertyGet(propertyName);
<add> }
<add>
<add> @Override
<add> public BeanBuilder<? extends InterestRateFutureConvention> builder() {
<add> return new DirectBeanBuilder<InterestRateFutureConvention>(new InterestRateFutureConvention());
<add> }
<add>
<add> @Override
<add> public Class<? extends InterestRateFutureConvention> beanType() {
<add> return InterestRateFutureConvention.class;
<add> }
<add>
<add> @Override
<add> public Map<String, MetaProperty<?>> metaPropertyMap() {
<add> return _metaPropertyMap$;
<add> }
<add>
<add> //-----------------------------------------------------------------------
<add> /**
<add> * The meta-property for the {@code indexConvention} property.
<add> * @return the meta-property, not null
<add> */
<add> public final MetaProperty<String> indexConvention() {
<add> return _indexConvention;
<add> }
<add>
<add> /**
<add> * The meta-property for the {@code expiryConvention} property.
<add> * @return the meta-property, not null
<add> */
<add> public final MetaProperty<String> expiryConvention() {
<add> return _expiryConvention;
<add> }
<add>
<add> }
<add>
<add> ///CLOVER:ON
<add> //-------------------------- AUTOGENERATED END --------------------------
<add>} |
|
/*
* Copyright 2015 Axel Howind <[email protected]>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dua3.meja.ui.swing;
import com.dua3.meja.model.BorderStyle;
import com.dua3.meja.model.Cell;
import com.dua3.meja.model.CellStyle;
import com.dua3.meja.model.CellType;
import com.dua3.meja.model.Direction;
import com.dua3.meja.model.FillPattern;
import com.dua3.meja.model.Font;
import com.dua3.meja.model.Row;
import com.dua3.meja.model.Sheet;
import com.dua3.meja.util.Cache;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Stroke;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.font.FontRenderContext;
import java.awt.font.LineBreakMeasurer;
import java.awt.font.TextLayout;
import java.awt.geom.AffineTransform;
import java.text.AttributedCharacterIterator;
import java.text.AttributedString;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import javax.swing.Scrollable;
import javax.swing.SwingConstants;
/**
*
* @author axel
*/
public class SheetView extends JPanel implements Scrollable {
    // Cache mapping stroke width -> AWT stroke so each stroke is created only once.
    Cache<Float, java.awt.Stroke> strokeCache = new Cache<Float, java.awt.Stroke>() {
        @Override
        protected java.awt.Stroke create(Float width) {
            return new BasicStroke(width);
        }
    };
    // Cache mapping meja font -> AWT font so each font is translated only once.
    Cache<Font, java.awt.Font> fontCache = new Cache<Font, java.awt.Font>() {
        @Override
        protected java.awt.Font create(Font font) {
            // translate the bold/italic flags into the AWT style bit mask
            int style = (font.isBold() ? java.awt.Font.BOLD : 0) | (font.isItalic() ? java.awt.Font.ITALIC : 0);
            return new java.awt.Font(font.getFamily(), style, (int) Math.round(font.getSizeInPoints()));
        }
    };
    // Scaling factor applied when converting sheet units to screen pixels
    // (recomputed from the screen resolution in update()).
    float scale = 1;
    // Cumulative x-coordinates of column boundaries; columnPos[j] is the left
    // edge of column j (rebuilt in update()).
    int columnPos[];
    // Cumulative y-coordinates of row boundaries; rowPos[i] is the top edge of
    // row i (rebuilt in update()).
    int rowPos[];
    // Total sheet width in pixels (last entry of columnPos).
    int sheetWidth;
    // Total sheet height in pixels (last entry of rowPos).
    int sheetHeight;
    // Column number of the currently selected cell.
    int currentColNum;
    // Row number of the currently selected cell.
    int currentRowNum;
    // The sheet displayed by this view; may be null.
    private Sheet sheet;
    // Color used for the grid lines between cells.
    private Color gridColor = Color.LIGHT_GRAY;
    /**
     * Horizontal padding.
     */
    private final int paddingX = 2;
    /**
     * Vertical padding.
     */
    private final int paddingY = 1;
    /**
     * Color used to draw the selection rectangle.
     */
    private Color selectionColor = Color.BLACK;
    /**
     * Width of the selection rectangle borders.
     */
    private final int selectionStrokeWidth = 4;
    /**
     * Stroke used to draw the selection rectangle.
     */
    private Stroke selectionStroke = getStroke((float) selectionStrokeWidth);
    /**
     * Active clip bounds when drawing.
     */
    private final Rectangle clipBounds = new Rectangle();
/**
* Move the selection rectangle to an adjacent cell.
* @param d direction
*/
private void move(Direction d) {
Cell cell = getCurrentCell().getLogicalCell();
switch (d) {
case NORTH:
setCurrentRowNum(cell.getRowNumber() - 1);
break;
case SOUTH:
setCurrentRowNum(cell.getRowNumber() + cell.getVerticalSpan());
break;
case WEST:
setCurrentColNum(cell.getColumnNumber() - 1);
break;
case EAST:
setCurrentColNum(cell.getColumnNumber() + cell.getHorizontalSpan());
break;
}
scrollToCurrentCell();
}
/**
* Get display coordinates of selection rectangle.
* @return selection rectangle in display coordinates
*/
private Rectangle getSelectionRect() {
Rectangle cellRect = getCellRect(getCurrentCell().getLogicalCell());
int extra = (selectionStrokeWidth + 1) / 2;
cellRect.x -= extra;
cellRect.y -= extra;
cellRect.width += 2 * extra;
cellRect.height += 2 * extra;
return cellRect;
}
    /**
     * Scroll the currently selected cell into view.
     * Uses the logical (top-left) cell so that merged regions are shown whole.
     */
    public void scrollToCurrentCell() {
        ensureCellIsVisibile(getCurrentCell().getLogicalCell());
    }
    /**
     * Scroll cell into view.
     * NOTE(review): the method name contains a typo ("Visibile") but is part of
     * the public API, so it is kept for compatibility.
     * @param cell the cell to scroll to
     */
    public void ensureCellIsVisibile(Cell cell) {
        scrollRectToVisible(getCellRect(cell));
    }
public Rectangle getCellRect(Cell cell) {
final int i = cell.getRowNumber();
final int j = cell.getColumnNumber();
final int y = rowPos[i];
final int h = rowPos[i + cell.getVerticalSpan()] - y;
final int x = columnPos[j];
final int w = columnPos[cell.getColumnNumber() + cell.getHorizontalSpan()] - x;
return new Rectangle(x, y, w, h);
}
    /**
     * Get the row number of the currently selected cell.
     * @return current row number
     */
    public int getCurrentRowNum() {
        return currentRowNum;
    }
    /**
     * Select the cell in the given row, keeping the current column.
     * @param rowNum row number to select (clamped to the sheet's bounds)
     */
    public void setCurrentRowNum(int rowNum) {
        setCurrent(rowNum, currentColNum);
    }
    /**
     * Get the column number of the currently selected cell.
     * @return current column number
     */
    public int getCurrentColNum() {
        return currentColNum;
    }
    /**
     * Select the cell in the given column, keeping the current row.
     * @param colNum column number to select (clamped to the sheet's bounds)
     */
    public void setCurrentColNum(int colNum) {
        setCurrent(currentRowNum, colNum);
    }
public void setCurrent(int rowNum, int colNum) {
int oldRowNum = currentRowNum;
int newRowNum = Math.max(sheet.getFirstRowNum(), Math.min(sheet.getLastRowNum(), rowNum));
int oldColNum = currentColNum;
int newColNum = Math.max(sheet.getFirstColNum(), Math.min(sheet.getLastColNum(), colNum));
if (newRowNum != oldRowNum || newColNum != oldColNum) {
// get old selection for repainting
Rectangle oldRect = getSelectionRect();
// update current position
currentRowNum = newRowNum;
currentColNum = newColNum;
// get new selection for repainting
Rectangle newRect = getSelectionRect();
repaint(oldRect);
repaint(newRect);
}
}
static enum Actions {
MOVE_UP {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_UP") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.NORTH);
}
};
}
},
MOVE_DOWN {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_DOWN") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.SOUTH);
}
};
}
},
MOVE_LEFT {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_LEFT") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.WEST);
}
};
}
},
MOVE_RIGHT {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_RIGHT") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.EAST);
}
};
}
};
abstract Action getAction(SheetView view);
}
    /**
     * Create an empty sheet view with no sheet attached.
     */
    public SheetView() {
        this(null);
    }
    /**
     * Create a sheet view displaying the given sheet.
     * @param sheet the sheet to display, may be null
     */
    public SheetView(Sheet sheet) {
        init();
        setSheet(sheet);
    }
    /**
     * Set the sheet displayed by this view, reset the selection to the
     * top-left cell, and recompute the layout.
     * @param sheet1 the sheet to display, may be null
     */
    public void setSheet(Sheet sheet1) {
        this.sheet = sheet1;
        this.currentRowNum = 0;
        this.currentColNum = 0;
        update();
    }
private void init() {
// setup input map for keyboard navigation
final InputMap inputMap = getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
inputMap.put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_UP, 0), Actions.MOVE_UP);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_UP, 0), Actions.MOVE_UP);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_DOWN, 0), Actions.MOVE_DOWN);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_DOWN, 0), Actions.MOVE_DOWN);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_LEFT, 0), Actions.MOVE_LEFT);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_LEFT, 0), Actions.MOVE_LEFT);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_RIGHT, 0), Actions.MOVE_RIGHT);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_RIGHT, 0), Actions.MOVE_RIGHT);
final ActionMap actionMap = getActionMap();
for (Actions action : Actions.values()) {
actionMap.put(action, action.getAction(this));
}
// listen to mouse events
addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e) {
int row = getRowNumberFromY(e.getY());
int col = getColumnNumberFromX(e.getX());
setCurrent(row, col);
requestFocusInWindow();
}
});
// make focusable
setFocusable(true);
requestFocusInWindow();
}
    /**
     * Set the grid color.
     *
     * @param gridColor the color used to draw grid lines between cells
     */
    public void setGridColor(Color gridColor) {
        this.gridColor = gridColor;
    }
    /**
     * Get the grid color.
     *
     * @return color of grid
     */
    public Color getGridColor() {
        return gridColor;
    }
    /**
     * Update sheet layout data.
     * Recomputes the pixel scale from the screen resolution and rebuilds the
     * cumulative row/column boundary arrays ({@code rowPos}/{@code columnPos}).
     */
    private void update() {
        int dpi = Toolkit.getDefaultToolkit().getScreenResolution();
        // scale converts sheet units to pixels; dpi/72 suggests sheet
        // dimensions are in points (1/72 inch) - TODO confirm against Sheet
        scale = dpi / 72f;
        if (sheet == null) {
            // no sheet: collapse the layout to a single zero boundary
            sheetWidth = 0;
            sheetHeight = 0;
            rowPos = new int[]{0};
            columnPos = new int[]{0};
            return;
        }
        // accumulate row heights; rowPos[i] is the y-coordinate of row i's top edge
        sheetHeight = 0;
        rowPos = new int[2 + sheet.getLastRowNum()];
        rowPos[0] = 0;
        for (int i = 1; i < rowPos.length; i++) {
            sheetHeight += Math.round(sheet.getRowHeight(i - 1) * scale);
            rowPos[i] = sheetHeight;
        }
        // accumulate column widths; columnPos[j] is the x-coordinate of column j's left edge
        sheetWidth = 0;
        columnPos = new int[2 + sheet.getLastColNum()];
        columnPos[0] = 0;
        for (int j = 1; j < columnPos.length; j++) {
            sheetWidth += Math.round(sheet.getColumnWidth(j - 1) * scale);
            columnPos[j] = sheetWidth;
        }
        // layout changed, so the preferred size may have changed as well
        revalidate();
    }
    @Override
    public Dimension getPreferredScrollableViewportSize() {
        // the viewport should be sized to the full sheet if possible
        return getPreferredSize();
    }
    /**
     * Scroll by one cell: the increment is the distance to the next row/column
     * boundary in the scroll direction.
     */
    @Override
    public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
        if (orientation == SwingConstants.VERTICAL) {
            // scroll vertical
            if (direction < 0) {
                //scroll up
                final int y = visibleRect.y;
                int yPrevious = 0;
                // find the first row boundary at or below the top edge and
                // return the distance back to the previous boundary
                for (int i = 0; i < rowPos.length; i++) {
                    if (rowPos[i] >= y) {
                        return y - yPrevious;
                    }
                    yPrevious = rowPos[i];
                }
                // should never be reached
                return 0;
            } else {
                // scroll down
                final int y = visibleRect.y + visibleRect.height;
                // distance from the bottom edge to the next row boundary below it
                for (int i = 0; i < rowPos.length; i++) {
                    if (rowPos[i] > y) {
                        return rowPos[i] - y;
                    }
                }
                // should never be reached
                return 0;
            }
        } else {
            // scroll horizontal
            if (direction < 0) {
                //scroll left
                final int x = visibleRect.x;
                int xPrevious = 0;
                // find the first column boundary at or right of the left edge
                // and return the distance back to the previous boundary
                for (int j = 0; j < columnPos.length; j++) {
                    if (columnPos[j] >= x) {
                        return x - xPrevious;
                    }
                    xPrevious = columnPos[j];
                }
                // should never be reached
                return 0;
            } else {
                // scroll down
                final int x = visibleRect.x + visibleRect.width;
                // distance from the right edge to the next column boundary
                for (int j = 0; j < columnPos.length; j++) {
                    if (columnPos[j] > x) {
                        return columnPos[j] - x;
                    }
                }
                // should never be reached
                return 0;
            }
        }
    }
/**
* Get the row number that the given y-coordinate belongs to.
*
* @param y y-coordinate
*
* @return
* <ul>
* <li> -1, if the first row is displayed below the given coordinate
* <li> number of rows, if the lower edge of the last row is displayed above
* the given coordinate
* <li> the number of the row that belongs to the given coordinate
* </ul>
*/
public int getRowNumberFromY(int y) {
int i = 0;
while (i < rowPos.length && rowPos[i] <= y) {
i++;
}
return i - 1;
}
/**
* Get the column number that the given x-coordinate belongs to.
*
* @param x x-coordinate
*
* @return
* <ul>
* <li> -1, if the first column is displayed to the right of the given
* coordinate
* <li> number of columns, if the right edge of the last column is displayed
* to the left of the given coordinate
* <li> the number of the column that belongs to the given coordinate
* </ul>
*/
public int getColumnNumberFromX(int x) {
int j = 0;
while (j < columnPos.length && columnPos[j] <= x) {
j++;
}
return j - 1;
}
    @Override
    public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
        // a block scroll moves three unit increments (three cells)
        return 3 * getScrollableUnitIncrement(visibleRect, orientation, direction);
    }
    @Override
    public boolean getScrollableTracksViewportWidth() {
        // the sheet has a fixed width independent of the viewport
        return false;
    }
    @Override
    public boolean getScrollableTracksViewportHeight() {
        // the sheet has a fixed height independent of the viewport
        return false;
    }
    @Override
    public Dimension getPreferredSize() {
        // +1 so the rightmost/bottommost grid lines are not clipped
        return new Dimension(sheetWidth + 1, sheetHeight + 1);
    }
    /**
     * Paint the sheet: grid first, then cell backgrounds, borders, and
     * foregrounds in separate passes, and finally the selection frame on top.
     */
    @Override
    protected void paintComponent(Graphics g) {
        Graphics2D g2d = (Graphics2D) g;
        // remember the clip so the draw methods can restrict work to the
        // visible region
        g2d.getClipBounds(clipBounds);
        g2d.clearRect(clipBounds.x, clipBounds.y, clipBounds.width, clipBounds.height);
        g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        g2d.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_GASP);
        g2d.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON);
        drawGrid(g2d);
        // three passes so that overflowing text is never hidden behind a
        // neighboring cell's background or border
        drawCells(g2d, CellDrawMode.DRAW_CELL_BACKGROUND);
        drawCells(g2d, CellDrawMode.DRAW_CELL_BORDER);
        drawCells(g2d, CellDrawMode.DRAW_CELL_FOREGROUND);
        drawSelection(g2d);
    }
    /**
     * Draw the grid lines between rows and columns, restricted to the
     * currently visible clip region.
     *
     * @param g the graphics context to use
     */
    private void drawGrid(Graphics2D g) {
        g.setColor(gridColor);
        final int minY = clipBounds.y;
        final int maxY = clipBounds.y + clipBounds.height;
        final int minX = clipBounds.x;
        final int maxX = clipBounds.x + clipBounds.width;
        // draw horizontal grid lines
        for (int gridY : rowPos) {
            if (gridY < minY) {
                // visible region not reached
                continue;
            }
            if (gridY > maxY) {
                // out of visible region
                break;
            }
            g.drawLine(minX, gridY, maxX, gridY);
        }
        // draw vertical grid lines
        for (int gridX : columnPos) {
            if (gridX < minX) {
                // visible region not reached
                continue;
            }
            if (gridX > maxX) {
                // out of visible region
                break;
            }
            g.drawLine(gridX, minY, gridX, maxY);
        }
    }
void setCurrentCell(Cell cell) {
int rowNumber = cell.getRowNumber();
int colNumber = cell.getColumnNumber();
setCurrentColNum(colNumber);
}
    /**
     * Draw one rendering pass (background, border, or foreground) for all
     * cells intersecting the current clip region.
     * For merged regions only the logical (top-left) cell is drawn, once.
     * @param g the graphics context to use
     * @param cellDrawMode which pass to render
     */
    void drawCells(Graphics2D g, CellDrawMode cellDrawMode) {
        // no sheet, no drawing
        if (sheet == null) {
            return;
        }
        // since text can overflow into other cells, add a margin of cells to be
        // drawn that normally aren't visible when drawing foreground
        int extra = cellDrawMode == CellDrawMode.DRAW_CELL_FOREGROUND ? 10 : 0;
        // determine visible rows and columns
        int startRow = Math.max(0, getRowNumberFromY(clipBounds.y) - extra);
        int endRow = Math.min(getNumberOfRows(), 1 + getRowNumberFromY(clipBounds.y + clipBounds.height) + extra);
        int startColumn = Math.max(0, getColumnNumberFromX(clipBounds.x) - extra);
        int endColumn = Math.min(getNumberOfColumns(), 1 + getColumnNumberFromX(clipBounds.x + clipBounds.width) + extra);
        // Collect cells to be drawn
        for (int i = startRow; i < endRow; i++) {
            Row row = sheet.getRow(i);
            if (row == null) {
                continue;
            }
            for (int j = startColumn; j < endColumn; j++) {
                Cell cell = row.getCell(j);
                if (cell != null) {
                    Cell logicalCell = cell.getLogicalCell();
                    final boolean visible;
                    if (cell == logicalCell) {
                        // if cell is not merged or the topleft cell of the
                        // merged region, then it is visible
                        visible = true;
                    } else {
                        // otherwise calculate row and column numbers of the
                        // first visible cell of the merged region
                        int iCell = Math.max(i, logicalCell.getRowNumber());
                        int jCell = Math.max(j, logicalCell.getColumnNumber());
                        visible = i == iCell && j == jCell;
                        // skip the other cells of this row that belong to the same merged region
                        j = logicalCell.getColumnNumber() + logicalCell.getHorizontalSpan() - 1;
                    }
                    // draw cell
                    if (visible) {
                        switch (cellDrawMode) {
                            case DRAW_CELL_BACKGROUND:
                                drawCellBackground(g, logicalCell);
                                break;
                            case DRAW_CELL_BORDER:
                                drawCellBorder(g, logicalCell);
                                break;
                            case DRAW_CELL_FOREGROUND:
                                drawCellForeground(g, logicalCell);
                                break;
                        }
                    }
                }
            }
        }
    }
/**
* Draw cell background.
*
* @param g the graphics context to use
* @param x x-coordinate of the cells top-left corner
* @param y y-coordinate of the cells top-left corner
* @param w width of the cell in pixels
* @param h height of the cell in pixels
* @param cell cell to draw
*/
private void drawCellBackground(Graphics2D g, Cell cell) {
CellStyle style = cell.getCellStyle();
FillPattern pattern = style.getFillPattern();
if (pattern == FillPattern.NONE) {
return;
}
Rectangle cr = getCellRect(cell);
if (pattern != FillPattern.SOLID) {
Color fillBgColor = style.getFillBgColor();
if (fillBgColor != null) {
g.setColor(fillBgColor);
g.fillRect(cr.x, cr.y, cr.width, cr.height);
}
}
if (pattern != FillPattern.NONE) {
Color fillFgColor = style.getFillFgColor();
if (fillFgColor != null) {
g.setColor(fillFgColor);
g.fillRect(cr.x, cr.y, cr.width, cr.height);
}
}
}
    /**
     * Draw cell border.
     * Each of the four edges is drawn with the width and color configured in
     * the cell's style; edges with zero width are skipped.
     *
     * @param g the graphics context to use
     * @param cell cell to draw
     */
    private void drawCellBorder(Graphics2D g, Cell cell) {
        CellStyle style = cell.getCellStyle();
        Rectangle cr = getCellRect(cell);
        // draw border
        for (Direction d : Direction.values()) {
            BorderStyle b = style.getBorderStyle(d);
            if (b.getWidth() == 0) {
                continue;
            }
            // fall back to black when the style does not specify a color
            Color color = b.getColor();
            if (color == null) {
                color = Color.BLACK;
            }
            g.setColor(color);
            g.setStroke(getStroke(b.getWidth() * scale));
            switch (d) {
                case NORTH:
                    g.drawLine(cr.x, cr.y, cr.x + cr.width - 1, cr.y);
                    break;
                case EAST:
                    g.drawLine(cr.x + cr.width - 1, cr.y, cr.x + cr.width - 1, cr.y + cr.height - 1);
                    break;
                case SOUTH:
                    g.drawLine(cr.x, cr.y + cr.height - 1, cr.x + cr.width - 1, cr.y + cr.height - 1);
                    break;
                case WEST:
                    g.drawLine(cr.x, cr.y, cr.x, cr.y + cr.height - 1);
                    break;
            }
        }
    }
    /**
     * Draw cell foreground, i.e. the cell's (possibly wrapped) text content,
     * honoring the style's font, color, and horizontal/vertical alignment.
     *
     * @param g the graphics context to use
     * @param cell cell to draw
     */
    private void drawCellForeground(Graphics2D g, Cell cell) {
        if (cell.getCellType() == CellType.BLANK) {
            return;
        }
        AttributedString text = cell.getAttributedString();
        if (isEmpty(text)) {
            return;
        }
        // shrink the cell rectangle by the configured padding
        Rectangle cr = getCellRect(cell);
        cr.x += paddingX;
        cr.width -= 2 * paddingX - 1;
        cr.y += paddingY;
        cr.height -= 2 * paddingY;
        // available width in sheet units (unscaled)
        float width = cr.width / scale;
        CellStyle style = cell.getCellStyle();
        Font font = style.getFont();
        final Color color = font.getColor();
        g.setFont(getAwtFont(font));
        g.setColor(color == null ? Color.BLACK : color);
        AffineTransform originalTransform = g.getTransform();
        // NOTE(review): the transform is set up here only so that the
        // FontRenderContext below reflects the scaled rendering; it is reset
        // before the actual drawing - confirm this is intentional
        g.translate(cr.x, cr.y);
        g.scale(scale, scale);
        // layout text
        float wrapWidth = style.isWrap() ? width : 0;
        FontRenderContext frc = new FontRenderContext(g.getTransform(), true, true);
        List<TextLayout> layouts = prepareText(g, frc, text.getIterator(), wrapWidth);
        // determine size of text
        float textWidth = 0;
        float textHeight = 0;
        for (TextLayout layout : layouts) {
            textWidth = Math.max(textWidth, scale * layout.getVisibleAdvance());
            textHeight += scale * (layout.getAscent() + layout.getDescent() + layout.getLeading());
        }
        // calculate text position
        final float xd, yd;
        switch (style.getHAlign()) {
            case ALIGN_LEFT:
            case ALIGN_JUSTIFY:
                xd = cr.x;
                break;
            case ALIGN_CENTER:
                xd = (float) (cr.x + (cr.width - textWidth) / 2.0);
                break;
            case ALIGN_RIGHT:
                xd = cr.x + cr.width - textWidth;
                break;
            default:
                throw new IllegalArgumentException();
        }
        switch (style.getVAlign()) {
            case ALIGN_TOP:
            case ALIGN_JUSTIFY:
                yd = cr.y;
                break;
            case ALIGN_MIDDLE:
                // center vertically, ignoring the trailing leading of the last line
                yd = (float) (cr.y + (cr.height - textHeight - scale * layouts.get(layouts.size() - 1).getLeading()) / 2.0);
                break;
            case ALIGN_BOTTOM:
                final TextLayout lastLayout = layouts.get(layouts.size() - 1);
                yd = cr.y + cr.height - scale * (lastLayout.getDescent() + lastLayout.getAscent());
                break;
            default:
                throw new IllegalArgumentException();
        }
        // draw text
        g.setTransform(originalTransform);
        g.translate(xd, yd);
        g.scale(scale, scale);
        float drawPosY = 0;
        for (TextLayout layout : layouts) {
            // Compute pen x position. If the paragraph
            // is right-to-left we will align the
            // TextLayouts to the right edge of the panel.
            float drawPosX = layout.isLeftToRight() ? 0 : width - layout.getAdvance();
            // Move y-coordinate by the ascent of the
            // layout.
            drawPosY += layout.getAscent();
            // Draw the TextLayout at (drawPosX,drawPosY).
            layout.draw(g, drawPosX, drawPosY);
            // Move y-coordinate in preparation for next
            // layout.
            drawPosY += layout.getDescent() + layout.getLeading();
        }
        g.setTransform(originalTransform);
    }
private List<TextLayout> prepareText(Graphics2D g, FontRenderContext frc, AttributedCharacterIterator text, float width) {
if (width <= 0) {
// no width is given, so no wrapping will be applied.
return Collections.singletonList(new TextLayout(text, frc));
}
AttributedCharacterIterator paragraph = text;
int paragraphStart = paragraph.getBeginIndex();
int paragraphEnd = paragraph.getEndIndex();
LineBreakMeasurer lineMeasurer = new LineBreakMeasurer(paragraph, frc);
float drawPosY = 0;
List<TextLayout> tls = new ArrayList<>();
// Set position to the index of the first
// character in the paragraph.
lineMeasurer.setPosition(paragraphStart);
// Get lines from until the entire paragraph
// has been displayed.
while (lineMeasurer.getPosition() < paragraphEnd) {
TextLayout layout = lineMeasurer.nextLayout(width);
// Compute pen x position. If the paragraph
// is right-to-left we will align the
// TextLayouts to the right edge of the panel.
// Move y-coordinate by the ascent of the
// layout.
drawPosY += scale * layout.getAscent();
// Draw the TextLayout at (drawPosX,drawPosY).
tls.add(layout);
// Move y-coordinate in preparation for next
// layout.
drawPosY += scale * (layout.getDescent() + layout.getLeading());
}
return tls;
}
    /**
     * Get number of columns for the currently loaded sheet.
     *
     * @return number of columns (one less than the number of column boundaries)
     */
    private int getNumberOfColumns() {
        return columnPos.length - 1;
    }
    /**
     * Get number of rows for the currently loaded sheet.
     *
     * @return number of rows (one less than the number of row boundaries)
     */
    private int getNumberOfRows() {
        return rowPos.length - 1;
    }
    /**
     * Test whether an attributed string contains no characters.
     * @param text the text to test
     * @return true if {@code text} is empty
     */
    private boolean isEmpty(AttributedString text) {
        AttributedCharacterIterator iterator = text.getIterator();
        return iterator.getBeginIndex() == iterator.getEndIndex();
    }
    // Look up (or lazily create) the AWT stroke for the given width.
    private java.awt.Stroke getStroke(Float width) {
        return strokeCache.get(width);
    }
    // Look up (or lazily create) the AWT font for the given meja font.
    private java.awt.Font getAwtFont(Font font) {
        return fontCache.get(font);
    }
    /**
     * Return the current cell.
     *
     * @return current cell
     */
    private Cell getCurrentCell() {
        return sheet.getRow(currentRowNum).getCell(currentColNum);
    }
/**
* Draw frame around current selection.
*
* @param g2d graphics used for drawing
*/
private void drawSelection(Graphics2D g2d) {
// no sheet, no drawing
if (sheet == null) {
return;
}
Cell logicalCell = getCurrentCell().getLogicalCell();
int rowNum = logicalCell.getRowNumber();
int colNum = logicalCell.getColumnNumber();
int spanX = logicalCell.getHorizontalSpan();
int spanY = logicalCell.getVerticalSpan();
int x = columnPos[colNum];
int y = rowPos[rowNum];
int w = columnPos[colNum + spanX] - x;
int h = rowPos[rowNum + spanY] - y;
g2d.setColor(selectionColor);
g2d.setStroke(selectionStroke);
g2d.drawRect(x, y, w, h);
}
    // Rendering passes used by drawCells(): backgrounds, then borders, then text.
    protected static enum CellDrawMode {
        DRAW_CELL_BACKGROUND, DRAW_CELL_BORDER, DRAW_CELL_FOREGROUND
    }
}
| meja/src/com/dua3/meja/ui/swing/SheetView.java | /*
* Copyright 2015 Axel Howind <[email protected]>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dua3.meja.ui.swing;
import com.dua3.meja.model.BorderStyle;
import com.dua3.meja.model.Cell;
import com.dua3.meja.model.CellStyle;
import com.dua3.meja.model.CellType;
import com.dua3.meja.model.Direction;
import com.dua3.meja.model.FillPattern;
import com.dua3.meja.model.Font;
import com.dua3.meja.model.Row;
import com.dua3.meja.model.Sheet;
import com.dua3.meja.util.Cache;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Stroke;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.font.FontRenderContext;
import java.awt.font.LineBreakMeasurer;
import java.awt.font.TextLayout;
import java.awt.geom.AffineTransform;
import java.text.AttributedCharacterIterator;
import java.text.AttributedString;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import javax.swing.Scrollable;
import javax.swing.SwingConstants;
/**
*
* @author axel
*/
public class SheetView extends JPanel implements Scrollable {
Cache<Float, java.awt.Stroke> strokeCache = new Cache<Float, java.awt.Stroke>() {
@Override
protected java.awt.Stroke create(Float width) {
return new BasicStroke(width);
}
};
Cache<Font, java.awt.Font> fontCache = new Cache<Font, java.awt.Font>() {
@Override
protected java.awt.Font create(Font font) {
int style = (font.isBold() ? java.awt.Font.BOLD : 0) | (font.isItalic() ? java.awt.Font.ITALIC : 0);
return new java.awt.Font(font.getFamily(), style, (int) Math.round(font.getSizeInPoints()));
}
};
float scale = 1;
int columnPos[];
int rowPos[];
int sheetWidth;
int sheetHeight;
int currentColNum;
int currentRowNum;
private Sheet sheet;
private Color gridColor = Color.LIGHT_GRAY;
/**
* Horizontal padding.
*/
private final int paddingX = 2;
/**
* Vertical padding.
*/
private final int paddingY = 1;
/**
* Color used to draw the selection rectangle.
*/
private Color selectionColor = Color.BLACK;
/**
* Width of the selection rectangle borders.
*/
private final int selectionStrokeWidth = 4;
/**
* Stroke used to draw the selection rectangle.
*/
private Stroke selectionStroke = getStroke((float) selectionStrokeWidth);
/**
* Active clip bounds when drawing.
*/
private final Rectangle clipBounds = new Rectangle();
/**
* Move the selection rectangle to an adjacent cell.
* @param d direction
*/
private void move(Direction d) {
Cell cell = getCurrentCell().getLogicalCell();
switch (d) {
case NORTH:
setCurrentRowNum(cell.getRowNumber() - 1);
break;
case SOUTH:
setCurrentRowNum(cell.getRowNumber() + cell.getVerticalSpan());
break;
case WEST:
setCurrentColNum(cell.getColumnNumber() - 1);
break;
case EAST:
setCurrentColNum(cell.getColumnNumber() + cell.getHorizontalSpan());
break;
}
scrollToCurrentCell();
}
/**
* Get display coordinates of selection rectangle.
* @return selection rectangle in display coordinates
*/
private Rectangle getSelectionRect() {
Rectangle cellRect = getCellRect(getCurrentCell().getLogicalCell());
int extra = (selectionStrokeWidth + 1) / 2;
cellRect.x -= extra;
cellRect.y -= extra;
cellRect.width += 2 * extra;
cellRect.height += 2 * extra;
return cellRect;
}
public void scrollToCurrentCell() {
ensureCellIsVisibile(getCurrentCell().getLogicalCell());
}
public void ensureCellIsVisibile(Cell cell) {
scrollRectToVisible(getCellRect(cell));
}
public Rectangle getCellRect(Cell cell) {
final int i = cell.getRowNumber();
final int j = cell.getColumnNumber();
final int y = rowPos[i];
final int h = rowPos[i + cell.getVerticalSpan()] - y;
final int x = columnPos[j];
final int w = columnPos[cell.getColumnNumber() + cell.getHorizontalSpan()] - x;
return new Rectangle(x, y, w, h);
}
public int getCurrentRowNum() {
return currentRowNum;
}
public void setCurrentRowNum(int rowNum) {
int oldRowNum = currentRowNum;
int newRowNum = Math.max(sheet.getFirstRowNum(), Math.min(sheet.getLastRowNum(), rowNum));
if (newRowNum != oldRowNum) {
// get old selection for repainting
Rectangle oldRect = getSelectionRect();
// update current position
currentRowNum = newRowNum;
// get new selection for repainting
Rectangle newRect = getSelectionRect();
repaint(oldRect);
repaint(newRect);
}
}
public int getCurrentColNum() {
return currentColNum;
}
public void setCurrentColNum(int colNum) {
int oldColNum = currentColNum;
int newColNum = Math.max(sheet.getFirstColNum(), Math.min(sheet.getLastColNum(), colNum));
if (newColNum != oldColNum) {
// get old selection for repainting
Rectangle oldRect = getSelectionRect();
// update current position
currentColNum = newColNum;
// get new selection for repainting
Rectangle newRect = getSelectionRect();
repaint(oldRect);
repaint(newRect);
}
}
static enum Actions {
MOVE_UP {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_UP") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.NORTH);
}
};
}
},
MOVE_DOWN {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_DOWN") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.SOUTH);
}
};
}
},
MOVE_LEFT {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_LEFT") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.WEST);
}
};
}
},
MOVE_RIGHT {
@Override
public Action getAction(final SheetView view) {
return new AbstractAction("MOVE_RIGHT") {
@Override
public void actionPerformed(ActionEvent e) {
view.move(Direction.EAST);
}
};
}
};
abstract Action getAction(SheetView view);
}
public SheetView() {
this(null);
}
public SheetView(Sheet sheet) {
init();
setSheet(sheet);
}
public void setSheet(Sheet sheet1) {
this.sheet = sheet1;
this.currentRowNum = 0;
this.currentColNum = 0;
update();
}
private void init() {
final InputMap inputMap = getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
inputMap.put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_UP, 0), Actions.MOVE_UP);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_UP, 0), Actions.MOVE_UP);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_DOWN, 0), Actions.MOVE_DOWN);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_DOWN, 0), Actions.MOVE_DOWN);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_LEFT, 0), Actions.MOVE_LEFT);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_LEFT, 0), Actions.MOVE_LEFT);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_RIGHT, 0), Actions.MOVE_RIGHT);
getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_RIGHT, 0), Actions.MOVE_RIGHT);
final ActionMap actionMap = getActionMap();
for (Actions action : Actions.values()) {
actionMap.put(action, action.getAction(this));
}
setFocusable(true);
requestFocusInWindow();
}
/**
* Set the grid color.
*
* @param gridColor
*/
public void setGridColor(Color gridColor) {
this.gridColor = gridColor;
}
/**
* Get the grid color.
*
* @return color of grid
*/
public Color getGridColor() {
return gridColor;
}
/**
* Update sheet layout data.
*/
private void update() {
int dpi = Toolkit.getDefaultToolkit().getScreenResolution();
scale = dpi / 72f;
if (sheet == null) {
sheetWidth = 0;
sheetHeight = 0;
rowPos = new int[]{0};
columnPos = new int[]{0};
return;
}
sheetHeight = 0;
rowPos = new int[2 + sheet.getLastRowNum()];
rowPos[0] = 0;
for (int i = 1; i < rowPos.length; i++) {
sheetHeight += Math.round(sheet.getRowHeight(i - 1) * scale);
rowPos[i] = sheetHeight;
}
sheetWidth = 0;
columnPos = new int[2 + sheet.getLastColNum()];
columnPos[0] = 0;
for (int j = 1; j < columnPos.length; j++) {
sheetWidth += Math.round(sheet.getColumnWidth(j - 1) * scale);
columnPos[j] = sheetWidth;
}
}
@Override
public Dimension getPreferredScrollableViewportSize() {
return getPreferredSize();
}
@Override
public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
if (orientation == SwingConstants.VERTICAL) {
// scroll vertical
if (direction < 0) {
//scroll up
final int y = visibleRect.y;
int yPrevious = 0;
for (int i = 0; i < rowPos.length; i++) {
if (rowPos[i] >= y) {
return y - yPrevious;
}
yPrevious = rowPos[i];
}
// should never be reached
return 0;
} else {
// scroll down
final int y = visibleRect.y + visibleRect.height;
for (int i = 0; i < rowPos.length; i++) {
if (rowPos[i] > y) {
return rowPos[i] - y;
}
}
// should never be reached
return 0;
}
} else {
// scroll horizontal
if (direction < 0) {
//scroll left
final int x = visibleRect.x;
int xPrevious = 0;
for (int j = 0; j < columnPos.length; j++) {
if (columnPos[j] >= x) {
return x - xPrevious;
}
xPrevious = columnPos[j];
}
// should never be reached
return 0;
} else {
// scroll down
final int x = visibleRect.x + visibleRect.width;
for (int j = 0; j < columnPos.length; j++) {
if (columnPos[j] > x) {
return columnPos[j] - x;
}
}
// should never be reached
return 0;
}
}
}
/**
* Get the row number that the given y-coordinate belongs to.
*
* @param y y-coordinate
*
* @return
* <ul>
* <li> -1, if the first row is displayed below the given coordinate
* <li> number of rows, if the lower edge of the last row is displayed above
* the given coordinate
* <li> the number of the row that belongs to the given coordinate
* </ul>
*/
public int getRowNumberFromY(int y) {
int i = 0;
while (i < rowPos.length && rowPos[i] <= y) {
i++;
}
return i - 1;
}
/**
* Get the column number that the given x-coordinate belongs to.
*
* @param x x-coordinate
*
* @return
* <ul>
* <li> -1, if the first column is displayed to the right of the given
* coordinate
* <li> number of columns, if the right edge of the last column is displayed
* to the left of the given coordinate
* <li> the number of the column that belongs to the given coordinate
* </ul>
*/
public int getColumnNumberFromX(int x) {
int j = 0;
while (j < columnPos.length && columnPos[j] <= x) {
j++;
}
return j - 1;
}
    /**
     * {@inheritDoc}
     * <p>
     * A block scroll moves by three unit increments (three rows/columns).
     */
    @Override
    public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
        return 3 * getScrollableUnitIncrement(visibleRect, orientation, direction);
    }
    /**
     * {@inheritDoc}
     * <p>
     * The sheet keeps its own width; it is never stretched to the viewport.
     */
    @Override
    public boolean getScrollableTracksViewportWidth() {
        return false;
    }
    /**
     * {@inheritDoc}
     * <p>
     * The sheet keeps its own height; it is never stretched to the viewport.
     */
    @Override
    public boolean getScrollableTracksViewportHeight() {
        return false;
    }
    /**
     * {@inheritDoc}
     * <p>
     * One extra pixel in each direction so the outermost grid line is visible.
     */
    @Override
    public Dimension getPreferredSize() {
        return new Dimension(sheetWidth + 1, sheetHeight + 1);
    }
    /**
     * Paint the sheet: clears the clip region, then draws the grid and the
     * cells in three passes (background, border, foreground) followed by the
     * selection frame, so that content of one pass never hides another.
     */
    @Override
    protected void paintComponent(Graphics g) {
        Graphics2D g2d = (Graphics2D) g;
        // cache the clip bounds; the draw methods below use them for culling
        g2d.getClipBounds(clipBounds);
        g2d.clearRect(clipBounds.x, clipBounds.y, clipBounds.width, clipBounds.height);
        g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        g2d.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_GASP);
        g2d.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON);
        drawGrid(g2d);
        drawCells(g2d, CellDrawMode.DRAW_CELL_BACKGROUND);
        drawCells(g2d, CellDrawMode.DRAW_CELL_BORDER);
        drawCells(g2d, CellDrawMode.DRAW_CELL_FOREGROUND);
        drawSelection(g2d);
    }
    /**
     * Draw the grid lines at the row/column boundaries, restricted to the
     * current clip bounds.
     *
     * @param g the graphics context to use
     */
    private void drawGrid(Graphics2D g) {
        g.setColor(gridColor);
        final int minY = clipBounds.y;
        final int maxY = clipBounds.y + clipBounds.height;
        final int minX = clipBounds.x;
        final int maxX = clipBounds.x + clipBounds.width;
        // draw horizontal grid lines
        for (int gridY : rowPos) {
            if (gridY < minY) {
                // visible region not reached
                continue;
            }
            if (gridY > maxY) {
                // out of visible region (rowPos is ascending, so we can stop)
                break;
            }
            g.drawLine(minX, gridY, maxX, gridY);
        }
        // draw vertical grid lines
        for (int gridX : columnPos) {
            if (gridX < minX) {
                // visible region not reached
                continue;
            }
            if (gridX > maxX) {
                // out of visible region
                break;
            }
            g.drawLine(gridX, minY, gridX, maxY);
        }
    }
    /**
     * Draw all cells visible in the clip region for the given pass
     * (background, border or foreground).
     * <p>
     * Merged cells are drawn once, anchored at their logical (top-left) cell;
     * for the foreground pass a margin of extra cells is included because text
     * may overflow into neighbouring cells.
     *
     * @param g the graphics context to use
     * @param cellDrawMode which pass to execute
     */
    void drawCells(Graphics2D g, CellDrawMode cellDrawMode) {
        // no sheet, no drawing
        if (sheet == null) {
            return;
        }
        // since text can overflow into other cells, add a margin of cells to be
        // drawn that normally aren't visible when drawing foreground
        int extra = cellDrawMode == CellDrawMode.DRAW_CELL_FOREGROUND ? 10 : 0;
        // determine visible rows and columns
        int startRow = Math.max(0, getRowNumberFromY(clipBounds.y) - extra);
        int endRow = Math.min(getNumberOfRows(), 1 + getRowNumberFromY(clipBounds.y + clipBounds.height) + extra);
        int startColumn = Math.max(0, getColumnNumberFromX(clipBounds.x) - extra);
        int endColumn = Math.min(getNumberOfColumns(), 1 + getColumnNumberFromX(clipBounds.x + clipBounds.width) + extra);
        // Collect cells to be drawn
        for (int i = startRow; i < endRow; i++) {
            Row row = sheet.getRow(i);
            if (row == null) {
                continue;
            }
            for (int j = startColumn; j < endColumn; j++) {
                Cell cell = row.getCell(j);
                if (cell != null) {
                    Cell logicalCell = cell.getLogicalCell();
                    final boolean visible;
                    if (cell == logicalCell) {
                        // if cell is not merged or the topleft cell of the
                        // merged region, then it is visible
                        visible = true;
                    } else {
                        // otherwise calculate row and column numbers of the
                        // first visible cell of the merged region
                        int iCell = Math.max(i, logicalCell.getRowNumber());
                        int jCell = Math.max(j, logicalCell.getColumnNumber());
                        visible = i == iCell && j == jCell;
                        // skip the other cells of this row that belong to the same merged region
                        j = logicalCell.getColumnNumber() + logicalCell.getHorizontalSpan() - 1;
                    }
                    // draw cell (dispatch on the current pass)
                    if (visible) {
                        switch (cellDrawMode) {
                            case DRAW_CELL_BACKGROUND:
                                drawCellBackground(g, logicalCell);
                                break;
                            case DRAW_CELL_BORDER:
                                drawCellBorder(g, logicalCell);
                                break;
                            case DRAW_CELL_FOREGROUND:
                                drawCellForeground(g, logicalCell);
                                break;
                        }
                    }
                }
            }
        }
    }
/**
* Draw cell background.
*
* @param g the graphics context to use
* @param x x-coordinate of the cells top-left corner
* @param y y-coordinate of the cells top-left corner
* @param w width of the cell in pixels
* @param h height of the cell in pixels
* @param cell cell to draw
*/
private void drawCellBackground(Graphics2D g, Cell cell) {
CellStyle style = cell.getCellStyle();
FillPattern pattern = style.getFillPattern();
if (pattern == FillPattern.NONE) {
return;
}
Rectangle cr = getCellRect(cell);
if (pattern != FillPattern.SOLID) {
Color fillBgColor = style.getFillBgColor();
if (fillBgColor != null) {
g.setColor(fillBgColor);
g.fillRect(cr.x, cr.y, cr.width, cr.height);
}
}
if (pattern != FillPattern.NONE) {
Color fillFgColor = style.getFillFgColor();
if (fillFgColor != null) {
g.setColor(fillFgColor);
g.fillRect(cr.x, cr.y, cr.width, cr.height);
}
}
}
    /**
     * Draw cell border.
     * <p>
     * Each of the four sides is drawn separately using the style's border
     * width and color (black if the style has no color set).
     *
     * @param g the graphics context to use
     * @param cell cell to draw
     */
    private void drawCellBorder(Graphics2D g, Cell cell) {
        CellStyle style = cell.getCellStyle();
        Rectangle cr = getCellRect(cell);
        // draw border
        for (Direction d : Direction.values()) {
            BorderStyle b = style.getBorderStyle(d);
            if (b.getWidth() == 0) {
                // no border on this side
                continue;
            }
            Color color = b.getColor();
            if (color == null) {
                color = Color.BLACK;
            }
            g.setColor(color);
            // border width is specified in sheet units; scale to pixels
            g.setStroke(getStroke(b.getWidth() * scale));
            switch (d) {
                case NORTH:
                    g.drawLine(cr.x, cr.y, cr.x + cr.width - 1, cr.y);
                    break;
                case EAST:
                    g.drawLine(cr.x + cr.width - 1, cr.y, cr.x + cr.width - 1, cr.y + cr.height - 1);
                    break;
                case SOUTH:
                    g.drawLine(cr.x, cr.y + cr.height - 1, cr.x + cr.width - 1, cr.y + cr.height - 1);
                    break;
                case WEST:
                    g.drawLine(cr.x, cr.y, cr.x, cr.y + cr.height - 1);
                    break;
            }
        }
    }
    /**
     * Draw cell foreground, i.e. the cell's (possibly styled and wrapped)
     * text, honoring the cell's horizontal and vertical alignment.
     *
     * @param g the graphics context to use
     * @param cell cell to draw
     */
    private void drawCellForeground(Graphics2D g, Cell cell) {
        if (cell.getCellType() == CellType.BLANK) {
            return;
        }
        AttributedString text = cell.getAttributedString();
        if (isEmpty(text)) {
            return;
        }
        // shrink the cell rectangle by the text padding
        Rectangle cr = getCellRect(cell);
        cr.x += paddingX;
        cr.width -= 2 * paddingX - 1;
        cr.y += paddingY;
        cr.height -= 2 * paddingY;
        // width in sheet units (un-scaled), used for text wrapping
        float width = cr.width / scale;
        CellStyle style = cell.getCellStyle();
        Font font = style.getFont();
        final Color color = font.getColor();
        g.setFont(getAwtFont(font));
        g.setColor(color == null ? Color.BLACK : color);
        // remember the transform so it can be restored after scaling for text
        AffineTransform originalTransform = g.getTransform();
        g.translate(cr.x, cr.y);
        g.scale(scale, scale);
        // layout text (wrap only when the style requests it)
        float wrapWidth = style.isWrap() ? width : 0;
        FontRenderContext frc = new FontRenderContext(g.getTransform(), true, true);
        List<TextLayout> layouts = prepareText(g, frc, text.getIterator(), wrapWidth);
        // determine size of text (in pixel units, hence the scale factor)
        float textWidth = 0;
        float textHeight = 0;
        for (TextLayout layout : layouts) {
            textWidth = Math.max(textWidth, scale * layout.getVisibleAdvance());
            textHeight += scale * (layout.getAscent() + layout.getDescent() + layout.getLeading());
        }
        // calculate text position according to the cell's alignment
        final float xd, yd;
        switch (style.getHAlign()) {
            case ALIGN_LEFT:
            case ALIGN_JUSTIFY:
                xd = cr.x;
                break;
            case ALIGN_CENTER:
                xd = (float) (cr.x + (cr.width - textWidth) / 2.0);
                break;
            case ALIGN_RIGHT:
                xd = cr.x + cr.width - textWidth;
                break;
            default:
                throw new IllegalArgumentException();
        }
        switch (style.getVAlign()) {
            case ALIGN_TOP:
            case ALIGN_JUSTIFY:
                yd = cr.y;
                break;
            case ALIGN_MIDDLE:
                // center vertically, ignoring the trailing leading of the last line
                yd = (float) (cr.y + (cr.height - textHeight - scale * layouts.get(layouts.size() - 1).getLeading()) / 2.0);
                break;
            case ALIGN_BOTTOM:
                final TextLayout lastLayout = layouts.get(layouts.size() - 1);
                yd = cr.y + cr.height - scale * (lastLayout.getDescent() + lastLayout.getAscent());
                break;
            default:
                throw new IllegalArgumentException();
        }
        // draw text: reset the transform, then position at (xd, yd) and scale
        g.setTransform(originalTransform);
        g.translate(xd, yd);
        g.scale(scale, scale);
        float drawPosY = 0;
        for (TextLayout layout : layouts) {
            // Compute pen x position. If the paragraph
            // is right-to-left we will align the
            // TextLayouts to the right edge of the panel.
            float drawPosX = layout.isLeftToRight() ? 0 : width - layout.getAdvance();
            // Move y-coordinate by the ascent of the
            // layout.
            drawPosY += layout.getAscent();
            // Draw the TextLayout at (drawPosX,drawPosY).
            layout.draw(g, drawPosX, drawPosY);
            // Move y-coordinate in preparation for next
            // layout.
            drawPosY += layout.getDescent() + layout.getLeading();
        }
        // restore the caller's transform
        g.setTransform(originalTransform);
    }
private List<TextLayout> prepareText(Graphics2D g, FontRenderContext frc, AttributedCharacterIterator text, float width) {
if (width <= 0) {
// no width is given, so no wrapping will be applied.
return Collections.singletonList(new TextLayout(text, frc));
}
AttributedCharacterIterator paragraph = text;
int paragraphStart = paragraph.getBeginIndex();
int paragraphEnd = paragraph.getEndIndex();
LineBreakMeasurer lineMeasurer = new LineBreakMeasurer(paragraph, frc);
float drawPosY = 0;
List<TextLayout> tls = new ArrayList<>();
// Set position to the index of the first
// character in the paragraph.
lineMeasurer.setPosition(paragraphStart);
// Get lines from until the entire paragraph
// has been displayed.
while (lineMeasurer.getPosition() < paragraphEnd) {
TextLayout layout = lineMeasurer.nextLayout(width);
// Compute pen x position. If the paragraph
// is right-to-left we will align the
// TextLayouts to the right edge of the panel.
// Move y-coordinate by the ascent of the
// layout.
drawPosY += scale * layout.getAscent();
// Draw the TextLayout at (drawPosX,drawPosY).
tls.add(layout);
// Move y-coordinate in preparation for next
// layout.
drawPosY += scale * (layout.getDescent() + layout.getLeading());
}
return tls;
}
    /**
     * Get number of columns for the currently loaded sheet.
     *
     * @return number of columns
     */
    private int getNumberOfColumns() {
        // columnPos has one entry per boundary, i.e. columns + 1 entries
        return columnPos.length - 1;
    }
    /**
     * Get number of rows for the currently loaded sheet.
     *
     * @return number of rows
     */
    private int getNumberOfRows() {
        // rowPos has one entry per boundary, i.e. rows + 1 entries
        return rowPos.length - 1;
    }
    /**
     * Test whether an attributed string contains no characters.
     *
     * @param text the text to test
     * @return true if the text is empty
     */
    private boolean isEmpty(AttributedString text) {
        AttributedCharacterIterator iterator = text.getIterator();
        return iterator.getBeginIndex() == iterator.getEndIndex();
    }
    /**
     * Get a (cached) stroke for the given line width.
     *
     * @param width line width in pixels
     * @return stroke for the given width
     */
    private java.awt.Stroke getStroke(Float width) {
        return strokeCache.get(width);
    }
    /**
     * Get the (cached) AWT font corresponding to the given sheet font.
     *
     * @param font sheet font
     * @return matching AWT font
     */
    private java.awt.Font getAwtFont(Font font) {
        return fontCache.get(font);
    }
    /**
     * Return the current cell.
     *
     * @return current cell
     */
    private Cell getCurrentCell() {
        return sheet.getRow(currentRowNum).getCell(currentColNum);
    }
    /**
     * Draw frame around current selection.
     * <p>
     * The frame spans the whole merged region of the current cell.
     *
     * @param g2d graphics used for drawing
     */
    private void drawSelection(Graphics2D g2d) {
        // no sheet, no drawing
        if (sheet == null) {
            return;
        }
        // use the logical cell so merged regions are framed as a whole
        Cell logicalCell = getCurrentCell().getLogicalCell();
        int rowNum = logicalCell.getRowNumber();
        int colNum = logicalCell.getColumnNumber();
        int spanX = logicalCell.getHorizontalSpan();
        int spanY = logicalCell.getVerticalSpan();
        int x = columnPos[colNum];
        int y = rowPos[rowNum];
        int w = columnPos[colNum + spanX] - x;
        int h = rowPos[rowNum + spanY] - y;
        g2d.setColor(selectionColor);
        g2d.setStroke(selectionStroke);
        g2d.drawRect(x, y, w, h);
    }
protected static enum CellDrawMode {
DRAW_CELL_BACKGROUND, DRAW_CELL_BORDER, DRAW_CELL_FOREGROUND
}
}
| start mouse support
| meja/src/com/dua3/meja/ui/swing/SheetView.java | start mouse support | <ide><path>eja/src/com/dua3/meja/ui/swing/SheetView.java
<ide> import java.awt.Stroke;
<ide> import java.awt.Toolkit;
<ide> import java.awt.event.ActionEvent;
<add>import java.awt.event.MouseAdapter;
<add>import java.awt.event.MouseEvent;
<ide> import java.awt.font.FontRenderContext;
<ide> import java.awt.font.LineBreakMeasurer;
<ide> import java.awt.font.TextLayout;
<ide> return cellRect;
<ide> }
<ide>
<add> /**
<add> * Scroll the currently selected cell into view.
<add> */
<ide> public void scrollToCurrentCell() {
<ide> ensureCellIsVisibile(getCurrentCell().getLogicalCell());
<ide> }
<ide>
<add> /**
<add> * Scroll cell into view.
<add> * @param cell the cell to scroll to
<add> */
<ide> public void ensureCellIsVisibile(Cell cell) {
<ide> scrollRectToVisible(getCellRect(cell));
<ide> }
<ide> }
<ide>
<ide> public void setCurrentRowNum(int rowNum) {
<add> setCurrent(rowNum, currentColNum);
<add> }
<add>
<add> public int getCurrentColNum() {
<add> return currentColNum;
<add> }
<add>
<add> public void setCurrentColNum(int colNum) {
<add> setCurrent(currentRowNum, colNum);
<add> }
<add>
<add> public void setCurrent(int rowNum, int colNum) {
<ide> int oldRowNum = currentRowNum;
<ide> int newRowNum = Math.max(sheet.getFirstRowNum(), Math.min(sheet.getLastRowNum(), rowNum));
<del> if (newRowNum != oldRowNum) {
<add> int oldColNum = currentColNum;
<add> int newColNum = Math.max(sheet.getFirstColNum(), Math.min(sheet.getLastColNum(), colNum));
<add> if (newRowNum != oldRowNum || newColNum != oldColNum) {
<ide> // get old selection for repainting
<ide> Rectangle oldRect = getSelectionRect();
<ide> // update current position
<ide> currentRowNum = newRowNum;
<del> // get new selection for repainting
<del> Rectangle newRect = getSelectionRect();
<del> repaint(oldRect);
<del> repaint(newRect);
<del> }
<del> }
<del>
<del> public int getCurrentColNum() {
<del> return currentColNum;
<del> }
<del>
<del> public void setCurrentColNum(int colNum) {
<del> int oldColNum = currentColNum;
<del> int newColNum = Math.max(sheet.getFirstColNum(), Math.min(sheet.getLastColNum(), colNum));
<del> if (newColNum != oldColNum) {
<del> // get old selection for repainting
<del> Rectangle oldRect = getSelectionRect();
<del> // update current position
<ide> currentColNum = newColNum;
<ide> // get new selection for repainting
<ide> Rectangle newRect = getSelectionRect();
<ide> }
<ide>
<ide> private void init() {
<add> // setup input map for keyboard navigation
<ide> final InputMap inputMap = getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
<ide> inputMap.put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_UP, 0), Actions.MOVE_UP);
<ide> getInputMap().put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_KP_UP, 0), Actions.MOVE_UP);
<ide> actionMap.put(action, action.getAction(this));
<ide> }
<ide>
<add> // listen to mouse events
<add> addMouseListener(new MouseAdapter() {
<add> @Override
<add> public void mousePressed(MouseEvent e) {
<add> int row = getRowNumberFromY(e.getY());
<add> int col = getColumnNumberFromX(e.getX());
<add> setCurrent(row, col);
<add> requestFocusInWindow();
<add> }
<add> });
<add>
<add> // make focusable
<ide> setFocusable(true);
<ide> requestFocusInWindow();
<ide> }
<ide> sheetWidth += Math.round(sheet.getColumnWidth(j - 1) * scale);
<ide> columnPos[j] = sheetWidth;
<ide> }
<add> revalidate();
<ide> }
<ide>
<ide> @Override
<ide> }
<ide> }
<ide>
<add> void setCurrentCell(Cell cell) {
<add> int rowNumber = cell.getRowNumber();
<add> int colNumber = cell.getColumnNumber();
<add> setCurrentColNum(colNumber);
<add> }
<add>
<ide> void drawCells(Graphics2D g, CellDrawMode cellDrawMode) {
<ide> // no sheet, no drawing
<ide> if (sheet == null) { |
|
Java | apache-2.0 | c0db65a6384abbffb6b65c5aa21275bc5e21cfed | 0 | cuba-platform/cuba,dimone-kun/cuba,cuba-platform/cuba,dimone-kun/cuba,cuba-platform/cuba,dimone-kun/cuba | /*
* Copyright (c) 2008-2013 Haulmont. All rights reserved.
* Use is subject to license terms, see http://www.cuba-platform.com/license for details.
*/
package com.haulmont.cuba.web.gui.components;
import com.haulmont.bali.datastruct.Pair;
import com.haulmont.bali.util.Dom4j;
import com.haulmont.chile.core.datatypes.Datatype;
import com.haulmont.chile.core.datatypes.impl.BooleanDatatype;
import com.haulmont.chile.core.model.Instance;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.MetaProperty;
import com.haulmont.chile.core.model.MetaPropertyPath;
import com.haulmont.cuba.client.ClientConfig;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.*;
import com.haulmont.cuba.gui.ComponentsHelper;
import com.haulmont.cuba.gui.WindowManager;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.gui.components.Field;
import com.haulmont.cuba.gui.components.Formatter;
import com.haulmont.cuba.gui.components.Table;
import com.haulmont.cuba.gui.components.Window;
import com.haulmont.cuba.gui.components.actions.ListActionType;
import com.haulmont.cuba.gui.data.*;
import com.haulmont.cuba.gui.data.impl.CollectionDsActionsNotifier;
import com.haulmont.cuba.gui.data.impl.CollectionDsListenerAdapter;
import com.haulmont.cuba.gui.data.impl.DatasourceImplementation;
import com.haulmont.cuba.gui.presentations.Presentations;
import com.haulmont.cuba.gui.presentations.PresentationsImpl;
import com.haulmont.cuba.security.entity.EntityAttrAccess;
import com.haulmont.cuba.security.entity.EntityOp;
import com.haulmont.cuba.security.entity.Presentation;
import com.haulmont.cuba.security.global.UserSession;
import com.haulmont.cuba.web.gui.CompositionLayout;
import com.haulmont.cuba.web.gui.components.presentations.TablePresentations;
import com.haulmont.cuba.web.gui.data.CollectionDsWrapper;
import com.haulmont.cuba.web.gui.data.ItemWrapper;
import com.haulmont.cuba.web.gui.data.PropertyWrapper;
import com.haulmont.cuba.web.toolkit.VersionedThemeResource;
import com.haulmont.cuba.web.toolkit.data.AggregationContainer;
import com.haulmont.cuba.web.toolkit.ui.FieldWrapper;
import com.vaadin.data.Item;
import com.vaadin.data.Property;
import com.vaadin.event.ItemClickEvent;
import com.vaadin.event.ShortcutListener;
import com.vaadin.terminal.PaintException;
import com.vaadin.terminal.PaintTarget;
import com.vaadin.terminal.Resource;
import com.vaadin.ui.*;
import com.vaadin.ui.Button;
import com.vaadin.ui.Component;
import com.vaadin.ui.Label;
import com.vaadin.ui.TextArea;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.LogFactory;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import javax.annotation.Nullable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* @param <T>
* @author abramov
* @version $Id$
*/
public abstract class WebAbstractTable<T extends com.haulmont.cuba.web.toolkit.ui.Table>
extends WebAbstractList<T> implements Table {
    // base style name that must always be present on the Vaadin table
    protected static final String REQUIRED_TABLE_STYLE = "table";
    // column definitions, keyed by column id, and their display order
    protected Map<Object, Table.Column> columns = new HashMap<>();
    protected List<Table.Column> columnsOrder = new ArrayList<>();
    protected boolean editable;
    // actions invoked on item double-click / Enter key
    protected Action itemClickAction;
    protected Action enterPressAction;
    protected Table.StyleProvider styleProvider;
    protected Table.IconProvider iconProvider;
    // per-item datasources used for in-place cell editors; weak keys so
    // entries are dropped when items are garbage collected
    protected Map<Entity, Datasource> fieldDatasources = new WeakHashMap<>();
    // required-marker messages and validators, per column
    protected Map<Table.Column, String> requiredColumns = new HashMap<>();
    protected Map<Table.Column, Set<com.haulmont.cuba.gui.components.Field.Validator>> validatorsMap = new HashMap<>();
    protected Set<com.haulmont.cuba.gui.components.Field.Validator> tableValidators = new LinkedHashSet<>();
    // layout wrapping the table plus optional top panel (buttons, rows count)
    protected CompositionLayout componentComposition;
    protected HorizontalLayout topPanel;
    protected ButtonsPanel buttonsPanel;
    protected RowsCount rowsCount;
    protected Map<Table.Column, Object> aggregationCells = null;
    // presentations (saved table settings) support
    protected boolean usePresentations;
    protected Presentations presentations;
    protected TablePresentations tablePresentations;
    protected List<ColumnCollapseListener> columnCollapseListeners = new ArrayList<>();
    // Map column id to Printable representation
    protected Map<String, Printable> printables = new HashMap<>();
    // disabled for #PL-2035
    // Disable listener that points component value to follow the ds item.
    // protected boolean disableItemListener = false;
    // user-defined part of the style name (without REQUIRED_TABLE_STYLE)
    protected String customStyle;
    protected Security security = AppBeans.get(Security.class);
    protected static final int MAX_TEXT_LENGTH_GAP = 10;
    /**
     * @return columns in their current display order (live list, not a copy)
     */
    @Override
    public java.util.List<Table.Column> getColumns() {
        return columnsOrder;
    }
@Override
public Table.Column getColumn(String id) {
for (Table.Column column : columnsOrder) {
if (column.getId().toString().equals(id))
return column;
}
return null;
}
    /**
     * Register a column: adds the container property, records the column and
     * its order, and applies an explicit width if one is set.
     */
    @Override
    public void addColumn(Table.Column column) {
        component.addContainerProperty(column.getId(), column.getType(), null);
        columns.put(column.getId(), column);
        columnsOrder.add(column);
        if (column.getWidth() != null) {
            component.setColumnWidth(column.getId(), column.getWidth());
        }
    }
    /**
     * Unregister a column and remove its container property.
     */
    @Override
    public void removeColumn(Table.Column column) {
        component.removeContainerProperty(column.getId());
        //noinspection RedundantCast
        columns.remove((MetaPropertyPath) column.getId());
        columnsOrder.remove(column);
    }
    /**
     * Get (lazily creating and caching) a single-item datasource for the given
     * entity, used by in-place cell editors. The datasource is non-committing
     * and never refreshed.
     */
    @Override
    public Datasource getItemDatasource(Entity item) {
        Datasource fieldDatasource = fieldDatasources.get(item);
        if (fieldDatasource == null) {
            fieldDatasource = new DsBuilder()
                    .setAllowCommit(false)
                    .setMetaClass(datasource.getMetaClass())
                    .setRefreshMode(CollectionDatasource.RefreshMode.NEVER)
                    .setViewName("_local")
                    .buildDatasource();
            // mark valid without loading, then attach the item
            ((DatasourceImplementation)fieldDatasource).valid();
            fieldDatasource.setItem(item);
            fieldDatasources.put(item, fieldDatasource);
        }
        return fieldDatasource;
    }
    // register/unregister a Vaadin generated column on the underlying table
    protected void addGeneratedColumn(Object id, Object generator) {
        component.addGeneratedColumn(id, (com.vaadin.ui.Table.ColumnGenerator) generator);
    }
    protected void removeGeneratedColumn(Object id) {
        component.removeGeneratedColumn(id);
    }
    /**
     * Register a printable representation for a column (used on export).
     */
    @Override
    public void addPrintable(String columnId, Printable printable) {
        printables.put(columnId, printable);
    }
    /**
     * Remove the printable representation registered for a column.
     */
    @Override
    public void removePrintable(String columnId) {
        printables.remove(columnId);
    }
    /**
     * @return the printable for the given column, or {@code null} if none
     */
    @Override
    @Nullable
    public Printable getPrintable(Table.Column column) {
        return getPrintable(String.valueOf(column.getId()));
    }
    /**
     * Look up a printable: first among explicitly registered printables, then
     * falls back to the column's generator if it implements Printable itself.
     *
     * @return the printable, or {@code null} if none is available
     */
    @Nullable
    @Override
    public Printable getPrintable(String columnId) {
        Printable printable = printables.get(columnId);
        if (printable != null) {
            return printable;
        } else {
            com.vaadin.ui.Table.ColumnGenerator vColumnGenerator = component.getColumnGenerator(columnId);
            if (vColumnGenerator instanceof CustomColumnGenerator) {
                ColumnGenerator columnGenerator = ((CustomColumnGenerator) vColumnGenerator).getColumnGenerator();
                if (columnGenerator instanceof Printable)
                    return (Printable) columnGenerator;
            }
            return null;
        }
    }
    /**
     * @return whether the table is currently in editable mode
     */
    @Override
    public boolean isEditable() {
        return editable;
    }
    /**
     * Switch editable mode. Non-editable generated columns are preserved
     * across the column refresh; content refreshing is suspended while the
     * columns are rebuilt to avoid intermediate repaints.
     */
    @Override
    public void setEditable(boolean editable) {
        if (this.editable != editable) {
            this.editable = editable;
            component.disableContentRefreshing();
            if (datasource != null) {
                com.vaadin.data.Container ds = component.getContainerDataSource();
                @SuppressWarnings("unchecked")
                final Collection<MetaPropertyPath> propertyIds = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
                // added generated columns
                final List<Pair<Object, com.vaadin.ui.Table.ColumnGenerator>> columnGenerators = new LinkedList<>();
                Object[] visibleColumns = component.getVisibleColumns();
                for (final MetaPropertyPath id : propertyIds) {
                    if (ArrayUtils.contains(visibleColumns, id)) {
                        final Table.Column column = getColumn(id.toString());
                        // save generators only for non editable columns
                        if (!column.isEditable()) {
                            com.vaadin.ui.Table.ColumnGenerator generator = component.getColumnGenerator(id);
                            if (generator != null && !(generator instanceof WebAbstractTable.SystemTableColumnGenerator)) {
                                columnGenerators.add(new Pair<Object, com.vaadin.ui.Table.ColumnGenerator>(id, generator));
                            }
                        }
                    }
                }
                refreshColumns(ds);
                // restore generated columns
                for (Pair<Object, com.vaadin.ui.Table.ColumnGenerator> generatorEntry : columnGenerators) {
                    component.addGeneratedColumn(generatorEntry.getFirst(), generatorEntry.getSecond());
                }
            }
            component.setEditable(editable);
            component.enableContentRefreshing(true);
        }
    }
    // pass the set of editable columns down to the Vaadin table
    protected void setEditableColumns(List<MetaPropertyPath> editableColumns) {
        component.setEditableColumns(editableColumns.toArray());
    }
    /**
     * @return whether sorting is enabled (inverse of Vaadin's "sort disabled")
     */
    @Override
    public boolean isSortable() {
        return !component.isSortDisabled();
    }
    @Override
    public void setSortable(boolean sortable) {
        component.setSortDisabled(!sortable);
    }
    @Override
    public void setColumnReorderingAllowed(boolean columnReorderingAllowed) {
        component.setColumnReorderingAllowed(columnReorderingAllowed);
    }
    @Override
    public boolean getColumnReorderingAllowed() {
        return component.isColumnReorderingAllowed();
    }
    // "column control" maps to Vaadin's column collapsing feature
    @Override
    public void setColumnControlVisible(boolean columnCollapsingAllowed) {
        component.setColumnCollapsingAllowed(columnCollapsingAllowed);
    }
    @Override
    public boolean getColumnControlVisible() {
        return component.isColumnCollapsingAllowed();
    }
    /**
     * Sort by the given container property; no-op when sorting is disabled.
     */
    @Override
    public void sortBy(Object propertyId, boolean ascending) {
        if (isSortable()) {
            component.setSortAscending(ascending);
            component.setSortContainerPropertyId(propertyId);
            component.sort();
        }
    }
    /**
     * @return the rows-count component, or {@code null} if none is set
     */
    @Override
    public RowsCount getRowsCount() {
        return rowsCount;
    }
    /**
     * Replace the rows-count component shown in the top panel, lazily creating
     * the top panel on first use.
     */
    @Override
    public void setRowsCount(RowsCount rowsCount) {
        if (this.rowsCount != null && topPanel != null) {
            // detach the previous rows-count component
            topPanel.removeComponent(WebComponentsHelper.unwrap(this.rowsCount));
        }
        this.rowsCount = rowsCount;
        if (rowsCount != null) {
            if (topPanel == null) {
                topPanel = new HorizontalLayout();
                topPanel.setWidth("100%");
                componentComposition.addComponentAsFirst(topPanel);
            }
            Component rc = WebComponentsHelper.unwrap(rowsCount);
            topPanel.addComponent(rc);
            topPanel.setExpandRatio(rc, 1);
            topPanel.setComponentAlignment(rc, com.vaadin.ui.Alignment.BOTTOM_RIGHT);
        }
    }
    // simple delegations to the underlying Vaadin table
    @Override
    public boolean isAllowMultiStringCells() {
        return component.isAllowMultiStringCells();
    }
    @Override
    public void setAllowMultiStringCells(boolean value) {
        component.setAllowMultiStringCells(value);
    }
    @Override
    public boolean isAggregatable() {
        return component.isAggregatable();
    }
    @Override
    public void setAggregatable(boolean aggregatable) {
        component.setAggregatable(aggregatable);
    }
    @Override
    public void setShowTotalAggregation(boolean showAggregation) {
        component.setShowTotalAggregation(showAggregation);
    }
    @Override
    public boolean isShowTotalAggregation() {
        return component.isShowTotalAggregation();
    }
    /**
     * @return the outer composition layout (table plus optional top panel)
     */
    @Override
    public Component getComposition() {
        return componentComposition;
    }
    // size is managed on the composition layout, not the table itself,
    // so the optional top panel is included in the component's size
    @Override
    public float getHeight() {
        return componentComposition.getHeight();
    }
    @Override
    public int getHeightUnits() {
        return componentComposition.getHeightUnits();
    }
    @Override
    public void setHeight(String height) {
        componentComposition.setHeight(height);
    }
    @Override
    public float getWidth() {
        return componentComposition.getWidth();
    }
    @Override
    public void setWidth(String width) {
        componentComposition.setWidth(width);
    }
    @Override
    public int getWidthUnits() {
        return componentComposition.getWidthUnits();
    }
@Override
public void setStyleName(String name) {
this.customStyle = name;
String style = REQUIRED_TABLE_STYLE;
if (StringUtils.isNotEmpty(name))
style += " " + name;
super.setStyleName(style);
}
@Override
public String getStyleName() {
return customStyle;
}
    /**
     * Configure the underlying Vaadin table: selection behavior, listeners
     * (value change, Enter key, double-click, column collapse), field factory,
     * the surrounding composition layout, and keyboard shortcuts for the
     * create/remove list actions.
     */
    protected void initComponent(T component) {
        component.setMultiSelect(false);
        component.setNullSelectionAllowed(false);
        component.setImmediate(true);
        component.setValidationVisible(false);
        component.setStoreColWidth(true);
        component.setStyleName(REQUIRED_TABLE_STYLE); //It helps us to manage a caption style
        component.setPageLength(15);
        component.addActionHandler(new ActionsAdapter());
        // keep the datasource's current item in sync with the table selection
        component.addListener(new Property.ValueChangeListener() {
            @Override
            @SuppressWarnings("unchecked")
            public void valueChange(Property.ValueChangeEvent event) {
                if (datasource == null) return;
                final Set<Entity> selected = getSelected();
                // disabled for #PL-2035
                // disableItemListener = true;
                if (selected.isEmpty()) {
                    datasource.setItem(null);
                } else {
                    // reset selection and select new item
                    if (isMultiSelect())
                        datasource.setItem(null);
                    datasource.setItem(selected.iterator().next());
                }
                // disabled for #PL-2035
                // disableItemListener = false;
            }
        });
        // Enter key: run the explicit enter-press action, else the click action
        component.addShortcutListener(new ShortcutListener("tableEnter", com.vaadin.event.ShortcutAction.KeyCode.ENTER, null) {
            @Override
            public void handleAction(Object sender, Object target) {
                if (enterPressAction != null) {
                    enterPressAction.actionPerform(WebAbstractTable.this);
                } else {
                    handleClickAction();
                }
            }
        });
        // double-click on an item triggers the click action
        component.addListener(new ItemClickEvent.ItemClickListener() {
            @Override
            public void itemClick(ItemClickEvent event) {
                if (event.isDoubleClick() && event.getItem() != null) {
                    handleClickAction();
                }
            }
        });
        // forward column collapse events to registered listeners
        component.addColumnCollapseListener(new com.haulmont.cuba.web.toolkit.ui.Table.CollapseListener() {
            @Override
            public void columnCollapsed(Object columnId, boolean collapsed) {
                final Column collapsedColumn = getColumn(columnId.toString());
                for (ColumnCollapseListener listener : columnCollapseListeners) {
                    listener.columnCollapsed(collapsedColumn, collapsed);
                }
            }
        });
        component.setSelectable(true);
        component.setTableFieldFactory(new WebTableFieldFactory());
        component.setColumnCollapsingAllowed(true);
        component.setColumnReorderingAllowed(true);
        setEditable(false);
        // wrap the table in the composition layout that also hosts the top panel
        componentComposition = new CompositionLayout(component);
        componentComposition.setSpacing(true);
        componentComposition.setMargin(false);
        componentComposition.setWidth("-1px");
        component.setSizeFull();
        componentComposition.setExpandRatio(component, 1);
        // bind create/remove shortcuts from the client configuration
        ClientConfig clientConfig = AppBeans.get(Configuration.class).getConfig(ClientConfig.class);
        addShortcutActionBridge(INSERT_SHORTCUT_ID, clientConfig.getTableInsertShortcut(), ListActionType.CREATE);
        addShortcutActionBridge(REMOVE_SHORTCUT_ID, clientConfig.getTableRemoveShortcut(), ListActionType.REMOVE);
    }
    /**
     * Connect shortcut action to default list action. The shortcut only fires
     * when the table itself is the event target, and only if the bound list
     * action exists and is enabled at that moment.
     *
     * @param shortcutActionId Shortcut action id
     * @param keyCombination Keys
     * @param defaultAction List action
     */
    protected void addShortcutActionBridge(String shortcutActionId, String keyCombination,
                                           final ListActionType defaultAction) {
        KeyCombination actionKeyCombination = KeyCombination.create(keyCombination);
        component.addShortcutListener(new ShortcutListener(shortcutActionId, actionKeyCombination.getKey().getCode(),
                KeyCombination.Modifier.codes(actionKeyCombination.getModifiers())) {
            @Override
            public void handleAction(Object sender, Object target) {
                if (target == component) {
                    Action listAction = getAction(defaultAction.getId());
                    if (listAction != null && listAction.isEnabled())
                        listAction.actionPerform(WebAbstractTable.this);
                }
            }
        });
    }
    /**
     * Handle a double-click / Enter press: runs the configured item-click
     * action, falling back to "edit" then "view". Inside a lookup window where
     * this table is the lookup component, only the special lookup item-click
     * action is allowed to run (a double-click there means "select").
     */
    protected void handleClickAction() {
        Action action = getItemClickAction();
        if (action == null) {
            // fall back to standard actions
            action = getAction("edit");
            if (action == null) {
                action = getAction("view");
            }
        }
        if (action != null && action.isEnabled()) {
            Window window = ComponentsHelper.getWindow(WebAbstractTable.this);
            if (window instanceof Window.Wrapper)
                window = ((Window.Wrapper) window).getWrappedWindow();
            if (!(window instanceof Window.Lookup)) {
                action.actionPerform(WebAbstractTable.this);
            } else {
                Window.Lookup lookup = (Window.Lookup) window;
                com.haulmont.cuba.gui.components.Component lookupComponent = lookup.getLookupComponent();
                if (lookupComponent != this)
                    action.actionPerform(WebAbstractTable.this);
                else if (action.getId().equals(WindowDelegate.LOOKUP_ITEM_CLICK_ACTION_ID)) {
                    action.actionPerform(WebAbstractTable.this);
                }
            }
        }
    }
    /**
     * Install generated columns for the container's properties based on each
     * column's configuration (clickAction, boolean rendering, calculatable,
     * max text length). Click-action generators are suppressed in lookup
     * windows. Returns the container's property ids.
     */
    protected Collection<MetaPropertyPath> createColumns(com.vaadin.data.Container ds) {
        @SuppressWarnings({"unchecked"})
        final Collection<MetaPropertyPath> properties = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
        Window window = ComponentsHelper.getWindow(this);
        boolean isLookup = window instanceof Window.Lookup;
        for (MetaPropertyPath propertyPath : properties) {
            final Table.Column column = columns.get(propertyPath);
            // skip columns that are being edited in place
            if (column != null && !(editable && BooleanUtils.isTrue(column.isEditable()))) {
                final String clickAction =
                        column.getXmlDescriptor() == null ?
                                null : column.getXmlDescriptor().attributeValue("clickAction");
                if (propertyPath.getRange().isClass()) {
                    // reference attribute: render as link when a clickAction is set
                    if (!isLookup && !StringUtils.isEmpty(clickAction)) {
                        addGeneratedColumn(propertyPath, new ReadOnlyAssociationGenerator(column));
                    }
                } else if (propertyPath.getRange().isDatatype()) {
                    if (!isLookup && !StringUtils.isEmpty(clickAction)) {
                        addGeneratedColumn(propertyPath, new CodePropertyGenerator(column));
                    } else if (editable && BooleanUtils.isTrue(column.isCalculatable())) {
                        addGeneratedColumn(propertyPath, new CalculatableColumnGenerator());
                    } else {
                        final Datatype datatype = propertyPath.getRange().asDatatype();
                        if (BooleanDatatype.NAME.equals(datatype.getName()) && column.getFormatter() == null) {
                            // render booleans as read-only checkboxes unless a formatter is set
                            addGeneratedColumn(propertyPath, new ReadOnlyBooleanDatatypeGenerator());
                        } else if (column.getMaxTextLength() != null) {
                            addGeneratedColumn(propertyPath, new AbbreviatedColumnGenerator(column));
                        }
                    }
                } else if (propertyPath.getRange().isEnum()) {
                    // TODO (abramov)
                } else {
                    throw new UnsupportedOperationException();
                }
            }
        }
        return properties;
    }
/**
 * Drops all generated columns, recomputes the set of editable columns
 * according to the current editable state, and re-creates generated
 * columns for the container. Used when editability or column config changes.
 *
 * @param ds Vaadin container backing the table
 */
protected void refreshColumns(com.vaadin.data.Container ds) {
    @SuppressWarnings({"unchecked"})
    final Collection<MetaPropertyPath> propertyIds = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
    // Remove every generated column first; createColumns(ds) re-adds them below.
    for (final MetaPropertyPath id : propertyIds) {
        removeGeneratedColumn(id);
    }
    if (isEditable()) {
        final List<MetaPropertyPath> editableColumns = new ArrayList<>(propertyIds.size());
        for (final MetaPropertyPath propertyId : propertyIds) {
            final Table.Column column = getColumn(propertyId.toString());
            if (BooleanUtils.isTrue(column.isEditable())) {
                editableColumns.add(propertyId);
            }
        }
        if (!editableColumns.isEmpty()) {
            setEditableColumns(editableColumns);
        }
    } else {
        // Not editable: clear the editable column set entirely.
        setEditableColumns(Collections.<MetaPropertyPath>emptyList());
    }
    createColumns(ds);
}
@Override
public void setDatasource(CollectionDatasource datasource) {
    // Binds the table to a collection datasource:
    //  - auto-creates columns from the datasource view when none are configured,
    //  - wraps the datasource in a Vaadin container,
    //  - applies security to editable columns, collapsing, and aggregation,
    //  - wires listeners that keep cached field datasources and the selection in sync.
    UserSessionSource uss = AppBeans.get(UserSessionSource.NAME);
    UserSession userSession = uss.getUserSession();
    MetadataTools metadataTools = AppBeans.get(MetadataTools.class);
    final Collection<Object> columns;
    if (this.columns.isEmpty()) {
        // No columns configured: derive them from the datasource view,
        // skipping collection properties and system attributes.
        Collection<MetaPropertyPath> paths = metadataTools.getViewPropertyPaths(datasource.getView(), datasource.getMetaClass());
        for (MetaPropertyPath metaPropertyPath : paths) {
            MetaProperty property = metaPropertyPath.getMetaProperty();
            if (!property.getRange().getCardinality().isMany() && !metadataTools.isSystem(property)) {
                Table.Column column = new Table.Column(metaPropertyPath);
                column.setCaption(AppBeans.get(MessageTools.class).getPropertyCaption(property));
                column.setType(metaPropertyPath.getRangeJavaClass());
                Element element = DocumentHelper.createElement("column");
                column.setXmlDescriptor(element);
                addColumn(column);
            }
        }
    }
    columns = this.columns.keySet();
    this.datasource = datasource;
    // drop cached datasources for components before update table cells on client
    datasource.addListener(new CollectionDsListenerAdapter<Entity>() {
        @Override
        public void collectionChanged(CollectionDatasource ds, Operation operation, List<Entity> items) {
            switch (operation) {
                case CLEAR:
                case REFRESH:
                    fieldDatasources.clear();
                    break;
                case UPDATE:
                case REMOVE:
                    for (Entity entity : items) {
                        fieldDatasources.remove(entity);
                    }
                    break;
            }
        }
    });
    final CollectionDsWrapper containerDatasource = createContainerDatasource(datasource, getPropertyColumns());
    component.setContainerDataSource(containerDatasource);
    if (columns == null) {
        throw new NullPointerException("Columns cannot be null");
    }
    List<MetaPropertyPath> editableColumns = null;
    if (isEditable()) {
        editableColumns = new LinkedList<>();
    }
    for (final Object columnId : columns) {
        final Table.Column column = this.columns.get(columnId);
        final String caption;
        if (column != null) {
            caption = StringUtils.capitalize(column.getCaption() != null ? column.getCaption() : getColumnCaption(columnId));
        } else {
            caption = StringUtils.capitalize(getColumnCaption(columnId));
        }
        setColumnHeader(columnId, caption);
        if (column != null) {
            // A column is editable only if the user also has MODIFY permission
            // on the underlying entity attribute.
            if (editableColumns != null && column.isEditable() && (columnId instanceof MetaPropertyPath)) {
                MetaProperty colMetaProperty = ((MetaPropertyPath) columnId).getMetaProperty();
                MetaClass colMetaClass = colMetaProperty.getDomain();
                if (userSession.isEntityAttrPermitted(colMetaClass, colMetaProperty.getName(), EntityAttrAccess.MODIFY)) {
                    editableColumns.add((MetaPropertyPath) column.getId());
                }
            }
            if (column.isCollapsed() && component.isColumnCollapsingAllowed()) {
                component.setColumnCollapsed(column.getId(), true);
            }
            if (column.getAggregation() != null && isAggregatable()) {
                component.addContainerPropertyAggregation(column.getId(),
                        WebComponentsHelper.convertAggregationType(column.getAggregation().getType()));
            }
        }
    }
    if (editableColumns != null && !editableColumns.isEmpty()) {
        setEditableColumns(editableColumns);
    }
    createColumns(containerDatasource);
    for (Table.Column column : this.columnsOrder) {
        if (editable && column.getAggregation() != null
                && (BooleanUtils.isTrue(column.isEditable()) || BooleanUtils.isTrue(column.isCalculatable())))
        {
            addAggregationCell(column);
        }
    }
    if (aggregationCells != null) {
        getDatasource().addListener(createAggregationDatasourceListener());
    }
    setVisibleColumns(getPropertyColumns());
    if (userSession.isSpecificPermitted(ShowInfoAction.ACTION_PERMISSION)) {
        // System-info action, added only if permitted for the current user.
        ShowInfoAction action = (ShowInfoAction) getAction(ShowInfoAction.ACTION_ID);
        if (action == null) {
            action = new ShowInfoAction();
            addAction(action);
        }
        action.setDatasource(datasource);
    }
    if (rowsCount != null) {
        rowsCount.setDatasource(datasource);
    }
    datasource.addListener(new CollectionDsActionsNotifier(this){
        @Override
        public void collectionChanged(CollectionDatasource ds, Operation operation, List<Entity> items) {
            // #PL-2035, reload selection from ds
            Set<Object> selectedItemIds = getSelectedItemIds();
            if (selectedItemIds == null) {
                selectedItemIds = Collections.emptySet();
            }
            Set<Object> newSelection = new HashSet<>();
            for (Object entityId : selectedItemIds) {
                if (ds.containsItem(entityId)) {
                    newSelection.add(entityId);
                }
            }
            if (newSelection.isEmpty()) {
                setSelected((Entity) null);
            } else {
                setSelectedIds(newSelection);
            }
        }
    });
    // noinspection unchecked
    // disabled for #PL-2035
    // datasource.addListener(new CollectionDsActionsNotifier(this) {
    //     @Override
    //     public void itemChanged(Datasource ds, Entity prevItem, Entity item) {
    //         super.itemChanged(ds, prevItem, item);
    //
    //         if (!disableItemListener && !getSelected().contains(item)) {
    //             setSelected(item);
    //         }
    //     }
    // });
    // NOTE(review): a CollectionDsActionsNotifier subclass was already registered
    // above; this second registration means actions may be notified twice per
    // collection change — verify this duplication is intended.
    datasource.addListener(new CollectionDsActionsNotifier(this));
    for (Action action : getActions()) {
        action.refreshState();
    }
}
/**
 * Derives a default caption for a column id: the property name for
 * MetaPropertyPath ids, otherwise the id's string representation.
 */
private String getColumnCaption(Object columnId) {
    if (!(columnId instanceof MetaPropertyPath)) {
        return columnId.toString();
    }
    return ((MetaPropertyPath) columnId).getMetaProperty().getName();
}
/**
 * Collects the property-backed columns (ids of type MetaPropertyPath) the
 * current user is allowed to see, filtering by entity READ permission and
 * attribute VIEW permission.
 *
 * @return permitted property paths in configured column order
 */
private List<MetaPropertyPath> getPropertyColumns() {
    UserSession userSession = UserSessionProvider.getUserSession();
    List<MetaPropertyPath> result = new ArrayList<>();
    for (Column column : columnsOrder) {
        if (column.getId() instanceof MetaPropertyPath) {
            MetaProperty colMetaProperty = ((MetaPropertyPath) column.getId()).getMetaProperty();
            MetaClass colMetaClass = colMetaProperty.getDomain();
            // Both the entity and the specific attribute must be readable.
            if (userSession.isEntityOpPermitted(colMetaClass, EntityOp.READ)
                    && userSession.isEntityAttrPermitted(
                    colMetaClass, colMetaProperty.getName(), EntityAttrAccess.VIEW)) {
                result.add((MetaPropertyPath)column.getId());
            }
        }
    }
    return result;
}
/**
 * Creates the Vaadin container wrapper around the CUBA datasource;
 * concrete table/tree-table subclasses supply the implementation.
 *
 * @param datasource CUBA collection datasource
 * @param columns    property columns to expose as container properties
 */
protected abstract CollectionDsWrapper createContainerDatasource(CollectionDatasource datasource,
                                                                 Collection<MetaPropertyPath> columns);

/** Pushes the given column order/visibility down to the Vaadin table. */
protected void setVisibleColumns(List<?> columnsOrder) {
    component.setVisibleColumns(columnsOrder.toArray());
}

/** Sets the header caption of one column on the Vaadin table. */
protected void setColumnHeader(Object columnId, String caption) {
    component.setColumnHeader(columnId, caption);
}
@Override
public void setRowHeaderMode(com.haulmont.cuba.gui.components.Table.RowHeaderMode rowHeaderMode) {
    // Translate the generic row-header mode to the legacy Vaadin table constant.
    // Only NONE and ICON are supported by this implementation.
    switch (rowHeaderMode) {
        case NONE: {
            component.setRowHeaderMode(com.vaadin.ui.Table.ROW_HEADER_MODE_HIDDEN);
            break;
        }
        case ICON: {
            component.setRowHeaderMode(com.vaadin.ui.Table.ROW_HEADER_MODE_ICON_ONLY);
            break;
        }
        default: {
            throw new UnsupportedOperationException();
        }
    }
}
@Override
public void setRequired(Table.Column column, boolean required, String message) {
    // Maintain the required-columns registry: a required column is stored
    // together with its validation message; clearing the flag removes any
    // previous registration for the column.
    if (!required) {
        requiredColumns.remove(column);
    } else {
        requiredColumns.put(column, message);
    }
}
@Override
public void addValidator(Table.Column column, final com.haulmont.cuba.gui.components.Field.Validator validator) {
    // Per-column validators, created lazily on first registration.
    Set<com.haulmont.cuba.gui.components.Field.Validator> validators = validatorsMap.get(column);
    if (validators == null) {
        validators = new HashSet<>();
        validatorsMap.put(column, validators);
    }
    validators.add(validator);
}

@Override
public void addValidator(final com.haulmont.cuba.gui.components.Field.Validator validator) {
    // Table-level validator, applied to the selection as a whole.
    tableValidators.add(validator);
}

/**
 * Runs all table-level validators against the current selection.
 *
 * @throws ValidationException if any validator rejects the selected items
 */
public void validate() throws ValidationException {
    for (com.haulmont.cuba.gui.components.Field.Validator tableValidator : tableValidators) {
        tableValidator.validate(getSelected());
    }
}
/**
 * Installs a style provider that computes a CSS style name per cell/row.
 * Passing {@code null} removes any previously installed cell style generator.
 */
@Override
public void setStyleProvider(final Table.StyleProvider styleProvider) {
    this.styleProvider = styleProvider;
    if (styleProvider == null) {
        component.setCellStyleGenerator(null);
        return;
    }
    component.setCellStyleGenerator(new com.vaadin.ui.Table.CellStyleGenerator() {
        @Override // added for consistency with the rest of the file's anonymous impls
        public String getStyle(Object itemId, Object propertyId) {
            @SuppressWarnings({"unchecked"})
            final Entity item = datasource.getItem(itemId);
            // A null propertyId denotes a row-level (not cell-level) style request.
            return styleProvider.getStyleName(item, propertyId == null ? null : propertyId.toString());
        }
    });
}
@Override
public void setIconProvider(IconProvider iconProvider) {
    // The legacy web module intentionally ignores icon providers.
    LogFactory.getLog(WebAbstractTable.class).warn("Legacy web module does not support icons for tables");
}

// For vaadin component extensions.
// NOTE(review): setIconProvider above never assigns the iconProvider field,
// so in this class this method only returns non-null if the field is set
// elsewhere (e.g. a subclass) — verify against the full class.
protected Resource getItemIcon(Object itemId) {
    if (iconProvider == null) {
        return null;
    }
    // noinspection unchecked
    Entity item = datasource.getItem(itemId);
    if (item == null) {
        return null;
    }
    // noinspection unchecked
    String resourceUrl = iconProvider.getItemIcon(item);
    if (StringUtils.isBlank(resourceUrl)) {
        return null;
    }
    // noinspection ConstantConditions
    // Bare resource names default to the theme resource scheme.
    if (!resourceUrl.contains(":")) {
        resourceUrl = "theme:" + resourceUrl;
    }
    return WebComponentsHelper.getResource(resourceUrl);
}
@Override
public int getRowHeaderWidth() {
    // CAUTION: vaadin considers null as row header property id;
    return component.getColumnWidth(null);
}

@Override
public void setRowHeaderWidth(int width) {
    // CAUTION: vaadin considers null as row header property id;
    component.setColumnWidth(null, width);
}
/**
 * Restores user-specific presentation settings (column order, widths,
 * visibility, sorting) from the given XML element. Columns present in the
 * table but absent from the settings are appended at the end; a table with
 * a single column is always shown expanded.
 *
 * @param element settings root; expected to contain a {@code columns} child
 */
@Override
public void applySettings(Element element) {
    final Element columnsElem = element.element("columns");
    if (columnsElem != null) {
        Object[] oldColumns = component.getVisibleColumns();
        List<Object> newColumns = new ArrayList<>();
        // add columns from saved settings
        for (Element colElem : Dom4j.elements(columnsElem, "columns")) {
            for (Object column : oldColumns) {
                if (column.toString().equals(colElem.attributeValue("id"))) {
                    newColumns.add(column);
                    String width = colElem.attributeValue("width");
                    if (width != null)
                        component.setColumnWidth(column, Integer.parseInt(width)); // parseInt: avoid boxing, same semantics
                    String visible = colElem.attributeValue("visible");
                    if (visible != null) {
                        if (component.isColumnCollapsingAllowed()) { // throws exception if not
                            component.setColumnCollapsed(column, !Boolean.parseBoolean(visible)); // parseBoolean: avoid boxing
                        }
                    }
                    break;
                }
            }
        }
        // add columns not saved in settings (perhaps new)
        for (Object column : oldColumns) {
            if (!newColumns.contains(column)) {
                newColumns.add(column);
            }
        }
        // if the table contains only one column, always show it
        if (newColumns.size() == 1) {
            if (component.isColumnCollapsingAllowed()) { // throws exception if not
                component.setColumnCollapsed(newColumns.get(0), false);
            }
        }
        component.setVisibleColumns(newColumns.toArray());
        if (isSortable()) {
            //apply sorting
            String sortProp = columnsElem.attributeValue("sortProperty");
            if (!StringUtils.isEmpty(sortProp)) {
                MetaPropertyPath sortProperty = datasource.getMetaClass().getPropertyPath(sortProp);
                if (newColumns.contains(sortProperty)) {
                    boolean sortAscending = BooleanUtils.toBoolean(columnsElem.attributeValue("sortAscending"));
                    // Reset the sort column first so re-applying the same
                    // property with a new direction takes effect.
                    component.setSortContainerPropertyId(null);
                    component.setSortAscending(sortAscending);
                    component.setSortContainerPropertyId(sortProperty);
                }
            } else {
                component.setSortContainerPropertyId(null);
            }
        }
    }
}
@Override
public boolean isAllowPopupMenu() {
    // todo not yet implemented
    return false;
}

@Override
public void setAllowPopupMenu(boolean value) {
    // todo not yet implemented
}
/**
 * Serializes the table's current presentation (column ids, widths,
 * visibility, sort state) into the given settings element, replacing any
 * previously saved {@code columns} child.
 *
 * @return always {@code true} (settings were written)
 */
@Override
public boolean saveSettings(Element element) {
    Element columnsElem = element.element("columns");
    if (columnsElem != null)
        element.remove(columnsElem);
    columnsElem = element.addElement("columns");
    Object[] visibleColumns = component.getVisibleColumns();
    for (Object column : visibleColumns) {
        Element colElem = columnsElem.addElement("columns");
        colElem.addAttribute("id", column.toString());
        int width = component.getColumnWidth(column);
        if (width > -1)
            colElem.addAttribute("width", String.valueOf(width));
        // String.valueOf(boolean): no boxed Boolean intermediary needed.
        colElem.addAttribute("visible", String.valueOf(!component.isColumnCollapsed(column)));
    }
    MetaPropertyPath sortProperty = (MetaPropertyPath) component.getSortContainerPropertyId();
    if (sortProperty != null) {
        columnsElem.addAttribute("sortProperty", sortProperty.toString());
        columnsElem.addAttribute("sortAscending", String.valueOf(component.isSortAscending()));
    }
    return true;
}
@Override
public void setEnterPressAction(Action action) {
    // Action fired when Enter is pressed on a table row.
    enterPressAction = action;
}

@Override
public Action getEnterPressAction(){
    return enterPressAction;
}
/**
 * Replaces the item-click action: the previous one is detached from the
 * table's action list and the new one is registered if not already present.
 */
@Override
public void setItemClickAction(Action action) {
    Action previous = itemClickAction;
    if (previous != null) {
        removeAction(previous);
    }
    itemClickAction = action;
    if (!getActions().contains(action)) {
        addAction(action);
    }
}
@Override
public Action getItemClickAction() {
    return itemClickAction;
}

/** Delegates the caption to the wrapped Vaadin table. */
public String getCaption() {
    return component.getCaption();
}

public void setCaption(String caption) {
    component.setCaption(caption);
}

@Override
public void setMultiSelect(boolean multiselect) {
    // Null selection is toggled together with multi-select mode;
    // NOTE(review): presumably so the selection can be cleared in
    // multi-select — confirm against Vaadin Table semantics.
    component.setNullSelectionAllowed(multiselect);
    super.setMultiSelect(multiselect);
}

@Override
public ButtonsPanel getButtonsPanel() {
    return buttonsPanel;
}
@Override
public void setButtonsPanel(ButtonsPanel panel) {
    // Swap the buttons panel hosted in the top layout; the top layout itself
    // is created lazily the first time a panel is installed.
    if (buttonsPanel != null && topPanel != null) {
        topPanel.removeComponent(WebComponentsHelper.unwrap(buttonsPanel));
    }
    buttonsPanel = panel;
    if (panel != null) {
        if (topPanel == null) {
            topPanel = new HorizontalLayout();
            topPanel.setWidth("100%");
            componentComposition.addComponentAsFirst(topPanel);
        }
        topPanel.addComponent(WebComponentsHelper.unwrap(panel));
    }
}
@Override
public void addGeneratedColumn(String columnId, ColumnGenerator generator) {
    // Registers a generated column: entity-property ids are normalized to
    // MetaPropertyPath; any existing generator for the same id is replaced.
    if (columnId == null)
        throw new IllegalArgumentException("columnId is null");
    if (generator == null)
        throw new IllegalArgumentException("generator is null");
    MetaPropertyPath targetCol = getDatasource().getMetaClass().getPropertyPath(columnId);
    Object generatedColumnId = targetCol != null ? targetCol : columnId;
    // replace generator for column if exist
    if (component.getColumnGenerator(generatedColumnId) != null)
        component.removeGeneratedColumn(generatedColumnId);
    component.addGeneratedColumn(
            generatedColumnId,
            new CustomColumnGenerator(generator) {
                @Override
                public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
                    Entity entity = getDatasource().getItem(itemId);
                    com.haulmont.cuba.gui.components.Component component = getColumnGenerator().generateCell(entity);
                    if (component == null)
                        return null;
                    else {
                        Component vComponent = WebComponentsHelper.unwrap(component);
                        // wrap field for show required asterisk
                        if ((vComponent instanceof com.vaadin.ui.Field)
                                && (((com.vaadin.ui.Field) vComponent).isRequired())) {
                            VerticalLayout layout = new VerticalLayout();
                            layout.addComponent(vComponent);
                            vComponent = layout;
                        }
                        return vComponent;
                    }
                }
            }
    );
}

@Override
public void addGeneratedColumn(String columnId, ColumnGenerator generator,
                               Class<? extends com.haulmont.cuba.gui.components.Component> componentClass) {
    // web ui doesn't make any improvements with componentClass known
    addGeneratedColumn(columnId, generator);
}
@Override
public void removeGeneratedColumn(String columnId) {
    // Entity-property columns are keyed by MetaPropertyPath, ad-hoc generated
    // columns by their string id — mirror the normalization in addGeneratedColumn.
    MetaPropertyPath targetCol = getDatasource().getMetaClass().getPropertyPath(columnId);
    removeGeneratedColumn(targetCol == null ? columnId : targetCol);
}
/**
 * {@inheritDoc}
 *
 * Rebuilds generated columns (preserving user-registered generators across
 * the rebuild) and forces a full client-side repaint of the table.
 */
@Override
public void repaint() {
    if (datasource != null) {
        com.vaadin.data.Container ds = component.getContainerDataSource();
        final Collection<MetaPropertyPath> propertyIds = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
        // added generated columns
        // Remember non-system generators so they survive refreshColumns(),
        // which removes and re-creates the system-generated columns.
        final List<Pair<Object, com.vaadin.ui.Table.ColumnGenerator>> columnGenerators = new LinkedList<>();
        for (final MetaPropertyPath id : propertyIds) {
            com.vaadin.ui.Table.ColumnGenerator generator = component.getColumnGenerator(id);
            if (generator != null && !(generator instanceof WebAbstractTable.SystemTableColumnGenerator)) {
                columnGenerators.add(new Pair<Object, com.vaadin.ui.Table.ColumnGenerator>(id, generator));
            }
        }
        refreshColumns(ds);
        // restore generated columns
        for (Pair<Object, com.vaadin.ui.Table.ColumnGenerator> generatorEntry : columnGenerators) {
            component.addGeneratedColumn(generatorEntry.getFirst(), generatorEntry.getSecond());
        }
    }
    component.requestRepaintAll();
}
@Override
public void selectAll() {
    // Selecting all rows is only meaningful (and only supported) in multi-select mode.
    if (isMultiSelect()) {
        component.setValue(component.getItemIds());
    }
}
/**
 * Computes aggregation values for the container's aggregatable columns by
 * delegating to the aggregatable datasource, then renders the results into
 * the cached aggregation cells when present.
 *
 * @param container aggregation-capable Vaadin container
 * @param context   carries the item ids participating in the aggregation
 * @return column id -> aggregated value (or rendering component)
 */
protected Map<Object, Object> __aggregate(AggregationContainer container, AggregationContainer.Context context) {
    final List<AggregationInfo> aggregationInfos = new LinkedList<>();
    for (final Object o : container.getAggregationPropertyIds()) {
        final MetaPropertyPath propertyId = (MetaPropertyPath) o;
        final Table.Column column = columns.get(propertyId);
        if (column.getAggregation() != null) {
            aggregationInfos.add(column.getAggregation());
        }
    }
    Map<Object, Object> results = ((CollectionDatasource.Aggregatable) datasource).aggregate(
            aggregationInfos.toArray(new AggregationInfo[aggregationInfos.size()]),
            context.getItemIds()
    );
    if (aggregationCells != null) {
        results = __handleAggregationResults(context, results);
    }
    return results;
}

/**
 * Writes aggregation results into the pre-created Label cells (applying the
 * column formatter) and substitutes the Label for the raw value in the map.
 */
protected Map<Object, Object> __handleAggregationResults(AggregationContainer.Context context, Map<Object, Object> results) {
    for (final Map.Entry<Object, Object> entry : results.entrySet()) {
        final Table.Column column = columns.get(entry.getKey());
        com.vaadin.ui.Label cell;
        if ((cell = (com.vaadin.ui.Label) aggregationCells.get(column)) != null) {
            WebComponentsHelper.setLabelText(cell, entry.getValue(), column.getFormatter());
            entry.setValue(cell);
        }
    }
    return results;
}
/**
 * Property wrapper used for table cells: integrates column editability and
 * calculatable-property listeners, and renders values through the column
 * formatter or a configured {@code captionProperty}.
 */
protected class TablePropertyWrapper extends PropertyWrapper {
    // The single listener installed for a calculatable property (see addListener).
    private ValueChangeListener calcListener;
    private static final long serialVersionUID = -7942046867909695346L;
    public TablePropertyWrapper(Object item, MetaPropertyPath propertyPath) {
        super(item, propertyPath);
    }
    @Override
    public void addListener(ValueChangeListener listener) {
        super.addListener(listener);
        //A listener of a calculatable property must be only one
        if (listener instanceof CalculatablePropertyValueChangeListener) {
            if (this.calcListener != null) {
                removeListener(calcListener);
            }
            calcListener = listener;
        }
    }
    @Override
    public void removeListener(ValueChangeListener listener) {
        super.removeListener(listener);
        if (calcListener == listener) {
            calcListener = null;
        }
    }
    @Override
    public boolean isReadOnly() {
        final Table.Column column = WebAbstractTable.this.columns.get(propertyPath);
        if (column != null) {
            // Writable only when the table is editable AND the column is
            // either editable or calculatable.
            return !editable || !(BooleanUtils.isTrue(column.isEditable()) || BooleanUtils.isTrue(column.isCalculatable()));
        } else {
            return super.isReadOnly();
        }
    }
    @Override
    public void setReadOnly(boolean newStatus) {
        super.setReadOnly(newStatus);
    }
    @Override
    public String toString() {
        final Table.Column column = WebAbstractTable.this.columns.get(propertyPath);
        if (column != null) {
            if (column.getFormatter() != null) {
                return column.getFormatter().format(getValue());
            } else if (column.getXmlDescriptor() != null) {
                // captionProperty lets a reference column display an attribute
                // of the referenced instance instead of its instance name.
                String captionProperty = column.getXmlDescriptor().attributeValue("captionProperty");
                if (!StringUtils.isEmpty(captionProperty) && propertyPath.getRange().isClass()) {
                    final Object value = getValue();
                    return value != null ? String.valueOf(((Instance) value).getValue(captionProperty)) : null;
                }
            }
        }
        return super.toString();
    }
}
// Marker interface distinguishing framework-installed generators from
// user-registered ones (see repaint(), which only preserves the latter).
private interface SystemTableColumnGenerator extends com.vaadin.ui.Table.ColumnGenerator {
}

/**
 * Adapts a CUBA {@link ColumnGenerator} to the Vaadin column generator API,
 * keeping a reference to the wrapped generator for later retrieval.
 */
protected static abstract class CustomColumnGenerator implements com.vaadin.ui.Table.ColumnGenerator {
    private ColumnGenerator columnGenerator;
    protected CustomColumnGenerator(ColumnGenerator columnGenerator) {
        this.columnGenerator = columnGenerator;
    }
    public ColumnGenerator getColumnGenerator() {
        return columnGenerator;
    }
}
/**
 * Base generator rendering a cell as a link-styled button whose click
 * executes the column's XML {@code clickAction}:
 * {@code open:<screenId>} opens an editor for the item (updating the
 * datasource on commit), {@code invoke:<method>} reflectively calls a
 * controller method taking the item as its single argument.
 */
protected abstract class LinkGenerator implements SystemTableColumnGenerator {
    protected Table.Column column;
    public LinkGenerator(Table.Column column) {
        this.column = column;
    }
    public com.vaadin.ui.Component generateCell(AbstractSelect source, final Object itemId, Object columnId) {
        final Item item = source.getItem(itemId);
        final Property property = item.getItemProperty(columnId);
        final Object value = property.getValue();
        final com.vaadin.ui.Button component = new Button();
        component.setData(value);
        component.setCaption(value == null ? "" : property.toString());
        component.setStyleName("link");
        component.addListener(new Button.ClickListener() {
            @Override
            public void buttonClick(Button.ClickEvent event) {
                final Element element = column.getXmlDescriptor();
                final String clickAction = element.attributeValue("clickAction");
                if (!StringUtils.isEmpty(clickAction)) {
                    if (clickAction.startsWith("open:")) {
                        // open:<screenId> — open an editor for the linked item.
                        final com.haulmont.cuba.gui.components.IFrame frame = WebAbstractTable.this.getFrame();
                        String screenName = clickAction.substring("open:".length()).trim();
                        final Window window = frame.openEditor(screenName, getItem(item, property), WindowManager.OpenType.THIS_TAB);
                        window.addListener(new Window.CloseListener() {
                            @Override
                            public void windowClosed(String actionId) {
                                // Propagate committed edits back into the table datasource.
                                if (Window.COMMIT_ACTION_ID.equals(actionId) && window instanceof Window.Editor) {
                                    Object item = ((Window.Editor) window).getItem();
                                    if (item instanceof Entity) {
                                        datasource.updateItem((Entity) item);
                                    }
                                }
                            }
                        });
                    } else if (clickAction.startsWith("invoke:")) {
                        // invoke:<method> — call a public controller method(Object).
                        final com.haulmont.cuba.gui.components.IFrame frame = WebAbstractTable.this.getFrame();
                        String methodName = clickAction.substring("invoke:".length()).trim();
                        try {
                            IFrame controllerFrame = WebComponentsHelper.getControllerFrame(frame);
                            Method method = controllerFrame.getClass().getMethod(methodName, Object.class);
                            method.invoke(controllerFrame, getItem(item, property));
                        } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
                            throw new RuntimeException("Unable to invoke clickAction", e);
                        }
                    } else {
                        throw new UnsupportedOperationException("Unsupported clickAction format: " + clickAction);
                    }
                }
            }
        });
        return component;
    }
    @Override
    public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell(((AbstractSelect) source), itemId, columnId);
    }
    // Subclasses decide which entity the link targets (cell value vs. row item).
    protected abstract Entity getItem(Item item, Property property);
}
/** Link generator for reference columns: the link targets the cell's value (the referenced entity). */
protected class ReadOnlyAssociationGenerator extends LinkGenerator {
    public ReadOnlyAssociationGenerator(Table.Column column) {
        super(column);
    }
    @Override
    protected Entity getItem(Item item, Property property) {
        return (Entity) property.getValue();
    }
}

/** Link generator for datatype columns: the link targets the row's own entity. */
protected class CodePropertyGenerator extends LinkGenerator {
    public CodePropertyGenerator(Table.Column column) {
        super(column);
    }
    @Override
    protected Entity getItem(Item item, Property property) {
        return ((ItemWrapper) item).getItem();
    }
}
/**
 * Renders boolean cells as static checked/unchecked check-box images;
 * null values render as unchecked.
 */
protected class ReadOnlyBooleanDatatypeGenerator implements SystemTableColumnGenerator {
    @Override
    public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell((AbstractSelect) source, itemId, columnId);
    }
    protected Component generateCell(AbstractSelect source, Object itemId, Object columnId) {
        final Property property = source.getItem(itemId).getItemProperty(columnId);
        final Object value = property.getValue();
        com.vaadin.ui.Embedded checkBoxImage;
        if (BooleanUtils.isTrue((Boolean) value)){
            checkBoxImage = new com.vaadin.ui.Embedded("", new VersionedThemeResource("components/table/images/checkbox-checked.png"));
        } else {
            checkBoxImage = new com.vaadin.ui.Embedded("", new VersionedThemeResource("components/table/images/checkbox-unchecked.png"));
        }
        return checkBoxImage;
    }
}
/**
 * Renders long text cells abbreviated to the column's {@code maxTextLength};
 * when the value exceeds the limit (plus a small gap), the abbreviated text
 * becomes a popup revealing the full value in a read-only text area.
 */
protected class AbbreviatedColumnGenerator implements SystemTableColumnGenerator {
    protected Table.Column column;
    public AbbreviatedColumnGenerator(Table.Column column) {
        this.column = column;
    }
    @Override
    public com.vaadin.ui.Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell((AbstractSelect) source, itemId, columnId);
    }
    protected com.vaadin.ui.Component generateCell(AbstractSelect source, Object itemId, Object columnId) {
        final Property property = source.getItem(itemId).getItemProperty(columnId);
        final Object value = property.getValue();
        if (value == null) {
            return null;
        }
        com.vaadin.ui.Component cell;
        String stringValue = value.toString();
        int maxTextLength = column.getMaxTextLength();
        // MAX_TEXT_LENGTH_GAP avoids abbreviating values only slightly over the limit.
        if (stringValue.length() > maxTextLength + MAX_TEXT_LENGTH_GAP) {
            TextArea content = new TextArea(null, stringValue);
            content.setWidth("100%");
            content.setHeight("100%");
            content.setReadOnly(true);
            CssLayout cssLayout = new CssLayout();
            cssLayout.setHeight("300px");
            cssLayout.setWidth("400px");
            // Escape the abbreviated text: PopupView treats its minimized content as HTML.
            cell = new PopupView(StringEscapeUtils.escapeHtml(StringUtils.abbreviate(stringValue, maxTextLength)),
                    cssLayout);
            cell.addStyleName("abbreviated");
            cssLayout.addComponent(content);
        } else {
            cell = new Label(stringValue);
        }
        return cell;
    }
}
/**
 * Renders a calculatable column as a label bound to the property wrapper;
 * a value-change listener keeps the label in sync when the computed value
 * changes.
 */
protected class CalculatableColumnGenerator implements SystemTableColumnGenerator {
    @Override
    public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell((AbstractSelect) source, itemId, columnId);
    }
    protected Component generateCell(AbstractSelect source, Object itemId, Object columnId) {
        CollectionDatasource ds = WebAbstractTable.this.getDatasource();
        MetaPropertyPath propertyPath = ds.getMetaClass().getPropertyPath(columnId.toString());
        PropertyWrapper propertyWrapper = (PropertyWrapper) source.getContainerProperty(itemId, propertyPath);
        Formatter formatter = null;
        Table.Column column = WebAbstractTable.this.getColumn(columnId.toString());
        if (column != null) {
            formatter = column.getFormatter();
        }
        final Label label = new Label();
        WebComponentsHelper.setLabelText(label, propertyWrapper.getValue(), formatter);
        label.setWidth("-1px");
        //add property change listener that will update a label value
        propertyWrapper.addListener(new CalculatablePropertyValueChangeListener(label, formatter));
        return label;
    }
}

/**
 * Pushes new values of a calculatable property into the associated cell
 * label, applying the column formatter.
 */
protected static class CalculatablePropertyValueChangeListener implements Property.ValueChangeListener {
    private Label component;
    private Formatter formatter;
    private static final long serialVersionUID = 8041384664735759397L;
    private CalculatablePropertyValueChangeListener(Label component, Formatter formatter) {
        this.component = component;
        this.formatter = formatter;
    }
    @Override
    public void valueChange(Property.ValueChangeEvent event) {
        WebComponentsHelper.setLabelText(component, event.getProperty().getValue(), formatter);
    }
}
/** Lazily creates the aggregation-cell map and registers a label cell for the column. */
protected void addAggregationCell(Table.Column column) {
    if (aggregationCells == null) {
        aggregationCells = new HashMap<>();
    }
    aggregationCells.put(column, createAggregationCell());
}

/** Creates the label used to render one aggregation result, parented to the table. */
protected com.vaadin.ui.Label createAggregationCell() {
    com.vaadin.ui.Label label = new Label();
    label.setWidth("-1px");
    label.setParent(component);
    return label;
}

/** Factory for the datasource listener that re-runs aggregation on value changes. */
protected CollectionDatasourceListener createAggregationDatasourceListener() {
    return new AggregationDatasourceListener();
}

/** Re-aggregates all items whenever an entity attribute value changes. */
protected class AggregationDatasourceListener extends CollectionDsListenerAdapter<Entity> {
    @Override
    public void valueChanged(Entity source, String property, Object prevValue, Object value) {
        final CollectionDatasource ds = WebAbstractTable.this.getDatasource();
        component.aggregate(new AggregationContainer.Context(ds.getItemIds()));
    }
}
/**
 * Creates editor fields for editable table cells: builds a CUBA component
 * bound to a per-row field datasource, applies required/description column
 * settings, sizing, frame, and attribute-level security, then adapts it to
 * a Vaadin field.
 */
protected class WebTableFieldFactory extends com.haulmont.cuba.web.gui.components.AbstractFieldFactory
        implements TableFieldFactory {
    // Cache of lookup datasources per target meta-class, shared by all cells.
    protected Map<MetaClass, CollectionDatasource> optionsDatasources = new HashMap<>();
    @Override
    public com.vaadin.ui.Field createField(com.vaadin.data.Container container,
                                           Object itemId, Object propertyId, Component uiContext) {
        String fieldPropertyId = String.valueOf(propertyId);
        Column columnConf = columns.get(propertyId);
        Item item = container.getItem(itemId);
        Entity entity = ((ItemWrapper)item).getItem();
        Datasource fieldDatasource = getItemDatasource(entity);
        com.haulmont.cuba.gui.components.Component columnComponent =
                createField(fieldDatasource, fieldPropertyId, columnConf.getXmlDescriptor());
        com.vaadin.ui.Field fieldImpl = getFieldImplementation(columnComponent);
        if (columnComponent instanceof Field) {
            Field cubaField = (Field) columnComponent;
            if (columnConf.getDescription() != null) {
                cubaField.setDescription(columnConf.getDescription());
            }
            if (requiredColumns.containsKey(columnConf)) {
                cubaField.setRequired(true);
                cubaField.setRequiredMessage(requiredColumns.get(columnConf));
            }
        }
        // Explicit column width wins; otherwise the editor fills the cell.
        if (columnConf.getWidth() != null) {
            columnComponent.setWidth(columnConf.getWidth() + "px");
        } else {
            columnComponent.setWidth("100%");
        }
        if (columnComponent instanceof BelongToFrame) {
            BelongToFrame belongToFrame = (BelongToFrame) columnComponent;
            if (belongToFrame.getFrame() == null) {
                belongToFrame.setFrame(getFrame());
            }
        }
        applyPermissions(columnComponent);
        return fieldImpl;
    }
    protected com.vaadin.ui.Field getFieldImplementation(com.haulmont.cuba.gui.components.Component columnComponent) {
        com.vaadin.ui.Component composition = WebComponentsHelper.getComposition(columnComponent);
        com.vaadin.ui.Field fieldImpl;
        if (composition instanceof com.vaadin.ui.Field) {
            fieldImpl = (com.vaadin.ui.Field) composition;
        } else {
            // Non-field compositions are adapted through a wrapper.
            fieldImpl = new FieldWrapper(columnComponent);
        }
        return fieldImpl;
    }
    protected void applyPermissions(com.haulmont.cuba.gui.components.Component columnComponent) {
        // Downgrade editability when attribute modification is not permitted.
        if (columnComponent instanceof DatasourceComponent) {
            DatasourceComponent dsComponent = (DatasourceComponent) columnComponent;
            MetaProperty metaProperty = dsComponent.getMetaProperty();
            if (metaProperty != null) {
                MetaClass metaClass = dsComponent.getDatasource().getMetaClass();
                dsComponent.setEditable(security.isEntityAttrModificationPermitted(metaClass, metaProperty.getName())
                        && dsComponent.isEditable());
            }
        }
    }
    @Override
    protected CollectionDatasource getOptionsDatasource(Datasource fieldDatasource, String propertyId) {
        // Uses the column's explicit optionsDatasource if configured; otherwise
        // builds (and caches) a datasource for the property's target meta-class.
        if (datasource == null)
            throw new IllegalStateException("Table datasource is null");
        Column columnConf = columns.get(datasource.getMetaClass().getPropertyPath(propertyId));
        final DsContext dsContext = datasource.getDsContext();
        String optDsName = columnConf.getXmlDescriptor().attributeValue("optionsDatasource");
        if (StringUtils.isBlank(optDsName)) {
            MetaPropertyPath propertyPath = fieldDatasource.getMetaClass().getPropertyPath(propertyId);
            MetaClass metaClass = propertyPath.getRange().asClass();
            CollectionDatasource ds = optionsDatasources.get(metaClass);
            if (ds != null)
                return ds;
            final DataSupplier dataSupplier = fieldDatasource.getDataSupplier();
            final String id = metaClass.getName();
            final String viewName = null; //metaClass.getName() + ".lookup";
            ds = new DsBuilder(dsContext)
                    .setDataSupplier(dataSupplier)
                    .setId(id)
                    .setMetaClass(metaClass)
                    .setViewName(viewName)
                    .buildCollectionDatasource();
            ds.refresh();
            optionsDatasources.put(metaClass, ds);
            return ds;
        } else {
            CollectionDatasource ds = dsContext.get(optDsName);
            if (ds == null)
                throw new IllegalStateException("Options datasource not found: " + optDsName);
            return ds;
        }
    }
}
/**
 * Handles UI variables specific to this table. When presentations are in
 * use and a presentation-affecting variable changed, persists the current
 * presentation's settings if it is marked auto-save. Always reports that no
 * content reload is required.
 *
 * @param variables UI variables received from the client
 * @return always false (no reload needed)
 */
protected boolean handleSpecificVariables(Map<String, Object> variables) {
    if (isUsePresentations() && needUpdatePresentation(variables)) {
        Presentations p = getPresentations();
        Presentation current = p.getCurrent();
        if (current != null && p.isAutoSave(current)) {
            Element e = p.getSettings(current);
            saveSettings(e);
            p.setSettings(current, e);
        }
    }
    return false;
}
/**
 * @return true if any incoming UI variable affects state that is stored in
 *         a table presentation (column widths, sorting, column order,
 *         collapsed or grouped columns)
 */
private boolean needUpdatePresentation(Map<String, Object> variables) {
    String[] presentationKeys = {
            "colwidth", "sortcolumn", "sortascending",
            "columnorder", "collapsedcolumns", "groupedcolumns"
    };
    for (String key : presentationKeys) {
        if (variables.containsKey(key)) {
            return true;
        }
    }
    return false;
}
/**
 * Paints table-specific client state: advertises whether presentations are
 * enabled and, if so, serializes the presentations popup content.
 *
 * @throws PaintException on serialization failure
 */
protected void paintSpecificContent(PaintTarget target) throws PaintException {
    target.addVariable(component, "presentations", isUsePresentations());
    if (isUsePresentations()) {
        target.startTag("presentations");
        tablePresentations.paint(target);
        target.endTag("presentations");
    }
}
/**
 * Returns, in visual order, the columns that are currently visible and not
 * collapsed by the user. Ids without a registered Column are skipped.
 */
@Override
public List<Table.Column> getNotCollapsedColumns() {
    Object[] keys = component.getVisibleColumns();
    if (keys == null)
        return Collections.emptyList();
    List<Table.Column> result = new ArrayList<>(keys.length);
    for (Object key : keys) {
        if (component.isColumnCollapsed(key))
            continue;
        Column column = columns.get(key);
        if (column != null)
            result.add(column);
    }
    return result;
}
/** Enables or disables presentations support for this table. */
@Override
public void usePresentations(boolean use) {
    this.usePresentations = use;
}
/** @return whether presentations support is enabled for this table */
@Override
public boolean isUsePresentations() {
    return usePresentations;
}
/**
 * Lazily creates the presentations model and its popup UI. Must only be
 * called when presentations are enabled.
 *
 * @throws UnsupportedOperationException if presentations are not in use
 */
@Override
public void loadPresentations() {
    if (!isUsePresentations()) {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
    presentations = new PresentationsImpl(this);
    tablePresentations = new TablePresentations(this);
}
/**
 * @return the presentations model
 * @throws UnsupportedOperationException if presentations are not in use
 */
@Override
public Presentations getPresentations() {
    if (!isUsePresentations()) {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
    return presentations;
}
/**
 * Applies the presentation with the given id to the table.
 *
 * @throws UnsupportedOperationException if presentations are not in use
 */
@Override
public void applyPresentation(Object id) {
    if (!isUsePresentations()) {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
    applyPresentation(presentations.getPresentation(id));
}
/**
 * Marks the presentation with the given id as the default one and applies
 * it. Unknown ids are silently ignored.
 *
 * @throws UnsupportedOperationException if presentations are not in use
 */
@Override
public void applyPresentationAsDefault(Object id) {
    if (!isUsePresentations()) {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
    Presentation p = presentations.getPresentation(id);
    if (p != null) {
        presentations.setDefault(p);
        applyPresentation(p);
    }
}
/**
 * Makes the given presentation current, restores its saved settings into
 * the table and repaints. Statement order matters: the presentation must be
 * current before its settings are applied.
 */
protected void applyPresentation(Presentation p) {
    presentations.setCurrent(p);
    Element settingsElement = presentations.getSettings(p);
    applySettings(settingsElement);
    component.requestRepaint();
}
/** @return id of the default presentation, or null if none is set */
@Override
public Object getDefaultPresentationId() {
    Presentation def = presentations.getDefault();
    if (def == null) {
        return null;
    }
    return def.getId();
}
/** Registers a listener notified when a column is collapsed or expanded. */
@Override
public void addColumnCollapsedListener(ColumnCollapseListener columnCollapsedListener) {
    columnCollapseListeners.add(columnCollapsedListener);
}
/** Unregisters a previously added column-collapse listener. */
@Override
public void removeColumnCollapseListener(ColumnCollapseListener columnCollapseListener) {
    columnCollapseListeners.remove(columnCollapseListener);
}
} | modules/web6/src/com/haulmont/cuba/web/gui/components/WebAbstractTable.java | /*
* Copyright (c) 2008-2013 Haulmont. All rights reserved.
* Use is subject to license terms, see http://www.cuba-platform.com/license for details.
*/
package com.haulmont.cuba.web.gui.components;
import com.haulmont.bali.datastruct.Pair;
import com.haulmont.bali.util.Dom4j;
import com.haulmont.chile.core.datatypes.Datatype;
import com.haulmont.chile.core.datatypes.impl.BooleanDatatype;
import com.haulmont.chile.core.model.Instance;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.MetaProperty;
import com.haulmont.chile.core.model.MetaPropertyPath;
import com.haulmont.cuba.client.ClientConfig;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.*;
import com.haulmont.cuba.gui.ComponentsHelper;
import com.haulmont.cuba.gui.WindowManager;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.gui.components.Field;
import com.haulmont.cuba.gui.components.Formatter;
import com.haulmont.cuba.gui.components.Table;
import com.haulmont.cuba.gui.components.Window;
import com.haulmont.cuba.gui.components.actions.ListActionType;
import com.haulmont.cuba.gui.data.*;
import com.haulmont.cuba.gui.data.impl.CollectionDsActionsNotifier;
import com.haulmont.cuba.gui.data.impl.CollectionDsListenerAdapter;
import com.haulmont.cuba.gui.data.impl.DatasourceImplementation;
import com.haulmont.cuba.gui.presentations.Presentations;
import com.haulmont.cuba.gui.presentations.PresentationsImpl;
import com.haulmont.cuba.security.entity.EntityAttrAccess;
import com.haulmont.cuba.security.entity.EntityOp;
import com.haulmont.cuba.security.entity.Presentation;
import com.haulmont.cuba.security.global.UserSession;
import com.haulmont.cuba.web.gui.CompositionLayout;
import com.haulmont.cuba.web.gui.components.presentations.TablePresentations;
import com.haulmont.cuba.web.gui.data.CollectionDsWrapper;
import com.haulmont.cuba.web.gui.data.ItemWrapper;
import com.haulmont.cuba.web.gui.data.PropertyWrapper;
import com.haulmont.cuba.web.toolkit.VersionedThemeResource;
import com.haulmont.cuba.web.toolkit.data.AggregationContainer;
import com.haulmont.cuba.web.toolkit.ui.FieldWrapper;
import com.vaadin.data.Item;
import com.vaadin.data.Property;
import com.vaadin.event.ItemClickEvent;
import com.vaadin.event.ShortcutListener;
import com.vaadin.terminal.PaintException;
import com.vaadin.terminal.PaintTarget;
import com.vaadin.terminal.Resource;
import com.vaadin.ui.*;
import com.vaadin.ui.Button;
import com.vaadin.ui.Component;
import com.vaadin.ui.Label;
import com.vaadin.ui.TextArea;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.LogFactory;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import javax.annotation.Nullable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* @param <T>
* @author abramov
* @version $Id$
*/
public abstract class WebAbstractTable<T extends com.haulmont.cuba.web.toolkit.ui.Table>
extends WebAbstractList<T> implements Table {
protected static final String REQUIRED_TABLE_STYLE = "table";
protected Map<Object, Table.Column> columns = new HashMap<>();
protected List<Table.Column> columnsOrder = new ArrayList<>();
protected boolean editable;
protected Action itemClickAction;
protected Action enterPressAction;
protected Table.StyleProvider styleProvider;
protected Table.IconProvider iconProvider;
protected Map<Entity, Datasource> fieldDatasources = new WeakHashMap<>();
protected Map<Table.Column, String> requiredColumns = new HashMap<>();
protected Map<Table.Column, Set<com.haulmont.cuba.gui.components.Field.Validator>> validatorsMap = new HashMap<>();
protected Set<com.haulmont.cuba.gui.components.Field.Validator> tableValidators = new LinkedHashSet<>();
protected CompositionLayout componentComposition;
protected HorizontalLayout topPanel;
protected ButtonsPanel buttonsPanel;
protected RowsCount rowsCount;
protected Map<Table.Column, Object> aggregationCells = null;
protected boolean usePresentations;
protected Presentations presentations;
protected TablePresentations tablePresentations;
protected List<ColumnCollapseListener> columnCollapseListeners = new ArrayList<>();
// Map column id to Printable representation
protected Map<String, Printable> printables = new HashMap<>();
// disabled for #PL-2035
// Disable listener that points component value to follow the ds item.
// protected boolean disableItemListener = false;
protected String customStyle;
protected Security security = AppBeans.get(Security.class);
protected static final int MAX_TEXT_LENGTH_GAP = 10;
/** @return live list of columns in their current visual order */
@Override
public java.util.List<Table.Column> getColumns() {
    return columnsOrder;
}
/**
 * Finds a column by the string form of its id.
 *
 * @return the matching column, or null if none matches
 */
@Override
public Table.Column getColumn(String id) {
    // Linear scan is fine: column counts are small.
    for (Table.Column column : columnsOrder) {
        String columnId = column.getId().toString();
        if (columnId.equals(id)) {
            return column;
        }
    }
    return null;
}
/**
 * Adds a column: registers the container property, the id-to-column mapping
 * and the ordering entry, and applies an explicit width if one is set.
 */
@Override
public void addColumn(Table.Column column) {
    component.addContainerProperty(column.getId(), column.getType(), null);
    columns.put(column.getId(), column);
    columnsOrder.add(column);
    if (column.getWidth() != null) {
        component.setColumnWidth(column.getId(), column.getWidth());
    }
}
/**
 * Removes a column: drops the container property, the id-to-column mapping
 * and the ordering entry.
 *
 * @param column column to remove
 */
@Override
public void removeColumn(Table.Column column) {
    component.removeContainerProperty(column.getId());
    // Map.remove takes Object, so no cast is needed. The previous cast to
    // MetaPropertyPath executed at runtime and would have thrown
    // ClassCastException for columns whose id is not a MetaPropertyPath
    // (e.g. generated columns with String ids).
    columns.remove(column.getId());
    columnsOrder.remove(column);
}
/**
 * Returns a lightweight per-row datasource used by inline cell editors,
 * creating and caching it on first access for the given entity. The cache
 * is a WeakHashMap, so entries disappear with their entities; it is also
 * pruned explicitly on collection changes (see setDatasource).
 */
@Override
public Datasource getItemDatasource(Entity item) {
    Datasource fieldDatasource = fieldDatasources.get(item);
    if (fieldDatasource == null) {
        // Non-committing, never-refreshing datasource over local attributes.
        fieldDatasource = new DsBuilder()
                .setAllowCommit(false)
                .setMetaClass(datasource.getMetaClass())
                .setRefreshMode(CollectionDatasource.RefreshMode.NEVER)
                .setViewName("_local")
                .buildDatasource();
        ((DatasourceImplementation)fieldDatasource).valid();
        fieldDatasource.setItem(item);
        fieldDatasources.put(item, fieldDatasource);
    }
    return fieldDatasource;
}
/** Registers a Vaadin column generator for the given column id. */
protected void addGeneratedColumn(Object id, Object generator) {
    component.addGeneratedColumn(id, (com.vaadin.ui.Table.ColumnGenerator) generator);
}
/** Unregisters the Vaadin column generator for the given column id. */
protected void removeGeneratedColumn(Object id) {
    component.removeGeneratedColumn(id);
}
/** Registers a printable (export) representation for the given column id. */
@Override
public void addPrintable(String columnId, Printable printable) {
    printables.put(columnId, printable);
}
/** Removes the printable representation for the given column id. */
@Override
public void removePrintable(String columnId) {
    printables.remove(columnId);
}
/** @return printable for the column, looked up by the id's string form */
@Override
@Nullable
public Printable getPrintable(Table.Column column) {
    String columnId = String.valueOf(column.getId());
    return getPrintable(columnId);
}
/**
 * Resolves the printable representation of a column: explicitly registered
 * printables take precedence; otherwise a generated column whose generator
 * implements Printable is used; otherwise null.
 */
@Nullable
@Override
public Printable getPrintable(String columnId) {
    Printable printable = printables.get(columnId);
    if (printable != null) {
        return printable;
    }
    com.vaadin.ui.Table.ColumnGenerator vColumnGenerator = component.getColumnGenerator(columnId);
    if (vColumnGenerator instanceof CustomColumnGenerator) {
        ColumnGenerator columnGenerator = ((CustomColumnGenerator) vColumnGenerator).getColumnGenerator();
        if (columnGenerator instanceof Printable) {
            return (Printable) columnGenerator;
        }
    }
    return null;
}
/** @return whether the table is currently in editable mode */
@Override
public boolean isEditable() {
    return editable;
}
/**
 * Switches the table between editable and read-only mode. Because changing
 * editability re-creates the generated columns (refreshColumns), custom
 * generators attached to non-editable columns are saved before the refresh
 * and re-registered afterwards so they survive the mode switch. Content
 * refreshing is suspended for the duration to avoid intermediate repaints.
 */
@Override
public void setEditable(boolean editable) {
    if (this.editable != editable) {
        this.editable = editable;
        component.disableContentRefreshing();
        if (datasource != null) {
            com.vaadin.data.Container ds = component.getContainerDataSource();
            @SuppressWarnings("unchecked")
            final Collection<MetaPropertyPath> propertyIds = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
            // added generated columns
            final List<Pair<Object, com.vaadin.ui.Table.ColumnGenerator>> columnGenerators = new LinkedList<>();
            for (final MetaPropertyPath id : propertyIds) {
                final Table.Column column = getColumn(id.toString());
                // save generators only for non editable columns
                if (!column.isEditable()) {
                    com.vaadin.ui.Table.ColumnGenerator generator = component.getColumnGenerator(id);
                    // Skip internal system generators; only user-supplied ones
                    // need to be restored.
                    if (generator != null && !(generator instanceof WebAbstractTable.SystemTableColumnGenerator)) {
                        columnGenerators.add(new Pair<Object, com.vaadin.ui.Table.ColumnGenerator>(id, generator));
                    }
                }
            }
            refreshColumns(ds);
            // restore generated columns
            for (Pair<Object, com.vaadin.ui.Table.ColumnGenerator> generatorEntry : columnGenerators) {
                component.addGeneratedColumn(generatorEntry.getFirst(), generatorEntry.getSecond());
            }
        }
        component.setEditable(editable);
        component.enableContentRefreshing(true);
    }
}
/** Tells the underlying Vaadin table which property columns are editable. */
protected void setEditableColumns(List<MetaPropertyPath> editableColumns) {
    component.setEditableColumns(editableColumns.toArray());
}
/** @return whether user sorting is enabled (inverse of Vaadin's sortDisabled) */
@Override
public boolean isSortable() {
    return !component.isSortDisabled();
}
/** Enables or disables user sorting (note the inverted Vaadin flag). */
@Override
public void setSortable(boolean sortable) {
    component.setSortDisabled(!sortable);
}
/** Allows or forbids drag-and-drop column reordering. */
@Override
public void setColumnReorderingAllowed(boolean columnReorderingAllowed) {
    component.setColumnReorderingAllowed(columnReorderingAllowed);
}
/** @return whether drag-and-drop column reordering is allowed */
@Override
public boolean getColumnReorderingAllowed() {
    return component.isColumnReorderingAllowed();
}
/** Shows or hides the column-collapsing control (maps to Vaadin collapsing). */
@Override
public void setColumnControlVisible(boolean columnCollapsingAllowed) {
    component.setColumnCollapsingAllowed(columnCollapsingAllowed);
}
/** @return whether the column-collapsing control is visible */
@Override
public boolean getColumnControlVisible() {
    return component.isColumnCollapsingAllowed();
}
/**
 * Sorts the table by the given container property. Silently ignored when
 * sorting is disabled.
 */
@Override
public void sortBy(Object propertyId, boolean ascending) {
    if (!isSortable()) {
        return;
    }
    component.setSortAscending(ascending);
    component.setSortContainerPropertyId(propertyId);
    component.sort();
}
/** @return the rows-count component attached to this table, or null */
@Override
public RowsCount getRowsCount() {
    return rowsCount;
}
/**
 * Attaches (or replaces) the rows-count component. The component lives in a
 * lazily created top panel above the table, right-aligned. Passing null
 * detaches the previous one without creating the panel.
 */
@Override
public void setRowsCount(RowsCount rowsCount) {
    // Detach the previous rows-count component, if any.
    if (this.rowsCount != null && topPanel != null) {
        topPanel.removeComponent(WebComponentsHelper.unwrap(this.rowsCount));
    }
    this.rowsCount = rowsCount;
    if (rowsCount != null) {
        if (topPanel == null) {
            topPanel = new HorizontalLayout();
            topPanel.setWidth("100%");
            componentComposition.addComponentAsFirst(topPanel);
        }
        Component rc = WebComponentsHelper.unwrap(rowsCount);
        topPanel.addComponent(rc);
        topPanel.setExpandRatio(rc, 1);
        topPanel.setComponentAlignment(rc, com.vaadin.ui.Alignment.BOTTOM_RIGHT);
    }
}
/** @return whether cells may render multi-line content */
@Override
public boolean isAllowMultiStringCells() {
    return component.isAllowMultiStringCells();
}
/** Allows or forbids multi-line cell content. */
@Override
public void setAllowMultiStringCells(boolean value) {
    component.setAllowMultiStringCells(value);
}
/** @return whether the table computes column aggregations */
@Override
public boolean isAggregatable() {
    return component.isAggregatable();
}
/** Enables or disables column aggregation support. */
@Override
public void setAggregatable(boolean aggregatable) {
    component.setAggregatable(aggregatable);
}
/** Shows or hides the total aggregation row. */
@Override
public void setShowTotalAggregation(boolean showAggregation) {
    component.setShowTotalAggregation(showAggregation);
}
/** @return whether the total aggregation row is shown */
@Override
public boolean isShowTotalAggregation() {
    return component.isShowTotalAggregation();
}
/** @return the outer layout wrapping the table and its top panel */
@Override
public Component getComposition() {
    return componentComposition;
}
/** @return height of the outer composition, not the bare table */
@Override
public float getHeight() {
    return componentComposition.getHeight();
}
/** @return units of the composition height value */
@Override
public int getHeightUnits() {
    return componentComposition.getHeightUnits();
}
/** Sets the height on the outer composition. */
@Override
public void setHeight(String height) {
    componentComposition.setHeight(height);
}
/** @return width of the outer composition, not the bare table */
@Override
public float getWidth() {
    return componentComposition.getWidth();
}
/** Sets the width on the outer composition. */
@Override
public void setWidth(String width) {
    componentComposition.setWidth(width);
}
/** @return units of the composition width value */
@Override
public int getWidthUnits() {
    return componentComposition.getWidthUnits();
}
/**
 * Remembers the user-supplied style name and applies it combined with the
 * mandatory "table" base style, which must never be lost.
 */
@Override
public void setStyleName(String name) {
    this.customStyle = name;
    StringBuilder style = new StringBuilder(REQUIRED_TABLE_STYLE);
    if (StringUtils.isNotEmpty(name)) {
        style.append(' ').append(name);
    }
    super.setStyleName(style.toString());
}
/** @return the user-supplied style name (without the base "table" style) */
@Override
public String getStyleName() {
    return customStyle;
}
/**
 * One-time initialization of the underlying Vaadin table: base flags,
 * selection-to-datasource synchronization, Enter/double-click handling,
 * column-collapse notification, field factory, the outer composition layout
 * and default CREATE/REMOVE keyboard shortcuts.
 */
protected void initComponent(T component) {
    // Basic table behavior flags.
    component.setMultiSelect(false);
    component.setNullSelectionAllowed(false);
    component.setImmediate(true);
    component.setValidationVisible(false);
    component.setStoreColWidth(true);
    component.setStyleName(REQUIRED_TABLE_STYLE); //It helps us to manage a caption style
    component.setPageLength(15);
    component.addActionHandler(new ActionsAdapter());
    // Keep the datasource's current item in sync with the table selection.
    component.addListener(new Property.ValueChangeListener() {
        @Override
        @SuppressWarnings("unchecked")
        public void valueChange(Property.ValueChangeEvent event) {
            if (datasource == null) return;
            final Set<Entity> selected = getSelected();
            // disabled for #PL-2035
            // disableItemListener = true;
            if (selected.isEmpty()) {
                datasource.setItem(null);
            } else {
                // reset selection and select new item
                if (isMultiSelect())
                    datasource.setItem(null);
                datasource.setItem(selected.iterator().next());
            }
            // disabled for #PL-2035
            // disableItemListener = false;
        }
    });
    // Enter triggers the configured enter-press action, else the click action.
    component.addShortcutListener(new ShortcutListener("tableEnter", com.vaadin.event.ShortcutAction.KeyCode.ENTER, null) {
        @Override
        public void handleAction(Object sender, Object target) {
            if (enterPressAction != null) {
                enterPressAction.actionPerform(WebAbstractTable.this);
            } else {
                handleClickAction();
            }
        }
    });
    // Double-click on a row triggers the click action.
    component.addListener(new ItemClickEvent.ItemClickListener() {
        @Override
        public void itemClick(ItemClickEvent event) {
            if (event.isDoubleClick() && event.getItem() != null) {
                handleClickAction();
            }
        }
    });
    // Forward Vaadin collapse events to registered CUBA listeners.
    component.addColumnCollapseListener(new com.haulmont.cuba.web.toolkit.ui.Table.CollapseListener() {
        @Override
        public void columnCollapsed(Object columnId, boolean collapsed) {
            final Column collapsedColumn = getColumn(columnId.toString());
            for (ColumnCollapseListener listener : columnCollapseListeners) {
                listener.columnCollapsed(collapsedColumn, collapsed);
            }
        }
    });
    component.setSelectable(true);
    component.setTableFieldFactory(new WebTableFieldFactory());
    component.setColumnCollapsingAllowed(true);
    component.setColumnReorderingAllowed(true);
    setEditable(false);
    // Outer layout: table plus optional top panel (rows count, buttons).
    componentComposition = new CompositionLayout(component);
    componentComposition.setSpacing(true);
    componentComposition.setMargin(false);
    componentComposition.setWidth("-1px");
    component.setSizeFull();
    componentComposition.setExpandRatio(component, 1);
    // Default keyboard shortcuts bound to the standard list actions.
    ClientConfig clientConfig = AppBeans.get(Configuration.class).getConfig(ClientConfig.class);
    addShortcutActionBridge(INSERT_SHORTCUT_ID, clientConfig.getTableInsertShortcut(), ListActionType.CREATE);
    addShortcutActionBridge(REMOVE_SHORTCUT_ID, clientConfig.getTableRemoveShortcut(), ListActionType.REMOVE);
}
/**
 * Connects a keyboard shortcut to a default list action: when the key
 * combination fires while the table itself is the target, the corresponding
 * registered action (if present and enabled) is performed.
 *
 * @param shortcutActionId Shortcut action id
 * @param keyCombination   Keys, in KeyCombination string form
 * @param defaultAction    List action to trigger
 */
protected void addShortcutActionBridge(String shortcutActionId, String keyCombination,
                                       final ListActionType defaultAction) {
    KeyCombination actionKeyCombination = KeyCombination.create(keyCombination);
    component.addShortcutListener(new ShortcutListener(shortcutActionId, actionKeyCombination.getKey().getCode(),
            KeyCombination.Modifier.codes(actionKeyCombination.getModifiers())) {
        @Override
        public void handleAction(Object sender, Object target) {
            // Only react when the shortcut targets this table, not a child.
            if (target == component) {
                Action listAction = getAction(defaultAction.getId());
                if (listAction != null && listAction.isEnabled())
                    listAction.actionPerform(WebAbstractTable.this);
            }
        }
    });
}
/**
 * Invoked on Enter or row double-click. Picks the action to run — the
 * configured item-click action, else "edit", else "view" — and performs it
 * if enabled. In a lookup window where this table is the lookup component,
 * only the special lookup-select action is performed, so double-click does
 * not open an editor instead of selecting.
 */
protected void handleClickAction() {
    Action action = getItemClickAction();
    if (action == null) {
        action = getAction("edit");
        if (action == null) {
            action = getAction("view");
        }
    }
    if (action != null && action.isEnabled()) {
        Window window = ComponentsHelper.getWindow(WebAbstractTable.this);
        if (window instanceof Window.Wrapper)
            window = ((Window.Wrapper) window).getWrappedWindow();
        if (!(window instanceof Window.Lookup)) {
            action.actionPerform(WebAbstractTable.this);
        } else {
            Window.Lookup lookup = (Window.Lookup) window;
            com.haulmont.cuba.gui.components.Component lookupComponent = lookup.getLookupComponent();
            if (lookupComponent != this)
                action.actionPerform(WebAbstractTable.this);
            else if (action.getId().equals(WindowDelegate.LOOKUP_ITEM_CLICK_ACTION_ID)) {
                action.actionPerform(WebAbstractTable.this);
            }
        }
    }
}
/**
 * Installs generated columns for the container's properties based on column
 * configuration: click-action links for associations and datatypes (except
 * in lookup windows), calculatable columns in editable mode, read-only
 * boolean renderers, and abbreviated text for maxTextLength columns.
 * Editable columns in editable mode are left to the field factory.
 *
 * @return the container's property ids
 */
protected Collection<MetaPropertyPath> createColumns(com.vaadin.data.Container ds) {
    @SuppressWarnings({"unchecked"})
    final Collection<MetaPropertyPath> properties = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
    Window window = ComponentsHelper.getWindow(this);
    boolean isLookup = window instanceof Window.Lookup;
    for (MetaPropertyPath propertyPath : properties) {
        final Table.Column column = columns.get(propertyPath);
        if (column != null && !(editable && BooleanUtils.isTrue(column.isEditable()))) {
            final String clickAction =
                    column.getXmlDescriptor() == null ?
                            null : column.getXmlDescriptor().attributeValue("clickAction");
            if (propertyPath.getRange().isClass()) {
                if (!isLookup && !StringUtils.isEmpty(clickAction)) {
                    addGeneratedColumn(propertyPath, new ReadOnlyAssociationGenerator(column));
                }
            } else if (propertyPath.getRange().isDatatype()) {
                if (!isLookup && !StringUtils.isEmpty(clickAction)) {
                    addGeneratedColumn(propertyPath, new CodePropertyGenerator(column));
                } else if (editable && BooleanUtils.isTrue(column.isCalculatable())) {
                    addGeneratedColumn(propertyPath, new CalculatableColumnGenerator());
                } else {
                    final Datatype datatype = propertyPath.getRange().asDatatype();
                    // Custom formatters take precedence over the boolean renderer.
                    if (BooleanDatatype.NAME.equals(datatype.getName()) && column.getFormatter() == null) {
                        addGeneratedColumn(propertyPath, new ReadOnlyBooleanDatatypeGenerator());
                    } else if (column.getMaxTextLength() != null) {
                        addGeneratedColumn(propertyPath, new AbbreviatedColumnGenerator(column));
                    }
                }
            } else if (propertyPath.getRange().isEnum()) {
                // TODO (abramov)
            } else {
                throw new UnsupportedOperationException();
            }
        }
    }
    return properties;
}
/**
 * Rebuilds column state after an editability change: drops all generated
 * columns, recomputes which columns are editable (empty set when the table
 * is read-only), and re-creates the generated columns.
 */
protected void refreshColumns(com.vaadin.data.Container ds) {
    @SuppressWarnings({"unchecked"})
    final Collection<MetaPropertyPath> propertyIds = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
    for (final MetaPropertyPath id : propertyIds) {
        removeGeneratedColumn(id);
    }
    if (isEditable()) {
        final List<MetaPropertyPath> editableColumns = new ArrayList<>(propertyIds.size());
        for (final MetaPropertyPath propertyId : propertyIds) {
            final Table.Column column = getColumn(propertyId.toString());
            if (BooleanUtils.isTrue(column.isEditable())) {
                editableColumns.add(propertyId);
            }
        }
        if (!editableColumns.isEmpty()) {
            setEditableColumns(editableColumns);
        }
    } else {
        setEditableColumns(Collections.<MetaPropertyPath>emptyList());
    }
    createColumns(ds);
}
/**
 * Binds the table to a collection datasource. When no columns were declared
 * explicitly, columns are auto-created from the datasource view's local
 * non-system properties. Sets up: the container wrapper, per-column headers,
 * security-filtered editable columns, collapsed columns, aggregation,
 * the ShowInfo action (if permitted), rows-count binding, and listeners
 * that keep cell-editor datasource caches and the selection consistent
 * with collection changes.
 */
@Override
public void setDatasource(CollectionDatasource datasource) {
    UserSessionSource uss = AppBeans.get(UserSessionSource.NAME);
    UserSession userSession = uss.getUserSession();
    MetadataTools metadataTools = AppBeans.get(MetadataTools.class);
    final Collection<Object> columns;
    // Auto-create columns from the view when none were declared.
    if (this.columns.isEmpty()) {
        Collection<MetaPropertyPath> paths = metadataTools.getViewPropertyPaths(datasource.getView(), datasource.getMetaClass());
        for (MetaPropertyPath metaPropertyPath : paths) {
            MetaProperty property = metaPropertyPath.getMetaProperty();
            if (!property.getRange().getCardinality().isMany() && !metadataTools.isSystem(property)) {
                Table.Column column = new Table.Column(metaPropertyPath);
                column.setCaption(AppBeans.get(MessageTools.class).getPropertyCaption(property));
                column.setType(metaPropertyPath.getRangeJavaClass());
                Element element = DocumentHelper.createElement("column");
                column.setXmlDescriptor(element);
                addColumn(column);
            }
        }
    }
    columns = this.columns.keySet();
    this.datasource = datasource;
    // drop cached datasources for components before update table cells on client
    datasource.addListener(new CollectionDsListenerAdapter<Entity>() {
        @Override
        public void collectionChanged(CollectionDatasource ds, Operation operation, List<Entity> items) {
            switch (operation) {
                case CLEAR:
                case REFRESH:
                    fieldDatasources.clear();
                    break;
                case UPDATE:
                case REMOVE:
                    for (Entity entity : items) {
                        fieldDatasources.remove(entity);
                    }
                    break;
            }
        }
    });
    final CollectionDsWrapper containerDatasource = createContainerDatasource(datasource, getPropertyColumns());
    component.setContainerDataSource(containerDatasource);
    if (columns == null) {
        throw new NullPointerException("Columns cannot be null");
    }
    List<MetaPropertyPath> editableColumns = null;
    if (isEditable()) {
        editableColumns = new LinkedList<>();
    }
    // Apply per-column settings: header caption, editability (subject to
    // attribute MODIFY permission), collapsed state and aggregation.
    for (final Object columnId : columns) {
        final Table.Column column = this.columns.get(columnId);
        final String caption;
        if (column != null) {
            caption = StringUtils.capitalize(column.getCaption() != null ? column.getCaption() : getColumnCaption(columnId));
        } else {
            caption = StringUtils.capitalize(getColumnCaption(columnId));
        }
        setColumnHeader(columnId, caption);
        if (column != null) {
            if (editableColumns != null && column.isEditable() && (columnId instanceof MetaPropertyPath)) {
                MetaProperty colMetaProperty = ((MetaPropertyPath) columnId).getMetaProperty();
                MetaClass colMetaClass = colMetaProperty.getDomain();
                if (userSession.isEntityAttrPermitted(colMetaClass, colMetaProperty.getName(), EntityAttrAccess.MODIFY)) {
                    editableColumns.add((MetaPropertyPath) column.getId());
                }
            }
            if (column.isCollapsed() && component.isColumnCollapsingAllowed()) {
                component.setColumnCollapsed(column.getId(), true);
            }
            if (column.getAggregation() != null && isAggregatable()) {
                component.addContainerPropertyAggregation(column.getId(),
                        WebComponentsHelper.convertAggregationType(column.getAggregation().getType()));
            }
        }
    }
    if (editableColumns != null && !editableColumns.isEmpty()) {
        setEditableColumns(editableColumns);
    }
    createColumns(containerDatasource);
    // Aggregation cells for editable/calculatable aggregated columns.
    for (Table.Column column : this.columnsOrder) {
        if (editable && column.getAggregation() != null
                && (BooleanUtils.isTrue(column.isEditable()) || BooleanUtils.isTrue(column.isCalculatable())))
        {
            addAggregationCell(column);
        }
    }
    if (aggregationCells != null) {
        getDatasource().addListener(createAggregationDatasourceListener());
    }
    setVisibleColumns(getPropertyColumns());
    // System-info action, shown only to users with the specific permission.
    if (userSession.isSpecificPermitted(ShowInfoAction.ACTION_PERMISSION)) {
        ShowInfoAction action = (ShowInfoAction) getAction(ShowInfoAction.ACTION_ID);
        if (action == null) {
            action = new ShowInfoAction();
            addAction(action);
        }
        action.setDatasource(datasource);
    }
    if (rowsCount != null) {
        rowsCount.setDatasource(datasource);
    }
    // Drop selection entries for items removed from the collection.
    datasource.addListener(new CollectionDsActionsNotifier(this){
        @Override
        public void collectionChanged(CollectionDatasource ds, Operation operation, List<Entity> items) {
            // #PL-2035, reload selection from ds
            Set<Object> selectedItemIds = getSelectedItemIds();
            if (selectedItemIds == null) {
                selectedItemIds = Collections.emptySet();
            }
            Set<Object> newSelection = new HashSet<>();
            for (Object entityId : selectedItemIds) {
                if (ds.containsItem(entityId)) {
                    newSelection.add(entityId);
                }
            }
            if (newSelection.isEmpty()) {
                setSelected((Entity) null);
            } else {
                setSelectedIds(newSelection);
            }
        }
    });
    // noinspection unchecked
    // disabled for #PL-2035
    // datasource.addListener(new CollectionDsActionsNotifier(this) {
    //     @Override
    //     public void itemChanged(Datasource ds, Entity prevItem, Entity item) {
    //         super.itemChanged(ds, prevItem, item);
    //
    //         if (!disableItemListener && !getSelected().contains(item)) {
    //             setSelected(item);
    //         }
    //     }
    // });
    datasource.addListener(new CollectionDsActionsNotifier(this));
    for (Action action : getActions()) {
        action.refreshState();
    }
}
/**
 * Derives a default caption for a column id: the property name for
 * datasource-bound columns, otherwise the id's string form.
 */
private String getColumnCaption(Object columnId) {
    if (columnId instanceof MetaPropertyPath) {
        MetaProperty metaProperty = ((MetaPropertyPath) columnId).getMetaProperty();
        return metaProperty.getName();
    }
    return columnId.toString();
}
/**
 * Collects ids of datasource-bound columns that the current user is allowed
 * to see, honoring entity READ and attribute VIEW permissions.
 *
 * @return property paths of permitted columns, in current column order
 */
private List<MetaPropertyPath> getPropertyColumns() {
    // Consistency: obtain the session through UserSessionSource, the same way
    // setDatasource() does, instead of the legacy UserSessionProvider.
    UserSessionSource uss = AppBeans.get(UserSessionSource.NAME);
    UserSession userSession = uss.getUserSession();
    List<MetaPropertyPath> result = new ArrayList<>();
    for (Column column : columnsOrder) {
        if (column.getId() instanceof MetaPropertyPath) {
            MetaProperty colMetaProperty = ((MetaPropertyPath) column.getId()).getMetaProperty();
            MetaClass colMetaClass = colMetaProperty.getDomain();
            if (userSession.isEntityOpPermitted(colMetaClass, EntityOp.READ)
                    && userSession.isEntityAttrPermitted(
                            colMetaClass, colMetaProperty.getName(), EntityAttrAccess.VIEW)) {
                result.add((MetaPropertyPath) column.getId());
            }
        }
    }
    return result;
}
/**
 * Creates the Vaadin container wrapper over the given collection datasource
 * exposing only the specified property columns. Implemented by concrete
 * table subclasses (plain/group/tree variants).
 */
protected abstract CollectionDsWrapper createContainerDatasource(CollectionDatasource datasource,
                                                                 Collection<MetaPropertyPath> columns);
/** Pushes the visible column order to the underlying Vaadin table. */
protected void setVisibleColumns(List<?> columnsOrder) {
    component.setVisibleColumns(columnsOrder.toArray());
}
/** Sets the header caption for one column of the Vaadin table. */
protected void setColumnHeader(Object columnId, String caption) {
    component.setColumnHeader(columnId, caption);
}
/**
 * Maps the CUBA row header mode onto the Vaadin constants. Only NONE and
 * ICON are supported by this implementation.
 *
 * @throws UnsupportedOperationException for any other mode
 */
@Override
public void setRowHeaderMode(com.haulmont.cuba.gui.components.Table.RowHeaderMode rowHeaderMode) {
    switch (rowHeaderMode) {
        case NONE: {
            component.setRowHeaderMode(com.vaadin.ui.Table.ROW_HEADER_MODE_HIDDEN);
            break;
        }
        case ICON: {
            component.setRowHeaderMode(com.vaadin.ui.Table.ROW_HEADER_MODE_ICON_ONLY);
            break;
        }
        default: {
            throw new UnsupportedOperationException();
        }
    }
}
/**
 * Marks a column as required (or not) for inline editing. The map doubles
 * as the flag: presence means required, the value is the validation message.
 */
@Override
public void setRequired(Table.Column column, boolean required, String message) {
    if (required) {
        requiredColumns.put(column, message);
    } else {
        requiredColumns.remove(column);
    }
}
/**
 * Registers a validator for inline editors of the given column, lazily
 * creating the per-column validator set on first registration.
 */
@Override
public void addValidator(Table.Column column, final com.haulmont.cuba.gui.components.Field.Validator validator) {
    Set<com.haulmont.cuba.gui.components.Field.Validator> columnValidators = validatorsMap.get(column);
    if (columnValidators == null) {
        columnValidators = new HashSet<>();
        validatorsMap.put(column, columnValidators);
    }
    columnValidators.add(validator);
}
/** Registers a table-level validator, applied to the selection (see validate). */
@Override
public void addValidator(final com.haulmont.cuba.gui.components.Field.Validator validator) {
    tableValidators.add(validator);
}
/**
 * Runs all table-level validators against the current selection.
 *
 * @throws ValidationException if any validator rejects the selection
 */
public void validate() throws ValidationException {
    for (com.haulmont.cuba.gui.components.Field.Validator tableValidator : tableValidators) {
        tableValidator.validate(getSelected());
    }
}
/**
 * Installs (or clears, when null) a per-cell style provider, adapted to a
 * Vaadin CellStyleGenerator. A null propertyId (row-level style request)
 * is passed through to the provider as null.
 */
@Override
public void setStyleProvider(final Table.StyleProvider styleProvider) {
    this.styleProvider = styleProvider;
    if (styleProvider == null) {
        component.setCellStyleGenerator(null);
        return;
    }
    component.setCellStyleGenerator(new com.vaadin.ui.Table.CellStyleGenerator() {
        public String getStyle(Object itemId, Object propertyId) {
            @SuppressWarnings({"unchecked"})
            final Entity item = datasource.getItem(itemId);
            return styleProvider.getStyleName(item, propertyId == null ? null : propertyId.toString());
        }
    });
}
/** Row icons are not supported in the legacy web module; logs and ignores. */
@Override
public void setIconProvider(IconProvider iconProvider) {
    LogFactory.getLog(WebAbstractTable.class).warn("Legacy web module does not support icons for tables");
}
// For vaadin component extensions.
/**
 * Resolves the row icon for an item id via the icon provider. Returns null
 * when no provider is set, the item is missing, or the provider returns a
 * blank URL. URLs without an explicit scheme default to a theme resource.
 */
protected Resource getItemIcon(Object itemId) {
    if (iconProvider == null) {
        return null;
    }
    // noinspection unchecked
    Entity item = datasource.getItem(itemId);
    if (item == null) {
        return null;
    }
    // noinspection unchecked
    String resourceUrl = iconProvider.getItemIcon(item);
    if (StringUtils.isBlank(resourceUrl)) {
        return null;
    }
    // noinspection ConstantConditions
    if (!resourceUrl.contains(":")) {
        resourceUrl = "theme:" + resourceUrl;
    }
    return WebComponentsHelper.getResource(resourceUrl);
}
/** @return width of the row header column (Vaadin uses null as its property id) */
@Override
public int getRowHeaderWidth() {
    // CAUTION: vaadin considers null as row header property id;
    return component.getColumnWidth(null);
}
/** Sets width of the row header column (Vaadin uses null as its property id). */
@Override
public void setRowHeaderWidth(int width) {
    // CAUTION: vaadin considers null as row header property id;
    component.setColumnWidth(null, width);
}
/**
 * Restores user settings (column order, widths, visibility, sorting) from
 * the saved XML element. Columns present in the settings come first, in
 * saved order; columns unknown to the settings (e.g. newly added) are
 * appended. A single-column table is always made visible. Sorting is
 * restored only if the table is sortable and the saved sort property is
 * still among the visible columns.
 */
@Override
public void applySettings(Element element) {
    final Element columnsElem = element.element("columns");
    if (columnsElem != null) {
        Object[] oldColumns = component.getVisibleColumns();
        List<Object> newColumns = new ArrayList<>();
        // add columns from saved settings
        for (Element colElem : Dom4j.elements(columnsElem, "columns")) {
            for (Object column : oldColumns) {
                if (column.toString().equals(colElem.attributeValue("id"))) {
                    newColumns.add(column);
                    String width = colElem.attributeValue("width");
                    if (width != null)
                        component.setColumnWidth(column, Integer.valueOf(width));
                    String visible = colElem.attributeValue("visible");
                    if (visible != null) {
                        if (component.isColumnCollapsingAllowed()) { // throws exception if not
                            component.setColumnCollapsed(column, !Boolean.valueOf(visible));
                        }
                    }
                    break;
                }
            }
        }
        // add columns not saved in settings (perhaps new)
        for (Object column : oldColumns) {
            if (!newColumns.contains(column)) {
                newColumns.add(column);
            }
        }
        // if the table contains only one column, always show it
        if (newColumns.size() == 1) {
            if (component.isColumnCollapsingAllowed()) { // throws exception if not
                component.setColumnCollapsed(newColumns.get(0), false);
            }
        }
        component.setVisibleColumns(newColumns.toArray());
        if (isSortable()) {
            //apply sorting
            String sortProp = columnsElem.attributeValue("sortProperty");
            if (!StringUtils.isEmpty(sortProp)) {
                MetaPropertyPath sortProperty = datasource.getMetaClass().getPropertyPath(sortProp);
                if (newColumns.contains(sortProperty)) {
                    boolean sortAscending = BooleanUtils.toBoolean(columnsElem.attributeValue("sortAscending"));
                    // Reset first so re-applying the same property still sorts.
                    component.setSortContainerPropertyId(null);
                    component.setSortAscending(sortAscending);
                    component.setSortContainerPropertyId(sortProperty);
                }
            } else {
                component.setSortContainerPropertyId(null);
            }
        }
    }
}
/** Popup menu support is not implemented in this module; always {@code false}. */
@Override
public boolean isAllowPopupMenu() {
    // todo not yet implemented
    return false;
}
/** No-op: popup menu support is not implemented in this module. */
@Override
public void setAllowPopupMenu(boolean value) {
    // todo not yet implemented
}
/**
 * Persists the current column layout (order, width, collapsed state) and the
 * active sort into the given settings element, replacing any previously stored
 * "columns" child element.
 *
 * @return always {@code true}: the settings element is always (re)written
 */
@Override
public boolean saveSettings(Element element) {
    Element previous = element.element("columns");
    if (previous != null) {
        element.remove(previous);
    }
    Element columnsRoot = element.addElement("columns");
    for (Object columnId : component.getVisibleColumns()) {
        Element columnElement = columnsRoot.addElement("columns");
        columnElement.addAttribute("id", columnId.toString());
        int columnWidth = component.getColumnWidth(columnId);
        if (columnWidth > -1) {
            columnElement.addAttribute("width", String.valueOf(columnWidth));
        }
        boolean shown = !component.isColumnCollapsed(columnId);
        columnElement.addAttribute("visible", String.valueOf(shown));
    }
    MetaPropertyPath sortedBy = (MetaPropertyPath) component.getSortContainerPropertyId();
    if (sortedBy != null) {
        columnsRoot.addAttribute("sortProperty", sortedBy.toString());
        columnsRoot.addAttribute("sortAscending", String.valueOf(component.isSortAscending()));
    }
    return true;
}
/** Stores the action invoked when Enter is pressed on the table. */
@Override
public void setEnterPressAction(Action action) {
    enterPressAction = action;
}
/** Returns the Enter-press action, or {@code null} if none was set. */
@Override
public Action getEnterPressAction(){
    return enterPressAction;
}
/**
 * Replaces the item-click action: the previous one is removed from the table's
 * action list and the new one is registered there if not already present.
 */
@Override
public void setItemClickAction(Action action) {
    if (itemClickAction != null) {
        removeAction(itemClickAction);
    }
    itemClickAction = action;
    if (!getActions().contains(action)) {
        addAction(action);
    }
}
/** Returns the item-click action, or {@code null} if none was set. */
@Override
public Action getItemClickAction() {
    return itemClickAction;
}
/** Delegates to the underlying Vaadin component's caption. */
public String getCaption() {
    return component.getCaption();
}
/** Delegates to the underlying Vaadin component's caption. */
public void setCaption(String caption) {
    component.setCaption(caption);
}
/**
 * Toggles multi-select mode. Null selection is allowed exactly when multi-select
 * is on, so a single-select table always keeps one row selected.
 */
@Override
public void setMultiSelect(boolean multiselect) {
    component.setNullSelectionAllowed(multiselect);
    super.setMultiSelect(multiselect);
}
/** Returns the buttons panel shown above the table, or {@code null} if none. */
@Override
public ButtonsPanel getButtonsPanel() {
    return buttonsPanel;
}
/**
 * Replaces the buttons panel above the table. The top panel layout is created
 * lazily on first use and prepended to the component composition; passing
 * {@code null} just detaches the previous panel.
 */
@Override
public void setButtonsPanel(ButtonsPanel panel) {
    if (buttonsPanel != null && topPanel != null) {
        topPanel.removeComponent(WebComponentsHelper.unwrap(buttonsPanel));
    }
    buttonsPanel = panel;
    if (panel != null) {
        if (topPanel == null) {
            topPanel = new HorizontalLayout();
            topPanel.setWidth("100%");
            componentComposition.addComponentAsFirst(topPanel);
        }
        topPanel.addComponent(WebComponentsHelper.unwrap(panel));
    }
}
/**
 * Registers a generated column. If {@code columnId} resolves to a meta property
 * path that path is used as the Vaadin column id, otherwise the raw string is.
 * An existing generator for the same column is replaced. Generated Vaadin fields
 * that are required get wrapped in a layout so the required asterisk is shown.
 *
 * @throws IllegalArgumentException if columnId or generator is null
 */
@Override
public void addGeneratedColumn(String columnId, ColumnGenerator generator) {
    if (columnId == null)
        throw new IllegalArgumentException("columnId is null");
    if (generator == null)
        throw new IllegalArgumentException("generator is null");
    MetaPropertyPath targetCol = getDatasource().getMetaClass().getPropertyPath(columnId);
    Object generatedColumnId = targetCol != null ? targetCol : columnId;
    // replace generator for column if exist
    if (component.getColumnGenerator(generatedColumnId) != null)
        component.removeGeneratedColumn(generatedColumnId);
    component.addGeneratedColumn(
            generatedColumnId,
            new CustomColumnGenerator(generator) {
                @Override
                public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
                    Entity entity = getDatasource().getItem(itemId);
                    com.haulmont.cuba.gui.components.Component component = getColumnGenerator().generateCell(entity);
                    if (component == null)
                        return null;
                    else {
                        Component vComponent = WebComponentsHelper.unwrap(component);
                        // wrap field for show required asterisk
                        if ((vComponent instanceof com.vaadin.ui.Field)
                                && (((com.vaadin.ui.Field) vComponent).isRequired())) {
                            VerticalLayout layout = new VerticalLayout();
                            layout.addComponent(vComponent);
                            vComponent = layout;
                        }
                        return vComponent;
                    }
                }
            }
    );
}
/** Same as {@link #addGeneratedColumn(String, ColumnGenerator)}; the component class is ignored here. */
@Override
public void addGeneratedColumn(String columnId, ColumnGenerator generator,
                               Class<? extends com.haulmont.cuba.gui.components.Component> componentClass) {
    // web ui doesn't make any improvements with componentClass known
    addGeneratedColumn(columnId, generator);
}
/** Removes a generated column, resolving the id the same way addGeneratedColumn does. */
@Override
public void removeGeneratedColumn(String columnId) {
    MetaPropertyPath targetCol = getDatasource().getMetaClass().getPropertyPath(columnId);
    removeGeneratedColumn(targetCol == null ? columnId : targetCol);
}
/**
 * {@inheritDoc}
 * <p>
 * Re-creates the table columns from the container while preserving user-provided
 * column generators. Only generators for columns that are currently visible and
 * not editable are saved and restored: restoring generators for other columns
 * made hidden columns reappear when the table's editability changed (PL-3197).
 */
@Override
public void repaint() {
    if (datasource != null) {
        com.vaadin.data.Container ds = component.getContainerDataSource();
        final Collection<MetaPropertyPath> propertyIds = (Collection<MetaPropertyPath>) ds.getContainerPropertyIds();
        // collect user-provided generators that must survive the column refresh
        final List<Pair<Object, com.vaadin.ui.Table.ColumnGenerator>> columnGenerators = new LinkedList<>();
        Object[] visibleColumns = component.getVisibleColumns();
        for (final MetaPropertyPath id : propertyIds) {
            if (visibleColumns != null && java.util.Arrays.asList(visibleColumns).contains(id)) {
                final Table.Column column = getColumn(id.toString());
                // save generators only for non editable columns
                if (column != null && !column.isEditable()) {
                    com.vaadin.ui.Table.ColumnGenerator generator = component.getColumnGenerator(id);
                    if (generator != null && !(generator instanceof WebAbstractTable.SystemTableColumnGenerator)) {
                        columnGenerators.add(new Pair<Object, com.vaadin.ui.Table.ColumnGenerator>(id, generator));
                    }
                }
            }
        }
        refreshColumns(ds);
        // restore generated columns
        for (Pair<Object, com.vaadin.ui.Table.ColumnGenerator> generatorEntry : columnGenerators) {
            component.addGeneratedColumn(generatorEntry.getFirst(), generatorEntry.getSecond());
        }
    }
    component.requestRepaintAll();
}
/** Selects every item; does nothing unless the table is in multi-select mode. */
@Override
public void selectAll() {
    if (isMultiSelect()) {
        component.setValue(component.getItemIds());
    }
}
/**
 * Runs datasource-level aggregation for all aggregatable columns of the
 * container and, when aggregation cells exist, converts raw values into the
 * Label cells shown in the aggregation row.
 * NOTE(review): assumes every aggregation property id maps to a known column;
 * a missing entry in {@code columns} would NPE on {@code column.getAggregation()}.
 */
protected Map<Object, Object> __aggregate(AggregationContainer container, AggregationContainer.Context context) {
    final List<AggregationInfo> aggregationInfos = new LinkedList<>();
    for (final Object o : container.getAggregationPropertyIds()) {
        final MetaPropertyPath propertyId = (MetaPropertyPath) o;
        final Table.Column column = columns.get(propertyId);
        if (column.getAggregation() != null) {
            aggregationInfos.add(column.getAggregation());
        }
    }
    Map<Object, Object> results = ((CollectionDatasource.Aggregatable) datasource).aggregate(
            aggregationInfos.toArray(new AggregationInfo[aggregationInfos.size()]),
            context.getItemIds()
    );
    if (aggregationCells != null) {
        results = __handleAggregationResults(context, results);
    }
    return results;
}
/**
 * Replaces raw aggregation values with their pre-created Label cells, formatting
 * each value with the column's formatter. Entries whose column has no
 * aggregation cell are left untouched. The map is mutated and returned.
 */
protected Map<Object, Object> __handleAggregationResults(AggregationContainer.Context context, Map<Object, Object> results) {
    for (final Map.Entry<Object, Object> entry : results.entrySet()) {
        final Table.Column column = columns.get(entry.getKey());
        final com.vaadin.ui.Label cell = (com.vaadin.ui.Label) aggregationCells.get(column);
        if (cell != null) {
            WebComponentsHelper.setLabelText(cell, entry.getValue(), column.getFormatter());
            entry.setValue(cell);
        }
    }
    return results;
}
/**
 * Property wrapper for table cells. Adds column-aware behavior on top of
 * {@link PropertyWrapper}: a single calculatable-value listener slot, read-only
 * state derived from table editability and column flags, and cell text rendering
 * that honors the column formatter / captionProperty.
 */
protected class TablePropertyWrapper extends PropertyWrapper {

    // At most one calculatable-property listener is kept; see addListener.
    private ValueChangeListener calcListener;

    private static final long serialVersionUID = -7942046867909695346L;

    public TablePropertyWrapper(Object item, MetaPropertyPath propertyPath) {
        super(item, propertyPath);
    }

    /**
     * Registers a listener; a CalculatablePropertyValueChangeListener replaces
     * any previously registered one so only a single calc listener is active.
     */
    @Override
    public void addListener(ValueChangeListener listener) {
        super.addListener(listener);
        //A listener of a calculatable property must be only one
        if (listener instanceof CalculatablePropertyValueChangeListener) {
            if (this.calcListener != null) {
                removeListener(calcListener);
            }
            calcListener = listener;
        }
    }

    @Override
    public void removeListener(ValueChangeListener listener) {
        super.removeListener(listener);
        if (calcListener == listener) {
            calcListener = null;
        }
    }

    /**
     * A known column is writable only when the table is editable AND the column
     * is marked editable or calculatable; unknown columns fall back to super.
     */
    @Override
    public boolean isReadOnly() {
        final Table.Column column = WebAbstractTable.this.columns.get(propertyPath);
        if (column != null) {
            return !editable || !(BooleanUtils.isTrue(column.isEditable()) || BooleanUtils.isTrue(column.isCalculatable()));
        } else {
            return super.isReadOnly();
        }
    }

    @Override
    public void setReadOnly(boolean newStatus) {
        super.setReadOnly(newStatus);
    }

    /**
     * Cell text: column formatter wins; otherwise an XML "captionProperty" on a
     * class-typed property renders that attribute of the referenced instance;
     * otherwise the default string conversion is used.
     */
    @Override
    public String toString() {
        final Table.Column column = WebAbstractTable.this.columns.get(propertyPath);
        if (column != null) {
            if (column.getFormatter() != null) {
                return column.getFormatter().format(getValue());
            } else if (column.getXmlDescriptor() != null) {
                String captionProperty = column.getXmlDescriptor().attributeValue("captionProperty");
                if (!StringUtils.isEmpty(captionProperty) && propertyPath.getRange().isClass()) {
                    final Object value = getValue();
                    return value != null ? String.valueOf(((Instance) value).getValue(captionProperty)) : null;
                }
            }
        }
        return super.toString();
    }
}
/**
 * Marker for internally created column generators, so {@code repaint()} can tell
 * them apart from user-provided generators and skip re-registering them.
 */
private interface SystemTableColumnGenerator extends com.vaadin.ui.Table.ColumnGenerator {
}
/**
 * Base adapter that carries a CUBA {@link ColumnGenerator} into Vaadin's
 * column-generation mechanism; subclasses implement the actual cell creation.
 */
protected static abstract class CustomColumnGenerator implements com.vaadin.ui.Table.ColumnGenerator {

    // Never reassigned after construction — declared final for immutability.
    private final ColumnGenerator columnGenerator;

    protected CustomColumnGenerator(ColumnGenerator columnGenerator) {
        this.columnGenerator = columnGenerator;
    }

    /** Returns the wrapped CUBA generator. */
    public ColumnGenerator getColumnGenerator() {
        return columnGenerator;
    }
}
/**
 * Renders a cell as a link-styled button whose click behavior is driven by the
 * column XML "clickAction" attribute:
 * <ul>
 *   <li>"open:SCREEN" — opens SCREEN as an editor in the current tab and, on
 *       commit, pushes the edited entity back into the datasource;</li>
 *   <li>"invoke:METHOD" — reflectively calls METHOD(Object) on the controller
 *       frame with the linked entity.</li>
 * </ul>
 * Subclasses define which entity the link refers to via {@link #getItem}.
 */
protected abstract class LinkGenerator implements SystemTableColumnGenerator {
    protected Table.Column column;

    public LinkGenerator(Table.Column column) {
        this.column = column;
    }

    public com.vaadin.ui.Component generateCell(AbstractSelect source, final Object itemId, Object columnId) {
        final Item item = source.getItem(itemId);
        final Property property = item.getItemProperty(columnId);
        final Object value = property.getValue();
        final com.vaadin.ui.Button component = new Button();
        component.setData(value);
        // caption uses the property's string form; empty for null values
        component.setCaption(value == null ? "" : property.toString());
        component.setStyleName("link");
        component.addListener(new Button.ClickListener() {
            @Override
            public void buttonClick(Button.ClickEvent event) {
                final Element element = column.getXmlDescriptor();
                final String clickAction = element.attributeValue("clickAction");
                if (!StringUtils.isEmpty(clickAction)) {
                    if (clickAction.startsWith("open:")) {
                        final com.haulmont.cuba.gui.components.IFrame frame = WebAbstractTable.this.getFrame();
                        String screenName = clickAction.substring("open:".length()).trim();
                        final Window window = frame.openEditor(screenName, getItem(item, property), WindowManager.OpenType.THIS_TAB);
                        window.addListener(new Window.CloseListener() {
                            @Override
                            public void windowClosed(String actionId) {
                                // reflect committed edits back into the table's datasource
                                if (Window.COMMIT_ACTION_ID.equals(actionId) && window instanceof Window.Editor) {
                                    Object item = ((Window.Editor) window).getItem();
                                    if (item instanceof Entity) {
                                        datasource.updateItem((Entity) item);
                                    }
                                }
                            }
                        });
                    } else if (clickAction.startsWith("invoke:")) {
                        final com.haulmont.cuba.gui.components.IFrame frame = WebAbstractTable.this.getFrame();
                        String methodName = clickAction.substring("invoke:".length()).trim();
                        try {
                            IFrame controllerFrame = WebComponentsHelper.getControllerFrame(frame);
                            Method method = controllerFrame.getClass().getMethod(methodName, Object.class);
                            method.invoke(controllerFrame, getItem(item, property));
                        } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
                            throw new RuntimeException("Unable to invoke clickAction", e);
                        }
                    } else {
                        throw new UnsupportedOperationException("Unsupported clickAction format: " + clickAction);
                    }
                }
            }
        });
        return component;
    }

    @Override
    public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell(((AbstractSelect) source), itemId, columnId);
    }

    /** Resolves the entity the link points to for the given cell. */
    protected abstract Entity getItem(Item item, Property property);
}
/** Link generator whose target is the associated entity stored in the cell itself. */
protected class ReadOnlyAssociationGenerator extends LinkGenerator {
    public ReadOnlyAssociationGenerator(Table.Column column) {
        super(column);
    }

    @Override
    protected Entity getItem(Item item, Property property) {
        return (Entity) property.getValue();
    }
}
/** Link generator whose target is the row's own entity (not the cell value). */
protected class CodePropertyGenerator extends LinkGenerator {
    public CodePropertyGenerator(Table.Column column) {
        super(column);
    }

    @Override
    protected Entity getItem(Item item, Property property) {
        return ((ItemWrapper) item).getItem();
    }
}
/**
 * Renders a boolean cell as a read-only checkbox image: the "checked" theme
 * resource for {@code true}, the "unchecked" one for {@code false} or null.
 */
protected class ReadOnlyBooleanDatatypeGenerator implements SystemTableColumnGenerator {
    @Override
    public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell((AbstractSelect) source, itemId, columnId);
    }

    protected Component generateCell(AbstractSelect source, Object itemId, Object columnId) {
        Object cellValue = source.getItem(itemId).getItemProperty(columnId).getValue();
        String imagePath = BooleanUtils.isTrue((Boolean) cellValue)
                ? "components/table/images/checkbox-checked.png"
                : "components/table/images/checkbox-unchecked.png";
        return new com.vaadin.ui.Embedded("", new VersionedThemeResource(imagePath));
    }
}
/**
 * Renders long text cells abbreviated to the column's max text length. Values
 * exceeding the limit (plus a tolerance gap) become a PopupView showing the
 * abbreviated, HTML-escaped text that expands to a read-only TextArea with the
 * full content; shorter values render as a plain Label.
 */
protected class AbbreviatedColumnGenerator implements SystemTableColumnGenerator {

    protected Table.Column column;

    public AbbreviatedColumnGenerator(Table.Column column) {
        this.column = column;
    }

    @Override
    public com.vaadin.ui.Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell((AbstractSelect) source, itemId, columnId);
    }

    protected com.vaadin.ui.Component generateCell(AbstractSelect source, Object itemId, Object columnId) {
        final Property property = source.getItem(itemId).getItemProperty(columnId);
        final Object value = property.getValue();
        if (value == null) {
            return null;
        }
        com.vaadin.ui.Component cell;
        String stringValue = value.toString();
        int maxTextLength = column.getMaxTextLength();
        // MAX_TEXT_LENGTH_GAP tolerance avoids abbreviating values only slightly over the limit
        if (stringValue.length() > maxTextLength + MAX_TEXT_LENGTH_GAP) {
            TextArea content = new TextArea(null, stringValue);
            content.setWidth("100%");
            content.setHeight("100%");
            content.setReadOnly(true);
            CssLayout cssLayout = new CssLayout();
            cssLayout.setHeight("300px");
            cssLayout.setWidth("400px");
            cell = new PopupView(StringEscapeUtils.escapeHtml(StringUtils.abbreviate(stringValue, maxTextLength)),
                    cssLayout);
            cell.addStyleName("abbreviated");
            cssLayout.addComponent(content);
        } else {
            cell = new Label(stringValue);
        }
        return cell;
    }
}
/**
 * Renders a calculatable column as a Label that tracks property changes: the
 * initial value comes from the cell's PropertyWrapper, formatted by the column
 * formatter if any, and a value-change listener keeps the label in sync.
 */
protected class CalculatableColumnGenerator implements SystemTableColumnGenerator {
    @Override
    public Component generateCell(com.vaadin.ui.Table source, Object itemId, Object columnId) {
        return generateCell((AbstractSelect) source, itemId, columnId);
    }

    protected Component generateCell(AbstractSelect source, Object itemId, Object columnId) {
        CollectionDatasource ds = WebAbstractTable.this.getDatasource();
        MetaPropertyPath propertyPath = ds.getMetaClass().getPropertyPath(columnId.toString());
        PropertyWrapper propertyWrapper = (PropertyWrapper) source.getContainerProperty(itemId, propertyPath);
        Formatter formatter = null;
        Table.Column column = WebAbstractTable.this.getColumn(columnId.toString());
        if (column != null) {
            formatter = column.getFormatter();
        }
        final Label label = new Label();
        WebComponentsHelper.setLabelText(label, propertyWrapper.getValue(), formatter);
        label.setWidth("-1px");
        //add property change listener that will update a label value
        propertyWrapper.addListener(new CalculatablePropertyValueChangeListener(label, formatter));
        return label;
    }
}
/**
 * Keeps a calculatable column's Label in sync with its property: on every value
 * change the label text is re-rendered through the (possibly null) formatter.
 */
protected static class CalculatablePropertyValueChangeListener implements Property.ValueChangeListener {

    // Both are fixed at construction time — declared final for immutability.
    private final Label component;
    private final Formatter formatter;

    private static final long serialVersionUID = 8041384664735759397L;

    private CalculatablePropertyValueChangeListener(Label component, Formatter formatter) {
        this.component = component;
        this.formatter = formatter;
    }

    @Override
    public void valueChange(Property.ValueChangeEvent event) {
        WebComponentsHelper.setLabelText(component, event.getProperty().getValue(), formatter);
    }
}
/** Lazily creates the aggregation-cell map and registers a Label cell for the column. */
protected void addAggregationCell(Table.Column column) {
    if (aggregationCells == null) {
        aggregationCells = new HashMap<>();
    }
    aggregationCells.put(column, createAggregationCell());
}
/** Creates an auto-width Label parented to the table for use in the aggregation row. */
protected com.vaadin.ui.Label createAggregationCell() {
    com.vaadin.ui.Label label = new Label();
    label.setWidth("-1px");
    label.setParent(component);
    return label;
}
/** Factory hook for the datasource listener that re-aggregates on value changes. */
protected CollectionDatasourceListener createAggregationDatasourceListener() {
    return new AggregationDatasourceListener();
}
/** Re-runs table aggregation over all datasource items whenever any item value changes. */
protected class AggregationDatasourceListener extends CollectionDsListenerAdapter<Entity> {
    @Override
    public void valueChanged(Entity source, String property, Object prevValue, Object value) {
        final CollectionDatasource ds = WebAbstractTable.this.getDatasource();
        component.aggregate(new AggregationContainer.Context(ds.getItemIds()));
    }
}
/**
 * Creates editor fields for editable table cells. Builds a CUBA component for
 * the cell's property (via the column XML descriptor), applies column width,
 * description and required-ness, wires the owning frame, enforces attribute
 * modification permissions, and returns a Vaadin Field (wrapping non-field
 * components in a {@code FieldWrapper}). Options datasources created for
 * class-typed properties are cached per MetaClass.
 */
protected class WebTableFieldFactory extends com.haulmont.cuba.web.gui.components.AbstractFieldFactory
        implements TableFieldFactory {

    // Cache of lookup datasources, one per target MetaClass.
    protected Map<MetaClass, CollectionDatasource> optionsDatasources = new HashMap<>();

    @Override
    public com.vaadin.ui.Field createField(com.vaadin.data.Container container,
                                           Object itemId, Object propertyId, Component uiContext) {
        String fieldPropertyId = String.valueOf(propertyId);
        Column columnConf = columns.get(propertyId);
        Item item = container.getItem(itemId);
        Entity entity = ((ItemWrapper)item).getItem();
        Datasource fieldDatasource = getItemDatasource(entity);
        com.haulmont.cuba.gui.components.Component columnComponent =
                createField(fieldDatasource, fieldPropertyId, columnConf.getXmlDescriptor());
        com.vaadin.ui.Field fieldImpl = getFieldImplementation(columnComponent);
        if (columnComponent instanceof Field) {
            Field cubaField = (Field) columnComponent;
            if (columnConf.getDescription() != null) {
                cubaField.setDescription(columnConf.getDescription());
            }
            if (requiredColumns.containsKey(columnConf)) {
                cubaField.setRequired(true);
                cubaField.setRequiredMessage(requiredColumns.get(columnConf));
            }
        }
        // explicit column width wins; otherwise the field fills the cell
        if (columnConf.getWidth() != null) {
            columnComponent.setWidth(columnConf.getWidth() + "px");
        } else {
            columnComponent.setWidth("100%");
        }
        if (columnComponent instanceof BelongToFrame) {
            BelongToFrame belongToFrame = (BelongToFrame) columnComponent;
            if (belongToFrame.getFrame() == null) {
                belongToFrame.setFrame(getFrame());
            }
        }
        applyPermissions(columnComponent);
        return fieldImpl;
    }

    /** Unwraps the CUBA component to a Vaadin Field, wrapping non-fields in FieldWrapper. */
    protected com.vaadin.ui.Field getFieldImplementation(com.haulmont.cuba.gui.components.Component columnComponent) {
        com.vaadin.ui.Component composition = WebComponentsHelper.getComposition(columnComponent);
        com.vaadin.ui.Field fieldImpl;
        if (composition instanceof com.vaadin.ui.Field) {
            fieldImpl = (com.vaadin.ui.Field) composition;
        } else {
            fieldImpl = new FieldWrapper(columnComponent);
        }
        return fieldImpl;
    }

    /** Revokes editability when the user lacks the entity-attribute modification permission. */
    protected void applyPermissions(com.haulmont.cuba.gui.components.Component columnComponent) {
        if (columnComponent instanceof DatasourceComponent) {
            DatasourceComponent dsComponent = (DatasourceComponent) columnComponent;
            MetaProperty metaProperty = dsComponent.getMetaProperty();
            if (metaProperty != null) {
                MetaClass metaClass = dsComponent.getDatasource().getMetaClass();
                dsComponent.setEditable(security.isEntityAttrModificationPermitted(metaClass, metaProperty.getName())
                        && dsComponent.isEditable());
            }
        }
    }

    /**
     * Resolves the options datasource for a lookup cell: the "optionsDatasource"
     * named in the column XML if present, otherwise a lazily built (and cached)
     * datasource over the property's target MetaClass.
     *
     * @throws IllegalStateException if the table datasource is null or a named
     *         options datasource cannot be found
     */
    @Override
    protected CollectionDatasource getOptionsDatasource(Datasource fieldDatasource, String propertyId) {
        if (datasource == null)
            throw new IllegalStateException("Table datasource is null");
        Column columnConf = columns.get(datasource.getMetaClass().getPropertyPath(propertyId));
        final DsContext dsContext = datasource.getDsContext();
        String optDsName = columnConf.getXmlDescriptor().attributeValue("optionsDatasource");
        if (StringUtils.isBlank(optDsName)) {
            MetaPropertyPath propertyPath = fieldDatasource.getMetaClass().getPropertyPath(propertyId);
            MetaClass metaClass = propertyPath.getRange().asClass();
            CollectionDatasource ds = optionsDatasources.get(metaClass);
            if (ds != null)
                return ds;
            final DataSupplier dataSupplier = fieldDatasource.getDataSupplier();
            final String id = metaClass.getName();
            final String viewName = null; //metaClass.getName() + ".lookup";
            ds = new DsBuilder(dsContext)
                    .setDataSupplier(dataSupplier)
                    .setId(id)
                    .setMetaClass(metaClass)
                    .setViewName(viewName)
                    .buildCollectionDatasource();
            ds.refresh();
            optionsDatasources.put(metaClass, ds);
            return ds;
        } else {
            CollectionDatasource ds = dsContext.get(optDsName);
            if (ds == null)
                throw new IllegalStateException("Options datasource not found: " + optDsName);
            return ds;
        }
    }
}
/**
 * Auto-saves the current presentation's settings when presentations are enabled,
 * the active presentation is auto-saving, and the client variables indicate a
 * layout change.
 *
 * @return whether a reload is needed; currently always {@code false}
 */
protected boolean handleSpecificVariables(Map<String, Object> variables) {
    boolean needReload = false;
    if (isUsePresentations()) {
        final Presentations p = getPresentations();
        if (p.getCurrent() != null && p.isAutoSave(p.getCurrent()) && needUpdatePresentation(variables)) {
            Element e = p.getSettings(p.getCurrent());
            saveSettings(e);
            p.setSettings(p.getCurrent(), e);
        }
    }
    return needReload;
}
/**
 * Returns whether the client-side variables contain any layout change that
 * should be persisted into the current presentation.
 */
private boolean needUpdatePresentation(Map<String, Object> variables) {
    String[] layoutKeys = {"colwidth", "sortcolumn", "sortascending",
            "columnorder", "collapsedcolumns", "groupedcolumns"};
    for (String key : layoutKeys) {
        if (variables.containsKey(key)) {
            return true;
        }
    }
    return false;
}
/**
 * Paints the presentations flag and, when enabled, the presentations UI section.
 *
 * @throws PaintException on paint target errors
 */
protected void paintSpecificContent(PaintTarget target) throws PaintException {
    target.addVariable(component, "presentations", isUsePresentations());
    if (isUsePresentations()) {
        target.startTag("presentations");
        tablePresentations.paint(target);
        target.endTag("presentations");
    }
}
/**
 * Returns the columns that are currently visible and not collapsed, in display
 * order. Ids without a known column mapping are skipped; an absent visible-column
 * array yields an empty list.
 */
@Override
public List<Table.Column> getNotCollapsedColumns() {
    final Object[] visibleIds = component.getVisibleColumns();
    if (visibleIds == null) {
        return Collections.emptyList();
    }
    final List<Table.Column> result = new ArrayList<>(visibleIds.length);
    for (final Object id : visibleIds) {
        if (component.isColumnCollapsed(id)) {
            continue;
        }
        final Column column = columns.get(id);
        if (column != null) {
            result.add(column);
        }
    }
    return result;
}
/** Enables or disables presentations support for this table. */
@Override
public void usePresentations(boolean use) {
    usePresentations = use;
}
/** Whether presentations support is enabled. */
@Override
public boolean isUsePresentations() {
    return usePresentations;
}
/**
 * Initializes the presentations model and its UI representation.
 *
 * @throws UnsupportedOperationException if presentations are not enabled
 */
@Override
public void loadPresentations() {
    if (isUsePresentations()) {
        presentations = new PresentationsImpl(this);
        tablePresentations = new TablePresentations(this);
    } else {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
}
/**
 * Returns the presentations model.
 *
 * @throws UnsupportedOperationException if presentations are not enabled
 */
@Override
public Presentations getPresentations() {
    if (isUsePresentations()) {
        return presentations;
    } else {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
}
/**
 * Applies the presentation with the given id.
 *
 * @throws UnsupportedOperationException if presentations are not enabled
 */
@Override
public void applyPresentation(Object id) {
    if (isUsePresentations()) {
        Presentation p = presentations.getPresentation(id);
        applyPresentation(p);
    } else {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
}
/**
 * Marks the presentation with the given id as default and applies it; unknown
 * ids are silently ignored.
 *
 * @throws UnsupportedOperationException if presentations are not enabled
 */
@Override
public void applyPresentationAsDefault(Object id) {
    if (isUsePresentations()) {
        Presentation p = presentations.getPresentation(id);
        if (p != null) {
            presentations.setDefault(p);
            applyPresentation(p);
        }
    } else {
        throw new UnsupportedOperationException("Component doesn't use presentations");
    }
}
/** Makes the presentation current, applies its stored settings and repaints. */
protected void applyPresentation(Presentation p) {
    presentations.setCurrent(p);
    Element settingsElement = presentations.getSettings(p);
    applySettings(settingsElement);
    component.requestRepaint();
}
/** Returns the default presentation's id, or {@code null} when there is no default. */
@Override
public Object getDefaultPresentationId() {
    Presentation def = presentations.getDefault();
    return def == null ? null : def.getId();
}
/** Registers a listener notified when a column's collapsed state changes. */
@Override
public void addColumnCollapsedListener(ColumnCollapseListener columnCollapsedListener) {
    columnCollapseListeners.add(columnCollapsedListener);
}
/** Unregisters a previously added column-collapse listener. */
@Override
public void removeColumnCollapseListener(ColumnCollapseListener columnCollapseListener) {
    columnCollapseListeners.remove(columnCollapseListener);
}
} | При изменении редактируемости таблицы, отображаются ненужные колонки #PL-3197 Fixed
| modules/web6/src/com/haulmont/cuba/web/gui/components/WebAbstractTable.java | При изменении редактируемости таблицы, отображаются ненужные колонки #PL-3197 Fixed | <ide><path>odules/web6/src/com/haulmont/cuba/web/gui/components/WebAbstractTable.java
<ide> import com.vaadin.ui.Component;
<ide> import com.vaadin.ui.Label;
<ide> import com.vaadin.ui.TextArea;
<add>import org.apache.commons.lang.ArrayUtils;
<ide> import org.apache.commons.lang.BooleanUtils;
<ide> import org.apache.commons.lang.StringEscapeUtils;
<ide> import org.apache.commons.lang.StringUtils;
<ide> // added generated columns
<ide> final List<Pair<Object, com.vaadin.ui.Table.ColumnGenerator>> columnGenerators = new LinkedList<>();
<ide>
<add> Object[] visibleColumns = component.getVisibleColumns();
<ide> for (final MetaPropertyPath id : propertyIds) {
<del> final Table.Column column = getColumn(id.toString());
<del> // save generators only for non editable columns
<del> if (!column.isEditable()) {
<del> com.vaadin.ui.Table.ColumnGenerator generator = component.getColumnGenerator(id);
<del> if (generator != null && !(generator instanceof WebAbstractTable.SystemTableColumnGenerator)) {
<del> columnGenerators.add(new Pair<Object, com.vaadin.ui.Table.ColumnGenerator>(id, generator));
<add> if (ArrayUtils.contains(visibleColumns, id)) {
<add> final Table.Column column = getColumn(id.toString());
<add> // save generators only for non editable columns
<add> if (!column.isEditable()) {
<add> com.vaadin.ui.Table.ColumnGenerator generator = component.getColumnGenerator(id);
<add> if (generator != null && !(generator instanceof WebAbstractTable.SystemTableColumnGenerator)) {
<add> columnGenerators.add(new Pair<Object, com.vaadin.ui.Table.ColumnGenerator>(id, generator));
<add> }
<ide> }
<ide> }
<ide> } |
|
Java | apache-2.0 | d2cb66df42e144dd2f95809b2ec71dd04bcc92f4 | 0 | BOOtak/touchlogger-dirty,BOOtak/touchlogger-dirty,BOOtak/touchlogger-dirty | package org.leyfer.thesis.touchlogger_dirty.notification;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import org.leyfer.thesis.touchlogger_dirty.R;
import org.leyfer.thesis.touchlogger_dirty.activity.MainActivity;
/**
 * Owns the persistent "gesture controller" status notification. A single
 * {@link NotificationCompat.Builder} is created once and mutated on every state
 * change (online/offline status text, pause/resume action button), then the
 * notification is re-posted under a fixed id.
 * <p>
 * NOTE(review): {@code builder.mActions} is cleared directly, which relies on
 * support-library internals — verify against the support-lib version in use.
 */
public class ControlNotification {

    /** Fixed notification id under which the status notification is (re)posted. */
    public static final int GESTURE_CONTROLLER_NOTIFICATION_ID = 0x7357;

    private static final String STATUS_ONLINE = "online";
    private static final String STATUS_OFFLINE = "offline";

    private final Context context;
    // One builder reused for the notification's whole lifetime.
    private final NotificationCompat.Builder builder;
    private final NotificationCompat.Action pauseAction;
    private final NotificationCompat.Action resumeAction;
    private final NotificationManager notificationManager;

    public ControlNotification(Context context) {
        this.context = context;
        pauseAction = new NotificationCompat.Action(R.drawable.ic_action_pause,
                context.getString(R.string.action_pause),
                PendingIntent.getBroadcast(context, 0, getPauseIntent(), 0));
        resumeAction = new NotificationCompat.Action(R.drawable.ic_action_play_arrow,
                context.getString(R.string.action_resume),
                PendingIntent.getBroadcast(context, 0, getResumeIntent(), 0));
        builder = new NotificationCompat.Builder(context);
        notificationManager = (NotificationManager)
                context.getSystemService(Context.NOTIFICATION_SERVICE);
        // Initial state: offline and not paused; post the first notification.
        setOfflineUi();
        onResumedUi();
        updateNotification();
    }

    /** Re-posts the notification built from the current builder state. */
    private void updateNotification() {
        notificationManager.notify(GESTURE_CONTROLLER_NOTIFICATION_ID, getNotification());
    }

    private static Intent getPauseIntent() {
        return new Intent(NotificationActionReceiver.ACTION_PAUSE);
    }

    private static Intent getResumeIntent() {
        return new Intent(NotificationActionReceiver.ACTION_RESUME);
    }

    /** Builds a Notification from the builder's current state. */
    public Notification getNotification() {
        return builder.build();
    }

    // Updates the title line to show the payload's connection status.
    private void setStatus(String status) {
        builder.setContentTitle(context.getString(R.string.payload_status, status));
    }

    private void setOnlineUi() {
        setStatus(STATUS_ONLINE);
    }

    /** Shows "online" status and re-posts the notification. */
    public void setOnline() {
        setOnlineUi();
        updateNotification();
    }

    private void setOfflineUi() {
        setStatus(STATUS_OFFLINE);
    }

    /** Shows "offline" status and re-posts the notification. */
    public void setOffline() {
        setOfflineUi();
        updateNotification();
    }

    // Paused state: paused icon, single "resume" action.
    private void onPausedUi() {
        builder.setSmallIcon(R.drawable.ic_paused);
        builder.mActions.clear();
        builder.addAction(resumeAction);
    }

    /** Switches the notification to the paused state and re-posts it. */
    public void onPaused() {
        onPausedUi();
        updateNotification();
    }

    // Running state: logging icon, single "pause" action.
    private void onResumedUi() {
        builder.setSmallIcon(R.drawable.ic_logging);
        builder.mActions.clear();
        builder.addAction(pauseAction);
    }

    /** Switches the notification to the running state and re-posts it. */
    public void onResumed() {
        onResumedUi();
        updateNotification();
    }

    /**
     * Receiver for the pause/resume notification actions; subclasses implement
     * what pausing/resuming actually does.
     */
    public abstract static class NotificationActionReceiver extends BroadcastReceiver {
        private static final String ACTION_PAUSE = "notification_action_receiver_pause";
        private static final String ACTION_RESUME = "notification_action_receiver_resume";

        @Override
        public void onReceive(Context context, Intent intent) {
            if (intent.getAction() != null) {
                if (intent.getAction().equals(ACTION_PAUSE)) {
                    Log.d(MainActivity.TAG, "Pausing gesture service");
                    onPause();
                } else if (intent.getAction().equals(ACTION_RESUME)) {
                    Log.d(MainActivity.TAG, "Resuming gesture service");
                    onResume();
                } else {
                    Log.e(MainActivity.TAG, String.format("Invalid action string: %s!",
                            intent.getAction()));
                }
            } else {
                Log.e(MainActivity.TAG, "No action string!");
            }
        }

        /** Filter matching both notification actions, for registering this receiver. */
        public IntentFilter getIntentFilter() {
            IntentFilter intentFilter = new IntentFilter();
            intentFilter.addAction(ACTION_PAUSE);
            intentFilter.addAction(ACTION_RESUME);
            return intentFilter;
        }

        public abstract void onPause();

        public abstract void onResume();
    }
}
| app/src/main/java/org/leyfer/thesis/touchlogger_dirty/notification/ControlNotification.java | package org.leyfer.thesis.touchlogger_dirty.notification;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import org.leyfer.thesis.touchlogger_dirty.R;
import org.leyfer.thesis.touchlogger_dirty.activity.MainActivity;
/**
 * Owns the persistent "gesture controller" status notification. A single
 * {@link NotificationCompat.Builder} is mutated on state changes; note that in
 * this version the public state setters only mutate the builder — the
 * notification is re-posted only where {@code updateNotification()} is called
 * explicitly (the constructor).
 * <p>
 * NOTE(review): {@code builder.mActions} is cleared directly, which relies on
 * support-library internals — verify against the support-lib version in use.
 */
public class ControlNotification {

    /** Fixed notification id under which the status notification is posted. */
    public static final int GESTURE_CONTROLLER_NOTIFICATION_ID = 0x7357;

    private static final String STATUS_ONLINE = "online";
    private static final String STATUS_OFFLINE = "offline";

    private final Context context;
    // One builder reused for the notification's whole lifetime.
    private final NotificationCompat.Builder builder;
    private final NotificationCompat.Action pauseAction;
    private final NotificationCompat.Action resumeAction;
    private final NotificationManager notificationManager;

    public ControlNotification(Context context) {
        this.context = context;
        pauseAction = new NotificationCompat.Action(R.drawable.ic_action_pause,
                context.getString(R.string.action_pause),
                PendingIntent.getBroadcast(context, 0, getPauseIntent(), 0));
        resumeAction = new NotificationCompat.Action(R.drawable.ic_action_play_arrow,
                context.getString(R.string.action_resume),
                PendingIntent.getBroadcast(context, 0, getResumeIntent(), 0));
        builder = new NotificationCompat.Builder(context);
        notificationManager = (NotificationManager)
                context.getSystemService(Context.NOTIFICATION_SERVICE);
        // Initial state: offline and not paused; post the first notification.
        setOffline();
        onResumed();
        updateNotification();
    }

    /** Re-posts the notification built from the builder's current state. */
    private void updateNotification() {
        notificationManager.notify(GESTURE_CONTROLLER_NOTIFICATION_ID, getNotification());
    }

    private static Intent getPauseIntent() {
        return new Intent(NotificationActionReceiver.ACTION_PAUSE);
    }

    private static Intent getResumeIntent() {
        return new Intent(NotificationActionReceiver.ACTION_RESUME);
    }

    /** Builds a Notification from the builder's current state. */
    public Notification getNotification() {
        return builder.build();
    }

    // Updates the title line to show the payload's connection status.
    private void setStatus(String status) {
        builder.setContentTitle(context.getString(R.string.payload_status, status));
    }

    /** Sets the "online" status text on the builder (does not re-post). */
    public void setOnline() {
        setStatus(STATUS_ONLINE);
    }

    /** Sets the "offline" status text on the builder (does not re-post). */
    public void setOffline() {
        setStatus(STATUS_OFFLINE);
    }

    /** Paused state: paused icon, single "resume" action (does not re-post). */
    public void onPaused() {
        builder.setSmallIcon(R.drawable.ic_paused);
        builder.mActions.clear();
        builder.addAction(resumeAction);
    }

    /** Running state: logging icon, single "pause" action (does not re-post). */
    public void onResumed() {
        builder.setSmallIcon(R.drawable.ic_logging);
        builder.mActions.clear();
        builder.addAction(pauseAction);
    }

    /**
     * Receiver for the pause/resume notification actions; subclasses implement
     * what pausing/resuming actually does.
     */
    public abstract static class NotificationActionReceiver extends BroadcastReceiver {
        private static final String ACTION_PAUSE = "notification_action_receiver_pause";
        private static final String ACTION_RESUME = "notification_action_receiver_resume";

        @Override
        public void onReceive(Context context, Intent intent) {
            if (intent.getAction() != null) {
                if (intent.getAction().equals(ACTION_PAUSE)) {
                    Log.d(MainActivity.TAG, "Pausing gesture service");
                    onPause();
                } else if (intent.getAction().equals(ACTION_RESUME)) {
                    Log.d(MainActivity.TAG, "Resuming gesture service");
                    onResume();
                } else {
                    Log.e(MainActivity.TAG, String.format("Invalid action string: %s!",
                            intent.getAction()));
                }
            } else {
                Log.e(MainActivity.TAG, "No action string!");
            }
        }

        /** Filter matching both notification actions, for registering this receiver. */
        public IntentFilter getIntentFilter() {
            IntentFilter intentFilter = new IntentFilter();
            intentFilter.addAction(ACTION_PAUSE);
            intentFilter.addAction(ACTION_RESUME);
            return intentFilter;
        }

        public abstract void onPause();

        public abstract void onResume();
    }
}
| Refactor ControlNotification.
| app/src/main/java/org/leyfer/thesis/touchlogger_dirty/notification/ControlNotification.java | Refactor ControlNotification. | <ide><path>pp/src/main/java/org/leyfer/thesis/touchlogger_dirty/notification/ControlNotification.java
<ide> builder = new NotificationCompat.Builder(context);
<ide> notificationManager = (NotificationManager)
<ide> context.getSystemService(Context.NOTIFICATION_SERVICE);
<del> setOffline();
<del> onResumed();
<add> setOfflineUi();
<add> onResumedUi();
<ide> updateNotification();
<ide> }
<ide>
<ide> builder.setContentTitle(context.getString(R.string.payload_status, status));
<ide> }
<ide>
<del> public void setOnline() {
<add> private void setOnlineUi() {
<ide> setStatus(STATUS_ONLINE);
<ide> }
<ide>
<del> public void setOffline() {
<del> setStatus(STATUS_OFFLINE);
<add> public void setOnline() {
<add> setOnlineUi();
<add> updateNotification();
<ide> }
<ide>
<del> public void onPaused() {
<add> private void setOfflineUi() {
<add> setStatus(STATUS_OFFLINE);
<add>
<add> }
<add>
<add> public void setOffline() {
<add> setOfflineUi();
<add> updateNotification();
<add> }
<add>
<add> private void onPausedUi() {
<ide> builder.setSmallIcon(R.drawable.ic_paused);
<ide> builder.mActions.clear();
<ide> builder.addAction(resumeAction);
<ide> }
<ide>
<del> public void onResumed() {
<add> public void onPaused() {
<add> onPausedUi();
<add> updateNotification();
<add> }
<add>
<add> private void onResumedUi() {
<ide> builder.setSmallIcon(R.drawable.ic_logging);
<ide> builder.mActions.clear();
<ide> builder.addAction(pauseAction);
<add> }
<add>
<add> public void onResumed() {
<add> onResumedUi();
<add> updateNotification();
<ide> }
<ide>
<ide> public abstract static class NotificationActionReceiver extends BroadcastReceiver { |
|
Java | apache-2.0 | error: pathspec 'com/planet_ink/coffee_mud/Commands/sysop/Races.java' did not match any file(s) known to git
| 8f5d93c2336ca54fd7fcd4fc87974c076770e573 | 1 | MaxRau/CoffeeMud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,sfunk1x/CoffeeMud,oriontribunal/CoffeeMud,MaxRau/CoffeeMud,Tycheo/coffeemud,oriontribunal/CoffeeMud,MaxRau/CoffeeMud,MaxRau/CoffeeMud,Tycheo/coffeemud,Tycheo/coffeemud,Tycheo/coffeemud,sfunk1x/CoffeeMud | package com.planet_ink.coffee_mud.Commands.sysop;
public class Races
{
}
| com/planet_ink/coffee_mud/Commands/sysop/Races.java |
git-svn-id: svn://192.168.1.10/public/CoffeeMud@3184 0d6f1817-ed0e-0410-87c9-987e46238f29
| com/planet_ink/coffee_mud/Commands/sysop/Races.java | <ide><path>om/planet_ink/coffee_mud/Commands/sysop/Races.java
<add>package com.planet_ink.coffee_mud.Commands.sysop;
<add>
<add>public class Races
<add>{
<add>} |
||
Java | apache-2.0 | acbfd6696f6fac25fac8de9466ed47ddd804048a | 0 | HuangLS/neo4j,HuangLS/neo4j,HuangLS/neo4j,HuangLS/neo4j,HuangLS/neo4j | /**
* Copyright (c) 2002-2011 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.impl.batchinsert;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.NotFoundException;
import org.neo4j.graphdb.NotInTransactionException;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.ReturnableEvaluator;
import org.neo4j.graphdb.StopEvaluator;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.Traverser;
import org.neo4j.graphdb.Traverser.Order;
import org.neo4j.graphdb.event.KernelEventHandler;
import org.neo4j.graphdb.event.TransactionEventHandler;
import org.neo4j.graphdb.index.IndexManager;
import org.neo4j.kernel.impl.cache.LruCache;
import org.neo4j.kernel.impl.nioneo.store.InvalidRecordException;
class BatchGraphDatabaseImpl implements GraphDatabaseService
{
final BatchInserterImpl batchInserter;
private final LruCache<Long,NodeBatchImpl> nodes =
new LruCache<Long,NodeBatchImpl>( "NodeCache", 10000, null )
{
@Override
public void elementCleaned( NodeBatchImpl node )
{
Map<String,Object> properties = node.getProperties();
if ( properties != null )
{
batchInserter.setNodeProperties( node.getId(), properties );
}
}
};
private final LruCache<Long,RelationshipBatchImpl> rels =
new LruCache<Long,RelationshipBatchImpl>( "RelCache", 10000, null )
{
@Override
public void elementCleaned( RelationshipBatchImpl rel )
{
Map<String,Object> properties = rel.getProperties();
if ( properties != null )
{
batchInserter.setRelationshipProperties( rel.getId(),
properties );
}
}
};
BatchGraphDatabaseImpl( BatchInserterImpl batchInserter )
{
this.batchInserter = batchInserter;
}
BatchInserterImpl getBatchInserter()
{
return batchInserter;
}
public Transaction beginTx()
{
return new FakeTransaction();
}
public Node createNode()
{
long id = batchInserter.createNode( null );
NodeBatchImpl node = new NodeBatchImpl( id, this, emptyProps() );
nodes.put( id, node );
return node;
}
static Map<String,Object> emptyProps()
{
return new HashMap<String,Object>();
}
public boolean enableRemoteShell()
{
return false;
}
public boolean enableRemoteShell( Map<String,Serializable> initialProperties )
{
return false;
}
public Iterable<Node> getAllNodes()
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Node getNodeById( long id )
{
NodeBatchImpl node = nodes.get( id );
if ( node == null )
{
try
{
node = new NodeBatchImpl( id, this,
batchInserter.getNodeProperties( id ) );
nodes.put( id, node );
}
catch ( InvalidRecordException e )
{
throw new NotFoundException( e );
}
}
return node;
}
public Node getReferenceNode()
{
return getNodeById( 0 );
}
public Relationship getRelationshipById( long id )
{
RelationshipBatchImpl rel = rels.get( id );
if ( rel == null )
{
try
{
SimpleRelationship simpleRel =
batchInserter.getRelationshipById( id );
Map<String,Object> props =
batchInserter.getRelationshipProperties( id );
rel = new RelationshipBatchImpl( simpleRel, this, props );
rels.put( id, rel );
}
catch ( InvalidRecordException e )
{
throw new NotFoundException( e );
}
}
return rel;
}
public Iterable<RelationshipType> getRelationshipTypes()
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public void shutdown()
{
batchInserter.shutdown();
}
static class FakeTransaction implements Transaction
{
public void failure()
{
throw new NotInTransactionException( "Batch insert mode, " +
"failure is not an option." );
}
public void finish()
{
}
public void success()
{
}
}
private static class NodeBatchImpl implements Node
{
private final BatchGraphDatabaseImpl graphDbService;
private final long id;
private final Map<String,Object> properties;
NodeBatchImpl( long id, BatchGraphDatabaseImpl graphDbService,
Map<String,Object> properties )
{
this.id = id;
this.graphDbService = graphDbService;
this.properties = properties;
}
public GraphDatabaseService getGraphDatabase()
{
return graphDbService;
}
public Relationship createRelationshipTo( Node otherNode,
RelationshipType type )
{
long relId = graphDbService.getBatchInserter().createRelationship( id,
otherNode.getId(), type, null );
RelationshipBatchImpl rel = new RelationshipBatchImpl(
new SimpleRelationship( relId, id, otherNode.getId(), type ), graphDbService, emptyProps() );
graphDbService.addRelationshipToCache( relId, rel );
return rel;
}
Map<String,Object> getProperties()
{
return properties;
}
public void delete()
{
throw new UnsupportedOperationException();
}
public long getId()
{
return id;
}
private RelIterator newRelIterator( Direction dir,
RelationshipType[] types )
{
Iterable<Long> relIds =
graphDbService.getBatchInserter().getRelationshipIds( id );
return new RelIterator( graphDbService, relIds, id, dir, types );
}
public Iterable<Relationship> getRelationships()
{
return newRelIterator( Direction.BOTH, null );
}
public Iterable<Relationship> getRelationships(
RelationshipType... types )
{
return newRelIterator( Direction.BOTH, types );
}
public Iterable<Relationship> getRelationships( Direction dir )
{
return newRelIterator( dir, null );
}
public Iterable<Relationship> getRelationships( RelationshipType type,
Direction dir )
{
return newRelIterator( dir, new RelationshipType[] { type } );
}
public Relationship getSingleRelationship( RelationshipType type,
Direction dir )
{
Iterator<Relationship> relItr =
newRelIterator( dir, new RelationshipType[] { type } );
if ( relItr.hasNext() )
{
Relationship rel = relItr.next();
if ( relItr.hasNext() )
{
throw new NotFoundException( "More than one relationship[" +
type + ", " + dir + "] found for " + this );
}
return rel;
}
return null;
}
public boolean hasRelationship()
{
Iterator<Relationship> relItr =
newRelIterator( Direction.BOTH, null );
return relItr.hasNext();
}
public boolean hasRelationship( RelationshipType... types )
{
Iterator<Relationship> relItr =
newRelIterator( Direction.BOTH, types );
return relItr.hasNext();
}
public boolean hasRelationship( Direction dir )
{
Iterator<Relationship> relItr =
newRelIterator( dir, null );
return relItr.hasNext();
}
public boolean hasRelationship( RelationshipType type, Direction dir )
{
Iterator<Relationship> relItr =
newRelIterator( dir, new RelationshipType[] { type } );
return relItr.hasNext();
}
/* Tentative expansion API
public Expansion<Relationship> expandAll()
{
return Traversal.expanderForAllTypes().expand( this );
}
public Expansion<Relationship> expand( RelationshipType type )
{
return expand( type, Direction.BOTH );
}
public Expansion<Relationship> expand( RelationshipType type,
Direction direction )
{
return Traversal.expanderForTypes( type, direction ).expand(
this );
}
public Expansion<Relationship> expand( Direction direction )
{
return Traversal.expanderForAllTypes( direction ).expand(
this );
}
public Expansion<Relationship> expand( RelationshipExpander expander )
{
return Traversal.expander( expander ).expand( this );
}
*/
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator,
ReturnableEvaluator returnableEvaluator,
RelationshipType relationshipType, Direction direction )
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator,
ReturnableEvaluator returnableEvaluator,
RelationshipType firstRelationshipType, Direction firstDirection,
RelationshipType secondRelationshipType, Direction secondDirection )
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator,
ReturnableEvaluator returnableEvaluator,
Object... relationshipTypesAndDirections )
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Object getProperty( String key )
{
Object val = properties.get( key );
if ( val == null )
{
throw new NotFoundException( key );
}
return val;
}
public Object getProperty( String key, Object defaultValue )
{
Object val = properties.get( key );
if ( val == null )
{
return defaultValue;
}
return val;
}
public Iterable<String> getPropertyKeys()
{
return properties.keySet();
}
public Iterable<Object> getPropertyValues()
{
return properties.values();
}
public boolean hasProperty( String key )
{
return properties.containsKey( key );
}
public Object removeProperty( String key )
{
Object val = properties.remove( key );
if ( val == null )
{
throw new NotFoundException( "Property " + key );
}
return val;
}
public void setProperty( String key, Object value )
{
properties.put( key, value );
}
@Override
public boolean equals( Object o )
{
if ( !(o instanceof Node) )
{
return false;
}
return this.getId() == ((Node) o).getId();
}
@Override
public int hashCode()
{
return (int) ( id ^ ( id >>> 32 ) );
}
}
private static class RelationshipBatchImpl implements Relationship
{
private final SimpleRelationship rel;
private final BatchGraphDatabaseImpl graphDbService;
private final Map<String,Object> properties;
RelationshipBatchImpl( SimpleRelationship rel,
BatchGraphDatabaseImpl graphDbService, Map<String,Object> properties )
{
this.rel = rel;
this.graphDbService = graphDbService;
this.properties = properties;
}
public GraphDatabaseService getGraphDatabase()
{
return graphDbService;
}
Map<String,Object> getProperties()
{
return properties;
}
public void delete()
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Node getEndNode()
{
return graphDbService.getNodeById( rel.getEndNode() );
}
public long getId()
{
return rel.getId();
}
public Node[] getNodes()
{
return new Node[] { getStartNode(), getEndNode() };
}
public Node getOtherNode( Node node )
{
Node startNode = getStartNode();
Node endNode = getEndNode();
if ( node.equals( endNode ) )
{
return startNode;
}
if ( node.equals( startNode ) )
{
return endNode;
}
throw new IllegalArgumentException( "" + node );
}
public Node getStartNode()
{
return graphDbService.getNodeById( rel.getStartNode() );
}
public RelationshipType getType()
{
return rel.getType();
}
public boolean isType( RelationshipType type )
{
return rel.getType().equals( type );
}
public Object getProperty( String key )
{
Object val = properties.get( key );
if ( val == null )
{
throw new NotFoundException( key );
}
return val;
}
public Object getProperty( String key, Object defaultValue )
{
Object val = properties.get( key );
if ( val == null )
{
return defaultValue;
}
return val;
}
public Iterable<String> getPropertyKeys()
{
return properties.keySet();
}
public Iterable<Object> getPropertyValues()
{
return properties.values();
}
public boolean hasProperty( String key )
{
return properties.containsKey( key );
}
public Object removeProperty( String key )
{
Object val = properties.remove( key );
if ( val == null )
{
throw new NotFoundException( "Property " + key );
}
return val;
}
public void setProperty( String key, Object value )
{
properties.put( key, value );
}
@Override
public boolean equals( Object o )
{
if ( !(o instanceof Relationship) )
{
return false;
}
return this.getId() == ((Relationship) o).getId();
}
@Override
public int hashCode()
{
return (int) ( rel.getId() ^ ( rel.getId() >>> 32 ) );
}
}
void addRelationshipToCache( long id, RelationshipBatchImpl rel )
{
rels.put( id, rel );
}
static class RelIterator implements
Iterable<Relationship>, Iterator<Relationship>
{
private final BatchGraphDatabaseImpl graphDbService;
private final Iterable<Long> relIds;
private final Iterator<Long> relItr;
private final long nodeId;
private final Direction dir;
private final RelationshipType[] types;
private Relationship nextElement;
RelIterator( BatchGraphDatabaseImpl graphDbService, Iterable<Long> relIds,
long nodeId, Direction dir, RelationshipType[] types )
{
this.graphDbService = graphDbService;
this.relIds = relIds;
this.relItr = relIds.iterator();
this.nodeId = nodeId;
this.dir = dir;
this.types = types;
}
public Iterator<Relationship> iterator()
{
return new RelIterator( graphDbService, relIds, nodeId, dir, types );
}
public boolean hasNext()
{
getNextElement();
if ( nextElement != null )
{
return true;
}
return false;
}
public Relationship next()
{
getNextElement();
if ( nextElement != null )
{
Relationship returnVal = nextElement;
nextElement = null;
return returnVal;
}
throw new NoSuchElementException();
}
private void getNextElement()
{
while ( nextElement == null && relItr.hasNext() )
{
Relationship possibleRel =
graphDbService.getRelationshipById( relItr.next() );
if ( dir == Direction.OUTGOING &&
possibleRel.getEndNode().getId() == nodeId )
{
continue;
}
if ( dir == Direction.INCOMING &&
possibleRel.getStartNode().getId() == nodeId )
{
continue;
}
if ( types != null )
{
for ( RelationshipType type : types )
{
if ( type.name().equals(
possibleRel.getType().name() ) )
{
nextElement = possibleRel;
break;
}
}
}
else
{
nextElement = possibleRel;
}
}
}
public void remove()
{
throw new UnsupportedOperationException();
}
}
void clearCaches()
{
nodes.clear();
rels.clear();
}
public KernelEventHandler registerKernelEventHandler(
KernelEventHandler handler )
{
throw new UnsupportedOperationException();
}
public <T> TransactionEventHandler<T> registerTransactionEventHandler(
TransactionEventHandler<T> handler )
{
throw new UnsupportedOperationException();
}
public KernelEventHandler unregisterKernelEventHandler(
KernelEventHandler handler )
{
throw new UnsupportedOperationException();
}
public <T> TransactionEventHandler<T> unregisterTransactionEventHandler(
TransactionEventHandler<T> handler )
{
throw new UnsupportedOperationException();
}
public IndexManager index()
{
throw new UnsupportedOperationException();
}
} | advanced/kernel/src/main/java/org/neo4j/kernel/impl/batchinsert/BatchGraphDatabaseImpl.java | /**
* Copyright (c) 2002-2011 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.impl.batchinsert;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.NotFoundException;
import org.neo4j.graphdb.NotInTransactionException;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.ReturnableEvaluator;
import org.neo4j.graphdb.StopEvaluator;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.Traverser;
import org.neo4j.graphdb.Traverser.Order;
import org.neo4j.graphdb.event.KernelEventHandler;
import org.neo4j.graphdb.event.TransactionEventHandler;
import org.neo4j.graphdb.index.IndexManager;
import org.neo4j.kernel.impl.cache.LruCache;
import org.neo4j.kernel.impl.nioneo.store.InvalidRecordException;
class BatchGraphDatabaseImpl implements GraphDatabaseService
{
final BatchInserterImpl batchInserter;
private final LruCache<Long,NodeBatchImpl> nodes =
new LruCache<Long,NodeBatchImpl>( "NodeCache", 10000, null )
{
@Override
public void elementCleaned( NodeBatchImpl node )
{
Map<String,Object> properties = node.getProperties();
if ( properties != null )
{
batchInserter.setNodeProperties( node.getId(), properties );
}
}
};
private final LruCache<Long,RelationshipBatchImpl> rels =
new LruCache<Long,RelationshipBatchImpl>( "RelCache", 10000, null )
{
@Override
public void elementCleaned( RelationshipBatchImpl rel )
{
Map<String,Object> properties = rel.getProperties();
if ( properties != null )
{
batchInserter.setRelationshipProperties( rel.getId(),
properties );
}
}
};
BatchGraphDatabaseImpl( BatchInserterImpl batchInserter )
{
this.batchInserter = batchInserter;
}
BatchInserterImpl getBatchInserter()
{
return batchInserter;
}
public Transaction beginTx()
{
return new FakeTransaction();
}
public Node createNode()
{
long id = batchInserter.createNode( null );
NodeBatchImpl node = new NodeBatchImpl( id, this, emptyProps() );
nodes.put( id, node );
return node;
}
static Map<String,Object> emptyProps()
{
return new HashMap<String,Object>();
}
public boolean enableRemoteShell()
{
return false;
}
public boolean enableRemoteShell( Map<String,Serializable> initialProperties )
{
return false;
}
public Iterable<Node> getAllNodes()
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Node getNodeById( long id )
{
NodeBatchImpl node = nodes.get( id );
if ( node == null )
{
try
{
node = new NodeBatchImpl( id, this,
batchInserter.getNodeProperties( id ) );
nodes.put( id, node );
}
catch ( InvalidRecordException e )
{
throw new NotFoundException( e );
}
}
return node;
}
public Node getReferenceNode()
{
return getNodeById( 0 );
}
public Relationship getRelationshipById( long id )
{
RelationshipBatchImpl rel = rels.get( id );
if ( rel == null )
{
try
{
SimpleRelationship simpleRel =
batchInserter.getRelationshipById( id );
Map<String,Object> props =
batchInserter.getRelationshipProperties( id );
rel = new RelationshipBatchImpl( simpleRel, this, props );
rels.put( id, rel );
}
catch ( InvalidRecordException e )
{
throw new NotFoundException( e );
}
}
return rel;
}
public Iterable<RelationshipType> getRelationshipTypes()
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public void shutdown()
{
batchInserter.shutdown();
}
static class FakeTransaction implements Transaction
{
public void failure()
{
throw new NotInTransactionException( "Batch insert mode, " +
"failure is not an option." );
}
public void finish()
{
}
public void success()
{
}
}
private static class NodeBatchImpl implements Node
{
private final BatchGraphDatabaseImpl graphDbService;
private final long id;
private final Map<String,Object> properties;
NodeBatchImpl( long id, BatchGraphDatabaseImpl graphDbService,
Map<String,Object> properties )
{
this.id = id;
this.graphDbService = graphDbService;
this.properties = properties;
}
public GraphDatabaseService getGraphDatabase()
{
return graphDbService;
}
public Relationship createRelationshipTo( Node otherNode,
RelationshipType type )
{
long relId = graphDbService.getBatchInserter().createRelationship( id,
otherNode.getId(), type, null );
RelationshipBatchImpl rel = new RelationshipBatchImpl(
new SimpleRelationship( (int)relId, (int) id,
(int) otherNode.getId(), type ), graphDbService, emptyProps() );
graphDbService.addRelationshipToCache( relId, rel );
return rel;
}
Map<String,Object> getProperties()
{
return properties;
}
public void delete()
{
throw new UnsupportedOperationException();
}
public long getId()
{
return id;
}
private RelIterator newRelIterator( Direction dir,
RelationshipType[] types )
{
Iterable<Long> relIds =
graphDbService.getBatchInserter().getRelationshipIds( id );
return new RelIterator( graphDbService, relIds, id, dir, types );
}
public Iterable<Relationship> getRelationships()
{
return newRelIterator( Direction.BOTH, null );
}
public Iterable<Relationship> getRelationships(
RelationshipType... types )
{
return newRelIterator( Direction.BOTH, types );
}
public Iterable<Relationship> getRelationships( Direction dir )
{
return newRelIterator( dir, null );
}
public Iterable<Relationship> getRelationships( RelationshipType type,
Direction dir )
{
return newRelIterator( dir, new RelationshipType[] { type } );
}
public Relationship getSingleRelationship( RelationshipType type,
Direction dir )
{
Iterator<Relationship> relItr =
newRelIterator( dir, new RelationshipType[] { type } );
if ( relItr.hasNext() )
{
Relationship rel = relItr.next();
if ( relItr.hasNext() )
{
throw new NotFoundException( "More than one relationship[" +
type + ", " + dir + "] found for " + this );
}
return rel;
}
return null;
}
public boolean hasRelationship()
{
Iterator<Relationship> relItr =
newRelIterator( Direction.BOTH, null );
return relItr.hasNext();
}
public boolean hasRelationship( RelationshipType... types )
{
Iterator<Relationship> relItr =
newRelIterator( Direction.BOTH, types );
return relItr.hasNext();
}
public boolean hasRelationship( Direction dir )
{
Iterator<Relationship> relItr =
newRelIterator( dir, null );
return relItr.hasNext();
}
public boolean hasRelationship( RelationshipType type, Direction dir )
{
Iterator<Relationship> relItr =
newRelIterator( dir, new RelationshipType[] { type } );
return relItr.hasNext();
}
/* Tentative expansion API
public Expansion<Relationship> expandAll()
{
return Traversal.expanderForAllTypes().expand( this );
}
public Expansion<Relationship> expand( RelationshipType type )
{
return expand( type, Direction.BOTH );
}
public Expansion<Relationship> expand( RelationshipType type,
Direction direction )
{
return Traversal.expanderForTypes( type, direction ).expand(
this );
}
public Expansion<Relationship> expand( Direction direction )
{
return Traversal.expanderForAllTypes( direction ).expand(
this );
}
public Expansion<Relationship> expand( RelationshipExpander expander )
{
return Traversal.expander( expander ).expand( this );
}
*/
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator,
ReturnableEvaluator returnableEvaluator,
RelationshipType relationshipType, Direction direction )
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator,
ReturnableEvaluator returnableEvaluator,
RelationshipType firstRelationshipType, Direction firstDirection,
RelationshipType secondRelationshipType, Direction secondDirection )
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator,
ReturnableEvaluator returnableEvaluator,
Object... relationshipTypesAndDirections )
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Object getProperty( String key )
{
Object val = properties.get( key );
if ( val == null )
{
throw new NotFoundException( key );
}
return val;
}
public Object getProperty( String key, Object defaultValue )
{
Object val = properties.get( key );
if ( val == null )
{
return defaultValue;
}
return val;
}
public Iterable<String> getPropertyKeys()
{
return properties.keySet();
}
public Iterable<Object> getPropertyValues()
{
return properties.values();
}
public boolean hasProperty( String key )
{
return properties.containsKey( key );
}
public Object removeProperty( String key )
{
Object val = properties.remove( key );
if ( val == null )
{
throw new NotFoundException( "Property " + key );
}
return val;
}
public void setProperty( String key, Object value )
{
properties.put( key, value );
}
@Override
public boolean equals( Object o )
{
if ( !(o instanceof Node) )
{
return false;
}
return this.getId() == ((Node) o).getId();
}
@Override
public int hashCode()
{
return (int) id;
}
}
private static class RelationshipBatchImpl implements Relationship
{
private final SimpleRelationship rel;
private final BatchGraphDatabaseImpl graphDbService;
private final Map<String,Object> properties;
RelationshipBatchImpl( SimpleRelationship rel,
BatchGraphDatabaseImpl graphDbService, Map<String,Object> properties )
{
this.rel = rel;
this.graphDbService = graphDbService;
this.properties = properties;
}
public GraphDatabaseService getGraphDatabase()
{
return graphDbService;
}
Map<String,Object> getProperties()
{
return properties;
}
public void delete()
{
throw new UnsupportedOperationException( "Batch inserter mode" );
}
public Node getEndNode()
{
return graphDbService.getNodeById( rel.getEndNode() );
}
public long getId()
{
return rel.getId();
}
public Node[] getNodes()
{
return new Node[] { getStartNode(), getEndNode() };
}
public Node getOtherNode( Node node )
{
Node startNode = getStartNode();
Node endNode = getEndNode();
if ( node.equals( endNode ) )
{
return startNode;
}
if ( node.equals( startNode ) )
{
return endNode;
}
throw new IllegalArgumentException( "" + node );
}
public Node getStartNode()
{
return graphDbService.getNodeById( rel.getStartNode() );
}
public RelationshipType getType()
{
return rel.getType();
}
public boolean isType( RelationshipType type )
{
return rel.getType().equals( type );
}
public Object getProperty( String key )
{
Object val = properties.get( key );
if ( val == null )
{
throw new NotFoundException( key );
}
return val;
}
public Object getProperty( String key, Object defaultValue )
{
Object val = properties.get( key );
if ( val == null )
{
return defaultValue;
}
return val;
}
public Iterable<String> getPropertyKeys()
{
return properties.keySet();
}
public Iterable<Object> getPropertyValues()
{
return properties.values();
}
public boolean hasProperty( String key )
{
return properties.containsKey( key );
}
public Object removeProperty( String key )
{
Object val = properties.remove( key );
if ( val == null )
{
throw new NotFoundException( "Property " + key );
}
return val;
}
public void setProperty( String key, Object value )
{
properties.put( key, value );
}
@Override
public boolean equals( Object o )
{
if ( !(o instanceof Relationship) )
{
return false;
}
return this.getId() == ((Relationship) o).getId();
}
@Override
public int hashCode()
{
return (int) rel.getId();
}
}
void addRelationshipToCache( long id, RelationshipBatchImpl rel )
{
rels.put( id, rel );
}
    /**
     * Iterator/iterable over the relationships of a single node, lazily
     * resolving relationship ids to {@link Relationship} instances and
     * filtering them by direction and (optionally) by relationship type.
     */
    static class RelIterator implements
        Iterable<Relationship>, Iterator<Relationship>
    {
        // Used to resolve relationship ids into Relationship instances.
        private final BatchGraphDatabaseImpl graphDbService;
        // Kept so iterator() can hand out fresh iterations over the same ids.
        private final Iterable<Long> relIds;
        private final Iterator<Long> relItr;
        // The node whose relationships are being iterated.
        private final long nodeId;
        // Direction filter relative to nodeId.
        private final Direction dir;
        // Type filter; null means "any type". Matching is done by type name.
        private final RelationshipType[] types;
        // One-element lookahead buffer; null when no element is buffered.
        private Relationship nextElement;

        RelIterator( BatchGraphDatabaseImpl graphDbService, Iterable<Long> relIds,
            long nodeId, Direction dir, RelationshipType[] types )
        {
            this.graphDbService = graphDbService;
            this.relIds = relIds;
            this.relItr = relIds.iterator();
            this.nodeId = nodeId;
            this.dir = dir;
            this.types = types;
        }

        public Iterator<Relationship> iterator()
        {
            // Return a fresh iterator so the Iterable can be traversed
            // multiple times independently.
            return new RelIterator( graphDbService, relIds, nodeId, dir, types );
        }

        public boolean hasNext()
        {
            getNextElement();
            if ( nextElement != null )
            {
                return true;
            }
            return false;
        }

        public Relationship next()
        {
            getNextElement();
            if ( nextElement != null )
            {
                // Hand out and clear the lookahead buffer.
                Relationship returnVal = nextElement;
                nextElement = null;
                return returnVal;
            }
            throw new NoSuchElementException();
        }

        /**
         * Advances the underlying id iterator until a relationship passes
         * both the direction and the type filter, buffering it in
         * nextElement. Does nothing if an element is already buffered;
         * leaves the buffer null when the ids are exhausted.
         */
        private void getNextElement()
        {
            while ( nextElement == null && relItr.hasNext() )
            {
                Relationship possibleRel =
                    graphDbService.getRelationshipById( relItr.next() );
                // OUTGOING: skip relationships that point at this node.
                if ( dir == Direction.OUTGOING &&
                    possibleRel.getEndNode().getId() == nodeId )
                {
                    continue;
                }
                // INCOMING: skip relationships that originate at this node.
                if ( dir == Direction.INCOMING &&
                    possibleRel.getStartNode().getId() == nodeId )
                {
                    continue;
                }
                if ( types != null )
                {
                    // Accept the relationship only if its type name matches
                    // one of the requested types.
                    for ( RelationshipType type : types )
                    {
                        if ( type.name().equals(
                            possibleRel.getType().name() ) )
                        {
                            nextElement = possibleRel;
                            break;
                        }
                    }
                }
                else
                {
                    nextElement = possibleRel;
                }
            }
        }

        public void remove()
        {
            // Removal is not supported for batch-inserted data.
            throw new UnsupportedOperationException();
        }
    }
    /**
     * Clears the node and relationship instance caches.
     */
    void clearCaches()
    {
        nodes.clear();
        rels.clear();
    }

    // Event handlers and indexing are not supported in batch insertion
    // mode; all of the following operations fail fast.

    public KernelEventHandler registerKernelEventHandler(
        KernelEventHandler handler )
    {
        throw new UnsupportedOperationException();
    }

    public <T> TransactionEventHandler<T> registerTransactionEventHandler(
        TransactionEventHandler<T> handler )
    {
        throw new UnsupportedOperationException();
    }

    public KernelEventHandler unregisterKernelEventHandler(
        KernelEventHandler handler )
    {
        throw new UnsupportedOperationException();
    }

    public <T> TransactionEventHandler<T> unregisterTransactionEventHandler(
        TransactionEventHandler<T> handler )
    {
        throw new UnsupportedOperationException();
    }

    public IndexManager index()
    {
        throw new UnsupportedOperationException();
    }
} | Found a cast to int for relationship id.
| advanced/kernel/src/main/java/org/neo4j/kernel/impl/batchinsert/BatchGraphDatabaseImpl.java | Found a cast to int for relationship id. | <ide><path>dvanced/kernel/src/main/java/org/neo4j/kernel/impl/batchinsert/BatchGraphDatabaseImpl.java
<ide> long relId = graphDbService.getBatchInserter().createRelationship( id,
<ide> otherNode.getId(), type, null );
<ide> RelationshipBatchImpl rel = new RelationshipBatchImpl(
<del> new SimpleRelationship( (int)relId, (int) id,
<del> (int) otherNode.getId(), type ), graphDbService, emptyProps() );
<add> new SimpleRelationship( relId, id, otherNode.getId(), type ), graphDbService, emptyProps() );
<ide> graphDbService.addRelationshipToCache( relId, rel );
<ide> return rel;
<ide> }
<ide> @Override
<ide> public int hashCode()
<ide> {
<del> return (int) id;
<add> return (int) ( id ^ ( id >>> 32 ) );
<ide> }
<ide> }
<ide>
<ide> @Override
<ide> public int hashCode()
<ide> {
<del> return (int) rel.getId();
<add> return (int) ( rel.getId() ^ ( rel.getId() >>> 32 ) );
<ide> }
<ide> }
<ide>
<ide> {
<ide> throw new UnsupportedOperationException();
<ide> }
<del>
<add>
<ide> public IndexManager index()
<ide> {
<ide> throw new UnsupportedOperationException(); |
|
Java | mit | 786a9104002f17f7d5c256d3a5eae239b3820f6a | 0 | rmsy/Protobuf-Packet | package tc.oc.protobuf.packet.util;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import javax.annotation.Nonnull;
import com.google.common.base.Preconditions;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Message;
public final class DescriptorUtil {
    private DescriptorUtil() { }

    /**
     * Extracts the protobuf {@link Descriptor} for the single message
     * parameter of the given handler method.
     *
     * @param method the candidate handler method; may not be null
     * @return the descriptor of the method's message parameter
     * @throws IllegalArgumentException if the method does not take exactly
     *         one parameter, the parameter is not a protobuf {@link Message}
     *         type, or the descriptor cannot be obtained from it
     */
    public static @Nonnull Descriptor getDescriptor(@Nonnull Method method) throws IllegalArgumentException {
        Preconditions.checkNotNull(method, "method");

        // check params
        Class<?>[] params = method.getParameterTypes();
        if (params.length < 1) {
            throw new IllegalArgumentException("too few arguments to be a handler");
        } else if (params.length > 1) {
            throw new IllegalArgumentException("has too many arguments to be a handler");
        }

        // check to see if it is a message type
        Class<?> msgClass = params[0];
        if (!Message.class.isAssignableFrom(msgClass)) {
            throw new IllegalArgumentException("parameter type " + msgClass + " is not a protobuf message type");
        }

        // try to get the static method for fetching the descriptor
        Method getDescriptor;
        try {
            getDescriptor = msgClass.getMethod("getDescriptor");
        } catch (NoSuchMethodException e) {
            throw new IllegalArgumentException("parameter type " + msgClass + " does not have the required getDescriptor() static method", e);
        } catch (SecurityException e) {
            throw new IllegalArgumentException("failed to fetch the getDescriptor() method for " + msgClass + " due to security constraints", e);
        }

        // try to invoke the method to get the descriptor; it is static, so
        // no receiver instance is needed
        Object rawDesc;
        try {
            rawDesc = getDescriptor.invoke(null);
        } catch (IllegalAccessException e) {
            throw new IllegalArgumentException("failed to invoke " + getDescriptor + " due to security constraints", e);
        } catch (InvocationTargetException e) {
            throw new IllegalArgumentException("exception when invoking " + getDescriptor, e);
        }

        // check to ensure the result is correct
        if (rawDesc instanceof Descriptor) {
            return (Descriptor) rawDesc;
        } else {
            throw new IllegalArgumentException("getDescriptor() for " + msgClass + " returned an object that was not a Descriptor");
        }
    }
}
| src/main/java/tc/oc/protobuf/packet/util/DescriptorUtil.java | package tc.oc.protobuf.packet.util;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import javax.annotation.Nonnull;
import com.google.common.base.Preconditions;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Message;
public final class DescriptorUtil {
    private DescriptorUtil() { }

    /**
     * Extracts the protobuf {@link Descriptor} for the single message
     * parameter of the given handler method.
     *
     * @param method the candidate handler method; may not be null
     * @return the descriptor of the method's message parameter
     * @throws IllegalArgumentException if the method does not take exactly
     *         one parameter, the parameter is not a protobuf {@link Message}
     *         type, or the descriptor cannot be obtained from it
     */
    public static @Nonnull Descriptor getDescriptor(@Nonnull Method method) throws IllegalArgumentException {
        Preconditions.checkNotNull(method, "method");

        // check params
        Class<?>[] params = method.getParameterTypes();
        if(params.length < 1) {
            throw new IllegalArgumentException("too few arguments to be a handler");
        } else if (params.length > 1) {
            throw new IllegalArgumentException("has too many arguments to be a handler");
        }

        // check to see if it is a message type
        // BUG FIX: the check was previously missing the negation, so every
        // actual Message subtype was rejected and every non-Message type
        // was accepted.
        if (!Message.class.isAssignableFrom(params[0])) {
            throw new IllegalArgumentException("parameter type " + params[0] + " is not a protobuf message type");
        }
        Class<?> msgClass = params[0];

        // try to get the method for getting the descriptor
        Method getDescriptor;
        try {
            getDescriptor = msgClass.getMethod("getDescriptor");
        } catch (NoSuchMethodException e) {
            throw new IllegalArgumentException("paramter type " + msgClass + " does not have the required getDescriptor() static method", e);
        } catch (SecurityException e) {
            throw new IllegalArgumentException("failed to fetch the getDescriptor() method for " + msgClass + " due to security constraints", e);
        }

        // try to invoke the method to get the descriptor
        Object rawDesc;
        try {
            rawDesc = getDescriptor.invoke(null);
        } catch (IllegalAccessException e) {
            throw new IllegalArgumentException("failed to invoke " + getDescriptor + " due to security constraints", e);
        } catch (InvocationTargetException e) {
            throw new IllegalArgumentException("exception when invoking " + getDescriptor, e);
        }

        // check to ensure the result is correct
        if(rawDesc instanceof Descriptor) {
            return (Descriptor) rawDesc;
        } else {
            throw new IllegalArgumentException("getDescriptor() for " + msgClass + " returned an object that was not a Descriptor");
        }
    }
}
| Add missing exclamation point
| src/main/java/tc/oc/protobuf/packet/util/DescriptorUtil.java | Add missing exclamation point | <ide><path>rc/main/java/tc/oc/protobuf/packet/util/DescriptorUtil.java
<ide>
<ide> // check to see if it is a message type
<ide> Class<?> msgClass = params[0];
<del> if(Message.class.isAssignableFrom(msgClass)) {
<add> if (!Message.class.isAssignableFrom(msgClass)) {
<ide> throw new IllegalArgumentException("parameter type " + msgClass + " is not a protobuf message type");
<ide> }
<ide> |
|
Java | apache-2.0 | 3f4fc002b69aa392b200890a28277106857d20a7 | 0 | michael-rapp/AndroidAdapters | /*
* AndroidAdapters Copyright 2014 Michael Rapp
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*/
package de.mrapp.android.adapter.list.filterable;
import static de.mrapp.android.adapter.util.Condition.ensureAtLeast;
import static de.mrapp.android.adapter.util.Condition.ensureAtMaximum;
import static de.mrapp.android.adapter.util.Condition.ensureNotNull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.regex.Pattern;
import android.content.Context;
import android.os.Bundle;
import android.util.SparseIntArray;
import de.mrapp.android.adapter.Filter;
import de.mrapp.android.adapter.ListAdapter;
import de.mrapp.android.adapter.Order;
import de.mrapp.android.adapter.datastructure.AppliedFilter;
import de.mrapp.android.adapter.datastructure.item.Item;
import de.mrapp.android.adapter.datastructure.item.ItemComparator;
import de.mrapp.android.adapter.inflater.Inflater;
import de.mrapp.android.adapter.list.ListAdapterListener;
import de.mrapp.android.adapter.list.enablestate.ListEnableStateListener;
import de.mrapp.android.adapter.list.itemstate.ListItemStateListener;
import de.mrapp.android.adapter.list.sortable.AbstractSortableListAdapter;
import de.mrapp.android.adapter.list.sortable.ListSortingListener;
import de.mrapp.android.adapter.logging.LogLevel;
import de.mrapp.android.adapter.util.VisibleForTesting;
/**
* An abstract base class for all adapters, whose underlying data is managed as
* a filterable list of arbitrary items. Such an adapter's purpose is to provide
* the underlying data for visualization using a {@link ListView} widget.
*
* @param <DataType>
* The type of the adapter's underlying data
* @param <DecoratorType>
* The type of the decorator, which allows to customize the
* appearance of the views, which are used to visualize the items of
* the adapter
*
* @author Michael Rapp
*
* @since 1.0.0
*/
public abstract class AbstractFilterableListAdapter<DataType, DecoratorType>
extends AbstractSortableListAdapter<DataType, DecoratorType> implements
FilterableListAdapter<DataType> {
/**
* A list, which contains the adapter's unfiltered data.
*/
private transient ArrayList<Item<DataType>> unfilteredItems;
/**
* A sparse map, which maps the indices of the adapter's filtered items to
* their corresponding indices of the unfiltered data.
*/
private transient SparseIntArray indexMapping;
/**
* A set, which contains the listeners, which should be notified, when the
* adapter's underlying data has been filtered.
*/
private transient Set<ListFilterListener<DataType>> filterListeners;
/**
* A set, which contains the filters, which are used to filter the adapter's
* underlying data.
*/
private LinkedHashSet<AppliedFilter<DataType>> appliedFilters;
/**
* The constant serial version UID.
*/
private static final long serialVersionUID = 1L;
/**
* The key, which is used to store the filters, which are used to filter the
* adapter's underlying data, within a bundle.
*/
@VisibleForTesting
protected static final String APPLIED_FILTERS_BUNDLE_KEY = AbstractFilterableListAdapter.class
.getSimpleName() + "::AppliedFilters";
/**
* Creates and returns a listener, which allows to adapt the unfiltered
* items, when an item has been removed from or added to the adapter.
*
* @return The listener, which has been created, as an instance of the type
* {@link ListAdapterListener}
*/
private ListAdapterListener<DataType> createAdapterListener() {
return new ListAdapterListener<DataType>() {
@Override
public void onItemAdded(final ListAdapter<DataType> adapter,
final DataType item, final int index) {
if (isFiltered()) {
Item<DataType> addedItem = getItems().get(index);
unfilteredItems.add(index, addedItem);
if (!matchAllFilters(addedItem)) {
getItems().remove(index);
}
}
}
@Override
public void onItemRemoved(final ListAdapter<DataType> adapter,
final DataType item, final int index) {
if (isFiltered()) {
unfilteredItems.remove(getUnfilteredIndex(index));
}
}
@Override
public final int hashCode() {
return getClass().hashCode();
}
@Override
public final boolean equals(final Object obj) {
if (getClass() == obj.getClass())
return true;
return false;
}
};
}
/**
* Creates and returns a listener, which allows to adapter the unfiltered
* items, when the adapter's underlying data has been sorted.
*
* @return The listener, which has been created as an instance of the type
* {@link ListSortingListener}
*/
private ListSortingListener<DataType> createSortingListener() {
return new ListSortingListener<DataType>() {
@Override
public void onSorted(final ListAdapter<DataType> adapter,
final Collection<DataType> sortedItems, final Order order,
final Comparator<DataType> comparator) {
if (isFiltered()) {
if (order == Order.ASCENDING) {
if (comparator != null) {
Collections.sort(unfilteredItems,
new ItemComparator<DataType>(comparator));
} else {
Collections.sort(unfilteredItems);
}
} else {
if (comparator != null) {
Collections.sort(unfilteredItems, Collections
.reverseOrder(new ItemComparator<DataType>(
comparator)));
} else {
Collections.sort(unfilteredItems,
Collections.reverseOrder());
}
}
}
}
@Override
public final int hashCode() {
return getClass().hashCode();
}
@Override
public final boolean equals(final Object obj) {
if (getClass() == obj.getClass())
return true;
return false;
}
};
}
/**
* Applies all filters, which are currently applied on the adapter, to
* filter the adapter's underlying data.
*/
private void applyAllFilters() {
for (AppliedFilter<DataType> filter : appliedFilters) {
applyFilter(filter);
}
}
	/**
	 * Applies a specific filter to filter the adapter's underlying data.
	 * 
	 * @param filter
	 *            The filter, which should be applied, as an instance of the
	 *            class {@link AppliedFilter}. The filter may not be null
	 */
	private void applyFilter(final AppliedFilter<DataType> filter) {
		if (unfilteredItems == null) {
			// First filter being applied: snapshot the unfiltered items and
			// start mapping filtered indices to unfiltered ones.
			unfilteredItems = new ArrayList<Item<DataType>>(getItems());
			indexMapping = new SparseIntArray();
		}

		Collection<Item<DataType>> itemsToRemove = new LinkedList<Item<DataType>>();
		// counter is the index an item will have after filtering.
		int counter = 0;

		for (int i = 0; i < getNumberOfItems(); i++) {
			Item<DataType> item = getItems().get(i);

			if (!matchFilter(filter, item)) {
				itemsToRemove.add(item);
			} else {
				// NOTE(review): i indexes the currently visible items, not
				// unfilteredItems — when several filters are applied in
				// sequence, the mapping is relative to the previously
				// filtered list; confirm this is intended.
				indexMapping.put(counter, i);
				counter++;
			}
		}

		getItems().removeAll(itemsToRemove);
	}
/**
* Returns, whether a specific item matches all applied filters, or not.
*
* @param item
* The item, which should be matched, as an instance of the class
* {@link Item}. The item may not be null
* @return True, if the given item matches all applied filters, false
* otherwise
*/
private boolean matchAllFilters(final Item<DataType> item) {
for (AppliedFilter<DataType> filter : appliedFilters) {
if (!matchFilter(filter, item)) {
return false;
}
}
return true;
}
/**
* Returns, whether a specific item matches a filter, or not.
*
* @param filter
* The filter, which should be matched, as an instance of the
* class {@link AppliedFilter}. The filter may not be null
* @param item
* The item, which should be matched, as an instance of the class
* {@link Item}. The item may not be null
* @return True, if the given item matches the filter, false otherwise
*/
private boolean matchFilter(final AppliedFilter<DataType> filter,
final Item<DataType> item) {
if (filter.getFilter() != null) {
return filter.getFilter().match(item.getData(),
filter.getRegularExpression());
} else {
return item.match(filter.getRegularExpression());
}
}
/**
* Notifies all listeners, which have been registered to be notified, when
* the adapter's underlying data has been filtered, when a filter has been
* applied.
*
* @param regularExpression
* The regular expression, which has been used to filter the
* adapter's underlying data, as an instance of the class
* {@link Pattern}. The regular expression may not be null
* @param filter
* The filter, which has been used to apply the regular
* expression on the single items, as an instance of the type
* {@link Filter} or null, if the items' implementations of the
* interface {@link Filterable} has been used instead
* @param filteredItems
* A collection, which contains the adapter's filtered items, as
* an instance of the type {@link Collection} or an empty
* collection, if the adapter does not contain any items
*/
private void notifyOnApplyFilter(final Pattern regularExpression,
final Filter<DataType> filter,
final Collection<DataType> filteredItems) {
for (ListFilterListener<DataType> listener : filterListeners) {
listener.onApplyFilter(this, regularExpression, filter,
filteredItems);
}
}
/**
* Notifies all listeners, which have been registered to be notified, when
* the adapter's underlying data has been filtered, when a filter has been
* reseted.
*
* @param regularExpression
* The regular expression of the filter, which has been reseted,
* as an instance of the class {@link Pattern}. The regular
* expression may not be null
* @param unfilteredItems
* A collection, which contains the adapter's filtered items, as
* an instance of the type {@link Collection} or an empty
* collection, if the adapter does not contain any items
*/
private void notifyOnResetFilter(final Pattern regularExpression,
final Collection<DataType> unfilteredItems) {
for (ListFilterListener<DataType> listener : filterListeners) {
listener.onResetFilter(this, regularExpression, unfilteredItems);
}
}
	/**
	 * Returns a set, which contains the listeners, which should be notified,
	 * when the adapter's underlying data has been filtered.
	 * 
	 * @return A set, which contains the listeners, which should be notified,
	 *         when the adapter's underlying data has been filtered, as an
	 *         instance of the type {@link Set} or an empty set, if no listeners
	 *         should be notified
	 */
	protected final Set<ListFilterListener<DataType>> getFilterListeners() {
		return filterListeners;
	}

	/**
	 * Sets the set, which contains the listeners, which should be notified,
	 * when the adapter's underlying data has been filtered.
	 * 
	 * @param filterListeners
	 *            The set, which should be set, as an instance of the type
	 *            {@link Set} or an empty set, if no listeners should be
	 *            notified
	 */
	protected final void setFilterListeners(
			final Set<ListFilterListener<DataType>> filterListeners) {
		ensureNotNull(filterListeners, "The listeners may not be null");
		this.filterListeners = filterListeners;
	}

	/**
	 * Returns a set, which contains the filters, which are used to filter the
	 * adapter's underlying data.
	 * 
	 * @return A set, which contains the filters, which are used to filter the
	 *         adapter's underlying data, as an instance of the type
	 *         {@link LinkedHashSet} or an empty set, if the adapter's
	 *         underlying data is not filtered
	 */
	protected final LinkedHashSet<AppliedFilter<DataType>> getAppliedFilters() {
		return appliedFilters;
	}

	/**
	 * Sets the set, which contains the filters, which are used to filter the
	 * adapter's underlying data.
	 * 
	 * @param appliedFilters
	 *            The set, which should be set, as an instance of the type
	 *            {@link LinkedHashSet} or an empty set, if the adapter's
	 *            underlying data should not be filtered
	 */
	protected final void setAppliedFilters(
			final LinkedHashSet<AppliedFilter<DataType>> appliedFilters) {
		ensureNotNull(appliedFilters, "The applied filters may not be null");
		this.appliedFilters = appliedFilters;
		// Immediately filter the underlying data using the given filters.
		applyAllFilters();
	}

	/**
	 * Returns a list, which contains the adapter's unfiltered data.
	 * 
	 * @return A list, which contains the adapter's unfiltered data as an
	 *         instance of the type {@link ArrayList} or null, if no filters are
	 *         currently applied on the adapter
	 */
	protected final ArrayList<Item<DataType>> getUnfilteredItems() {
		return unfilteredItems;
	}
/**
* Returns the unfiltered index, which corresponds to a specific filtered
* index.
*
* @param filteredIndex
* The index, whose corresponding unfiltered index should be
* retrieved, as an {@link Integer} value
* @return The unfiltered index, which corresponds to the given filtered
* index, as an {@link Integer} value
*/
protected final int getUnfilteredIndex(final int filteredIndex) {
ensureAtLeast(filteredIndex, 0, "The index must be at least 0");
ensureAtMaximum(filteredIndex, getNumberOfItems() - 1,
"The index must be at maximum " + (getNumberOfItems() - 1));
if (!isFiltered()) {
return filteredIndex;
} else {
return indexMapping.get(filteredIndex);
}
}
/**
* Creates and returns a deep copy of the set, which contains the filters,
* which are applied on the adapter.
*
* @return A deep copy of the set, which contains the filters, which are
* applied on the adapter, as an instance of the type
* {@link LinkedHashSet} or an empty set, if no filters are applied
*/
protected final LinkedHashSet<AppliedFilter<DataType>> cloneAppliedFilters() {
LinkedHashSet<AppliedFilter<DataType>> clonedAppliedFilters = new LinkedHashSet<AppliedFilter<DataType>>();
for (AppliedFilter<DataType> filter : appliedFilters) {
clonedAppliedFilters.add(filter.clone());
}
return clonedAppliedFilters;
}
/**
* Creates a new adapter, whose underlying data is managed as a filterable
* list of arbitrary items.
*
* @param context
* The context, the adapter should belong to, as an instance of
* the class {@link Context}. The context may not be null
* @param inflater
* The inflater, which should be used to inflate the views, which
* are used to visualize the adapter's items, as an instance of
* the type {@link Inflater}. The inflater may not be null
* @param decorator
* The decorator, which should be used to customize the
* appearance of the views, which are used to visualize the items
* of the adapter, as an instance of the generic type
* DecoratorType. The decorator may not be null
* @param logLevel
* The log level, which should be used for logging, as a value of
* the enum {@link LogLevel}. The log level may not be null
* @param items
* A list, which contains the the adapter's items, or an empty
* list, if the adapter should not contain any items
* @param allowDuplicates
* True, if duplicate items should be allowed, false otherwise
* @param notifyOnChange
* True, if the method <code>notifyDataSetChanged():void</code>
* should be automatically called when the adapter's underlying
* data has been changed, false otherwise
* @param adapterListeners
* A set, which contains the listeners, which should be notified
* when the adapter's underlying data has been modified or an
* empty set, if no listeners should be notified
* @param enableStateListeners
* A set, which contains the listeners, which should be notified
* when an item has been disabled or enabled or an empty set, if
* no listeners should be notified
* @param numberOfItemStates
* The number of states, the adapter's items may have, as an
* {@link Integer} value. The value must be at least 1
* @param triggerItemStateOnClick
* True, if the state of an item should be triggered, when it is
* clicked by the user, false otherwise
* @param itemStateListeners
* A set, which contains the listeners, which should be notified,
* when the state of an item has been changed or an empty set, if
* no listeners should be notified
* @param sortingListeners
* A set, which contains the listeners, which should be notified,
* when the adapter's underlying data has been sorted or an empty
* set, if no listeners should be notified
* @param filterListeners
* A set, which contains the listeners, which should be notified,
* when the adapter's underlying data has been filtered or an
* empty set, if no listeners should be notified
* @param appliedFilters
* A set, which contains the filters, which should be used to
* filter the adapter's underlying data or an empty set, if the
* adapter's underlying data should not be filtered
*/
	protected AbstractFilterableListAdapter(final Context context,
			final Inflater inflater, final DecoratorType decorator,
			final LogLevel logLevel, final ArrayList<Item<DataType>> items,
			final boolean allowDuplicates, final boolean notifyOnChange,
			final Set<ListAdapterListener<DataType>> adapterListeners,
			final Set<ListEnableStateListener<DataType>> enableStateListeners,
			final int numberOfItemStates,
			final boolean triggerItemStateOnClick,
			final Set<ListItemStateListener<DataType>> itemStateListeners,
			final Set<ListSortingListener<DataType>> sortingListeners,
			final Set<ListFilterListener<DataType>> filterListeners,
			final LinkedHashSet<AppliedFilter<DataType>> appliedFilters) {
		super(context, inflater, decorator, logLevel, items, allowDuplicates,
				notifyOnChange, adapterListeners, enableStateListeners,
				numberOfItemStates, triggerItemStateOnClick,
				itemStateListeners, sortingListeners);
		setFilterListeners(filterListeners);
		// setAppliedFilters(...) immediately re-applies the given filters on
		// the underlying data.
		setAppliedFilters(appliedFilters);
		// Register internal listeners, which keep the unfiltered backing
		// list in sync with additions, removals and sorting.
		addAdapterListener(createAdapterListener());
		addSortingListner(createSortingListener());
	}
	@Override
	public final boolean applyFilter(final Pattern regularExpression) {
		AppliedFilter<DataType> appliedFilter = new AppliedFilter<DataType>(
				regularExpression);
		// The set of applied filters rejects duplicates; the filter is only
		// applied, if no equal filter has been applied before.
		boolean added = appliedFilters.add(appliedFilter);

		if (added) {
			applyFilter(appliedFilter);
			notifyOnApplyFilter(regularExpression, null, getAllItems());
			notifyOnDataSetChanged();
			String message = "Applied filter using regular expression \""
					+ regularExpression + "\"";
			getLogger().logInfo(getClass(), message);
			return true;
		}

		String message = "Filter using regular expression \""
				+ regularExpression
				+ "\" not applied, because a filter using the same "
				+ "regular expression is already applied on the adapter";
		getLogger().logDebug(getClass(), message);
		return false;
	}

	@Override
	public final boolean applyFilter(final Pattern regularExpression,
			final Filter<DataType> filter) {
		AppliedFilter<DataType> appliedFilter = new AppliedFilter<DataType>(
				regularExpression, filter);
		// The set of applied filters rejects duplicates; the filter is only
		// applied, if no equal filter has been applied before.
		boolean added = appliedFilters.add(appliedFilter);

		if (added) {
			applyFilter(appliedFilter);
			notifyOnApplyFilter(regularExpression, filter, getAllItems());
			notifyOnDataSetChanged();
			String message = "Applied filter using regular expression \""
					+ regularExpression + "\" and filter \"" + filter + "\"";
			getLogger().logInfo(getClass(), message);
			return true;
		}

		String message = "Filter using regular expression \""
				+ regularExpression
				+ "\" not applied, because a filter using the same "
				+ "regular expression and filter is already applied "
				+ "on the adapter";
		getLogger().logDebug(getClass(), message);
		return false;
	}
	@Override
	public final boolean resetFilter(final Pattern regularExpression) {
		AppliedFilter<DataType> appliedFilter = new AppliedFilter<DataType>(
				regularExpression);
		boolean removed = appliedFilters.remove(appliedFilter);

		if (removed) {
			// Restore the unfiltered data and re-apply the remaining filters
			// from scratch.
			setItems(unfilteredItems);
			unfilteredItems = null;
			indexMapping = null;
			applyAllFilters();
			notifyOnResetFilter(regularExpression, getAllItems());
			notifyOnDataSetChanged();
			String message = "Reseted filter \"" + appliedFilter + "\"";
			getLogger().logInfo(getClass(), message);
			return true;
		} else {
			String message = "Filter with regular expression \""
					+ regularExpression.pattern()
					+ "\" not reseted, because no such filter is applied on the adapter";
			getLogger().logDebug(getClass(), message);
			return false;
		}
	}

	@Override
	public final void resetAllFilters() {
		// Iterate over a copy, because resetFilter(Pattern) modifies the set
		// of applied filters.
		for (AppliedFilter<DataType> appliedFilter : new LinkedHashSet<AppliedFilter<DataType>>(
				appliedFilters)) {
			resetFilter(appliedFilter.getRegularExpression());
		}

		String message = "Reseted all previously applied filters";
		getLogger().logInfo(getClass(), message);
	}
	@Override
	public final boolean isFiltered() {
		return !appliedFilters.isEmpty();
	}

	@Override
	public final boolean isFilterApplied(final Pattern regularExpression) {
		// AppliedFilter instances are compared by their regular expression.
		return appliedFilters.contains(new AppliedFilter<DataType>(
				regularExpression));
	}

	@Override
	public final int getNumberOfAppliedFilters() {
		return appliedFilters.size();
	}

	@Override
	public final void addFilterListener(
			final ListFilterListener<DataType> listener) {
		ensureNotNull(listener, "The listener may not be null");
		filterListeners.add(listener);
		String message = "Added filter listener \"" + listener + "\"";
		getLogger().logDebug(getClass(), message);
	}

	@Override
	public final void removeFilterListener(
			final ListFilterListener<DataType> listener) {
		ensureNotNull(listener, "The listener may not be null");
		filterListeners.remove(listener);
		String message = "Removed filter listener \"" + listener + "\"";
		getLogger().logDebug(getClass(), message);
	}
	@Override
	public void onSaveInstanceState(final Bundle outState) {
		super.onSaveInstanceState(outState);
		// Persist the applied filters, so the filtered state can be restored
		// later, e.g. after an orientation change.
		outState.putSerializable(APPLIED_FILTERS_BUNDLE_KEY,
				getAppliedFilters());
	}

	@SuppressWarnings("unchecked")
	@Override
	public void onRestoreInstanceState(final Bundle savedInstanceState) {
		super.onRestoreInstanceState(savedInstanceState);

		if (savedInstanceState != null) {
			// Re-applies the previously persisted filters on the underlying
			// data.
			setAppliedFilters((LinkedHashSet<AppliedFilter<DataType>>) savedInstanceState
					.getSerializable(APPLIED_FILTERS_BUNDLE_KEY));
			notifyDataSetChanged();
		}
	}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + appliedFilters.hashCode();
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
AbstractFilterableListAdapter<?, ?> other = (AbstractFilterableListAdapter<?, ?>) obj;
if (!appliedFilters.equals(other.appliedFilters))
return false;
return true;
}
@Override
public abstract AbstractSortableListAdapter<DataType, DecoratorType> clone()
throws CloneNotSupportedException;
} | src/de/mrapp/android/adapter/list/filterable/AbstractFilterableListAdapter.java | /*
* AndroidAdapters Copyright 2014 Michael Rapp
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*/
package de.mrapp.android.adapter.list.filterable;
import static de.mrapp.android.adapter.util.Condition.ensureAtLeast;
import static de.mrapp.android.adapter.util.Condition.ensureAtMaximum;
import static de.mrapp.android.adapter.util.Condition.ensureNotNull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.regex.Pattern;
import android.content.Context;
import android.os.Bundle;
import android.util.SparseIntArray;
import de.mrapp.android.adapter.Filter;
import de.mrapp.android.adapter.ListAdapter;
import de.mrapp.android.adapter.Order;
import de.mrapp.android.adapter.datastructure.AppliedFilter;
import de.mrapp.android.adapter.datastructure.item.Item;
import de.mrapp.android.adapter.datastructure.item.ItemComparator;
import de.mrapp.android.adapter.inflater.Inflater;
import de.mrapp.android.adapter.list.ListAdapterListener;
import de.mrapp.android.adapter.list.enablestate.ListEnableStateListener;
import de.mrapp.android.adapter.list.itemstate.ListItemStateListener;
import de.mrapp.android.adapter.list.sortable.AbstractSortableListAdapter;
import de.mrapp.android.adapter.list.sortable.ListSortingListener;
import de.mrapp.android.adapter.logging.LogLevel;
import de.mrapp.android.adapter.util.VisibleForTesting;
/**
* An abstract base class for all adapters, whose underlying data is managed as
* a filterable list of arbitrary items. Such an adapter's purpose is to provide
* the underlying data for visualization using a {@link ListView} widget.
*
* @param <DataType>
* The type of the adapter's underlying data
* @param <DecoratorType>
* The type of the decorator, which allows to customize the
* appearance of the views, which are used to visualize the items of
* the adapter
*
* @author Michael Rapp
*
* @since 1.0.0
*/
public abstract class AbstractFilterableListAdapter<DataType, DecoratorType>
extends AbstractSortableListAdapter<DataType, DecoratorType> implements
FilterableListAdapter<DataType> {
/**
* A list, which contains the the adapter's unfiltered data.
*/
private transient ArrayList<Item<DataType>> unfilteredItems;
/**
* A sparse map, which maps the indices of the adapter's filtered items to
* their corresponding indices of the unfiltered data.
*/
private transient SparseIntArray indexMapping;
/**
* A set, which contains the listeners, which should be notified, when the
* adapter's underlying data has been filtered.
*/
private transient Set<ListFilterListener<DataType>> filterListeners;
/**
* A set, which contains the filters, which are used to filter the adapter's
* underlying data.
*/
private LinkedHashSet<AppliedFilter<DataType>> appliedFilters;
/**
* The constant serial version UID.
*/
private static final long serialVersionUID = 1L;
/**
* The key, which is used to store the filters, which are used to filter the
* adapter's underlying data, within a bundle.
*/
@VisibleForTesting
protected static final String APPLIED_FILTERS_BUNDLE_KEY = AbstractFilterableListAdapter.class
.getSimpleName() + "::AppliedFilters";
/**
* Creates and returns a listener, which allows to adapt the unfiltered
* items, when an item has been removed from or added to the adapter.
*
* @return The listener, which has been created, as an instance of the type
* {@link ListAdapterListener}
*/
private ListAdapterListener<DataType> createAdapterListener() {
return new ListAdapterListener<DataType>() {
@Override
public void onItemAdded(final ListAdapter<DataType> adapter,
final DataType item, final int index) {
if (isFiltered()) {
Item<DataType> addedItem = getItems().get(index);
unfilteredItems.add(index, addedItem);
if (!matchAllFilters(addedItem)) {
getItems().remove(index);
}
}
}
@Override
public void onItemRemoved(final ListAdapter<DataType> adapter,
final DataType item, final int index) {
if (isFiltered()) {
unfilteredItems.remove(getUnfilteredIndex(index));
}
}
@Override
public final int hashCode() {
return getClass().hashCode();
}
@Override
public final boolean equals(final Object obj) {
if (getClass() == obj.getClass())
return true;
return false;
}
};
}
/**
* Creates and returns a listener, which allows to adapter the unfiltered
* items, when the adapter's underlying data has been sorted.
*
* @return The listener, which has been created as an instance of the type
* {@link ListSortingListener}
*/
private ListSortingListener<DataType> createSortingListener() {
return new ListSortingListener<DataType>() {
@Override
public void onSorted(final ListAdapter<DataType> adapter,
final Collection<DataType> sortedItems, final Order order,
final Comparator<DataType> comparator) {
if (isFiltered()) {
if (order == Order.ASCENDING) {
if (comparator != null) {
Collections.sort(unfilteredItems,
new ItemComparator<DataType>(comparator));
} else {
Collections.sort(unfilteredItems);
}
} else {
if (comparator != null) {
Collections.sort(unfilteredItems, Collections
.reverseOrder(new ItemComparator<DataType>(
comparator)));
} else {
Collections.sort(unfilteredItems,
Collections.reverseOrder());
}
}
}
}
@Override
public final int hashCode() {
return getClass().hashCode();
}
@Override
public final boolean equals(final Object obj) {
if (getClass() == obj.getClass())
return true;
return false;
}
};
}
/**
* Applies all filters, which are currently applied on the adapter, to
* filter the adapter's underlying data.
*/
private void applyAllFilters() {
for (AppliedFilter<DataType> filter : appliedFilters) {
applyFilter(filter);
}
}
/**
* Applies a specific filter to filter the adapter's underlying data.
*
* @param filter
* The filter, which should be applied, as an instance of the
* class {@link AppliedFilter}. The filter may not be null
*/
private void applyFilter(final AppliedFilter<DataType> filter) {
if (unfilteredItems == null) {
unfilteredItems = new ArrayList<Item<DataType>>(getItems());
indexMapping = new SparseIntArray();
}
Collection<Item<DataType>> itemsToRemove = new LinkedList<Item<DataType>>();
int counter = 0;
for (int i = 0; i < getNumberOfItems(); i++) {
Item<DataType> item = getItems().get(i);
if (!matchFilter(filter, item)) {
itemsToRemove.add(item);
} else {
indexMapping.put(counter, i);
counter++;
}
}
getItems().removeAll(itemsToRemove);
}
/**
* Returns, whether a specific item matches all applied filters, or not.
*
* @param item
* The item, which should be matched, as an instance of the class
* {@link Item}. The item may not be null
* @return True, if the given item matches all applied filters, false
* otherwise
*/
private boolean matchAllFilters(final Item<DataType> item) {
for (AppliedFilter<DataType> filter : appliedFilters) {
if (!matchFilter(filter, item)) {
return false;
}
}
return true;
}
/**
* Returns, whether a specific item matches a filter, or not.
*
* @param filter
* The filter, which should be matched, as an instance of the
* class {@link AppliedFilter}. The filter may not be null
* @param item
* The item, which should be matched, as an instance of the class
* {@link Item}. The item may not be null
* @return True, if the given item matches the filter, false otherwise
*/
private boolean matchFilter(final AppliedFilter<DataType> filter,
final Item<DataType> item) {
if (filter.getFilter() != null) {
return filter.getFilter().match(item.getData(),
filter.getRegularExpression());
} else {
return item.match(filter.getRegularExpression());
}
}
/**
* Notifies all listeners, which have been registered to be notified, when
* the adapter's underlying data has been filtered, when a filter has been
* applied.
*
* @param regularExpression
* The regular expression, which has been used to filter the
* adapter's underlying data, as an instance of the class
* {@link Pattern}. The regular expression may not be null
* @param filter
* The filter, which has been used to apply the regular
* expression on the single items, as an instance of the type
* {@link Filter} or null, if the items' implementations of the
* interface {@link Filterable} has been used instead
* @param filteredItems
* A collection, which contains the adapter's filtered items, as
* an instance of the type {@link Collection} or an empty
* collection, if the adapter does not contain any items
*/
private void notifyOnApplyFilter(final Pattern regularExpression,
final Filter<DataType> filter,
final Collection<DataType> filteredItems) {
for (ListFilterListener<DataType> listener : filterListeners) {
listener.onApplyFilter(this, regularExpression, filter,
filteredItems);
}
}
/**
* Notifies all listeners, which have been registered to be notified, when
* the adapter's underlying data has been filtered, when a filter has been
* reseted.
*
* @param regularExpression
* The regular expression of the filter, which has been reseted,
* as an instance of the class {@link Pattern}. The regular
* expression may not be null
* @param unfilteredItems
* A collection, which contains the adapter's filtered items, as
* an instance of the type {@link Collection} or an empty
* collection, if the adapter does not contain any items
*/
private void notifyOnResetFilter(final Pattern regularExpression,
final Collection<DataType> unfilteredItems) {
for (ListFilterListener<DataType> listener : filterListeners) {
listener.onResetFilter(this, regularExpression, unfilteredItems);
}
}
/**
* Returns a set, which contains the listeners, which should be notified,
* when the adapter's underlying data has been filtered.
*
* @return A set, which contains the listeners, which should be notified,
* when the adapter's underlying data has been filtered, as an
* instance of the type {@link Set} or an empty set, if no listeners
* should be notified
*/
protected final Set<ListFilterListener<DataType>> getFilterListeners() {
return filterListeners;
}
/**
* Sets the set, which contains the listeners, which should be notified,
* when the adapter's underlying data has been filtered.
*
* @param filterListeners
* The set, which should be set, as an instance of the type
* {@link Set} or an empty set, if no listeners should be
* notified
*/
protected final void setFilterListeners(
final Set<ListFilterListener<DataType>> filterListeners) {
ensureNotNull(filterListeners, "The listeners may not be null");
this.filterListeners = filterListeners;
}
/**
* Returns a set, which contains the filters, which are used to filter the
* adapter's underlying data.
*
* @return A set, which contains the filters, which are used to filter the
* adapter's underlying data, as an instance of the type
* {@link LinkedHashSet} or an empty set, if the adapter's
* underlying data is not filtered
*/
protected final LinkedHashSet<AppliedFilter<DataType>> getAppliedFilters() {
return appliedFilters;
}
/**
* Sets the set, which contains the filters, which are used to filter the
* adapter's underlying data.
*
* @param appliedFilters
* The set, which should be set, as an instance of the type
* {@link LinkedHashSet} or an empty set, if the adapter's
* underlying data should not be filtered
*/
protected final void setAppliedFilters(
final LinkedHashSet<AppliedFilter<DataType>> appliedFilters) {
ensureNotNull(appliedFilters, "The applied filters may not be null");
this.appliedFilters = appliedFilters;
applyAllFilters();
}
/**
* Returns a list, which contains the adapter's unfiltered data.
*
* @return A list, which contains the adapter's unfiltered data as an
* instance of the type {@link ArrayList} or null, if no filters are
* currently applied on the adapter
*/
protected final ArrayList<Item<DataType>> getUnfilteredItems() {
return unfilteredItems;
}
/**
* Returns the unfiltered index, which corresponds to a specific filtered
* index.
*
* @param filteredIndex
* The index, whose corresponding unfiltered index should be
* retrieved, as an {@link Integer} value
* @return The unfiltered index, which corresponds to the given filtered
* index, as an {@link Integer} value
*/
protected final int getUnfilteredIndex(final int filteredIndex) {
ensureAtLeast(filteredIndex, 0, "The index must be at least 0");
ensureAtMaximum(filteredIndex, getNumberOfItems() - 1,
"The index must be at maximum " + (getNumberOfItems() - 1));
if (!isFiltered()) {
return filteredIndex;
} else {
return indexMapping.get(filteredIndex);
}
}
/**
* Creates and returns a deep copy of the set, which contains the filters,
* which are applied on the adapter.
*
* @return A deep copy of the set, which contains the filters, which are
* applied on the adapter, as an instance of the type
* {@link LinkedHashSet} or an empty set, if no filters are applied
*/
protected final LinkedHashSet<AppliedFilter<DataType>> cloneAppliedFilters() {
LinkedHashSet<AppliedFilter<DataType>> clonedAppliedFilters = new LinkedHashSet<AppliedFilter<DataType>>();
for (AppliedFilter<DataType> filter : appliedFilters) {
clonedAppliedFilters.add(filter.clone());
}
return clonedAppliedFilters;
}
/**
* Creates a new adapter, whose underlying data is managed as a filterable
* list of arbitrary items.
*
* @param context
* The context, the adapter should belong to, as an instance of
* the class {@link Context}. The context may not be null
* @param inflater
* The inflater, which should be used to inflate the views, which
* are used to visualize the adapter's items, as an instance of
* the type {@link Inflater}. The inflater may not be null
* @param decorator
* The decorator, which should be used to customize the
* appearance of the views, which are used to visualize the items
* of the adapter, as an instance of the generic type
* DecoratorType. The decorator may not be null
* @param logLevel
* The log level, which should be used for logging, as a value of
* the enum {@link LogLevel}. The log level may not be null
* @param items
* A list, which contains the the adapter's items, or an empty
* list, if the adapter should not contain any items
* @param allowDuplicates
* True, if duplicate items should be allowed, false otherwise
* @param notifyOnChange
* True, if the method <code>notifyDataSetChanged():void</code>
* should be automatically called when the adapter's underlying
* data has been changed, false otherwise
* @param adapterListeners
* A set, which contains the listeners, which should be notified
* when the adapter's underlying data has been modified or an
* empty set, if no listeners should be notified
* @param enableStateListeners
* A set, which contains the listeners, which should be notified
* when an item has been disabled or enabled or an empty set, if
* no listeners should be notified
* @param numberOfItemStates
* The number of states, the adapter's items may have, as an
* {@link Integer} value. The value must be at least 1
* @param triggerItemStateOnClick
* True, if the state of an item should be triggered, when it is
* clicked by the user, false otherwise
* @param itemStateListeners
* A set, which contains the listeners, which should be notified,
* when the state of an item has been changed or an empty set, if
* no listeners should be notified
* @param sortingListeners
* A set, which contains the listeners, which should be notified,
* when the adapter's underlying data has been sorted or an empty
* set, if no listeners should be notified
* @param filterListeners
* A set, which contains the listeners, which should be notified,
* when the adapter's underlying data has been filtered or an
* empty set, if no listeners should be notified
* @param appliedFilters
* A set, which contains the filters, which should be used to
* filter the adapter's underlying data or an empty set, if the
* adapter's underlying data should not be filtered
*/
protected AbstractFilterableListAdapter(final Context context,
final Inflater inflater, final DecoratorType decorator,
final LogLevel logLevel, final ArrayList<Item<DataType>> items,
final boolean allowDuplicates, final boolean notifyOnChange,
final Set<ListAdapterListener<DataType>> adapterListeners,
final Set<ListEnableStateListener<DataType>> enableStateListeners,
final int numberOfItemStates,
final boolean triggerItemStateOnClick,
final Set<ListItemStateListener<DataType>> itemStateListeners,
final Set<ListSortingListener<DataType>> sortingListeners,
final Set<ListFilterListener<DataType>> filterListeners,
final LinkedHashSet<AppliedFilter<DataType>> appliedFilters) {
super(context, inflater, decorator, logLevel, items, allowDuplicates,
notifyOnChange, adapterListeners, enableStateListeners,
numberOfItemStates, triggerItemStateOnClick,
itemStateListeners, sortingListeners);
setFilterListeners(filterListeners);
setAppliedFilters(appliedFilters);
addAdapterListener(createAdapterListener());
addSortingListner(createSortingListener());
}
@Override
public final boolean applyFilter(final Pattern regularExpression) {
AppliedFilter<DataType> appliedFilter = new AppliedFilter<DataType>(
regularExpression);
boolean added = appliedFilters.add(appliedFilter);
if (added) {
applyFilter(appliedFilter);
notifyOnApplyFilter(regularExpression, null, getAllItems());
notifyOnDataSetChanged();
String message = "Applied filter using regular expression \""
+ regularExpression + "\"";
getLogger().logInfo(getClass(), message);
return true;
}
String message = "Filter using regular expression \""
+ regularExpression
+ "\" not applied, because a filter using the same "
+ "regular expression is already applied on the adapter";
getLogger().logDebug(getClass(), message);
return false;
}
@Override
public final boolean applyFilter(final Pattern regularExpression,
final Filter<DataType> filter) {
AppliedFilter<DataType> appliedFilter = new AppliedFilter<DataType>(
regularExpression, filter);
boolean added = appliedFilters.add(appliedFilter);
if (added) {
applyFilter(appliedFilter);
notifyOnApplyFilter(regularExpression, filter, getAllItems());
notifyOnDataSetChanged();
String message = "Applied filter using regular expression \""
+ regularExpression + "\" and filter \"" + filter + "\"";
getLogger().logInfo(getClass(), message);
return true;
}
String message = "Filter using regular expression \""
+ regularExpression
+ "\" not applied, because a filter using the same "
+ "regular expression and filter is already applied "
+ "on the adapter";
getLogger().logDebug(getClass(), message);
return false;
}
@Override
public final boolean resetFilter(final Pattern regularExpression) {
AppliedFilter<DataType> appliedFilter = new AppliedFilter<DataType>(
regularExpression);
boolean removed = appliedFilters.remove(appliedFilter);
if (removed) {
setItems(unfilteredItems);
unfilteredItems = null;
indexMapping = null;
applyAllFilters();
notifyOnResetFilter(regularExpression, getAllItems());
notifyOnDataSetChanged();
String message = "Reseted filter \"" + appliedFilter + "\"";
getLogger().logInfo(getClass(), message);
return true;
} else {
String message = "Filter with regular expression \""
+ regularExpression.pattern()
+ "\" not reseted, because no such filter is applied on the adapter";
getLogger().logDebug(getClass(), message);
return false;
}
}
@Override
public final void resetAllFilters() {
for (AppliedFilter<DataType> appliedFilter : new LinkedHashSet<AppliedFilter<DataType>>(
appliedFilters)) {
resetFilter(appliedFilter.getRegularExpression());
}
String message = "Reseted all previously applied filters";
getLogger().logInfo(getClass(), message);
}
@Override
public final boolean isFiltered() {
return !appliedFilters.isEmpty();
}
@Override
public final boolean isFilterApplied(final Pattern regularExpression) {
return appliedFilters.contains(new AppliedFilter<DataType>(
regularExpression));
}
@Override
public final int getNumberOfAppliedFilters() {
return appliedFilters.size();
}
@Override
public final void addFilterListener(
final ListFilterListener<DataType> listener) {
ensureNotNull(listener, "The listener may not be null");
filterListeners.add(listener);
String message = "Added filter listener \"" + listener + "\"";
getLogger().logDebug(getClass(), message);
}
@Override
public final void removeFilterListener(
final ListFilterListener<DataType> listener) {
ensureNotNull(listener, "The listener may not be null");
filterListeners.remove(listener);
String message = "Removed filter listener \"" + listener + "\"";
getLogger().logDebug(getClass(), message);
}
@Override
public void onSaveInstanceState(final Bundle outState) {
super.onSaveInstanceState(outState);
outState.putSerializable(APPLIED_FILTERS_BUNDLE_KEY,
getAppliedFilters());
}
@SuppressWarnings("unchecked")
@Override
public void onRestoreInstanceState(final Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
if (savedInstanceState != null) {
setAppliedFilters((LinkedHashSet<AppliedFilter<DataType>>) savedInstanceState
.getSerializable(APPLIED_FILTERS_BUNDLE_KEY));
notifyDataSetChanged();
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + appliedFilters.hashCode();
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
AbstractFilterableListAdapter<?, ?> other = (AbstractFilterableListAdapter<?, ?>) obj;
if (!appliedFilters.equals(other.appliedFilters))
return false;
return true;
}
@Override
public abstract AbstractSortableListAdapter<DataType, DecoratorType> clone()
throws CloneNotSupportedException;
} | Edited comment.
| src/de/mrapp/android/adapter/list/filterable/AbstractFilterableListAdapter.java | Edited comment. | <ide><path>rc/de/mrapp/android/adapter/list/filterable/AbstractFilterableListAdapter.java
<ide> FilterableListAdapter<DataType> {
<ide>
<ide> /**
<del> * A list, which contains the the adapter's unfiltered data.
<add> * A list, which contains the adapter's unfiltered data.
<ide> */
<ide> private transient ArrayList<Item<DataType>> unfilteredItems;
<ide> |
|
Java | apache-2.0 | 7feb6880f07e23d3846c5dc5fdebc934235b34c1 | 0 | zwsong/wicket,klopfdreh/wicket,Servoy/wicket,dashorst/wicket,freiheit-com/wicket,freiheit-com/wicket,apache/wicket,Servoy/wicket,AlienQueen/wicket,martin-g/wicket-osgi,astrapi69/wicket,klopfdreh/wicket,zwsong/wicket,mafulafunk/wicket,dashorst/wicket,mafulafunk/wicket,selckin/wicket,bitstorm/wicket,mosoft521/wicket,apache/wicket,klopfdreh/wicket,zwsong/wicket,aldaris/wicket,aldaris/wicket,selckin/wicket,selckin/wicket,mosoft521/wicket,Servoy/wicket,apache/wicket,AlienQueen/wicket,dashorst/wicket,astrapi69/wicket,Servoy/wicket,topicusonderwijs/wicket,mafulafunk/wicket,AlienQueen/wicket,klopfdreh/wicket,martin-g/wicket-osgi,topicusonderwijs/wicket,astrapi69/wicket,aldaris/wicket,mosoft521/wicket,astrapi69/wicket,apache/wicket,Servoy/wicket,selckin/wicket,mosoft521/wicket,martin-g/wicket-osgi,klopfdreh/wicket,selckin/wicket,AlienQueen/wicket,freiheit-com/wicket,topicusonderwijs/wicket,topicusonderwijs/wicket,freiheit-com/wicket,dashorst/wicket,apache/wicket,aldaris/wicket,AlienQueen/wicket,topicusonderwijs/wicket,bitstorm/wicket,aldaris/wicket,bitstorm/wicket,bitstorm/wicket,mosoft521/wicket,zwsong/wicket,freiheit-com/wicket,dashorst/wicket,bitstorm/wicket | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.extensions.markup.html.tabs;
import java.util.List;
import org.apache.wicket.Component;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.markup.html.list.Loop;
import org.apache.wicket.markup.html.list.Loop.LoopItem;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
/**
* TabbedPanel component represets a panel with tabs that are used to switch between different
* content panels inside the TabbedPanel panel.
* <p>
* <b>Note:</b> When the currently selected tab is replaced by changing the underlying list of tabs,
* the change is not picked up unless a call is made to {@link #setSelectedTab(int)}.
* <p>
* Example:
*
* <pre>
* List tabs=new ArrayList();
* tabs.add(new AbstractTab(new Model<String>("first tab")) {
* public Panel getPanel(String panelId)
* {
* return new TabPanel1(panelId);
* }
* });
*
* tabs.add(new AbstractTab(new Model<String>("second tab")) {
* public Panel getPanel(String panelId)
* {
* return new TabPanel2(panelId);
* }
* });
*
* add(new TabbedPanel("tabs", tabs));
*
* <span wicket:id="tabs" class="tabpanel">[tabbed panel will be here]</span>
* </pre>
* <p>
* For a complete example see the component references in wicket-examples project
*
* @see org.apache.wicket.extensions.markup.html.tabs.ITab
*
* @author Igor Vaynberg (ivaynberg at apache dot org)
*
*/
public class TabbedPanel extends Panel
{
private static final long serialVersionUID = 1L;
/** id used for child panels */
public static final String TAB_PANEL_ID = "panel";
private final List<? extends ITab> tabs;
private transient Boolean[] tabsVisibilityCache;
/**
* Constructor
*
* @param id
* component id
* @param tabs
* list of ITab objects used to represent tabs
*/
public TabbedPanel(String id, List<? extends ITab> tabs)
{
super(id, new Model<Integer>(new Integer(-1)));
if (tabs == null)
{
throw new IllegalArgumentException("argument [tabs] cannot be null");
}
this.tabs = tabs;
final IModel<Integer> tabCount = new AbstractReadOnlyModel<Integer>()
{
private static final long serialVersionUID = 1L;
@Override
public Integer getObject()
{
return TabbedPanel.this.tabs.size();
}
};
WebMarkupContainer tabsContainer = new WebMarkupContainer("tabs-container")
{
private static final long serialVersionUID = 1L;
@Override
protected void onComponentTag(ComponentTag tag)
{
super.onComponentTag(tag);
tag.put("class", getTabContainerCssClass());
}
};
add(tabsContainer);
// add the loop used to generate tab names
tabsContainer.add(new Loop("tabs", tabCount)
{
private static final long serialVersionUID = 1L;
@Override
protected void populateItem(LoopItem item)
{
final int index = item.getIteration();
final ITab tab = (TabbedPanel.this.tabs.get(index));
final WebMarkupContainer titleLink = newLink("link", index);
titleLink.add(newTitle("title", tab.getTitle(), index));
item.add(titleLink);
}
@Override
protected LoopItem newItem(int iteration)
{
return newTabContainer(iteration);
}
});
}
/**
* Generates a loop item used to represent a specific tab's <code>li</code> element.
*
* @param tabIndex
* @return new loop item
*/
protected LoopItem newTabContainer(final int tabIndex)
{
return new LoopItem(tabIndex)
{
private static final long serialVersionUID = 1L;
@Override
protected void onComponentTag(ComponentTag tag)
{
super.onComponentTag(tag);
String cssClass = (String)tag.getString("class");
if (cssClass == null)
{
cssClass = " ";
}
cssClass += " tab" + getIteration();
if (getIteration() == getSelectedTab())
{
cssClass += " selected";
}
if (getIteration() == getTabs().size() - 1)
{
cssClass += " last";
}
tag.put("class", cssClass.trim());
}
@Override
public boolean isVisible()
{
return getTabs().get(tabIndex).isVisible();
}
};
}
/**
* @see org.apache.wicket.Component#onBeforeRender()
*/
@Override
protected void onBeforeRender()
{
if (getSelectedTab() == -1 || isTabVisible(getSelectedTab()) == false)
{
// find first visible selected tab
int selected = 0;
for (int i = 0; i < tabs.size(); i++)
{
if (isTabVisible(i))
{
selected = i;
break;
}
}
if (selected == tabs.size())
{
/*
* none of the tabs are selected...
*
* we do not need to do anything special because the check in setSelectedTab() will
* replace the current tab panel with an empty one
*/
selected = 0;
}
setSelectedTab(selected);
}
super.onBeforeRender();
}
/**
* @return the value of css class attribute that will be added to a div containing the tabs. The
* default value is <code>tab-row</code>
*/
protected String getTabContainerCssClass()
{
return "tab-row";
}
/**
* @return list of tabs that can be used by the user to add/remove/reorder tabs in the panel
*/
public final List<? extends ITab> getTabs()
{
return tabs;
}
/**
* Factory method for tab titles. Returned component can be anything that can attach to span
* tags such as a fragment, panel, or a label
*
* @param titleId
* id of title component
* @param titleModel
* model containing tab title
* @param index
* index of tab
* @return title component
*/
protected Component newTitle(String titleId, IModel<?> titleModel, int index)
{
return new Label(titleId, titleModel);
}
/**
* Factory method for links used to switch between tabs.
*
* The created component is attached to the following markup. Label component with id: title
* will be added for you by the tabbed panel.
*
* <pre>
* <a href="#" wicket:id="link"><span wicket:id="title">[[tab title]]</span></a>
* </pre>
*
* Example implementation:
*
* <pre>
* protected WebMarkupContainer newLink(String linkId, final int index)
* {
* return new Link(linkId)
* {
* private static final long serialVersionUID = 1L;
*
* public void onClick()
* {
* setSelectedTab(index);
* }
* };
* }
* </pre>
*
* @param linkId
* component id with which the link should be created
* @param index
* index of the tab that should be activated when this link is clicked. See
* {@link #setSelectedTab(int)}.
* @return created link component
*/
protected WebMarkupContainer newLink(String linkId, final int index)
{
return new Link(linkId)
{
private static final long serialVersionUID = 1L;
@Override
public void onClick()
{
setSelectedTab(index);
}
};
}
/**
* sets the selected tab
*
* @param index
* index of the tab to select
*
*/
public void setSelectedTab(int index)
{
if (index < 0 || (index >= tabs.size() && index > 0))
{
throw new IndexOutOfBoundsException();
}
setDefaultModelObject(new Integer(index));
final Component component;
if (tabs.size() == 0 || !isTabVisible(index))
{
// no tabs or the currently selected tab is not visible
component = new WebMarkupContainer(TAB_PANEL_ID);
}
else
{
// show panel from selected tab
ITab tab = tabs.get(index);
component = tab.getPanel(TAB_PANEL_ID);
if (component == null)
{
throw new WicketRuntimeException("ITab.getPanel() returned null. TabbedPanel [" +
getPath() + "] ITab index [" + index + "]");
}
}
if (!component.getId().equals(TAB_PANEL_ID))
{
throw new WicketRuntimeException(
"ITab.getPanel() returned a panel with invalid id [" +
component.getId() +
"]. You must always return a panel with id equal to the provided panelId parameter. TabbedPanel [" +
getPath() + "] ITab index [" + index + "]");
}
addOrReplace(component);
}
/**
* @return index of the selected tab
*/
public final int getSelectedTab()
{
return (Integer)getDefaultModelObject();
}
private boolean isTabVisible(int tabIndex)
{
if (tabsVisibilityCache == null)
{
tabsVisibilityCache = new Boolean[tabs.size()];
}
Boolean visible = tabsVisibilityCache[tabIndex];
if (visible == null)
{
visible = tabs.get(tabIndex).isVisible();
tabsVisibilityCache[tabIndex] = visible;
}
return visible;
}
@Override
protected void onDetach()
{
tabsVisibilityCache = null;
super.onDetach();
}
}
| wicket-extensions/src/main/java/org/apache/wicket/extensions/markup/html/tabs/TabbedPanel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.extensions.markup.html.tabs;
import java.util.List;
import org.apache.wicket.Component;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.markup.html.list.Loop;
import org.apache.wicket.markup.html.list.Loop.LoopItem;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
/**
* TabbedPanel component represets a panel with tabs that are used to switch between different
* content panels inside the TabbedPanel panel.
*
* <p>
* <b>Note:</b> When the currently selected tab is replaced by changing the underlying list of
* tabs, the change is not picked up unless a call is made to {@link #setSelectedTab(int)}.
* <p>
*
* Example:
*
* <pre>
*
* List tabs=new ArrayList();
*
* tabs.add(new AbstractTab(new Model<String>("first tab")) {
*
* public Panel getPanel(String panelId)
* {
* return new TabPanel1(panelId);
* }
*
* });
*
* tabs.add(new AbstractTab(new Model<String>("second tab")) {
*
* public Panel getPanel(String panelId)
* {
* return new TabPanel2(panelId);
* }
*
* });
*
* add(new TabbedPanel("tabs", tabs));
*
*
* <span wicket:id="tabs" class="tabpanel">[tabbed panel will be here]</span>
*
*
* </pre>
*
* </p>
*
* <p>
* For a complete example see the component references in wicket-examples project
* </p>
*
* @see org.apache.wicket.extensions.markup.html.tabs.ITab
*
* @author Igor Vaynberg (ivaynberg at apache dot org)
*
*/
public class TabbedPanel extends Panel
{
private static final long serialVersionUID = 1L;
/**
* id used for child panels
*/
public static final String TAB_PANEL_ID = "panel";
private final List<ITab> tabs;
private transient Boolean[] tabsVisibilityCache;
/**
* Constructor
*
* @param id
* component id
* @param tabs
* list of ITab objects used to represent tabs
*/
public TabbedPanel(String id, List<ITab> tabs)
{
super(id, new Model<Integer>(new Integer(-1)));
if (tabs == null)
{
throw new IllegalArgumentException("argument [tabs] cannot be null");
}
this.tabs = tabs;
final IModel<Integer> tabCount = new AbstractReadOnlyModel<Integer>()
{
private static final long serialVersionUID = 1L;
@Override
public Integer getObject()
{
return TabbedPanel.this.tabs.size();
}
};
WebMarkupContainer tabsContainer = new WebMarkupContainer("tabs-container")
{
private static final long serialVersionUID = 1L;
@Override
protected void onComponentTag(ComponentTag tag)
{
super.onComponentTag(tag);
tag.put("class", getTabContainerCssClass());
}
};
add(tabsContainer);
// add the loop used to generate tab names
tabsContainer.add(new Loop("tabs", tabCount)
{
private static final long serialVersionUID = 1L;
@Override
protected void populateItem(LoopItem item)
{
final int index = item.getIteration();
final ITab tab = (TabbedPanel.this.tabs.get(index));
final WebMarkupContainer titleLink = newLink("link", index);
titleLink.add(newTitle("title", tab.getTitle(), index));
item.add(titleLink);
}
@Override
protected LoopItem newItem(int iteration)
{
return newTabContainer(iteration);
}
});
}
/**
* Generates a loop item used to represent a specific tab's <code>li</code> element.
*
* @param tabIndex
* @return new loop item
*/
protected LoopItem newTabContainer(final int tabIndex)
{
return new LoopItem(tabIndex)
{
private static final long serialVersionUID = 1L;
@Override
protected void onComponentTag(ComponentTag tag)
{
super.onComponentTag(tag);
String cssClass = (String)tag.getString("class");
if (cssClass == null)
{
cssClass = " ";
}
cssClass += " tab" + getIteration();
if (getIteration() == getSelectedTab())
{
cssClass += " selected";
}
if (getIteration() == getTabs().size() - 1)
{
cssClass += " last";
}
tag.put("class", cssClass.trim());
}
@Override
public boolean isVisible()
{
return getTabs().get(tabIndex).isVisible();
}
};
}
// @see org.apache.wicket.Component#onAttach()
@Override
protected void onBeforeRender()
{
if (getSelectedTab() == -1 || isTabVisible(getSelectedTab()) == false)
{
// find first visible selected tab
int selected = 0;
for (int i = 0; i < tabs.size(); i++)
{
if (isTabVisible(i))
{
selected = i;
break;
}
}
if (selected == tabs.size())
{
/*
* none of the tabs are selected...
*
* we do not need to do anything special because the check in setSelectedTab() will
* replace the current tab panel with an empty one
*/
selected = 0;
}
setSelectedTab(selected);
}
super.onBeforeRender();
}
/**
* @return the value of css class attribute that will be added to a div containing the tabs. The
* default value is <code>tab-row</code>
*/
protected String getTabContainerCssClass()
{
return "tab-row";
}
/**
* @return list of tabs that can be used by the user to add/remove/reorder tabs in the panel
*/
public final List<ITab> getTabs()
{
return tabs;
}
/**
* Factory method for tab titles. Returned component can be anything that can attach to span
* tags such as a fragment, panel, or a label
*
* @param titleId
* id of title component
* @param titleModel
* model containing tab title
* @param index
* index of tab
* @return title component
*/
protected Component newTitle(String titleId, IModel<?> titleModel, int index)
{
return new Label(titleId, titleModel);
}
/**
* Factory method for links used to switch between tabs.
*
* The created component is attached to the following markup. Label component with id: title
* will be added for you by the tabbed panel.
*
* <pre>
* <a href="#" wicket:id="link"><span wicket:id="title">[[tab title]]</span></a>
* </pre>
*
* Example implementation:
*
* <pre>
* protected WebMarkupContainer newLink(String linkId, final int index)
* {
* return new Link(linkId)
* {
* private static final long serialVersionUID = 1L;
*
* public void onClick()
* {
* setSelectedTab(index);
* }
* };
* }
* </pre>
*
* @param linkId
* component id with which the link should be created
* @param index
* index of the tab that should be activated when this link is clicked. See
* {@link #setSelectedTab(int)}.
* @return created link component
*/
protected WebMarkupContainer newLink(String linkId, final int index)
{
return new Link(linkId)
{
private static final long serialVersionUID = 1L;
@Override
public void onClick()
{
setSelectedTab(index);
}
};
}
/**
* sets the selected tab
*
* @param index
* index of the tab to select
*
*/
public void setSelectedTab(int index)
{
if (index < 0 || (index >= tabs.size() && index > 0))
{
throw new IndexOutOfBoundsException();
}
setDefaultModelObject(new Integer(index));
final Component component;
if (tabs.size() == 0 || !isTabVisible(index))
{
// no tabs or the currently selected tab is not visible
component = new WebMarkupContainer(TAB_PANEL_ID);
}
else
{
// show panel from selected tab
ITab tab = tabs.get(index);
component = tab.getPanel(TAB_PANEL_ID);
if (component == null)
{
throw new WicketRuntimeException("ITab.getPanel() returned null. TabbedPanel [" +
getPath() + "] ITab index [" + index + "]");
}
}
if (!component.getId().equals(TAB_PANEL_ID))
{
throw new WicketRuntimeException(
"ITab.getPanel() returned a panel with invalid id [" +
component.getId() +
"]. You must always return a panel with id equal to the provided panelId parameter. TabbedPanel [" +
getPath() + "] ITab index [" + index + "]");
}
addOrReplace(component);
}
/**
* @return index of the selected tab
*/
public final int getSelectedTab()
{
return (Integer)getDefaultModelObject();
}
private boolean isTabVisible(int tabIndex)
{
if (tabsVisibilityCache == null)
{
tabsVisibilityCache = new Boolean[tabs.size()];
}
Boolean visible = tabsVisibilityCache[tabIndex];
if (visible == null)
{
visible = tabs.get(tabIndex).isVisible();
tabsVisibilityCache[tabIndex] = visible;
}
return visible;
}
@Override
protected void onDetach()
{
tabsVisibilityCache = null;
super.onDetach();
}
}
| fixed WICKET-2153 TabbedPanel accepts only ITab(s)
git-svn-id: 5a74b5304d8e7e474561603514f78b697e5d94c4@763126 13f79535-47bb-0310-9956-ffa450edef68
| wicket-extensions/src/main/java/org/apache/wicket/extensions/markup/html/tabs/TabbedPanel.java | fixed WICKET-2153 TabbedPanel accepts only ITab(s) | <ide><path>icket-extensions/src/main/java/org/apache/wicket/extensions/markup/html/tabs/TabbedPanel.java
<ide> /**
<ide> * TabbedPanel component represets a panel with tabs that are used to switch between different
<ide> * content panels inside the TabbedPanel panel.
<del> *
<ide> * <p>
<del> * <b>Note:</b> When the currently selected tab is replaced by changing the underlying list of
<del> * tabs, the change is not picked up unless a call is made to {@link #setSelectedTab(int)}.
<add> * <b>Note:</b> When the currently selected tab is replaced by changing the underlying list of tabs,
<add> * the change is not picked up unless a call is made to {@link #setSelectedTab(int)}.
<ide> * <p>
<del> *
<ide> * Example:
<ide> *
<ide> * <pre>
<del> *
<ide> * List tabs=new ArrayList();
<del> *
<ide> * tabs.add(new AbstractTab(new Model<String>("first tab")) {
<del> *
<ide> * public Panel getPanel(String panelId)
<ide> * {
<ide> * return new TabPanel1(panelId);
<ide> * }
<del> *
<ide> * });
<ide> *
<ide> * tabs.add(new AbstractTab(new Model<String>("second tab")) {
<del> *
<ide> * public Panel getPanel(String panelId)
<ide> * {
<ide> * return new TabPanel2(panelId);
<ide> * }
<del> *
<ide> * });
<ide> *
<ide> * add(new TabbedPanel("tabs", tabs));
<ide> *
<del> *
<ide> * <span wicket:id="tabs" class="tabpanel">[tabbed panel will be here]</span>
<del> *
<del> *
<ide> * </pre>
<del> *
<del> * </p>
<del> *
<ide> * <p>
<ide> * For a complete example see the component references in wicket-examples project
<del> * </p>
<ide> *
<ide> * @see org.apache.wicket.extensions.markup.html.tabs.ITab
<ide> *
<ide> {
<ide> private static final long serialVersionUID = 1L;
<ide>
<del> /**
<del> * id used for child panels
<del> */
<add> /** id used for child panels */
<ide> public static final String TAB_PANEL_ID = "panel";
<ide>
<del>
<del> private final List<ITab> tabs;
<add> private final List<? extends ITab> tabs;
<ide>
<ide> private transient Boolean[] tabsVisibilityCache;
<ide>
<ide> * @param tabs
<ide> * list of ITab objects used to represent tabs
<ide> */
<del> public TabbedPanel(String id, List<ITab> tabs)
<add> public TabbedPanel(String id, List<? extends ITab> tabs)
<ide> {
<ide> super(id, new Model<Integer>(new Integer(-1)));
<ide>
<ide> {
<ide> return newTabContainer(iteration);
<ide> }
<del>
<ide> });
<ide> }
<del>
<ide>
<ide> /**
<ide> * Generates a loop item used to represent a specific tab's <code>li</code> element.
<ide> {
<ide> return getTabs().get(tabIndex).isVisible();
<ide> }
<del>
<ide> };
<ide> }
<ide>
<del>
<del> // @see org.apache.wicket.Component#onAttach()
<add> /**
<add> * @see org.apache.wicket.Component#onBeforeRender()
<add> */
<ide> @Override
<ide> protected void onBeforeRender()
<ide> {
<ide> /**
<ide> * @return list of tabs that can be used by the user to add/remove/reorder tabs in the panel
<ide> */
<del> public final List<ITab> getTabs()
<add> public final List<? extends ITab> getTabs()
<ide> {
<ide> return tabs;
<ide> }
<ide> return new Label(titleId, titleModel);
<ide> }
<ide>
<del>
<ide> /**
<ide> * Factory method for links used to switch between tabs.
<ide> *
<ide>
<ide> final Component component;
<ide>
<del>
<ide> if (tabs.size() == 0 || !isTabVisible(index))
<ide> {
<ide> // no tabs or the currently selected tab is not visible
<ide>
<ide> }
<ide> }
<del>
<ide>
<ide> if (!component.getId().equals(TAB_PANEL_ID))
<ide> {
<ide> tabsVisibilityCache = null;
<ide> super.onDetach();
<ide> }
<del>
<ide> } |
|
JavaScript | apache-2.0 | 2cb1b01dcc37f02fc24023450a538efff1df9201 | 0 | braineo/gapp-for-office,braineo/gapp-for-office | /* cardUI
* generate day and weekday every day
* markout who did not punch in previous day
*/
// Day of year
function dayOfYear(date) {
if (!(date instanceof Date)) {
return 0;
}
var start = new Date(date.getFullYear(), 0, 0);
var diff = date - start;
var oneDay = 86400000; // (1000*60*60*24)
return Math.floor(diff / oneDay);
}
function dayRoutine() {
var row = dayOfYear(new Date());
var offset = 2;
var punchCardSheet = SpreadsheetApp.openById('1d3jLeX_FcNEEq_bQvcK7LRKQnq01-8hKHH5JSR_HH5k').getSheetByName("Punch");
generateDayHead(punchCardSheet, row + offset);
markMiss(punchCardSheet, row + offset);
}
function generateDayHead(sheet, row) {
var today = new Date();
sheet.getRange(row, 1).setValue(today);
var weekDay = {
1: '月',
2: '火',
3: '水',
4: '木',
5: '金',
6: '土',
0: '日'
};
sheet.getRange(row, 2).setValue(weekDay[today.getDay()]);
}
function markMiss(sheet, row) {
var header = sheet.getRange("1:1").getValues();
for (var i = 3; i <= header[0].length; i++) {
if (sheet.getRange(row, i).isBlank()) {
sheet.getRange(row, i).setBackground("red");
}
}
}
function onOpen(e) {
var row = dayOfYear(new Date());
var punchCardSheet = SpreadsheetApp.openById('1d3jLeX_FcNEEq_bQvcK7LRKQnq01-8hKHH5JSR_HH5k').getSheetByName("Punch");
SpreadsheetApp.setActiveRange(punchCardSheet.getRange(row + 10, 1)); //Move activate cell to recent date
}
| cardUI.js | /* cardUI
* generate day and weekday every day
* markout who did not punch in previous day
*/
// Day of year
function dayOfYear(date) {
if (!(date instanceof Date)) {
return 0;
}
var start = new Date(date.getFullYear(), 0, 0);
var diff = date - start;
var oneDay = 86400000; // (1000*60*60*24)
return Math.floor(diff / oneDay);
}
function dayRoutine() {
var row = dayOfYear(new Date());
var offset = 2;
var punchCardSheet = SpreadsheetApp.openById('1d3jLeX_FcNEEq_bQvcK7LRKQnq01-8hKHH5JSR_HH5k').getSheetByName("Punch");
generateDayHead(punchCardSheet, row + offset);
markMiss(punchCardSheet, row + offset);
}
function generateDayHead(sheet, row) {
var today = new Date();
sheet.getRange(row, 1).setValue(today);
var weekDay = { 1: '月', 2: '火', 3: '水', 4: '木', 5: '金', 6: '土', 7: '日' };
sheet.getRange(row, 2).setValue(weekDay[today.getDay()]);
}
function markMiss(sheet, row) {
var header = sheet.getRange("1:1").getValues();
Logger.log(header);
for(var i=3; i<=header[0].length; i++){
if(sheet.getRange(row, i).isBlank()){
sheet.getRange(row, i).setBackground("red");
}
}
}
function onOpen(e){
var row = dayOfYear(new Date());
var punchCardSheet = SpreadsheetApp.openById('1d3jLeX_FcNEEq_bQvcK7LRKQnq01-8hKHH5JSR_HH5k').getSheetByName("Punch");
SpreadsheetApp.setActiveRange(punchCardSheet.getRange(row + 10, 1)); //Move activate cell to recent date
} | getDay, Sunday is 0 not 7
| cardUI.js | getDay, Sunday is 0 not 7 | <ide><path>ardUI.js
<ide> /* cardUI
<del>* generate day and weekday every day
<del>* markout who did not punch in previous day
<del>*/
<add> * generate day and weekday every day
<add> * markout who did not punch in previous day
<add> */
<ide>
<ide> // Day of year
<ide> function dayOfYear(date) {
<ide> function generateDayHead(sheet, row) {
<ide> var today = new Date();
<ide> sheet.getRange(row, 1).setValue(today);
<del> var weekDay = { 1: '月', 2: '火', 3: '水', 4: '木', 5: '金', 6: '土', 7: '日' };
<add> var weekDay = {
<add> 1: '月',
<add> 2: '火',
<add> 3: '水',
<add> 4: '木',
<add> 5: '金',
<add> 6: '土',
<add> 0: '日'
<add> };
<ide> sheet.getRange(row, 2).setValue(weekDay[today.getDay()]);
<ide> }
<ide>
<ide> function markMiss(sheet, row) {
<del> var header = sheet.getRange("1:1").getValues();
<del> Logger.log(header);
<del> for(var i=3; i<=header[0].length; i++){
<del> if(sheet.getRange(row, i).isBlank()){
<add> var header = sheet.getRange("1:1").getValues();
<add> for (var i = 3; i <= header[0].length; i++) {
<add> if (sheet.getRange(row, i).isBlank()) {
<ide> sheet.getRange(row, i).setBackground("red");
<ide> }
<ide> }
<ide> }
<ide>
<del>function onOpen(e){
<add>function onOpen(e) {
<ide> var row = dayOfYear(new Date());
<ide> var punchCardSheet = SpreadsheetApp.openById('1d3jLeX_FcNEEq_bQvcK7LRKQnq01-8hKHH5JSR_HH5k').getSheetByName("Punch");
<ide> SpreadsheetApp.setActiveRange(punchCardSheet.getRange(row + 10, 1)); //Move activate cell to recent date
<del>}
<add>} |
|
Java | apache-2.0 | 54eb526eab4329b9187ca80b58d2442552f4dc07 | 0 | actframework/act-guice-plugin | package act.di.guice;
import act.app.App;
import act.di.DependencyInjector;
import act.di.DiBinder;
import act.event.ActEventListener;
import act.event.ActEventListenerBase;
import act.plugin.AppServicePlugin;
import java.util.EventObject;
public class InjectorInitializer extends AppServicePlugin {
@Override
protected void applyTo(final App app) {
app.eventBus().bind(DiBinder.class, new ActEventListenerBase<DiBinder>() {
@Override
public void on(DiBinder event) throws Exception {
DependencyInjector injector = app.injector();
if (null == injector) {
injector = new GuiceDependencyInjector(app);
} else if (!(injector instanceof GuiceDependencyInjector)) {
return;
}
((GuiceDependencyInjector)injector).registerDiBinder(event);
}
});
app.jobManager().beforeAppStart(new Runnable() {
@Override
public void run() {
DependencyInjector injector = app.injector();
if (null == injector) {
new GuiceDependencyInjector(app);
}
}
});
}
}
| src/main/java/act/di/guice/InjectorInitializer.java | package act.di.guice;
import act.app.App;
import act.di.DependencyInjector;
import act.di.DiBinder;
import act.event.ActEventListener;
import act.event.ActEventListenerBase;
import act.plugin.AppServicePlugin;
import java.util.EventObject;
public class InjectorInitializer extends AppServicePlugin {
@Override
protected void applyTo(final App app) {
app.eventBus().bind(DiBinder.class, new ActEventListenerBase<DiBinder>() {
@Override
public void on(DiBinder event) throws Exception {
GuiceDependencyInjector injector = app.injector();
if (null == injector) {
injector = new GuiceDependencyInjector(app);
}
injector.registerDiBinder(event);
}
});
app.jobManager().beforeAppStart(new Runnable() {
@Override
public void run() {
DependencyInjector injector = app.injector();
if (null == injector) {
new GuiceDependencyInjector(app);
}
}
});
}
}
| fix type mismatch issue
| src/main/java/act/di/guice/InjectorInitializer.java | fix type mismatch issue | <ide><path>rc/main/java/act/di/guice/InjectorInitializer.java
<ide> app.eventBus().bind(DiBinder.class, new ActEventListenerBase<DiBinder>() {
<ide> @Override
<ide> public void on(DiBinder event) throws Exception {
<del> GuiceDependencyInjector injector = app.injector();
<add> DependencyInjector injector = app.injector();
<ide> if (null == injector) {
<ide> injector = new GuiceDependencyInjector(app);
<add> } else if (!(injector instanceof GuiceDependencyInjector)) {
<add> return;
<ide> }
<del> injector.registerDiBinder(event);
<add> ((GuiceDependencyInjector)injector).registerDiBinder(event);
<ide> }
<ide> });
<ide> app.jobManager().beforeAppStart(new Runnable() { |
|
JavaScript | isc | 8702636bc0332a3724f448ad5a97b4891e89beea | 0 | SteamedPears/Code-Review,SteamedPears/Code-Review | // Libraries
var https = require('https');
var url = require('url');
var uuid = require('node-uuid');
var db = require('redis').createClient();
/******************************************************************************
* Handle DB errors *
******************************************************************************/
db.on('error', function (err) {
console.error('DB Error: ' + err);
});
/******************************************************************************
* Helper Functions *
******************************************************************************/
function success(response, ob) {
response.writeHead(200, {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*'
});
response.write(JSON.stringify(ob));
response.end();
}
function error(response, errno, errtext) {
console.error('===ERROR===', errno, errtext);
response.writeHead(errno, {'Content-Type': 'application/json'});
response.write(JSON.stringify({error:errtext}));
response.end();
}
function isUndefined(x) {
return x === undefined;
}
function isString(x) {
return (typeof x) === 'string';
}
function isBlank(str) {
return str.trim() === '';
}
function isValidString(x) {
return !isUndefined(x) && isString(x) && !isBlank(x);
}
function isValidPositiveIntegerString(x) {
return isValidString(x) && x.trim().match(/^\d+$/) !== null;
}
/******************************************************************************
* Getters *
******************************************************************************/
function codeByID(request, response) {
var query = url.parse(request.url, true).query;
var id = query.id;
if (id === undefined) {
return error(response, 400, 'Invalid code id');
}
db.get('code:' + id, function(err, reply) {
if (err !== null) {
return error(response, 500, 'Error while reading from database.');
}
if (reply === null) {
return error(response, 404, 'Code not found');
}
return success(response, JSON.parse(reply));
});
};
function commentsOnLine(request, response) {
var query = url.parse(request.url, true).query;
var id = query.code_id;
var line = query.line;
if (id === undefined) {
return error(response, 400, 'Invalid comment id');
}
if (line === undefined) {
return error(response, 400, 'Invalid line number');
}
db.lrange('comment:' + id + ':' + line, 0, -1, function(err, reply) {
if (err !== null) {
return error(response, 500, 'Error while reading from database.');
}
if (reply === null) {
return error(response, 404, 'Comment not found');
}
var out = [];
reply.forEach(function(value) {
out.push(JSON.parse(value));
});
return success(response, out);
});
};
function commentCount(request, response) {
var query = url.parse(request.url, true).query;
var code_id = query.code_id;
db.smembers('comment:' + code_id + ':indices', function(err, indices) {
if (err !== null) {
return error(response, 500, 'Error while reading from database.');
}
if (indices === null) {
return error(response, 404, 'Comments not found.');
}
var multi = db.multi();
indices.forEach(function(index) {
multi.llen('comment:' + code_id + ':' + index);
});
multi.exec(function(err, lengths) {
var out = {};
lengths.forEach(function(length, i) {
out[indices[i]] = length;
});
return success(response, out);
});
});
};
/******************************************************************************
* Setters *
******************************************************************************/
function newcode(request, response) {
// do some basic validation
var fields = request.body;
if (fields === null || !isValidString(fields.text)) {
return error(response, 400, 'Invalid code text.');
}
var id=uuid.v4();
var data = {
text: fields.text,
lang: fields.lang
};
db.set('code:' + id, JSON.stringify(data), function(err) {
if (err !== null) {
return error(response, 500, 'Error while writing to database.');
}
return success(response, {id:id});
});
};
function newcomment(request, response) {
// reject if no referer
if (request === null ||
request.headers === undefined ||
request.headers.referer === undefined) {
return error(response, 400, 'Invalid referer');
}
// do some basic validation
var fields = request.body;
if (fields === null ||
!isValidString(fields.text) ||
!isValidPositiveIntegerString(fields.line_start) ||
!isValidPositiveIntegerString(fields.line_end)) {
return error(response, 400, 'Invalid field');
}
// make sure the line numbers are sane
if (Number(fields.line_start) > Number(fields.line_end)) {
return error(response, 400, 'Invalid line numbers');
}
// upon successfully saving comment, this function will update comment indices
var data = {
user: 'Anonymous',
code_id: fields.code_id,
text: fields.text,
line_start: fields.line_start,
line_end: fields.line_end,
diffs: fields.diffs
};
if(request.session.email) {
console.log('new comment by ' + request.session.email);
data.user = request.session.email;
}
db.multi()
.rpush('comment:' + data.code_id + ':' + data.line_start,
JSON.stringify(data))
.sadd('comment:' + data.code_id + ':indices', data.line_start)
.exec(function(err) {
if (err !== null) {
return error(response, 500, 'Error while writing to database.');
}
return success(response, data);
});
};
/******************************************************************************
* Authentication *
* *
* In the event of implementing more than just persona for authentication, you *
* should replace most of the following code with a library like *
* *
* passport http://passportjs.org/ *
* passport-browserid https://github.com/jaredhanson/passport-browserid *
* *
******************************************************************************/
module.exports = function(host, clientPort) {
var exports = {};
exports.login = function login(request, response) {
if(request === null ||
request.body === null ||
request.body.assertion === null) {
return error(response, 400, 'Invalid assertion');
}
var assertion = request.body.assertion;
var content = 'assertion=' + assertion + '&audience=http://';
content += host + ':' + clientPort;
var auth_request = https.request({
host: 'verifier.login.persona.org',
port: 443,
path: '/verify',
method: 'POST',
headers: {'Content-Length': content.length,
'Content-Type': 'application/x-www-form-urlencoded'}
},function(auth_response) {
var data_ob = null;
// assume uft8
auth_response.setEncoding('utf8');
auth_response.on('data',function(data) {
data_ob = JSON.parse(data);
});
auth_response.on('end',function() {
if (auth_response.statusCode === 200 &&
data_ob !== null &&
data_ob.status === 'okay') {
request.session.email = data_ob.email;
return success(response, {email: data_ob.email});
} else {
return error(response,
auth_response.statusCode,
'Error validating assertion');
}
});
});
auth_request.write(content);
auth_request.end();
};
exports.logout = function logout(request, response) {
delete request.session.email;
return success(response, {});
};
exports.codeByID = codeByID;
exports.commentsOnLine = commentsOnLine;
exports.commentCount = commentCount;
exports.newcode = newcode;
exports.newcomment = newcomment;
return exports;
};
/* vim: set softtabstop=2 shiftwidth=2 tabstop=8 expandtab textwidth=80: */
| src/server/requestHandlers.js | // Libraries
var https = require('https');
var url = require('url');
var uuid = require('node-uuid');
var db = require('redis').createClient();
/******************************************************************************
* Handle DB errors *
******************************************************************************/
db.on('error', function (err) {
console.error('DB Error: ' + err);
});
/******************************************************************************
* Helper Functions *
******************************************************************************/
function success(response, ob) {
response.writeHead(200, {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*'
});
response.write(JSON.stringify(ob));
response.end();
}
function error(response, errno, errtext) {
console.error('===ERROR===', errno, errtext);
response.writeHead(errno, {'Content-Type': 'application/json'});
response.write(JSON.stringify({error:errtext}));
response.end();
}
function isUndefined(x) {
return x === undefined;
}
function isString(x) {
return (typeof x) === 'string';
}
function isBlank(str) {
return str.trim() === '';
}
function isValidString(x) {
return !isUndefined(x) && isString(x) && !isBlank(x);
}
function isValidPositiveIntegerString(x) {
return isValidString(x) && x.trim().match(/^\d+$/) !== null;
}
/******************************************************************************
* Getters *
******************************************************************************/
function codeByID(request, response) {
var query = url.parse(request.url, true).query;
var id = query.id;
if (id === undefined) {
return error(response, 400, 'Invalid code id');
}
db.get('code:' + id, function(err, reply) {
if (err !== null) {
return error(response, 500, 'Error while reading from database.');
}
if (reply === null) {
return error(response, 404, 'Code not found');
}
return success(response, JSON.parse(reply));
});
};
function commentsOnLine(request, response) {
var query = url.parse(request.url, true).query;
var id = query.code_id;
var line = query.line;
if (id === undefined) {
return error(response, 400, 'Invalid comment id');
}
if (line === undefined) {
return error(response, 400, 'Invalid line number');
}
db.lrange('comment:' + id + ':' + line, 0, -1, function(err, reply) {
if (err !== null) {
return error(response, 500, 'Error while reading from database.');
}
if (reply === null) {
return error(response, 404, 'Comment not found');
}
var out = [];
reply.forEach(function(value) {
out.push(JSON.parse(value));
});
return success(response, out);
});
};
function commentCount(request, response) {
var query = url.parse(request.url, true).query;
var code_id = query.code_id;
db.smembers('comment:' + code_id + ':indices', function(err, indices) {
if (err !== null) {
return error(response, 500, 'Error while reading from database.');
}
if (indices === null) {
return error(response, 404, 'Comments not found.');
}
var multi = db.multi();
indices.forEach(function(index) {
multi.llen('comment:' + code_id + ':' + index);
});
multi.exec(function(err, lengths) {
var out = {};
lengths.forEach(function(length, i) {
out[indices[i]] = length;
});
return success(response, out);
});
});
};
/******************************************************************************
* Setters *
******************************************************************************/
function newcode(request, response) {
// do some basic validation
var obj = request.body;
if (obj === null || !isValidString(obj.text)) {
return error(response, 400, 'Invalid code text.');
}
var id=uuid.v4();
db.set('code:' + id, JSON.stringify(obj), function(err) {
if (err !== null) {
return error(response, 500, 'Error while writing to database.');
}
return success(response, {id:id});
});
};
function newcomment(request, response) {
// reject if no referer
if (request === null ||
request.headers === undefined ||
request.headers.referer === undefined) {
return error(response, 400, 'Invalid referer');
}
// do some basic validation
var fields = request.body;
if (fields === null ||
!isValidString(fields.text) ||
!isValidPositiveIntegerString(fields.line_start) ||
!isValidPositiveIntegerString(fields.line_end)) {
return error(response, 400, 'Invalid field');
}
// make sure the line numbers are sane
if (Number(fields.line_start) > Number(fields.line_end)) {
return error(response, 400, 'Invalid line numbers');
}
// upon successfully saving comment, this function will update comment indices
var data = {
user: 'Anonymous',
code_id: fields.code_id,
text: fields.text,
line_start: fields.line_start,
line_end: fields.line_end,
diffs: fields.diffs
};
if(request.session.email) {
console.log('new comment by ' + request.session.email);
data.user = request.session.email;
}
db.multi()
.rpush('comment:' + data.code_id + ':' + data.line_start,
JSON.stringify(data))
.sadd('comment:' + data.code_id + ':indices', data.line_start)
.exec(function(err) {
if (err !== null) {
return error(response, 500, 'Error while writing to database.');
}
return success(response, data);
});
};
/******************************************************************************
* Authentication *
* *
* In the event of implementing more than just persona for authentication, you *
* should replace most of the following code with a library like *
* *
* passport http://passportjs.org/ *
* passport-browserid https://github.com/jaredhanson/passport-browserid *
* *
******************************************************************************/
module.exports = function(host, clientPort) {
  var exports = {};

  // POST /login — verifies a Mozilla Persona (BrowserID) assertion with the
  // remote verifier and, on success, records the user's email in the session.
  exports.login = function login(request, response) {
    // Loose equality (== null) also rejects `undefined`; the previous
    // strict `=== null` checks let an absent body slip through and then
    // crashed when reading `.assertion` of undefined.
    if(request == null ||
       request.body == null ||
       request.body.assertion == null) {
      return error(response, 400, 'Invalid assertion');
    }
    var assertion = request.body.assertion;
    var content = 'assertion=' + assertion + '&audience=http://';
    content += host + ':' + clientPort;
    var auth_request = https.request({
      host: 'verifier.login.persona.org',
      port: 443,
      path: '/verify',
      method: 'POST',
      // Content-Length must be the byte length, not the character count;
      // the two differ as soon as the assertion contains non-ASCII text.
      headers: {'Content-Length': Buffer.byteLength(content),
                'Content-Type': 'application/x-www-form-urlencoded'}
    },function(auth_response) {
      // The response body may arrive in several 'data' chunks; accumulate
      // them and parse once the stream ends. Parsing inside the 'data'
      // handler breaks whenever the JSON spans a chunk boundary.
      var body = '';
      auth_response.setEncoding('utf8');
      auth_response.on('data',function(data) {
        body += data;
      });
      auth_response.on('end',function() {
        var data_ob = null;
        try {
          data_ob = JSON.parse(body);
        } catch (e) {
          // Malformed verifier response: leave data_ob null so the
          // failure is reported through the error branch below.
        }
        if (auth_response.statusCode === 200 &&
            data_ob !== null &&
            data_ob.status === 'okay') {
          request.session.email = data_ob.email;
          return success(response, {email: data_ob.email});
        } else {
          return error(response,
                       auth_response.statusCode,
                       'Error validating assertion');
        }
      });
    });
    auth_request.write(content);
    auth_request.end();
  };

  // POST /logout — drops the authenticated email from the session.
  exports.logout = function logout(request, response) {
    delete request.session.email;
    return success(response, {});
  };
  exports.codeByID = codeByID;
  exports.commentsOnLine = commentsOnLine;
  exports.commentCount = commentCount;
  exports.newcode = newcode;
  exports.newcomment = newcomment;
  return exports;
};
/* vim: set softtabstop=2 shiftwidth=2 tabstop=8 expandtab textwidth=80: */
| Now whitelisting code objects before storing
| src/server/requestHandlers.js | Now whitelisting code objects before storing | <ide><path>rc/server/requestHandlers.js
<ide> ******************************************************************************/
<ide> function newcode(request, response) {
<ide> // do some basic validation
<del> var obj = request.body;
<del> if (obj === null || !isValidString(obj.text)) {
<add> var fields = request.body;
<add> if (fields === null || !isValidString(fields.text)) {
<ide> return error(response, 400, 'Invalid code text.');
<ide> }
<ide> var id=uuid.v4();
<del> db.set('code:' + id, JSON.stringify(obj), function(err) {
<add> var data = {
<add> text: fields.text,
<add> lang: fields.lang
<add> };
<add> db.set('code:' + id, JSON.stringify(data), function(err) {
<ide> if (err !== null) {
<ide> return error(response, 500, 'Error while writing to database.');
<ide> } |
|
JavaScript | apache-2.0 | 33af5cd7c6af3d2001de37ee4077e09d5ebf0621 | 0 | node-red/node-red,mw75/node-red,mw75/node-red,mw75/node-red,node-red/node-red,mw75/node-red,node-red/node-red | /**
* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* @ignore
**/
/**
* Internationalization utilities
* @mixin @node-red/util_i18n
*/
var i18n = require("i18next");
var path = require("path");
var fs = require("fs");
var defaultLang = "en-US";
var resourceMap = {};
var resourceCache = {};
var initPromise;
/**
* Register multiple message catalogs with i18n.
* @memberof @node-red/util_i18n
*/
function registerMessageCatalogs(catalogs) {
var promises = catalogs.map(function(catalog) {
return registerMessageCatalog(catalog.namespace,catalog.dir,catalog.file).catch(err => {});
});
return Promise.all(promises);
}
/**
* Register a message catalog with i18n.
* @memberof @node-red/util_i18n
*/
async function registerMessageCatalog(namespace,dir,file) {
return initPromise.then(function() {
return new Promise((resolve,reject) => {
resourceMap[namespace] = { basedir:dir, file:file, lngs: []};
fs.readdir(dir,function(err, files) {
if (err) {
resolve();
} else {
files.forEach(function(f) {
if (fs.existsSync(path.join(dir,f,file))) {
resourceMap[namespace].lngs.push(f);
}
});
i18n.loadNamespaces(namespace,function() {
resolve();
});
}
})
});
});
}
/**
 * Recursively copies entries from `fallback` into `catalog` without
 * overwriting values that `catalog` already defines. When both sides hold
 * an object under the same key, the two objects are merged key by key.
 */
function mergeCatalog(fallback, catalog) {
    Object.keys(fallback).forEach(function(key) {
        var fallbackValue = fallback[key];
        if (!catalog[key]) {
            catalog[key] = fallbackValue;
        } else if (typeof fallbackValue === 'object') {
            mergeCatalog(fallbackValue, catalog[key]);
        }
    });
}
async function readFile(lng, ns) {
if (/[^a-z\-]/i.test(lng)) {
throw new Error("Invalid language: "+lng)
}
if (resourceCache[ns] && resourceCache[ns][lng]) {
return resourceCache[ns][lng];
} else if (resourceMap[ns]) {
const file = path.join(resourceMap[ns].basedir, lng, resourceMap[ns].file);
const content = await fs.promises.readFile(file, "utf8");
resourceCache[ns] = resourceCache[ns] || {};
resourceCache[ns][lng] = JSON.parse(content.replace(/^\uFEFF/, ''));
var baseLng = lng.split('-')[0];
if (baseLng !== lng && resourceCache[ns][baseLng]) {
mergeCatalog(resourceCache[ns][baseLng], resourceCache[ns][lng]);
}
if (lng !== defaultLang) {
mergeCatalog(resourceCache[ns][defaultLang], resourceCache[ns][lng]);
}
return resourceCache[ns][lng];
} else {
throw new Error("Unrecognised namespace");
}
}
var MessageFileLoader = {
type: "backend",
init: function (services, backendOptions, i18nextOptions) { },
read: function (lng, ns, callback) {
readFile(lng, ns)
.then(data => callback(null, data))
.catch(err => {
if (/-/.test(lng)) {
// if reading language file fails -> try reading base language (e. g. 'fr' instead of 'fr-FR' or 'de' for 'de-DE')
var baseLng = lng.split('-')[0];
readFile(baseLng, ns).then(baseData => callback(null, baseData)).catch(err => callback(err));
} else {
callback(err);
}
});
}
}
/**
 * Determines the current locale from the usual POSIX environment
 * variables, honoured in order of precedence, and reduces the first one
 * found to its two-letter language code. Returns undefined when none of
 * the variables are set.
 */
function getCurrentLocale() {
    var candidates = ['LC_ALL', 'LC_MESSAGES', 'LANG'];
    for (var i = 0; i < candidates.length; i++) {
        var name = candidates[i];
        if (name in process.env) {
            return process.env[name].substring(0, 2);
        }
    }
    return undefined;
}
function init(settings) {
if (!initPromise) {
initPromise = new Promise((resolve,reject) => {
i18n.use(MessageFileLoader);
var opt = {
compatibilityJSON: 'v3',
// debug: true,
defaultNS: "runtime",
ns: [],
fallbackLng: defaultLang,
keySeparator: ".",
nsSeparator: ":",
interpolation: {
unescapeSuffix: 'HTML',
escapeValue: false,
prefix: '__',
suffix: '__'
}
};
var lang = settings.lang || getCurrentLocale();
if (lang) {
opt.lng = lang;
}
i18n.init(opt ,function() {
resolve();
});
});
}
}
/**
* Gets a message catalog.
* @name catalog
* @function
* @memberof @node-red/util_i18n
*/
function getCatalog(namespace,lang) {
    var result = null;
    lang = lang || defaultLang;
    // Reject language tags containing anything other than letters and
    // hyphens before they are used for cache lookups.
    // Fix: the error message previously referenced the undefined
    // identifier `lng` (a typo for `lang`), so this path threw a
    // ReferenceError instead of the intended validation error.
    if (/[^a-z\-]/i.test(lang)) {
        throw new Error("Invalid language: "+lang);
    }
    if (resourceCache.hasOwnProperty(namespace)) {
        result = resourceCache[namespace][lang];
        if (!result) {
            // Fall back from a regional variant (e.g. "fr-CA") to the
            // base language ("fr") when the exact locale is not cached.
            var langParts = lang.split("-");
            if (langParts.length == 2) {
                result = resourceCache[namespace][langParts[0]];
            }
        }
    }
    return result;
}
/**
* Gets a list of languages a given catalog is available in.
* @name availableLanguages
* @function
* @memberof @node-red/util_i18n
*/
function availableLanguages(namespace) {
if (resourceMap.hasOwnProperty(namespace)) {
return resourceMap[namespace].lngs
}
}
var obj = module.exports = {
init: init,
registerMessageCatalog: registerMessageCatalog,
registerMessageCatalogs: registerMessageCatalogs,
catalog: getCatalog,
availableLanguages: availableLanguages,
/**
* The underlying i18n library for when direct access is really needed
*/
i: i18n,
/**
* The default language of the runtime
*/
defaultLang: defaultLang
}
/**
* Perform a message catalog lookup.
* @name _
* @function
* @memberof @node-red/util_i18n
*/
obj['_'] = function() {
//var opts = {};
//if (def) {
// opts.defaultValue = def;
//}
//console.log(arguments);
var res = i18n.t.apply(i18n,arguments);
return res;
}
| packages/node_modules/@node-red/util/lib/i18n.js | /**
* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* @ignore
**/
/**
* Internationalization utilities
* @mixin @node-red/util_i18n
*/
var i18n = require("i18next");
var path = require("path");
var fs = require("fs");
var defaultLang = "en-US";
var resourceMap = {};
var resourceCache = {};
var initPromise;
/**
* Register multiple message catalogs with i18n.
* @memberof @node-red/util_i18n
*/
function registerMessageCatalogs(catalogs) {
var promises = catalogs.map(function(catalog) {
return registerMessageCatalog(catalog.namespace,catalog.dir,catalog.file).catch(err => {});
});
return Promise.all(promises);
}
/**
* Register a message catalog with i18n.
* @memberof @node-red/util_i18n
*/
async function registerMessageCatalog(namespace,dir,file) {
return initPromise.then(function() {
return new Promise((resolve,reject) => {
resourceMap[namespace] = { basedir:dir, file:file, lngs: []};
fs.readdir(dir,function(err, files) {
if (err) {
resolve();
} else {
files.forEach(function(f) {
if (fs.existsSync(path.join(dir,f,file))) {
resourceMap[namespace].lngs.push(f);
}
});
i18n.loadNamespaces(namespace,function() {
resolve();
});
}
})
});
});
}
function mergeCatalog(fallback,catalog) {
for (var k in fallback) {
if (fallback.hasOwnProperty(k)) {
if (!catalog[k]) {
catalog[k] = fallback[k];
} else if (typeof fallback[k] === 'object') {
mergeCatalog(fallback[k],catalog[k]);
}
}
}
}
async function readFile(lng, ns) {
if (/[^a-z\-]/i.test(lng)) {
throw new Error("Invalid language: "+lng)
}
if (resourceCache[ns] && resourceCache[ns][lng]) {
return resourceCache[ns][lng];
} else if (resourceMap[ns]) {
const file = path.join(resourceMap[ns].basedir, lng, resourceMap[ns].file);
const content = await fs.promises.readFile(file, "utf8");
resourceCache[ns] = resourceCache[ns] || {};
resourceCache[ns][lng] = JSON.parse(content.replace(/^\uFEFF/, ''));
var baseLng = lng.split('-')[0];
if (baseLng !== lng && resourceCache[ns][baseLng]) {
mergeCatalog(resourceCache[ns][baseLng], resourceCache[ns][lng]);
}
if (lng !== defaultLang) {
mergeCatalog(resourceCache[ns][defaultLang], resourceCache[ns][lng]);
}
return resourceCache[ns][lng];
} else {
throw new Error("Unrecognised namespace");
}
}
var MessageFileLoader = {
type: "backend",
init: function (services, backendOptions, i18nextOptions) { },
read: function (lng, ns, callback) {
readFile(lng, ns)
.then(data => callback(null, data))
.catch(err => {
if (/-/.test(lng)) {
// if reading language file fails -> try reading base language (e. g. 'fr' instead of 'fr-FR' or 'de' for 'de-DE')
var baseLng = lng.split('-')[0];
readFile(baseLng, ns).then(baseData => callback(null, baseData)).catch(err => callback(err));
} else {
callback(err);
}
});
}
}
function getCurrentLocale() {
var env = process.env;
for (var name of ['LC_ALL', 'LC_MESSAGES', 'LANG']) {
if (name in env) {
var val = env[name];
return val.substring(0, 2);
}
}
return undefined;
}
function init(settings) {
if (!initPromise) {
// Keep this as a 'when' promise as top-level red.js uses 'otherwise'
// and embedded users of NR may have copied that.
initPromise = new Promise((resolve,reject) => {
i18n.use(MessageFileLoader);
var opt = {
compatibilityJSON: 'v3',
// debug: true,
defaultNS: "runtime",
ns: [],
fallbackLng: defaultLang,
interpolation: {
unescapeSuffix: 'HTML',
escapeValue: false,
prefix: '__',
suffix: '__'
}
};
var lang = settings.lang || getCurrentLocale();
if (lang) {
opt.lng = lang;
}
i18n.init(opt ,function() {
resolve();
});
});
}
}
/**
* Gets a message catalog.
* @name catalog
* @function
* @memberof @node-red/util_i18n
*/
function getCatalog(namespace,lang) {
var result = null;
lang = lang || defaultLang;
if (/[^a-z\-]/i.test(lang)) {
throw new Error("Invalid language: "+lng)
}
if (resourceCache.hasOwnProperty(namespace)) {
result = resourceCache[namespace][lang];
if (!result) {
var langParts = lang.split("-");
if (langParts.length == 2) {
result = resourceCache[namespace][langParts[0]];
}
}
}
return result;
}
/**
* Gets a list of languages a given catalog is available in.
* @name availableLanguages
* @function
* @memberof @node-red/util_i18n
*/
function availableLanguages(namespace) {
if (resourceMap.hasOwnProperty(namespace)) {
return resourceMap[namespace].lngs
}
}
var obj = module.exports = {
init: init,
registerMessageCatalog: registerMessageCatalog,
registerMessageCatalogs: registerMessageCatalogs,
catalog: getCatalog,
availableLanguages: availableLanguages,
/**
* The underlying i18n library for when direct access is really needed
*/
i: i18n,
/**
* The default language of the runtime
*/
defaultLang: defaultLang
}
/**
* Perform a message catalog lookup.
* @name _
* @function
* @memberof @node-red/util_i18n
*/
obj['_'] = function() {
//var opts = {};
//if (def) {
// opts.defaultValue = def;
//}
//console.log(arguments);
var res = i18n.t.apply(i18n,arguments);
return res;
}
| Apply i18n namespace fix to runtime component
| packages/node_modules/@node-red/util/lib/i18n.js | Apply i18n namespace fix to runtime component | <ide><path>ackages/node_modules/@node-red/util/lib/i18n.js
<ide>
<ide> function init(settings) {
<ide> if (!initPromise) {
<del> // Keep this as a 'when' promise as top-level red.js uses 'otherwise'
<del> // and embedded users of NR may have copied that.
<ide> initPromise = new Promise((resolve,reject) => {
<ide> i18n.use(MessageFileLoader);
<ide> var opt = {
<ide> defaultNS: "runtime",
<ide> ns: [],
<ide> fallbackLng: defaultLang,
<add> keySeparator: ".",
<add> nsSeparator: ":",
<ide> interpolation: {
<ide> unescapeSuffix: 'HTML',
<ide> escapeValue: false, |
|
Java | apache-2.0 | 07f2cb608bd0aee7111cd498a59e6e98e6512635 | 0 | geethkokila/carbon-device-mgt,ruwany/carbon-device-mgt,madawas/carbon-device-mgt,sinthuja/carbon-device-mgt,prithvi66/carbon-device-mgt,harshanL/carbon-device-mgt,Megala21/carbon-device-mgt,GDLMadushanka/carbon-device-mgt,sameeragunarathne/carbon-device-mgt,sinthuja/carbon-device-mgt,rasika90/carbon-device-mgt,hasuniea/carbon-device-mgt,sameeragunarathne/carbon-device-mgt,wso2/carbon-device-mgt,prithvi66/carbon-device-mgt,sinthuja/carbon-device-mgt,chathurace/carbon-device-mgt,dilee/carbon-device-mgt,menakaj/carbon-device-mgt,sameeragunarathne/carbon-device-mgt,charithag/carbon-device-mgt,wso2/carbon-device-mgt,harshanL/carbon-device-mgt,Kamidu/carbon-device-mgt,ruwany/carbon-device-mgt,geethkokila/carbon-device-mgt,pasindujw/carbon-device-mgt,charithag/carbon-device-mgt,ruwany/carbon-device-mgt,DimalChandrasiri/carbon-device-mgt,Jasintha/carbon-device-mgt,wso2/carbon-device-mgt,pasindujw/carbon-device-mgt,menakaj/carbon-device-mgt,Jasintha/carbon-device-mgt,Megala21/carbon-device-mgt,rasika90/carbon-device-mgt,DimalChandrasiri/carbon-device-mgt,menakaj/carbon-device-mgt,madhawap/carbon-device-mgt,rasika90/carbon-device-mgt,chathurace/carbon-device-mgt,madawas/carbon-device-mgt,hasuniea/carbon-device-mgt,madhawap/carbon-device-mgt,rasika/carbon-device-mgt,Jasintha/carbon-device-mgt,geethkokila/carbon-device-mgt,GDLMadushanka/carbon-device-mgt,Kamidu/carbon-device-mgt,Kamidu/carbon-device-mgt,prithvi66/carbon-device-mgt,madawas/carbon-device-mgt,rasika/carbon-device-mgt,madhawap/carbon-device-mgt,DimalChandrasiri/carbon-device-mgt,dilee/carbon-device-mgt,Megala21/carbon-device-mgt,GDLMadushanka/carbon-device-mgt,chathurace/carbon-device-mgt,charithag/carbon-device-mgt,hasuniea/carbon-device-mgt,rasika/carbon-device-mgt,pasindujw/carbon-device-mgt,harshanL/carbon-device-mgt,dilee/carbon-device-mgt | /*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.common;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
@ApiModel(value = "DeviceIdentifier", description = "This contains device details that is used to identify a device " +
        "uniquely.")
public class DeviceIdentifier implements Serializable{

    // Pinned so the serialized form stays stable now that this class
    // declares additional methods. NOTE(review): if instances were
    // serialized with an earlier build, confirm compatibility with the
    // previously computed default UID before relying on cross-version
    // deserialization.
    private static final long serialVersionUID = 1L;

    @ApiModelProperty(
            name = "id",
            value = "Identity of the device.",
            required = true,
            example = "123456")
    @JsonProperty(value = "id", required = true)
    private String id;
    @ApiModelProperty(
            name = "type",
            value = "Type of the device.",
            required = true,
            example = "android")
    @JsonProperty(value = "type", required = true)
    private String type;

    /** No-arg constructor required for JSON deserialization. */
    public DeviceIdentifier() {}

    public DeviceIdentifier(String id, String type) {
        this.id = id;
        this.type = type;
    }

    public String getType() {
        return type;
    }

    /** Stores the device type with surrounding whitespace removed. */
    public void setType(String type) {
        this.type = type.trim();
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    /**
     * Value equality over {@code id} and {@code type} so identifiers can be
     * used reliably as map keys and in set membership checks. Previously
     * this class inherited reference equality from {@link Object}, which
     * made two identifiers for the same device compare unequal.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DeviceIdentifier)) {
            return false;
        }
        DeviceIdentifier that = (DeviceIdentifier) obj;
        return (id == null ? that.id == null : id.equals(that.id))
                && (type == null ? that.type == null : type.equals(that.type));
    }

    @Override
    public int hashCode() {
        int result = (id != null) ? id.hashCode() : 0;
        return 31 * result + ((type != null) ? type.hashCode() : 0);
    }

    @Override
    public String toString() {
        return "deviceId {" +
                "id='" + id + '\'' +
                ", type='" + type + '\'' +
                '}';
    }
}
| components/device-mgt/org.wso2.carbon.device.mgt.common/src/main/java/org/wso2/carbon/device/mgt/common/DeviceIdentifier.java | /*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.common;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
@ApiModel(value = "DeviceIdentifier", description = "This contains device details that is used to identify a device " +
"uniquely.")
public class DeviceIdentifier implements Serializable{
@ApiModelProperty(
name = "id",
value = "Identity of the device.",
required = true,
example = "123456")
@JsonProperty(value = "id", required = true)
private String id;
@ApiModelProperty(
name = "type",
value = "Type of the device.",
required = true,
example = "android")
@JsonProperty(value = "type", required = true)
private String type;
public DeviceIdentifier() {}
public DeviceIdentifier(String id, String type) {
this.id = id;
this.type = type;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type.toLowerCase();
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
@Override
public String toString() {
return "deviceId {" +
"id='" + id + '\'' +
", type='" + type + '\'' +
'}';
}
}
| Fix : Store device type names as it is
| components/device-mgt/org.wso2.carbon.device.mgt.common/src/main/java/org/wso2/carbon/device/mgt/common/DeviceIdentifier.java | Fix : Store device type names as it is | <ide><path>omponents/device-mgt/org.wso2.carbon.device.mgt.common/src/main/java/org/wso2/carbon/device/mgt/common/DeviceIdentifier.java
<ide> }
<ide>
<ide> public void setType(String type) {
<del> this.type = type.toLowerCase();
<add> this.type = type.trim();
<ide> }
<ide> public String getId() {
<ide> return id; |
|
Java | apache-2.0 | 6546b05d852d258d146114ebd0951bed30604ab9 | 0 | google/mug,google/mug | /*****************************************************************************
* ------------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*****************************************************************************/
package com.google.mu.util;
import static java.util.Objects.requireNonNull;
import java.io.Serializable;
import java.util.Optional;
/**
* A substring inside a string, providing easy access to substrings around it ({@link #before before()},
* {@link #after after()} or with the substring itself {@link #remove removed}, {@link #replaceWith replaced}
* etc.).
*
* <p>For example, to strip off the "http://" prefix from a uri string if existent: <pre>
* static String stripHttp(String uri) {
* return Substring.prefix("http://").removeFrom(uri);
* }
* </pre>
*
* To strip off either "http://" or "https://" prefix: <pre>
* static import com.google.mu.util.Substring.prefix;
*
* static String stripHttpOrHttps(String uri) {
* return prefix("http://").or(prefix("https://")).removeFrom(uri);
* }
* </pre>
*
* To strip off the suffix starting with a dash (-) character: <pre>
* static String stripDashSuffix(String str) {
* return Substring.last('-').andAfter().removeFrom(str);
* }
* </pre>
*
* To replace trailing "//" with "/": <pre>
* static String fixTrailingSlash(String str) {
* return Substring.suffix("//").replaceFrom(str, '/');
* }
* </pre>
*
* To extract the 'name' and 'value' from texts in the format of "name:value": <pre>
* String str = ...;
* Substring colon = Substring.first(':').in(str).orElseThrow(BadFormatException::new);
* String name = colon.before();
* String value = colon.after();
* </pre>
*
* @since 2.0
*/
public final class Substring {

  private final String context;
  private final int startIndex;
  private final int endIndex;

  private Substring(String context, int startIndex, int endIndex) {
    this.context = context;
    this.startIndex = startIndex;
    this.endIndex = endIndex;
  }

  /** Returns a {@link Pattern} that never matches any substring. */
  public static Pattern none() {
    return Constants.NONE;
  }

  /** Returns a {@link Pattern} that matches all strings entirely. */
  public static Pattern all() {
    return Constants.ALL;
  }

  /** Returns a {@code Pattern} that matches strings starting with {@code prefix}. */
  public static Pattern prefix(String prefix) {
    requireNonNull(prefix);
    return (SerializablePattern) str -> str.startsWith(prefix)
        ? Optional.of(new Substring(str, 0, prefix.length()))
        : Optional.empty();
  }

  /** Returns a {@code Pattern} that matches strings starting with {@code prefix}. */
  public static Pattern prefix(char prefix) {
    // No requireNonNull() here: a primitive char can never be null, so the
    // previous check was dead code (it only autoboxed the char).
    return (SerializablePattern) str -> str.length() > 0 && str.charAt(0) == prefix
        ? Optional.of(new Substring(str, 0, 1))
        : Optional.empty();
  }

  /** Returns a {@code Pattern} that matches strings ending with {@code suffix}. */
  public static Pattern suffix(String suffix) {
    requireNonNull(suffix);
    return (SerializablePattern) str -> str.endsWith(suffix)
        ? Optional.of(new Substring(str, str.length() - suffix.length(), str.length()))
        : Optional.empty();
  }

  /** Returns a {@code Pattern} that matches strings ending with {@code suffix}. */
  public static Pattern suffix(char suffix) {
    // No requireNonNull() here: a primitive char can never be null.
    return (SerializablePattern) str -> str.length() > 0 && str.charAt(str.length() - 1) == suffix
        ? Optional.of(new Substring(str, str.length() - 1, str.length()))
        : Optional.empty();
  }

  /** Returns a {@code Pattern} that matches the first occurrence of {@code c}. */
  public static Pattern first(char c) {
    return (SerializablePattern) str -> substring(str, str.indexOf(c), 1);
  }

  /** Returns a {@code Pattern} that matches the first occurrence of {@code snippet}. */
  public static Pattern first(String snippet) {
    requireNonNull(snippet);
    return (SerializablePattern) str -> substring(str, str.indexOf(snippet), snippet.length());
  }

  /**
   * Returns a {@code Pattern} that matches the first occurrence of {@code regexPattern}.
   *
   * <p>Unlike {@code str.replaceFirst(regexPattern, replacement)},
   * <pre>regex(regexPattern).replaceFrom(str, replacement)</pre> treats the {@code replacement} as a literal
   * string with no special handling of backslash (\) and dollar sign ($) characters.
   */
  public static Pattern regex(java.util.regex.Pattern regexPattern) {
    return regexGroup(regexPattern, 0);
  }

  /**
   * Returns a {@code Pattern} that matches the first occurrence of {@code regexPattern}.
   *
   * <p>Unlike {@code str.replaceFirst(regexPattern, replacement)},
   * <pre>regex(regexPattern).replaceFrom(str, replacement)</pre> treats the {@code replacement} as a literal
   * string with no special handling of backslash (\) and dollar sign ($) characters.
   *
   * <p>Because this method internally compiles {@code regexPattern}, it's more efficient to reuse the
   * returned {@link Pattern} object than calling {@code regex(regexPattern)} repetitively.
   */
  public static Pattern regex(String regexPattern) {
    return regex(java.util.regex.Pattern.compile(regexPattern));
  }

  /**
   * Returns a {@code Pattern} that matches capturing {@code group} of {@code regexPattern}.
   *
   * <p>The returned {@code Pattern} will throw {@link IndexOutOfBoundsException} when matched against
   * strings without the target {@code group}.
   */
  public static Pattern regexGroup(java.util.regex.Pattern regexPattern, int group) {
    requireNonNull(regexPattern);
    if (group < 0) throw new IllegalArgumentException("group cannot be negative: " + group);
    return (SerializablePattern) str -> {
      java.util.regex.Matcher matcher = regexPattern.matcher(str);
      if (matcher.find()) {
        return Optional.of(new Substring(str, matcher.start(group), matcher.end(group)));
      } else {
        return Optional.empty();
      }
    };
  }

  /**
   * Returns a {@code Pattern} that matches capturing {@code group} of {@code regexPattern}.
   *
   * <p>Unlike {@code str.replaceFirst(regexPattern, replacement)},
   * <pre>regexGroup(regexPattern, group).replaceFrom(str, replacement)</pre> treats the {@code replacement}
   * as a literal string with no special handling of backslash (\) and dollar sign ($) characters.
   *
   * <p>Because this method internally compiles {@code regexPattern}, it's more efficient to reuse the
   * returned {@link Pattern} object than calling {@code regexGroup(regexPattern, group)} repetitively.
   *
   * <p>The returned {@code Pattern} will throw {@link IndexOutOfBoundsException} when matched against
   * strings without the target {@code group}.
   */
  public static Pattern regexGroup(String regexPattern, int group) {
    return regexGroup(java.util.regex.Pattern.compile(regexPattern), group);
  }

  /** Returns a {@code Pattern} that matches the last occurrence of {@code c}. */
  public static Pattern last(char c) {
    return (SerializablePattern) str -> substring(str, str.lastIndexOf(c), 1);
  }

  /** Returns a {@code Pattern} that matches the last occurrence of {@code snippet}. */
  public static Pattern last(String snippet) {
    requireNonNull(snippet);
    return (SerializablePattern) str -> substring(str, str.lastIndexOf(snippet), snippet.length());
  }

  /**
   * Returns part before this substring.
   *
   * <p>{@link #before} and {@link #after} are almost always used together to split a string into
   * two parts. Prefer using {@link Pattern#andBefore} if you are trying to find a prefix ending
   * with a pattern, like: <pre>
   *   String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
   * </pre> or using {@link Pattern#andAfter} to find a suffix starting with a pattern: <pre>
   *   String commentRemoved = Substring.first("//").andAfter().removeFrom(line);
   * </pre>
   */
  public String before() {
    return context.substring(0, startIndex);
  }

  /**
   * Returns part after this substring.
   *
   * <p>{@link #before} and {@link #after} are almost always used together to split a string into
   * two parts. Prefer using {@link Pattern#andBefore} if you are trying to find a prefix ending
   * with a pattern, like: <pre>
   *   String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
   * </pre> or using {@link Pattern#andAfter} to find a suffix starting with a pattern: <pre>
   *   String commentRemoved = Substring.first("//").andAfter().removeFrom(line);
   * </pre>
   */
  public String after() {
    return context.substring(endIndex);
  }

  /**
   * Returns a new {@code Substring} instance that extends to the beginning of the
   * enclosing string.
   */
  Substring andBefore() {
    return new Substring(context, 0, endIndex);
  }

  /** Returns a new {@code Substring} instance that extends to the end of the enclosing string. */
  Substring andAfter() {
    return new Substring(context, startIndex, context.length());
  }

  /** Returns a new string with the substring removed. */
  public String remove() {
    if (endIndex == context.length()) {
      return before();
    } else if (startIndex == 0) {
      return after();
    } else {
      return before() + after();
    }
  }

  /** Returns a new string with {@code this} substring replaced by {@code replacement}. */
  public String replaceWith(char replacement) {
    return before() + replacement + after();
  }

  /** Returns a new string with {@code this} substring replaced by {@code replacement}. */
  public String replaceWith(CharSequence replacement) {
    requireNonNull(replacement);
    return before() + replacement + after();
  }

  /** Returns the starting index of this substring in the containing string. */
  public int index() {
    return startIndex;
  }

  /** Returns the length of this substring. */
  public int length() {
    return endIndex - startIndex;
  }

  /** Returns this substring. */
  @Override public String toString() {
    return context.substring(startIndex, endIndex);
  }

  @Override public int hashCode() {
    // Mix in the bounds so that distinct substrings of the same context
    // string do not all collide on the context's hash code (previously
    // only context.hashCode() was returned). Equal instances — same
    // context and same bounds — still hash equally, preserving the
    // equals/hashCode contract.
    return (context.hashCode() * 31 + startIndex) * 31 + endIndex;
  }

  /** Two {@code Substring} instances are equal if they are the same sub sequences of equal strings. */
  @Override public boolean equals(Object obj) {
    if (obj instanceof Substring) {
      Substring that = (Substring) obj;
      return startIndex == that.startIndex && endIndex == that.endIndex && context.equals(that.context);
    }
    return false;
  }

  /** A substring pattern that can be matched against a string to find substrings. */
  public interface Pattern {

    /** Finds the substring in {@code string} or returns {@code empty()} if not found. */
    Optional<Substring> in(String string);

    /**
     * Returns a new string with the substring matched by {@code this} removed. Returns {@code string} as is
     * if a substring is not found.
     */
    default String removeFrom(String string) {
      return in(string).map(Substring::remove).orElse(string);
    }

    /**
     * Returns a new string with the substring matched by {@code this} replaced by {@code replacement}.
     * Returns {@code string} as is if a substring is not found.
     */
    default String replaceFrom(String string, char replacement) {
      return in(string).map(sub -> sub.replaceWith(replacement)).orElse(string);
    }

    /**
     * Returns a new string with the substring matched by {@code this} replaced by {@code replacement}.
     * Returns {@code string} as is if a substring is not found.
     */
    default String replaceFrom(String string, CharSequence replacement) {
      requireNonNull(replacement);
      return in(string).map(sub -> sub.replaceWith(replacement)).orElse(string);
    }

    /**
     * Returns a {@code Pattern} that fall backs to using {@code that} if {@code this} fails to
     * match.
     */
    default Pattern or(Pattern that) {
      requireNonNull(that);
      return (SerializablePattern) str -> {
        Optional<Substring> substring = in(str);
        return substring.isPresent() ? substring : that.in(str);
      };
    }

    /**
     * Returns a new {@code Pattern} that will match strings using {@code this} pattern and then
     * extend the matched substring to the beginning of the string. For example: <pre>
     *   String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
     * </pre>
     */
    default Pattern andBefore() {
      return (SerializablePattern) str -> in(str).map(Substring::andBefore);
    }

    /**
     * Returns a new {@code Pattern} that will match strings using {@code this} pattern and then
     * extend the matched substring to the end of the string. For example: <pre>
     *   String commentRemoved = Substring.first("//").andAfter().removeFrom(line);
     * </pre>
     */
    default Pattern andAfter() {
      return (SerializablePattern) str -> in(str).map(Substring::andAfter);
    }
  }

  private static Optional<Substring> substring(String str, int index, int length) {
    return index >= 0 ? Optional.of(new Substring(str, index, index + length)) : Optional.empty();
  }

  private enum Constants implements Pattern {
    NONE {
      @Override public Optional<Substring> in(String s) {
        requireNonNull(s);
        return Optional.empty();
      }
    },
    ALL {
      @Override public Optional<Substring> in(String s) {
        return Optional.of(new Substring(s, 0, s.length()));
      }
    }
  }

  private interface SerializablePattern extends Pattern, Serializable {}
}
* ------------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*****************************************************************************/
package com.google.mu.util;
import static java.util.Objects.requireNonNull;
import java.io.Serializable;
import java.util.Optional;
/**
* A substring inside a string, providing easy access to substrings around it ({@link #before before()},
* {@link #after after()} or with the substring itself {@link #remove removed}, {@link #replaceWith replaced}
* etc.).
*
* <p>For example, to strip off the "http://" prefix from a uri string if existent: <pre>
* static String stripHttp(String uri) {
* return Substring.prefix("http://").removeFrom(uri);
* }
* </pre>
*
* To strip off either "http://" or "https://" prefix: <pre>
* static import com.google.mu.util.Substring.prefix;
*
* static String stripHttpOrHttps(String uri) {
* return prefix("http://").or(prefix("https://")).removeFrom(uri);
* }
* </pre>
*
* To strip off the suffix starting with a dash (-) character: <pre>
* static String stripDashSuffix(String str) {
* return Substring.last('-').andAfter().removeFrom(str);
* }
* </pre>
*
* To replace trailing "//" with "/": <pre>
* static String fixTrailingSlash(String str) {
* return Substring.suffix("//").replaceFrom(str, '/');
* }
* </pre>
*
* To extract the 'name' and 'value' from texts in the format of "name:value": <pre>
* String str = ...;
* Substring colon = Substring.first(':').in(str).orElseThrow(BadFormatException::new);
* String name = colon.before();
* String value = colon.after();
* </pre>
*
* @since 2.0
*/
public final class Substring {
private final String context;
private final int startIndex;
private final int endIndex;
private Substring(String context, int startIndex, int endIndex) {
this.context = context;
this.startIndex = startIndex;
this.endIndex = endIndex;
}
/** Returns a {@link Pattern} that never matches any substring. */
public static Pattern none() {
return Constants.NONE;
}
/** Returns a {@link Pattern} that matches all strings entirely. */
public static Pattern all() {
return Constants.ALL;
}
/** Returns a {@code Pattern} that matches strings starting with {@code prefix}. */
public static Pattern prefix(String prefix) {
requireNonNull(prefix);
return (SerializablePattern) str -> str.startsWith(prefix)
? Optional.of(new Substring(str, 0, prefix.length()))
: Optional.empty();
}
/** Returns a {@code Pattern} that matches strings starting with {@code prefix}. */
public static Pattern prefix(char prefix) {
requireNonNull(prefix);
return (SerializablePattern) str -> str.length() > 0 && str.charAt(0) == prefix
? Optional.of(new Substring(str, 0, 1))
: Optional.empty();
}
/** Returns a {@code Pattern} that matches strings ending with {@code suffix}. */
public static Pattern suffix(String suffix) {
requireNonNull(suffix);
return (SerializablePattern) str -> str.endsWith(suffix)
? Optional.of(new Substring(str, str.length() - suffix.length(), str.length()))
: Optional.empty();
}
/** Returns a {@code Pattern} that matches strings ending with {@code suffix}. */
public static Pattern suffix(char suffix) {
requireNonNull(suffix);
return (SerializablePattern) str -> str.length() > 0 && str.charAt(str.length() - 1) == suffix
? Optional.of(new Substring(str, str.length() - 1, str.length()))
: Optional.empty();
}
/** Returns a {@code Pattern} that matches the first occurrence of {@code c}. */
public static Pattern first(char c) {
return (SerializablePattern) str -> substring(str, str.indexOf(c), 1);
}
/** Returns a {@code Pattern} that matches the first occurrence of {@code snippet}. */
public static Pattern first(String snippet) {
requireNonNull(snippet);
return (SerializablePattern) str -> substring(str, str.indexOf(snippet), snippet.length());
}
/**
* Returns a {@code Pattern} that matches the first occurrence of {@code regexPattern}.
*
* <p>Unlike {@code str.replaceFirst(regexPattern, replacement)},
* <pre>regex(regexPattern).replaceFrom(str, replacement)</pre> treats the {@code replacement} as a literal
* string with no special handling of backslash (\) and dollar sign ($) characters.
*/
public static Pattern regex(java.util.regex.Pattern regexPattern) {
return regexGroup(regexPattern, 0);
}
/**
* Returns a {@code Pattern} that matches the first occurrence of {@code regexPattern}.
*
* <p>Unlike {@code str.replaceFirst(regexPattern, replacement)},
* <pre>regex(regexPattern).replaceFrom(str, replacement)</pre> treats the {@code replacement} as a literal
* string with no special handling of backslash (\) and dollar sign ($) characters.
*
* <p>Because this method internally compiles {@code regexPattern}, it's more efficient to reuse the
* returned {@link Pattern} object than calling {@code regex(regexPattern)} repetitively.
*/
public static Pattern regex(String regexPattern) {
return regex(java.util.regex.Pattern.compile(regexPattern));
}
/**
* Returns a {@code Pattern} that matches capturing {@code group} of {@code regexPattern}.
*
* <p>The returned {@code Pattern} will throw {@link IndexOutOfBoundsException} when matched against
* strings without the target {@code group}.
*/
public static Pattern regexGroup(java.util.regex.Pattern regexPattern, int group) {
requireNonNull(regexPattern);
if (group < 0) throw new IllegalArgumentException("group cannot be negative: " + group);
return (SerializablePattern) str -> {
java.util.regex.Matcher matcher = regexPattern.matcher(str);
if (matcher.find()) {
return Optional.of(new Substring(str, matcher.start(group), matcher.end(group)));
} else {
return Optional.empty();
}
};
}
/**
* Returns a {@code Pattern} that matches capturing {@code group} of {@code regexPattern}.
*
* <p>Unlike {@code str.replaceFirst(regexPattern, replacement)},
* <pre>regexGroup(regexPattern, group).replaceFrom(str, replacement)</pre> treats the {@code replacement}
* as a literal string with no special handling of backslash (\) and dollar sign ($) characters.
*
* <p>Because this method internally compiles {@code regexPattern}, it's more efficient to reuse the
* returned {@link Pattern} object than calling {@code regexGroup(regexPattern, group)} repetitively.
*
* <p>The returned {@code Pattern} will throw {@link IndexOutOfBoundsException} when matched against
* strings without the target {@code group}.
*/
public static Pattern regexGroup(String regexPattern, int group) {
return regexGroup(java.util.regex.Pattern.compile(regexPattern), group);
}
/** Returns a {@code Pattern} that matches the last occurrence of {@code c}. */
public static Pattern last(char c) {
return (SerializablePattern) str -> substring(str, str.lastIndexOf(c), 1);
}
/** Returns a {@code Pattern} that matches the last occurrence of {@code snippet}. */
public static Pattern last(String snippet) {
requireNonNull(snippet);
return (SerializablePattern) str -> substring(str, str.lastIndexOf(snippet), snippet.length());
}
/**
* Returns part before this substring.
*
* <p>{@link #before} and {@link #after} are almost always used together to split a string into
* two parts. Prefer to using {@link Pattern#andBefore} if you are trying to find a prefix ending
* with a pattern, like: <pre>
* String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
* </pre> or using {@link Pattern#andAfter} to find a suffix starting with a pattern: <pre>
* String commentRemoved = Substring.first("//").andAfter().removeFrom(line);
* </pre>
*/
public String before() {
return context.substring(0, startIndex);
}
/**
* Returns part after this substring.
*
* <p>{@link #before} and {@link #after} are almost always used together to split a string into
* two parts. Prefer to using {@link Pattern#andBefore} if you are trying to find a prefix ending
* with a pattern, like: <pre>
* String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
* </pre> or using {@link Pattern#andAfter} to find a suffix starting with a pattern: <pre>
* String commentRemoved = Substring.first("//").andAfter().removeFrom(line);
* </pre>
*/
public String after() {
return context.substring(endIndex);
}
/**
* Returns a new {@code Substring} instance that extends to the beginning of the
* enclosing string.
*/
Substring andBefore() {
return new Substring(context, 0, endIndex);
}
/** Returns a new {@code Substring} instance that extends to the end of the enclosing string. */
Substring andAfter() {
return new Substring(context, startIndex, context.length());
}
/** Returns a new string with the substring removed. */
public String remove() {
if (endIndex == context.length()) {
return before();
} else if (startIndex == 0) {
return after();
} else {
return before() + after();
}
}
/** Returns a new string with {@code this} substring replaced by {@code replacement}. */
public String replaceWith(char replacement) {
return before() + replacement + after();
}
/** Returns a new string with {@code this} substring replaced by {@code replacement}. */
public String replaceWith(CharSequence replacement) {
requireNonNull(replacement);
return before() + replacement + after();
}
/** Returns the starting index of this substring in the containing string. */
public int index() {
return startIndex;
}
/** Returns the length of this substring. */
public int length() {
return endIndex - startIndex;
}
/** Returns this substring. */
@Override public String toString() {
return context.substring(startIndex, endIndex);
}
@Override public int hashCode() {
return context.hashCode();
}
/** Two {@code Substring} instances are equal if they are the same sub sequences of equal strings. */
@Override public boolean equals(Object obj) {
if (obj instanceof Substring) {
Substring that = (Substring) obj;
return startIndex == that.startIndex && endIndex == that.endIndex && context.equals(that.context);
}
return false;
}
/** A substring pattern that can be matched against a string to find substrings. */
public interface Pattern {
/** Finds the substring in {@code string} or returns {@code empty()} if not found. */
Optional<Substring> in(String string);
/**
* Returns a new string with the substring matched by {@code this} removed. Returns {@code string} as is
* if a substring is not found.
*/
default String removeFrom(String string) {
return in(string).map(Substring::remove).orElse(string);
}
/**
* Returns a new string with the substring matched by {@code this} replaced by {@code replacement}.
* Returns {@code string} as is if a substring is not found.
*/
default String replaceFrom(String string, char replacement) {
return in(string).map(sub -> sub.replaceWith(replacement)).orElse(string);
}
/**
* Returns a new string with the substring matched by {@code this} replaced by {@code replacement}.
* Returns {@code string} as is if a substring is not found.
*/
default String replaceFrom(String string, CharSequence replacement) {
requireNonNull(replacement);
return in(string).map(sub -> sub.replaceWith(replacement)).orElse(string);
}
/**
* Returns a {@code Pattern} that fall backs to using {@code that} if {@code this} fails to
* match.
*/
default Pattern or(Pattern that) {
requireNonNull(that);
return (SerializablePattern) str -> {
Optional<Substring> substring = in(str);
return substring.isPresent() ? substring : that.in(str);
};
}
/**
* Returns a new {@code Pattern} that will match strings using {@code this} pattern and then
* extend the matched substring to the beginning of the string. For example: <pre>
* String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
* </pre>
*/
default Pattern andBefore() {
return (SerializablePattern) str -> in(str).map(Substring::andBefore);
}
/**
* Returns a new {@code Pattern} that will match strings using {@code this} pattern and then
* extend the matched substring to the end of the string. For example: <pre>
* String commentRemoved = Substring.first("//").andAfter().removeFrom(line);
* </pre>
*/
default Pattern andAfter() {
return (SerializablePattern) str -> in(str).map(Substring::andAfter);
}
}
private static Optional<Substring> substring(String str, int index, int length) {
return index >= 0 ? Optional.of(new Substring(str, index, index + length)) : Optional.empty();
}
private enum Constants implements Pattern {
NONE {
@Override public Optional<Substring> in(String s) {
requireNonNull(s);
return Optional.empty();
}
},
ALL {
@Override public Optional<Substring> in(String s) {
return Optional.of(new Substring(s, 0, s.length()));
}
}
}
private interface SerializablePattern extends Pattern, Serializable {}
} | before()/after() javadoc fix
| core/src/main/java/com/google/mu/util/Substring.java | before()/after() javadoc fix | <ide><path>ore/src/main/java/com/google/mu/util/Substring.java
<ide> * Returns part before this substring.
<ide> *
<ide> * <p>{@link #before} and {@link #after} are almost always used together to split a string into
<del> * two parts. Prefer to using {@link Pattern#andBefore} if you are trying to find a prefix ending
<add> * two parts. Prefer using {@link Pattern#andBefore} if you are trying to find a prefix ending
<ide> * with a pattern, like: <pre>
<ide> * String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
<ide> * </pre> or using {@link Pattern#andAfter} to find a suffix starting with a pattern: <pre>
<ide> * Returns part after this substring.
<ide> *
<ide> * <p>{@link #before} and {@link #after} are almost always used together to split a string into
<del> * two parts. Prefer to using {@link Pattern#andBefore} if you are trying to find a prefix ending
<add> * two parts. Prefer using {@link Pattern#andBefore} if you are trying to find a prefix ending
<ide> * with a pattern, like: <pre>
<ide> * String schemeStripped = Substring.first("://").andBefore().removeFrom(uri);
<ide> * </pre> or using {@link Pattern#andAfter} to find a suffix starting with a pattern: <pre> |
|
Java | mit | 2063be0c34c1f7cdf96c79094e83ac828341cdb1 | 0 | rthoth/SublAndroidDaemon | package sublandroid;
import sublandroid.messages.*;
import sublandroid.command.*;
import static java.lang.String.format;
import java.io.*;
import java.net.*;
import org.gradle.tooling.*;
import static com.alibaba.fastjson.JSON.parseObject;
import static com.alibaba.fastjson.JSON.writeJSONStringTo;
public class Connector implements AutoCloseable {
private static class Server implements Runnable, AutoCloseable {
private final Connector connector;
private final int port;
private ServerSocket serverSocket;
private Socket clientSocket;
private Writer clientWriter;
private Reader clientReader;
public Server(final Connector connector, final int port) {
this.connector = connector;
this.port = port;
}
@Override
public void close() {
IOUtils.close(serverSocket);
IOUtils.close(clientSocket);
IOUtils.close(clientWriter);
IOUtils.close(clientReader);
}
@Override
public void run() {
try {
serverSocket = new ServerSocket(port, 1, InetAddress.getLoopbackAddress());
while (true) {
connector.println("SublAndroid listen @ %d", serverSocket.getLocalPort());
clientSocket = serverSocket.accept();
connector.println("A new sublandroid developer!");
clientReader = new InputStreamReader(clientSocket.getInputStream());
clientWriter = new OutputStreamWriter(clientSocket.getOutputStream());
connector.listen(clientReader, clientWriter);
}
} catch (Throwable throwable) {
throwable.printStackTrace();
}
}
}
protected static void println(String msg, Object... args) {
System.out.println(format(msg, args));
}
public static void main(String args[]) {
if (args.length == 0)
throw new IllegalArgumentException("Need a gradle project folder");
try {
boolean debug = false;
if (args.length > 2)
debug = "debug".equals(args[2]);
final Connector connector = new Connector(args[0], debug);
int port = 0;
if (args.length > 1)
port = Integer.parseInt(args[1]);
connector.listen(port);
} catch (Throwable throwable) {
throwable.printStackTrace();
}
}
protected ProjectConnection projectConnection = null;
private BufferedReader reader = null;
private Server server = null;
private Thread serverThread = null;
private BufferedWriter writer = null;
public Connector(String file) throws IOException {
this(file, false);
}
public Connector(String file, boolean debug) throws IOException {
if (file == null)
throw new NullPointerException("File Path is null");
defineDebug(debug);
fromDirectory(new File(file));
}
public Connector(File file, boolean debug) throws IOException {
if (file == null)
throw new NullPointerException("File is null");
defineDebug(debug);
fromDirectory(file);
}
@Override
public void close() {
println("Closing...");
if (server != null)
server.close();
projectConnection.close();
IOUtils.close(reader);
IOUtils.close(writer);
println("Closed");
}
private void defineDebug(boolean debug) {
if (debug) {
try {
File outFile = new File(System.getProperty("java.io.tmpdir"), "sublandroid.out.log");
File errFile = new File(System.getProperty("java.io.tmpdir"), "sublandroid.err.log");
System.setOut(new PrintStream(new FileOutputStream(outFile, true)));
System.setErr(new PrintStream(new FileOutputStream(errFile, true)));
} catch (FileNotFoundException ex) {
}
}
}
private void execute(final Command command, final MCommand mCommand) {
Message message = null;
boolean success = true;
try {
println("Trying %s", mCommand.command);
message = command.execute(mCommand, projectConnection);
println("Executed %s", mCommand.command);
} catch (Throwable throwable) {
success = false;
message = new MFailure(throwable);
}
response(success, message);
}
private void fromDirectory(File directory) throws IOException {
if (!directory.isDirectory())
throw new IOException(format("%s must be a directory", directory.getCanonicalPath()));
try {
projectConnection = GradleConnector.newConnector().forProjectDirectory(directory).connect();
} catch (RuntimeException exception) {
println(format("Trying start gradle at %s", directory.getCanonicalPath()));
}
}
public void response(boolean success, Message message) {
try {
if (success)
writer.write('S');
else
writer.write('E');
writeJSONStringTo(message, writer);
writer.write('\n');
writer.flush();
} catch (Throwable throwable) {
throw new Error(throwable);
}
}
public synchronized void listen(final int port) {
if (server != null)
throw new IllegalStateException();
server = new Server(this, port);
serverThread = new Thread(server, "SublAndroidListener");
serverThread.start();
}
public synchronized void listen(Reader reader, Writer writer) throws IOException {
this.reader = (reader instanceof BufferedReader) ? (BufferedReader) reader : new BufferedReader(reader);
this.writer = (writer instanceof BufferedWriter) ? (BufferedWriter) writer : new BufferedWriter(writer);
while(true) {
try {
run(this.reader.readLine());
} catch (Throwable throwable) {
throw throwable;
}
}
}
private void run(final String line) throws IOException {
final MCommand mCommand = parseObject(line, MCommand.class);
println("Searching %s", mCommand.command);
Command command = null;
switch(mCommand.command) {
case Hello.COMMAND:
command = new Hello();
break;
case ShowTasks.COMMAND:
command = new ShowTasks();
break;
}
if (command != null)
execute(command, mCommand);
else
response(false, new MFailure(format("Command %s not found", mCommand.command), "CommandNotFoundException"));
}
} | src/main/java/sublandroid/Connector.java | package sublandroid;
import sublandroid.messages.*;
import sublandroid.command.*;
import static java.lang.String.format;
import java.io.*;
import java.net.*;
import org.gradle.tooling.*;
import static com.alibaba.fastjson.JSON.parseObject;
import static com.alibaba.fastjson.JSON.writeJSONStringTo;
public class Connector implements AutoCloseable {
private static class Server implements Runnable, AutoCloseable {
private final Connector connector;
private final int port;
private ServerSocket serverSocket;
private Socket clientSocket;
private Writer clientWriter;
private Reader clientReader;
public Server(final Connector connector, final int port) {
this.connector = connector;
this.port = port;
}
@Override
public void close() {
IOUtils.close(serverSocket);
IOUtils.close(clientSocket);
IOUtils.close(clientWriter);
IOUtils.close(clientReader);
}
@Override
public void run() {
try {
serverSocket = new ServerSocket(port, 1, InetAddress.getLoopbackAddress());
while (true) {
connector.println("SublAndroid listen @ %d", serverSocket.getLocalPort());
clientSocket = serverSocket.accept();
connector.println("A new sublandroid developer!");
clientReader = new InputStreamReader(clientSocket.getInputStream());
clientWriter = new OutputStreamWriter(clientSocket.getOutputStream());
connector.listen(clientReader, clientWriter);
}
} catch (Throwable throwable) {
throwable.printStackTrace();
}
}
}
protected static void println(String msg, Object... args) {
System.out.println(format(msg, args));
}
protected final String DEBUG_PREFIX = "sublandroid_";
protected final String DEBUG_SUFIX = ".log";
public static void main(String args[]) {
if (args.length == 0)
throw new IllegalArgumentException("Need a gradle project folder");
try {
boolean debug = false;
if (args.length > 2)
debug = "debug".equals(args[2]);
final Connector connector = new Connector(args[0], debug);
int port = 0;
if (args.length > 1)
port = Integer.parseInt(args[1]);
connector.listen(port);
} catch (Throwable throwable) {
throwable.printStackTrace();
}
}
protected ProjectConnection projectConnection = null;
private BufferedReader reader = null;
private Server server = null;
private Thread serverThread = null;
private BufferedWriter writer = null;
public Connector(String file) throws IOException {
this(file, false);
}
public Connector(String file, boolean debug) throws IOException {
if (file == null)
throw new NullPointerException("File Path is null");
defineDebug(debug);
fromDirectory(new File(file));
}
public Connector(File file, boolean debug) throws IOException {
if (file == null)
throw new NullPointerException("File is null");
defineDebug(debug);
fromDirectory(file);
}
@Override
public void close() {
println("Closing...");
if (server != null)
server.close();
projectConnection.close();
IOUtils.close(reader);
IOUtils.close(writer);
println("Closed");
}
private void defineDebug(boolean debug) {
if (debug) {
try {
File outFile = new File(System.getProperty("java.io.tmpdir"), "sublandroid.out.log");
File errFile = new File(System.getProperty("java.io.tmpdir"), "sublandroid.err.log");
System.setOut(new PrintStream(new FileOutputStream(outFile, true)));
System.setErr(new PrintStream(new FileOutputStream(errFile, true)));
} catch (FileNotFoundException ex) {
}
}
}
private void execute(final Command command, final MCommand mCommand) {
Message message = null;
boolean success = true;
try {
println("Trying %s", mCommand.command);
message = command.execute(mCommand, projectConnection);
println("Executed %s", mCommand.command);
} catch (Throwable throwable) {
success = false;
message = new MFailure(throwable);
}
response(success, message);
}
private void fromDirectory(File directory) throws IOException {
if (!directory.isDirectory())
throw new IOException(format("%s must be a directory", directory.getCanonicalPath()));
try {
projectConnection = GradleConnector.newConnector().forProjectDirectory(directory).connect();
} catch (RuntimeException exception) {
println(format("Trying start gradle at %s", directory.getCanonicalPath()));
}
}
public void response(boolean success, Message message) {
try {
if (success)
writer.write('S');
else
writer.write('E');
writeJSONStringTo(message, writer);
writer.write('\n');
writer.flush();
} catch (Throwable throwable) {
throw new Error(throwable);
}
}
public synchronized void listen(final int port) {
if (server != null)
throw new IllegalStateException();
server = new Server(this, port);
serverThread = new Thread(server, "SublAndroidListener");
serverThread.start();
}
public synchronized void listen(Reader reader, Writer writer) throws IOException {
this.reader = (reader instanceof BufferedReader) ? (BufferedReader) reader : new BufferedReader(reader);
this.writer = (writer instanceof BufferedWriter) ? (BufferedWriter) writer : new BufferedWriter(writer);
while(true) {
try {
run(this.reader.readLine());
} catch (Throwable throwable) {
throw throwable;
}
}
}
private void run(final String line) throws IOException {
final MCommand mCommand = parseObject(line, MCommand.class);
println("Searching %s", mCommand.command);
Command command = null;
switch(mCommand.command) {
case Hello.COMMAND:
command = new Hello();
break;
case ShowTasks.COMMAND:
command = new ShowTasks();
break;
}
if (command != null)
execute(command, mCommand);
else
response(false, new MFailure(format("Command %s not found", mCommand.command), "CommandNotFoundException"));
}
} | Remove unused symbols
| src/main/java/sublandroid/Connector.java | Remove unused symbols | <ide><path>rc/main/java/sublandroid/Connector.java
<ide> System.out.println(format(msg, args));
<ide> }
<ide>
<del> protected final String DEBUG_PREFIX = "sublandroid_";
<del> protected final String DEBUG_SUFIX = ".log";
<del>
<ide> public static void main(String args[]) {
<ide> if (args.length == 0)
<ide> throw new IllegalArgumentException("Need a gradle project folder"); |
|
Java | mit | ba41233aad5a5b5dc9a9b6791491673f0fc37e2a | 0 | McJty/RFTools | package mcjty.rftools.blocks.crafter;
import mcjty.lib.container.*;
import mcjty.rftools.items.storage.StorageFilterItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class CrafterContainer extends GenericContainer {
public static final String CONTAINER_INVENTORY = "container";
public static final int SLOT_CRAFTINPUT = 0;
public static final int SLOT_CRAFTOUTPUT = 9;
public static final int SLOT_BUFFER = 10;
public static final int BUFFER_SIZE = (13*2);
public static final int SLOT_BUFFEROUT = SLOT_BUFFER + BUFFER_SIZE;
public static final int BUFFEROUT_SIZE = 4;
public static final int SLOT_FILTER_MODULE = SLOT_BUFFEROUT + BUFFEROUT_SIZE;
private final IInventory crafterBaseTE;
public static final ContainerFactory factory = new ContainerFactory() {
@Override
protected void setup() {
addSlotBox(new SlotDefinition(SlotType.SLOT_GHOST), CONTAINER_INVENTORY, SLOT_CRAFTINPUT, 193, 7, 3, 18, 3, 18);
addSlot(new SlotDefinition(SlotType.SLOT_GHOSTOUT), CONTAINER_INVENTORY, SLOT_CRAFTOUTPUT, 193, 65);
addSlotBox(new SlotDefinition(SlotType.SLOT_INPUT), CONTAINER_INVENTORY, SLOT_BUFFER, 13, 97, 13, 18, 2, 18);
addSlotBox(new SlotDefinition(SlotType.SLOT_OUTPUT), CONTAINER_INVENTORY, SLOT_BUFFEROUT, 31, 142, 2, 18, 2, 18);
addSlot(new SlotDefinition(SlotType.SLOT_SPECIFICITEM, StorageFilterItem.class), CONTAINER_INVENTORY, SLOT_FILTER_MODULE, 157, 65);
layoutPlayerInventorySlots(85, 142);
}
};
public CrafterContainer(EntityPlayer player, IInventory containerInventory) {
super(factory);
this.crafterBaseTE = containerInventory;
addInventory(CONTAINER_INVENTORY, containerInventory);
addInventory(ContainerFactory.CONTAINER_PLAYER, player.inventory);
generateSlots();
}
@Override
protected Slot createSlot(SlotFactory slotFactory, IInventory inventory, int index, int x, int y, SlotType slotType) {
if (index >= SLOT_BUFFER && index < SLOT_BUFFEROUT && slotType == SlotType.SLOT_INPUT) {
return new BaseSlot(inventory, index, x, y) {
@Override
public boolean isItemValid(ItemStack stack) {
if (!crafterBaseTE.isItemValidForSlot(getSlotIndex(), stack)) {
return false;
}
return super.isItemValid(stack);
}
};
}
return super.createSlot(slotFactory, inventory, index, x, y, slotType);
}
}
| src/main/java/mcjty/rftools/blocks/crafter/CrafterContainer.java | package mcjty.rftools.blocks.crafter;
import mcjty.lib.container.*;
import mcjty.rftools.items.storage.StorageFilterItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class CrafterContainer extends GenericContainer {
public static final String CONTAINER_INVENTORY = "container";
public static final int SLOT_CRAFTINPUT = 0;
public static final int SLOT_CRAFTOUTPUT = 9;
public static final int SLOT_BUFFER = 10;
public static final int BUFFER_SIZE = (13*2);
public static final int SLOT_BUFFEROUT = SLOT_BUFFER + BUFFER_SIZE;
public static final int BUFFEROUT_SIZE = 4;
public static final int SLOT_FILTER_MODULE = SLOT_BUFFEROUT + BUFFEROUT_SIZE;
private final IInventory crafterBaseTE;
public static final ContainerFactory factory = new ContainerFactory() {
@Override
protected void setup() {
addSlotBox(new SlotDefinition(SlotType.SLOT_GHOST), CONTAINER_INVENTORY, SLOT_CRAFTINPUT, 193, 7, 3, 18, 3, 18);
addSlot(new SlotDefinition(SlotType.SLOT_GHOSTOUT), CONTAINER_INVENTORY, SLOT_CRAFTOUTPUT, 193, 65);
addSlotBox(new SlotDefinition(SlotType.SLOT_INPUT), CONTAINER_INVENTORY, SLOT_BUFFER, 12, 97, 13, 18, 2, 18);
addSlotBox(new SlotDefinition(SlotType.SLOT_OUTPUT), CONTAINER_INVENTORY, SLOT_BUFFEROUT, 31, 142, 2, 18, 2, 18);
addSlot(new SlotDefinition(SlotType.SLOT_SPECIFICITEM, StorageFilterItem.class), CONTAINER_INVENTORY, SLOT_FILTER_MODULE, 157, 65);
layoutPlayerInventorySlots(85, 142);
}
};
public CrafterContainer(EntityPlayer player, IInventory containerInventory) {
super(factory);
this.crafterBaseTE = containerInventory;
addInventory(CONTAINER_INVENTORY, containerInventory);
addInventory(ContainerFactory.CONTAINER_PLAYER, player.inventory);
generateSlots();
}
@Override
protected Slot createSlot(SlotFactory slotFactory, IInventory inventory, int index, int x, int y, SlotType slotType) {
if (index >= SLOT_BUFFER && index < SLOT_BUFFEROUT && slotType == SlotType.SLOT_INPUT) {
return new BaseSlot(inventory, index, x, y) {
@Override
public boolean isItemValid(ItemStack stack) {
if (!crafterBaseTE.isItemValidForSlot(getSlotIndex(), stack)) {
return false;
}
return super.isItemValid(stack);
}
};
}
return super.createSlot(slotFactory, inventory, index, x, y, slotType);
}
}
| Fixed 1 pixel wrong offset for the crafter input slots
| src/main/java/mcjty/rftools/blocks/crafter/CrafterContainer.java | Fixed 1 pixel wrong offset for the crafter input slots | <ide><path>rc/main/java/mcjty/rftools/blocks/crafter/CrafterContainer.java
<ide> protected void setup() {
<ide> addSlotBox(new SlotDefinition(SlotType.SLOT_GHOST), CONTAINER_INVENTORY, SLOT_CRAFTINPUT, 193, 7, 3, 18, 3, 18);
<ide> addSlot(new SlotDefinition(SlotType.SLOT_GHOSTOUT), CONTAINER_INVENTORY, SLOT_CRAFTOUTPUT, 193, 65);
<del> addSlotBox(new SlotDefinition(SlotType.SLOT_INPUT), CONTAINER_INVENTORY, SLOT_BUFFER, 12, 97, 13, 18, 2, 18);
<add> addSlotBox(new SlotDefinition(SlotType.SLOT_INPUT), CONTAINER_INVENTORY, SLOT_BUFFER, 13, 97, 13, 18, 2, 18);
<ide> addSlotBox(new SlotDefinition(SlotType.SLOT_OUTPUT), CONTAINER_INVENTORY, SLOT_BUFFEROUT, 31, 142, 2, 18, 2, 18);
<ide>
<ide> addSlot(new SlotDefinition(SlotType.SLOT_SPECIFICITEM, StorageFilterItem.class), CONTAINER_INVENTORY, SLOT_FILTER_MODULE, 157, 65); |
|
Java | apache-2.0 | 627c7ee4dc51a73fa1f5138b4494142562e68429 | 0 | waprin/cloud-bigtable-client,ceocoder/cloud-bigtable-client,sduskis/cloud-bigtable-client,rameshdharan/cloud-bigtable-client,googleapis/java-bigtable-hbase,derjust/cloud-bigtable-client,dmmcerlean/cloud-bigtable-client,kevinsi4508/cloud-bigtable-client,googleapis/java-bigtable-hbase,sduskis/cloud-bigtable-client,rameshdharan/cloud-bigtable-client,agibsonccc/cloud-bigtable-client,kevinsi4508/cloud-bigtable-client,googleapis/java-bigtable-hbase | package com.google.cloud.bigtable.hbase;
import io.grpc.Status;
import io.grpc.Status.OperationRuntimeException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.Executors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.BigtableConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import com.google.bigtable.admin.cluster.v1.Cluster;
import com.google.bigtable.admin.cluster.v1.CreateClusterRequest;
import com.google.bigtable.admin.cluster.v1.DeleteClusterRequest;
import com.google.bigtable.admin.cluster.v1.GetClusterRequest;
import com.google.bigtable.admin.cluster.v1.ListClustersRequest;
import com.google.bigtable.admin.cluster.v1.ListZonesRequest;
import com.google.bigtable.admin.cluster.v1.ListZonesResponse;
import com.google.bigtable.admin.cluster.v1.Zone;
import com.google.cloud.hadoop.hbase.BigtableClusterAdminClient;
import com.google.cloud.hadoop.hbase.BigtableClusterAdminGrpcClient;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.google.longrunning.GetOperationRequest;
import com.google.longrunning.Operation;
/**
* Tests the Cluster API.
*/
public class TestClusterAPI {
private static final int MAX_WAIT_SECONDS = 20;
private static final String TEST_CLUSTER_ID = "test-cluster-api";
public static final byte[] COLUMN_FAMILY = Bytes.toBytes("test_family");
@Test
public void setup() throws IOException {
String shouldTest = System.getProperty("bigtable.test.cluster.api");
if (!"true".equals(shouldTest)) {
return;
}
Configuration config = HBaseConfiguration.create();
String extraResources = System.getProperty("bigtable.test.extra.resources");
if (extraResources == null) {
Assert.fail("Please set bigtable.test.extra.resources");
}
InputStream resourceStream =
TestClusterAPI.class.getClassLoader().getResourceAsStream(extraResources);
if (resourceStream == null) {
Assert.fail(extraResources + " does not exist");
}
config.addResource(resourceStream);
BigtableOptions bigtableOptions = BigtableOptionsFactory.fromConfiguration(config);
BigtableClusterAdminClient client = createClusterAdminStub(bigtableOptions);
String projectId = bigtableOptions.getProjectId();
List<Cluster> clusters = getClusters(client, projectId);
// cleanup any old clusters
for (Cluster cluster : clusters) {
if (cluster.getName().contains(TEST_CLUSTER_ID)) {
dropCluster(client, cluster.getName());
}
}
List<Zone> zoneList = getZones(client, projectId);
String fullyQualifiedZoneName = selectZone(zoneList);
String clusterId = fullyQualifiedZoneName + "/clusters/" + TEST_CLUSTER_ID;
Cluster cluster = createACluster(client, fullyQualifiedZoneName, TEST_CLUSTER_ID);
waitForOperation(client, cluster.getCurrentOperation().getName(), MAX_WAIT_SECONDS);
Configuration newConfig = newConfiguration(config, clusterId);
TableName autoDeletedTableName =
TableName.valueOf("auto-deleted-" + UUID.randomUUID().toString());
try (Connection connection = new BigtableConnection(newConfig);
Admin admin = connection.getAdmin()) {
countTables(admin, 0);
createTable(admin, autoDeletedTableName);
countTables(admin, 1);
TableName tableToDelete = TableName.valueOf("test_table-" + UUID.randomUUID().toString());
createTable(admin, tableToDelete);
countTables(admin, 2);
try (Table t = connection.getTable(tableToDelete)) {
doPutGetDelete(t);
}
dropTable(connection, tableToDelete);
countTables(admin, 1);
} finally {
dropCluster(client, clusterId);
}
}
private void countTables(Admin admin, int expectedCount) throws IOException {
TableName[] tables = admin.listTableNames();
int actualCount = tables.length;
Assert.assertEquals(String.format("Got %d tables, expected %d. The tables: %s", actualCount,
expectedCount, Arrays.asList(tables)), expectedCount, actualCount);
}
private Cluster getCluster(BigtableClusterAdminClient client, String clusterName) {
GetClusterRequest request = GetClusterRequest.newBuilder().setName(clusterName).build();
try {
Cluster response = client.getCluster(request);
return response;
} catch (UncheckedExecutionException e) {
if (e.getCause() != null && e.getCause() instanceof OperationRuntimeException) {
Status status = ((OperationRuntimeException) e.getCause()).getStatus();
if (status.getCode() == Status.NOT_FOUND.getCode()) {
return null;
}
}
e.printStackTrace();
throw e;
}
}
private void waitForOperation(BigtableClusterAdminClient client, String operationName,
int maxSeconds) {
GetOperationRequest request = GetOperationRequest.newBuilder().setName(operationName).build();
for (int i = 0; i < maxSeconds; i++) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
Operation response = client.getOperation(request);
if (response.getError() != null) {
return;
}
}
throw new IllegalStateException(String.format(
"Waited %d seconds and operation was not complete", maxSeconds));
}
private BigtableClusterAdminClient createClusterAdminStub(BigtableOptions bigtableOptions)
throws IOException {
return BigtableClusterAdminGrpcClient.createClient(
bigtableOptions.getClusterAdminTransportOptions(), bigtableOptions.getChannelOptions(),
Executors.newFixedThreadPool(10));
}
private List<Zone> getZones(BigtableClusterAdminClient client, String projectId) {
ListZonesResponse zones =
client.listZones(ListZonesRequest.newBuilder().setName("projects/" + projectId).build());
List<Zone> zoneList = zones.getZonesList();
Assert.assertTrue("Zones must exist", !zoneList.isEmpty());
return zoneList;
}
private String selectZone(List<Zone> zoneList) {
int zoneNumber = (int) (zoneList.size() * Math.random());
return zoneList.get(zoneNumber).getName().replaceFirst("^/", "");
}
private Cluster createACluster(BigtableClusterAdminClient client, String zoneName,
String clusterId) {
Cluster cluster = Cluster.newBuilder()
.setDisplayName(clusterId)
.setServeNodes(3)
.build();
CreateClusterRequest request = CreateClusterRequest.newBuilder()
.setName(zoneName)
.setClusterId(clusterId)
.setCluster(cluster)
.build();
return client.createCluster(request);
}
private List<Cluster> getClusters(BigtableClusterAdminClient client, String projectId) {
ListClustersRequest request =
ListClustersRequest.newBuilder().setName("projects/" + projectId).build();
return client.listClusters(request).getClustersList();
}
private Configuration newConfiguration(Configuration base, String fullyQualifiedClusterId) {
Configuration newConfig = new Configuration(base);
String zone = fullyQualifiedClusterId.replaceFirst(".*/zones/([^/]+)/.*", "$1");
String cluster = fullyQualifiedClusterId.replaceFirst(".*/clusters/([^/]+)", "$1");
newConfig.set(BigtableOptionsFactory.ZONE_KEY, zone);
newConfig.set(BigtableOptionsFactory.CLUSTER_KEY, cluster);
return newConfig;
}
private void createTable(Admin admin, TableName tableName) throws IOException {
HTableDescriptor descriptor = new HTableDescriptor(tableName);
descriptor.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
admin.createTable(descriptor);
Assert.assertTrue("Table does not exist", admin.tableExists(tableName));
}
DataGenerationHelper dataHelper = new DataGenerationHelper();
private void doPutGetDelete(Table table) throws IOException {
testIncrement(dataHelper, table);
testCheckAndMutate(dataHelper, table);
}
private void testIncrement(DataGenerationHelper dataHelper, Table table)
throws IOException {
byte[] rowKey = dataHelper.randomData("testrow-");
byte[] qual1 = dataHelper.randomData("qual-");
long value1 = new Random().nextInt();
long incr1 = new Random().nextInt();
byte[] qual2 = dataHelper.randomData("qual-");
long value2 = new Random().nextInt();
long incr2 = new Random().nextInt();
// Put and increment
Put put = new Put(rowKey);
put.addColumn(COLUMN_FAMILY, qual1, Bytes.toBytes(value1));
put.addColumn(COLUMN_FAMILY, qual2, Bytes.toBytes(value2));
table.put(put);
Increment increment = new Increment(rowKey);
increment.addColumn(COLUMN_FAMILY, qual1, incr1);
increment.addColumn(COLUMN_FAMILY, qual2, incr2);
Result result = table.increment(increment);
Assert.assertEquals(2, result.size());
Assert.assertEquals("Value1=" + value1 + " & Incr1=" + incr1, value1 + incr1,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual1))));
Assert.assertEquals("Value2=" + value2 + " & Incr2=" + incr2, value2 + incr2,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual2))));
// Double-check values with a Get
Get get = new Get(rowKey);
get.setMaxVersions(5);
result = table.get(get);
Assert.assertEquals("Expected four results, two for each column", 4, result.size());
Assert.assertEquals("Value1=" + value1 + " & Incr1=" + incr1, value1 + incr1,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual1))));
Assert.assertEquals("Value2=" + value2 + " & Incr2=" + incr2, value2 + incr2,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual2))));
}
private void testCheckAndMutate(DataGenerationHelper dataHelper, Table table) throws IOException {
byte[] rowKey = dataHelper.randomData("rowKey-");
byte[] qual = dataHelper.randomData("qualifier-");
byte[] value1 = dataHelper.randomData("value-");
byte[] value2 = dataHelper.randomData("value-");
// Put with a bad check on a null value, then try with a good one
Put put = new Put(rowKey).addColumn(COLUMN_FAMILY, qual, value1);
boolean success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, value2, put);
Assert.assertFalse("Column doesn't exist. Should fail.", success);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, null, put);
Assert.assertTrue(success);
// Fail on null check, now there's a value there
put = new Put(rowKey).addColumn(COLUMN_FAMILY, qual, value2);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, null, put);
Assert.assertFalse("Null check should fail", success);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, value2, put);
Assert.assertFalse("Wrong value should fail", success);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, value1, put);
Assert.assertTrue(success);
// Check results
Get get = new Get(rowKey);
get.setMaxVersions(5);
Result result = table.get(get);
Assert.assertEquals("Should be two results", 2, result.size());
List<Cell> cells = result.getColumnCells(COLUMN_FAMILY, qual);
Assert.assertArrayEquals(value2, CellUtil.cloneValue(cells.get(0)));
Assert.assertArrayEquals(value1, CellUtil.cloneValue(cells.get(1)));
}
private void dropTable(Connection connection, TableName tableName) throws IOException {
try (Admin admin = connection.getAdmin()) {
admin.disableTable(tableName);
admin.deleteTable(tableName);
Assert.assertFalse(admin.tableExists(tableName));
}
}
private void dropCluster(BigtableClusterAdminClient client, String fullyQualifiedClusterId) {
DeleteClusterRequest request =
DeleteClusterRequest.newBuilder().setName(fullyQualifiedClusterId).build();
client.deleteCluster(request);
Assert.assertNull(getCluster(client, fullyQualifiedClusterId));
}
}
| bigtable-hbase/src/test/java/com/google/cloud/bigtable/hbase/TestClusterAPI.java | package com.google.cloud.bigtable.hbase;
import io.grpc.Status;
import io.grpc.Status.OperationRuntimeException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.Executors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.BigtableConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import com.google.bigtable.admin.cluster.v1.Cluster;
import com.google.bigtable.admin.cluster.v1.CreateClusterRequest;
import com.google.bigtable.admin.cluster.v1.DeleteClusterRequest;
import com.google.bigtable.admin.cluster.v1.GetClusterRequest;
import com.google.bigtable.admin.cluster.v1.ListClustersRequest;
import com.google.bigtable.admin.cluster.v1.ListZonesRequest;
import com.google.bigtable.admin.cluster.v1.ListZonesResponse;
import com.google.bigtable.admin.cluster.v1.Zone;
import com.google.cloud.hadoop.hbase.BigtableClusterAdminClient;
import com.google.cloud.hadoop.hbase.BigtableClusterAdminGrpcClient;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.google.longrunning.GetOperationRequest;
import com.google.longrunning.Operation;
/**
* Tests the Cluster API.
*/
public class TestClusterAPI {
private static final int MAX_WAIT_SECONDS = 20;
private static final String TEST_CLUSTER_ID = "test-cluster-api";
public static final byte[] COLUMN_FAMILY = Bytes.toBytes("test_family");
@Test
public void setup() throws IOException {
String shouldTest = System.getProperty("bigtable.test.cluster.api");
if (!"true".equals(shouldTest)) {
return;
}
Configuration config = HBaseConfiguration.create();
String extraResources = System.getProperty("bigtable.test.extra.resources");
if (extraResources == null) {
Assert.fail("Please set bigtable.test.extra.resources");
}
InputStream resourceStream =
TestClusterAPI.class.getClassLoader().getResourceAsStream(extraResources);
if (resourceStream == null) {
Assert.fail(extraResources + " does not exist");
}
config.addResource(resourceStream);
BigtableOptions bigtableOptions = BigtableOptionsFactory.fromConfiguration(config);
BigtableClusterAdminClient client = createClusterAdminStub(bigtableOptions);
String projectId = bigtableOptions.getProjectId();
List<Cluster> clusters = getClusters(client, projectId);
// cleanup any old clusters
boolean createCluster = true;
for (Cluster cluster : clusters) {
if (cluster.getName().contains(TEST_CLUSTER_ID)) {
dropCluster(client, cluster.getName());
// createCluster = false;
}
}
List<Zone> zoneList = getZones(client, projectId);
String fullyQualifiedZoneName = selectZone(zoneList);
String clusterId = fullyQualifiedZoneName + "/clusters/" + TEST_CLUSTER_ID;
if (createCluster) {
Cluster cluster = createACluster(client, fullyQualifiedZoneName, TEST_CLUSTER_ID);
waitForOperation(client, cluster.getCurrentOperation().getName(), MAX_WAIT_SECONDS);
}
Configuration newConfig = newConfiguration(config, clusterId);
TableName autoDeletedTableName =
TableName.valueOf("auto-deleted-" + UUID.randomUUID().toString());
try (Connection connection = new BigtableConnection(newConfig);
Admin admin = connection.getAdmin()) {
countTables(admin, 0);
createTable(admin, autoDeletedTableName);
countTables(admin, 1);
TableName tableToDelete = TableName.valueOf("test_table-" + UUID.randomUUID().toString());
createTable(admin, tableToDelete);
countTables(admin, 2);
try (Table t = connection.getTable(tableToDelete)) {
doPutGetDelete(t);
}
dropTable(connection, tableToDelete);
countTables(admin, 1);
} finally {
dropCluster(client, clusterId);
}
}
private void countTables(Admin admin, int expectedCount) throws IOException {
TableName[] tables = admin.listTableNames();
int actualCount = tables.length;
Assert.assertEquals(String.format("Got %d tables, expected %d. The tables: %s", actualCount,
expectedCount, Arrays.asList(tables)), expectedCount, actualCount);
}
private Cluster getCluster(BigtableClusterAdminClient client, String clusterName) {
GetClusterRequest request = GetClusterRequest.newBuilder().setName(clusterName).build();
try {
Cluster response = client.getCluster(request);
return response;
} catch (UncheckedExecutionException e) {
if (e.getCause() != null && e.getCause() instanceof OperationRuntimeException) {
Status status = ((OperationRuntimeException) e.getCause()).getStatus();
if (status.getCode() == Status.NOT_FOUND.getCode()) {
return null;
}
}
e.printStackTrace();
throw e;
}
}
private void waitForOperation(BigtableClusterAdminClient client, String operationName,
int maxSeconds) {
GetOperationRequest request = GetOperationRequest.newBuilder().setName(operationName).build();
for (int i = 0; i < maxSeconds; i++) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
Operation response = client.getOperation(request);
if (response.getError() != null) {
return;
}
}
throw new IllegalStateException(String.format(
"Waited %d seconds and operation was not complete", maxSeconds));
}
private BigtableClusterAdminClient createClusterAdminStub(BigtableOptions bigtableOptions)
throws IOException {
return BigtableClusterAdminGrpcClient.createClient(
bigtableOptions.getClusterAdminTransportOptions(), bigtableOptions.getChannelOptions(),
Executors.newFixedThreadPool(10));
}
private List<Zone> getZones(BigtableClusterAdminClient client, String projectId) {
ListZonesResponse zones =
client.listZones(ListZonesRequest.newBuilder().setName("projects/" + projectId).build());
List<Zone> zoneList = zones.getZonesList();
Assert.assertTrue("Zones must exist", !zoneList.isEmpty());
return zoneList;
}
private String selectZone(List<Zone> zoneList) {
int zoneNumber = (int) (zoneList.size() * Math.random());
return zoneList.get(zoneNumber).getName().replaceFirst("^/", "");
}
private Cluster createACluster(BigtableClusterAdminClient client, String zoneName,
String clusterId) {
CreateClusterRequest request =
CreateClusterRequest.newBuilder().setName(zoneName).setClusterId(clusterId)
.setCluster(Cluster.newBuilder().setServeNodes(1).build()).build();
return client.createCluster(request);
}
private List<Cluster> getClusters(BigtableClusterAdminClient client, String projectId) {
ListClustersRequest request =
ListClustersRequest.newBuilder().setName("projects/" + projectId).build();
return client.listClusters(request).getClustersList();
}
private Configuration newConfiguration(Configuration base, String fullyQualifiedClusterId) {
Configuration newConfig = new Configuration(base);
String zone = fullyQualifiedClusterId.replaceFirst(".*/zones/([^/]+)/.*", "$1");
String cluster = fullyQualifiedClusterId.replaceFirst(".*/clusters/([^/]+)", "$1");
newConfig.set(BigtableOptionsFactory.ZONE_KEY, zone);
newConfig.set(BigtableOptionsFactory.CLUSTER_KEY, cluster);
return newConfig;
}
private void createTable(Admin admin, TableName tableName) throws IOException {
HTableDescriptor descriptor = new HTableDescriptor(tableName);
descriptor.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
admin.createTable(descriptor);
Assert.assertTrue("Table does not exist", admin.tableExists(tableName));
}
DataGenerationHelper dataHelper = new DataGenerationHelper();
private void doPutGetDelete(Table table) throws IOException {
testIncrement(dataHelper, table);
testCheckAndMutate(dataHelper, table);
}
private void testIncrement(DataGenerationHelper dataHelper, Table table)
throws IOException {
byte[] rowKey = dataHelper.randomData("testrow-");
byte[] qual1 = dataHelper.randomData("qual-");
long value1 = new Random().nextInt();
long incr1 = new Random().nextInt();
byte[] qual2 = dataHelper.randomData("qual-");
long value2 = new Random().nextInt();
long incr2 = new Random().nextInt();
// Put and increment
Put put = new Put(rowKey);
put.addColumn(COLUMN_FAMILY, qual1, Bytes.toBytes(value1));
put.addColumn(COLUMN_FAMILY, qual2, Bytes.toBytes(value2));
table.put(put);
Increment increment = new Increment(rowKey);
increment.addColumn(COLUMN_FAMILY, qual1, incr1);
increment.addColumn(COLUMN_FAMILY, qual2, incr2);
Result result = table.increment(increment);
Assert.assertEquals(2, result.size());
Assert.assertEquals("Value1=" + value1 + " & Incr1=" + incr1, value1 + incr1,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual1))));
Assert.assertEquals("Value2=" + value2 + " & Incr2=" + incr2, value2 + incr2,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual2))));
// Double-check values with a Get
Get get = new Get(rowKey);
get.setMaxVersions(5);
result = table.get(get);
Assert.assertEquals("Expected four results, two for each column", 4, result.size());
Assert.assertEquals("Value1=" + value1 + " & Incr1=" + incr1, value1 + incr1,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual1))));
Assert.assertEquals("Value2=" + value2 + " & Incr2=" + incr2, value2 + incr2,
Bytes.toLong(CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, qual2))));
}
private void testCheckAndMutate(DataGenerationHelper dataHelper, Table table) throws IOException {
byte[] rowKey = dataHelper.randomData("rowKey-");
byte[] qual = dataHelper.randomData("qualifier-");
byte[] value1 = dataHelper.randomData("value-");
byte[] value2 = dataHelper.randomData("value-");
// Put with a bad check on a null value, then try with a good one
Put put = new Put(rowKey).addColumn(COLUMN_FAMILY, qual, value1);
boolean success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, value2, put);
Assert.assertFalse("Column doesn't exist. Should fail.", success);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, null, put);
Assert.assertTrue(success);
// Fail on null check, now there's a value there
put = new Put(rowKey).addColumn(COLUMN_FAMILY, qual, value2);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, null, put);
Assert.assertFalse("Null check should fail", success);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, value2, put);
Assert.assertFalse("Wrong value should fail", success);
success = table.checkAndPut(rowKey, COLUMN_FAMILY, qual, value1, put);
Assert.assertTrue(success);
// Check results
Get get = new Get(rowKey);
get.setMaxVersions(5);
Result result = table.get(get);
Assert.assertEquals("Should be two results", 2, result.size());
List<Cell> cells = result.getColumnCells(COLUMN_FAMILY, qual);
Assert.assertArrayEquals(value2, CellUtil.cloneValue(cells.get(0)));
Assert.assertArrayEquals(value1, CellUtil.cloneValue(cells.get(1)));
}
private void dropTable(Connection connection, TableName tableName) throws IOException {
try (Admin admin = connection.getAdmin()) {
admin.disableTable(tableName);
admin.deleteTable(tableName);
Assert.assertFalse(admin.tableExists(tableName));
}
}
private void dropCluster(BigtableClusterAdminClient client, String fullyQualifiedClusterId) {
DeleteClusterRequest request =
DeleteClusterRequest.newBuilder().setName(fullyQualifiedClusterId).build();
client.deleteCluster(request);
Assert.assertNull(getCluster(client, fullyQualifiedClusterId));
}
}
| Making the TestClusterAPI compliant with server validation.
| bigtable-hbase/src/test/java/com/google/cloud/bigtable/hbase/TestClusterAPI.java | Making the TestClusterAPI compliant with server validation. | <ide><path>igtable-hbase/src/test/java/com/google/cloud/bigtable/hbase/TestClusterAPI.java
<ide> List<Cluster> clusters = getClusters(client, projectId);
<ide>
<ide> // cleanup any old clusters
<del> boolean createCluster = true;
<ide> for (Cluster cluster : clusters) {
<ide> if (cluster.getName().contains(TEST_CLUSTER_ID)) {
<ide> dropCluster(client, cluster.getName());
<del> // createCluster = false;
<ide> }
<ide> }
<ide>
<ide> String fullyQualifiedZoneName = selectZone(zoneList);
<ide> String clusterId = fullyQualifiedZoneName + "/clusters/" + TEST_CLUSTER_ID;
<ide>
<del> if (createCluster) {
<del> Cluster cluster = createACluster(client, fullyQualifiedZoneName, TEST_CLUSTER_ID);
<del> waitForOperation(client, cluster.getCurrentOperation().getName(), MAX_WAIT_SECONDS);
<del> }
<add> Cluster cluster = createACluster(client, fullyQualifiedZoneName, TEST_CLUSTER_ID);
<add> waitForOperation(client, cluster.getCurrentOperation().getName(), MAX_WAIT_SECONDS);
<ide>
<ide> Configuration newConfig = newConfiguration(config, clusterId);
<ide> TableName autoDeletedTableName =
<ide>
<ide> private Cluster createACluster(BigtableClusterAdminClient client, String zoneName,
<ide> String clusterId) {
<del> CreateClusterRequest request =
<del> CreateClusterRequest.newBuilder().setName(zoneName).setClusterId(clusterId)
<del> .setCluster(Cluster.newBuilder().setServeNodes(1).build()).build();
<add> Cluster cluster = Cluster.newBuilder()
<add> .setDisplayName(clusterId)
<add> .setServeNodes(3)
<add> .build();
<add> CreateClusterRequest request = CreateClusterRequest.newBuilder()
<add> .setName(zoneName)
<add> .setClusterId(clusterId)
<add> .setCluster(cluster)
<add> .build();
<ide> return client.createCluster(request);
<ide> }
<ide> |
|
JavaScript | mit | 6f04dcc8f390458e14ef7b25bff7a4a7083f4ba6 | 0 | lenvanessen/bolt,nikgo/bolt,electrolinux/bolt,marcin-piela/bolt,winiceo/bolt,lenvanessen/bolt,marcin-piela/bolt,skript-cc/bolt,skript-cc/bolt,HonzaMikula/bolt,tekjava/bolt,kendoctor/bolt,bolt/bolt,pygillier/bolt,romulo1984/bolt,cdowdy/bolt,xeddmc/bolt,richardhinkamp/bolt,nikgo/bolt,CarsonF/bolt,xeddmc/bolt,bolt/bolt,GawainLynch/bolt,nantunes/bolt,bywatersolutions/reports-site,rossriley/bolt,richardhinkamp/bolt,rarila/bolt,bywatersolutions/reports-site,hugin2005/bolt,pygillier/bolt,one988/cm,bolt/bolt,HonzaMikula/masivnipostele,rarila/bolt,joshuan/bolt,winiceo/bolt,marcin-piela/bolt,HonzaMikula/masivnipostele,GDmac/bolt,Calinou/bolt,electrolinux/bolt,codesman/bolt,codesman/bolt,pygillier/bolt,nikgo/bolt,electrolinux/bolt,HonzaMikula/masivnipostele,rossriley/bolt,Raistlfiren/bolt,romulo1984/bolt,bywatersolutions/reports-site,cdowdy/bolt,one988/cm,rossriley/bolt,kendoctor/bolt,HonzaMikula/bolt,joshuan/bolt,hugin2005/bolt,tekjava/bolt,one988/cm,electrolinux/bolt,CarsonF/bolt,romulo1984/bolt,Raistlfiren/bolt,tekjava/bolt,codesman/bolt,marcin-piela/bolt,GDmac/bolt,hannesl/bolt,rossriley/bolt,Eiskis/bolt-base,cdowdy/bolt,bolt/bolt,nantunes/bolt,hugin2005/bolt,HonzaMikula/bolt,rarila/bolt,cdowdy/bolt,hannesl/bolt,Intendit/bolt,xeddmc/bolt,GawainLynch/bolt,tekjava/bolt,nantunes/bolt,nikgo/bolt,Calinou/bolt,skript-cc/bolt,winiceo/bolt,CarsonF/bolt,Raistlfiren/bolt,winiceo/bolt,Intendit/bolt,pygillier/bolt,one988/cm,codesman/bolt,skript-cc/bolt,GDmac/bolt,hugin2005/bolt,GDmac/bolt,hannesl/bolt,kendoctor/bolt,richardhinkamp/bolt,joshuan/bolt,rarila/bolt,lenvanessen/bolt,richardhinkamp/bolt,Calinou/bolt,Intendit/bolt,bywatersolutions/reports-site,Raistlfiren/bolt,xeddmc/bolt,HonzaMikula/masivnipostele,Eiskis/bolt-base,lenvanessen/bolt,HonzaMikula/bolt,joshuan/bolt,nantunes/bolt,Eiskis/bolt-base,hannesl/bolt,Calinou/bolt,GawainLynch/bolt,romulo1984/bolt,GawainLynch/bolt,CarsonF/bolt,Intendit/bolt,kendoctor/bolt,Eis
kis/bolt-base | /**
* Helper to get all selected Items and return Array
*/
function getSelectedItems() {
var aItems = [];
$('.dashboardlisting input:checked').each(function () {
if ($(this).parents('tr').attr('id')) {
aItems.push($(this).parents('tr').attr('id').substr(5));
}
});
return aItems;
}
/**
* Basic form validation before submit, adapted from
* http://www.sitepoint.com/html5-forms-javascript-constraint-validation-api/
*/
// Basic legacy validation checking
function LegacyValidation(field) {
var
valid = true,
val = field.value,
type = field.getAttribute('type'),
chkbox = type === 'checkbox' || type === 'radio',
required = field.getAttribute('required'),
minlength = field.getAttribute('minlength'),
maxlength = field.getAttribute('maxlength'),
pattern = field.getAttribute('pattern');
// Disabled fields should not be validated
if (field.disabled) {
return valid;
}
/* jshint -W126 */
// value required?
valid = valid && (!required ||
(chkbox && field.checked) ||
(!chkbox && val !== "")
);
// minlength or maxlength set?
valid = valid && (chkbox || (
(!minlength || val.length >= minlength) &&
(!maxlength || val.length <= maxlength)
));
/* jshint +W126 */
// Test pattern
if (valid && pattern) {
pattern = new RegExp('^(?:' + pattern + ')$');
valid = pattern.test(val);
}
return valid;
}
function validateContent(form) {
var formLength = form.elements.length,
f,
field,
formvalid = true,
hasNativeValidation,
isCkeditor;
// Loop all fields
for (f = 0; f < formLength; f++) {
field = form.elements[f];
if (field.nodeName !== 'INPUT' && field.nodeName !== 'TEXTAREA' && field.nodeName !== 'SELECT') {
continue;
}
if (field.nodeName === 'INPUT') {
// Trim input values
field.value = field.value.trim();
}
// Is native browser validation available?
hasNativeValidation = typeof field.willValidate !== 'undefined';
if (hasNativeValidation) {
// Native validation available
if (field.nodeName === 'INPUT' && field.type !== field.getAttribute('type')) {
// Input type not supported! Use legacy JavaScript validation
field.setCustomValidity(LegacyValidation(field) ? '' : 'error');
}
// Native browser check
field.checkValidity();
} else {
// Native validation not available
field.validity = field.validity || {};
// Set to result of validation function
field.validity.valid = LegacyValidation(field);
// If "invalid" events are required, trigger it here
}
// Special validation for CKEdito fields
isCkeditor = field.nodeName === 'TEXTAREA' && $(field).hasClass('ckeditor');
if (isCkeditor) {
var editor = CKEDITOR.instances[field.id],
error;
if (editor) {
error = editor._.required === true && editor.getData().trim() === '';
if (hasNativeValidation) {
field.setCustomValidity(error ? 'Required' : '');
} else {
field.validity.valid = error;
}
}
}
var noticeID = field.id + '-notice';
// First, remove any existing old notices
$('#' + noticeID).remove();
if (field.validity.valid) {
// Remove error styles and messages
$(field).removeClass('error');
if (isCkeditor) {
$('#cke_' + field.id).removeClass('cke_error');
}
} else {
// Style field, show error, etc.
$(field).addClass('error');
if (isCkeditor) {
$('#cke_' + field.id).addClass('cke_error');
}
var msg = $(field).data('errortext') || 'The ' +
field.name + ' field is required or needs to match a pattern';
console.log(noticeID+': '+msg);
$('<div id="' + noticeID + '" class="alert alert-danger">' +
'<button class="close" data-dismiss="alert">×</button>' + msg + '</div>')
.hide()
.insertAfter('.page-header')
.slideDown('fast');
// form is invalid
formvalid = false;
}
}
return formvalid;
}
| app/view/lib/bolt/fnc-helpers.js | /**
* Helper to get all selected Items and return Array
*/
function getSelectedItems() {
var aItems = [];
$('.dashboardlisting input:checked').each(function () {
if ($(this).parents('tr').attr('id')) {
aItems.push($(this).parents('tr').attr('id').substr(5));
}
});
return aItems;
}
// basic form validation before submit, adapted from
// http://www.sitepoint.com/html5-forms-javascript-constraint-validation-api/
// =========================================================
// basic legacy validation checking
/**
 * Fallback validation for browsers without the native constraint
 * validation API. Checks the required, minlength/maxlength and pattern
 * attributes of a single form field and returns true when the field is
 * valid. Disabled fields are always considered valid.
 */
function LegacyValidation(field) {
    // disabled fields should not be validated
    if (field.disabled) {
        return true;
    }

    var val = field.value,
        type = field.getAttribute("type"),
        isCheckable = type === "checkbox" || type === "radio";

    /* jshint -W126 */
    // A required field must be checked (checkbox/radio) or non-empty (anything else).
    if (field.getAttribute("required") &&
            !(isCheckable ? field.checked : val !== "")) {
        return false;
    }

    // minlength/maxlength only constrain non-checkable controls.
    var minlength = field.getAttribute("minlength"),
        maxlength = field.getAttribute("maxlength"),
        lengthOk = (!minlength || val.length >= minlength) &&
            (!maxlength || val.length <= maxlength);
    if (!isCheckable && !lengthOk) {
        return false;
    }
    /* jshint +W126 */

    // A pattern, when present, must match the whole value.
    var pattern = field.getAttribute("pattern");
    if (pattern) {
        return new RegExp('^(?:' + pattern + ')$').test(val);
    }
    return true;
}
/**
 * Validate all INPUT/TEXTAREA/SELECT fields of a form before submit.
 * Uses the native constraint validation API when the browser provides
 * it, falling back to LegacyValidation() otherwise. Invalid fields get
 * an 'error' class and an alert notice inserted after the page header.
 * Returns true when every field is valid.
 */
function validateContent(form) {
    var formLength = form.elements.length,
        f, field, formvalid = true,
        hasNativeValidation,
        isCkeditor;
    // loop all fields
    for (f = 0; f < formLength; f++) {
        field = form.elements[f];
        if (field.nodeName !== "INPUT" && field.nodeName !== "TEXTAREA" && field.nodeName !== "SELECT") {
            continue;
        }
        if (field.nodeName === "INPUT"){
            // trim input values
            field.value = field.value.trim();
        }
        // is native browser validation available?
        hasNativeValidation = typeof field.willValidate !== 'undefined';
        if (hasNativeValidation) {
            // native validation available
            if (field.nodeName === "INPUT" && field.type !== field.getAttribute("type")) {
                // input type not supported! Use legacy JavaScript validation
                field.setCustomValidity(LegacyValidation(field) ? "" : "error");
            }
            // native browser check
            field.checkValidity();
        }
        else {
            // native validation not available; emulate the validity object
            field.validity = field.validity || {};
            // set to result of validation function
            field.validity.valid = LegacyValidation(field);
            // if "invalid" events are required, trigger it here
        }
        // Special validation for CKEditor fields: a required editor must
        // have non-blank content.
        isCkeditor = field.nodeName === 'TEXTAREA' && $(field).hasClass('ckeditor');
        if (isCkeditor) {
            var editor = CKEDITOR.instances[field.id],
                error;
            if (editor) {
                error = editor._.required === true && editor.getData().trim() === '';
                if (hasNativeValidation) {
                    field.setCustomValidity(error ? 'Required' : '');
                } else {
                    // BUGFIX: valid is the *negation* of error. The previous
                    // code set valid = error, marking a required-but-empty
                    // editor as valid (and a filled one as invalid).
                    field.validity.valid = !error;
                }
            }
        }
        var noticeID = field.id + '-notice';
        // first, remove any existing old notices
        $('#'+noticeID).remove();
        if (field.validity.valid) {
            // remove error styles and messages
            $(field).removeClass('error');
            if (isCkeditor) {
                $('#cke_' + field.id).removeClass('cke_error');
            }
        }
        else {
            // style field, show error, etc.
            $(field).addClass('error');
            if (isCkeditor) {
                $('#cke_' + field.id).addClass('cke_error');
            }
            var msg = $(field).data('errortext') || 'The '+field.name+' field is required or needs to match a pattern';
            console.log(noticeID+': '+msg);
            $('<div id="' + noticeID + '" class="alert alert-danger">' +
            '<button class="close" data-dismiss="alert">×</button>' + msg + '</div>')
            .hide()
            .insertAfter('.page-header')
            .slideDown('fast');
            // form is invalid
            formvalid = false;
        }
    }
    return formvalid;
}
// =========================================================
| Coding style | app/view/lib/bolt/fnc-helpers.js | Coding style | <ide><path>pp/view/lib/bolt/fnc-helpers.js
<ide> */
<ide> function getSelectedItems() {
<ide> var aItems = [];
<add>
<ide> $('.dashboardlisting input:checked').each(function () {
<ide> if ($(this).parents('tr').attr('id')) {
<ide> aItems.push($(this).parents('tr').attr('id').substr(5));
<ide> }
<ide> });
<add>
<ide> return aItems;
<ide> }
<ide>
<add>/**
<add> * Basic form validation before submit, adapted from
<add> * http://www.sitepoint.com/html5-forms-javascript-constraint-validation-api/
<add>*/
<ide>
<del>// basic form validation before submit, adapted from
<del>// http://www.sitepoint.com/html5-forms-javascript-constraint-validation-api/
<del>// =========================================================
<del>
<del>// basic legacy validation checking
<add>// Basic legacy validation checking
<ide> function LegacyValidation(field) {
<ide> var
<ide> valid = true,
<ide> val = field.value,
<del> type = field.getAttribute("type"),
<del> chkbox = type === "checkbox" || type === "radio",
<del> required = field.getAttribute("required"),
<del> minlength = field.getAttribute("minlength"),
<del> maxlength = field.getAttribute("maxlength"),
<del> pattern = field.getAttribute("pattern");
<add> type = field.getAttribute('type'),
<add> chkbox = type === 'checkbox' || type === 'radio',
<add> required = field.getAttribute('required'),
<add> minlength = field.getAttribute('minlength'),
<add> maxlength = field.getAttribute('maxlength'),
<add> pattern = field.getAttribute('pattern');
<ide>
<del> // disabled fields should not be validated
<add> // Disabled fields should not be validated
<ide> if (field.disabled) {
<ide> return valid;
<ide> }
<ide>
<ide> /* jshint +W126 */
<ide>
<del> // test pattern
<add> // Test pattern
<ide> if (valid && pattern) {
<del> pattern = new RegExp('^(?:'+pattern+')$');
<add> pattern = new RegExp('^(?:' + pattern + ')$');
<ide> valid = pattern.test(val);
<ide> }
<ide>
<ide> function validateContent(form) {
<ide>
<ide> var formLength = form.elements.length,
<del> f, field, formvalid = true,
<add> f,
<add> field,
<add> formvalid = true,
<ide> hasNativeValidation,
<ide> isCkeditor;
<ide>
<del> // loop all fields
<add> // Loop all fields
<ide> for (f = 0; f < formLength; f++) {
<ide> field = form.elements[f];
<ide>
<del> if (field.nodeName !== "INPUT" && field.nodeName !== "TEXTAREA" && field.nodeName !== "SELECT") {
<add> if (field.nodeName !== 'INPUT' && field.nodeName !== 'TEXTAREA' && field.nodeName !== 'SELECT') {
<ide> continue;
<ide> }
<ide>
<del> if (field.nodeName === "INPUT"){
<del> // trim input values
<add> if (field.nodeName === 'INPUT') {
<add> // Trim input values
<ide> field.value = field.value.trim();
<ide> }
<ide>
<del> // is native browser validation available?
<add> // Is native browser validation available?
<ide> hasNativeValidation = typeof field.willValidate !== 'undefined';
<ide> if (hasNativeValidation) {
<del> // native validation available
<del> if (field.nodeName === "INPUT" && field.type !== field.getAttribute("type")) {
<del> // input type not supported! Use legacy JavaScript validation
<del> field.setCustomValidity(LegacyValidation(field) ? "" : "error");
<add> // Native validation available
<add> if (field.nodeName === 'INPUT' && field.type !== field.getAttribute('type')) {
<add> // Input type not supported! Use legacy JavaScript validation
<add> field.setCustomValidity(LegacyValidation(field) ? '' : 'error');
<ide> }
<del> // native browser check
<add> // Native browser check
<ide> field.checkValidity();
<del> }
<del> else {
<del> // native validation not available
<add> } else {
<add> // Native validation not available
<ide> field.validity = field.validity || {};
<del> // set to result of validation function
<add> // Set to result of validation function
<ide> field.validity.valid = LegacyValidation(field);
<ide>
<del> // if "invalid" events are required, trigger it here
<del>
<add> // If "invalid" events are required, trigger it here
<ide> }
<ide>
<ide> // Special validation for CKEdito fields
<ide>
<ide> var noticeID = field.id + '-notice';
<ide>
<del> // first, remove any existing old notices
<del> $('#'+noticeID).remove();
<add> // First, remove any existing old notices
<add> $('#' + noticeID).remove();
<ide>
<ide> if (field.validity.valid) {
<del>
<del> // remove error styles and messages
<add> // Remove error styles and messages
<ide> $(field).removeClass('error');
<ide>
<ide> if (isCkeditor) {
<ide> $('#cke_' + field.id).removeClass('cke_error');
<ide> }
<del> }
<del> else {
<del> // style field, show error, etc.
<add> } else {
<add> // Style field, show error, etc.
<ide> $(field).addClass('error');
<ide>
<ide> if (isCkeditor) {
<ide> $('#cke_' + field.id).addClass('cke_error');
<ide> }
<ide>
<del> var msg = $(field).data('errortext') || 'The '+field.name+' field is required or needs to match a pattern';
<add> var msg = $(field).data('errortext') || 'The ' +
<add> field.name + ' field is required or needs to match a pattern';
<ide> console.log(noticeID+': '+msg);
<ide>
<ide> $('<div id="' + noticeID + '" class="alert alert-danger">' +
<ide>
<ide> return formvalid;
<ide> }
<del>
<del>// ========================================================= |
|
Java | lgpl-2.1 | bd7c800b7f1c9a68529319096a8118b175ca1f7e | 0 | fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui | package to.etc.domui.components.basic;
import to.etc.domui.annotations.*;
import to.etc.domui.component.buttons.*;
import to.etc.domui.dom.html.*;
import to.etc.domui.utils.*;
/**
* A page title bar. This consists of an image in the left corner, a string describing the
* module's functionality and a set of standard buttons opening quick-access pages. This uses
* the generic menu system code to retrieve a module name and image, if applicable.
*
* @author <a href="mailto:[email protected]">Frits Jalvingh</a>
* Created on Apr 3, 2009
*/
public class AppPageTitle extends Div {
private final Img m_img = new Img();
private String m_title;
private TD m_buttonpart;
private TD m_titlePart;
public AppPageTitle() {
}
protected AppPageTitle(final String title) {
m_title = title;
}
protected AppPageTitle(final String icon, final String title) {
m_title = title;
setIcon(icon);
}
public void setIcon(final String s) {
m_img.setSrc(s);
}
@Override
public void createContent() throws Exception {
super.createContent();
Table tbl = new Table();
add(tbl);
TBody b = new TBody();
tbl.add(b);
tbl.setCssClass("vp-ttl");
tbl.setCellPadding("0");
tbl.setCellSpacing("0");
tbl.setTableBorder(0);
TR tr = b.addRow();
b.add(tr);
//-- Image...
setIconURL();
// if(m_img.getSrc() == null)
// m_img.setSrc("img/btnModule.png");
m_img.setAlign(ImgAlign.LEFT);
TD td = b.addCell();
td.add(m_img);
td.setCssClass("vp-ttl-i");
//-- Title.
td = b.addCell();
m_titlePart = td;
td.setCssClass("vp-ttl-t");
String ttl = getPageTitle();
if(ttl != null)
td.add(ttl);
//-- Buttons
td = b.addCell();
td.setCssClass("vp-ttl-bb");
// td.setWidth("1%");
m_buttonpart = td;
addDefaultButtons(td);
}
public TD getButtonpart() {
return m_buttonpart;
}
/**
* Calculate the image URL to use for the icon.
* @return
*/
private void setIconURL() {
//-- 1. Is an icon or icon resource specified in any attached UIMenu annotation? If so use that;
Class<? extends UrlPage> clz = getPage().getBody().getClass();
UIMenu ma = clz.getAnnotation(UIMenu.class);
if(ma != null) {
if(ma.iconName() != null) {
if(ma.iconBase() != Object.class)
m_img.setSrc(ma.iconBase(), ma.iconName()); // Set class-based URL
else
m_img.setSrc(ma.iconName()); // Set specific thingy,
}
}
//-- Not set using a UIMenu annotation. Is a .png with the same classname available?
String cn = AppUIUtil.getClassNameOnly(clz)+".png";
if(AppUIUtil.hasResource(clz, cn)) {
m_img.setSrc(clz, cn);
return;
}
//-- Try to get an URL from the class-based resources. FIXME Todo
}
/**
* Calculate the title URL to use for this thing.
* @return
*/
private String getPageTitle() {
if(m_title != null) // Manually set?
return m_title;
return AppUIUtil.calcPageTitle(getPage().getBody().getClass());
}
protected void addDefaultButtons(final NodeContainer nc) {
SmallImgButton ib = new SmallImgButton("img/btnSpecialChar.png");
nc.add(ib);
ib.setTitle("Toon lijst van bijzondere tekens");
ib.setClicked(new IClicked<NodeBase>() {
public void clicked(final NodeBase b) throws Exception {
OddCharacters oc = new OddCharacters();
getPage().getBody().add(oc);
}
});
}
}
| to.etc.domui.pages/src/to/etc/domui/components/basic/AppPageTitle.java | package to.etc.domui.components.basic;
import to.etc.domui.annotations.*;
import to.etc.domui.component.buttons.*;
import to.etc.domui.dom.html.*;
import to.etc.domui.utils.*;
/**
* A page title bar. This consists of an image in the left corner, a string describing the
* module's functionality and a set of standard buttons opening quick-access pages. This uses
* the generic menu system code to retrieve a module name and image, if applicable.
*
* @author <a href="mailto:[email protected]">Frits Jalvingh</a>
* Created on Apr 3, 2009
*/
public class AppPageTitle extends Div {
private final Img m_img = new Img();
private String m_title;
private TD m_buttonpart;
private TD m_titlePart;
public AppPageTitle() {
}
protected AppPageTitle(final String title) {
m_title = title;
}
protected AppPageTitle(final String icon, final String title) {
m_title = title;
setIcon(icon);
}
public void setIcon(final String s) {
m_img.setSrc(s);
}
@Override
public void createContent() throws Exception {
super.createContent();
Table tbl = new Table();
add(tbl);
TBody b = new TBody();
tbl.add(b);
tbl.setCssClass("vp-ttl");
tbl.setCellPadding("0");
tbl.setCellSpacing("0");
tbl.setTableBorder(0);
TR tr = b.addRow();
b.add(tr);
//-- Image...
if(m_img.getSrc() == null)
m_img.setSrc("img/btnModule.png");
m_img.setAlign(ImgAlign.LEFT);
TD td = b.addCell();
td.add(m_img);
td.setCssClass("vp-ttl-i");
//-- Title.
td = b.addCell();
m_titlePart = td;
td.setCssClass("vp-ttl-t");
String ttl = getPageTitle();
if(ttl != null)
td.add(ttl);
//-- Buttons
td = b.addCell();
td.setCssClass("vp-ttl-bb");
// td.setWidth("1%");
m_buttonpart = td;
addDefaultButtons(td);
}
public TD getButtonpart() {
return m_buttonpart;
}
/**
* Calculate the image URL to use for the icon.
* @return
*/
private void setIconURL() {
//-- 1. Is an icon or icon resource specified in any attached UIMenu annotation? If so use that;
Class<? extends UrlPage> clz = getPage().getBody().getClass();
UIMenu ma = clz.getAnnotation(UIMenu.class);
if(ma != null) {
if(ma.iconName() != null) {
if(ma.iconBase() != Object.class)
m_img.setSrc(ma.iconBase(), ma.iconName()); // Set class-based URL
else
m_img.setSrc(ma.iconName()); // Set specific thingy,
}
}
//-- Not set using a UIMenu annotation. Is a .png with the same classname available?
String cn = AppUIUtil.getClassNameOnly(clz)+".png";
if(AppUIUtil.hasResource(clz, cn)) {
m_img.setSrc(clz, cn);
return;
}
//-- Try to get an URL from the class-based resources. FIXME Todo
}
/**
* Calculate the title URL to use for this thing.
* @return
*/
private String getPageTitle() {
if(m_title != null) // Manually set?
return m_title;
return AppUIUtil.calcPageTitle(getPage().getBody().getClass());
}
protected void addDefaultButtons(final NodeContainer nc) {
SmallImgButton ib = new SmallImgButton("img/btnSpecialChar.png");
nc.add(ib);
ib.setTitle("Toon lijst van bijzondere tekens");
ib.setClicked(new IClicked<NodeBase>() {
public void clicked(final NodeBase b) throws Exception {
OddCharacters oc = new OddCharacters();
getPage().getBody().add(oc);
}
});
}
}
| Fix icons on page title bar | to.etc.domui.pages/src/to/etc/domui/components/basic/AppPageTitle.java | Fix icons on page title bar | <ide><path>o.etc.domui.pages/src/to/etc/domui/components/basic/AppPageTitle.java
<ide> b.add(tr);
<ide>
<ide> //-- Image...
<del> if(m_img.getSrc() == null)
<del> m_img.setSrc("img/btnModule.png");
<add> setIconURL();
<add>// if(m_img.getSrc() == null)
<add>// m_img.setSrc("img/btnModule.png");
<ide> m_img.setAlign(ImgAlign.LEFT);
<ide> TD td = b.addCell();
<ide> td.add(m_img); |
|
Java | apache-2.0 | eeb5f4491adfd57a2fcbdfe30d115cd90e258bf0 | 0 | ddebrunner/streamsx.topology,wmarshall484/streamsx.topology,ddebrunner/streamsx.topology,ibmkendrick/streamsx.topology,ibmkendrick/streamsx.topology,wmarshall484/streamsx.topology,IBMStreams/streamsx.topology,ibmkendrick/streamsx.topology,ibmkendrick/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,wmarshall484/streamsx.topology,IBMStreams/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,ibmkendrick/streamsx.topology,IBMStreams/streamsx.topology,ddebrunner/streamsx.topology,ddebrunner/streamsx.topology,wmarshall484/streamsx.topology,ibmkendrick/streamsx.topology,wmarshall484/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,IBMStreams/streamsx.topology,ibmkendrick/streamsx.topology,ddebrunner/streamsx.topology,ddebrunner/streamsx.topology,ddebrunner/streamsx.topology | /*
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2015
*/
package com.ibm.streamsx.topology.test.splpy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import java.io.File;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import com.ibm.streams.operator.OutputTuple;
import com.ibm.streams.operator.StreamSchema;
import com.ibm.streams.operator.Tuple;
import com.ibm.streams.operator.Type;
import com.ibm.streams.operator.meta.TupleType;
import com.ibm.streamsx.topology.TStream;
import com.ibm.streamsx.topology.Topology;
import com.ibm.streamsx.topology.TopologyElement;
import com.ibm.streamsx.topology.context.StreamsContext;
import com.ibm.streamsx.topology.function.BiFunction;
import com.ibm.streamsx.topology.spl.SPL;
import com.ibm.streamsx.topology.spl.SPLStream;
import com.ibm.streamsx.topology.spl.SPLStreams;
import com.ibm.streamsx.topology.streams.BeaconStreams;
import com.ibm.streamsx.topology.test.TestTopology;
import com.ibm.streamsx.topology.tester.Condition;
import com.ibm.streamsx.topology.tester.Tester;
public class PythonFunctionalOperatorsTest extends TestTopology {
public static final StreamSchema ALL_PYTHON_TYPES_SCHEMA=
Type.Factory.getStreamSchema("tuple<boolean b," +
"int8 i8, int16 i16, int32 i32, int64 i64," +
"uint8 u8, uint16 u16, uint32 u32, uint64 u64," +
"float32 f32, float64 f64," +
"rstring r," +
"complex32 c32," +
"complex64 c64," +
"list<rstring> lr," +
"list<int32> li32," +
"list<int64> li64," +
"list<uint32> lui32," +
"list<uint64> lui64," +
"list<float32> lf32," +
"list<float64> lf64," +
"list<boolean> lb," +
"map<int32,rstring> mi32r," +
"map<rstring,uint32> mru32," +
"map<rstring,int32> mri32," +
"map<uint32,rstring> mu32r," +
"map<int32,int32> mi32i32," +
"map<uint32,uint32> mu32u32," +
"map<rstring,rstring> mrr," +
"map<float64,float64> mf64f64," +
"map<float64,int32> mf64i32," +
"map<float64,uint32> mf64u32," +
"map<float64,rstring> mf64r," +
"map<rstring,float64> mrf64>");
public static final StreamSchema ALL_PYTHON_TYPES_WITH_SETS_SCHEMA = ALL_PYTHON_TYPES_SCHEMA.extend("set<int32>", "si32");
public static final int TUPLE_COUNT = 1000;
@Before
public void runSpl() {
assumeSPLOk();
assumeTrue(getTesterContext().getType() == StreamsContext.Type.STANDALONE_TESTER
|| getTesterContext().getType() == StreamsContext.Type.DISTRIBUTED_TESTER);
}
public static SPLStream testTupleStream(Topology topology) {
return testTupleStream(topology, false);
}
public static StreamSchema getPythonTypesSchema(boolean withSets) {
if (withSets) {
return ALL_PYTHON_TYPES_WITH_SETS_SCHEMA;
}
else {
return ALL_PYTHON_TYPES_SCHEMA;
}
}
public static SPLStream testTupleStream(Topology topology, boolean withSets) {
TStream<Long> beacon = BeaconStreams.longBeacon(topology, TUPLE_COUNT);
return SPLStreams.convertStream(beacon, new BiFunction<Long, OutputTuple, OutputTuple>() {
private static final long serialVersionUID = 1L;
private transient TupleType type;
private transient Random rand;
@Override
public OutputTuple apply(Long v1, OutputTuple v2) {
if (type == null) {
type = Type.Factory.getTupleType(getPythonTypesSchema(withSets).getLanguageType());
rand = new Random();
}
Tuple randTuple = (Tuple) type.randomValue(rand);
v2.assign(randTuple);
return v2;
}
}, getPythonTypesSchema(withSets));
}
@Test
public void testPositionalSampleNoop() throws Exception {
Topology topology = new Topology("testPositionalSampleNoop");
SPLStream tuples = testTupleStream(topology);
SPLStream viaSPL = SPL.invokeOperator("spl.relational::Functor", tuples, tuples.getSchema(), null);
addTestToolkit(tuples);
SPLStream viaPython = SPL.invokeOperator("com.ibm.streamsx.topology.pysamples.positional::Noop", tuples, tuples.getSchema(), null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, TUPLE_COUNT);
Condition<List<Tuple>> viaSPLResult = tester.tupleContents(viaSPL);
Condition<List<Tuple>> viaPythonResult = tester.tupleContents(viaPython);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.valid());
assertEquals(viaSPLResult.getResult(), viaPythonResult.getResult());
}
private static final StreamSchema TEST_SCHEMA_SF =
Type.Factory.getStreamSchema("tuple<int32 a, int16 b, int64 vl>");
private static final Tuple[] TEST_TUPLES = new Tuple[4];
static {
TEST_TUPLES[0] = TEST_SCHEMA_SF.getTuple(new Object[] {7, (short) 25, 34535L});
TEST_TUPLES[1] = TEST_SCHEMA_SF.getTuple(new Object[] {32, (short) 6, 43675232L});
TEST_TUPLES[2] = TEST_SCHEMA_SF.getTuple(new Object[] {2, (short) 3, 654932L});
TEST_TUPLES[3] = TEST_SCHEMA_SF.getTuple(new Object[] {431221, (short) 1321, 82343L});
}
public static SPLStream sampleFilterStream(Topology topology) {
TStream<Long> beacon = BeaconStreams.longBeacon(topology, TEST_TUPLES.length);
return SPLStreams.convertStream(beacon, new BiFunction<Long, OutputTuple, OutputTuple>() {
private static final long serialVersionUID = 1L;
@Override
public OutputTuple apply(Long v1, OutputTuple v2) {
v2.assign(TEST_TUPLES[(int)((long) v1)]);
return v2;
}
}, TEST_SCHEMA_SF);
}
private void addTestToolkit(TopologyElement te) throws Exception {
// Need to run extract to ensure the operators match the python
// version we are testing.
File toolkitRoot = new File(getTestRoot(), "python/spl/testtkpy");
int rc = PythonExtractTest.extract(toolkitRoot, true);
assertEquals(0, rc);
SPL.addToolkit(te, toolkitRoot);
}
@Test
public void testPositionalSampleSimpleFilter() throws Exception {
Topology topology = new Topology("testPositionalSampleSimpleFilter");
SPLStream tuples = sampleFilterStream(topology);
addTestToolkit(tuples);
SPLStream viaPython = SPL.invokeOperator(
"com.ibm.streamsx.topology.pysamples.positional::SimpleFilter", tuples, tuples.getSchema(), null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, 2);
// first attribute is the sum of the first and second input attributes
// others are copied across from in to out.
Tuple r1 = TEST_SCHEMA_SF.getTuple(new Object[] {32, (short) 25, 34535L});
Tuple r2 = TEST_SCHEMA_SF.getTuple(new Object[] {5, (short) 3, 654932L});
Condition<List<Tuple>> viaPythonResult = tester.tupleContents(viaPython,
r1, r2);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.toString(), expectedCount.valid());
assertTrue(viaPythonResult.toString(), viaPythonResult.valid());
}
@Test
public void testPositionalSampleSimpleFilterUsingSPLType() throws Exception {
Topology topology = new Topology("testPositionalSampleSimpleFilterUsingSPLType");
SPLStream tuples = sampleFilterStream(topology);
addTestToolkit(tuples);
SPLStream viaPython = SPL.invokeOperator(
"testspl::SF", tuples, tuples.getSchema(), null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, 2);
// first attribute is the sum of the first and second input attributes
// others are copied across from in to out.
Tuple r1 = TEST_SCHEMA_SF.getTuple(new Object[] {32, (short) 25, 34535L});
Tuple r2 = TEST_SCHEMA_SF.getTuple(new Object[] {5, (short) 3, 654932L});
Condition<List<Tuple>> viaPythonResult = tester.tupleContents(viaPython,
r1, r2);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.valid());
assertTrue(viaPythonResult.toString(), viaPythonResult.valid());
}
@Test
public void testStatefulOperator() throws Exception {
Topology topology = new Topology("testPositionalSampleSimpleFilterUsingSPLType");
SPLStream tuples = testTupleStream(topology, false);
addTestToolkit(tuples);
StreamSchema outSchema = tuples.getSchema().extend("int32", "sequence_using_py");
SPLStream viaPython = SPL.invokeOperator(
"com.ibm.streamsx.topology.pysamples.positional::AddSeq", tuples, outSchema, null);
// Add a second count to make sure that the states are independent.
SPLStream filtered = tuples.filter(t -> t.getInt("i32") < 10000);
SPLStream viaPythonFiltered = SPL.invokeOperator(
"com.ibm.streamsx.topology.pysamples.positional::AddSeq", filtered, outSchema, null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, TUPLE_COUNT);
Condition<List<Tuple>> outTuples = tester.tupleContents(viaPython);
Condition<List<Tuple>> outFilteredTuples = tester.tupleContents(viaPythonFiltered);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.valid());
List<Tuple> result = outTuples.getResult();
assertEquals(TUPLE_COUNT, result.size());
for (int i = 0; i < TUPLE_COUNT; i++)
assertEquals(i, result.get(i).getInt("sequence_using_py"));
List<Tuple> filteredResult = outFilteredTuples.getResult();
assertTrue(filteredResult.size() <= TUPLE_COUNT);
for (int i = 0; i < filteredResult.size(); i++)
assertEquals(i, filteredResult.get(i).getInt("sequence_using_py"));
}
}
| test/java/src/com/ibm/streamsx/topology/test/splpy/PythonFunctionalOperatorsTest.java | /*
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2015
*/
package com.ibm.streamsx.topology.test.splpy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import java.io.File;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import com.ibm.streams.operator.OutputTuple;
import com.ibm.streams.operator.StreamSchema;
import com.ibm.streams.operator.Tuple;
import com.ibm.streams.operator.Type;
import com.ibm.streams.operator.meta.TupleType;
import com.ibm.streamsx.topology.TStream;
import com.ibm.streamsx.topology.Topology;
import com.ibm.streamsx.topology.TopologyElement;
import com.ibm.streamsx.topology.context.StreamsContext;
import com.ibm.streamsx.topology.function.BiFunction;
import com.ibm.streamsx.topology.spl.SPL;
import com.ibm.streamsx.topology.spl.SPLStream;
import com.ibm.streamsx.topology.spl.SPLStreams;
import com.ibm.streamsx.topology.streams.BeaconStreams;
import com.ibm.streamsx.topology.test.TestTopology;
import com.ibm.streamsx.topology.tester.Condition;
import com.ibm.streamsx.topology.tester.Tester;
public class PythonFunctionalOperatorsTest extends TestTopology {
public static final StreamSchema ALL_PYTHON_TYPES_SCHEMA=
Type.Factory.getStreamSchema("tuple<boolean b," +
"int8 i8, int16 i16, int32 i32, int64 i64," +
"uint8 u8, uint16 u16, uint32 u32, uint64 u64," +
"float32 f32, float64 f64," +
"rstring r," +
"complex32 c32," +
"complex64 c64," +
"list<rstring> lr," +
"list<int32> li32," +
"list<int64> li64," +
"list<uint32> lui32," +
"list<uint64> lui64," +
"list<float32> lf32," +
"list<float64> lf64," +
"list<boolean> lb," +
"map<int32,rstring> mi32r," +
"map<rstring,uint32> mru32," +
"map<rstring,int32> mri32," +
"map<uint32,rstring> mu32r," +
"map<int32,int32> mi32i32," +
"map<uint32,uint32> mu32u32," +
"map<rstring,rstring> mrr," +
"map<float64,float64> mf64f64," +
"map<float64,int32> mf64i32," +
"map<float64,uint32> mf64u32," +
"map<float64,rstring> mf64r," +
"map<rstring,float64> mrf64>");
public static final StreamSchema ALL_PYTHON_TYPES_WITH_SETS_SCHEMA = ALL_PYTHON_TYPES_SCHEMA.extend("set<int32>", "si32");
public static final int TUPLE_COUNT = 1000;
@Before
public void runSpl() {
assumeSPLOk();
assumeTrue(getTesterContext().getType() == StreamsContext.Type.STANDALONE_TESTER
|| getTesterContext().getType() == StreamsContext.Type.DISTRIBUTED_TESTER);
}
public static SPLStream testTupleStream(Topology topology) {
return testTupleStream(topology, false);
}
public static StreamSchema getPythonTypesSchema(boolean withSets) {
if (withSets) {
return ALL_PYTHON_TYPES_WITH_SETS_SCHEMA;
}
else {
return ALL_PYTHON_TYPES_SCHEMA;
}
}
public static SPLStream testTupleStream(Topology topology, boolean withSets) {
TStream<Long> beacon = BeaconStreams.longBeacon(topology, TUPLE_COUNT);
return SPLStreams.convertStream(beacon, new BiFunction<Long, OutputTuple, OutputTuple>() {
private static final long serialVersionUID = 1L;
private transient TupleType type;
private transient Random rand;
@Override
public OutputTuple apply(Long v1, OutputTuple v2) {
if (type == null) {
type = Type.Factory.getTupleType(getPythonTypesSchema(withSets).getLanguageType());
rand = new Random();
}
Tuple randTuple = (Tuple) type.randomValue(rand);
v2.assign(randTuple);
return v2;
}
}, getPythonTypesSchema(withSets));
}
@Test
public void testPositionalSampleNoop() throws Exception {
Topology topology = new Topology("testPositionalSampleNoop");
SPLStream tuples = testTupleStream(topology);
SPLStream viaSPL = SPL.invokeOperator("spl.relational::Functor", tuples, tuples.getSchema(), null);
addTestToolkit(tuples);
SPLStream viaPython = SPL.invokeOperator("com.ibm.streamsx.topology.pysamples.positional::Noop", tuples, tuples.getSchema(), null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, TUPLE_COUNT);
Condition<List<Tuple>> viaSPLResult = tester.tupleContents(viaSPL);
Condition<List<Tuple>> viaPythonResult = tester.tupleContents(viaPython);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.valid());
assertEquals(viaSPLResult.getResult(), viaPythonResult.getResult());
}
private static final StreamSchema TEST_SCHEMA_SF =
Type.Factory.getStreamSchema("tuple<int32 a, int16 b, int64 vl>");
private static final Tuple[] TEST_TUPLES = new Tuple[4];
static {
TEST_TUPLES[0] = TEST_SCHEMA_SF.getTuple(new Object[] {7, (short) 25, 34535L});
TEST_TUPLES[1] = TEST_SCHEMA_SF.getTuple(new Object[] {32, (short) 6, 43675232L});
TEST_TUPLES[2] = TEST_SCHEMA_SF.getTuple(new Object[] {2, (short) 3, 654932L});
TEST_TUPLES[3] = TEST_SCHEMA_SF.getTuple(new Object[] {431221, (short) 1321, 82343L});
}
public static SPLStream sampleFilterStream(Topology topology) {
TStream<Long> beacon = BeaconStreams.longBeacon(topology, TEST_TUPLES.length);
return SPLStreams.convertStream(beacon, new BiFunction<Long, OutputTuple, OutputTuple>() {
private static final long serialVersionUID = 1L;
@Override
public OutputTuple apply(Long v1, OutputTuple v2) {
v2.assign(TEST_TUPLES[(int)((long) v1)]);
return v2;
}
}, TEST_SCHEMA_SF);
}
private void addTestToolkit(TopologyElement te) throws Exception {
// Need to run extract to ensure the operators match the python
// version we are testing.
File toolkitRoot = new File(getTestRoot(), "python/spl/testtkpy");
PythonExtractTest.extract(toolkitRoot, true);
SPL.addToolkit(te, toolkitRoot);
}
@Test
public void testPositionalSampleSimpleFilter() throws Exception {
Topology topology = new Topology("testPositionalSampleSimpleFilter");
SPLStream tuples = sampleFilterStream(topology);
addTestToolkit(tuples);
SPLStream viaPython = SPL.invokeOperator(
"com.ibm.streamsx.topology.pysamples.positional::SimpleFilter", tuples, tuples.getSchema(), null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, 2);
// first attribute is the sum of the first and second input attributes
// others are copied across from in to out.
Tuple r1 = TEST_SCHEMA_SF.getTuple(new Object[] {32, (short) 25, 34535L});
Tuple r2 = TEST_SCHEMA_SF.getTuple(new Object[] {5, (short) 3, 654932L});
Condition<List<Tuple>> viaPythonResult = tester.tupleContents(viaPython,
r1, r2);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.toString(), expectedCount.valid());
assertTrue(viaPythonResult.toString(), viaPythonResult.valid());
}
@Test
public void testPositionalSampleSimpleFilterUsingSPLType() throws Exception {
Topology topology = new Topology("testPositionalSampleSimpleFilterUsingSPLType");
SPLStream tuples = sampleFilterStream(topology);
addTestToolkit(tuples);
SPLStream viaPython = SPL.invokeOperator(
"testspl::SF", tuples, tuples.getSchema(), null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, 2);
// first attribute is the sum of the first and second input attributes
// others are copied across from in to out.
Tuple r1 = TEST_SCHEMA_SF.getTuple(new Object[] {32, (short) 25, 34535L});
Tuple r2 = TEST_SCHEMA_SF.getTuple(new Object[] {5, (short) 3, 654932L});
Condition<List<Tuple>> viaPythonResult = tester.tupleContents(viaPython,
r1, r2);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.valid());
assertTrue(viaPythonResult.toString(), viaPythonResult.valid());
}
@Test
public void testStatefulOperator() throws Exception {
Topology topology = new Topology("testPositionalSampleSimpleFilterUsingSPLType");
SPLStream tuples = testTupleStream(topology, false);
addTestToolkit(tuples);
StreamSchema outSchema = tuples.getSchema().extend("int32", "sequence_using_py");
SPLStream viaPython = SPL.invokeOperator(
"com.ibm.streamsx.topology.pysamples.positional::AddSeq", tuples, outSchema, null);
// Add a second count to make sure that the states are independent.
SPLStream filtered = tuples.filter(t -> t.getInt("i32") < 10000);
SPLStream viaPythonFiltered = SPL.invokeOperator(
"com.ibm.streamsx.topology.pysamples.positional::AddSeq", filtered, outSchema, null);
Tester tester = topology.getTester();
Condition<Long> expectedCount = tester.tupleCount(viaPython, TUPLE_COUNT);
Condition<List<Tuple>> outTuples = tester.tupleContents(viaPython);
Condition<List<Tuple>> outFilteredTuples = tester.tupleContents(viaPythonFiltered);
complete(tester, expectedCount, 10, TimeUnit.SECONDS);
assertTrue(expectedCount.valid());
List<Tuple> result = outTuples.getResult();
assertEquals(TUPLE_COUNT, result.size());
for (int i = 0; i < TUPLE_COUNT; i++)
assertEquals(i, result.get(i).getInt("sequence_using_py"));
List<Tuple> filteredResult = outFilteredTuples.getResult();
assertTrue(filteredResult.size() <= TUPLE_COUNT);
for (int i = 0; i < filteredResult.size(); i++)
assertEquals(i, filteredResult.get(i).getInt("sequence_using_py"));
}
}
| Assert extract was successful
| test/java/src/com/ibm/streamsx/topology/test/splpy/PythonFunctionalOperatorsTest.java | Assert extract was successful | <ide><path>est/java/src/com/ibm/streamsx/topology/test/splpy/PythonFunctionalOperatorsTest.java
<ide> // Need to run extract to ensure the operators match the python
<ide> // version we are testing.
<ide> File toolkitRoot = new File(getTestRoot(), "python/spl/testtkpy");
<del> PythonExtractTest.extract(toolkitRoot, true);
<add> int rc = PythonExtractTest.extract(toolkitRoot, true);
<add> assertEquals(0, rc);
<ide> SPL.addToolkit(te, toolkitRoot);
<ide> }
<ide> |
|
Java | bsd-3-clause | 370bd67cee2946cf21629b3e733860675b31cbbb | 0 | daonb/obudget,daonb/obudget,daonb/obudget | package org.obudget.client;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.http.client.UrlBuilder;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.jsonp.client.JsonpRequestBuilder;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.rpc.AsyncCallback;
class BudgetAPICaller extends JsonpRequestBuilder {
private UrlBuilder url;
public BudgetAPICaller() {
url = new UrlBuilder();
url.setHost("127.0.0.1");
url.setPort(8000);
url.setPath("00");
}
public void setCode( String code ) {
url.setPath(code);
}
public void setParameter( String key, String value ) {
url.setParameter(key, value);
}
public void go( final BudgetAPICallback callback ) {
requestObject(url.buildString(), new AsyncCallback<JavaScriptObject>() {
@Override
public void onSuccess(JavaScriptObject result) {
JSONArray array = new JSONArray(result);
callback.onSuccess(array);
}
@Override
public void onFailure(Throwable caught) {
Window.alert("Failed to access API: "+caught.getMessage());
}
});
}
}
| src/obudget/gwt/src/org/obudget/client/BudgetAPICaller.java | package org.obudget.client;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.http.client.UrlBuilder;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.jsonp.client.JsonpRequestBuilder;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.rpc.AsyncCallback;
class BudgetAPICaller extends JsonpRequestBuilder {
private UrlBuilder url;
public BudgetAPICaller() {
url = new UrlBuilder();
url.setHost("127.0.0.1");
url.setPort(12345);
url.setPath("00");
}
public void setCode( String code ) {
url.setPath(code);
}
public void setParameter( String key, String value ) {
url.setParameter(key, value);
}
public void go( final BudgetAPICallback callback ) {
requestObject(url.buildString(), new AsyncCallback<JavaScriptObject>() {
@Override
public void onSuccess(JavaScriptObject result) {
JSONArray array = new JSONArray(result);
callback.onSuccess(array);
}
@Override
public void onFailure(Throwable caught) {
Window.alert("Failed to access API: "+caught.getMessage());
}
});
}
}
| using port 8000
| src/obudget/gwt/src/org/obudget/client/BudgetAPICaller.java | using port 8000 | <ide><path>rc/obudget/gwt/src/org/obudget/client/BudgetAPICaller.java
<ide> public BudgetAPICaller() {
<ide> url = new UrlBuilder();
<ide> url.setHost("127.0.0.1");
<del> url.setPort(12345);
<add> url.setPort(8000);
<ide> url.setPath("00");
<ide> }
<ide> |
|
JavaScript | bsd-3-clause | 57588fae748567e0d73e5a85dd2eb536d14e8caf | 0 | Ashoat/squadcal,Ashoat/squadcal,Ashoat/squadcal,Ashoat/squadcal,Ashoat/squadcal,Ashoat/squadcal,Ashoat/squadcal | // @flow
import classNames from 'classnames';
import invariant from 'invariant';
import _pickBy from 'lodash/fp/pickBy';
import * as React from 'react';
import {
deleteThreadActionTypes,
deleteThread,
changeThreadSettingsActionTypes,
changeThreadSettings,
} from 'lib/actions/thread-actions';
import { createLoadingStatusSelector } from 'lib/selectors/loading-selectors';
import { threadInfoSelector } from 'lib/selectors/thread-selectors';
import {
threadHasPermission,
threadTypeDescriptions,
robotextName,
} from 'lib/shared/thread-utils';
import {
type ThreadInfo,
threadTypes,
assertThreadType,
type ChangeThreadSettingsPayload,
type UpdateThreadRequest,
type LeaveThreadPayload,
threadPermissions,
type ThreadChanges,
} from 'lib/types/thread-types';
import type { UserInfos } from 'lib/types/user-types';
import {
useDispatchActionPromise,
useServerCall,
type DispatchActionPromise,
} from 'lib/utils/action-utils';
import { firstLine } from 'lib/utils/string-utils';
import { useSelector } from '../../redux/redux-utils';
import css from '../../style.css';
import Modal from '../modal.react';
import ColorPicker from './color-picker.react';
type TabType = 'general' | 'privacy' | 'delete';
type TabProps = {|
+name: string,
+tabType: TabType,
+selected: boolean,
+onClick: (tabType: TabType) => void,
|};
class Tab extends React.PureComponent<TabProps> {
render() {
const classNamesForTab = classNames({
[css['current-tab']]: this.props.selected,
[css['delete-tab']]:
this.props.selected && this.props.tabType === 'delete',
});
return (
<li className={classNamesForTab} onClick={this.onClick}>
<a>{this.props.name}</a>
</li>
);
}
onClick = () => {
return this.props.onClick(this.props.tabType);
};
}
type BaseProps = {|
+threadID: string,
+onClose: () => void,
|};
type Props = {|
...BaseProps,
+threadInfo: ThreadInfo,
+changeInProgress: boolean,
+viewerID: ?string,
+userInfos: UserInfos,
+dispatchActionPromise: DispatchActionPromise,
+deleteThread: (
threadID: string,
currentAccountPassword: string,
) => Promise<LeaveThreadPayload>,
+changeThreadSettings: (
update: UpdateThreadRequest,
) => Promise<ChangeThreadSettingsPayload>,
|};
type State = {|
+queuedChanges: ThreadChanges,
+errorMessage: string,
+accountPassword: string,
+currentTabType: TabType,
|};
class ThreadSettingsModal extends React.PureComponent<Props, State> {
nameInput: ?HTMLInputElement;
newThreadPasswordInput: ?HTMLInputElement;
accountPasswordInput: ?HTMLInputElement;
constructor(props: Props) {
super(props);
this.state = {
queuedChanges: Object.freeze({}),
errorMessage: '',
accountPassword: '',
currentTabType: 'general',
};
}
componentDidMount() {
invariant(this.nameInput, 'nameInput ref unset');
this.nameInput.focus();
}
componentDidUpdate(prevProps: Props) {
if (this.state.currentTabType !== 'delete') {
return;
}
const permissionForDeleteTab = this.hasPermissionForTab(
this.props.threadInfo,
'delete',
);
const prevPermissionForDeleteTab = this.hasPermissionForTab(
prevProps.threadInfo,
'delete',
);
if (!permissionForDeleteTab && prevPermissionForDeleteTab) {
this.setTab('general');
}
}
hasPermissionForTab(threadInfo: ThreadInfo, tab: TabType) {
if (tab === 'general') {
return threadHasPermission(threadInfo, threadPermissions.EDIT_THREAD);
} else if (tab === 'privacy') {
return threadHasPermission(
threadInfo,
threadPermissions.EDIT_PERMISSIONS,
);
} else if (tab === 'delete') {
return threadHasPermission(threadInfo, threadPermissions.DELETE_THREAD);
}
invariant(false, `invalid tab ${tab}`);
}
possiblyChangedValue(key: string) {
const valueChanged =
this.state.queuedChanges[key] !== null &&
this.state.queuedChanges[key] !== undefined;
return valueChanged
? this.state.queuedChanges[key]
: this.props.threadInfo[key];
}
namePlaceholder() {
return robotextName(
this.props.threadInfo,
this.props.viewerID,
this.props.userInfos,
);
}
changeQueued() {
return (
Object.keys(
_pickBy(
(value) => value !== null && value !== undefined,
// the lodash/fp libdef coerces the returned object's properties to the
// same type, which means it only works for object-as-maps $FlowFixMe
)(this.state.queuedChanges),
).length > 0
);
}
render() {
const inputDisabled =
this.props.changeInProgress ||
!this.hasPermissionForTab(
this.props.threadInfo,
this.state.currentTabType,
);
let mainContent = null;
if (this.state.currentTabType === 'general') {
mainContent = (
<div>
<div>
<div className={css['form-title']}>Thread name</div>
<div className={css['form-content']}>
<input
type="text"
value={firstLine(this.possiblyChangedValue('name'))}
placeholder={this.namePlaceholder()}
onChange={this.onChangeName}
disabled={inputDisabled}
ref={this.nameInputRef}
/>
</div>
</div>
<div className={css['form-textarea-container']}>
<div className={css['form-title']}>Description</div>
<div className={css['form-content']}>
<textarea
value={this.possiblyChangedValue('description')}
placeholder="Thread description"
onChange={this.onChangeDescription}
disabled={inputDisabled}
></textarea>
</div>
</div>
<div className={css['edit-thread-color-container']}>
<div className={`${css['form-title']} ${css['color-title']}`}>
Color
</div>
<div className={css['form-content']}>
<ColorPicker
id="edit-thread-color"
value={this.possiblyChangedValue('color')}
disabled={inputDisabled}
onChange={this.onChangeColor}
/>
</div>
</div>
</div>
);
} else if (this.state.currentTabType === 'privacy') {
mainContent = (
<div className={css['edit-thread-privacy-container']}>
<div className={css['modal-radio-selector']}>
<div className={css['form-title']}>Thread type</div>
<div className={css['form-enum-selector']}>
<div className={css['form-enum-container']}>
<input
type="radio"
name="edit-thread-type"
id="edit-thread-open"
value={threadTypes.CHAT_NESTED_OPEN}
checked={
this.possiblyChangedValue('type') ===
threadTypes.CHAT_NESTED_OPEN
}
onChange={this.onChangeThreadType}
disabled={inputDisabled}
/>
<div className={css['form-enum-option']}>
<label htmlFor="edit-thread-open">
Open
<span className={css['form-enum-description']}>
{threadTypeDescriptions[threadTypes.CHAT_NESTED_OPEN]}
</span>
</label>
</div>
</div>
<div className={css['form-enum-container']}>
<input
type="radio"
name="edit-thread-type"
id="edit-thread-closed"
value={threadTypes.CHAT_SECRET}
checked={
this.possiblyChangedValue('type') ===
threadTypes.CHAT_SECRET
}
onChange={this.onChangeThreadType}
disabled={inputDisabled}
/>
<div className={css['form-enum-option']}>
<label htmlFor="edit-thread-closed">
Secret
<span className={css['form-enum-description']}>
{threadTypeDescriptions[threadTypes.CHAT_SECRET]}
</span>
</label>
</div>
</div>
</div>
</div>
</div>
);
} else if (this.state.currentTabType === 'delete') {
mainContent = (
<>
<div>
<p className={css['italic']}>
Your thread will be permanently deleted. There is no way to
reverse this.
</p>
</div>
<div className={css['edit-thread-account-password']}>
<p className={css['confirm-account-password']}>
Please enter your account password to confirm your identity
</p>
<div className={css['form-title']}>Account password</div>
<div className={css['form-content']}>
<input
type="password"
placeholder="Personal account password"
value={this.state.accountPassword}
onChange={this.onChangeAccountPassword}
disabled={inputDisabled}
ref={this.accountPasswordInputRef}
/>
</div>
</div>
</>
);
}
let buttons = null;
if (this.state.currentTabType === 'delete') {
buttons = (
<input
type="submit"
value="Delete"
onClick={this.onDelete}
disabled={inputDisabled}
/>
);
} else {
buttons = (
<input
type="submit"
value="Save"
onClick={this.onSubmit}
disabled={inputDisabled || !this.changeQueued()}
/>
);
}
const tabs = [
<Tab
name="General"
tabType="general"
onClick={this.setTab}
selected={this.state.currentTabType === 'general'}
key="general"
/>,
];
// This UI needs to be updated to handle sidebars but we haven't gotten
// there yet. We'll probably end up ripping it out anyways, so for now we
// are just hiding the privacy tab for any thread that was created as a
// sidebar
const canSeePrivacyTab =
this.possiblyChangedValue('parentThreadID') &&
!this.props.threadInfo.sourceMessageID;
if (canSeePrivacyTab) {
tabs.push(
<Tab
name="Privacy"
tabType="privacy"
onClick={this.setTab}
selected={this.state.currentTabType === 'privacy'}
key="privacy"
/>,
);
}
const canDeleteThread = this.hasPermissionForTab(
this.props.threadInfo,
'delete',
);
if (canDeleteThread) {
tabs.push(
<Tab
name="Delete"
tabType="delete"
onClick={this.setTab}
selected={this.state.currentTabType === 'delete'}
key="delete"
/>,
);
}
return (
<Modal name="Thread settings" onClose={this.props.onClose} size="large">
<ul className={css['tab-panel']}>{tabs}</ul>
<div className={css['modal-body']}>
<form method="POST">
{mainContent}
<div className={css['form-footer']}>
{buttons}
<div className={css['modal-form-error']}>
{this.state.errorMessage}
</div>
</div>
</form>
</div>
</Modal>
);
}
setTab = (tabType: TabType) => {
this.setState({ currentTabType: tabType });
};
nameInputRef = (nameInput: ?HTMLInputElement) => {
this.nameInput = nameInput;
};
newThreadPasswordInputRef = (newThreadPasswordInput: ?HTMLInputElement) => {
this.newThreadPasswordInput = newThreadPasswordInput;
};
accountPasswordInputRef = (accountPasswordInput: ?HTMLInputElement) => {
this.accountPasswordInput = accountPasswordInput;
};
onChangeName = (event: SyntheticEvent<HTMLInputElement>) => {
const target = event.currentTarget;
const newValue =
target.value !== this.props.threadInfo.name ? target.value : undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
name: firstLine(newValue),
},
}));
};
onChangeDescription = (event: SyntheticEvent<HTMLTextAreaElement>) => {
const target = event.currentTarget;
const newValue =
target.value !== this.props.threadInfo.description
? target.value
: undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
description: newValue,
},
}));
};
onChangeColor = (color: string) => {
const newValue = color !== this.props.threadInfo.color ? color : undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
color: newValue,
},
}));
};
onChangeThreadType = (event: SyntheticEvent<HTMLInputElement>) => {
const uiValue = assertThreadType(parseInt(event.currentTarget.value, 10));
const newValue =
uiValue !== this.props.threadInfo.type ? uiValue : undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
type: newValue,
},
}));
};
onChangeAccountPassword = (event: SyntheticEvent<HTMLInputElement>) => {
const target = event.currentTarget;
this.setState({ accountPassword: target.value });
};
onSubmit = (event: SyntheticEvent<HTMLInputElement>) => {
event.preventDefault();
this.props.dispatchActionPromise(
changeThreadSettingsActionTypes,
this.changeThreadSettingsAction(),
);
};
async changeThreadSettingsAction() {
try {
const response = await this.props.changeThreadSettings({
threadID: this.props.threadInfo.id,
changes: this.state.queuedChanges,
});
this.props.onClose();
return response;
} catch (e) {
this.setState(
(prevState) => ({
...prevState,
queuedChanges: Object.freeze({}),
accountPassword: '',
errorMessage: 'unknown error',
currentTabType: 'general',
}),
() => {
invariant(this.nameInput, 'nameInput ref unset');
this.nameInput.focus();
},
);
throw e;
}
}
onDelete = (event: SyntheticEvent<HTMLInputElement>) => {
event.preventDefault();
this.props.dispatchActionPromise(
deleteThreadActionTypes,
this.deleteThreadAction(),
);
};
async deleteThreadAction() {
try {
const response = await this.props.deleteThread(
this.props.threadInfo.id,
this.state.accountPassword,
);
this.props.onClose();
return response;
} catch (e) {
const errorMessage =
e.message === 'invalid_credentials'
? 'wrong password'
: 'unknown error';
this.setState(
{
accountPassword: '',
errorMessage: errorMessage,
},
() => {
invariant(
this.accountPasswordInput,
'accountPasswordInput ref unset',
);
this.accountPasswordInput.focus();
},
);
throw e;
}
}
}
const deleteThreadLoadingStatusSelector = createLoadingStatusSelector(
deleteThreadActionTypes,
);
const changeThreadSettingsLoadingStatusSelector = createLoadingStatusSelector(
changeThreadSettingsActionTypes,
);
export default React.memo<BaseProps>(function ConnectedThreadSettingsModal(
props: BaseProps,
) {
const changeInProgress = useSelector(
(state) =>
deleteThreadLoadingStatusSelector(state) === 'loading' ||
changeThreadSettingsLoadingStatusSelector(state) === 'loading',
);
const viewerID = useSelector(
(state) => state.currentUserInfo && state.currentUserInfo.id,
);
const userInfos = useSelector((state) => state.userStore.userInfos);
const callDeleteThread = useServerCall(deleteThread);
const callChangeThreadSettings = useServerCall(changeThreadSettings);
const dispatchActionPromise = useDispatchActionPromise();
const threadInfo: ?ThreadInfo = useSelector(
(state) => threadInfoSelector(state)[props.threadID],
);
if (!threadInfo) {
return (
<Modal onClose={props.onClose} name="Invalid thread">
<div className={css['modal-body']}>
<p>You no longer have permission to view this thread</p>
</div>
</Modal>
);
}
return (
<ThreadSettingsModal
{...props}
threadInfo={threadInfo}
changeInProgress={changeInProgress}
viewerID={viewerID}
userInfos={userInfos}
deleteThread={callDeleteThread}
changeThreadSettings={callChangeThreadSettings}
dispatchActionPromise={dispatchActionPromise}
/>
);
});
| web/modals/threads/thread-settings-modal.react.js | // @flow
import classNames from 'classnames';
import invariant from 'invariant';
import _pickBy from 'lodash/fp/pickBy';
import * as React from 'react';
import {
deleteThreadActionTypes,
deleteThread,
changeThreadSettingsActionTypes,
changeThreadSettings,
} from 'lib/actions/thread-actions';
import { createLoadingStatusSelector } from 'lib/selectors/loading-selectors';
import { threadInfoSelector } from 'lib/selectors/thread-selectors';
import {
threadHasPermission,
threadTypeDescriptions,
robotextName,
} from 'lib/shared/thread-utils';
import {
type ThreadInfo,
threadTypes,
assertThreadType,
type ChangeThreadSettingsPayload,
type UpdateThreadRequest,
type LeaveThreadPayload,
threadPermissions,
type ThreadChanges,
} from 'lib/types/thread-types';
import type { UserInfos } from 'lib/types/user-types';
import {
useDispatchActionPromise,
useServerCall,
type DispatchActionPromise,
} from 'lib/utils/action-utils';
import { firstLine } from 'lib/utils/string-utils';
import { useSelector } from '../../redux/redux-utils';
import css from '../../style.css';
import Modal from '../modal.react';
import ColorPicker from './color-picker.react';
type TabType = 'general' | 'privacy' | 'delete';
type TabProps = {|
+name: string,
+tabType: TabType,
+selected: boolean,
+onClick: (tabType: TabType) => void,
|};
class Tab extends React.PureComponent<TabProps> {
render() {
const classNamesForTab = classNames({
[css['current-tab']]: this.props.selected,
[css['delete-tab']]:
this.props.selected && this.props.tabType === 'delete',
});
return (
<li className={classNamesForTab} onClick={this.onClick}>
<a>{this.props.name}</a>
</li>
);
}
onClick = () => {
return this.props.onClick(this.props.tabType);
};
}
type BaseProps = {|
+threadID: string,
+onClose: () => void,
|};
type Props = {|
...BaseProps,
+threadInfo: ThreadInfo,
+changeInProgress: boolean,
+viewerID: ?string,
+userInfos: UserInfos,
+dispatchActionPromise: DispatchActionPromise,
+deleteThread: (
threadID: string,
currentAccountPassword: string,
) => Promise<LeaveThreadPayload>,
+changeThreadSettings: (
update: UpdateThreadRequest,
) => Promise<ChangeThreadSettingsPayload>,
|};
type State = {|
+queuedChanges: ThreadChanges,
+errorMessage: string,
+accountPassword: string,
+currentTabType: TabType,
|};
class ThreadSettingsModal extends React.PureComponent<Props, State> {
nameInput: ?HTMLInputElement;
newThreadPasswordInput: ?HTMLInputElement;
accountPasswordInput: ?HTMLInputElement;
constructor(props: Props) {
super(props);
this.state = {
queuedChanges: Object.freeze({}),
errorMessage: '',
accountPassword: '',
currentTabType: 'general',
};
}
componentDidMount() {
invariant(this.nameInput, 'nameInput ref unset');
this.nameInput.focus();
}
componentDidUpdate(prevProps: Props) {
if (this.state.currentTabType !== 'delete') {
return;
}
const permissionForDeleteTab = this.hasPermissionForTab(
this.props.threadInfo,
'delete',
);
const prevPermissionForDeleteTab = this.hasPermissionForTab(
prevProps.threadInfo,
'delete',
);
if (!permissionForDeleteTab && prevPermissionForDeleteTab) {
this.setTab('general');
}
}
hasPermissionForTab(threadInfo: ThreadInfo, tab: TabType) {
if (tab === 'general') {
return threadHasPermission(threadInfo, threadPermissions.EDIT_THREAD);
} else if (tab === 'privacy') {
return threadHasPermission(
threadInfo,
threadPermissions.EDIT_PERMISSIONS,
);
} else if (tab === 'delete') {
return threadHasPermission(threadInfo, threadPermissions.DELETE_THREAD);
}
invariant(false, `invalid tab ${tab}`);
}
possiblyChangedValue(key: string) {
const valueChanged =
this.state.queuedChanges[key] !== null &&
this.state.queuedChanges[key] !== undefined;
return valueChanged
? this.state.queuedChanges[key]
: this.props.threadInfo[key];
}
namePlaceholder() {
return robotextName(
this.props.threadInfo,
this.props.viewerID,
this.props.userInfos,
);
}
changeQueued() {
return (
Object.keys(
_pickBy(
(value) => value !== null && value !== undefined,
// the lodash/fp libdef coerces the returned object's properties to the
// same type, which means it only works for object-as-maps $FlowFixMe
)(this.state.queuedChanges),
).length > 0
);
}
render() {
const inputDisabled =
this.props.changeInProgress ||
!this.hasPermissionForTab(
this.props.threadInfo,
this.state.currentTabType,
);
let mainContent = null;
if (this.state.currentTabType === 'general') {
mainContent = (
<div>
<div>
<div className={css['form-title']}>Thread name</div>
<div className={css['form-content']}>
<input
type="text"
value={firstLine(this.possiblyChangedValue('name'))}
placeholder={this.namePlaceholder()}
onChange={this.onChangeName}
disabled={inputDisabled}
ref={this.nameInputRef}
/>
</div>
</div>
<div className={css['form-textarea-container']}>
<div className={css['form-title']}>Description</div>
<div className={css['form-content']}>
<textarea
value={this.possiblyChangedValue('description')}
placeholder="Thread description"
onChange={this.onChangeDescription}
disabled={inputDisabled}
></textarea>
</div>
</div>
<div className={css['edit-thread-color-container']}>
<div className={`${css['form-title']} ${css['color-title']}`}>
Color
</div>
<div className={css['form-content']}>
<ColorPicker
id="edit-thread-color"
value={this.possiblyChangedValue('color')}
disabled={inputDisabled}
onChange={this.onChangeColor}
/>
</div>
</div>
</div>
);
} else if (this.state.currentTabType === 'privacy') {
mainContent = (
<div className={css['edit-thread-privacy-container']}>
<div className={css['modal-radio-selector']}>
<div className={css['form-title']}>Thread type</div>
<div className={css['form-enum-selector']}>
<div className={css['form-enum-container']}>
<input
type="radio"
name="edit-thread-type"
id="edit-thread-open"
value={threadTypes.CHAT_NESTED_OPEN}
checked={
this.possiblyChangedValue('type') ===
threadTypes.CHAT_NESTED_OPEN
}
onChange={this.onChangeThreadType}
disabled={inputDisabled}
/>
<div className={css['form-enum-option']}>
<label htmlFor="edit-thread-open">
Open
<span className={css['form-enum-description']}>
{threadTypeDescriptions[threadTypes.CHAT_NESTED_OPEN]}
</span>
</label>
</div>
</div>
<div className={css['form-enum-container']}>
<input
type="radio"
name="edit-thread-type"
id="edit-thread-closed"
value={threadTypes.CHAT_SECRET}
checked={
this.possiblyChangedValue('type') ===
threadTypes.CHAT_SECRET
}
onChange={this.onChangeThreadType}
disabled={inputDisabled}
/>
<div className={css['form-enum-option']}>
<label htmlFor="edit-thread-closed">
Secret
<span className={css['form-enum-description']}>
{threadTypeDescriptions[threadTypes.CHAT_SECRET]}
</span>
</label>
</div>
</div>
</div>
</div>
</div>
);
} else if (this.state.currentTabType === 'delete') {
mainContent = (
<>
<div>
<p className={css['italic']}>
Your thread will be permanently deleted. There is no way to
reverse this.
</p>
</div>
<div className={css['edit-thread-account-password']}>
<p className={css['confirm-account-password']}>
Please enter your account password to confirm your identity
</p>
<div className={css['form-title']}>Account password</div>
<div className={css['form-content']}>
<input
type="password"
placeholder="Personal account password"
value={this.state.accountPassword}
onChange={this.onChangeAccountPassword}
disabled={inputDisabled}
ref={this.accountPasswordInputRef}
/>
</div>
</div>
</>
);
}
let buttons = null;
if (this.state.currentTabType === 'delete') {
buttons = (
<input
type="submit"
value="Delete"
onClick={this.onDelete}
disabled={inputDisabled}
/>
);
} else {
buttons = (
<input
type="submit"
value="Save"
onClick={this.onSubmit}
disabled={inputDisabled || !this.changeQueued()}
/>
);
}
const tabs = [
<Tab
name="General"
tabType="general"
onClick={this.setTab}
selected={this.state.currentTabType === 'general'}
key="general"
/>,
];
if (this.possiblyChangedValue('parentThreadID')) {
tabs.push(
<Tab
name="Privacy"
tabType="privacy"
onClick={this.setTab}
selected={this.state.currentTabType === 'privacy'}
key="privacy"
/>,
);
}
const canDeleteThread = this.hasPermissionForTab(
this.props.threadInfo,
'delete',
);
if (canDeleteThread) {
tabs.push(
<Tab
name="Delete"
tabType="delete"
onClick={this.setTab}
selected={this.state.currentTabType === 'delete'}
key="delete"
/>,
);
}
return (
<Modal name="Thread settings" onClose={this.props.onClose} size="large">
<ul className={css['tab-panel']}>{tabs}</ul>
<div className={css['modal-body']}>
<form method="POST">
{mainContent}
<div className={css['form-footer']}>
{buttons}
<div className={css['modal-form-error']}>
{this.state.errorMessage}
</div>
</div>
</form>
</div>
</Modal>
);
}
setTab = (tabType: TabType) => {
this.setState({ currentTabType: tabType });
};
nameInputRef = (nameInput: ?HTMLInputElement) => {
this.nameInput = nameInput;
};
newThreadPasswordInputRef = (newThreadPasswordInput: ?HTMLInputElement) => {
this.newThreadPasswordInput = newThreadPasswordInput;
};
accountPasswordInputRef = (accountPasswordInput: ?HTMLInputElement) => {
this.accountPasswordInput = accountPasswordInput;
};
onChangeName = (event: SyntheticEvent<HTMLInputElement>) => {
const target = event.currentTarget;
const newValue =
target.value !== this.props.threadInfo.name ? target.value : undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
name: firstLine(newValue),
},
}));
};
onChangeDescription = (event: SyntheticEvent<HTMLTextAreaElement>) => {
const target = event.currentTarget;
const newValue =
target.value !== this.props.threadInfo.description
? target.value
: undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
description: newValue,
},
}));
};
onChangeColor = (color: string) => {
const newValue = color !== this.props.threadInfo.color ? color : undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
color: newValue,
},
}));
};
onChangeThreadType = (event: SyntheticEvent<HTMLInputElement>) => {
const uiValue = assertThreadType(parseInt(event.currentTarget.value, 10));
const newValue =
uiValue !== this.props.threadInfo.type ? uiValue : undefined;
this.setState((prevState: State) => ({
...prevState,
queuedChanges: {
...prevState.queuedChanges,
type: newValue,
},
}));
};
onChangeAccountPassword = (event: SyntheticEvent<HTMLInputElement>) => {
const target = event.currentTarget;
this.setState({ accountPassword: target.value });
};
onSubmit = (event: SyntheticEvent<HTMLInputElement>) => {
event.preventDefault();
this.props.dispatchActionPromise(
changeThreadSettingsActionTypes,
this.changeThreadSettingsAction(),
);
};
async changeThreadSettingsAction() {
try {
const response = await this.props.changeThreadSettings({
threadID: this.props.threadInfo.id,
changes: this.state.queuedChanges,
});
this.props.onClose();
return response;
} catch (e) {
this.setState(
(prevState) => ({
...prevState,
queuedChanges: Object.freeze({}),
accountPassword: '',
errorMessage: 'unknown error',
currentTabType: 'general',
}),
() => {
invariant(this.nameInput, 'nameInput ref unset');
this.nameInput.focus();
},
);
throw e;
}
}
onDelete = (event: SyntheticEvent<HTMLInputElement>) => {
event.preventDefault();
this.props.dispatchActionPromise(
deleteThreadActionTypes,
this.deleteThreadAction(),
);
};
async deleteThreadAction() {
try {
const response = await this.props.deleteThread(
this.props.threadInfo.id,
this.state.accountPassword,
);
this.props.onClose();
return response;
} catch (e) {
const errorMessage =
e.message === 'invalid_credentials'
? 'wrong password'
: 'unknown error';
this.setState(
{
accountPassword: '',
errorMessage: errorMessage,
},
() => {
invariant(
this.accountPasswordInput,
'accountPasswordInput ref unset',
);
this.accountPasswordInput.focus();
},
);
throw e;
}
}
}
const deleteThreadLoadingStatusSelector = createLoadingStatusSelector(
deleteThreadActionTypes,
);
const changeThreadSettingsLoadingStatusSelector = createLoadingStatusSelector(
changeThreadSettingsActionTypes,
);
export default React.memo<BaseProps>(function ConnectedThreadSettingsModal(
props: BaseProps,
) {
const changeInProgress = useSelector(
(state) =>
deleteThreadLoadingStatusSelector(state) === 'loading' ||
changeThreadSettingsLoadingStatusSelector(state) === 'loading',
);
const viewerID = useSelector(
(state) => state.currentUserInfo && state.currentUserInfo.id,
);
const userInfos = useSelector((state) => state.userStore.userInfos);
const callDeleteThread = useServerCall(deleteThread);
const callChangeThreadSettings = useServerCall(changeThreadSettings);
const dispatchActionPromise = useDispatchActionPromise();
const threadInfo: ?ThreadInfo = useSelector(
(state) => threadInfoSelector(state)[props.threadID],
);
if (!threadInfo) {
return (
<Modal onClose={props.onClose} name="Invalid thread">
<div className={css['modal-body']}>
<p>You no longer have permission to view this thread</p>
</div>
</Modal>
);
}
return (
<ThreadSettingsModal
{...props}
threadInfo={threadInfo}
changeInProgress={changeInProgress}
viewerID={viewerID}
userInfos={userInfos}
deleteThread={callDeleteThread}
changeThreadSettings={callChangeThreadSettings}
dispatchActionPromise={dispatchActionPromise}
/>
);
});
| [web] Don't show Privacy tab in ThreadSettingsModal for sidebars
Summary: More accurately: for threads created as sidebars. More context [here](https://phabricator.ashoat.com/D1000?id=3007#inline-5622).
Test Plan: Check `ThreadSettingsModal` for a thread created as a sidebar
Reviewers: KatPo, palys-swm
Reviewed By: KatPo, palys-swm
Subscribers: Adrian, atul
Differential Revision: https://phabricator.ashoat.com/D1007
| web/modals/threads/thread-settings-modal.react.js | [web] Don't show Privacy tab in ThreadSettingsModal for sidebars | <ide><path>eb/modals/threads/thread-settings-modal.react.js
<ide> key="general"
<ide> />,
<ide> ];
<del> if (this.possiblyChangedValue('parentThreadID')) {
<add>
<add> // This UI needs to be updated to handle sidebars but we haven't gotten
<add> // there yet. We'll probably end up ripping it out anyways, so for now we
<add> // are just hiding the privacy tab for any thread that was created as a
<add> // sidebar
<add> const canSeePrivacyTab =
<add> this.possiblyChangedValue('parentThreadID') &&
<add> !this.props.threadInfo.sourceMessageID;
<add>
<add> if (canSeePrivacyTab) {
<ide> tabs.push(
<ide> <Tab
<ide> name="Privacy" |
|
Java | agpl-3.0 | b5e4e2537eb79b0ffc72f5e4805961f54f09f6cb | 0 | PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver | package nl.mpi.kinnate.ui;
import nl.mpi.arbil.util.ApplicationVersionManager;
import nl.mpi.kinnate.KinOathVersion;
import nl.mpi.kinnate.KinnateArbilInjector;
import nl.mpi.kinnate.ui.window.AbstractDiagramManager;
import nl.mpi.kinnate.ui.window.WindowedDiagramManager;
/*
* Document : MainFrame
* Author : Peter Withers
* Created on : Aug 16, 2010, 5:20:20 PM
*/
public class MainFrame extends javax.swing.JFrame {
/** Creates new form MainFrame */
public MainFrame() {
initComponents();
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
pack();
}// </editor-fold>//GEN-END:initComponents
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
final ApplicationVersionManager versionManager = new ApplicationVersionManager(new KinOathVersion());
final KinnateArbilInjector injector = new KinnateArbilInjector();
injector.injectHandlers(versionManager);
AbstractDiagramManager abstractDiagramManager;
// abstractDiagramManager = new LayeredDiagramManager(versionManager);
// abstractDiagramManager = new TabbedDiagramManager(versionManager);
abstractDiagramManager = new WindowedDiagramManager(versionManager, injector.getWindowManager(), injector.getSessionStorage(), injector.getDataNodeLoader(), injector.getTreeHelper(), injector.getEntityCollection());
abstractDiagramManager.newDiagram();
abstractDiagramManager.createApplicationWindow();
injector.getWindowManager().setMessagesCanBeShown(true);
// if (arbilMenuBar.checkNewVersionAtStartCheckBoxMenuItem.isSelected()) {
// todo: Ticket #1066 add the check for updates and check now menu items
versionManager.checkForUpdate();
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
// End of variables declaration//GEN-END:variables
}
| desktop/src/main/java/nl/mpi/kinnate/ui/MainFrame.java | package nl.mpi.kinnate.ui;
import nl.mpi.arbil.util.ApplicationVersionManager;
import nl.mpi.kinnate.KinOathVersion;
import nl.mpi.kinnate.KinnateArbilInjector;
import nl.mpi.kinnate.ui.window.AbstractDiagramManager;
import nl.mpi.kinnate.ui.window.WindowedDiagramManager;
/*
* Document : MainFrame
* Author : Peter Withers
* Created on : Aug 16, 2010, 5:20:20 PM
*/
public class MainFrame extends javax.swing.JFrame {
/** Creates new form MainFrame */
public MainFrame() {
initComponents();
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
pack();
}// </editor-fold>//GEN-END:initComponents
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
final ApplicationVersionManager versionManager = new ApplicationVersionManager(new KinOathVersion());
final KinnateArbilInjector injector = new KinnateArbilInjector();
injector.injectHandlers(versionManager);
AbstractDiagramManager abstractDiagramManager;
// abstractDiagramManager = new LayeredDiagramManager(versionManager);
// abstractDiagramManager = new TabbedDiagramManager(versionManager);
abstractDiagramManager = new WindowedDiagramManager(versionManager, injector.getWindowManager(), injector.getSessionStorage(), injector.getDataNodeLoader(), injector.getTreeHelper(), injector.getEntityCollection());
abstractDiagramManager.newDiagram();
abstractDiagramManager.createApplicationWindow();
injector.getWindowManager().setMessagesCanBeShown(true);
// if (arbilMenuBar.checkNewVersionAtStartCheckBoxMenuItem.isSelected()) {
// todo: Ticket #1066 add the check for updates and check now menu items
versionManager.checkForUpdate();
abstractDiagramManager.loadAllTrees();
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
// End of variables declaration//GEN-END:variables
}
| Made changes to address the issue of 'Import Gedcom/CSV File' failing.
refs #1876
| desktop/src/main/java/nl/mpi/kinnate/ui/MainFrame.java | Made changes to address the issue of 'Import Gedcom/CSV File' failing. refs #1876 | <ide><path>esktop/src/main/java/nl/mpi/kinnate/ui/MainFrame.java
<ide> import nl.mpi.kinnate.ui.window.WindowedDiagramManager;
<ide>
<ide> /*
<del> * Document : MainFrame
<del> * Author : Peter Withers
<del> * Created on : Aug 16, 2010, 5:20:20 PM
<add> * Document : MainFrame
<add> * Author : Peter Withers
<add> * Created on : Aug 16, 2010, 5:20:20 PM
<ide> */
<ide> public class MainFrame extends javax.swing.JFrame {
<ide>
<ide> // if (arbilMenuBar.checkNewVersionAtStartCheckBoxMenuItem.isSelected()) {
<ide> // todo: Ticket #1066 add the check for updates and check now menu items
<ide> versionManager.checkForUpdate();
<del> abstractDiagramManager.loadAllTrees();
<ide> }
<ide> });
<ide> } |
|
JavaScript | bsd-3-clause | 7873d5ba3cb555881439186d1520329fd668ae03 | 0 | lognormal/boomerang,lognormal/boomerang | /*
* Copyright (c) 2011, Yahoo! Inc. All rights reserved.
* Copyright (c) 2012, Log-Normal, Inc. All rights reserved.
* Copyrights licensed under the BSD License. See the accompanying LICENSE.txt file for terms.
*/
// This is the Round Trip Time plugin. Abbreviated to RT
// the parameter is the window
(function(w) {
var d=w.document, impl;
BOOMR = BOOMR || {};
BOOMR.plugins = BOOMR.plugins || {};
// private object
impl = {
onloadfired: false, //! Set when the page_ready event fires
// Use this to determine if unload fires before onload
unloadfired: false, //! Set when the first unload event fires
// Use this to make sure we don't beacon twice for beforeunload and unload
visiblefired: false, //! Set when page becomes visible (Chrome/IE)
// Use this to determine if user bailed without opening the tab
initialized: false, //! Set when init has completed to prevent double initialization
complete: false, //! Set when this plugin has completed
timers: {}, //! Custom timers that the developer can use
// Format for each timer is { start: XXX, end: YYY, delta: YYY-XXX }
cookie: 'RT', //! Name of the cookie that stores the start time and referrer
cookie_exp:600, //! Cookie expiry in seconds
strict_referrer: true, //! By default, don't beacon if referrers don't match.
// If set to false, beacon both referrer values and let
// the back end decide
navigationType: 0, // Navigation Type from the NavTiming API. We mainly care if this was BACK_FORWARD
// since cookie time will be incorrect in that case
navigationStart: undefined,
responseStart: undefined,
t_start: undefined, // t_start that came off the cookie
t_fb_approx: undefined, // approximate first byte time for browsers that don't support navtiming
r: undefined, // referrer from the cookie
r2: undefined, // referrer from document.referer
/**
* Merge new cookie `params` onto current cookie, and set `timer` param on cookie to current timestamp
* @param params object containing keys & values to merge onto current cookie. A value of `undefined`
* will remove the key from the cookie
* @param timer string key name that will be set to the current timestamp on the cookie
*
* @returns true if the cookie was updated, false if the cookie could not be set for any reason
*/
updateCookie: function(params, timer) {
var t_end, t_start, subcookies, k;
// Disable use of RT cookie by setting its name to a falsy value
if(!this.cookie) {
return false;
}
subcookies = BOOMR.utils.getSubCookies(BOOMR.utils.getCookie(this.cookie)) || {};
if (typeof params === "object") {
for(k in params) {
if(params.hasOwnProperty(k)) {
if (params[k] === undefined ) {
if (subcookies.hasOwnProperty(k)) {
delete subcookies[k];
}
}
else {
if (k==="nu" || k==="r") {
params[k] = BOOMR.utils.hashQueryString(params[k], true);
}
subcookies[k] = params[k];
}
}
}
}
t_start = new Date().getTime();
if(timer) {
subcookies[timer] = t_start;
}
BOOMR.debug("Setting cookie (timer=" + timer + ")\n" + BOOMR.utils.objectToString(subcookies), "rt");
if(!BOOMR.utils.setCookie(this.cookie, subcookies, this.cookie_exp)) {
BOOMR.error("cannot set start cookie", "rt");
return false;
}
t_end = new Date().getTime();
if(t_end - t_start > 50) {
// It took > 50ms to set the cookie
// The user Most likely has cookie prompting turned on so
// t_start won't be the actual unload time
// We bail at this point since we can't reliably tell t_done
BOOMR.utils.removeCookie(this.cookie);
// at some point we may want to log this info on the server side
BOOMR.error("took more than 50ms to set cookie... aborting: "
+ t_start + " -> " + t_end, "rt");
}
return true;
},
/**
* Read initial values from cookie and clear out cookie values it cares about after reading.
* This makes sure that other pages (eg: loaded in new tabs) do not get an invalid cookie time.
* This method should only be called from init, and may be called more than once.
*
* Request start time is the greater of last page beforeunload or last click time
* If start time came from a click, we check that the clicked URL matches the current URL
* If it came from a beforeunload, we check that cookie referrer matches document.referrer
*
* If we had a pageHide time or unload time, we use that as a proxy for first byte on non-navtiming
* browsers.
*/
initFromCookie: function() {
var url, subcookies;
subcookies = BOOMR.utils.getSubCookies(BOOMR.utils.getCookie(this.cookie));
if(!subcookies) {
return;
}
subcookies.s = Math.max(+subcookies.ul||0, +subcookies.cl||0);
BOOMR.debug("Read from cookie " + BOOMR.utils.objectToString(subcookies), "rt");
// If we have a start time, and either a referrer, or a clicked on URL,
// we check if the start time is usable
if(subcookies.s && (subcookies.r || subcookies.nu)) {
this.r = subcookies.r;
url = BOOMR.utils.hashQueryString(d.URL, true);
// Either the URL of the page setting the cookie needs to match document.referrer
BOOMR.debug(this.r + " =?= " + this.r2, "rt");
// Or the start timer was no more than 15ms after a click or form submit
// and the URL clicked or submitted to matches the current page's URL
// (note the start timer may be later than click if both click and beforeunload fired
// on the previous page)
BOOMR.debug(subcookies.s + " <? " + (+subcookies.cl+15), "rt");
BOOMR.debug(subcookies.nu + " =?= " + url, "rt");
if (!this.strict_referrer ||
(subcookies.nu && subcookies.nu === url && subcookies.s < +subcookies.cl + 15) ||
(subcookies.s === +subcookies.ul && this.r === this.r2)
) {
this.t_start = subcookies.s;
// additionally, if we have a pagehide, or unload event, that's a proxy
// for the first byte of the current page, so use that wisely
if(+subcookies.hd > subcookies.s) {
this.t_fb_approx = parseInt(subcookies.hd, 10);
}
}
else {
this.t_start = this.t_fb_approx = undefined;
}
}
// Now that we've pulled out the timers, we'll clear them so they don't pollute future calls
this.updateCookie({
s: undefined, // start timer
r: undefined, // referrer
nu: undefined, // clicked url
ul: undefined, // onbeforeunload time
cl: undefined, // onclick time
hd: undefined // onunload or onpagehide time
});
},
/**
* Figure out how long boomerang and config.js took to load using resource timing if available, or built in timestamps
*/
getBoomerangTimings: function() {
var res, k, urls, url;
if(BOOMR.t_start) {
// How long does it take Boomerang to load up and execute (fb to lb)?
BOOMR.plugins.RT.startTimer('boomerang', BOOMR.t_start);
BOOMR.plugins.RT.endTimer('boomerang', BOOMR.t_end); // t_end === null defaults to current time
// How long did it take from page request to boomerang fb?
BOOMR.plugins.RT.endTimer('boomr_fb', BOOMR.t_start);
if(BOOMR.t_lstart) {
// when did the boomerang loader start loading boomerang on the page?
BOOMR.plugins.RT.endTimer('boomr_ld', BOOMR.t_lstart);
// What was the network latency for boomerang (request to first byte)?
BOOMR.plugins.RT.setTimer('boomr_lat', BOOMR.t_start - BOOMR.t_lstart);
}
}
// use window and not w because we want the inner iframe
if (window.performance && window.performance.getEntriesByName) {
urls = { "rt.bmr." : BOOMR.url };
for(url in urls) {
if(urls.hasOwnProperty(url) && urls[url]) {
res = window.performance.getEntriesByName(urls[url]);
if(!res || res.length === 0) {
continue;
}
res = res[0];
for(k in res) {
if(res.hasOwnProperty(k) && k.match(/(Start|End)$/) && res[k] > 0) {
BOOMR.addVar(url + k.replace(/^(...).*(St|En).*$/, '$1$2'), res[k]);
}
}
}
}
}
},
page_ready: function() {
// we need onloadfired because it's possible to reset "impl.complete"
// if you're measuring multiple xhr loads, but not possible to reset
// impl.onloadfired
this.onloadfired = true;
},
visibility_changed: function() {
// we care if the page became visible at some point
if(!(d.hidden || d.msHidden || d.webkitHidden)) {
impl.visiblefired = true;
}
},
/**
* Check if we're in a prerender state, and if we are, set additional timers.
* In Chrome/IE, a prerender state is when a page is completely rendered in an in-memory buffer, before
* a user requests that page. We do not beacon at this point because the user has not shown intent
* to view the page. If the user opens the page, the visibility state changes to visible, and we
* fire the beacon at that point, including any timing details for prerendering.
*
* Sets the `t_load` timer to the actual value of page load time (request initiated by browser to onload)
*
* @returns true if this is a prerender state, false if not (or not supported)
*/
checkPreRender: function() {
if(
!(d.webkitVisibilityState && d.webkitVisibilityState === "prerender")
&&
!(d.msVisibilityState && d.msVisibilityState === 3)
) {
return false;
}
// This means that onload fired through a pre-render. We'll capture this
// time, but wait for t_done until after the page has become either visible
// or hidden (ie, it moved out of the pre-render state)
// http://code.google.com/chrome/whitepapers/pagevisibility.html
// http://www.w3.org/TR/2011/WD-page-visibility-20110602/
// http://code.google.com/chrome/whitepapers/prerender.html
BOOMR.plugins.RT.startTimer("t_load", this.navigationStart);
BOOMR.plugins.RT.endTimer("t_load"); // this will measure actual onload time for a prerendered page
BOOMR.plugins.RT.startTimer("t_prerender", this.navigationStart);
BOOMR.plugins.RT.startTimer("t_postrender"); // time from prerender to visible or hidden
BOOMR.subscribe("visibility_changed", BOOMR.plugins.RT.done, "visible", BOOMR.plugins.RT);
return true;
},
/**
* Initialise timers from the NavigationTiming API. This method looks at various sources for
* Navigation Timing, and also patches around bugs in various browser implementations.
* It sets the beacon parameter `rt.start` to the source of the timer
*/
initNavTiming: function() {
var ti, p, source;
if(this.navigationStart) {
return;
}
// Get start time from WebTiming API see:
// https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/NavigationTiming/Overview.html
// http://blogs.msdn.com/b/ie/archive/2010/06/28/measuring-web-page-performance.aspx
// http://blog.chromium.org/2010/07/do-you-know-how-slow-your-web-page-is.html
p = w.performance || w.msPerformance || w.webkitPerformance || w.mozPerformance;
if(p && p.navigation) {
this.navigationType = p.navigation.type;
}
if(p && p.timing) {
ti = p.timing;
}
else if(w.chrome && w.chrome.csi && w.chrome.csi().startE) {
// Older versions of chrome also have a timing API that's sort of documented here:
// http://ecmanaut.blogspot.com/2010/06/google-bom-feature-ms-since-pageload.html
// source here:
// http://src.chromium.org/viewvc/chrome/trunk/src/chrome/renderer/loadtimes_extension_bindings.cc?view=markup
ti = {
navigationStart: w.chrome.csi().startE
};
source = "csi";
}
else if(w.gtbExternal && w.gtbExternal.startE()) {
// The Google Toolbar exposes navigation start time similar to old versions of chrome
// This would work for any browser that has the google toolbar installed
ti = {
navigationStart: w.gtbExternal.startE()
};
source = 'gtb';
}
if(ti) {
// Always use navigationStart since it falls back to fetchStart (not with redirects)
// If not set, we leave t_start alone so that timers that depend
// on it don't get sent back. Never use requestStart since if
// the first request fails and the browser retries, it will contain
// the value for the new request.
BOOMR.addVar("rt.start", source || "navigation");
this.navigationStart = ti.navigationStart || ti.fetchStart || undefined;
this.responseStart = ti.responseStart || undefined;
// bug in Firefox 7 & 8 https://bugzilla.mozilla.org/show_bug.cgi?id=691547
if(navigator.userAgent.match(/Firefox\/[78]\./)) {
this.navigationStart = ti.unloadEventStart || ti.fetchStart || undefined;
}
}
else {
BOOMR.warn("This browser doesn't support the WebTiming API", "rt");
}
return;
},
page_unload: function(edata) {
BOOMR.debug("Unload called with " + BOOMR.utils.objectToString(edata) + " when unloadfired = " + this.unloadfired, "rt");
if(!this.unloadfired) {
// run done on abort or on page_unload to measure session length
BOOMR.plugins.RT.done(edata, "unload");
}
// set cookie for next page
// We use document.URL instead of location.href because of a bug in safari 4
// where location.href is URL decoded
this.updateCookie({ 'r': d.URL }, edata.type === 'beforeunload'?'ul':'hd');
this.unloadfired = true;
},
_iterable_click: function(name, element, etarget, value_cb) {
if(!etarget) {
return;
}
BOOMR.debug(name + " called with " + etarget.nodeName, "rt");
while(etarget && etarget.nodeName.toUpperCase() !== element) {
etarget = etarget.parentNode;
}
if(etarget && etarget.nodeName.toUpperCase() === element) {
BOOMR.debug("passing through", "rt");
// user event, they may be going to another page
// if this page is being opened in a different tab, then
// our unload handler won't fire, so we need to set our
// cookie on click or submit
this.updateCookie({ "nu": value_cb(etarget) }, 'cl' );
}
},
onclick: function(etarget) {
impl._iterable_click("Click", "A", etarget, function(t) { return t.href; });
},
onsubmit: function(etarget) {
impl._iterable_click("Submit", "FORM", etarget, function(t) { var v = t.action || d.URL; return v.match(/\?/) ? v : v + "?"; });
},
domloaded: function() {
BOOMR.plugins.RT.endTimer("t_domloaded");
}
};
BOOMR.plugins.RT = {
// Methods
init: function(config) {
BOOMR.debug("init RT", "rt");
if(w !== BOOMR.window) {
w = BOOMR.window;
d = w.document;
}
BOOMR.utils.pluginConfig(impl, config, "RT",
["cookie", "cookie_exp", "strict_referrer"]);
// A beacon may be fired automatically on page load or if the page dev fires
// it manually with their own timers. It may not always contain a referrer
// (eg: XHR calls). We set default values for these cases.
// This is done before reading from the cookie because the cookie overwrites
// impl.r
impl.r = impl.r2 = BOOMR.utils.hashQueryString(d.referrer, true);
// Now pull out start time information from the cookie
// We'll do this every time init is called, and every time we call it, it will
// overwrite values already set (provided there are values to read out)
impl.initFromCookie();
// We'll get BoomerangTimings every time init is called because it could also
// include additional timers which might happen on a subsequent init call.
impl.getBoomerangTimings();
// only initialize once. we still collect config and check/set cookies
// every time init is called, but we attach event handlers only once
if(impl.initialized) {
return this;
}
impl.complete = false;
impl.timers = {};
BOOMR.subscribe("page_ready", impl.page_ready, null, impl);
impl.visiblefired = !(d.hidden || d.msHidden || d.webkitHidden);
if(!impl.visiblefired) {
BOOMR.subscribe("visibility_changed", impl.visibility_changed, null, impl);
}
BOOMR.subscribe("page_ready", this.done, "load", this);
BOOMR.subscribe("xhr_load", this.done, "xhr", this);
BOOMR.subscribe("dom_loaded", impl.domloaded, null, impl);
BOOMR.subscribe("page_unload", impl.page_unload, null, impl);
BOOMR.subscribe("click", impl.onclick, null, impl);
BOOMR.subscribe("form_submit", impl.onsubmit, null, impl);
impl.initialized = true;
return this;
},
startTimer: function(timer_name, time_value) {
if(timer_name) {
if (timer_name === 't_page') {
this.endTimer('t_resp', time_value);
}
impl.timers[timer_name] = {start: (typeof time_value === "number" ? time_value : new Date().getTime())};
}
return this;
},
endTimer: function(timer_name, time_value) {
if(timer_name) {
impl.timers[timer_name] = impl.timers[timer_name] || {};
if(impl.timers[timer_name].end === undefined) {
impl.timers[timer_name].end =
(typeof time_value === "number" ? time_value : new Date().getTime());
}
}
return this;
},
setTimer: function(timer_name, time_delta) {
if(timer_name) {
impl.timers[timer_name] = { delta: time_delta };
}
return this;
},
// Called when the page has reached a "usable" state. This may be when the
// onload event fires, or it could be at some other moment during/after page
// load when the page is usable by the user
done: function(edata, ename) {
BOOMR.debug("Called done with " + BOOMR.utils.objectToString(edata) + ", " + ename, "rt");
var t_start, t_done=new Date().getTime(),
basic_timers = { t_done: 1, t_resp: 1, t_page: 1},
ntimers = 0, t_name, timer, t_other=[];
impl.complete = false;
if(ename==="load" || ename==="visible") {
impl.initFromCookie();
impl.initNavTiming();
if(impl.checkPreRender()) {
return this;
}
if(impl.responseStart) {
// Use NavTiming API to figure out resp latency and page time
// t_resp will use the cookie if available or fallback to NavTiming
this.endTimer("t_resp", impl.responseStart);
if(impl.timers.t_load) { // t_load is the actual time load completed if using prerender
this.setTimer("t_page", impl.timers.t_load.end - impl.responseStart);
}
else {
this.setTimer("t_page", t_done - impl.responseStart);
}
}
else if(impl.timers.hasOwnProperty('t_page')) {
// If the dev has already started t_page timer, we can end it now as well
this.endTimer("t_page");
}
else if(impl.t_fb_approx) {
this.endTimer('t_resp', impl.t_fb_approx);
this.setTimer("t_page", t_done - impl.t_fb_approx);
}
// If a prerender timer was started, we can end it now as well
if(impl.timers.hasOwnProperty('t_postrender')) {
this.endTimer("t_postrender");
this.endTimer("t_prerender");
}
}
if(ename==="xhr" && edata.name && impl.timers[edata.name]) {
// For xhr timers, t_start is stored in impl.timers.xhr_{page group name}
// and xhr.pg is set to {page group name}
t_start = impl.timers[edata.name].start;
BOOMR.addVar("rt.start", "manual");
}
else if(impl.navigationStart) {
t_start = impl.navigationStart;
}
else if(impl.t_start && impl.navigationType !== 2) {
t_start = impl.t_start; // 2 is TYPE_BACK_FORWARD but the constant may not be defined across browsers
BOOMR.addVar("rt.start", "cookie"); // if the user hit the back button, referrer will match, and cookie will match
} // but will have time of previous page start, so t_done will be wrong
else {
BOOMR.addVar("rt.start", "none");
t_start = undefined; // force all timers to NaN state
}
BOOMR.debug("Got start time: " + t_start, "rt");
// If the dev has already called endTimer, then this call will do nothing
// else, it will stop the page load timer
this.endTimer("t_done", t_done);
// make sure old variables don't stick around
BOOMR.removeVar('t_done', 't_page', 't_resp', 'r', 'r2', 'rt.tstart', 'rt.cstart', 'rt.bstart', 'rt.end', 't_postrender', 't_prerender', 't_load');
BOOMR.addVar('rt.tstart', t_start);
if(typeof impl.t_start === 'number' && impl.t_start !== t_start) {
BOOMR.addVar('rt.cstart', impl.t_start);
}
BOOMR.addVar('rt.bstart', BOOMR.t_start);
BOOMR.addVar('rt.end', impl.timers.t_done.end); // don't just use t_done because dev may have called endTimer before we did
for(t_name in impl.timers) {
if(impl.timers.hasOwnProperty(t_name)) {
timer = impl.timers[t_name];
// if delta is a number, then it was set using setTimer
// if not, then we have to calculate it using start & end
if(typeof timer.delta !== "number") {
if(typeof timer.start !== "number") {
timer.start = t_start;
}
timer.delta = timer.end - timer.start;
}
// If the caller did not set a start time, and if there was no start cookie
// Or if there was no end time for this timer,
// then timer.delta will be NaN, in which case we discard it.
if(isNaN(timer.delta)) {
continue;
}
if(basic_timers.hasOwnProperty(t_name)) {
BOOMR.addVar(t_name, timer.delta);
}
else {
t_other.push(t_name + '|' + timer.delta);
}
ntimers++;
}
}
if(ntimers) {
if(ename !== "xhr") {
BOOMR.addVar("r", BOOMR.utils.cleanupURL(impl.r));
if(impl.r2 !== impl.r) {
BOOMR.addVar("r2", BOOMR.utils.cleanupURL(impl.r2));
}
}
if(t_other.length) {
BOOMR.addVar("t_other", t_other.join(','));
}
}
if(ename==='unload' && !impl.onloadfired) {
BOOMR.addVar('rt.abld', '');
if(!impl.visiblefired) {
BOOMR.addVar('rt.ntvu', '');
}
}
impl.timers = {};
impl.complete = true;
BOOMR.sendBeacon(); // we call sendBeacon() anyway because some other plugin
// may have blocked waiting for RT to complete
return this;
},
is_complete: function() { return impl.complete; }
};
}(window));
// End of RT plugin
| plugins/rt.js | /*
* Copyright (c) 2011, Yahoo! Inc. All rights reserved.
* Copyright (c) 2012, Log-Normal, Inc. All rights reserved.
* Copyrights licensed under the BSD License. See the accompanying LICENSE.txt file for terms.
*/
// This is the Round Trip Time plugin. Abbreviated to RT
// the parameter is the window
(function(w) {
var d=w.document, impl;
BOOMR = BOOMR || {};
BOOMR.plugins = BOOMR.plugins || {};
// private object
impl = {
onloadfired: false, //! Set when the page_ready event fires
// Use this to determine if unload fires before onload
unloadfired: false, //! Set when the first unload event fires
// Use this to make sure we don't beacon twice for beforeunload and unload
visiblefired: false, //! Set when page becomes visible (Chrome/IE)
// Use this to determine if user bailed without opening the tab
initialized: false, //! Set when init has completed to prevent double initialization
complete: false, //! Set when this plugin has completed
timers: {}, //! Custom timers that the developer can use
// Format for each timer is { start: XXX, end: YYY, delta: YYY-XXX }
cookie: 'RT', //! Name of the cookie that stores the start time and referrer
cookie_exp:600, //! Cookie expiry in seconds
strict_referrer: true, //! By default, don't beacon if referrers don't match.
// If set to false, beacon both referrer values and let
// the back end decide
navigationType: 0, // Navigation Type from the NavTiming API. We mainly care if this was BACK_FORWARD
// since cookie time will be incorrect in that case
navigationStart: undefined,
responseStart: undefined,
t_start: undefined, // t_start that came off the cookie
t_fb_approx: undefined, // approximate first byte time for browsers that don't support navtiming
r: undefined, // referrer from the cookie
r2: undefined, // referrer from document.referer
/**
* Merge new cookie `params` onto current cookie, and set `timer` param on cookie to current timestamp
* @param params object containing keys & values to merge onto current cookie. A value of `undefined`
* will remove the key from the cookie
* @param timer string key name that will be set to the current timestamp on the cookie
*
* @returns true if the cookie was updated, false if the cookie could not be set for any reason
*/
updateCookie: function(params, timer) {
var t_end, t_start, subcookies, k;
// Disable use of RT cookie by setting its name to a falsy value
if(!this.cookie) {
return this;
}
subcookies = BOOMR.utils.getSubCookies(BOOMR.utils.getCookie(this.cookie)) || {};
if (typeof params === "object") {
for(k in params) {
if(params.hasOwnProperty(k)) {
if (params[k] === undefined ) {
if (subcookies.hasOwnProperty(k)) {
delete subcookies[k];
}
}
else {
if (k==="nu" || k==="r") {
params[k] = BOOMR.utils.hashQueryString(params[k], true);
}
subcookies[k] = params[k];
}
}
}
}
t_start = new Date().getTime();
if(timer) {
subcookies[timer] = t_start;
}
BOOMR.debug("Setting cookie (timer=" + timer + ")\n" + BOOMR.utils.objectToString(subcookies), "rt");
if(!BOOMR.utils.setCookie(this.cookie, subcookies, this.cookie_exp)) {
BOOMR.error("cannot set start cookie", "rt");
return this;
}
t_end = new Date().getTime();
if(t_end - t_start > 50) {
// It took > 50ms to set the cookie
// The user Most likely has cookie prompting turned on so
// t_start won't be the actual unload time
// We bail at this point since we can't reliably tell t_done
BOOMR.utils.removeCookie(this.cookie);
// at some point we may want to log this info on the server side
BOOMR.error("took more than 50ms to set cookie... aborting: "
+ t_start + " -> " + t_end, "rt");
}
return this;
},
/**
* Read initial values from cookie and clear out cookie values it cares about after reading.
* This makes sure that other pages (eg: loaded in new tabs) do not get an invalid cookie time.
* This method should only be called from init, and may be called more than once.
*
* Request start time is the greater of last page beforeunload or last click time
* If start time came from a click, we check that the clicked URL matches the current URL
* If it came from a beforeunload, we check that cookie referrer matches document.referrer
*
* If we had a pageHide time or unload time, we use that as a proxy for first byte on non-navtiming
* browsers.
*/
initFromCookie: function() {
	var url, subcookies;
	subcookies = BOOMR.utils.getSubCookies(BOOMR.utils.getCookie(this.cookie));
	if(!subcookies) {
		// No RT cookie was set by a previous page -- nothing to initialise from
		return;
	}
	// Candidate start time: the later of the last beforeunload (ul) and last click (cl) times
	subcookies.s = Math.max(+subcookies.ul||0, +subcookies.cl||0);
	BOOMR.debug("Read from cookie " + BOOMR.utils.objectToString(subcookies), "rt");
	// If we have a start time, and either a referrer, or a clicked on URL,
	// we check if the start time is usable
	if(subcookies.s && (subcookies.r || subcookies.nu)) {
		this.r = subcookies.r;
		url = BOOMR.utils.hashQueryString(d.URL, true);
		// Either the URL of the page setting the cookie needs to match document.referrer
		BOOMR.debug(this.r + " =?= " + this.r2, "rt");
		// Or the start timer was no more than 15ms after a click or form submit
		// and the URL clicked or submitted to matches the current page's URL
		// (note the start timer may be later than click if both click and beforeunload fired
		// on the previous page)
		BOOMR.debug(subcookies.s + " <? " + (+subcookies.cl+15), "rt");
		BOOMR.debug(subcookies.nu + " =?= " + url, "rt");
		if (!this.strict_referrer ||
			(subcookies.nu && subcookies.nu === url && subcookies.s < +subcookies.cl + 15) ||
			(subcookies.s === +subcookies.ul && this.r === this.r2)
		) {
			this.t_start = subcookies.s;
			// additionally, if we have a pagehide, or unload event, that's a proxy
			// for the first byte of the current page, so use that wisely
			if(+subcookies.hd > subcookies.s) {
				this.t_fb_approx = parseInt(subcookies.hd, 10);
			}
		}
		else {
			// Start time fails both the referrer check and the click-URL check -- discard it
			this.t_start = this.t_fb_approx = undefined;
		}
	}
	// Now that we've pulled out the timers, we'll clear them so they don't pollute future calls
	this.updateCookie({
		s: undefined, // start timer
		r: undefined, // referrer
		nu: undefined, // clicked url
		ul: undefined, // onbeforeunload time
		cl: undefined, // onclick time
		hd: undefined // onunload or onpagehide time
	});
},
/**
* Figure out how long boomerang and config.js took to load using resource timing if available, or built in timestamps
*/
getBoomerangTimings: function() {
var res, k, urls, url;
if(BOOMR.t_start) {
// How long does it take Boomerang to load up and execute (fb to lb)?
BOOMR.plugins.RT.startTimer('boomerang', BOOMR.t_start);
BOOMR.plugins.RT.endTimer('boomerang', BOOMR.t_end); // t_end === null defaults to current time
// How long did it take from page request to boomerang fb?
BOOMR.plugins.RT.endTimer('boomr_fb', BOOMR.t_start);
if(BOOMR.t_lstart) {
// when did the boomerang loader start loading boomerang on the page?
BOOMR.plugins.RT.endTimer('boomr_ld', BOOMR.t_lstart);
// What was the network latency for boomerang (request to first byte)?
BOOMR.plugins.RT.setTimer('boomr_lat', BOOMR.t_start - BOOMR.t_lstart);
}
}
// use window and not w because we want the inner iframe
if (window.performance && window.performance.getEntriesByName) {
urls = { "rt.bmr." : BOOMR.url };
for(url in urls) {
if(urls.hasOwnProperty(url) && urls[url]) {
res = window.performance.getEntriesByName(urls[url]);
if(!res || res.length === 0) {
continue;
}
res = res[0];
for(k in res) {
if(res.hasOwnProperty(k) && k.match(/(Start|End)$/) && res[k] > 0) {
BOOMR.addVar(url + k.replace(/^(...).*(St|En).*$/, '$1$2'), res[k]);
}
}
}
}
}
},
// Handler for the page_ready event (fired at onload or when the page dev
// signals readiness manually).
page_ready: function() {
	// we need onloadfired because it's possible to reset "impl.complete"
	// if you're measuring multiple xhr loads, but not possible to reset
	// impl.onloadfired
	this.onloadfired = true;
},
visibility_changed: function() {
// we care if the page became visible at some point
if(!(d.hidden || d.msHidden || d.webkitHidden)) {
impl.visiblefired = true;
}
},
/**
* Check if we're in a prerender state, and if we are, set additional timers.
* In Chrome/IE, a prerender state is when a page is completely rendered in an in-memory buffer, before
* a user requests that page. We do not beacon at this point because the user has not shown intent
* to view the page. If the user opens the page, the visibility state changes to visible, and we
* fire the beacon at that point, including any timing details for prerendering.
*
* Sets the `t_load` timer to the actual value of page load time (request initiated by browser to onload)
*
* @returns true if this is a prerender state, false if not (or not supported)
*/
checkPreRender: function() {
if(
!(d.webkitVisibilityState && d.webkitVisibilityState === "prerender")
&&
!(d.msVisibilityState && d.msVisibilityState === 3)
) {
return false;
}
// This means that onload fired through a pre-render. We'll capture this
// time, but wait for t_done until after the page has become either visible
// or hidden (ie, it moved out of the pre-render state)
// http://code.google.com/chrome/whitepapers/pagevisibility.html
// http://www.w3.org/TR/2011/WD-page-visibility-20110602/
// http://code.google.com/chrome/whitepapers/prerender.html
BOOMR.plugins.RT.startTimer("t_load", this.navigationStart);
BOOMR.plugins.RT.endTimer("t_load"); // this will measure actual onload time for a prerendered page
BOOMR.plugins.RT.startTimer("t_prerender", this.navigationStart);
BOOMR.plugins.RT.startTimer("t_postrender"); // time from prerender to visible or hidden
BOOMR.subscribe("visibility_changed", BOOMR.plugins.RT.done, "visible", BOOMR.plugins.RT);
return true;
},
/**
* Initialise timers from the NavigationTiming API. This method looks at various sources for
* Navigation Timing, and also patches around bugs in various browser implementations.
* It sets the beacon parameter `rt.start` to the source of the timer
*/
initNavTiming: function() {
	var ti, p, source;
	if(this.navigationStart) {
		// Already initialised by an earlier call -- nothing to redo
		return;
	}
	// Get start time from WebTiming API see:
	// https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/NavigationTiming/Overview.html
	// http://blogs.msdn.com/b/ie/archive/2010/06/28/measuring-web-page-performance.aspx
	// http://blog.chromium.org/2010/07/do-you-know-how-slow-your-web-page-is.html
	p = w.performance || w.msPerformance || w.webkitPerformance || w.mozPerformance;
	if(p && p.navigation) {
		this.navigationType = p.navigation.type;
	}
	if(p && p.timing) {
		// Standard (possibly vendor-prefixed) NavigationTiming implementation
		ti = p.timing;
	}
	else if(w.chrome && w.chrome.csi && w.chrome.csi().startE) {
		// Older versions of chrome also have a timing API that's sort of documented here:
		// http://ecmanaut.blogspot.com/2010/06/google-bom-feature-ms-since-pageload.html
		// source here:
		// http://src.chromium.org/viewvc/chrome/trunk/src/chrome/renderer/loadtimes_extension_bindings.cc?view=markup
		ti = {
			navigationStart: w.chrome.csi().startE
		};
		source = "csi";
	}
	else if(w.gtbExternal && w.gtbExternal.startE()) {
		// The Google Toolbar exposes navigation start time similar to old versions of chrome
		// This would work for any browser that has the google toolbar installed
		ti = {
			navigationStart: w.gtbExternal.startE()
		};
		source = 'gtb';
	}
	if(ti) {
		// Always use navigationStart since it falls back to fetchStart (not with redirects)
		// If not set, we leave t_start alone so that timers that depend
		// on it don't get sent back. Never use requestStart since if
		// the first request fails and the browser retries, it will contain
		// the value for the new request.
		BOOMR.addVar("rt.start", source || "navigation");
		this.navigationStart = ti.navigationStart || ti.fetchStart || undefined;
		this.responseStart = ti.responseStart || undefined;
		// bug in Firefox 7 & 8 https://bugzilla.mozilla.org/show_bug.cgi?id=691547
		if(navigator.userAgent.match(/Firefox\/[78]\./)) {
			this.navigationStart = ti.unloadEventStart || ti.fetchStart || undefined;
		}
	}
	else {
		BOOMR.warn("This browser doesn't support the WebTiming API", "rt");
	}
	return;
},
page_unload: function(edata) {
BOOMR.debug("Unload called with " + BOOMR.utils.objectToString(edata) + " when unloadfired = " + this.unloadfired, "rt");
if(!this.unloadfired) {
// run done on abort or on page_unload to measure session length
BOOMR.plugins.RT.done(edata, "unload");
}
// set cookie for next page
// We use document.URL instead of location.href because of a bug in safari 4
// where location.href is URL decoded
this.updateCookie({ 'r': d.URL }, edata.type === 'beforeunload'?'ul':'hd');
this.unloadfired = true;
},
_iterable_click: function(name, element, etarget, value_cb) {
if(!etarget) {
return;
}
BOOMR.debug(name + " called with " + etarget.nodeName, "rt");
while(etarget && etarget.nodeName.toUpperCase() !== element) {
etarget = etarget.parentNode;
}
if(etarget && etarget.nodeName.toUpperCase() === element) {
BOOMR.debug("passing through", "rt");
// user event, they may be going to another page
// if this page is being opened in a different tab, then
// our unload handler won't fire, so we need to set our
// cookie on click or submit
this.updateCookie({ "nu": value_cb(etarget) }, 'cl' );
}
},
onclick: function(etarget) {
impl._iterable_click("Click", "A", etarget, function(t) { return t.href; });
},
onsubmit: function(etarget) {
impl._iterable_click("Submit", "FORM", etarget, function(t) { var v = t.action || d.URL; return v.match(/\?/) ? v : v + "?"; });
},
// Handler for dom_loaded: records the end of the t_domloaded timer.
// If the timer was never explicitly started, done() later backfills
// its start from the overall page start time.
domloaded: function() {
	BOOMR.plugins.RT.endTimer("t_domloaded");
}
};
BOOMR.plugins.RT = {
// Methods
init: function(config) {
	BOOMR.debug("init RT", "rt");
	if(w !== BOOMR.window) {
		// BOOMR was re-pointed at another window since our closure captured w
		w = BOOMR.window;
		d = w.document;
	}
	// Pull the RT-specific settings out of the user-supplied config
	BOOMR.utils.pluginConfig(impl, config, "RT",
			["cookie", "cookie_exp", "strict_referrer"]);
	// A beacon may be fired automatically on page load or if the page dev fires
	// it manually with their own timers. It may not always contain a referrer
	// (eg: XHR calls). We set default values for these cases.
	// This is done before reading from the cookie because the cookie overwrites
	// impl.r
	impl.r = impl.r2 = BOOMR.utils.hashQueryString(d.referrer, true);
	// Now pull out start time information from the cookie
	// We'll do this every time init is called, and every time we call it, it will
	// overwrite values already set (provided there are values to read out)
	impl.initFromCookie();
	// We'll get BoomerangTimings every time init is called because it could also
	// include additional timers which might happen on a subsequent init call.
	impl.getBoomerangTimings();
	// only initialize once. we still collect config and check/set cookies
	// every time init is called, but we attach event handlers only once
	if(impl.initialized) {
		return this;
	}
	impl.complete = false;
	impl.timers = {};
	BOOMR.subscribe("page_ready", impl.page_ready, null, impl);
	// Was the page visible when we initialised? If not, watch for the change
	impl.visiblefired = !(d.hidden || d.msHidden || d.webkitHidden);
	if(!impl.visiblefired) {
		BOOMR.subscribe("visibility_changed", impl.visibility_changed, null, impl);
	}
	BOOMR.subscribe("page_ready", this.done, "load", this);
	BOOMR.subscribe("xhr_load", this.done, "xhr", this);
	BOOMR.subscribe("dom_loaded", impl.domloaded, null, impl);
	BOOMR.subscribe("page_unload", impl.page_unload, null, impl);
	BOOMR.subscribe("click", impl.onclick, null, impl);
	BOOMR.subscribe("form_submit", impl.onsubmit, null, impl);
	impl.initialized = true;
	return this;
},
startTimer: function(timer_name, time_value) {
if(timer_name) {
if (timer_name === 't_page') {
this.endTimer('t_resp', time_value);
}
impl.timers[timer_name] = {start: (typeof time_value === "number" ? time_value : new Date().getTime())};
}
return this;
},
endTimer: function(timer_name, time_value) {
if(timer_name) {
impl.timers[timer_name] = impl.timers[timer_name] || {};
if(impl.timers[timer_name].end === undefined) {
impl.timers[timer_name].end =
(typeof time_value === "number" ? time_value : new Date().getTime());
}
}
return this;
},
setTimer: function(timer_name, time_delta) {
if(timer_name) {
impl.timers[timer_name] = { delta: time_delta };
}
return this;
},
// Called when the page has reached a "usable" state. This may be when the
// onload event fires, or it could be at some other moment during/after page
// load when the page is usable by the user
done: function(edata, ename) {
	BOOMR.debug("Called done with " + BOOMR.utils.objectToString(edata) + ", " + ename, "rt");
	var t_start, t_done=new Date().getTime(),
		basic_timers = { t_done: 1, t_resp: 1, t_page: 1},
		ntimers = 0, t_name, timer, t_other=[];
	// Mark incomplete while we compute; set back to true just before the beacon
	impl.complete = false;
	if(ename==="load" || ename==="visible") {
		impl.initFromCookie();
		impl.initNavTiming();
		if(impl.checkPreRender()) {
			// Page is prerendering: defer the beacon until it becomes visible
			return this;
		}
		if(impl.responseStart) {
			// Use NavTiming API to figure out resp latency and page time
			// t_resp will use the cookie if available or fallback to NavTiming
			this.endTimer("t_resp", impl.responseStart);
			if(impl.timers.t_load) { // t_load is the actual time load completed if using prerender
				this.setTimer("t_page", impl.timers.t_load.end - impl.responseStart);
			}
			else {
				this.setTimer("t_page", t_done - impl.responseStart);
			}
		}
		else if(impl.timers.hasOwnProperty('t_page')) {
			// If the dev has already started t_page timer, we can end it now as well
			this.endTimer("t_page");
		}
		else if(impl.t_fb_approx) {
			// Fall back to the previous page's unload/pagehide time as an
			// approximation of this page's first byte
			this.endTimer('t_resp', impl.t_fb_approx);
			this.setTimer("t_page", t_done - impl.t_fb_approx);
		}
		// If a prerender timer was started, we can end it now as well
		if(impl.timers.hasOwnProperty('t_postrender')) {
			this.endTimer("t_postrender");
			this.endTimer("t_prerender");
		}
	}
	// Determine the overall start time, in priority order:
	// manual xhr timer, NavTiming, cookie, none
	if(ename==="xhr" && edata.name && impl.timers[edata.name]) {
		// For xhr timers, t_start is stored in impl.timers.xhr_{page group name}
		// and xhr.pg is set to {page group name}
		t_start = impl.timers[edata.name].start;
		BOOMR.addVar("rt.start", "manual");
	}
	else if(impl.navigationStart) {
		t_start = impl.navigationStart;
	}
	else if(impl.t_start && impl.navigationType !== 2) {
		t_start = impl.t_start; // 2 is TYPE_BACK_FORWARD but the constant may not be defined across browsers
		BOOMR.addVar("rt.start", "cookie"); // if the user hit the back button, referrer will match, and cookie will match
	} // but will have time of previous page start, so t_done will be wrong
	else {
		BOOMR.addVar("rt.start", "none");
		t_start = undefined; // force all timers to NaN state
	}
	BOOMR.debug("Got start time: " + t_start, "rt");
	// If the dev has already called endTimer, then this call will do nothing
	// else, it will stop the page load timer
	this.endTimer("t_done", t_done);
	// make sure old variables don't stick around
	BOOMR.removeVar('t_done', 't_page', 't_resp', 'r', 'r2', 'rt.tstart', 'rt.cstart', 'rt.bstart', 'rt.end', 't_postrender', 't_prerender', 't_load');
	BOOMR.addVar('rt.tstart', t_start);
	if(typeof impl.t_start === 'number' && impl.t_start !== t_start) {
		// Cookie start differs from the chosen start -- report it separately
		BOOMR.addVar('rt.cstart', impl.t_start);
	}
	BOOMR.addVar('rt.bstart', BOOMR.t_start);
	BOOMR.addVar('rt.end', impl.timers.t_done.end); // don't just use t_done because dev may have called endTimer before we did
	// Flush every recorded timer onto the beacon
	for(t_name in impl.timers) {
		if(impl.timers.hasOwnProperty(t_name)) {
			timer = impl.timers[t_name];
			// if delta is a number, then it was set using setTimer
			// if not, then we have to calculate it using start & end
			if(typeof timer.delta !== "number") {
				if(typeof timer.start !== "number") {
					timer.start = t_start;
				}
				timer.delta = timer.end - timer.start;
			}
			// If the caller did not set a start time, and if there was no start cookie
			// Or if there was no end time for this timer,
			// then timer.delta will be NaN, in which case we discard it.
			if(isNaN(timer.delta)) {
				continue;
			}
			// Well-known timers get their own beacon variable; custom timers
			// are collected into the t_other list
			if(basic_timers.hasOwnProperty(t_name)) {
				BOOMR.addVar(t_name, timer.delta);
			}
			else {
				t_other.push(t_name + '|' + timer.delta);
			}
			ntimers++;
		}
	}
	if(ntimers) {
		if(ename !== "xhr") {
			BOOMR.addVar("r", BOOMR.utils.cleanupURL(impl.r));
			if(impl.r2 !== impl.r) {
				BOOMR.addVar("r2", BOOMR.utils.cleanupURL(impl.r2));
			}
		}
		if(t_other.length) {
			BOOMR.addVar("t_other", t_other.join(','));
		}
	}
	if(ename==='unload' && !impl.onloadfired) {
		// Beacon fired from unload before onload ever ran: aborted load
		BOOMR.addVar('rt.abld', '');
		if(!impl.visiblefired) {
			// ...and the page was never visible to the user
			BOOMR.addVar('rt.ntvu', '');
		}
	}
	impl.timers = {};
	impl.complete = true;
	BOOMR.sendBeacon(); // we call sendBeacon() anyway because some other plugin
	// may have blocked waiting for RT to complete
	return this;
},
// Reports whether the RT plugin has finished collecting its beacon data
is_complete: function() { return impl.complete; }
};
}(window));
// End of RT plugin
| updateCookie method returns true/false
| plugins/rt.js | updateCookie method returns true/false | <ide><path>lugins/rt.js
<ide>
<ide> // Disable use of RT cookie by setting its name to a falsy value
<ide> if(!this.cookie) {
<del> return this;
<add> return false;
<ide> }
<ide>
<ide> subcookies = BOOMR.utils.getSubCookies(BOOMR.utils.getCookie(this.cookie)) || {};
<ide> BOOMR.debug("Setting cookie (timer=" + timer + ")\n" + BOOMR.utils.objectToString(subcookies), "rt");
<ide> if(!BOOMR.utils.setCookie(this.cookie, subcookies, this.cookie_exp)) {
<ide> BOOMR.error("cannot set start cookie", "rt");
<del> return this;
<add> return false;
<ide> }
<ide>
<ide> t_end = new Date().getTime();
<ide> + t_start + " -> " + t_end, "rt");
<ide> }
<ide>
<del> return this;
<add> return true;
<ide> },
<ide>
<ide> /** |
|
Java | apache-2.0 | dce13845fb4c1b17dedec559d763245fcd517a40 | 0 | apache/openwebbeans,apache/openwebbeans,apache/openwebbeans | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package javax.enterprise.inject.spi;
import java.lang.annotation.Annotation;
import java.lang.reflect.Member;
import java.lang.reflect.Type;
import java.util.Set;
/**
* An InjectionPoint object provides metadata information about an injection point.
* An instance of InjectionPoint may represent one of the following types:
* <ul>
* <li>an injected field</li>
* <li>a parameter of a bean constructor</li>
* <li>an initializer method</li>
* <li>a producer method</li>
* <li>a disposer method</li>
* <li>an observer method</li>
* </ul>
*/
public interface InjectionPoint
{
    /**
     * Returns the declared type of this injection point
     * (the field type, or the parameter type for constructor/method parameters).
     */
    public Type getType();

    /**
     * Returns the binding annotations declared on this injection point.
     */
    public Set<Annotation> getBindings();

    /**
     * Returns the {@link Bean} that declares this injection point.
     */
    public Bean<?> getBean();

    /**
     * Returns the underlying reflection {@link Member} of this injection point,
     * e.g. the injected field, or the method/constructor owning the parameter.
     */
    public Member getMember();

    /** @deprecated old signatures have to be dropped */
    public <T extends Annotation> T getAnnotation(Class<T> annotationType);

    /** @deprecated old signatures have to be dropped */
    public Annotation[] getAnnotations();

    /** @deprecated old signatures have to be dropped */
    public boolean isAnnotationPresent(Class<? extends Annotation> annotationType);

}
| webbeans-api/src/main/java/javax/enterprise/inject/spi/InjectionPoint.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package javax.enterprise.inject.spi;
import java.lang.annotation.Annotation;
import java.lang.reflect.Member;
import java.lang.reflect.Type;
import java.util.Set;
public interface InjectionPoint
{
public Type getType();
public Set<Annotation> getBindings();
public Bean<?> getBean();
public Member getMember();
public <T extends Annotation> T getAnnotation(Class<T> annotationType);
public Annotation[] getAnnotations();
public boolean isAnnotationPresent(Class<? extends Annotation> annotationType);
}
| OWB-108 JavaDoc
@deprecated old method declarations
git-svn-id: 959c5dc4f35e0484f067e28fe24ef05c41faf244@788501 13f79535-47bb-0310-9956-ffa450edef68
| webbeans-api/src/main/java/javax/enterprise/inject/spi/InjectionPoint.java | OWB-108 JavaDoc @deprecated old method declarations | <ide><path>ebbeans-api/src/main/java/javax/enterprise/inject/spi/InjectionPoint.java
<ide> import java.lang.reflect.Type;
<ide> import java.util.Set;
<ide>
<del>
<add>/**
<add> * An InjectionPoint object provides metadata information about an injection point.
<add> * An instance of InjectionPoint may represent one of the following types:
<add> * <ul>
<add> * <li>an injected field</li>
<add> * <li>a parameter of a bean constructor</li>
<add> * <li>an initializer method</li>
<add> * <li>a producer method</li>
<add> * <li>a disposer method</li>
<add> * <li>an observer method</li>
<add> * </ul>
<add> */
<ide> public interface InjectionPoint
<ide> {
<ide> public Type getType();
<ide>
<ide> public Member getMember();
<ide>
<add> /** @deprecated old signatures have to be dropped */
<ide> public <T extends Annotation> T getAnnotation(Class<T> annotationType);
<ide>
<add> /** @deprecated old signatures have to be dropped */
<ide> public Annotation[] getAnnotations();
<ide>
<add> /** @deprecated old signatures have to be dropped */
<ide> public boolean isAnnotationPresent(Class<? extends Annotation> annotationType);
<ide>
<ide> } |
|
Java | apache-2.0 | a70f8a9b3d2e6994447cc81daf909bf06fc9853e | 0 | jcshen007/cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,argv0/cloudstack,cinderella/incubator-cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,argv0/cloudstack,wido/cloudstack,jcshen007/cloudstack,resmo/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,resmo/cloudstack,resmo/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,cinderella/incubator-cloudstack,DaanHoogland/cloudstack,wido/cloudstack,cinderella/incubator-cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,cinderella/incubator-cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,argv0/cloudstack,DaanHoogland/cloudstack,wido/cloudstack | /**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.server;
import java.lang.reflect.Field;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Comparator;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import com.cloud.acl.SecurityChecker.AccessType;
import com.cloud.agent.AgentManager;
import com.cloud.agent.api.GetVncPortAnswer;
import com.cloud.agent.api.GetVncPortCommand;
import com.cloud.agent.api.storage.CopyVolumeAnswer;
import com.cloud.agent.api.storage.CopyVolumeCommand;
import com.cloud.agent.manager.allocator.HostAllocator;
import com.cloud.alert.Alert;
import com.cloud.alert.AlertManager;
import com.cloud.alert.AlertVO;
import com.cloud.alert.dao.AlertDao;
import com.cloud.api.ApiConstants;
import com.cloud.api.ApiDBUtils;
import com.cloud.api.commands.CreateSSHKeyPairCmd;
import com.cloud.api.commands.DeleteSSHKeyPairCmd;
import com.cloud.api.commands.DestroySystemVmCmd;
import com.cloud.api.commands.ExtractVolumeCmd;
import com.cloud.api.commands.GetVMPasswordCmd;
import com.cloud.api.commands.ListAlertsCmd;
import com.cloud.api.commands.ListAsyncJobsCmd;
import com.cloud.api.commands.ListCapabilitiesCmd;
import com.cloud.api.commands.ListCapacityCmd;
import com.cloud.api.commands.ListCfgsByCmd;
import com.cloud.api.commands.ListClustersCmd;
import com.cloud.api.commands.ListDiskOfferingsCmd;
import com.cloud.api.commands.ListEventsCmd;
import com.cloud.api.commands.ListGuestOsCategoriesCmd;
import com.cloud.api.commands.ListGuestOsCmd;
import com.cloud.api.commands.ListHostsCmd;
import com.cloud.api.commands.ListIsosCmd;
import com.cloud.api.commands.ListPodsByCmd;
import com.cloud.api.commands.ListPublicIpAddressesCmd;
import com.cloud.api.commands.ListRoutersCmd;
import com.cloud.api.commands.ListSSHKeyPairsCmd;
import com.cloud.api.commands.ListServiceOfferingsCmd;
import com.cloud.api.commands.ListStoragePoolsCmd;
import com.cloud.api.commands.ListSystemVMsCmd;
import com.cloud.api.commands.ListTemplatesCmd;
import com.cloud.api.commands.ListVMGroupsCmd;
import com.cloud.api.commands.ListVlanIpRangesCmd;
import com.cloud.api.commands.ListZonesByCmd;
import com.cloud.api.commands.RebootSystemVmCmd;
import com.cloud.api.commands.RegisterSSHKeyPairCmd;
import com.cloud.api.commands.StopSystemVmCmd;
import com.cloud.api.commands.UpdateDomainCmd;
import com.cloud.api.commands.UpdateHostPasswordCmd;
import com.cloud.api.commands.UpdateIsoCmd;
import com.cloud.api.commands.UpdateTemplateCmd;
import com.cloud.api.commands.UpdateTemplateOrIsoCmd;
import com.cloud.api.commands.UpdateVMGroupCmd;
import com.cloud.api.commands.UploadCustomCertificateCmd;
import com.cloud.api.response.ExtractResponse;
import com.cloud.async.AsyncJobExecutor;
import com.cloud.async.AsyncJobManager;
import com.cloud.async.AsyncJobResult;
import com.cloud.async.AsyncJobVO;
import com.cloud.async.BaseAsyncJobExecutor;
import com.cloud.async.dao.AsyncJobDao;
import com.cloud.capacity.Capacity;
import com.cloud.capacity.CapacityVO;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.capacity.dao.CapacityDaoImpl.SummedCapacity;
import com.cloud.configuration.Config;
import com.cloud.configuration.ConfigurationVO;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.consoleproxy.ConsoleProxyManagementState;
import com.cloud.consoleproxy.ConsoleProxyManager;
import com.cloud.dc.AccountVlanMapVO;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.PodVlanMapVO;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.dc.VlanVO;
import com.cloud.dc.dao.AccountVlanMapDao;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.dc.dao.PodVlanMapDao;
import com.cloud.dc.dao.VlanDao;
import com.cloud.deploy.DataCenterDeployment;
import com.cloud.deploy.DeploymentPlanner.ExcludeList;
import com.cloud.domain.DomainVO;
import com.cloud.domain.dao.DomainDao;
import com.cloud.event.ActionEvent;
import com.cloud.event.EventTypes;
import com.cloud.event.EventUtils;
import com.cloud.event.EventVO;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.CloudAuthenticationException;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.exception.StorageUnavailableException;
import com.cloud.host.DetailVO;
import com.cloud.host.Host;
import com.cloud.host.Host.Type;
import com.cloud.host.HostVO;
import com.cloud.host.dao.HostDao;
import com.cloud.host.dao.HostDetailsDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.HypervisorCapabilities;
import com.cloud.hypervisor.HypervisorCapabilitiesVO;
import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao;
import com.cloud.info.ConsoleProxyInfo;
import com.cloud.keystore.KeystoreManager;
import com.cloud.network.IPAddressVO;
import com.cloud.network.LoadBalancerVO;
import com.cloud.network.NetworkVO;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.network.dao.LoadBalancerDao;
import com.cloud.network.dao.NetworkDao;
import com.cloud.org.Grouping.AllocationState;
import com.cloud.projects.Project;
import com.cloud.projects.Project.ListProjectResourcesCriteria;
import com.cloud.projects.ProjectManager;
import com.cloud.resource.ResourceManager;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.storage.GuestOSCategoryVO;
import com.cloud.storage.GuestOSVO;
import com.cloud.storage.Storage;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.StorageManager;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.Upload;
import com.cloud.storage.Upload.Mode;
import com.cloud.storage.UploadVO;
import com.cloud.storage.VMTemplateVO;
import com.cloud.storage.Volume;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.GuestOSCategoryDao;
import com.cloud.storage.dao.GuestOSDao;
import com.cloud.storage.dao.StoragePoolDao;
import com.cloud.storage.dao.UploadDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.secondary.SecondaryStorageVmManager;
import com.cloud.storage.snapshot.SnapshotManager;
import com.cloud.storage.swift.SwiftManager;
import com.cloud.storage.upload.UploadMonitor;
import com.cloud.template.VirtualMachineTemplate.TemplateFilter;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.AccountVO;
import com.cloud.user.SSHKeyPair;
import com.cloud.user.SSHKeyPairVO;
import com.cloud.user.User;
import com.cloud.user.UserContext;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.SSHKeyPairDao;
import com.cloud.user.dao.UserDao;
import com.cloud.utils.EnumUtils;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.PasswordGenerator;
import com.cloud.utils.Ternary;
import com.cloud.utils.component.Adapters;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.component.Inject;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.JoinBuilder;
import com.cloud.utils.db.JoinBuilder.JoinType;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.MacAddress;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.ssh.SSHKeysHelper;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.DomainRouterVO;
import com.cloud.vm.InstanceGroupVO;
import com.cloud.vm.NicVO;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.VirtualMachineManager;
import com.cloud.vm.VirtualMachineProfile;
import com.cloud.vm.VirtualMachineProfileImpl;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.DomainRouterDao;
import com.cloud.vm.dao.InstanceGroupDao;
import com.cloud.vm.dao.NicDao;
import com.cloud.vm.dao.SecondaryStorageVmDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.utils.exception.CSExceptionErrorCode;
import com.cloud.utils.AnnotationHelper;
import edu.emory.mathcs.backport.java.util.Arrays;
import edu.emory.mathcs.backport.java.util.Collections;
/**
 * Central management-server facade. Wires together the DAOs and managers it
 * needs via the {@code ComponentLocator} (see the constructor) and answers
 * list/search style API calls for zones, hosts, pods, offerings, events, etc.
 *
 * NOTE(review): the class continues beyond this excerpt; only the fields and
 * a subset of methods are visible here.
 */
public class ManagementServerImpl implements ManagementServer {
    public static final Logger s_logger = Logger.getLogger(ManagementServerImpl.class.getName());
    // Collaborators below are resolved once in the constructor and are
    // effectively immutable for the lifetime of the server.
    private final AccountManager _accountMgr;
    private final AgentManager _agentMgr;
    private final AlertManager _alertMgr;
    private final IPAddressDao _publicIpAddressDao;
    private final DomainRouterDao _routerDao;
    private final ConsoleProxyDao _consoleProxyDao;
    private final ClusterDao _clusterDao;
    private final SecondaryStorageVmDao _secStorageVmDao;
    private final EventDao _eventDao;
    private final DataCenterDao _dcDao;
    private final VlanDao _vlanDao;
    private final AccountVlanMapDao _accountVlanMapDao;
    private final PodVlanMapDao _podVlanMapDao;
    private final HostDao _hostDao;
    private final HostDetailsDao _detailsDao;
    private final UserDao _userDao;
    private final UserVmDao _userVmDao;
    private final ConfigurationDao _configDao;
    private final ConsoleProxyManager _consoleProxyMgr;
    private final SecondaryStorageVmManager _secStorageVmMgr;
    private final SwiftManager _swiftMgr;
    private final ServiceOfferingDao _offeringsDao;
    private final DiskOfferingDao _diskOfferingDao;
    private final VMTemplateDao _templateDao;
    private final DomainDao _domainDao;
    private final AccountDao _accountDao;
    private final AlertDao _alertDao;
    private final CapacityDao _capacityDao;
    private final GuestOSDao _guestOSDao;
    private final GuestOSCategoryDao _guestOSCategoryDao;
    private final StoragePoolDao _poolDao;
    private final NicDao _nicDao;
    private final NetworkDao _networkDao;
    private final StorageManager _storageMgr;
    private final VirtualMachineManager _itMgr;
    private final HostPodDao _hostPodDao;
    private final VMInstanceDao _vmInstanceDao;
    private final VolumeDao _volumeDao;
    private final AsyncJobDao _jobDao;
    private final AsyncJobManager _asyncMgr;
    // Seconds to retain events before the purge task deletes them; 0 disables purging.
    private final int _purgeDelay;
    private final InstanceGroupDao _vmGroupDao;
    private final UploadMonitor _uploadMonitor;
    private final UploadDao _uploadDao;
    private final SSHKeyPairDao _sshKeyPairDao;
    private final LoadBalancerDao _loadbalancerDao;
    private final HypervisorCapabilitiesDao _hypervisorCapabilitiesDao;
    private final Adapters<HostAllocator> _hostAllocators;
    @Inject
    ProjectManager _projectMgr;
    private final ResourceManager _resourceMgr;
    @Inject
    SnapshotManager _snapshotMgr;
    private final KeystoreManager _ksMgr;
    // Single-threaded scheduler driving the periodic event-purge task.
    private final ScheduledExecutorService _eventExecutor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("EventChecker"));
    // Snapshot of the global configuration table, loaded once at construction.
    private final Map<String, String> _configs;
    private final StatsCollector _statsCollector;
    // Set of valid java.util.TimeZone ids, used for O(1) timezone validation.
    private final Map<String, Boolean> _availableIdsMap;
    private String _hashKey = null;
/**
 * Resolves every DAO/manager collaborator through the ComponentLocator,
 * loads the configuration snapshot, and schedules the event-purge task.
 *
 * NOTE(review): {@code Name} is a constant declared elsewhere in this class
 * (outside this excerpt) identifying the locator scope.
 */
protected ManagementServerImpl() {
    ComponentLocator locator = ComponentLocator.getLocator(Name);
    _configDao = locator.getDao(ConfigurationDao.class);
    _routerDao = locator.getDao(DomainRouterDao.class);
    _eventDao = locator.getDao(EventDao.class);
    _dcDao = locator.getDao(DataCenterDao.class);
    _vlanDao = locator.getDao(VlanDao.class);
    _accountVlanMapDao = locator.getDao(AccountVlanMapDao.class);
    _podVlanMapDao = locator.getDao(PodVlanMapDao.class);
    _hostDao = locator.getDao(HostDao.class);
    _detailsDao = locator.getDao(HostDetailsDao.class);
    _hostPodDao = locator.getDao(HostPodDao.class);
    _jobDao = locator.getDao(AsyncJobDao.class);
    _clusterDao = locator.getDao(ClusterDao.class);
    _nicDao = locator.getDao(NicDao.class);
    _networkDao = locator.getDao(NetworkDao.class);
    _loadbalancerDao = locator.getDao(LoadBalancerDao.class);
    _accountMgr = locator.getManager(AccountManager.class);
    _agentMgr = locator.getManager(AgentManager.class);
    _alertMgr = locator.getManager(AlertManager.class);
    _consoleProxyMgr = locator.getManager(ConsoleProxyManager.class);
    _secStorageVmMgr = locator.getManager(SecondaryStorageVmManager.class);
    _swiftMgr = locator.getManager(SwiftManager.class);
    _storageMgr = locator.getManager(StorageManager.class);
    _publicIpAddressDao = locator.getDao(IPAddressDao.class);
    _consoleProxyDao = locator.getDao(ConsoleProxyDao.class);
    _secStorageVmDao = locator.getDao(SecondaryStorageVmDao.class);
    _userDao = locator.getDao(UserDao.class);
    _userVmDao = locator.getDao(UserVmDao.class);
    _offeringsDao = locator.getDao(ServiceOfferingDao.class);
    _diskOfferingDao = locator.getDao(DiskOfferingDao.class);
    _templateDao = locator.getDao(VMTemplateDao.class);
    _domainDao = locator.getDao(DomainDao.class);
    _accountDao = locator.getDao(AccountDao.class);
    _alertDao = locator.getDao(AlertDao.class);
    _capacityDao = locator.getDao(CapacityDao.class);
    _guestOSDao = locator.getDao(GuestOSDao.class);
    _guestOSCategoryDao = locator.getDao(GuestOSCategoryDao.class);
    _poolDao = locator.getDao(StoragePoolDao.class);
    _vmGroupDao = locator.getDao(InstanceGroupDao.class);
    _uploadDao = locator.getDao(UploadDao.class);
    // Configuration snapshot must be loaded before the scheduling logic below reads it.
    _configs = _configDao.getConfiguration();
    _vmInstanceDao = locator.getDao(VMInstanceDao.class);
    _volumeDao = locator.getDao(VolumeDao.class);
    _asyncMgr = locator.getManager(AsyncJobManager.class);
    _uploadMonitor = locator.getManager(UploadMonitor.class);
    _sshKeyPairDao = locator.getDao(SSHKeyPairDao.class);
    _itMgr = locator.getManager(VirtualMachineManager.class);
    _ksMgr = locator.getManager(KeystoreManager.class);
    _resourceMgr = locator.getManager(ResourceManager.class);
    _hypervisorCapabilitiesDao = locator.getDao(HypervisorCapabilitiesDao.class);
    _hostAllocators = locator.getAdapters(HostAllocator.class);
    // Missing allocators are logged but not fatal; migration searches will find no hosts.
    if (_hostAllocators == null || !_hostAllocators.isSet()) {
        s_logger.error("Unable to find HostAllocators");
    }
    String value = _configs.get("account.cleanup.interval");
    int cleanup = NumbersUtil.parseInt(value, 60 * 60 * 24); // 1 day.
    _statsCollector = StatsCollector.getInstance(_configs);
    // Purge delay of 0 (the default) disables the periodic event purge entirely.
    _purgeDelay = NumbersUtil.parseInt(_configs.get("event.purge.delay"), 0);
    if (_purgeDelay != 0) {
        _eventExecutor.scheduleAtFixedRate(new EventPurgeTask(), cleanup, cleanup, TimeUnit.SECONDS);
    }
    // Pre-index all known timezone ids for constant-time validation lookups.
    String[] availableIds = TimeZone.getAvailableIDs();
    _availableIdsMap = new HashMap<String, Boolean>(availableIds.length);
    for (String id : availableIds) {
        _availableIdsMap.put(id, true);
    }
}
/** Returns the configuration snapshot loaded at construction time (not refreshed). */
protected Map<String, String> getConfigs() {
    return _configs;
}
@Override
public String generateRandomPassword() {
    // NOTE(review): 6 characters is a weak password length by modern
    // standards; callers appear to rely on this fixed length, so it is left
    // unchanged here. Consider making it configurable.
    return PasswordGenerator.generateRandomPassword(6);
}
/**
 * Lists data centers (zones) visible to the caller.
 *
 * Visibility rules, in order:
 *  - explicit domainId: only zones associated with that domain;
 *  - no account / root admin: all zones (optionally keyword-filtered);
 *  - normal user: zones of the user's domain chain up to root, plus public zones;
 *  - domain admin / resource domain admin: zones of the domain chain up to root,
 *    plus zones of all child domains, plus public zones.
 * Disabled zones are stripped for non-admin callers. If {@code available} is
 * explicitly false, only zones where the account already has a router remain.
 * A trailing {@code id} filter reduces the result to a single zone.
 *
 * NOTE(review): {@code dcs} stays null if the account type matches none of
 * the branches above; the final id-filter loop would then NPE — presumably
 * all account types are covered, but verify against Account constants.
 */
@Override
public List<DataCenterVO> listDataCenters(ListZonesByCmd cmd) {
    Account account = UserContext.current().getCaller();
    List<DataCenterVO> dcs = null;
    Long domainId = cmd.getDomainId();
    Long id = cmd.getId();
    boolean removeDisabledZones = false;
    String keyword = cmd.getKeyword();
    if (domainId != null) {
        // for domainId != null
        // right now, we made the decision to only list zones associated with this domain
        dcs = _dcDao.findZonesByDomainId(domainId, keyword); // private zones
    } else if ((account == null || account.getType() == Account.ACCOUNT_TYPE_ADMIN)) {
        if (keyword != null) {
            dcs = _dcDao.findByKeyword(keyword);
        } else {
            dcs = _dcDao.listAll(); // all zones
        }
    } else if (account.getType() == Account.ACCOUNT_TYPE_NORMAL) {
        // it was decided to return all zones for the user's domain, and everything above till root
        // list all zones belonging to this domain, and all of its parents
        // check the parent, if not null, add zones for that parent to list
        dcs = new ArrayList<DataCenterVO>();
        DomainVO domainRecord = _domainDao.findById(account.getDomainId());
        if (domainRecord != null) {
            // Walk the domain chain upward until the root (parent == null).
            while (true) {
                dcs.addAll(_dcDao.findZonesByDomainId(domainRecord.getId(), keyword));
                if (domainRecord.getParent() != null) {
                    domainRecord = _domainDao.findById(domainRecord.getParent());
                } else {
                    break;
                }
            }
        }
        // add all public zones too
        dcs.addAll(_dcDao.listPublicZones(keyword));
        removeDisabledZones = true;
    } else if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN || account.getType() == Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN) {
        // it was decided to return all zones for the domain admin, and everything above till root
        dcs = new ArrayList<DataCenterVO>();
        DomainVO domainRecord = _domainDao.findById(account.getDomainId());
        // this covers path till root
        if (domainRecord != null) {
            DomainVO localRecord = domainRecord;
            while (true) {
                dcs.addAll(_dcDao.findZonesByDomainId(localRecord.getId(), keyword));
                if (localRecord.getParent() != null) {
                    localRecord = _domainDao.findById(localRecord.getParent());
                } else {
                    break;
                }
            }
        }
        // this covers till leaf
        if (domainRecord != null) {
            // find all children for this domain based on a like search by path
            List<DomainVO> allChildDomains = _domainDao.findAllChildren(domainRecord.getPath(), domainRecord.getId());
            List<Long> allChildDomainIds = new ArrayList<Long>();
            // create list of domainIds for search
            for (DomainVO domain : allChildDomains) {
                allChildDomainIds.add(domain.getId());
            }
            // now make a search for zones based on this
            if (allChildDomainIds.size() > 0) {
                List<DataCenterVO> childZones = _dcDao.findChildZones((allChildDomainIds.toArray()), keyword);
                dcs.addAll(childZones);
            }
        }
        // add all public zones too
        dcs.addAll(_dcDao.listPublicZones(keyword));
        removeDisabledZones = true;
    }
    if (removeDisabledZones) {
        dcs.removeAll(_dcDao.listDisabledZones());
    }
    Boolean available = cmd.isAvailable();
    if (account != null) {
        if ((available != null) && Boolean.FALSE.equals(available)) {
            // available == false: keep only zones where the account already
            // owns a virtual router.
            List<DomainRouterVO> routers = _routerDao.listBy(account.getId());
            for (Iterator<DataCenterVO> iter = dcs.iterator(); iter.hasNext();) {
                DataCenterVO dc = iter.next();
                boolean found = false;
                for (DomainRouterVO router : routers) {
                    if (dc.getId() == router.getDataCenterIdToDeployIn()) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    iter.remove();
                }
            }
        }
    }
    if (id != null) {
        // Reduce to the single requested zone (empty list if not visible).
        List<DataCenterVO> singleZone = new ArrayList<DataCenterVO>();
        for (DataCenterVO zone : dcs) {
            if (zone.getId() == id) {
                singleZone.add(zone);
            }
        }
        return singleZone;
    }
    return dcs;
}
/** Looks up a host by its database id; returns null if not found (DAO contract assumed — verify). */
@Override
public HostVO getHostBy(long hostId) {
    return _hostDao.findById(hostId);
}
/** Identifies this management server node; the id is derived from the host's MAC address. */
@Override
public long getId() {
    return MacAddress.getMacAddress().toLong();
}
/**
 * Validates port-forwarding parameters, throwing
 * {@code InvalidParameterValueException} on the first invalid value.
 * Checks run in order: public port, private port, protocol. The private IP
 * check is currently disabled (see the commented block below).
 *
 * @param publicPort  public-side port, validated via NetUtils.isValidPort
 * @param privatePort private-side port, validated via NetUtils.isValidPort
 * @param privateIp   currently unused (validation commented out)
 * @param proto       protocol name, validated via NetUtils.isValidProto
 */
protected void checkPortParameters(String publicPort, String privatePort, String privateIp, String proto) {
    if (!NetUtils.isValidPort(publicPort)) {
        throw new InvalidParameterValueException("publicPort is an invalid value");
    }
    if (!NetUtils.isValidPort(privatePort)) {
        throw new InvalidParameterValueException("privatePort is an invalid value");
    }
    // s_logger.debug("Checking if " + privateIp + " is a valid private IP address. Guest IP address is: " +
    // _configs.get("guest.ip.network"));
    //
    // if (!NetUtils.isValidPrivateIp(privateIp, _configs.get("guest.ip.network"))) {
    // throw new InvalidParameterValueException("Invalid private ip address");
    // }
    if (!NetUtils.isValidProto(proto)) {
        throw new InvalidParameterValueException("Invalid protocol");
    }
}
/**
 * Searches the event log, AND-ing a predicate for each supplied filter.
 * userId/accountId participate only when positive; a date range is widened
 * to whole days (start pinned to 00:00:00, end to 23:59:59).
 */
@Override
public List<EventVO> getEvents(long userId, long accountId, Long domainId, String type, String level, Date startDate, Date endDate) {
    SearchCriteria<EventVO> criteria = _eventDao.createSearchCriteria();
    if (userId > 0) {
        criteria.addAnd("userId", SearchCriteria.Op.EQ, userId);
    }
    if (accountId > 0) {
        criteria.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
    }
    if (domainId != null) {
        criteria.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
    }
    if (type != null) {
        criteria.addAnd("type", SearchCriteria.Op.EQ, type);
    }
    if (level != null) {
        criteria.addAnd("level", SearchCriteria.Op.EQ, level);
    }
    // Expand the date filter(s) to cover full days before applying them.
    if (startDate != null && endDate != null) {
        Date from = massageDate(startDate, 0, 0, 0);
        Date to = massageDate(endDate, 23, 59, 59);
        criteria.addAnd("createDate", SearchCriteria.Op.BETWEEN, from, to);
    } else if (startDate != null) {
        criteria.addAnd("createDate", SearchCriteria.Op.GTEQ, massageDate(startDate, 0, 0, 0));
    } else if (endDate != null) {
        criteria.addAnd("createDate", SearchCriteria.Op.LTEQ, massageDate(endDate, 23, 59, 59));
    }
    return _eventDao.search(criteria, null);
}
/**
 * Returns a copy of the given date with its time-of-day pinned to the
 * supplied hour/minute/second. Milliseconds are left as-is; the default
 * (server-local) timezone is used for the field arithmetic.
 */
private Date massageDate(Date date, int hourOfDay, int minute, int second) {
    final Calendar working = Calendar.getInstance();
    working.setTime(date);
    working.set(Calendar.HOUR_OF_DAY, hourOfDay);
    working.set(Calendar.MINUTE, minute);
    working.set(Calendar.SECOND, second);
    return working.getTime();
}
// This method is used for permissions check for both disk and service offerings
/**
 * Returns true when the account's domain equals the offering's domain, or is
 * a descendant of it (walking the account's domain chain up toward root).
 *
 * Fix: the original compared the boxed {@code Long} ids with {@code ==},
 * which is reference equality and only "works" for values inside the Long
 * cache (-128..127); larger domain ids were never considered equal. It also
 * unboxed a possibly-null {@code offeringDomainId} inside the loop (NPE).
 * Value equality and a null guard are used instead; the ref-equal / both-null
 * fast path is preserved.
 *
 * @param accountDomainId  domain id of the calling account (may be null)
 * @param offeringDomainId domain id the offering is restricted to (may be null)
 * @return true if the offering's domain is in the account's domain hierarchy
 */
private boolean isPermissible(Long accountDomainId, Long offeringDomainId) {
    if (accountDomainId == offeringDomainId
            || (accountDomainId != null && accountDomainId.equals(offeringDomainId))) {
        return true; // account and service offering in same domain (or both unset)
    }
    if (offeringDomainId == null) {
        // No concrete offering domain to match while walking the chain.
        return false;
    }
    DomainVO domainRecord = _domainDao.findById(accountDomainId);
    if (domainRecord != null) {
        while (true) {
            if (domainRecord.getId() == offeringDomainId.longValue()) {
                return true;
            }
            // try and move on to the next domain
            if (domainRecord.getParent() != null) {
                domainRecord = _domainDao.findById(domainRecord.getParent());
            } else {
                break;
            }
        }
    }
    return false;
}
/**
 * Lists service offerings visible to the caller, sorted by sortKey and paged.
 *
 * Root admins see everything (optionally filtered); non-root callers are
 * delegated to {@link #searchServiceOfferingsInternal} which walks their
 * domain chain. A vmId filter excludes the VM's current offering and keeps
 * only offerings with matching local-storage preference (for "change
 * offering" pickers).
 *
 * NOTE(review): {@code isSystem} (a Boolean) is unboxed in the admin check
 * below before its null check further down — presumably getIsSystem() has a
 * non-null default; verify against ListServiceOfferingsCmd.
 * NOTE(review): the else-branches that log a "could not retrieve table name"
 * message interpolate the already-null {@code tablename} into the message.
 */
@Override
public List<ServiceOfferingVO> searchForServiceOfferings(ListServiceOfferingsCmd cmd) {
    // Note
    // The list method for offerings is being modified in accordance with discussion with Will/Kevin
    // For now, we will be listing the following based on the usertype
    // 1. For root, we will list all offerings
    // 2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way
    // till
    // root
    Boolean isAscending = Boolean.parseBoolean(_configDao.getValue("sortkey.algorithm"));
    isAscending = (isAscending == null ? true : isAscending);
    Filter searchFilter = new Filter(ServiceOfferingVO.class, "sortKey", isAscending, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
    Account caller = UserContext.current().getCaller();
    Object name = cmd.getServiceOfferingName();
    Object id = cmd.getId();
    Object keyword = cmd.getKeyword();
    Long vmId = cmd.getVirtualMachineId();
    Long domainId = cmd.getDomainId();
    Boolean isSystem = cmd.getIsSystem();
    String vm_type_str = cmd.getSystemVmType();
    if (caller.getType() != Account.ACCOUNT_TYPE_ADMIN && isSystem) {
        throw new InvalidParameterValueException("Only ROOT admins can access system's offering");
    }
    // Keeping this logic consistent with domain specific zones
    // if a domainId is provided, we just return the so associated with this domain
    if (domainId != null && caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
        // check if the user's domain == so's domain || user's domain is a child of so's domain
        if (!isPermissible(caller.getDomainId(), domainId)) {
            throw new PermissionDeniedException("The account:" + caller.getAccountName() + " does not fall in the same domain hierarchy as the service offering");
        }
    }
    // For non-root users
    if ((caller.getType() == Account.ACCOUNT_TYPE_NORMAL || caller.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) || caller.getType() == Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN) {
        if (isSystem) {
            throw new InvalidParameterValueException("Only root admins can access system's offering");
        }
        return searchServiceOfferingsInternal(caller, name, id, vmId, keyword, searchFilter);
    }
    // for root users, the existing flow
    if (caller.getDomainId() != 1 && isSystem) { // NON ROOT admin
        throw new InvalidParameterValueException("Non ROOT admins cannot access system's offering");
    }
    if (keyword != null) {
        // Keyword is a substring match over displayText or name.
        SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
        ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    } else if (vmId != null) {
        UserVmVO vmInstance = _userVmDao.findById(vmId);
        if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
            InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a virtual machine with specified id");
            // Get the VO object's table name.
            String tablename = AnnotationHelper.getTableName(vmInstance);
            if (tablename != null) {
                ex.addProxyObject(tablename, vmId, "vmId");
            } else {
                s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
            }
            throw ex;
        }
        _accountMgr.checkAccess(caller, null, true, vmInstance);
        ServiceOfferingVO offering = _offeringsDao.findByIdIncludingRemoved(vmInstance.getServiceOfferingId());
        sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
        // Only return offerings with the same Guest IP type and storage pool preference
        // sc.addAnd("guestIpType", SearchCriteria.Op.EQ, offering.getGuestIpType());
        sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
    }
    if (id != null) {
        sc.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (isSystem != null) {
        sc.addAnd("systemUse", SearchCriteria.Op.EQ, isSystem);
    }
    if (name != null) {
        sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (domainId != null) {
        sc.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
    }
    if (vm_type_str != null) {
        sc.addAnd("vm_type", SearchCriteria.Op.EQ, vm_type_str);
    }
    // NOTE(review): systemUse is AND-ed a second time here (also added above
    // when isSystem != null) — redundant but harmless for the query result.
    sc.addAnd("systemUse", SearchCriteria.Op.EQ, isSystem);
    sc.addAnd("removed", SearchCriteria.Op.NULL);
    return _offeringsDao.search(sc, searchFilter);
}
/**
 * Non-root listing path: collects non-system offerings from the caller's
 * domain and every ancestor up to root, then (unless a name/keyword filter
 * was given) appends all public offerings.
 *
 * An explicit {@code id} short-circuits everything and returns at most that
 * one offering. A {@code vmId} excludes the VM's current offering and keeps
 * only offerings with the same local-storage preference.
 *
 * @throws CloudAuthenticationException if the caller's domain record is missing
 */
private List<ServiceOfferingVO> searchServiceOfferingsInternal(Account caller, Object name, Object id, Long vmId, Object keyword, Filter searchFilter) {
    // it was decided to return all offerings for the user's domain, and everything above till root (for normal user
    // or
    // domain admin)
    // list all offerings belonging to this domain, and all of its parents
    // check the parent, if not null, add offerings for that parent to list
    List<ServiceOfferingVO> sol = new ArrayList<ServiceOfferingVO>();
    DomainVO domainRecord = _domainDao.findById(caller.getDomainId());
    boolean includePublicOfferings = true;
    if (domainRecord != null) {
        // One iteration per domain in the chain; the criteria are rebuilt
        // fresh each time with that domain's id.
        while (true) {
            if (id != null) {
                // Explicit id lookup ignores all other filters.
                ServiceOfferingVO so = _offeringsDao.findById((Long) id);
                if (so != null) {
                    sol.add(so);
                }
                return sol;
            }
            SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
            if (keyword != null) {
                // A keyword search suppresses the public-offerings append below.
                includePublicOfferings = false;
                SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
                ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                sc.addAnd("name", SearchCriteria.Op.SC, ssc);
            } else if (vmId != null) {
                UserVmVO vmInstance = _userVmDao.findById(vmId);
                if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
                    InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
                    // Get the VO object's table name.
                    String tablename = AnnotationHelper.getTableName(vmInstance);
                    if (tablename != null) {
                        ex.addProxyObject(tablename, vmId, "vmId");
                    } else {
                        s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
                    }
                    throw ex;
                }
                _accountMgr.checkAccess(caller, null, false, vmInstance);
                ServiceOfferingVO offering = _offeringsDao.findById(vmInstance.getServiceOfferingId());
                sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
                sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
            }
            if (name != null) {
                // A name filter also suppresses the public-offerings append.
                includePublicOfferings = false;
                sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
            }
            sc.addAnd("systemUse", SearchCriteria.Op.EQ, false);
            // for this domain
            sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
            // don't return removed service offerings
            sc.addAnd("removed", SearchCriteria.Op.NULL);
            // search and add for this domain
            sol.addAll(_offeringsDao.search(sc, searchFilter));
            // try and move on to the next domain
            if (domainRecord.getParent() != null) {
                domainRecord = _domainDao.findById(domainRecord.getParent());
            } else {
                break;// now we got all the offerings for this user/dom adm
            }
        }
    } else {
        s_logger.error("Could not find the domainId for account:" + caller.getAccountName());
        throw new CloudAuthenticationException("Could not find the domainId for account:" + caller.getAccountName());
    }
    // add all the public offerings to the sol list before returning
    if (includePublicOfferings) {
        sol.addAll(_offeringsDao.findPublicServiceOfferings());
    }
    return sol;
}
/**
 * Searches clusters, paged and ordered by id. Each supplied parameter adds
 * an AND-ed predicate; the keyword matches name or hypervisor type as a
 * substring. The requested zone is authorized (and possibly narrowed) for
 * the caller before being applied.
 */
@Override
public List<ClusterVO> searchForClusters(ListClustersCmd cmd) {
    final Filter page = new Filter(ClusterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    final SearchCriteria<ClusterVO> criteria = _clusterDao.createSearchCriteria();

    final Object clusterId = cmd.getId();
    final Object clusterName = cmd.getClusterName();
    final Object podId = cmd.getPodId();
    final Object hypervisorType = cmd.getHypervisorType();
    final Object clusterType = cmd.getClusterType();
    final Object allocationState = cmd.getAllocationState();
    final String keyword = cmd.getKeyword();
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());

    if (clusterId != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, clusterId);
    }
    if (clusterName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + clusterName + "%");
    }
    if (podId != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, podId);
    }
    if (zoneId != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    if (hypervisorType != null) {
        criteria.addAnd("hypervisorType", SearchCriteria.Op.EQ, hypervisorType);
    }
    if (clusterType != null) {
        criteria.addAnd("clusterType", SearchCriteria.Op.EQ, clusterType);
    }
    if (allocationState != null) {
        criteria.addAnd("allocationState", SearchCriteria.Op.EQ, allocationState);
    }
    if (keyword != null) {
        final SearchCriteria<ClusterVO> keywordCriteria = _clusterDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("hypervisorType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    return _clusterDao.search(criteria, page);
}
/**
 * API entry point for host searches: authorizes the requested zone for the
 * caller, then delegates to the criteria-building overload with the command's
 * filter parameters.
 */
@Override
public List<HostVO> searchForServers(ListHostsCmd cmd) {
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());
    return searchForServers(cmd.getStartIndex(), cmd.getPageSizeVal(), cmd.getHostName(), cmd.getType(), cmd.getState(),
            zoneId, cmd.getPodId(), cmd.getClusterId(), cmd.getId(), cmd.getKeyword(), cmd.getResourceState());
}
/**
 * Finds hosts a running VM could be migrated to.
 *
 * Validations: caller must be root admin; VM must exist, be Running, be on
 * XenServer/VMware/KVM/OVM, and not use local storage; its current host must
 * resolve. Returns a pair of (all other hosts in the VM's cluster, the subset
 * the HostAllocators deem to have capacity), the current host excluded.
 *
 * NOTE(review): the "Unsupported Hypervisor Type" message says "XenServer/
 * VMware/KVM only" although OVM is also accepted by the check above it.
 * NOTE(review): AnnotationHelper.getTableName is called on a null VO in the
 * not-found paths — presumably it tolerates null and returns null; verify.
 * NOTE(review): vm.getHostId() is unboxed into a long; a Running VM is
 * assumed to always have a host id.
 *
 * @param vmId       id of the VM to migrate
 * @param startIndex page start for the cluster-host listing
 * @param pageSize   page size for the cluster-host listing
 * @return pair of (candidate hosts in cluster, allocator-approved hosts)
 */
@Override
public Pair<List<? extends Host>, List<? extends Host>> listHostsForMigrationOfVM(Long vmId, Long startIndex, Long pageSize) {
    // access check - only root admin can migrate VM
    Account caller = UserContext.current().getCaller();
    if (caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Caller is not a root admin, permission denied to migrate the VM");
        }
        throw new PermissionDeniedException("No permission to migrate VM, Only Root Admin can migrate a VM!");
    }
    VMInstanceVO vm = _vmInstanceDao.findById(vmId);
    if (vm == null) {
        InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find the VM with specified id");
        // Get the VO object's table name.
        String tablename = AnnotationHelper.getTableName(vm);
        if (tablename != null) {
            ex.addProxyObject(tablename, vmId, "vmId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
        }
        throw ex;
    }
    // business logic
    if (vm.getState() != State.Running) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("VM is not Running, unable to migrate the vm" + vm);
        }
        InvalidParameterValueException ex = new InvalidParameterValueException("VM is not Running, unable to migrate the vm with specified id");
        // Get the VO object's table name.
        String tablename = AnnotationHelper.getTableName(vm);
        if (tablename != null) {
            ex.addProxyObject(tablename, vmId, "vmId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
        }
        throw ex;
    }
    if (!vm.getHypervisorType().equals(HypervisorType.XenServer) && !vm.getHypervisorType().equals(HypervisorType.VMware) && !vm.getHypervisorType().equals(HypervisorType.KVM)
            && !vm.getHypervisorType().equals(HypervisorType.Ovm)) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug(vm + " is not XenServer/VMware/KVM/OVM, cannot migrate this VM.");
        }
        throw new InvalidParameterValueException("Unsupported Hypervisor Type for VM migration, we support XenServer/VMware/KVM only");
    }
    ServiceOfferingVO svcOffering = _offeringsDao.findById(vm.getServiceOfferingId());
    if (svcOffering.getUseLocalStorage()) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug(vm + " is using Local Storage, cannot migrate this VM.");
        }
        throw new InvalidParameterValueException("Unsupported operation, VM uses Local storage, cannot migrate");
    }
    long srcHostId = vm.getHostId();
    // why is this not HostVO?
    Host srcHost = _hostDao.findById(srcHostId);
    if (srcHost == null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to find the host with id: " + srcHostId + " of this VM:" + vm);
        }
        InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find the host (with specified id) of VM with specified id");
        // Get the VO object's table name.
        String tablename = AnnotationHelper.getTableName(srcHost);
        if (tablename != null) {
            ex.addProxyObject(tablename, srcHostId, "hostId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
        }
        tablename = AnnotationHelper.getTableName(vm);
        if (tablename != null) {
            ex.addProxyObject(tablename, vmId, "vmId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
        }
        throw ex;
    }
    Long cluster = srcHost.getClusterId();
    Type hostType = srcHost.getType();
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Searching for all hosts in cluster: " + cluster + " for migrating VM " + vm);
    }
    List<? extends Host> allHostsInCluster = searchForServers(startIndex, pageSize, null, hostType, null, null, null, cluster, null, null, null);
    // filter out the current host
    allHostsInCluster.remove(srcHost);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Other Hosts in this cluster: " + allHostsInCluster);
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Calling HostAllocators to search for hosts in cluster: " + cluster + " having enough capacity and suitable for migration");
    }
    List<Host> suitableHosts = new ArrayList<Host>();
    Enumeration<HostAllocator> enHost = _hostAllocators.enumeration();
    VirtualMachineProfile<VMInstanceVO> vmProfile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
    DataCenterDeployment plan = new DataCenterDeployment(srcHost.getDataCenterId(), srcHost.getPodId(), srcHost.getClusterId(), null, null, null);
    ExcludeList excludes = new ExcludeList();
    excludes.addHost(srcHostId);
    // First allocator that returns a non-empty host list wins.
    while (enHost.hasMoreElements()) {
        final HostAllocator allocator = enHost.nextElement();
        suitableHosts = allocator.allocateTo(vmProfile, plan, Host.Type.Routing, excludes, HostAllocator.RETURN_UPTO_ALL, false);
        if (suitableHosts != null && !suitableHosts.isEmpty()) {
            break;
        }
    }
    if (suitableHosts.isEmpty()) {
        s_logger.debug("No suitable hosts found");
    } else {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Hosts having capacity and suitable for migration: " + suitableHosts);
        }
    }
    return new Pair<List<? extends Host>, List<? extends Host>>(allHostsInCluster, suitableHosts);
}
/**
 * Criteria-building core of the host search: paged, ordered by id, with an
 * AND-ed predicate per non-null argument. The keyword matches name, status
 * or type as a substring.
 */
private List<HostVO> searchForServers(Long startIndex, Long pageSize, Object name, Object type, Object state, Object zone, Object pod, Object cluster, Object id, Object keyword,
        Object resourceState) {
    final Filter page = new Filter(HostVO.class, "id", Boolean.TRUE, startIndex, pageSize);
    final SearchCriteria<HostVO> criteria = _hostDao.createSearchCriteria();

    if (keyword != null) {
        final SearchCriteria<HostVO> keywordCriteria = _hostDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("status", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (name != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (type != null) {
        // Deliberately no trailing '%': type is matched as a suffix pattern.
        criteria.addAnd("type", SearchCriteria.Op.LIKE, "%" + type);
    }
    if (state != null) {
        criteria.addAnd("status", SearchCriteria.Op.EQ, state);
    }
    if (zone != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
    }
    if (pod != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, pod);
    }
    if (cluster != null) {
        criteria.addAnd("clusterId", SearchCriteria.Op.EQ, cluster);
    }
    if (resourceState != null) {
        criteria.addAnd("resourceState", SearchCriteria.Op.EQ, resourceState);
    }
    return _hostDao.search(criteria, page);
}
/**
 * Searches pods, paged and ordered by zone id. The keyword matches name or
 * description as a substring; the requested zone is authorized (and possibly
 * narrowed) for the caller before being applied.
 */
@Override
public List<HostPodVO> searchForPods(ListPodsByCmd cmd) {
    final Filter page = new Filter(HostPodVO.class, "dataCenterId", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    final SearchCriteria<HostPodVO> criteria = _hostPodDao.createSearchCriteria();

    final String podName = cmd.getPodName();
    final Long podId = cmd.getId();
    final Object keyword = cmd.getKeyword();
    final Object allocationState = cmd.getAllocationState();
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());

    if (keyword != null) {
        final SearchCriteria<HostPodVO> keywordCriteria = _hostPodDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (podId != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, podId);
    }
    if (podName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + podName + "%");
    }
    if (zoneId != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    if (allocationState != null) {
        criteria.addAnd("allocationState", SearchCriteria.Op.EQ, allocationState);
    }
    return _hostPodDao.search(criteria, page);
}
/**
 * Lists VLAN IP ranges matching the criteria in the given command.
 * <p>
 * Resolves an owning account either from (accountName, domainId) or from a
 * projectId (the two are mutually exclusive), then builds a search over
 * id/vlan/zone/network/type/physical-network, optionally joined against the
 * account-VLAN or pod-VLAN map tables.
 *
 * @param cmd the ListVlanIpRangesCmd carrying the filter parameters
 * @return the matching VLAN VOs, paged per the command's start index/page size
 * @throws InvalidParameterValueException if both account and project are given,
 *         or the referenced account/project cannot be found
 */
@Override
public List<VlanVO> searchForVlans(ListVlanIpRangesCmd cmd) {
    // If an account name and domain ID are specified, look up the account
    String accountName = cmd.getAccountName();
    Long domainId = cmd.getDomainId();
    Long accountId = null;
    Long networkId = cmd.getNetworkId();
    Boolean forVirtual = cmd.getForVirtualNetwork();
    String vlanType = null;
    Long projectId = cmd.getProjectId();
    Long physicalNetworkId = cmd.getPhysicalNetworkId();
    if (accountName != null && domainId != null) {
        if (projectId != null) {
            throw new InvalidParameterValueException("Account and projectId can't be specified together");
        }
        Account account = _accountDao.findActiveAccount(accountName, domainId);
        if (account == null) {
            InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find account " + accountName + " in specified domain");
            // Since we don't have a DomainVO object here, we directly set tablename to "domain".
            String tablename = "domain";
            ex.addProxyObject(tablename, domainId, "domainId");
            throw ex;
        } else {
            accountId = account.getId();
        }
    }
    if (forVirtual != null) {
        if (forVirtual) {
            vlanType = VlanType.VirtualNetwork.toString();
        } else {
            vlanType = VlanType.DirectAttached.toString();
        }
    }
    // set project information
    if (projectId != null) {
        Project project = _projectMgr.getProject(projectId);
        if (project == null) {
            InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find project by id " + projectId);
            // Get the VO object's table name.
            String tablename = AnnotationHelper.getTableName(project);
            if (tablename != null) {
                ex.addProxyObject(tablename, projectId, "projectId");
            } else {
                s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
            }
            throw ex;
        }
        accountId = project.getProjectAccountId();
    }
    Filter searchFilter = new Filter(VlanVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object id = cmd.getId();
    Object vlan = cmd.getVlan();
    Object dataCenterId = cmd.getZoneId();
    Object podId = cmd.getPodId();
    Object keyword = cmd.getKeyword();
    SearchBuilder<VlanVO> sb = _vlanDao.createSearchBuilder();
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    // BUGFIX: the "vlan" condition was declared twice; the redundant duplicate
    // (which produced "vlan = ? AND vlan = ?" with the same value) was removed.
    sb.and("vlan", sb.entity().getVlanTag(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("networkId", sb.entity().getNetworkId(), SearchCriteria.Op.EQ);
    sb.and("vlanType", sb.entity().getVlanType(), SearchCriteria.Op.EQ);
    sb.and("physicalNetworkId", sb.entity().getPhysicalNetworkId(), SearchCriteria.Op.EQ);
    if (accountId != null) {
        // Restrict to VLANs dedicated to the resolved account.
        SearchBuilder<AccountVlanMapVO> accountVlanMapSearch = _accountVlanMapDao.createSearchBuilder();
        accountVlanMapSearch.and("accountId", accountVlanMapSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
        sb.join("accountVlanMapSearch", accountVlanMapSearch, sb.entity().getId(), accountVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
    }
    if (podId != null) {
        // Restrict to VLANs mapped to the requested pod.
        SearchBuilder<PodVlanMapVO> podVlanMapSearch = _podVlanMapDao.createSearchBuilder();
        podVlanMapSearch.and("podId", podVlanMapSearch.entity().getPodId(), SearchCriteria.Op.EQ);
        sb.join("podVlanMapSearch", podVlanMapSearch, sb.entity().getId(), podVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<VlanVO> sc = sb.create();
    if (keyword != null) {
        // Keyword search matches the vlan tag or the IP range; when a keyword
        // is given the other filters are intentionally not applied (original behavior).
        SearchCriteria<VlanVO> ssc = _vlanDao.createSearchCriteria();
        ssc.addOr("vlanId", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("ipRange", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("vlanId", SearchCriteria.Op.SC, ssc);
    } else {
        if (id != null) {
            sc.setParameters("id", id);
        }
        if (vlan != null) {
            sc.setParameters("vlan", vlan);
        }
        if (dataCenterId != null) {
            sc.setParameters("dataCenterId", dataCenterId);
        }
        if (networkId != null) {
            sc.setParameters("networkId", networkId);
        }
        if (accountId != null) {
            sc.setJoinParameters("accountVlanMapSearch", "accountId", accountId);
        }
        if (podId != null) {
            sc.setJoinParameters("podVlanMapSearch", "podId", podId);
        }
        if (vlanType != null) {
            sc.setParameters("vlanType", vlanType);
        }
        if (physicalNetworkId != null) {
            sc.setParameters("physicalNetworkId", physicalNetworkId);
        }
    }
    return _vlanDao.search(sc, searchFilter);
}
/**
 * Searches the global configuration table.
 * <p>
 * A keyword matches against any of the main text columns; explicit name and
 * category filters are applied on top. Entries in the "Hidden" category are
 * never returned.
 *
 * @param cmd the ListCfgsByCmd carrying name/category/keyword filters and paging
 * @return matching configuration rows, sorted by name ascending
 */
@Override
public List<ConfigurationVO> searchForConfigurations(ListCfgsByCmd cmd) {
    // Page and sort by configuration name, ascending.
    Filter pageFilter = new Filter(ConfigurationVO.class, "name", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<ConfigurationVO> criteria = _configDao.createSearchCriteria();

    Object configName = cmd.getConfigName();
    Object configCategory = cmd.getCategory();
    Object keyword = cmd.getKeyword();

    if (keyword != null) {
        // A keyword may appear in any of these columns.
        SearchCriteria<ConfigurationVO> keywordCriteria = _configDao.createSearchCriteria();
        String pattern = "%" + keyword + "%";
        for (String column : new String[] {"name", "instance", "component", "description", "category", "value"}) {
            keywordCriteria.addOr(column, SearchCriteria.Op.LIKE, pattern);
        }
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (configName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + configName + "%");
    }
    if (configCategory != null) {
        criteria.addAnd("category", SearchCriteria.Op.EQ, configCategory);
    }

    // hidden configurations are not displayed using the search API
    criteria.addAnd("category", SearchCriteria.Op.NEQ, "Hidden");
    return _configDao.search(criteria, pageFilter);
}
/**
 * Lists ISOs visible to the caller according to the requested ISO filter.
 * <p>
 * Builds the caller's ACL search parameters, resolves the permitted accounts,
 * and delegates to the shared template/ISO listing logic with isIso=true.
 *
 * @param cmd the ListIsosCmd carrying filter, paging and scoping parameters
 * @return set of (templateId, zoneId) pairs for the matching ISOs
 */
@Override
public Set<Pair<Long, Long>> listIsos(ListIsosCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
    TemplateFilter isoFilter = TemplateFilter.valueOf(cmd.getIsoFilter());
    Account caller = UserContext.current().getCaller();

    // Only non-regular accounts requesting the "all" filter see the unrestricted listing.
    boolean listAll = caller.getType() != Account.ACCOUNT_TYPE_NORMAL && isoFilter == TemplateFilter.all;

    List<Long> permittedAccountIds = new ArrayList<Long>();
    Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject =
            new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, cmd.getId(), cmd.getAccountName(), cmd.getProjectId(), permittedAccountIds, domainIdRecursiveListProject, listAll, false);
    ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();

    // Resolve permitted account ids into Account objects for the shared lister.
    List<Account> permittedAccounts = new ArrayList<Account>();
    for (Long permittedAccountId : permittedAccountIds) {
        permittedAccounts.add(_accountMgr.getAccount(permittedAccountId));
    }

    HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
    // isIso=true and showDomr=true for ISO listings.
    return listTemplates(cmd.getId(), cmd.getIsoName(), cmd.getKeyword(), isoFilter, true, cmd.isBootable(), cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, true,
            cmd.listInReadyState(), permittedAccounts, caller, listProjectResourcesCriteria);
}
/**
 * Lists templates visible to the caller according to the requested filter.
 * <p>
 * Builds the caller's ACL search parameters, resolves the permitted accounts,
 * and delegates to the shared template/ISO listing logic with isIso=false.
 *
 * @param cmd the ListTemplatesCmd carrying filter, paging and scoping parameters
 * @return set of (templateId, zoneId) pairs for the matching templates
 */
@Override
public Set<Pair<Long, Long>> listTemplates(ListTemplatesCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
    TemplateFilter templateFilter = TemplateFilter.valueOf(cmd.getTemplateFilter());
    Long id = cmd.getId();
    Account caller = UserContext.current().getCaller();

    // Only non-regular accounts requesting the "all" filter see the unrestricted listing.
    boolean listAll = caller.getType() != Account.ACCOUNT_TYPE_NORMAL && templateFilter == TemplateFilter.all;

    List<Long> permittedAccountIds = new ArrayList<Long>();
    Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject =
            new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccountIds, domainIdRecursiveListProject, listAll, false);
    ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();

    // Resolve permitted account ids into Account objects for the shared lister.
    List<Account> permittedAccounts = new ArrayList<Account>();
    for (Long permittedAccountId : permittedAccountIds) {
        permittedAccounts.add(_accountMgr.getAccount(permittedAccountId));
    }

    // System (domR) templates are hidden for the self-executable and featured views.
    boolean showDomr = templateFilter != TemplateFilter.selfexecutable && templateFilter != TemplateFilter.featured;
    HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
    return listTemplates(id, cmd.getTemplateName(), cmd.getKeyword(), templateFilter, false, null, cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, showDomr,
            cmd.listInReadyState(), permittedAccounts, caller, listProjectResourcesCriteria);
}
/**
 * Core template/ISO listing logic shared by {@code listTemplates(ListTemplatesCmd)}
 * and {@code listIsos(ListIsosCmd)}.
 * <p>
 * When a specific templateId is given, validates it matches the requested kind
 * (ISO vs. template) and, for non-public templates, checks the caller's access.
 * Otherwise delegates to the template DAO search; when Swift storage is enabled
 * the Swift and regular search results are merged.
 *
 * @param templateId optional id of one specific template/ISO to return
 * @param name optional name filter
 * @param keyword optional keyword filter
 * @param templateFilter visibility filter (featured, self, all, ...)
 * @param isIso true when listing ISOs, false when listing templates
 * @param bootable ISO-only bootable flag filter (may be null)
 * @param pageSize page size for the DAO search
 * @param startIndex start index for the DAO search
 * @param zoneId optional zone restriction
 * @param hyperType hypervisor type filter
 * @param showDomr whether system (domR) templates are included
 * @param onlyReady whether to restrict to templates in ready state
 * @param permittedAccounts accounts the caller may list for
 * @param caller the calling account
 * @param listProjectResourcesCriteria project-resource listing criteria
 * @return set of (templateId, zoneId) pairs matching the criteria
 * @throws InvalidParameterValueException if templateId is invalid or its format
 *         does not match the isIso flag
 */
private Set<Pair<Long, Long>> listTemplates(Long templateId, String name, String keyword, TemplateFilter templateFilter, boolean isIso, Boolean bootable, Long pageSize, Long startIndex,
        Long zoneId, HypervisorType hyperType, boolean showDomr, boolean onlyReady, List<Account> permittedAccounts, Account caller, ListProjectResourcesCriteria listProjectResourcesCriteria) {
    VMTemplateVO template = null;
    if (templateId != null) {
        template = _templateDao.findById(templateId);
        if (template == null) {
            throw new InvalidParameterValueException("Please specify a valid template ID.");
        }// If ISO requested then it should be ISO.
        if (isIso && template.getFormat() != ImageFormat.ISO) {
            s_logger.error("Template Id " + templateId + " is not an ISO");
            InvalidParameterValueException ex = new InvalidParameterValueException("Specified Template Id is not an ISO");
            // Get the VO object's table name.
            String tablename = AnnotationHelper.getTableName(template);
            if (tablename != null) {
                ex.addProxyObject(tablename, templateId, "templateId");
            } else {
                s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
            }
            throw ex;
        }// If ISO not requested then it shouldn't be an ISO.
        if (!isIso && template.getFormat() == ImageFormat.ISO) {
            s_logger.error("Incorrect format of the template id " + templateId);
            InvalidParameterValueException ex = new InvalidParameterValueException("Incorrect format " + template.getFormat() + " of the specified template id");
            // Get the VO object's table name.
            String tablename = AnnotationHelper.getTableName(template);
            if (tablename != null) {
                ex.addProxyObject(tablename, templateId, "templateId");
            } else {
                s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
            }
            throw ex;
        }
    }
    // Domain scope: first permitted account's domain, or ROOT when none given.
    DomainVO domain = null;
    if (!permittedAccounts.isEmpty()) {
        domain = _domainDao.findById(permittedAccounts.get(0).getDomainId());
    } else {
        domain = _domainDao.findById(DomainVO.ROOT_DOMAIN);
    }
    // Hypervisor filtering applies to templates only, not ISOs.
    List<HypervisorType> hypers = null;
    if (!isIso) {
        hypers = _resourceMgr.listAvailHypervisorInZone(null, null);
    }
    Set<Pair<Long, Long>> templateZonePairSet = new HashSet<Pair<Long, Long>>();
    if (_swiftMgr.isSwiftEnabled()) {
        if (template == null) {
            // Merge Swift-backed results with regular secondary-storage results,
            // skipping regular entries whose template already appears in the
            // Swift set (marked with the sentinel zone id -1).
            templateZonePairSet = _templateDao.searchSwiftTemplates(name, keyword, templateFilter, isIso, hypers, bootable, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr,
                    permittedAccounts, caller);
            Set<Pair<Long, Long>> templateZonePairSet2 = new HashSet<Pair<Long, Long>>();
            templateZonePairSet2 = _templateDao.searchTemplates(name, keyword, templateFilter, isIso, hypers, bootable, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr,
                    permittedAccounts, caller, listProjectResourcesCriteria);
            for (Pair<Long, Long> tmpltPair : templateZonePairSet2) {
                if (!templateZonePairSet.contains(new Pair<Long, Long>(tmpltPair.first(), -1L))) {
                    templateZonePairSet.add(tmpltPair);
                }
            }
        } else {
            // if template is not public, perform permission check here
            if (!template.isPublicTemplate() && caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
                Account owner = _accountMgr.getAccount(template.getAccountId());
                _accountMgr.checkAccess(caller, null, true, owner);
            }
            templateZonePairSet.add(new Pair<Long, Long>(template.getId(), zoneId));
        }
    } else {
        if (template == null) {
            templateZonePairSet = _templateDao.searchTemplates(name, keyword, templateFilter, isIso, hypers, bootable, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr,
                    permittedAccounts, caller, listProjectResourcesCriteria);
        } else {
            // if template is not public, perform permission check here
            if (!template.isPublicTemplate() && caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
                Account owner = _accountMgr.getAccount(template.getAccountId());
                _accountMgr.checkAccess(caller, null, true, owner);
            }
            templateZonePairSet.add(new Pair<Long, Long>(template.getId(), zoneId));
        }
    }
    return templateZonePairSet;
}
/**
 * Updates an ISO's metadata. ISOs and templates share the same update logic,
 * so this simply delegates to {@code updateTemplateOrIso}.
 */
@Override
public VMTemplateVO updateTemplate(UpdateIsoCmd cmd) {
    return updateTemplateOrIso(cmd);
}
/**
 * Updates a template's metadata. Templates and ISOs share the same update
 * logic, so this simply delegates to {@code updateTemplateOrIso}.
 */
@Override
public VMTemplateVO updateTemplate(UpdateTemplateCmd cmd) {
    return updateTemplateOrIso(cmd);
}
/**
 * Shared implementation for updating template and ISO metadata.
 * <p>
 * Verifies the template exists and is not removed, refuses updates to the
 * reserved system template (id 1), performs a permission check, and applies
 * only the fields present on the command.
 *
 * @param cmd the update command (template or ISO variant)
 * @return the freshly-reloaded template VO after the update (or the unchanged
 *         VO when no updatable field was supplied)
 * @throws InvalidParameterValueException if the template is missing/removed,
 *         is the system template, or carries an invalid format/guest OS id
 */
private VMTemplateVO updateTemplateOrIso(UpdateTemplateOrIsoCmd cmd) {
    Long id = cmd.getId();
    String name = cmd.getTemplateName();
    String displayText = cmd.getDisplayText();
    String format = cmd.getFormat();
    Long guestOSId = cmd.getOsTypeId();
    Boolean passwordEnabled = cmd.isPasswordEnabled();
    Boolean bootable = cmd.isBootable();
    Integer sortKey = cmd.getSortKey();
    Account account = UserContext.current().getCaller();
    // verify that template exists
    VMTemplateVO template = _templateDao.findById(id);
    if (template == null || template.getRemoved() != null) {
        InvalidParameterValueException ex = new InvalidParameterValueException("unable to find template/iso with specified id");
        // BUGFIX: template may be null on this path; only attempt the annotation
        // lookup when a VO instance is actually available (avoids an NPE).
        String tablename = (template != null) ? AnnotationHelper.getTableName(template) : null;
        if (tablename != null) {
            ex.addProxyObject(tablename, id, "templateId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
        }
        throw ex;
    }
    // Don't allow to modify system template
    // BUGFIX: was "id == Long.valueOf(1)", which compares boxed Longs by
    // reference; compare the primitive value instead.
    if (id.longValue() == 1L) {
        InvalidParameterValueException ex = new InvalidParameterValueException("Unable to update template/iso of specified id");
        // Get the VO object's table name.
        String tablename = AnnotationHelper.getTableName(template);
        if (tablename != null) {
            ex.addProxyObject(tablename, id, "templateId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
        }
        throw ex;
    }
    // do a permission check
    _accountMgr.checkAccess(account, AccessType.ModifyEntry, true, template);
    // Short-circuit when no updatable field was supplied.
    boolean updateNeeded = !(name == null && displayText == null && format == null && guestOSId == null && passwordEnabled == null && bootable == null && sortKey == null);
    if (!updateNeeded) {
        return template;
    }
    template = _templateDao.createForUpdate(id);
    if (name != null) {
        template.setName(name);
    }
    if (displayText != null) {
        template.setDisplayText(displayText);
    }
    if (sortKey != null) {
        template.setSortKey(sortKey);
    }
    ImageFormat imageFormat = null;
    if (format != null) {
        try {
            imageFormat = ImageFormat.valueOf(format.toUpperCase());
        } catch (IllegalArgumentException e) {
            throw new InvalidParameterValueException("Image format: " + format + " is incorrect. Supported formats are " + EnumUtils.listValues(ImageFormat.values()));
        }
        template.setFormat(imageFormat);
    }
    if (guestOSId != null) {
        GuestOSVO guestOS = _guestOSDao.findById(guestOSId);
        if (guestOS == null) {
            throw new InvalidParameterValueException("Please specify a valid guest OS ID.");
        } else {
            template.setGuestOSId(guestOSId);
        }
    }
    if (passwordEnabled != null) {
        template.setEnablePassword(passwordEnabled);
    }
    if (bootable != null) {
        template.setBootable(bootable);
    }
    _templateDao.update(id, template);
    // Reload so the caller sees the persisted state.
    return _templateDao.findById(id);
}
/**
 * Searches for events matching the criteria in the given command.
 * <p>
 * Applies the caller's ACL scoping (accounts, domain subtree, project
 * resources), then filters by id/type/level/date range and keyword. When both
 * entryTime and duration are supplied, instead returns "pending" events:
 * completed-state start events in the [now-entryTime, now-duration] window for
 * which no completion event exists yet.
 *
 * @param cmd the ListEventsCmd carrying filter, paging and scoping parameters
 * @return matching events, newest first
 * @throws InvalidParameterValueException if entryTime is not greater than duration
 */
@Override
public List<EventVO> searchForEvents(ListEventsCmd cmd) {
    Account caller = UserContext.current().getCaller();
    List<Long> permittedAccounts = new ArrayList<Long>();
    Long id = cmd.getId();
    String type = cmd.getType();
    String level = cmd.getLevel();
    Date startDate = cmd.getStartDate();
    Date endDate = cmd.getEndDate();
    String keyword = cmd.getKeyword();
    Integer entryTime = cmd.getEntryTime();
    Integer duration = cmd.getDuration();
    // Resolve domain / recursive / project-listing criteria from the caller's ACLs.
    Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
    Long domainId = domainIdRecursiveListProject.first();
    Boolean isRecursive = domainIdRecursiveListProject.second();
    ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
    // Newest events first.
    Filter searchFilter = new Filter(EventVO.class, "createDate", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<EventVO> sb = _eventDao.createSearchBuilder();
    sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
    if (((permittedAccounts.isEmpty()) && (domainId != null) && isRecursive)) {
        // if accountId isn't specified, we can do a domain match for the admin case if isRecursive is true
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    if (listProjectResourcesCriteria != null) {
        // Join against the account table to include only (or exclude) project accounts.
        SearchBuilder<AccountVO> accountSearch = _accountDao.createSearchBuilder();
        if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.ListProjectResourcesOnly) {
            accountSearch.and("accountType", accountSearch.entity().getType(), SearchCriteria.Op.EQ);
            sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        } else if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.SkipProjectResources) {
            accountSearch.and("accountType", accountSearch.entity().getType(), SearchCriteria.Op.NEQ);
            sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        }
    }
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("levelL", sb.entity().getLevel(), SearchCriteria.Op.LIKE);
    sb.and("levelEQ", sb.entity().getLevel(), SearchCriteria.Op.EQ);
    sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
    sb.and("createDateB", sb.entity().getCreateDate(), SearchCriteria.Op.BETWEEN);
    sb.and("createDateG", sb.entity().getCreateDate(), SearchCriteria.Op.GTEQ);
    sb.and("createDateL", sb.entity().getCreateDate(), SearchCriteria.Op.LTEQ);
    sb.and("state", sb.entity().getState(), SearchCriteria.Op.NEQ);
    sb.and("startId", sb.entity().getStartId(), SearchCriteria.Op.EQ);
    sb.and("createDate", sb.entity().getCreateDate(), SearchCriteria.Op.BETWEEN);
    SearchCriteria<EventVO> sc = sb.create();
    if (listProjectResourcesCriteria != null) {
        sc.setJoinParameters("accountSearch", "accountType", Account.ACCOUNT_TYPE_PROJECT);
    }
    if (!permittedAccounts.isEmpty()) {
        sc.setParameters("accountIdIN", permittedAccounts.toArray());
    } else if (domainId != null) {
        DomainVO domain = _domainDao.findById(domainId);
        if (isRecursive) {
            // Match the whole domain subtree by path prefix.
            sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        } else {
            sc.setParameters("domainId", domainId);
        }
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    if (keyword != null) {
        // Keyword matches type, description, or level.
        SearchCriteria<EventVO> ssc = _eventDao.createSearchCriteria();
        ssc.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("level", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("level", SearchCriteria.Op.SC, ssc);
    }
    if (level != null) {
        sc.setParameters("levelEQ", level);
    }
    if (type != null) {
        sc.setParameters("type", type);
    }
    if (startDate != null && endDate != null) {
        sc.setParameters("createDateB", startDate, endDate);
    } else if (startDate != null) {
        sc.setParameters("createDateG", startDate);
    } else if (endDate != null) {
        sc.setParameters("createDateL", endDate);
    }
    if ((entryTime != null) && (duration != null)) {
        // "Pending events" mode: find completed-state start events in the time
        // window [now - entryTime, now - duration] that have no completion event.
        if (entryTime <= duration) {
            throw new InvalidParameterValueException("Entry time must be greater than duration");
        }
        Calendar calMin = Calendar.getInstance();
        Calendar calMax = Calendar.getInstance();
        calMin.add(Calendar.SECOND, -entryTime);
        calMax.add(Calendar.SECOND, -duration);
        Date minTime = calMin.getTime();
        Date maxTime = calMax.getTime();
        sc.setParameters("state", com.cloud.event.Event.State.Completed);
        sc.setParameters("startId", 0);
        sc.setParameters("createDate", minTime, maxTime);
        List<EventVO> startedEvents = _eventDao.searchAllEvents(sc, searchFilter);
        List<EventVO> pendingEvents = new ArrayList<EventVO>();
        for (EventVO event : startedEvents) {
            // An event with no matching completion record is still pending.
            EventVO completedEvent = _eventDao.findCompletedEvent(event.getId());
            if (completedEvent == null) {
                pendingEvents.add(event);
            }
        }
        return pendingEvents;
    } else {
        return _eventDao.searchAllEvents(sc, searchFilter);
    }
}
/**
 * Searches for virtual routers matching the criteria in the given command.
 * <p>
 * Applies the caller's ACL scoping, then filters by id, name, state, zone,
 * pod, host, network (via NIC join), and keyword.
 *
 * @param cmd the ListRoutersCmd carrying filter, paging and scoping parameters
 * @return matching router VOs, paged per the command
 */
@Override
public List<DomainRouterVO> searchForRouters(ListRoutersCmd cmd) {
    Long id = cmd.getId();
    String name = cmd.getRouterName();
    String state = cmd.getState();
    Long zone = cmd.getZoneId();
    Long pod = cmd.getPodId();
    Long hostId = cmd.getHostId();
    String keyword = cmd.getKeyword();
    Long networkId = cmd.getNetworkId();
    Account caller = UserContext.current().getCaller();
    List<Long> permittedAccounts = new ArrayList<Long>();
    // Resolve domain / recursive / project-listing criteria from the caller's ACLs.
    Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
    Long domainId = domainIdRecursiveListProject.first();
    Boolean isRecursive = domainIdRecursiveListProject.second();
    ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
    Filter searchFilter = new Filter(DomainRouterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<DomainRouterVO> sb = _routerDao.createSearchBuilder();
    _accountMgr.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
    sb.and("name", sb.entity().getHostName(), SearchCriteria.Op.LIKE);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterIdToDeployIn(), SearchCriteria.Op.EQ);
    sb.and("podId", sb.entity().getPodIdToDeployIn(), SearchCriteria.Op.EQ);
    sb.and("hostId", sb.entity().getHostId(), SearchCriteria.Op.EQ);
    if (networkId != null) {
        // Join router -> nic -> network to restrict to routers on the given network.
        SearchBuilder<NicVO> nicSearch = _nicDao.createSearchBuilder();
        nicSearch.and("networkId", nicSearch.entity().getNetworkId(), SearchCriteria.Op.EQ);
        SearchBuilder<NetworkVO> networkSearch = _networkDao.createSearchBuilder();
        networkSearch.and("networkId", networkSearch.entity().getId(), SearchCriteria.Op.EQ);
        nicSearch.join("networkSearch", networkSearch, nicSearch.entity().getNetworkId(), networkSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        sb.join("nicSearch", nicSearch, sb.entity().getId(), nicSearch.entity().getInstanceId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<DomainRouterVO> sc = sb.create();
    _accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
    if (keyword != null) {
        // Keyword matches host name, instance name, or state.
        SearchCriteria<DomainRouterVO> ssc = _routerDao.createSearchCriteria();
        ssc.addOr("hostName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("instanceName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("hostName", SearchCriteria.Op.SC, ssc);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    if (state != null) {
        sc.setParameters("state", state);
    }
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    if (pod != null) {
        sc.setParameters("podId", pod);
    }
    if (hostId != null) {
        sc.setParameters("hostId", hostId);
    }
    if (networkId != null) {
        sc.setJoinParameters("nicSearch", "networkId", networkId);
    }
    return _routerDao.search(sc, searchFilter);
}
/**
 * Searches for public IP addresses matching the criteria in the given command.
 * <p>
 * Applies the caller's ACL scoping, then filters by zone, address, VLAN,
 * source-NAT/static-NAT flags, physical/associated network, and keyword.
 * Defaults to allocated addresses only and to the VirtualNetwork VLAN type
 * unless the command says otherwise; system VM (SSVM/CPVM) addresses are
 * excluded from allocated virtual-network listings.
 *
 * @param cmd the ListPublicIpAddressesCmd carrying filter, paging and scoping parameters
 * @return matching IP address VOs, sorted by address descending
 */
@Override
public List<IPAddressVO> searchForIPAddresses(ListPublicIpAddressesCmd cmd) {
    Object keyword = cmd.getKeyword();
    Long physicalNetworkId = cmd.getPhysicalNetworkId();
    Long associatedNetworkId = cmd.getAssociatedNetworkId();
    Long zone = cmd.getZoneId();
    String address = cmd.getIpAddress();
    Long vlan = cmd.getVlanId();
    Boolean forVirtualNetwork = cmd.isForVirtualNetwork();
    Boolean forLoadBalancing = cmd.isForLoadBalancing();
    Long ipId = cmd.getId();
    Boolean sourceNat = cmd.getIsSourceNat();
    Boolean staticNat = cmd.getIsStaticNat();
    Account caller = UserContext.current().getCaller();
    List<Long> permittedAccounts = new ArrayList<Long>();
    Boolean isAllocated = cmd.isAllocatedOnly();
    // Default to listing allocated addresses only.
    if (isAllocated == null) {
        isAllocated = Boolean.TRUE;
    }
    // Resolve domain / recursive / project-listing criteria from the caller's ACLs.
    Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, cmd.getId(), cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
    Long domainId = domainIdRecursiveListProject.first();
    Boolean isRecursive = domainIdRecursiveListProject.second();
    ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
    Filter searchFilter = new Filter(IPAddressVO.class, "address", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<IPAddressVO> sb = _publicIpAddressDao.createSearchBuilder();
    _accountMgr.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("address", sb.entity().getAddress(), SearchCriteria.Op.EQ);
    sb.and("vlanDbId", sb.entity().getVlanId(), SearchCriteria.Op.EQ);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("physicalNetworkId", sb.entity().getPhysicalNetworkId(), SearchCriteria.Op.EQ);
    sb.and("associatedNetworkIdEq", sb.entity().getAssociatedWithNetworkId(), SearchCriteria.Op.EQ);
    sb.and("isSourceNat", sb.entity().isSourceNat(), SearchCriteria.Op.EQ);
    sb.and("isStaticNat", sb.entity().isOneToOneNat(), SearchCriteria.Op.EQ);
    if (forLoadBalancing != null && (Boolean) forLoadBalancing) {
        // Restrict to addresses that are the source IP of a load balancer;
        // grouping collapses duplicates from the join.
        SearchBuilder<LoadBalancerVO> lbSearch = _loadbalancerDao.createSearchBuilder();
        sb.join("lbSearch", lbSearch, sb.entity().getId(), lbSearch.entity().getSourceIpAddressId(), JoinType.INNER);
        sb.groupBy(sb.entity().getId());
    }
    if (keyword != null && address == null) {
        // Keyword does a LIKE match on the address (only when no exact address was given).
        sb.and("addressLIKE", sb.entity().getAddress(), SearchCriteria.Op.LIKE);
    }
    SearchBuilder<VlanVO> vlanSearch = _vlanDao.createSearchBuilder();
    vlanSearch.and("vlanType", vlanSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
    sb.join("vlanSearch", vlanSearch, sb.entity().getVlanId(), vlanSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    boolean allocatedOnly = false;
    if ((isAllocated != null) && (isAllocated == true)) {
        sb.and("allocated", sb.entity().getAllocatedTime(), SearchCriteria.Op.NNULL);
        allocatedOnly = true;
    }
    VlanType vlanType = null;
    if (forVirtualNetwork != null) {
        vlanType = (Boolean) forVirtualNetwork ? VlanType.VirtualNetwork : VlanType.DirectAttached;
    } else {
        // Default to the virtual-network VLAN type.
        vlanType = VlanType.VirtualNetwork;
    }
    // don't show SSVM/CPVM ips
    if (vlanType == VlanType.VirtualNetwork && (allocatedOnly)) {
        sb.and("associatedNetworkId", sb.entity().getAssociatedWithNetworkId(), SearchCriteria.Op.NNULL);
    }
    SearchCriteria<IPAddressVO> sc = sb.create();
    _accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
    sc.setJoinParameters("vlanSearch", "vlanType", vlanType);
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    if (ipId != null) {
        sc.setParameters("id", ipId);
    }
    if (sourceNat != null) {
        sc.setParameters("isSourceNat", sourceNat);
    }
    if (staticNat != null) {
        sc.setParameters("isStaticNat", staticNat);
    }
    if (address == null && keyword != null) {
        sc.setParameters("addressLIKE", "%" + keyword + "%");
    }
    if (address != null) {
        sc.setParameters("address", address);
    }
    if (vlan != null) {
        sc.setParameters("vlanDbId", vlan);
    }
    if (physicalNetworkId != null) {
        sc.setParameters("physicalNetworkId", physicalNetworkId);
    }
    if (associatedNetworkId != null) {
        sc.setParameters("associatedNetworkIdEq", associatedNetworkId);
    }
    return _publicIpAddressDao.search(sc, searchFilter);
}
/**
 * Lists guest OS entries, optionally filtered by id and/or OS category.
 *
 * @param cmd the ListGuestOsCmd carrying optional id/category filters and paging
 * @return matching guest OS VOs, sorted by display name ascending
 */
@Override
public List<GuestOSVO> listGuestOSByCriteria(ListGuestOsCmd cmd) {
    Long id = cmd.getId();
    Long osCategoryId = cmd.getOsCategoryId();

    // Sort by display name, ascending.
    Filter pageFilter = new Filter(GuestOSVO.class, "displayName", true, cmd.getStartIndex(), cmd.getPageSizeVal());

    SearchBuilder<GuestOSVO> builder = _guestOSDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);
    builder.and("categoryId", builder.entity().getCategoryId(), SearchCriteria.Op.EQ);

    SearchCriteria<GuestOSVO> criteria = builder.create();
    if (id != null) {
        criteria.setParameters("id", id);
    }
    if (osCategoryId != null) {
        criteria.setParameters("categoryId", osCategoryId);
    }
    return _guestOSDao.search(criteria, pageFilter);
}
/**
 * Lists guest OS categories, optionally filtered by id.
 *
 * @param cmd the ListGuestOsCategoriesCmd carrying an optional id filter and paging
 * @return matching guest OS category VOs, sorted by id ascending
 */
@Override
public List<GuestOSCategoryVO> listGuestOSCategoriesByCriteria(ListGuestOsCategoriesCmd cmd) {
    Long id = cmd.getId();

    // Sort by id, ascending.
    Filter pageFilter = new Filter(GuestOSCategoryVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());

    SearchBuilder<GuestOSCategoryVO> builder = _guestOSCategoryDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);

    SearchCriteria<GuestOSCategoryVO> criteria = builder.create();
    if (id != null) {
        criteria.setParameters("id", id);
    }
    return _guestOSCategoryDao.search(criteria, pageFilter);
}
/**
 * Assigns and returns a console proxy for the given user VM in the given zone.
 * Delegates to the console proxy manager.
 */
@Override
public ConsoleProxyInfo getConsoleProxyForVm(long dataCenterId, long userVmId) {
    return _consoleProxyMgr.assignProxy(dataCenterId, userVmId);
}
// Starts the console proxy VM with the given instance id; the @ActionEvent
// annotation records this as an async "start" action event.
@ActionEvent(eventType = EventTypes.EVENT_PROXY_START, eventDescription = "starting console proxy Vm", async = true)
private ConsoleProxyVO startConsoleProxy(long instanceId) {
    return _consoleProxyMgr.startProxy(instanceId);
}
// Stops the given console proxy system VM; returns its VO on success, or null
// when advanceStop reports failure. Recorded as an async "stop" action event.
@ActionEvent(eventType = EventTypes.EVENT_PROXY_STOP, eventDescription = "stopping console proxy Vm", async = true)
private ConsoleProxyVO stopConsoleProxy(VMInstanceVO systemVm, boolean isForced) throws ResourceUnavailableException, OperationTimedoutException, ConcurrentOperationException {
    // Resolve the calling user for the stop operation's context.
    User caller = _userDao.findById(UserContext.current().getCallerUserId());
    // advanceStop returns true on success; only then look up and return the proxy VO.
    if (_itMgr.advanceStop(systemVm, isForced, caller, UserContext.current().getCaller())) {
        return _consoleProxyDao.findById(systemVm.getId());
    }
    return null;
}
// Reboots the console proxy VM with the given instance id and returns its
// (re-looked-up) VO. Recorded as an async "reboot" action event.
@ActionEvent(eventType = EventTypes.EVENT_PROXY_REBOOT, eventDescription = "rebooting console proxy Vm", async = true)
private ConsoleProxyVO rebootConsoleProxy(long instanceId) {
    _consoleProxyMgr.rebootProxy(instanceId);
    return _consoleProxyDao.findById(instanceId);
}
// Destroys the console proxy VM with the given instance id. The VO is captured
// before destruction so it can be returned on success; returns null on failure.
// Recorded as an async "destroy" action event.
@ActionEvent(eventType = EventTypes.EVENT_PROXY_DESTROY, eventDescription = "destroying console proxy Vm", async = true)
public ConsoleProxyVO destroyConsoleProxy(long instanceId) {
    ConsoleProxyVO proxy = _consoleProxyDao.findById(instanceId);
    if (_consoleProxyMgr.destroyProxy(instanceId)) {
        return proxy;
    }
    return null;
}
/**
 * Returns the console-access URL root for the given VM, or null when the VM
 * does not exist or no console proxy can be assigned for it.
 *
 * @param vmId id of the VM whose console URL root is requested
 * @return the proxy image URL, or null
 */
@Override
public String getConsoleAccessUrlRoot(long vmId) {
    VMInstanceVO vm = _vmInstanceDao.findById(vmId);
    if (vm == null) {
        return null;
    }
    // Assign (or reuse) a console proxy in the VM's zone.
    ConsoleProxyInfo proxy = getConsoleProxyForVm(vm.getDataCenterIdToDeployIn(), vmId);
    return (proxy == null) ? null : proxy.getProxyImageUrl();
}
/**
 * Queries the agent for the VNC address and port of the given VM.
 *
 * @param vm the VM to query; must be running on a host for the lookup to succeed
 * @return (address, port) on success, or (null, -1) when the VM has no host or
 *         the agent call fails
 */
@Override
public Pair<String, Integer> getVncPort(VirtualMachine vm) {
    // A VM without a host cannot be queried.
    if (vm.getHostId() == null) {
        s_logger.warn("VM " + vm.getHostName() + " does not have host, return -1 for its VNC port");
        return new Pair<String, Integer>(null, -1);
    }

    if (s_logger.isTraceEnabled()) {
        s_logger.trace("Trying to retrieve VNC port from agent about VM " + vm.getHostName());
    }

    GetVncPortAnswer answer =
            (GetVncPortAnswer) _agentMgr.easySend(vm.getHostId(), new GetVncPortCommand(vm.getId(), vm.getInstanceName()));
    // Treat a missing or unsuccessful answer as "no port available".
    if (answer == null || !answer.getResult()) {
        return new Pair<String, Integer>(null, -1);
    }
    return new Pair<String, Integer>(answer.getAddress(), answer.getPort());
}
/**
 * Updates a domain's name and/or network domain.
 * <p>
 * The ROOT domain may not be renamed; a new name must be unique cloud-wide.
 * Renaming also rewrites the domain's path and the paths of all its children
 * inside a single transaction. An empty networkDomain clears the field.
 *
 * @param cmd the UpdateDomainCmd carrying the domain id and new values
 * @return the freshly-reloaded domain VO after the update
 * @throws InvalidParameterValueException if the domain does not exist, is ROOT
 *         and a rename was requested, the new name is taken, or the network
 *         domain is malformed
 */
@Override
@ActionEvent(eventType = EventTypes.EVENT_DOMAIN_UPDATE, eventDescription = "updating Domain")
@DB
public DomainVO updateDomain(UpdateDomainCmd cmd) {
    Long domainId = cmd.getId();
    String domainName = cmd.getDomainName();
    String networkDomain = cmd.getNetworkDomain();
    // check if domain exists in the system
    DomainVO domain = _domainDao.findById(domainId);
    if (domain == null) {
        InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find domain with specified domain id");
        // BUGFIX: domain is null here, so the annotation lookup on it would NPE.
        // Use the literal "domain" table name, as done elsewhere in this class.
        ex.addProxyObject("domain", domainId, "domainId");
        throw ex;
    } else if (domain.getParent() == null && domainName != null) {
        // check if domain is ROOT domain - and deny to edit it with the new name
        throw new InvalidParameterValueException("ROOT domain can not be edited with a new name");
    }
    // check permissions
    Account caller = UserContext.current().getCaller();
    _accountMgr.checkAccess(caller, domain);
    // domain name is unique in the cloud
    if (domainName != null) {
        SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
        sc.addAnd("name", SearchCriteria.Op.EQ, domainName);
        List<DomainVO> domains = _domainDao.search(sc, null);
        // Allow the "rename" to this domain's own current name.
        boolean sameDomain = (domains.size() == 1 && domains.get(0).getId() == domainId);
        if (!domains.isEmpty() && !sameDomain) {
            InvalidParameterValueException ex = new InvalidParameterValueException("Failed to update specified domain id with name '" + domainName + "' since it already exists in the system");
            // Get the domainVO object's table name.
            String tablename = AnnotationHelper.getTableName(domain);
            if (tablename != null) {
                ex.addProxyObject(tablename, domainId, "domainId");
            } else {
                s_logger.info("\nCould not retrieve table name (annotation) from domainVO proxy cglib object\n");
            }
            throw ex;
        }
    }
    // validate network domain
    if (networkDomain != null && !networkDomain.isEmpty()) {
        if (!NetUtils.verifyDomainName(networkDomain)) {
            throw new InvalidParameterValueException(
                    "Invalid network domain. Total length shouldn't exceed 190 chars. Each domain label must be between 1 and 63 characters long, can contain ASCII letters 'a' through 'z', the digits '0' through '9', "
                            + "and the hyphen ('-'); can't start or end with \"-\"");
        }
    }
    // Rename (with path rewrite of the whole subtree) and network-domain change
    // are applied atomically.
    Transaction txn = Transaction.currentTxn();
    txn.start();
    if (domainName != null) {
        String updatedDomainPath = getUpdatedDomainPath(domain.getPath(), domainName);
        updateDomainChildren(domain, updatedDomainPath);
        domain.setName(domainName);
        domain.setPath(updatedDomainPath);
    }
    if (networkDomain != null) {
        // An explicitly empty string clears the network domain.
        if (networkDomain.isEmpty()) {
            domain.setNetworkDomain(null);
        } else {
            domain.setNetworkDomain(networkDomain);
        }
    }
    _domainDao.update(domainId, domain);
    txn.commit();
    // Reload so the caller sees the persisted state.
    return _domainDao.findById(domainId);
}
/**
 * Builds the new path for a renamed domain by replacing the last path
 * component (the domain's own name) with {@code newName}. The returned path
 * keeps a trailing '/' after every component, matching the stored format.
 */
private String getUpdatedDomainPath(String oldPath, String newName) {
    String[] components = oldPath.split("/");
    components[components.length - 1] = newName;
    StringBuilder rebuilt = new StringBuilder();
    for (int i = 0; i < components.length; i++) {
        rebuilt.append(components[i]).append("/");
    }
    return rebuilt.toString();
}
/**
 * Rewrites the stored path of every descendant of {@code domain} so it
 * starts with the renamed parent's new path prefix.
 */
private void updateDomainChildren(DomainVO domain, String updatedDomainPrefix) {
    final String oldPrefix = domain.getPath();
    for (DomainVO child : _domainDao.findAllChildren(oldPrefix, domain.getId())) {
        // Replace only the leading occurrence of the old parent path.
        child.setPath(child.getPath().replaceFirst(oldPrefix, updatedDomainPrefix));
        _domainDao.update(child.getId(), child);
    }
}
@Override
public List<? extends Alert> searchForAlerts(ListAlertsCmd cmd) {
    // Page through alerts, newest first (ordered by lastSent descending).
    final Filter searchFilter = new Filter(AlertVO.class, "lastSent", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    final SearchCriteria<AlertVO> sc = _alertDao.createSearchCriteria();
    final Object id = cmd.getId();
    final Object type = cmd.getType();
    final Object keyword = cmd.getKeyword();
    // Caller's authority may restrict the visible zone.
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), null);
    if (id != null) {
        sc.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (zoneId != null) {
        sc.addAnd("data_center_id", SearchCriteria.Op.EQ, zoneId);
    }
    if (keyword != null) {
        // Keyword search matches against the alert subject.
        final SearchCriteria<AlertVO> ssc = _alertDao.createSearchCriteria();
        ssc.addOr("subject", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("subject", SearchCriteria.Op.SC, ssc);
    }
    if (type != null) {
        sc.addAnd("type", SearchCriteria.Op.EQ, type);
    }
    return _alertDao.search(sc, searchFilter);
}
@Override
public List<CapacityVO> listTopConsumedResources(ListCapacityCmd cmd) {
    // Collects per-zone, per-pod and per-cluster capacity summaries (plus
    // secondary-storage usage), sorts them by percent used (highest first)
    // and returns the first page as CapacityVO records.
    Integer capacityType = cmd.getType();
    Long zoneId = cmd.getZoneId();
    Long podId = cmd.getPodId();
    Long clusterId = cmd.getClusterId();
    if (clusterId != null) {
        throw new InvalidParameterValueException("Currently clusterId param is not suppoerted");
    }
    zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), zoneId);
    List<SummedCapacity> summedCapacities = new ArrayList<SummedCapacity>();
    if (zoneId == null && podId == null) {// Group by Zone, capacity type
        List<SummedCapacity> summedCapacitiesAtZone = _capacityDao.listCapacitiesGroupedByLevelAndType(capacityType, zoneId, podId, clusterId, 1, cmd.getPageSizeVal());
        if (summedCapacitiesAtZone != null) {
            summedCapacities.addAll(summedCapacitiesAtZone);
        }
    }
    if (podId == null) {// Group by Pod, capacity type
        List<SummedCapacity> summedCapacitiesAtPod = _capacityDao.listCapacitiesGroupedByLevelAndType(capacityType, zoneId, podId, clusterId, 2, cmd.getPageSizeVal());
        if (summedCapacitiesAtPod != null) {
            summedCapacities.addAll(summedCapacitiesAtPod);
        }
        // Secondary storage is not tracked in op_host_capacity and is computed live.
        List<SummedCapacity> summedCapacitiesForSecStorage = getSecStorageUsed(zoneId, capacityType);
        if (summedCapacitiesForSecStorage != null) {
            summedCapacities.addAll(summedCapacitiesForSecStorage);
        }
    }
    // Group by Cluster, capacity type
    List<SummedCapacity> summedCapacitiesAtCluster = _capacityDao.listCapacitiesGroupedByLevelAndType(capacityType, zoneId, podId, clusterId, 3, cmd.getPageSizeVal());
    if (summedCapacitiesAtCluster != null) {
        summedCapacities.addAll(summedCapacitiesAtCluster);
    }
    // Sort Capacities
    // Descending by percent used: the most consumed entries come first.
    Collections.sort(summedCapacities, new Comparator<SummedCapacity>() {
        @Override
        public int compare(SummedCapacity arg0, SummedCapacity arg1) {
            // NOTE(review): if getPercentUsed() returns a boxed Float, '=='
            // compares references rather than values -- confirm the return type.
            if (arg0.getPercentUsed() < arg1.getPercentUsed()) {
                return 1;
            } else if (arg0.getPercentUsed() == arg1.getPercentUsed()) {
                return 0;
            }
            return -1;
        }
    });
    List<CapacityVO> capacities = new ArrayList<CapacityVO>();
    Integer pageSize = null;
    try {
        // getPageSizeVal() appears to be a wider numeric type; reject values
        // that do not parse into Integer range (NumberFormatException is an
        // IllegalArgumentException, so the catch below covers it).
        pageSize = Integer.valueOf(cmd.getPageSizeVal().toString());
    } catch (IllegalArgumentException e) {
        throw new InvalidParameterValueException("pageSize " + cmd.getPageSizeVal() + " is out of Integer range is not supported for this call");
    }
    // Trim to the first page worth of entries.
    summedCapacities = summedCapacities.subList(0, summedCapacities.size() < cmd.getPageSizeVal() ? summedCapacities.size() : pageSize);
    for (SummedCapacity summedCapacity : summedCapacities) {
        CapacityVO capacity = new CapacityVO(summedCapacity.getDataCenterId(), summedCapacity.getPodId(), summedCapacity.getClusterId(),
                summedCapacity.getCapacityType(), summedCapacity.getPercentUsed());
        capacity.setUsedCapacity(summedCapacity.getUsedCapacity());
        capacity.setTotalCapacity(summedCapacity.getTotalCapacity());
        capacities.add(capacity);
    }
    return capacities;
}
/**
 * Computes secondary-storage usage as {@link SummedCapacity} entries.
 *
 * Returns one entry for the given zone, or one per enabled zone when
 * {@code zoneId} is null. Returns null when {@code capacityType} is some
 * other capacity type, or when the requested zone is missing/disabled.
 */
List<SummedCapacity> getSecStorageUsed(Long zoneId, Integer capacityType) {
    if (capacityType == null || capacityType == Capacity.CAPACITY_TYPE_SECONDARY_STORAGE) {
        List<SummedCapacity> list = new ArrayList<SummedCapacity>();
        if (zoneId != null) {
            DataCenterVO zone = ApiDBUtils.findZoneById(zoneId);
            if (zone == null || zone.getAllocationState() == AllocationState.Disabled) {
                return null;
            }
            CapacityVO capacity = _storageMgr.getSecondaryStorageUsedStats(null, zoneId);
            if (capacity.getTotalCapacity() != 0) {
                // FIX: cast to float before dividing -- the original integer
                // division always truncated the used fraction to 0 here, while
                // the all-zones branch below already computed it correctly.
                capacity.setUsedPercentage((float)capacity.getUsedCapacity() / capacity.getTotalCapacity());
            } else {
                capacity.setUsedPercentage(0);
            }
            SummedCapacity summedCapacity = new SummedCapacity(capacity.getUsedCapacity(), capacity.getTotalCapacity(), capacity.getUsedPercentage(), capacity.getCapacityType(), capacity.getDataCenterId(),
                    capacity.getPodId(), capacity.getClusterId());
            list.add(summedCapacity);
        } else {
            // No zone given: aggregate usage for every enabled zone.
            List<DataCenterVO> dcList = _dcDao.listEnabledZones();
            for (DataCenterVO dc : dcList) {
                CapacityVO capacity = _storageMgr.getSecondaryStorageUsedStats(null, dc.getId());
                if (capacity.getTotalCapacity() != 0) {
                    capacity.setUsedPercentage((float)capacity.getUsedCapacity() / capacity.getTotalCapacity());
                } else {
                    capacity.setUsedPercentage(0);
                }
                SummedCapacity summedCapacity = new SummedCapacity(capacity.getUsedCapacity(), capacity.getTotalCapacity(), capacity.getUsedPercentage(), capacity.getCapacityType(), capacity.getDataCenterId(),
                        capacity.getPodId(), capacity.getClusterId());
                list.add(summedCapacity);
            }// End of for
        }
        return list;
    }
    return null;
}
@Override
public List<CapacityVO> listCapacities(ListCapacityCmd cmd) {
    // Lists allocated capacity stats from the capacity table, then augments
    // them with real-time storage stats (primary pool and secondary storage),
    // which are only tracked in memory.
    Integer capacityType = cmd.getType();
    Long zoneId = cmd.getZoneId();
    Long podId = cmd.getPodId();
    Long clusterId = cmd.getClusterId();
    Boolean fetchLatest = cmd.getFetchLatest();
    zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), zoneId);
    if (fetchLatest != null && fetchLatest) {
        // Force a capacity recalculation before reading, when asked.
        _alertMgr.recalculateCapacity();
    }
    List<SummedCapacity> summedCapacities = _capacityDao.findCapacityBy(capacityType, zoneId, podId, clusterId);
    List<CapacityVO> capacities = new ArrayList<CapacityVO>();
    for (SummedCapacity summedCapacity : summedCapacities) {
        CapacityVO capacity = new CapacityVO(null, summedCapacity.getDataCenterId(), podId, clusterId,
                summedCapacity.getUsedCapacity() + summedCapacity.getReservedCapacity(),
                summedCapacity.getTotalCapacity(), summedCapacity.getCapacityType());
        if (summedCapacity.getCapacityType() == Capacity.CAPACITY_TYPE_CPU) {
            // CPU totals are scaled by the configured overprovisioning factor.
            capacity.setTotalCapacity((long) (summedCapacity.getTotalCapacity() * ApiDBUtils.getCpuOverprovisioningFactor()));
        }
        capacities.add(capacity);
    }
    // op_host_Capacity contains only allocated stats and the real time stats are stored "in memory".
    // Show Sec. Storage only when the api is invoked for the zone layer.
    List<DataCenterVO> dcList = new ArrayList<DataCenterVO>();
    if (zoneId == null && podId == null && clusterId == null) {
        // No scoping at all: cover every zone.
        dcList = ApiDBUtils.listZones();
    } else if (zoneId != null) {
        dcList.add(ApiDBUtils.findZoneById(zoneId));
    } else {
        // Pod- or cluster-scoped request: only primary storage stats apply.
        if (capacityType == null || capacityType == Capacity.CAPACITY_TYPE_STORAGE) {
            capacities.add(_storageMgr.getStoragePoolUsedStats(null, clusterId, podId, zoneId));
        }
    }
    for (DataCenterVO zone : dcList) {
        zoneId = zone.getId();
        if ((capacityType == null || capacityType == Capacity.CAPACITY_TYPE_SECONDARY_STORAGE) && podId == null && clusterId == null) {
            capacities.add(_storageMgr.getSecondaryStorageUsedStats(null, zoneId));
        }
        if (capacityType == null || capacityType == Capacity.CAPACITY_TYPE_STORAGE) {
            capacities.add(_storageMgr.getStoragePoolUsedStats(null, clusterId, podId, zoneId));
        }
    }
    return capacities;
}
@Override
public long getMemoryOrCpuCapacityByHost(Long hostId, short capacityType) {
    // Total committed capacity (reserved + used) for the host; 0 when the
    // host has no capacity record of the requested type.
    final CapacityVO capacity = _capacityDao.findByHostIdType(hostId, capacityType);
    if (capacity == null) {
        return 0;
    }
    return capacity.getReservedCapacity() + capacity.getUsedCapacity();
}
/** Returns true for any administrator account flavour. */
public static boolean isAdmin(short accountType) {
    switch (accountType) {
    case Account.ACCOUNT_TYPE_ADMIN:
    case Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN:
    case Account.ACCOUNT_TYPE_DOMAIN_ADMIN:
    case Account.ACCOUNT_TYPE_READ_ONLY_ADMIN:
        return true;
    default:
        return false;
    }
}
private List<DiskOfferingVO> searchDiskOfferingsInternal(Account account, Object name, Object id, Object keyword, Filter searchFilter) {
    // it was decided to return all offerings for the user's domain, and everything above till root (for normal user
    // or
    // domain admin)
    // list all offerings belonging to this domain, and all of its parents
    // check the parent, if not null, add offerings for that parent to list
    List<DiskOfferingVO> dol = new ArrayList<DiskOfferingVO>();
    DomainVO domainRecord = _domainDao.findById(account.getDomainId());
    // Public offerings are appended only for an unfiltered listing; any
    // name/id/keyword filter suppresses them (see the flag flips below).
    boolean includePublicOfferings = true;
    if (domainRecord != null) {
        // Walk up the domain hierarchy, collecting matching offerings at each
        // level, until the root domain (parent == null) has been processed.
        while (true) {
            SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
            sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
            sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
            sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NULL);
            SearchCriteria<DiskOfferingVO> sc = sb.create();
            if (keyword != null) {
                includePublicOfferings = false;
                SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
                ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                sc.addAnd("name", SearchCriteria.Op.SC, ssc);
            }
            if (name != null) {
                includePublicOfferings = false;
                sc.setParameters("name", "%" + name + "%");
            }
            if (id != null) {
                includePublicOfferings = false;
                sc.setParameters("id", id);
            }
            // for this domain
            sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
            // search and add for this domain
            dol.addAll(_diskOfferingDao.search(sc, searchFilter));
            // try and move on to the next domain
            if (domainRecord.getParent() != null) {
                domainRecord = _domainDao.findById(domainRecord.getParent());
            } else {
                break;// now we got all the offerings for this user/dom adm
            }
        }
    } else {
        // An account without a resolvable domain is a data-integrity problem.
        s_logger.error("Could not find the domainId for account:" + account.getAccountName());
        throw new CloudAuthenticationException("Could not find the domainId for account:" + account.getAccountName());
    }
    // add all the public offerings to the sol list before returning
    if (includePublicOfferings) {
        dol.addAll(_diskOfferingDao.findPublicDiskOfferings());
    }
    return dol;
}
@Override
public List<DiskOfferingVO> searchForDiskOfferings(ListDiskOfferingsCmd cmd) {
    // Note
    // The list method for offerings is being modified in accordance with discussion with Will/Kevin
    // For now, we will be listing the following based on the usertype
    // 1. For root, we will list all offerings
    // 2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way
    // till
    // root
    //
    // FIX: Boolean.parseBoolean never returns null (it autoboxes a primitive),
    // so the old "isAscending == null ? true : ..." check was dead code and a
    // missing "sortkey.algorithm" config entry silently forced descending
    // order. Check the raw config value for null instead, defaulting to
    // ascending as originally intended.
    String sortKeyOrder = _configDao.getValue("sortkey.algorithm");
    boolean isAscending = (sortKeyOrder == null) ? true : Boolean.parseBoolean(sortKeyOrder);
    Filter searchFilter = new Filter(DiskOfferingVO.class, "sortKey", isAscending, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
    // SearchBuilder and SearchCriteria are now flexible so that the search builder can be built with all possible
    // search terms and only those with criteria can be set. The proper SQL should be generated as a result.
    Account account = UserContext.current().getCaller();
    Object name = cmd.getDiskOfferingName();
    Object id = cmd.getId();
    Object keyword = cmd.getKeyword();
    Long domainId = cmd.getDomainId();
    // Keeping this logic consistent with domain specific zones
    // if a domainId is provided, we just return the disk offering associated with this domain
    if (domainId != null) {
        if (account.getType() == Account.ACCOUNT_TYPE_ADMIN) {
            return _diskOfferingDao.listByDomainId(domainId);// no perm check
        } else {
            // check if the user's domain == do's domain || user's domain is a child of so's domain
            if (isPermissible(account.getDomainId(), domainId)) {
                // perm check succeeded
                return _diskOfferingDao.listByDomainId(domainId);
            } else {
                throw new PermissionDeniedException("The account:" + account.getAccountName() + " does not fall in the same domain hierarchy as the disk offering");
            }
        }
    }
    // For non-root users
    if ((account.getType() == Account.ACCOUNT_TYPE_NORMAL || account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) || account.getType() == Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN) {
        return searchDiskOfferingsInternal(account, name, id, keyword, searchFilter);
    }
    // For root users, preserving existing flow
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NULL);
    // FIXME: disk offerings should search back up the hierarchy for available disk offerings...
    /*
     * sb.addAnd("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ); if (domainId != null) {
     * SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder(); domainSearch.addAnd("path",
     * domainSearch.entity().getPath(), SearchCriteria.Op.LIKE); sb.join("domainSearch", domainSearch,
     * sb.entity().getDomainId(), domainSearch.entity().getId()); }
     */
    SearchCriteria<DiskOfferingVO> sc = sb.create();
    if (keyword != null) {
        SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
        ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    // FIXME: disk offerings should search back up the hierarchy for available disk offerings...
    /*
     * if (domainId != null) { sc.setParameters("domainId", domainId); // //DomainVO domain =
     * _domainDao.findById((Long)domainId); // // I want to join on user_vm.domain_id = domain.id where domain.path
     * like
     * 'foo%' //sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%"); // }
     */
    return _diskOfferingDao.search(sc, searchFilter);
}
@Override
public String[] getApiConfig() {
    // Property file(s) defining the API command mappings.
    final String[] apiConfig = { "commands.properties" };
    return apiConfig;
}
/**
 * Periodic task that deletes events older than the configured retention
 * period (_purgeDelay, in days). Guarded by a global lock so only one
 * management server purges at a time.
 */
protected class EventPurgeTask implements Runnable {
    @Override
    public void run() {
        try {
            final GlobalLock purgeLock = GlobalLock.getInternLock("EventPurge");
            if (purgeLock == null) {
                s_logger.debug("Couldn't get the global lock");
                return;
            }
            // Give up quietly if another node holds the lock.
            if (!purgeLock.lock(30)) {
                s_logger.debug("Couldn't lock the db");
                return;
            }
            try {
                // Cutoff = now minus the retention window in days.
                final Calendar cutoffCal = Calendar.getInstance();
                cutoffCal.add(Calendar.DAY_OF_YEAR, -_purgeDelay);
                final Date cutoff = cutoffCal.getTime();
                s_logger.debug("Deleting events older than: " + cutoff.toString());
                final List<EventVO> staleEvents = _eventDao.listOlderEvents(cutoff);
                s_logger.debug("Found " + staleEvents.size() + " events to be purged");
                for (final EventVO event : staleEvents) {
                    _eventDao.expunge(event.getId());
                }
            } catch (Exception e) {
                // Purging is best-effort; log and retry on the next cycle.
                s_logger.error("Exception ", e);
            } finally {
                purgeLock.unlock();
            }
        } catch (Exception e) {
            s_logger.error("Exception ", e);
        }
    }
}
@Override
public List<? extends StoragePoolVO> searchForStoragePools(ListStoragePoolsCmd cmd) {
    // Resolve the zone the caller is allowed to see, then translate the API
    // command into generic criteria and delegate to the criteria overload.
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());
    final Criteria criteria = new Criteria("id", Boolean.TRUE, cmd.getStartIndex(), cmd.getPageSizeVal());
    criteria.addCriteria(Criteria.ID, cmd.getId());
    criteria.addCriteria(Criteria.NAME, cmd.getStoragePoolName());
    criteria.addCriteria(Criteria.CLUSTERID, cmd.getClusterId());
    criteria.addCriteria(Criteria.ADDRESS, cmd.getIpAddress());
    criteria.addCriteria(Criteria.KEYWORD, cmd.getKeyword());
    criteria.addCriteria(Criteria.PATH, cmd.getPath());
    criteria.addCriteria(Criteria.PODID, cmd.getPodId());
    criteria.addCriteria(Criteria.DATACENTERID, zoneId);
    return searchForStoragePools(criteria);
}
@Override
public List<? extends StoragePoolVO> searchForStoragePools(Criteria c) {
    // Build a paged, ordered storage-pool query from the generic criteria.
    final Filter pageFilter = new Filter(StoragePoolVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    final SearchCriteria<StoragePoolVO> criteria = _poolDao.createSearchCriteria();
    final Object id = c.getCriteria(Criteria.ID);
    final Object name = c.getCriteria(Criteria.NAME);
    final Object host = c.getCriteria(Criteria.HOST);
    final Object path = c.getCriteria(Criteria.PATH);
    final Object zone = c.getCriteria(Criteria.DATACENTERID);
    final Object pod = c.getCriteria(Criteria.PODID);
    final Object cluster = c.getCriteria(Criteria.CLUSTERID);
    final Object address = c.getCriteria(Criteria.ADDRESS);
    final Object keyword = c.getCriteria(Criteria.KEYWORD);
    if (keyword != null) {
        // Keyword matches pool name or pool type.
        final SearchCriteria<StoragePoolVO> keywordCriteria = _poolDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("poolType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (name != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (host != null) {
        criteria.addAnd("host", SearchCriteria.Op.EQ, host);
    }
    if (path != null) {
        criteria.addAnd("path", SearchCriteria.Op.EQ, path);
    }
    if (zone != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
    }
    if (pod != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, pod);
    }
    if (address != null) {
        criteria.addAnd("hostAddress", SearchCriteria.Op.EQ, address);
    }
    if (cluster != null) {
        criteria.addAnd("clusterId", SearchCriteria.Op.EQ, cluster);
    }
    return _poolDao.search(criteria, pageFilter);
}
@Override
public List<AsyncJobVO> searchForAsyncJobs(ListAsyncJobsCmd cmd) {
    // Lists async jobs visible to the caller, applying account/domain ACL
    // scoping (optionally recursive) and project-resource filtering.
    Account caller = UserContext.current().getCaller();
    List<Long> permittedAccounts = new ArrayList<Long>();
    Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    // Populates permittedAccounts and fills in domainId/isRecursive/criteria
    // according to the caller's rights.
    _accountMgr.buildACLSearchParameters(caller, null, cmd.getAccountName(), null, permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
    Long domainId = domainIdRecursiveListProject.first();
    Boolean isRecursive = domainIdRecursiveListProject.second();
    ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
    Filter searchFilter = new Filter(AsyncJobVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<AsyncJobVO> sb = _jobDao.createSearchBuilder();
    sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    SearchBuilder<AccountVO> accountSearch = null;
    // Tracks whether the job->account join was already added, so the
    // project-criteria branch below does not add it twice.
    boolean accountJoinIsDone = false;
    if (permittedAccounts.isEmpty() && domainId != null) {
        accountSearch = _accountDao.createSearchBuilder();
        // if accountId isn't specified, we can do a domain match for the admin case if isRecursive is true
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("domainId", domainSearch.entity().getId(), SearchCriteria.Op.EQ);
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        accountJoinIsDone = true;
        accountSearch.join("domainSearch", domainSearch, accountSearch.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    if (listProjectResourcesCriteria != null) {
        if (accountSearch == null) {
            accountSearch = _accountDao.createSearchBuilder();
        }
        // Project accounts are either selected exclusively (EQ) or excluded (NEQ).
        if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.ListProjectResourcesOnly) {
            accountSearch.and("type", accountSearch.entity().getType(), SearchCriteria.Op.EQ);
        } else if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.SkipProjectResources) {
            accountSearch.and("type", accountSearch.entity().getType(), SearchCriteria.Op.NEQ);
        }
        if (!accountJoinIsDone) {
            sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        }
    }
    Object keyword = cmd.getKeyword();
    Object startDate = cmd.getStartDate();
    SearchCriteria<AsyncJobVO> sc = sb.create();
    if (listProjectResourcesCriteria != null) {
        sc.setJoinParameters("accountSearch", "type", Account.ACCOUNT_TYPE_PROJECT);
    }
    if (!permittedAccounts.isEmpty()) {
        sc.setParameters("accountIdIN", permittedAccounts.toArray());
    } else if (domainId != null) {
        DomainVO domain = _domainDao.findById(domainId);
        if (isRecursive) {
            // Recursive listing matches all accounts whose domain path is under this domain.
            sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        } else {
            sc.setJoinParameters("domainSearch", "domainId", domainId);
        }
    }
    if (keyword != null) {
        // Keyword matches against the job's command name.
        sc.addAnd("cmd", SearchCriteria.Op.LIKE, "%" + keyword + "%");
    }
    if (startDate != null) {
        sc.addAnd("created", SearchCriteria.Op.GTEQ, startDate);
    }
    return _jobDao.search(sc, searchFilter);
}
@ActionEvent(eventType = EventTypes.EVENT_SSVM_START, eventDescription = "starting secondary storage Vm", async = true)
public SecondaryStorageVmVO startSecondaryStorageVm(long instanceId) {
    // Delegate to the SSVM manager, which performs the actual start.
    final SecondaryStorageVmVO startedVm = _secStorageVmMgr.startSecStorageVm(instanceId);
    return startedVm;
}
@ActionEvent(eventType = EventTypes.EVENT_SSVM_STOP, eventDescription = "stopping secondary storage Vm", async = true)
private SecondaryStorageVmVO stopSecondaryStorageVm(VMInstanceVO systemVm, boolean isForced) throws ResourceUnavailableException, OperationTimedoutException, ConcurrentOperationException {
    // Resolve the issuing user so the stop is attributed to the right caller.
    final User caller = _userDao.findById(UserContext.current().getCallerUserId());
    final boolean stopped = _itMgr.advanceStop(systemVm, isForced, caller, UserContext.current().getCaller());
    if (!stopped) {
        return null;
    }
    // Reload the SSVM record so the returned state reflects the stop.
    return _secStorageVmDao.findById(systemVm.getId());
}
@ActionEvent(eventType = EventTypes.EVENT_SSVM_REBOOT, eventDescription = "rebooting secondary storage Vm", async = true)
public SecondaryStorageVmVO rebootSecondaryStorageVm(long instanceId) {
    // Trigger the reboot, then return the (re-read) SSVM record.
    _secStorageVmMgr.rebootSecStorageVm(instanceId);
    final SecondaryStorageVmVO rebootedVm = _secStorageVmDao.findById(instanceId);
    return rebootedVm;
}
@ActionEvent(eventType = EventTypes.EVENT_SSVM_DESTROY, eventDescription = "destroying secondary storage Vm", async = true)
public SecondaryStorageVmVO destroySecondaryStorageVm(long instanceId) {
    // Capture the record before destruction so it can be returned on success.
    final SecondaryStorageVmVO secStorageVm = _secStorageVmDao.findById(instanceId);
    final boolean destroyed = _secStorageVmMgr.destroySecStorageVm(instanceId);
    return destroyed ? secStorageVm : null;
}
@Override
public List<? extends VMInstanceVO> searchForSystemVm(ListSystemVMsCmd cmd) {
    // Collect the optional filters from the API command.
    final String type = cmd.getSystemVmType();
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());
    final Long id = cmd.getId();
    final String name = cmd.getSystemVmName();
    final String state = cmd.getState();
    final String keyword = cmd.getKeyword();
    final Long podId = cmd.getPodId();
    final Long hostId = cmd.getHostId();
    final Filter pageFilter = new Filter(VMInstanceVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    // Declare every possible condition up front; only the ones with values
    // set below will appear in the generated query.
    final SearchBuilder<VMInstanceVO> builder = _vmInstanceDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);
    builder.and("hostName", builder.entity().getHostName(), SearchCriteria.Op.LIKE);
    builder.and("state", builder.entity().getState(), SearchCriteria.Op.EQ);
    builder.and("dataCenterId", builder.entity().getDataCenterIdToDeployIn(), SearchCriteria.Op.EQ);
    builder.and("podId", builder.entity().getPodIdToDeployIn(), SearchCriteria.Op.EQ);
    builder.and("hostId", builder.entity().getHostId(), SearchCriteria.Op.EQ);
    builder.and("type", builder.entity().getType(), SearchCriteria.Op.EQ);
    builder.and("nulltype", builder.entity().getType(), SearchCriteria.Op.IN);
    final SearchCriteria<VMInstanceVO> criteria = builder.create();
    if (keyword != null) {
        // Keyword matches either the host name or the state.
        final SearchCriteria<VMInstanceVO> keywordCriteria = _vmInstanceDao.createSearchCriteria();
        keywordCriteria.addOr("hostName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("hostName", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.setParameters("id", id);
    }
    if (name != null) {
        criteria.setParameters("hostName", name);
    }
    if (state != null) {
        criteria.setParameters("state", state);
    }
    if (zoneId != null) {
        criteria.setParameters("dataCenterId", zoneId);
    }
    if (podId != null) {
        criteria.setParameters("podId", podId);
    }
    if (hostId != null) {
        criteria.setParameters("hostId", hostId);
    }
    if (type != null) {
        criteria.setParameters("type", type);
    } else {
        // No explicit type: restrict to the two system VM types.
        criteria.setParameters("nulltype", VirtualMachine.Type.SecondaryStorageVm, VirtualMachine.Type.ConsoleProxy);
    }
    return _vmInstanceDao.search(criteria, pageFilter);
}
@Override
public VirtualMachine.Type findSystemVMTypeById(long instanceId) {
    // Only console proxies and secondary storage VMs count as system VMs here.
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(instanceId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm != null) {
        return systemVm.getType();
    }
    final InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find a system vm of specified instanceId");
    // Attach the VO table name so the API layer can build a proxy reference.
    final String tablename = AnnotationHelper.getTableName(systemVm);
    if (tablename != null) {
        ex.addProxyObject(tablename, instanceId, "instanceId");
    } else {
        s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy cglib object\n");
    }
    throw ex;
}
@Override
public VirtualMachine startSystemVM(long vmId) {
    // Look the instance up, restricted to the two startable system VM types.
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        final InvalidParameterValueException notFound = new InvalidParameterValueException("unable to find a system vm with specified vmId");
        final String tablename = AnnotationHelper.getTableName(systemVm);
        if (tablename != null) {
            notFound.addProxyObject(tablename, vmId, "vmId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy cglib object\n");
        }
        throw notFound;
    }
    if (systemVm.getType() == VirtualMachine.Type.ConsoleProxy) {
        return startConsoleProxy(vmId);
    }
    if (systemVm.getType() == VirtualMachine.Type.SecondaryStorageVm) {
        return startSecondaryStorageVm(vmId);
    }
    // Defensive: findByIdTypes should only return the two types handled above.
    final InvalidParameterValueException unexpectedType = new InvalidParameterValueException("Unable to find a system vm with specified vmId");
    final String tablename = AnnotationHelper.getTableName(systemVm);
    if (tablename != null) {
        unexpectedType.addProxyObject(tablename, vmId, "vmId");
    } else {
        s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy cglib object\n");
    }
    throw unexpectedType;
}
@Override
public VMInstanceVO stopSystemVM(StopSystemVmCmd cmd) throws ResourceUnavailableException, ConcurrentOperationException {
    final Long id = cmd.getId();
    // verify parameters
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(id, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a system vm with specified vmId");
        final String tablename = AnnotationHelper.getTableName(systemVm);
        if (tablename != null) {
            ex.addProxyObject(tablename, id, "vmId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy object\n");
        }
        throw ex;
    }
    try {
        // Dispatch to the type-specific stop; other types fall through to null.
        switch (systemVm.getType()) {
        case ConsoleProxy:
            return stopConsoleProxy(systemVm, cmd.isForced());
        case SecondaryStorageVm:
            return stopSecondaryStorageVm(systemVm, cmd.isForced());
        default:
            return null;
        }
    } catch (OperationTimedoutException e) {
        throw new CloudRuntimeException("Unable to stop " + systemVm, e);
    }
}
@Override
public VMInstanceVO rebootSystemVM(RebootSystemVmCmd cmd) {
    final Long vmId = cmd.getId();
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a system vm with specified vmId");
        final String tablename = AnnotationHelper.getTableName(systemVm);
        if (tablename != null) {
            ex.addProxyObject(tablename, vmId, "vmId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy object\n");
        }
        throw ex;
    }
    // Console proxies have their own reboot path; everything else found here
    // is a secondary storage VM.
    if (VirtualMachine.Type.ConsoleProxy.equals(systemVm.getType())) {
        return rebootConsoleProxy(vmId);
    }
    return rebootSecondaryStorageVm(vmId);
}
@Override
public VMInstanceVO destroySystemVM(DestroySystemVmCmd cmd) {
    final Long vmId = cmd.getId();
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a system vm with specified vmId");
        final String tablename = AnnotationHelper.getTableName(systemVm);
        if (tablename != null) {
            ex.addProxyObject(tablename, vmId, "vmId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy object\n");
        }
        throw ex;
    }
    // Console proxies have their own destroy path; everything else found here
    // is a secondary storage VM.
    if (VirtualMachine.Type.ConsoleProxy.equals(systemVm.getType())) {
        return destroyConsoleProxy(vmId);
    }
    return destroySecondaryStorageVm(vmId);
}
/**
 * Computes a Base64-encoded HMAC-SHA1 signature of {@code request} using
 * {@code key}. Returns null when either argument is null or signing fails.
 */
private String signRequest(String request, String key) {
    try {
        // SECURITY FIX: log only the request being signed -- the key is a
        // user's secret and must never be written to the log.
        s_logger.info("Request: " + request);
        if (key != null && request != null) {
            Mac mac = Mac.getInstance("HmacSHA1");
            // NOTE(review): getBytes() uses the platform default charset;
            // confirm callers only pass ASCII before pinning an explicit
            // charset, since changing it would alter existing signatures.
            SecretKeySpec keySpec = new SecretKeySpec(key.getBytes(), "HmacSHA1");
            mac.init(keySpec);
            mac.update(request.getBytes());
            byte[] encryptedBytes = mac.doFinal();
            return new String((Base64.encodeBase64(encryptedBytes)));
        }
    } catch (Exception ex) {
        s_logger.error("unable to sign request", ex);
    }
    return null;
}
@Override
public ArrayList<String> getCloudIdentifierResponse(long userId) {
    final Account caller = UserContext.current().getCaller();
    // verify that user exists
    User user = _accountMgr.getUserIncludingRemoved(userId);
    if (user == null || user.getRemoved() != null) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find active user of specified id");
        // Get the VO object's table name.
        final String tablename = AnnotationHelper.getTableName(user);
        if (tablename != null) {
            ex.addProxyObject(tablename, userId, "userId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
        }
        throw ex;
    }
    // check permissions
    _accountMgr.checkAccess(caller, null, true, _accountMgr.getAccount(user.getAccountId()));
    String cloudIdentifier = _configDao.getValue("cloud.identifier");
    if (cloudIdentifier == null) {
        cloudIdentifier = "";
    }
    String signature = "";
    try {
        // Sign the identifier with the user's secret key so the receiver
        // can verify its origin.
        user = _accountMgr.getActiveUser(userId);
        final String secretKey = user.getSecretKey();
        final String input = cloudIdentifier;
        signature = signRequest(input, secretKey);
    } catch (Exception e) {
        s_logger.warn("Exception whilst creating a signature:" + e);
    }
    final ArrayList<String> cloudParams = new ArrayList<String>();
    cloudParams.add(cloudIdentifier);
    cloudParams.add(signature);
    return cloudParams;
}
/**
 * Reports installation-level capability flags consumed by the UI/API:
 * security-group support, ELB support, public-template policy, version,
 * and project-related policies.
 *
 * @param cmd the list command (carries no filters used here)
 * @return map of capability name to value
 */
@Override
public Map<String, Object> listCapabilities(ListCapabilitiesCmd cmd) {
    boolean securityGroupsEnabled = false;
    boolean elasticLoadBalancerEnabled = false;
    String supportELB = "false";

    // Security groups count as enabled when at least one SG-enabled network exists.
    final List<NetworkVO> sgNetworks = _networkDao.listSecurityGroupEnabledNetworks();
    if (sgNetworks != null && !sgNetworks.isEmpty()) {
        securityGroupsEnabled = true;
        // Boolean.parseBoolean(null) is false, matching the old explicit null check.
        elasticLoadBalancerEnabled = Boolean.parseBoolean(_configDao.getValue(Config.ElasticLoadBalancerEnabled.key()));
        if (elasticLoadBalancerEnabled) {
            final String elbNetworkType = _configDao.getValue(Config.ElasticLoadBalancerNetwork.key());
            if (elbNetworkType != null) {
                supportELB = elbNetworkType;
            }
        }
    }

    final String publicTemplatesAllowed = _configs.get(Config.AllowPublicUserTemplates.key());

    final Map<String, Object> capabilities = new HashMap<String, Object>();
    capabilities.put("securityGroupsEnabled", securityGroupsEnabled);
    capabilities.put("userPublicTemplateEnabled", publicTemplatesAllowed != null && !publicTemplatesAllowed.equals("false"));
    capabilities.put("cloudStackVersion", getVersion());
    capabilities.put("supportELB", supportELB);
    capabilities.put("projectInviteRequired", _projectMgr.projectInviteRequired());
    capabilities.put("allowusercreateprojects", _projectMgr.allowUserToCreateProject());
    return capabilities;
}
/**
 * Looks up a guest OS record by its database id.
 *
 * @param guestOsId id of the guest OS entry
 * @return the matching GuestOSVO, or null when no row exists for the id
 */
@Override
public GuestOSVO getGuestOs(Long guestOsId) {
return _guestOSDao.findById(guestOsId);
}
/**
 * Extracts (exports) a volume: copies it from its primary storage pool to
 * secondary storage, then either creates an HTTP download URL for it or
 * uploads it to a caller-supplied FTP url, depending on the requested mode.
 *
 * @param cmd extract command: volume id, zone id, mode (FTP_UPLOAD or
 *            HTTP_DOWNLOAD) and, for uploads, the target ftp:// url
 * @return the id of the UploadVO row tracking the extraction
 * @throws URISyntaxException if the supplied upload url cannot be parsed
 * @throws PermissionDeniedException if extraction is disabled, the volume is
 *            attached to a running VM, or the backing template is not extractable
 */
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_EXTRACT, eventDescription = "extracting volume", async = true)
public Long extractVolume(ExtractVolumeCmd cmd) throws URISyntaxException {
Long volumeId = cmd.getId();
String url = cmd.getUrl();
Long zoneId = cmd.getZoneId();
AsyncJobVO job = null; // FIXME: cmd.getJob();
String mode = cmd.getMode();
Account account = UserContext.current().getCaller();
// Non-root users may be blocked from extraction by a global config flag.
if (!_accountMgr.isRootAdmin(account.getType()) && ApiDBUtils.isExtractionDisabled()) {
throw new PermissionDeniedException("Extraction has been disabled by admin");
}
VolumeVO volume = _volumeDao.findById(volumeId);
if (volume == null) {
InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find volume with specified volumeId");
// Get the VolumeVO object's table name.
String tablename = AnnotationHelper.getTableName(volume);
if (tablename != null) {
ex.addProxyObject(tablename, volumeId, "volumeId");
} else {
s_logger.info("\nCould not retrieve table name (annotation) from VolumeVO proxy object\n");
}
throw ex;
}
// perform permission check
_accountMgr.checkAccess(account, null, true, volume);
if (_dcDao.findById(zoneId) == null) {
throw new InvalidParameterValueException("Please specify a valid zone.");
}
// A volume not yet placed on a pool has no data to extract.
if (volume.getPoolId() == null) {
throw new InvalidParameterValueException("The volume doesnt belong to a storage pool so cant extract it");
}
// Extract activity only for detached volumes or for volumes whose instance is stopped
if (volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped) {
s_logger.debug("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
PermissionDeniedException ex = new PermissionDeniedException("Invalid state of the volume with specified ID. It should be either detached or the VM should be in stopped state.");
// Get the VO object's table name.
String tablename = AnnotationHelper.getTableName(volume);
if (tablename != null) {
ex.addProxyObject(tablename, volumeId, "volumeId");
} else {
s_logger.info("\nCould not retrieve table name (annotation) from VolumeVO proxy object\n");
}
throw ex;
}
if (volume.getVolumeType() != Volume.Type.DATADISK) { // Datadisk dont have any template dependence.
VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId());
if (template != null) { // For ISO based volumes template = null and we allow extraction of all ISO based
// volumes
// SYSTEM templates and templates flagged non-extractable are protected.
boolean isExtractable = template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
if (!isExtractable && account != null && account.getType() != Account.ACCOUNT_TYPE_ADMIN) { // Global
// admins are always allowed to extract
PermissionDeniedException ex = new PermissionDeniedException("The volume with specified volumeId is not allowed to be extracted");
// Get the VO object's table name.
String tablename = AnnotationHelper.getTableName(volume);
if (tablename != null) {
ex.addProxyObject(tablename, volumeId, "volumeId");
} else {
s_logger.info("\nCould not retrieve table name (annotation) from VolumeVO proxy object\n");
}
throw ex;
}
}
}
// Normalize the requested mode string into the Upload.Mode enum.
Upload.Mode extractMode;
if (mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString()))) {
throw new InvalidParameterValueException("Please specify a valid extract Mode ");
} else {
extractMode = mode.equals(Upload.Mode.FTP_UPLOAD.toString()) ? Upload.Mode.FTP_UPLOAD : Upload.Mode.HTTP_DOWNLOAD;
}
// If mode is upload perform extra checks on url and also see if there is an ongoing upload on the same.
if (extractMode == Upload.Mode.FTP_UPLOAD) {
URI uri = new URI(url);
if ((uri.getScheme() == null) || (!uri.getScheme().equalsIgnoreCase("ftp"))) {
throw new IllegalArgumentException("Unsupported scheme for url: " + url);
}
String host = uri.getHost();
try {
// Reject local/loopback/multicast and IPv6 targets to avoid SSRF-style uploads.
InetAddress hostAddr = InetAddress.getByName(host);
if (hostAddr.isAnyLocalAddress() || hostAddr.isLinkLocalAddress() || hostAddr.isLoopbackAddress() || hostAddr.isMulticastAddress()) {
throw new IllegalArgumentException("Illegal host specified in url");
}
if (hostAddr instanceof Inet6Address) {
throw new IllegalArgumentException("IPV6 addresses not supported (" + hostAddr.getHostAddress() + ")");
}
} catch (UnknownHostException uhe) {
throw new IllegalArgumentException("Unable to resolve " + host);
}
if (_uploadMonitor.isTypeUploadInProgress(volumeId, Upload.Type.VOLUME)) {
throw new IllegalArgumentException(volume.getName() + " upload is in progress. Please wait for some time to schedule another upload for the same");
}
}
long accountId = volume.getAccountId();
String secondaryStorageURL = _storageMgr.getSecondaryStorageURL(zoneId);
StoragePoolVO srcPool = _poolDao.findById(volume.getPoolId());
// NOTE(review): storageServers.get(0) assumes at least one secondary storage
// host exists in the zone — confirm this is guaranteed by earlier validation.
List<HostVO> storageServers = _resourceMgr.listAllHostsInOneZoneByType(Host.Type.SecondaryStorage, zoneId);
HostVO sserver = storageServers.get(0);
List<UploadVO> extractURLList = _uploadDao.listByTypeUploadStatus(volumeId, Upload.Type.VOLUME, UploadVO.Status.DOWNLOAD_URL_CREATED);
if (extractMode == Upload.Mode.HTTP_DOWNLOAD && extractURLList.size() > 0) {
return extractURLList.get(0).getId(); // If download url already exists then return
} else {
// Track the extraction with a new upload row, starting in COPY_IN_PROGRESS.
UploadVO uploadJob = _uploadMonitor.createNewUploadEntry(sserver.getId(), volumeId, UploadVO.Status.COPY_IN_PROGRESS, Upload.Type.VOLUME, url, extractMode);
s_logger.debug("Extract Mode - " + uploadJob.getMode());
uploadJob = _uploadDao.createForUpdate(uploadJob.getId());
// Update the async Job
ExtractResponse resultObj = new ExtractResponse(volumeId, volume.getName(), accountId, UploadVO.Status.COPY_IN_PROGRESS.toString(), uploadJob.getId());
resultObj.setResponseName(cmd.getCommandName());
AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor.getCurrentExecutor();
if (asyncExecutor != null) {
job = asyncExecutor.getJob();
_asyncMgr.updateAsyncJobAttachment(job.getId(), Upload.Type.VOLUME.toString(), volumeId);
_asyncMgr.updateAsyncJobStatus(job.getId(), AsyncJobResult.STATUS_IN_PROGRESS, resultObj);
}
String value = _configs.get(Config.CopyVolumeWait.toString());
int copyvolumewait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CopyVolumeWait.getDefaultValue()));
// Copy the volume from the source storage pool to secondary storage
CopyVolumeCommand cvCmd = new CopyVolumeCommand(volume.getId(), volume.getPath(), srcPool, secondaryStorageURL, true, copyvolumewait);
CopyVolumeAnswer cvAnswer = null;
try {
cvAnswer = (CopyVolumeAnswer) _storageMgr.sendToPool(srcPool, cvCmd);
} catch (StorageUnavailableException e) {
s_logger.debug("Storage unavailable");
}
// Check if you got a valid answer.
if (cvAnswer == null || !cvAnswer.getResult()) {
String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage.";
// Update the async job.
resultObj.setResultString(errorString);
resultObj.setUploadStatus(UploadVO.Status.COPY_ERROR.toString());
if (asyncExecutor != null) {
_asyncMgr.completeAsyncJob(job.getId(), AsyncJobResult.STATUS_FAILED, 0, resultObj);
}
// Update the DB that volume couldn't be copied
uploadJob.setUploadState(UploadVO.Status.COPY_ERROR);
uploadJob.setErrorString(errorString);
uploadJob.setLastUpdated(new Date());
_uploadDao.update(uploadJob.getId(), uploadJob);
throw new CloudRuntimeException(errorString);
}
// Path of the copied image on secondary storage; extension depends on the hypervisor.
String volumeLocalPath = "volumes/" + volume.getId() + "/" + cvAnswer.getVolumePath() + "." + getFormatForPool(srcPool);
// Update the DB that volume is copied and volumePath
uploadJob.setUploadState(UploadVO.Status.COPY_COMPLETE);
uploadJob.setLastUpdated(new Date());
uploadJob.setInstallPath(volumeLocalPath);
_uploadDao.update(uploadJob.getId(), uploadJob);
// NOTE(review): in FTP mode 'job' is only assigned when an async executor is
// present; job.getId() below would NPE otherwise — confirm this API always
// runs inside an async job context.
if (extractMode == Mode.FTP_UPLOAD) { // Now that the volume is copied perform the actual uploading
_uploadMonitor.extractVolume(uploadJob, sserver, volume, url, zoneId, volumeLocalPath, cmd.getStartEventId(), job.getId(), _asyncMgr);
return uploadJob.getId();
} else { // Volume is copied now make it visible under apache and create a URL.
_uploadMonitor.createVolumeDownloadURL(volumeId, volumeLocalPath, Upload.Type.VOLUME, zoneId, uploadJob.getId());
return uploadJob.getId();
}
}
}
/**
 * Maps the hypervisor type of the pool's cluster to the disk-image file
 * extension used when exporting a volume from that pool.
 *
 * @param pool the source primary storage pool
 * @return "vhd", "qcow2", "ova" or "raw" depending on hypervisor; null for
 *         any other hypervisor type
 */
private String getFormatForPool(StoragePoolVO pool) {
    ClusterVO cluster = ApiDBUtils.findClusterById(pool.getClusterId());
    switch (cluster.getHypervisorType()) {
    case XenServer:
        return "vhd";
    case KVM:
        return "qcow2";
    case VMware:
        return "ova";
    case Ovm:
        return "raw";
    default:
        return null;
    }
}
/**
 * Renames a VM instance group.
 *
 * @param cmd command carrying the group id and the new group name
 * @return the (possibly renamed) group, re-read from the database
 * @throws InvalidParameterValueException if the group does not exist or the
 *         new name is already taken by another group of the same account
 */
@Override
public InstanceGroupVO updateVmGroup(UpdateVMGroupCmd cmd) {
    final Account caller = UserContext.current().getCaller();
    final Long groupId = cmd.getId();
    final String newName = cmd.getGroupName();

    // Validate that the group exists.
    final InstanceGroupVO group = _vmGroupDao.findById(groupId.longValue());
    if (group == null) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a vm group with specified groupId");
        final String tableName = AnnotationHelper.getTableName(group);
        if (tableName == null) {
            s_logger.info("\nCould not retrieve table name (annotation) from InstanceGroupVO proxy object\n");
        } else {
            ex.addProxyObject(tableName, groupId, "groupId");
        }
        throw ex;
    }
    _accountMgr.checkAccess(caller, null, true, group);

    // Reject the rename when another group of this account already uses the name.
    final boolean nameInUse = _vmGroupDao.isNameInUse(group.getAccountId(), newName);
    if (nameInUse && !group.getName().equals(newName)) {
        throw new InvalidParameterValueException("Unable to update vm group, a group with name " + newName + " already exists for account");
    }

    if (newName != null) {
        _vmGroupDao.updateVmGroup(groupId, newName);
    }
    return _vmGroupDao.findById(groupId);
}
/**
 * Lists VM instance groups visible to the caller, filtered by id, name and
 * free-text keyword, with ACL scoping (account/domain/project) and paging.
 *
 * @param cmd list command carrying the filters and scoping parameters
 * @return the matching instance groups, paged and ordered by id
 */
@Override
public List<InstanceGroupVO> searchForVmGroups(ListVMGroupsCmd cmd) {
    Long id = cmd.getId();
    String name = cmd.getGroupName();
    String keyword = cmd.getKeyword();

    Account caller = UserContext.current().getCaller();
    List<Long> permittedAccounts = new ArrayList<Long>();

    // Resolve which accounts/domains the caller may list, honoring project scoping.
    Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
    Long domainId = domainIdRecursiveListProject.first();
    Boolean isRecursive = domainIdRecursiveListProject.second();
    ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();

    Filter searchFilter = new Filter(InstanceGroupVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());

    SearchBuilder<InstanceGroupVO> sb = _vmGroupDao.createSearchBuilder();
    sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
    if (((permittedAccounts.isEmpty()) && (domainId != null) && isRecursive)) {
        // if accountId isn't specified, we can do a domain match for the admin case if isRecursive is true
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }

    if (listProjectResourcesCriteria != null) {
        if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.ListProjectResourcesOnly) {
            sb.and("accountType", sb.entity().getAccountType(), SearchCriteria.Op.EQ);
        } else if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.SkipProjectResources) {
            sb.and("accountType", sb.entity().getAccountType(), SearchCriteria.Op.NEQ);
        }
    }

    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);

    SearchCriteria<InstanceGroupVO> sc = sb.create();
    if (listProjectResourcesCriteria != null) {
        sc.setParameters("accountType", Account.ACCOUNT_TYPE_PROJECT);
    }

    if (!permittedAccounts.isEmpty()) {
        sc.setParameters("accountIdIN", permittedAccounts.toArray());
    } else if (domainId != null) {
        DomainVO domain = _domainDao.findById(domainId);
        if (isRecursive) {
            sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        } else {
            sc.setParameters("domainId", domainId);
        }
    }

    if (keyword != null) {
        SearchCriteria<InstanceGroupVO> ssc = _vmGroupDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        // FIX: the keyword criteria was built but never attached to the main
        // search criteria, so the keyword filter had no effect. Attach it the
        // same way the other list methods do.
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }

    if (id != null) {
        sc.setParameters("id", id);
    }

    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }

    return _vmGroupDao.search(sc, searchFilter);
}
/**
 * Returns the management server version taken from the jar manifest's
 * Implementation-Version, or "unknown" when unavailable.
 */
@Override
public String getVersion() {
    final Class<?> c = ManagementServer.class;
    // FIX: getImplementationVersion() returns null when the class is not loaded
    // from a jar with a manifest (e.g. running from an IDE or exploded build);
    // the previous code threw a NullPointerException in that case.
    String fullVersion = c.getPackage().getImplementationVersion();
    if (fullVersion != null && fullVersion.length() > 0) {
        return fullVersion;
    }
    return "unknown";
}
/**
 * Records the "started" phase of an asynchronous event. Delegates to
 * {@link EventUtils#saveStartedEvent}.
 *
 * @param startEventId id of the scheduled event this start belongs to
 * @return the id of the persisted event row
 */
@Override
public Long saveStartedEvent(Long userId, Long accountId, String type, String description, long startEventId) {
return EventUtils.saveStartedEvent(userId, accountId, type, description, startEventId);
}
/**
 * Records the "completed" phase of an asynchronous event with the given
 * severity level. Delegates to {@link EventUtils#saveEvent}.
 *
 * @param level event severity (e.g. INFO/WARN/ERROR)
 * @param startEventId id of the corresponding start event
 * @return the id of the persisted event row
 */
@Override
public Long saveCompletedEvent(Long userId, Long accountId, String level, String type, String description, long startEventId) {
return EventUtils.saveEvent(userId, accountId, level, type, description, startEventId);
}
/**
 * Installs a custom console-proxy SSL certificate, either the server
 * certificate itself (certificate + private key) or one link of an
 * intermediate chain (certificate + alias + index), then schedules all
 * console proxies for restart so the new certificate takes effect.
 *
 * @param cmd upload command with the certificate, optional private key,
 *            optional alias/index and the domain suffix
 * @return a human-readable status message
 * @throws InvalidParameterValueException on inconsistent/missing parameters
 *         or failed certificate validation
 */
@Override
@DB
public String uploadCertificate(UploadCustomCertificateCmd cmd) {
    final String privateKey = cmd.getPrivateKey();
    if (privateKey != null) {
        // Server certificate upload: no alias may be supplied, and the
        // certificate/key pair must validate against the domain suffix.
        if (cmd.getAlias() != null) {
            throw new InvalidParameterValueException("Can't change the alias for private key certification");
        }
        if (!_ksMgr.validateCertificate(cmd.getCertificate(), privateKey, cmd.getDomainSuffix())) {
            throw new InvalidParameterValueException("Failed to pass certificate validation check");
        }
        _ksMgr.saveCertificate(ConsoleProxyManager.CERTIFICATE_NAME, cmd.getCertificate(), privateKey, cmd.getDomainSuffix());
    } else {
        // Certificate-chain upload: alias and index are mandatory.
        if (cmd.getAlias() == null) {
            throw new InvalidParameterValueException("alias can't be empty, if it's a certification chain");
        }
        if (cmd.getCertIndex() == null) {
            throw new InvalidParameterValueException("index can't be empty, if it's a certifciation chain");
        }
        _ksMgr.saveCertificate(cmd.getAlias(), cmd.getCertificate(), cmd.getCertIndex(), cmd.getDomainSuffix());
    }

    _consoleProxyMgr.setManagementState(ConsoleProxyManagementState.ResetSuspending);
    return "Certificate has been updated, we will stop all running console proxy VMs to propagate the new certificate, please give a few minutes for console access service to be up again";
}
/**
 * Lists hypervisor type names, scoped by zone:
 * <ul>
 *   <li>zoneId == null: all configured hypervisors</li>
 *   <li>zoneId == -1: hypervisors that have at least one cluster in EVERY zone</li>
 *   <li>otherwise: the hypervisor type of each cluster in that zone
 *       (one entry per cluster, duplicates included)</li>
 * </ul>
 *
 * @param zoneId zone scope as described above, or null
 * @return list of hypervisor type names
 */
@Override
public List<String> getHypervisors(Long zoneId) {
    final String hyperList = _configDao.getValue(Config.HypervisorList.key());
    final String[] configured = hyperList.split(",");

    if (zoneId == null) {
        // No zone filter: report everything the installation is configured for.
        return Arrays.asList(configured);
    }

    final List<String> result = new ArrayList<String>();
    if (zoneId.longValue() == -1L) {
        // Keep only hypervisors present (via at least one cluster) in every zone.
        final List<DataCenterVO> zones = _dcDao.listAll();
        for (final String hypervisor : configured) {
            int zonesWithHypervisor = 0;
            for (final DataCenterVO zone : zones) {
                if (!_clusterDao.listByDcHyType(zone.getId(), hypervisor).isEmpty()) {
                    zonesWithHypervisor++;
                }
            }
            if (zonesWithHypervisor == zones.size()) {
                result.add(hypervisor);
            }
        }
    } else {
        // Concrete zone: one entry per cluster, as before.
        for (final ClusterVO cluster : _clusterDao.listByZoneId(zoneId)) {
            result.add(cluster.getHypervisorType().toString());
        }
    }
    return result;
}
/**
 * Returns the installation-wide hash key, lazily generating and persisting a
 * random UUID-based value on first use.
 *
 * @return the cached or freshly initialized hash key
 */
@Override
public String getHashKey() {
// although we may have race conditioning here, database transaction serialization should
// give us the same key
if (_hashKey == null) {
_hashKey = _configDao.getValueAndInitIfNotExist(Config.HashKey.key(), Config.HashKey.getCategory(), UUID.randomUUID().toString());
}
return _hashKey;
}
/**
 * Generates a new SSH key pair for the resolved owner and persists its public
 * half; the private key is returned to the caller but never stored.
 *
 * @param cmd command carrying the pair name and owner resolution parameters
 * @return the persisted key pair (with transient private key set)
 * @throws InvalidParameterValueException if the owner already has a pair with this name
 */
@Override
public SSHKeyPair createSSHKeyPair(CreateSSHKeyPairCmd cmd) {
    final Account caller = UserContext.current().getCaller();
    final String accountName = cmd.getAccountName();
    final Long domainId = cmd.getDomainId();
    final Long projectId = cmd.getProjectId();

    final Account owner = _accountMgr.finalizeOwner(caller, accountName, domainId, projectId);

    // Key pair names are unique per (account, domain).
    final SSHKeyPairVO existing = _sshKeyPairDao.findByName(owner.getAccountId(), owner.getDomainId(), cmd.getName());
    if (existing != null) {
        throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
    }

    // Generate a fresh key pair.
    final SSHKeysHelper keys = new SSHKeysHelper();

    final String name = cmd.getName();
    final String publicKey = keys.getPublicKey();
    final String fingerprint = keys.getPublicKeyFingerPrint();
    final String privateKey = keys.getPrivateKey();

    return createAndSaveSSHKeyPair(name, fingerprint, publicKey, privateKey, owner);
}
/**
 * Deletes an SSH key pair belonging to the resolved owner.
 *
 * @param cmd command carrying the pair name and owner resolution parameters
 * @return true if a row was deleted
 * @throws InvalidParameterValueException if no pair with this name exists for the owner
 */
@Override
public boolean deleteSSHKeyPair(DeleteSSHKeyPairCmd cmd) {
    Account caller = UserContext.current().getCaller();
    String accountName = cmd.getAccountName();
    Long domainId = cmd.getDomainId();
    Long projectId = cmd.getProjectId();

    Account owner = _accountMgr.finalizeOwner(caller, accountName, domainId, projectId);

    SSHKeyPairVO s = _sshKeyPairDao.findByName(owner.getAccountId(), owner.getDomainId(), cmd.getName());
    if (s == null) {
        InvalidParameterValueException ex = new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' does not exist for account " + owner.getAccountName() + " in specified domain id");
        // Attach the VO table name so the API layer can translate the id for the response.
        String tablename = AnnotationHelper.getTableName(owner);
        if (tablename != null) {
            ex.addProxyObject(tablename, owner.getDomainId(), "domainId");
        } else {
            s_logger.info("\nCould not retrieve table name (annotation) from Account VO proxy object\n");
        }
        throw ex;
    }

    // FIX: delete by the resolved owner, not the caller. The existence check
    // above is scoped to the owner; deleting by the caller's account/domain
    // silently misses the row when an admin acts on behalf of another account.
    return _sshKeyPairDao.deleteByName(owner.getAccountId(), owner.getDomainId(), cmd.getName());
}
/**
 * Lists SSH key pairs visible to the caller, optionally filtered by exact
 * name and/or fingerprint, with ACL scoping and paging.
 *
 * @param cmd list command carrying the filters and scoping parameters
 * @return the matching key pairs, paged and ordered by id descending
 */
@Override
public List<? extends SSHKeyPair> listSSHKeyPairs(ListSSHKeyPairsCmd cmd) {
    final String name = cmd.getName();
    final String fingerPrint = cmd.getFingerprint();

    final Account caller = UserContext.current().getCaller();
    final List<Long> permittedAccounts = new ArrayList<Long>();

    // Resolve ACL scope (accounts/domain/project visibility) for the caller.
    final Ternary<Long, Boolean, ListProjectResourcesCriteria> scope = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, null, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, scope, cmd.listAll(), false);
    final Long domainId = scope.first();
    final Boolean isRecursive = scope.second();
    final ListProjectResourcesCriteria listProjectResourcesCriteria = scope.third();

    final SearchBuilder<SSHKeyPairVO> sb = _sshKeyPairDao.createSearchBuilder();
    _accountMgr.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
    final Filter searchFilter = new Filter(SSHKeyPairVO.class, "id", false, cmd.getStartIndex(), cmd.getPageSizeVal());

    final SearchCriteria<SSHKeyPairVO> sc = sb.create();
    _accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);

    if (name != null) {
        sc.addAnd("name", SearchCriteria.Op.EQ, name);
    }
    if (fingerPrint != null) {
        sc.addAnd("fingerprint", SearchCriteria.Op.EQ, fingerPrint);
    }

    return _sshKeyPairDao.search(sc, searchFilter);
}
/**
 * Registers a user-supplied SSH public key as a named key pair for the
 * resolved owner. No private key is stored for registered pairs.
 *
 * @param cmd command carrying the pair name, raw public key material and
 *            owner resolution parameters
 * @return the persisted key pair
 * @throws InvalidParameterValueException if the name is taken or the key
 *         material is not a valid public key
 */
@Override
public SSHKeyPair registerSSHKeyPair(RegisterSSHKeyPairCmd cmd) {
    final Account caller = UserContext.current().getCaller();
    final Account owner = _accountMgr.finalizeOwner(caller, cmd.getAccountName(), cmd.getDomainId(), cmd.getProjectId());

    // Key pair names are unique per (account, domain).
    if (_sshKeyPairDao.findByName(owner.getAccountId(), owner.getDomainId(), cmd.getName()) != null) {
        throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
    }

    // Normalize the supplied key material into canonical public-key form.
    final String publicKey = SSHKeysHelper.getPublicKeyFromKeyMaterial(cmd.getPublicKey());
    if (publicKey == null) {
        throw new InvalidParameterValueException("Public key is invalid");
    }
    final String fingerprint = SSHKeysHelper.getPublicKeyFingerprint(publicKey);

    return createAndSaveSSHKeyPair(cmd.getName(), fingerprint, publicKey, null, owner);
}
/**
 * Persists a new SSH key pair row for the given owner.
 * The private key is set on the returned object only; it is transient and
 * never written to the database.
 *
 * @param privateKey may be null (registered keys have no stored private half)
 * @return the persisted key pair VO
 */
private SSHKeyPair createAndSaveSSHKeyPair(String name, String fingerprint, String publicKey, String privateKey, Account owner) {
    final SSHKeyPairVO pair = new SSHKeyPairVO();
    pair.setAccountId(owner.getAccountId());
    pair.setDomainId(owner.getDomainId());
    pair.setName(name);
    pair.setFingerprint(fingerprint);
    pair.setPublicKey(publicKey);
    pair.setPrivateKey(privateKey); // transient; not saved.
    _sshKeyPairDao.persist(pair);
    return pair;
}
/**
 * Returns the stored encrypted password of a user VM.
 *
 * @param cmd command carrying the VM id
 * @return the value of the VM's "Encrypted.Password" detail
 * @throws InvalidParameterValueException if the VM does not exist or has no
 *         stored password
 */
@Override
public String getVMPassword(GetVMPasswordCmd cmd) {
    final Account caller = UserContext.current().getCaller();

    final UserVmVO vm = _userVmDao.findById(cmd.getId());
    if (vm == null) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("No VM with specified id found.");
        final String tableName = AnnotationHelper.getTableName(vm);
        if (tableName == null) {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tableName + " VO proxy object\n");
        } else {
            ex.addProxyObject(tableName, cmd.getId(), "vmId");
        }
        throw ex;
    }

    // make permission check
    _accountMgr.checkAccess(caller, null, true, vm);

    // The password lives in the VM's detail map; details are lazy-loaded.
    _userVmDao.loadDetails(vm);
    final String password = vm.getDetail("Encrypted.Password");
    if (password == null || password.equals("")) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("No password for VM with specified id found.");
        final String tableName = AnnotationHelper.getTableName(vm);
        if (tableName == null) {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tableName + " VO proxy object\n");
        } else {
            ex.addProxyObject(tableName, cmd.getId(), "vmId");
        }
        throw ex;
    }

    return password;
}
/**
 * Updates the stored host password for every host in a XenServer cluster,
 * transactionally: either all hosts in the cluster are updated or none are.
 * Host-scoped (non-cluster) updates are rejected for all hypervisor types.
 *
 * @param cmd command carrying cluster id (required for XenServer), username
 *            and the new password
 * @return true on success (the method throws on every failure path)
 * @throws InvalidParameterValueException on bad scope, non-XenServer cluster,
 *         or mismatched usernames across the cluster's hosts
 */
@Override
@DB
public boolean updateHostPassword(UpdateHostPasswordCmd cmd) {
if (cmd.getClusterId() == null && cmd.getHostId() == null) {
throw new InvalidParameterValueException("You should provide one of cluster id or a host id.");
} else if (cmd.getClusterId() == null) {
// Host-only scope: always rejected; the message just differs for XenServer.
HostVO host = _hostDao.findById(cmd.getHostId());
if (host != null && host.getHypervisorType() == HypervisorType.XenServer) {
throw new InvalidParameterValueException("You should provide cluster id for Xenserver cluster.");
} else {
throw new InvalidParameterValueException("This operation is not supported for this hypervisor type");
}
} else {
// Cluster scope: only XenServer clusters are supported.
ClusterVO cluster = ApiDBUtils.findClusterById(cmd.getClusterId());
if (cluster == null || cluster.getHypervisorType() != HypervisorType.XenServer) {
throw new InvalidParameterValueException("This operation is not supported for this hypervisor type");
}
// get all the hosts in this cluster
List<HostVO> hosts = _resourceMgr.listAllHostsInCluster(cmd.getClusterId());
Transaction txn = Transaction.currentTxn();
try {
txn.start();
for (HostVO h : hosts) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Changing password for host name = " + h.getName());
}
// update password for this host
// NOTE(review): nv may be null if a host has no USERNAME detail row,
// which would NPE below and roll back via the catch — confirm the
// detail row is guaranteed to exist for every cluster host.
DetailVO nv = _detailsDao.findDetail(h.getId(), ApiConstants.USERNAME);
if (nv.getValue().equals(cmd.getUsername())) {
DetailVO nvp = _detailsDao.findDetail(h.getId(), ApiConstants.PASSWORD);
nvp.setValue(DBEncryptionUtil.encrypt(cmd.getPassword()));
_detailsDao.persist(nvp);
} else {
// if one host in the cluster has diff username then rollback to maintain consistency
txn.rollback();
throw new InvalidParameterValueException("The username is not same for all hosts, please modify passwords for individual hosts.");
}
}
txn.commit();
// if hypervisor is xenserver then we update it in CitrixResourceBase
} catch (Exception e) {
txn.rollback();
throw new CloudRuntimeException("Failed to update password " + e.getMessage());
}
}
return true;
}
/**
 * Enumerates every known event type by reflecting over the String fields
 * declared on {@link EventTypes}.
 *
 * @return array of event type names, or null if reflection fails (logged)
 */
@Override
public String[] listEventTypes() {
    final Object eventObj = new EventTypes();
    final Field[] fields = EventTypes.class.getDeclaredFields();
    final String[] eventTypes = new String[fields.length];
    try {
        int idx = 0;
        for (final Field field : fields) {
            eventTypes[idx++] = field.get(eventObj).toString();
        }
        return eventTypes;
    } catch (IllegalArgumentException e) {
        s_logger.error("Error while listing Event Types", e);
    } catch (IllegalAccessException e) {
        s_logger.error("Error while listing Event Types", e);
    }
    return null;
}
/**
 * Lists hypervisor capability rows, optionally filtered by id, exact
 * hypervisor type, or a free-text keyword matched against the type name.
 *
 * @return the matching rows, paged and ordered by id
 */
@Override
public List<HypervisorCapabilitiesVO> listHypervisorCapabilities(Long id, HypervisorType hypervisorType, String keyword, Long startIndex, Long pageSizeVal) {
    final Filter searchFilter = new Filter(HypervisorCapabilitiesVO.class, "id", true, startIndex, pageSizeVal);
    final SearchCriteria<HypervisorCapabilitiesVO> sc = _hypervisorCapabilitiesDao.createSearchCriteria();

    if (id != null) {
        sc.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (hypervisorType != null) {
        sc.addAnd("hypervisorType", SearchCriteria.Op.EQ, hypervisorType);
    }
    if (keyword != null) {
        // Free-text keyword matches against the hypervisor type name.
        final SearchCriteria<HypervisorCapabilitiesVO> keywordCriteria = _hypervisorCapabilitiesDao.createSearchCriteria();
        keywordCriteria.addOr("hypervisorType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("hypervisorType", SearchCriteria.Op.SC, keywordCriteria);
    }

    return _hypervisorCapabilitiesDao.search(sc, searchFilter);
}
/**
 * Updates the mutable fields of a hypervisor-capabilities row.
 *
 * @param id row id to update
 * @param maxGuestsLimit new max-guests limit, or null to leave unchanged
 * @param securityGroupEnabled new SG flag, or null to leave unchanged
 * @return the refreshed row on success, the unchanged row when nothing was
 *         requested, or null if the database update failed
 * @throws InvalidParameterValueException if no row exists for the id
 */
@Override
public HypervisorCapabilities updateHypervisorCapabilities(Long id, Long maxGuestsLimit, Boolean securityGroupEnabled) {
    HypervisorCapabilitiesVO capabilities = _hypervisorCapabilitiesDao.findById(id, true);

    if (capabilities == null) {
        final InvalidParameterValueException ex = new InvalidParameterValueException("unable to find the hypervisor capabilities for specified id");
        final String tableName = AnnotationHelper.getTableName(capabilities);
        if (tableName == null) {
            s_logger.info("\nCould not retrieve table name (annotation) from " + tableName + " VO proxy object\n");
        } else {
            ex.addProxyObject(tableName, id, "Id");
        }
        throw ex;
    }

    // Nothing requested to change: return the row as-is.
    if (maxGuestsLimit == null && securityGroupEnabled == null) {
        return capabilities;
    }

    capabilities = _hypervisorCapabilitiesDao.createForUpdate(id);
    if (maxGuestsLimit != null) {
        capabilities.setMaxGuestsLimit(maxGuestsLimit);
    }
    if (securityGroupEnabled != null) {
        capabilities.setSecurityGroupEnabled(securityGroupEnabled);
    }

    if (!_hypervisorCapabilitiesDao.update(id, capabilities)) {
        return null;
    }
    capabilities = _hypervisorCapabilitiesDao.findById(id);
    UserContext.current().setEventDetails("Hypervisor Capabilities id=" + capabilities.getId());
    return capabilities;
}
}
| server/src/com/cloud/server/ManagementServerImpl.java | /**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.server;
import java.lang.reflect.Field;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Comparator;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import com.cloud.acl.SecurityChecker.AccessType;
import com.cloud.agent.AgentManager;
import com.cloud.agent.api.GetVncPortAnswer;
import com.cloud.agent.api.GetVncPortCommand;
import com.cloud.agent.api.storage.CopyVolumeAnswer;
import com.cloud.agent.api.storage.CopyVolumeCommand;
import com.cloud.agent.manager.allocator.HostAllocator;
import com.cloud.alert.Alert;
import com.cloud.alert.AlertManager;
import com.cloud.alert.AlertVO;
import com.cloud.alert.dao.AlertDao;
import com.cloud.api.ApiConstants;
import com.cloud.api.ApiDBUtils;
import com.cloud.api.commands.CreateSSHKeyPairCmd;
import com.cloud.api.commands.DeleteSSHKeyPairCmd;
import com.cloud.api.commands.DestroySystemVmCmd;
import com.cloud.api.commands.ExtractVolumeCmd;
import com.cloud.api.commands.GetVMPasswordCmd;
import com.cloud.api.commands.ListAlertsCmd;
import com.cloud.api.commands.ListAsyncJobsCmd;
import com.cloud.api.commands.ListCapabilitiesCmd;
import com.cloud.api.commands.ListCapacityCmd;
import com.cloud.api.commands.ListCfgsByCmd;
import com.cloud.api.commands.ListClustersCmd;
import com.cloud.api.commands.ListDiskOfferingsCmd;
import com.cloud.api.commands.ListEventsCmd;
import com.cloud.api.commands.ListGuestOsCategoriesCmd;
import com.cloud.api.commands.ListGuestOsCmd;
import com.cloud.api.commands.ListHostsCmd;
import com.cloud.api.commands.ListIsosCmd;
import com.cloud.api.commands.ListPodsByCmd;
import com.cloud.api.commands.ListPublicIpAddressesCmd;
import com.cloud.api.commands.ListRoutersCmd;
import com.cloud.api.commands.ListSSHKeyPairsCmd;
import com.cloud.api.commands.ListServiceOfferingsCmd;
import com.cloud.api.commands.ListStoragePoolsCmd;
import com.cloud.api.commands.ListSystemVMsCmd;
import com.cloud.api.commands.ListTemplatesCmd;
import com.cloud.api.commands.ListVMGroupsCmd;
import com.cloud.api.commands.ListVlanIpRangesCmd;
import com.cloud.api.commands.ListZonesByCmd;
import com.cloud.api.commands.RebootSystemVmCmd;
import com.cloud.api.commands.RegisterSSHKeyPairCmd;
import com.cloud.api.commands.StopSystemVmCmd;
import com.cloud.api.commands.UpdateDomainCmd;
import com.cloud.api.commands.UpdateHostPasswordCmd;
import com.cloud.api.commands.UpdateIsoCmd;
import com.cloud.api.commands.UpdateTemplateCmd;
import com.cloud.api.commands.UpdateTemplateOrIsoCmd;
import com.cloud.api.commands.UpdateVMGroupCmd;
import com.cloud.api.commands.UploadCustomCertificateCmd;
import com.cloud.api.response.ExtractResponse;
import com.cloud.async.AsyncJobExecutor;
import com.cloud.async.AsyncJobManager;
import com.cloud.async.AsyncJobResult;
import com.cloud.async.AsyncJobVO;
import com.cloud.async.BaseAsyncJobExecutor;
import com.cloud.async.dao.AsyncJobDao;
import com.cloud.capacity.Capacity;
import com.cloud.capacity.CapacityVO;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.capacity.dao.CapacityDaoImpl.SummedCapacity;
import com.cloud.configuration.Config;
import com.cloud.configuration.ConfigurationVO;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.consoleproxy.ConsoleProxyManagementState;
import com.cloud.consoleproxy.ConsoleProxyManager;
import com.cloud.dc.AccountVlanMapVO;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.PodVlanMapVO;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.dc.VlanVO;
import com.cloud.dc.dao.AccountVlanMapDao;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.dc.dao.PodVlanMapDao;
import com.cloud.dc.dao.VlanDao;
import com.cloud.deploy.DataCenterDeployment;
import com.cloud.deploy.DeploymentPlanner.ExcludeList;
import com.cloud.domain.DomainVO;
import com.cloud.domain.dao.DomainDao;
import com.cloud.event.ActionEvent;
import com.cloud.event.EventTypes;
import com.cloud.event.EventUtils;
import com.cloud.event.EventVO;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.CloudAuthenticationException;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.exception.StorageUnavailableException;
import com.cloud.host.DetailVO;
import com.cloud.host.Host;
import com.cloud.host.Host.Type;
import com.cloud.host.HostVO;
import com.cloud.host.dao.HostDao;
import com.cloud.host.dao.HostDetailsDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.HypervisorCapabilities;
import com.cloud.hypervisor.HypervisorCapabilitiesVO;
import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao;
import com.cloud.info.ConsoleProxyInfo;
import com.cloud.keystore.KeystoreManager;
import com.cloud.network.IPAddressVO;
import com.cloud.network.LoadBalancerVO;
import com.cloud.network.NetworkVO;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.network.dao.LoadBalancerDao;
import com.cloud.network.dao.NetworkDao;
import com.cloud.org.Grouping.AllocationState;
import com.cloud.projects.Project;
import com.cloud.projects.Project.ListProjectResourcesCriteria;
import com.cloud.projects.ProjectManager;
import com.cloud.resource.ResourceManager;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.storage.GuestOSCategoryVO;
import com.cloud.storage.GuestOSVO;
import com.cloud.storage.Storage;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.StorageManager;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.Upload;
import com.cloud.storage.Upload.Mode;
import com.cloud.storage.UploadVO;
import com.cloud.storage.VMTemplateVO;
import com.cloud.storage.Volume;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.GuestOSCategoryDao;
import com.cloud.storage.dao.GuestOSDao;
import com.cloud.storage.dao.StoragePoolDao;
import com.cloud.storage.dao.UploadDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.secondary.SecondaryStorageVmManager;
import com.cloud.storage.snapshot.SnapshotManager;
import com.cloud.storage.swift.SwiftManager;
import com.cloud.storage.upload.UploadMonitor;
import com.cloud.template.VirtualMachineTemplate.TemplateFilter;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.AccountVO;
import com.cloud.user.SSHKeyPair;
import com.cloud.user.SSHKeyPairVO;
import com.cloud.user.User;
import com.cloud.user.UserContext;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.SSHKeyPairDao;
import com.cloud.user.dao.UserDao;
import com.cloud.utils.EnumUtils;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.PasswordGenerator;
import com.cloud.utils.Ternary;
import com.cloud.utils.component.Adapters;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.component.Inject;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.JoinBuilder;
import com.cloud.utils.db.JoinBuilder.JoinType;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.MacAddress;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.ssh.SSHKeysHelper;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.DomainRouterVO;
import com.cloud.vm.InstanceGroupVO;
import com.cloud.vm.NicVO;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.VirtualMachineManager;
import com.cloud.vm.VirtualMachineProfile;
import com.cloud.vm.VirtualMachineProfileImpl;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.DomainRouterDao;
import com.cloud.vm.dao.InstanceGroupDao;
import com.cloud.vm.dao.NicDao;
import com.cloud.vm.dao.SecondaryStorageVmDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.utils.exception.CSExceptionErrorCode;
import com.cloud.utils.AnnotationHelper;
import edu.emory.mathcs.backport.java.util.Arrays;
import edu.emory.mathcs.backport.java.util.Collections;
/**
 * Central CloudStack management-server implementation. All DAO and manager
 * collaborators are resolved through the {@link ComponentLocator} in the
 * constructor; this class services the bulk of the list/search API commands
 * (zones, offerings, clusters, hosts, pods, events, ...).
 */
public class ManagementServerImpl implements ManagementServer {
    public static final Logger s_logger = Logger.getLogger(ManagementServerImpl.class.getName());

    // Managers and DAOs wired from the ComponentLocator in the constructor.
    private final AccountManager _accountMgr;
    private final AgentManager _agentMgr;
    private final AlertManager _alertMgr;
    private final IPAddressDao _publicIpAddressDao;
    private final DomainRouterDao _routerDao;
    private final ConsoleProxyDao _consoleProxyDao;
    private final ClusterDao _clusterDao;
    private final SecondaryStorageVmDao _secStorageVmDao;
    private final EventDao _eventDao;
    private final DataCenterDao _dcDao;
    private final VlanDao _vlanDao;
    private final AccountVlanMapDao _accountVlanMapDao;
    private final PodVlanMapDao _podVlanMapDao;
    private final HostDao _hostDao;
    private final HostDetailsDao _detailsDao;
    private final UserDao _userDao;
    private final UserVmDao _userVmDao;
    private final ConfigurationDao _configDao;
    private final ConsoleProxyManager _consoleProxyMgr;
    private final SecondaryStorageVmManager _secStorageVmMgr;
    private final SwiftManager _swiftMgr;
    private final ServiceOfferingDao _offeringsDao;
    private final DiskOfferingDao _diskOfferingDao;
    private final VMTemplateDao _templateDao;
    private final DomainDao _domainDao;
    private final AccountDao _accountDao;
    private final AlertDao _alertDao;
    private final CapacityDao _capacityDao;
    private final GuestOSDao _guestOSDao;
    private final GuestOSCategoryDao _guestOSCategoryDao;
    private final StoragePoolDao _poolDao;
    private final NicDao _nicDao;
    private final NetworkDao _networkDao;
    private final StorageManager _storageMgr;
    private final VirtualMachineManager _itMgr;
    private final HostPodDao _hostPodDao;
    private final VMInstanceDao _vmInstanceDao;
    private final VolumeDao _volumeDao;
    private final AsyncJobDao _jobDao;
    private final AsyncJobManager _asyncMgr;
    // Seconds to retain old events before purging; 0 disables the purge task.
    private final int _purgeDelay;
    private final InstanceGroupDao _vmGroupDao;
    private final UploadMonitor _uploadMonitor;
    private final UploadDao _uploadDao;
    private final SSHKeyPairDao _sshKeyPairDao;
    private final LoadBalancerDao _loadbalancerDao;
    private final HypervisorCapabilitiesDao _hypervisorCapabilitiesDao;
    private final Adapters<HostAllocator> _hostAllocators;
    @Inject
    ProjectManager _projectMgr;
    private final ResourceManager _resourceMgr;
    @Inject
    SnapshotManager _snapshotMgr;
    private final KeystoreManager _ksMgr;
    // Single-threaded scheduler used for the periodic event-purge task.
    private final ScheduledExecutorService _eventExecutor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("EventChecker"));
    // Snapshot of the global configuration table, loaded once in the constructor.
    private final Map<String, String> _configs;
    private final StatsCollector _statsCollector;
    // Cache of valid time-zone IDs for fast validation lookups.
    private final Map<String, Boolean> _availableIdsMap;
    private String _hashKey = null;
    /**
     * Wires all DAO/manager dependencies through the {@link ComponentLocator},
     * loads the global configuration snapshot, and schedules the periodic
     * event-purge task when {@code event.purge.delay} is configured.
     */
    protected ManagementServerImpl() {
        ComponentLocator locator = ComponentLocator.getLocator(Name);
        _configDao = locator.getDao(ConfigurationDao.class);
        _routerDao = locator.getDao(DomainRouterDao.class);
        _eventDao = locator.getDao(EventDao.class);
        _dcDao = locator.getDao(DataCenterDao.class);
        _vlanDao = locator.getDao(VlanDao.class);
        _accountVlanMapDao = locator.getDao(AccountVlanMapDao.class);
        _podVlanMapDao = locator.getDao(PodVlanMapDao.class);
        _hostDao = locator.getDao(HostDao.class);
        _detailsDao = locator.getDao(HostDetailsDao.class);
        _hostPodDao = locator.getDao(HostPodDao.class);
        _jobDao = locator.getDao(AsyncJobDao.class);
        _clusterDao = locator.getDao(ClusterDao.class);
        _nicDao = locator.getDao(NicDao.class);
        _networkDao = locator.getDao(NetworkDao.class);
        _loadbalancerDao = locator.getDao(LoadBalancerDao.class);
        _accountMgr = locator.getManager(AccountManager.class);
        _agentMgr = locator.getManager(AgentManager.class);
        _alertMgr = locator.getManager(AlertManager.class);
        _consoleProxyMgr = locator.getManager(ConsoleProxyManager.class);
        _secStorageVmMgr = locator.getManager(SecondaryStorageVmManager.class);
        _swiftMgr = locator.getManager(SwiftManager.class);
        _storageMgr = locator.getManager(StorageManager.class);
        _publicIpAddressDao = locator.getDao(IPAddressDao.class);
        _consoleProxyDao = locator.getDao(ConsoleProxyDao.class);
        _secStorageVmDao = locator.getDao(SecondaryStorageVmDao.class);
        _userDao = locator.getDao(UserDao.class);
        _userVmDao = locator.getDao(UserVmDao.class);
        _offeringsDao = locator.getDao(ServiceOfferingDao.class);
        _diskOfferingDao = locator.getDao(DiskOfferingDao.class);
        _templateDao = locator.getDao(VMTemplateDao.class);
        _domainDao = locator.getDao(DomainDao.class);
        _accountDao = locator.getDao(AccountDao.class);
        _alertDao = locator.getDao(AlertDao.class);
        _capacityDao = locator.getDao(CapacityDao.class);
        _guestOSDao = locator.getDao(GuestOSDao.class);
        _guestOSCategoryDao = locator.getDao(GuestOSCategoryDao.class);
        _poolDao = locator.getDao(StoragePoolDao.class);
        _vmGroupDao = locator.getDao(InstanceGroupDao.class);
        _uploadDao = locator.getDao(UploadDao.class);
        // Snapshot of the global configuration table, used throughout this class.
        _configs = _configDao.getConfiguration();
        _vmInstanceDao = locator.getDao(VMInstanceDao.class);
        _volumeDao = locator.getDao(VolumeDao.class);
        _asyncMgr = locator.getManager(AsyncJobManager.class);
        _uploadMonitor = locator.getManager(UploadMonitor.class);
        _sshKeyPairDao = locator.getDao(SSHKeyPairDao.class);
        _itMgr = locator.getManager(VirtualMachineManager.class);
        _ksMgr = locator.getManager(KeystoreManager.class);
        _resourceMgr = locator.getManager(ResourceManager.class);
        _hypervisorCapabilitiesDao = locator.getDao(HypervisorCapabilitiesDao.class);
        _hostAllocators = locator.getAdapters(HostAllocator.class);
        if (_hostAllocators == null || !_hostAllocators.isSet()) {
            // Missing allocators is a misconfiguration; log it but keep starting up.
            s_logger.error("Unable to find HostAllocators");
        }
        String value = _configs.get("account.cleanup.interval");
        int cleanup = NumbersUtil.parseInt(value, 60 * 60 * 24); // 1 day.
        _statsCollector = StatsCollector.getInstance(_configs);
        _purgeDelay = NumbersUtil.parseInt(_configs.get("event.purge.delay"), 0);
        if (_purgeDelay != 0) {
            // Purge old events on the same cadence as account cleanup.
            _eventExecutor.scheduleAtFixedRate(new EventPurgeTask(), cleanup, cleanup, TimeUnit.SECONDS);
        }
        // Cache every valid time-zone ID for fast membership checks later.
        String[] availableIds = TimeZone.getAvailableIDs();
        _availableIdsMap = new HashMap<String, Boolean>(availableIds.length);
        for (String id : availableIds) {
            _availableIdsMap.put(id, true);
        }
    }
    /** Returns the global-configuration snapshot loaded at construction time. */
    protected Map<String, String> getConfigs() {
        return _configs;
    }
    /**
     * Generates a random 6-character password via {@link PasswordGenerator}.
     * The length is fixed here; callers needing longer passwords must call the
     * generator directly.
     */
    @Override
    public String generateRandomPassword() {
        return PasswordGenerator.generateRandomPassword(6);
    }
    /**
     * Lists the data centers (zones) visible to the caller.
     *
     * <p>Visibility rules implemented below: with an explicit {@code domainId},
     * only zones bound to that domain are returned; root admins (or a null
     * caller) see all zones; normal users see their domain's zones plus all
     * ancestor domains' zones plus public zones; domain admins additionally see
     * zones of all child domains. Disabled zones are removed for non-admins.
     *
     * @param cmd carries the optional filters: domainId, zone id, keyword,
     *            and the "available" flag
     * @return the matching zones; when an id filter is supplied, at most that
     *         single zone
     */
    @Override
    public List<DataCenterVO> listDataCenters(ListZonesByCmd cmd) {
        Account account = UserContext.current().getCaller();
        List<DataCenterVO> dcs = null;
        Long domainId = cmd.getDomainId();
        Long id = cmd.getId();
        boolean removeDisabledZones = false;
        String keyword = cmd.getKeyword();
        if (domainId != null) {
            // for domainId != null
            // right now, we made the decision to only list zones associated with this domain
            dcs = _dcDao.findZonesByDomainId(domainId, keyword); // private zones
        } else if ((account == null || account.getType() == Account.ACCOUNT_TYPE_ADMIN)) {
            if (keyword != null) {
                dcs = _dcDao.findByKeyword(keyword);
            } else {
                dcs = _dcDao.listAll(); // all zones
            }
        } else if (account.getType() == Account.ACCOUNT_TYPE_NORMAL) {
            // it was decided to return all zones for the user's domain, and everything above till root
            // list all zones belonging to this domain, and all of its parents
            // check the parent, if not null, add zones for that parent to list
            dcs = new ArrayList<DataCenterVO>();
            DomainVO domainRecord = _domainDao.findById(account.getDomainId());
            if (domainRecord != null) {
                // walk up the domain tree, collecting zones at each level
                while (true) {
                    dcs.addAll(_dcDao.findZonesByDomainId(domainRecord.getId(), keyword));
                    if (domainRecord.getParent() != null) {
                        domainRecord = _domainDao.findById(domainRecord.getParent());
                    } else {
                        break;
                    }
                }
            }
            // add all public zones too
            dcs.addAll(_dcDao.listPublicZones(keyword));
            removeDisabledZones = true;
        } else if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN || account.getType() == Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN) {
            // it was decided to return all zones for the domain admin, and everything above till root
            dcs = new ArrayList<DataCenterVO>();
            DomainVO domainRecord = _domainDao.findById(account.getDomainId());
            // this covers path till root
            if (domainRecord != null) {
                DomainVO localRecord = domainRecord;
                while (true) {
                    dcs.addAll(_dcDao.findZonesByDomainId(localRecord.getId(), keyword));
                    if (localRecord.getParent() != null) {
                        localRecord = _domainDao.findById(localRecord.getParent());
                    } else {
                        break;
                    }
                }
            }
            // this covers till leaf
            if (domainRecord != null) {
                // find all children for this domain based on a like search by path
                List<DomainVO> allChildDomains = _domainDao.findAllChildren(domainRecord.getPath(), domainRecord.getId());
                List<Long> allChildDomainIds = new ArrayList<Long>();
                // create list of domainIds for search
                for (DomainVO domain : allChildDomains) {
                    allChildDomainIds.add(domain.getId());
                }
                // now make a search for zones based on this
                if (allChildDomainIds.size() > 0) {
                    List<DataCenterVO> childZones = _dcDao.findChildZones((allChildDomainIds.toArray()), keyword);
                    dcs.addAll(childZones);
                }
            }
            // add all public zones too
            dcs.addAll(_dcDao.listPublicZones(keyword));
            removeDisabledZones = true;
        }
        if (removeDisabledZones) {
            dcs.removeAll(_dcDao.listDisabledZones());
        }
        Boolean available = cmd.isAvailable();
        if (account != null) {
            if ((available != null) && Boolean.FALSE.equals(available)) {
                // available=false: keep only zones where the account already has a router
                List<DomainRouterVO> routers = _routerDao.listBy(account.getId());
                for (Iterator<DataCenterVO> iter = dcs.iterator(); iter.hasNext();) {
                    DataCenterVO dc = iter.next();
                    boolean found = false;
                    for (DomainRouterVO router : routers) {
                        if (dc.getId() == router.getDataCenterIdToDeployIn()) {
                            found = true;
                            break;
                        }
                    }
                    if (!found) {
                        iter.remove();
                    }
                }
            }
        }
        if (id != null) {
            // narrow the result down to the single requested zone id
            List<DataCenterVO> singleZone = new ArrayList<DataCenterVO>();
            for (DataCenterVO zone : dcs) {
                if (zone.getId() == id) {
                    singleZone.add(zone);
                }
            }
            return singleZone;
        }
        return dcs;
    }
    /** Looks up a host by its database id; returns null when not found (DAO semantics). */
    @Override
    public HostVO getHostBy(long hostId) {
        return _hostDao.findById(hostId);
    }
    /**
     * Returns this management server's identifier, derived from the host's MAC
     * address so it is stable across restarts on the same machine.
     */
    @Override
    public long getId() {
        return MacAddress.getMacAddress().toLong();
    }
protected void checkPortParameters(String publicPort, String privatePort, String privateIp, String proto) {
if (!NetUtils.isValidPort(publicPort)) {
throw new InvalidParameterValueException("publicPort is an invalid value");
}
if (!NetUtils.isValidPort(privatePort)) {
throw new InvalidParameterValueException("privatePort is an invalid value");
}
// s_logger.debug("Checking if " + privateIp + " is a valid private IP address. Guest IP address is: " +
// _configs.get("guest.ip.network"));
//
// if (!NetUtils.isValidPrivateIp(privateIp, _configs.get("guest.ip.network"))) {
// throw new InvalidParameterValueException("Invalid private ip address");
// }
if (!NetUtils.isValidProto(proto)) {
throw new InvalidParameterValueException("Invalid protocol");
}
}
    /**
     * Searches events by optional filters. Non-positive {@code userId} /
     * {@code accountId} values mean "no filter" for that field. Start dates are
     * widened to 00:00:00 and end dates to 23:59:59 so the window covers whole
     * days.
     *
     * @param userId    filter by user when > 0
     * @param accountId filter by account when > 0
     * @param domainId  optional domain filter
     * @param type      optional event-type filter (exact match)
     * @param level     optional severity filter (exact match)
     * @param startDate optional window start (inclusive, widened to day start)
     * @param endDate   optional window end (inclusive, widened to day end)
     * @return the matching events, unpaged
     */
    @Override
    public List<EventVO> getEvents(long userId, long accountId, Long domainId, String type, String level, Date startDate, Date endDate) {
        SearchCriteria<EventVO> sc = _eventDao.createSearchCriteria();
        if (userId > 0) {
            sc.addAnd("userId", SearchCriteria.Op.EQ, userId);
        }
        if (accountId > 0) {
            sc.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
        }
        if (domainId != null) {
            sc.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
        }
        if (type != null) {
            sc.addAnd("type", SearchCriteria.Op.EQ, type);
        }
        if (level != null) {
            sc.addAnd("level", SearchCriteria.Op.EQ, level);
        }
        if (startDate != null && endDate != null) {
            // both bounds present: widen to whole days and use a range predicate
            startDate = massageDate(startDate, 0, 0, 0);
            endDate = massageDate(endDate, 23, 59, 59);
            sc.addAnd("createDate", SearchCriteria.Op.BETWEEN, startDate, endDate);
        } else if (startDate != null) {
            startDate = massageDate(startDate, 0, 0, 0);
            sc.addAnd("createDate", SearchCriteria.Op.GTEQ, startDate);
        } else if (endDate != null) {
            endDate = massageDate(endDate, 23, 59, 59);
            sc.addAnd("createDate", SearchCriteria.Op.LTEQ, endDate);
        }
        return _eventDao.search(sc, null);
    }
private Date massageDate(Date date, int hourOfDay, int minute, int second) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, hourOfDay);
cal.set(Calendar.MINUTE, minute);
cal.set(Calendar.SECOND, second);
return cal.getTime();
}
// This method is used for permissions check for both disk and service offerings
private boolean isPermissible(Long accountDomainId, Long offeringDomainId) {
if (accountDomainId == offeringDomainId) {
return true; // account and service offering in same domain
}
DomainVO domainRecord = _domainDao.findById(accountDomainId);
if (domainRecord != null) {
while (true) {
if (domainRecord.getId() == offeringDomainId) {
return true;
}
// try and move on to the next domain
if (domainRecord.getParent() != null) {
domainRecord = _domainDao.findById(domainRecord.getParent());
} else {
break;
}
}
}
return false;
}
@Override
public List<ServiceOfferingVO> searchForServiceOfferings(ListServiceOfferingsCmd cmd) {
// Note
// The list method for offerings is being modified in accordance with discussion with Will/Kevin
// For now, we will be listing the following based on the usertype
// 1. For root, we will list all offerings
// 2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way
// till
// root
Boolean isAscending = Boolean.parseBoolean(_configDao.getValue("sortkey.algorithm"));
isAscending = (isAscending == null ? true : isAscending);
Filter searchFilter = new Filter(ServiceOfferingVO.class, "sortKey", isAscending, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
Account caller = UserContext.current().getCaller();
Object name = cmd.getServiceOfferingName();
Object id = cmd.getId();
Object keyword = cmd.getKeyword();
Long vmId = cmd.getVirtualMachineId();
Long domainId = cmd.getDomainId();
Boolean isSystem = cmd.getIsSystem();
String vm_type_str = cmd.getSystemVmType();
if (caller.getType() != Account.ACCOUNT_TYPE_ADMIN && isSystem) {
throw new InvalidParameterValueException("Only ROOT admins can access system's offering");
}
// Keeping this logic consistent with domain specific zones
// if a domainId is provided, we just return the so associated with this domain
if (domainId != null && caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
// check if the user's domain == so's domain || user's domain is a child of so's domain
if (!isPermissible(caller.getDomainId(), domainId)) {
throw new PermissionDeniedException("The account:" + caller.getAccountName() + " does not fall in the same domain hierarchy as the service offering");
}
}
// For non-root users
if ((caller.getType() == Account.ACCOUNT_TYPE_NORMAL || caller.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) || caller.getType() == Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN) {
if (isSystem) {
throw new InvalidParameterValueException("Only root admins can access system's offering");
}
return searchServiceOfferingsInternal(caller, name, id, vmId, keyword, searchFilter);
}
// for root users, the existing flow
if (caller.getDomainId() != 1 && isSystem) { // NON ROOT admin
throw new InvalidParameterValueException("Non ROOT admins cannot access system's offering");
}
if (keyword != null) {
SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
} else if (vmId != null) {
UserVmVO vmInstance = _userVmDao.findById(vmId);
if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
}
_accountMgr.checkAccess(caller, null, true, vmInstance);
ServiceOfferingVO offering = _offeringsDao.findByIdIncludingRemoved(vmInstance.getServiceOfferingId());
sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
// Only return offerings with the same Guest IP type and storage pool preference
// sc.addAnd("guestIpType", SearchCriteria.Op.EQ, offering.getGuestIpType());
sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
}
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (isSystem != null) {
sc.addAnd("systemUse", SearchCriteria.Op.EQ, isSystem);
}
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
if (domainId != null) {
sc.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
}
if (vm_type_str != null) {
sc.addAnd("vm_type", SearchCriteria.Op.EQ, vm_type_str);
}
sc.addAnd("systemUse", SearchCriteria.Op.EQ, isSystem);
sc.addAnd("removed", SearchCriteria.Op.NULL);
return _offeringsDao.search(sc, searchFilter);
}
    /**
     * Offering search for normal users and domain admins: collects non-system
     * offerings from the caller's domain and every ancestor domain up to root,
     * then appends all public offerings unless a name/keyword filter was given.
     *
     * @param caller       the requesting account (its domain anchors the walk)
     * @param name         optional name filter (LIKE match)
     * @param id           optional offering id; when set, short-circuits to that
     *                     single offering
     * @param vmId         optional VM id; restricts to offerings compatible with
     *                     that VM's storage preference, excluding its current one
     * @param keyword      optional keyword matched against name/displayText
     * @param searchFilter paging/sort filter built by the caller
     * @return offerings visible to the caller
     * @throws InvalidParameterValueException when vmId does not resolve to a VM
     * @throws CloudAuthenticationException   when the caller's domain record is missing
     */
    private List<ServiceOfferingVO> searchServiceOfferingsInternal(Account caller, Object name, Object id, Long vmId, Object keyword, Filter searchFilter) {
        // it was decided to return all offerings for the user's domain, and everything above till root (for normal user
        // or
        // domain admin)
        // list all offerings belonging to this domain, and all of its parents
        // check the parent, if not null, add offerings for that parent to list
        List<ServiceOfferingVO> sol = new ArrayList<ServiceOfferingVO>();
        DomainVO domainRecord = _domainDao.findById(caller.getDomainId());
        boolean includePublicOfferings = true;
        if (domainRecord != null) {
            while (true) {
                if (id != null) {
                    // explicit id requested: return just that offering (or empty)
                    ServiceOfferingVO so = _offeringsDao.findById((Long) id);
                    if (so != null) {
                        sol.add(so);
                    }
                    return sol;
                }
                SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
                if (keyword != null) {
                    // a keyword filter suppresses the public-offering append below
                    includePublicOfferings = false;
                    SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
                    ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                    ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                    sc.addAnd("name", SearchCriteria.Op.SC, ssc);
                } else if (vmId != null) {
                    UserVmVO vmInstance = _userVmDao.findById(vmId);
                    if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
                        throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
                    }
                    _accountMgr.checkAccess(caller, null, false, vmInstance);
                    ServiceOfferingVO offering = _offeringsDao.findById(vmInstance.getServiceOfferingId());
                    // exclude the VM's current offering; match its storage preference
                    sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
                    sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
                }
                if (name != null) {
                    // a name filter also suppresses the public-offering append
                    includePublicOfferings = false;
                    sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
                }
                sc.addAnd("systemUse", SearchCriteria.Op.EQ, false);
                // for this domain
                sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
                // don't return removed service offerings
                sc.addAnd("removed", SearchCriteria.Op.NULL);
                // search and add for this domain
                sol.addAll(_offeringsDao.search(sc, searchFilter));
                // try and move on to the next domain
                if (domainRecord.getParent() != null) {
                    domainRecord = _domainDao.findById(domainRecord.getParent());
                } else {
                    break;// now we got all the offerings for this user/dom adm
                }
            }
        } else {
            s_logger.error("Could not find the domainId for account:" + caller.getAccountName());
            throw new CloudAuthenticationException("Could not find the domainId for account:" + caller.getAccountName());
        }
        // add all the public offerings to the sol list before returning
        if (includePublicOfferings) {
            sol.addAll(_offeringsDao.findPublicServiceOfferings());
        }
        return sol;
    }
@Override
public List<ClusterVO> searchForClusters(ListClustersCmd cmd) {
Filter searchFilter = new Filter(ClusterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<ClusterVO> sc = _clusterDao.createSearchCriteria();
Object id = cmd.getId();
Object name = cmd.getClusterName();
Object podId = cmd.getPodId();
Long zoneId = cmd.getZoneId();
Object hypervisorType = cmd.getHypervisorType();
Object clusterType = cmd.getClusterType();
Object allocationState = cmd.getAllocationState();
String keyword = cmd.getKeyword();
zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), zoneId);
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
if (podId != null) {
sc.addAnd("podId", SearchCriteria.Op.EQ, podId);
}
if (zoneId != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
}
if (hypervisorType != null) {
sc.addAnd("hypervisorType", SearchCriteria.Op.EQ, hypervisorType);
}
if (clusterType != null) {
sc.addAnd("clusterType", SearchCriteria.Op.EQ, clusterType);
}
if (allocationState != null) {
sc.addAnd("allocationState", SearchCriteria.Op.EQ, allocationState);
}
if (keyword != null) {
SearchCriteria<ClusterVO> ssc = _clusterDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("hypervisorType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
return _clusterDao.search(sc, searchFilter);
}
@Override
public List<HostVO> searchForServers(ListHostsCmd cmd) {
Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());
Object name = cmd.getHostName();
Object type = cmd.getType();
Object state = cmd.getState();
Object pod = cmd.getPodId();
Object cluster = cmd.getClusterId();
Object id = cmd.getId();
Object keyword = cmd.getKeyword();
Object resourceState = cmd.getResourceState();
return searchForServers(cmd.getStartIndex(), cmd.getPageSizeVal(), name, type, state, zoneId, pod, cluster, id, keyword, resourceState);
}
    /**
     * Finds candidate hosts for migrating a running VM: returns the pair
     * (all other hosts in the VM's cluster, subset of those that the configured
     * HostAllocators deem suitable / with enough capacity).
     *
     * <p>Restrictions enforced here: root-admin only; VM must exist, be Running,
     * be on XenServer/VMware/KVM/OVM, and must not use local storage.
     *
     * @param vmId       id of the VM to migrate
     * @param startIndex paging start for the cluster-host listing
     * @param pageSize   paging size for the cluster-host listing
     * @return pair of (other hosts in cluster, suitable target hosts)
     * @throws PermissionDeniedException      when the caller is not a root admin
     * @throws InvalidParameterValueException for a missing/ineligible VM or host
     */
    @Override
    public Pair<List<? extends Host>, List<? extends Host>> listHostsForMigrationOfVM(Long vmId, Long startIndex, Long pageSize) {
        // access check - only root admin can migrate VM
        Account caller = UserContext.current().getCaller();
        if (caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Caller is not a root admin, permission denied to migrate the VM");
            }
            throw new PermissionDeniedException("No permission to migrate VM, Only Root Admin can migrate a VM!");
        }
        VMInstanceVO vm = _vmInstanceDao.findById(vmId);
        if (vm == null) {
            throw new InvalidParameterValueException("Unable to find the VM by id=" + vmId);
        }
        // business logic
        if (vm.getState() != State.Running) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("VM is not Running, unable to migrate the vm " + vm);
            }
            throw new InvalidParameterValueException("VM is not Running, unable to migrate the vm " + vm);
        }
        // only these hypervisors support migration through this path
        if (!vm.getHypervisorType().equals(HypervisorType.XenServer) && !vm.getHypervisorType().equals(HypervisorType.VMware) && !vm.getHypervisorType().equals(HypervisorType.KVM)
                && !vm.getHypervisorType().equals(HypervisorType.Ovm)) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug(vm + " is not XenServer/VMware/KVM/OVM, cannot migrate this VM.");
            }
            throw new InvalidParameterValueException("Unsupported Hypervisor Type for VM migration, we support XenServer/VMware/KVM only");
        }
        ServiceOfferingVO svcOffering = _offeringsDao.findById(vm.getServiceOfferingId());
        if (svcOffering.getUseLocalStorage()) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug(vm + " is using Local Storage, cannot migrate this VM.");
            }
            throw new InvalidParameterValueException("Unsupported operation, VM uses Local storage, cannot migrate");
        }
        // NOTE(review): getHostId() returns a Long that is unboxed here; a Running
        // VM is presumably always placed on a host — confirm this invariant.
        long srcHostId = vm.getHostId();
        Host srcHost = _hostDao.findById(srcHostId);
        if (srcHost == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Unable to find the host with id: " + srcHostId + " of this VM:" + vm);
            }
            throw new InvalidParameterValueException("Unable to find the host with id: " + srcHostId + " of this VM:" + vm);
        }
        Long cluster = srcHost.getClusterId();
        Type hostType = srcHost.getType();
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Searching for all hosts in cluster: " + cluster + " for migrating VM " + vm);
        }
        List<? extends Host> allHostsInCluster = searchForServers(startIndex, pageSize, null, hostType, null, null, null, cluster, null, null, null);
        // filter out the current host
        allHostsInCluster.remove(srcHost);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Other Hosts in this cluster: " + allHostsInCluster);
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Calling HostAllocators to search for hosts in cluster: " + cluster + " having enough capacity and suitable for migration");
        }
        List<Host> suitableHosts = new ArrayList<Host>();
        Enumeration<HostAllocator> enHost = _hostAllocators.enumeration();
        VirtualMachineProfile<VMInstanceVO> vmProfile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
        DataCenterDeployment plan = new DataCenterDeployment(srcHost.getDataCenterId(), srcHost.getPodId(), srcHost.getClusterId(), null, null, null);
        ExcludeList excludes = new ExcludeList();
        excludes.addHost(srcHostId);
        // first allocator that yields any suitable host wins
        while (enHost.hasMoreElements()) {
            final HostAllocator allocator = enHost.nextElement();
            suitableHosts = allocator.allocateTo(vmProfile, plan, Host.Type.Routing, excludes, HostAllocator.RETURN_UPTO_ALL, false);
            if (suitableHosts != null && !suitableHosts.isEmpty()) {
                break;
            }
        }
        if (suitableHosts.isEmpty()) {
            s_logger.debug("No suitable hosts found");
        } else {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Hosts having capacity and suitable for migration: " + suitableHosts);
            }
        }
        return new Pair<List<? extends Host>, List<? extends Host>>(allHostsInCluster, suitableHosts);
    }
/**
 * Internal host search used by the host-listing and migration paths.
 * All filter arguments are optional; {@code null} means "do not filter on this".
 * Results are paged via {@code startIndex}/{@code pageSize} and ordered by id ascending.
 */
private List<HostVO> searchForServers(Long startIndex, Long pageSize, Object name, Object type, Object state, Object zone, Object pod, Object cluster, Object id, Object keyword,
        Object resourceState) {
    Filter pageFilter = new Filter(HostVO.class, "id", Boolean.TRUE, startIndex, pageSize);
    SearchCriteria<HostVO> criteria = _hostDao.createSearchCriteria();

    // Free-text keyword fuzzily matches name, status or type.
    if (keyword != null) {
        SearchCriteria<HostVO> keywordCriteria = _hostDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("status", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }

    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (name != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (type != null) {
        // Note: type uses a leading wildcard only (suffix match).
        criteria.addAnd("type", SearchCriteria.Op.LIKE, "%" + type);
    }
    if (state != null) {
        criteria.addAnd("status", SearchCriteria.Op.EQ, state);
    }
    if (zone != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
    }
    if (pod != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, pod);
    }
    if (cluster != null) {
        criteria.addAnd("clusterId", SearchCriteria.Op.EQ, cluster);
    }
    if (resourceState != null) {
        criteria.addAnd("resourceState", SearchCriteria.Op.EQ, resourceState);
    }

    return _hostDao.search(criteria, pageFilter);
}
/**
 * Lists pods matching the criteria on the command (id, name, zone,
 * allocation state, or a fuzzy keyword on name/description), paged and
 * ordered by data center id.
 */
@Override
public List<HostPodVO> searchForPods(ListPodsByCmd cmd) {
    Filter pageFilter = new Filter(HostPodVO.class, "dataCenterId", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<HostPodVO> criteria = _hostPodDao.createSearchCriteria();

    String podName = cmd.getPodName();
    Long id = cmd.getId();
    Long zoneId = cmd.getZoneId();
    Object keyword = cmd.getKeyword();
    Object allocationState = cmd.getAllocationState();

    // Restrict the zone to what the caller is allowed to see.
    zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), zoneId);

    if (keyword != null) {
        SearchCriteria<HostPodVO> keywordCriteria = _hostPodDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (podName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + podName + "%");
    }
    if (zoneId != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    if (allocationState != null) {
        criteria.addAnd("allocationState", SearchCriteria.Op.EQ, allocationState);
    }

    return _hostPodDao.search(criteria, pageFilter);
}
/**
 * Lists VLAN IP ranges matching the criteria in {@code cmd}.
 * <p>
 * Resolution order: an explicit account (name + domain) or a project maps to
 * the owning account id; {@code forVirtualNetwork} selects the VLAN type
 * (defaulting to VirtualNetwork when unspecified). When a keyword is given it
 * takes precedence and the individual parameter filters are skipped.
 *
 * @throws InvalidParameterValueException if both account and project are
 *         given, or the account/project cannot be found
 */
@Override
public List<VlanVO> searchForVlans(ListVlanIpRangesCmd cmd) {
    // If an account name and domain ID are specified, look up the account
    String accountName = cmd.getAccountName();
    Long domainId = cmd.getDomainId();
    Long accountId = null;
    Long networkId = cmd.getNetworkId();
    Boolean forVirtual = cmd.getForVirtualNetwork();
    String vlanType = null;
    Long projectId = cmd.getProjectId();
    Long physicalNetworkId = cmd.getPhysicalNetworkId();

    if (accountName != null && domainId != null) {
        if (projectId != null) {
            throw new InvalidParameterValueException("Account and projectId can't be specified together");
        }
        Account account = _accountDao.findActiveAccount(accountName, domainId);
        if (account == null) {
            throw new InvalidParameterValueException("Unable to find account " + accountName + " in domain " + domainId);
        } else {
            accountId = account.getId();
        }
    }

    // Translate the boolean flag into the VLAN type name used by the join filter.
    if (forVirtual != null) {
        if (forVirtual) {
            vlanType = VlanType.VirtualNetwork.toString();
        } else {
            vlanType = VlanType.DirectAttached.toString();
        }
    }

    // set project information: project resources are owned by the project's internal account
    if (projectId != null) {
        Project project = _projectMgr.getProject(projectId);
        if (project == null) {
            throw new InvalidParameterValueException("Unable to find project by id " + projectId);
        }
        accountId = project.getProjectAccountId();
    }

    Filter searchFilter = new Filter(VlanVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());

    Object id = cmd.getId();
    Object vlan = cmd.getVlan();
    Object dataCenterId = cmd.getZoneId();
    Object podId = cmd.getPodId();
    Object keyword = cmd.getKeyword();

    SearchBuilder<VlanVO> sb = _vlanDao.createSearchBuilder();
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    // FIX: the "vlan" condition was previously declared twice; one
    // declaration is sufficient and behaves identically.
    sb.and("vlan", sb.entity().getVlanTag(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("networkId", sb.entity().getNetworkId(), SearchCriteria.Op.EQ);
    sb.and("vlanType", sb.entity().getVlanType(), SearchCriteria.Op.EQ);
    sb.and("physicalNetworkId", sb.entity().getPhysicalNetworkId(), SearchCriteria.Op.EQ);

    // Account-scoped ranges require a join through the account/VLAN map.
    if (accountId != null) {
        SearchBuilder<AccountVlanMapVO> accountVlanMapSearch = _accountVlanMapDao.createSearchBuilder();
        accountVlanMapSearch.and("accountId", accountVlanMapSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
        sb.join("accountVlanMapSearch", accountVlanMapSearch, sb.entity().getId(), accountVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
    }

    // Pod-scoped ranges require a join through the pod/VLAN map.
    if (podId != null) {
        SearchBuilder<PodVlanMapVO> podVlanMapSearch = _podVlanMapDao.createSearchBuilder();
        podVlanMapSearch.and("podId", podVlanMapSearch.entity().getPodId(), SearchCriteria.Op.EQ);
        sb.join("podVlanMapSearch", podVlanMapSearch, sb.entity().getId(), podVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
    }

    SearchCriteria<VlanVO> sc = sb.create();
    if (keyword != null) {
        // Keyword search replaces all the individual parameter filters.
        SearchCriteria<VlanVO> ssc = _vlanDao.createSearchCriteria();
        ssc.addOr("vlanId", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("ipRange", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("vlanId", SearchCriteria.Op.SC, ssc);
    } else {
        if (id != null) {
            sc.setParameters("id", id);
        }
        if (vlan != null) {
            sc.setParameters("vlan", vlan);
        }
        if (dataCenterId != null) {
            sc.setParameters("dataCenterId", dataCenterId);
        }
        if (networkId != null) {
            sc.setParameters("networkId", networkId);
        }
        if (accountId != null) {
            sc.setJoinParameters("accountVlanMapSearch", "accountId", accountId);
        }
        if (podId != null) {
            sc.setJoinParameters("podVlanMapSearch", "podId", podId);
        }
        if (vlanType != null) {
            sc.setParameters("vlanType", vlanType);
        }
        if (physicalNetworkId != null) {
            sc.setParameters("physicalNetworkId", physicalNetworkId);
        }
    }

    return _vlanDao.search(sc, searchFilter);
}
/**
 * Lists configuration entries, optionally filtered by name, category, or a
 * fuzzy keyword across most columns. Entries in the "Hidden" category are
 * never returned. Results are paged and ordered by name ascending.
 */
@Override
public List<ConfigurationVO> searchForConfigurations(ListCfgsByCmd cmd) {
    Filter pageFilter = new Filter(ConfigurationVO.class, "name", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<ConfigurationVO> criteria = _configDao.createSearchCriteria();

    Object name = cmd.getConfigName();
    Object category = cmd.getCategory();
    Object keyword = cmd.getKeyword();

    if (keyword != null) {
        SearchCriteria<ConfigurationVO> keywordCriteria = _configDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("instance", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("component", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("category", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("value", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (name != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (category != null) {
        criteria.addAnd("category", SearchCriteria.Op.EQ, category);
    }

    // hidden configurations are not displayed using the search API
    criteria.addAnd("category", SearchCriteria.Op.NEQ, "Hidden");

    return _configDao.search(criteria, pageFilter);
}
/**
 * Lists ISOs visible to the caller according to the requested filter.
 * Builds the ACL scope for the caller and delegates to the shared
 * template/ISO listing routine with {@code isIso = true}.
 */
@Override
public Set<Pair<Long, Long>> listIsos(ListIsosCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
    TemplateFilter filter = TemplateFilter.valueOf(cmd.getIsoFilter());
    Account caller = UserContext.current().getCaller();

    // Only non-normal accounts asking for "all" see everything.
    // (valueOf never returns null, so the null guard is unnecessary.)
    boolean listAll = caller.getType() != Account.ACCOUNT_TYPE_NORMAL && filter == TemplateFilter.all;

    List<Long> permittedAccountIds = new ArrayList<Long>();
    Ternary<Long, Boolean, ListProjectResourcesCriteria> aclScope =
            new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, cmd.getId(), cmd.getAccountName(), cmd.getProjectId(), permittedAccountIds, aclScope, listAll, false);
    ListProjectResourcesCriteria projectCriteria = aclScope.third();

    // Materialize permitted account ids into Account objects.
    List<Account> permittedAccounts = new ArrayList<Account>();
    for (Long accountId : permittedAccountIds) {
        permittedAccounts.add(_accountMgr.getAccount(accountId));
    }

    HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
    return listTemplates(cmd.getId(), cmd.getIsoName(), cmd.getKeyword(), filter, true, cmd.isBootable(), cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, true,
            cmd.listInReadyState(), permittedAccounts, caller, projectCriteria);
}
/**
 * Lists templates visible to the caller according to the requested filter.
 * Builds the ACL scope for the caller and delegates to the shared
 * template/ISO listing routine with {@code isIso = false}.
 */
@Override
public Set<Pair<Long, Long>> listTemplates(ListTemplatesCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
    TemplateFilter filter = TemplateFilter.valueOf(cmd.getTemplateFilter());
    Long id = cmd.getId();
    Account caller = UserContext.current().getCaller();

    // Only non-normal accounts asking for "all" see everything.
    // (valueOf never returns null, so the null guard is unnecessary.)
    boolean listAll = caller.getType() != Account.ACCOUNT_TYPE_NORMAL && filter == TemplateFilter.all;

    List<Long> permittedAccountIds = new ArrayList<Long>();
    Ternary<Long, Boolean, ListProjectResourcesCriteria> aclScope =
            new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
    _accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccountIds, aclScope, listAll, false);
    ListProjectResourcesCriteria projectCriteria = aclScope.third();

    // Materialize permitted account ids into Account objects.
    List<Account> permittedAccounts = new ArrayList<Account>();
    for (Long accountId : permittedAccountIds) {
        permittedAccounts.add(_accountMgr.getAccount(accountId));
    }

    // System domain routers are hidden from "self-executable"/"featured" views.
    boolean showDomr = filter != TemplateFilter.selfexecutable && filter != TemplateFilter.featured;
    HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
    return listTemplates(id, cmd.getTemplateName(), cmd.getKeyword(), filter, false, null, cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, showDomr,
            cmd.listInReadyState(), permittedAccounts, caller, projectCriteria);
}
/**
 * Core template/ISO listing shared by listTemplates and listIsos.
 * Returns a set of (templateId, zoneId) pairs.
 * <p>
 * If {@code templateId} is given, the single template is validated (format
 * must match {@code isIso}) and, when not public, a permission check against
 * the owner is performed. Otherwise the template DAO is queried; when Swift
 * storage is enabled the Swift results are merged with the regular results.
 *
 * @throws InvalidParameterValueException if the id is unknown or the format
 *         does not match the requested kind (ISO vs. template)
 */
private Set<Pair<Long, Long>> listTemplates(Long templateId, String name, String keyword, TemplateFilter templateFilter, boolean isIso, Boolean bootable, Long pageSize, Long startIndex,
Long zoneId, HypervisorType hyperType, boolean showDomr, boolean onlyReady, List<Account> permittedAccounts, Account caller, ListProjectResourcesCriteria listProjectResourcesCriteria) {
VMTemplateVO template = null;
if (templateId != null) {
template = _templateDao.findById(templateId);
if (template == null) {
throw new InvalidParameterValueException("Please specify a valid template ID.");
}// If ISO requested then it should be ISO.
if (isIso && template.getFormat() != ImageFormat.ISO) {
s_logger.error("Template Id " + templateId + " is not an ISO");
throw new InvalidParameterValueException("Template Id " + templateId + " is not an ISO");
}// If ISO not requested then it shouldn't be an ISO.
if (!isIso && template.getFormat() == ImageFormat.ISO) {
s_logger.error("Incorrect format of the template id " + templateId);
throw new InvalidParameterValueException("Incorrect format " + template.getFormat() + " of the template id " + templateId);
}
}
// Scope the search to the first permitted account's domain, or ROOT when
// no explicit accounts were permitted.
DomainVO domain = null;
if (!permittedAccounts.isEmpty()) {
domain = _domainDao.findById(permittedAccounts.get(0).getDomainId());
} else {
domain = _domainDao.findById(DomainVO.ROOT_DOMAIN);
}
// Hypervisor availability only constrains template (non-ISO) listings.
List<HypervisorType> hypers = null;
if (!isIso) {
hypers = _resourceMgr.listAvailHypervisorInZone(null, null);
}
Set<Pair<Long, Long>> templateZonePairSet = new HashSet<Pair<Long, Long>>();
if (_swiftMgr.isSwiftEnabled()) {
if (template == null) {
templateZonePairSet = _templateDao.searchSwiftTemplates(name, keyword, templateFilter, isIso, hypers, bootable, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr,
permittedAccounts, caller);
// Merge regular (non-Swift) results, skipping templates the Swift
// search already reported as zone-independent (zone id -1).
Set<Pair<Long, Long>> templateZonePairSet2 = new HashSet<Pair<Long, Long>>();
templateZonePairSet2 = _templateDao.searchTemplates(name, keyword, templateFilter, isIso, hypers, bootable, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr,
permittedAccounts, caller, listProjectResourcesCriteria);
for (Pair<Long, Long> tmpltPair : templateZonePairSet2) {
if (!templateZonePairSet.contains(new Pair<Long, Long>(tmpltPair.first(), -1L))) {
templateZonePairSet.add(tmpltPair);
}
}
} else {
// if template is not public, perform permission check here
if (!template.isPublicTemplate() && caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
Account owner = _accountMgr.getAccount(template.getAccountId());
_accountMgr.checkAccess(caller, null, true, owner);
}
templateZonePairSet.add(new Pair<Long, Long>(template.getId(), zoneId));
}
} else {
if (template == null) {
templateZonePairSet = _templateDao.searchTemplates(name, keyword, templateFilter, isIso, hypers, bootable, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr,
permittedAccounts, caller, listProjectResourcesCriteria);
} else {
// if template is not public, perform permission check here
if (!template.isPublicTemplate() && caller.getType() != Account.ACCOUNT_TYPE_ADMIN) {
Account owner = _accountMgr.getAccount(template.getAccountId());
_accountMgr.checkAccess(caller, null, true, owner);
}
templateZonePairSet.add(new Pair<Long, Long>(template.getId(), zoneId));
}
}
return templateZonePairSet;
}
/**
 * Updates an ISO's metadata. Delegates to the shared template/ISO update path.
 *
 * @param cmd the ISO update command
 * @return the updated template row
 */
@Override
public VMTemplateVO updateTemplate(UpdateIsoCmd cmd) {
return updateTemplateOrIso(cmd);
}
/**
 * Updates a template's metadata. Delegates to the shared template/ISO update path.
 *
 * @param cmd the template update command
 * @return the updated template row
 */
@Override
public VMTemplateVO updateTemplate(UpdateTemplateCmd cmd) {
return updateTemplateOrIso(cmd);
}
/**
 * Shared implementation backing both {@code updateTemplate(UpdateTemplateCmd)}
 * and {@code updateTemplate(UpdateIsoCmd)}: validates the target, performs the
 * permission check, then applies only the fields present on the command.
 *
 * @return the template row after the update (or unchanged if no field was set)
 * @throws InvalidParameterValueException if the template does not exist or is
 *         removed, is the protected system template (id 1), or an invalid
 *         format / guest OS id was supplied
 */
private VMTemplateVO updateTemplateOrIso(UpdateTemplateOrIsoCmd cmd) {
    Long id = cmd.getId();
    String name = cmd.getTemplateName();
    String displayText = cmd.getDisplayText();
    String format = cmd.getFormat();
    Long guestOSId = cmd.getOsTypeId();
    Boolean passwordEnabled = cmd.isPasswordEnabled();
    Boolean bootable = cmd.isBootable();
    Integer sortKey = cmd.getSortKey();
    Account account = UserContext.current().getCaller();

    // verify that template exists
    VMTemplateVO template = _templateDao.findById(id);
    if (template == null || template.getRemoved() != null) {
        throw new InvalidParameterValueException("unable to find template/iso with id " + id);
    }

    // Don't allow to modify the system template (id 1). FIX: compare by
    // value — '==' on boxed Longs is a reference comparison and is only
    // reliable for values inside the Long integer cache.
    if (id != null && id.longValue() == 1L) {
        throw new InvalidParameterValueException("Unable to update template/iso with id " + id);
    }

    // do a permission check
    _accountMgr.checkAccess(account, AccessType.ModifyEntry, true, template);

    // Short-circuit when the command carries no updatable fields at all.
    boolean updateNeeded = !(name == null && displayText == null && format == null && guestOSId == null && passwordEnabled == null && bootable == null && sortKey == null);
    if (!updateNeeded) {
        return template;
    }

    template = _templateDao.createForUpdate(id);

    if (name != null) {
        template.setName(name);
    }
    if (displayText != null) {
        template.setDisplayText(displayText);
    }
    if (sortKey != null) {
        template.setSortKey(sortKey);
    }

    // Format string must map to a known ImageFormat enum value (case-insensitive).
    ImageFormat imageFormat = null;
    if (format != null) {
        try {
            imageFormat = ImageFormat.valueOf(format.toUpperCase());
        } catch (IllegalArgumentException e) {
            throw new InvalidParameterValueException("Image format: " + format + " is incorrect. Supported formats are " + EnumUtils.listValues(ImageFormat.values()));
        }
        template.setFormat(imageFormat);
    }

    if (guestOSId != null) {
        GuestOSVO guestOS = _guestOSDao.findById(guestOSId);
        if (guestOS == null) {
            throw new InvalidParameterValueException("Please specify a valid guest OS ID.");
        } else {
            template.setGuestOSId(guestOSId);
        }
    }

    if (passwordEnabled != null) {
        template.setEnablePassword(passwordEnabled);
    }
    if (bootable != null) {
        template.setBootable(bootable);
    }

    _templateDao.update(id, template);

    // Re-read so the caller sees the persisted state.
    return _templateDao.findById(id);
}
/**
 * Lists events visible to the caller, filtered by id, type, level, date
 * range, or keyword, scoped by the caller's ACL (accounts / domain tree /
 * project criteria).
 * <p>
 * When both {@code entryTime} and {@code duration} are supplied, the method
 * instead returns "pending" events: completed-state start events created
 * between (now - entryTime) and (now - duration) that have no matching
 * completion event.
 */
@Override
public List<EventVO> searchForEvents(ListEventsCmd cmd) {
Account caller = UserContext.current().getCaller();
List<Long> permittedAccounts = new ArrayList<Long>();
Long id = cmd.getId();
String type = cmd.getType();
String level = cmd.getLevel();
Date startDate = cmd.getStartDate();
Date endDate = cmd.getEndDate();
String keyword = cmd.getKeyword();
Integer entryTime = cmd.getEntryTime();
Integer duration = cmd.getDuration();
// Resolve the caller's ACL scope: permitted accounts, domain id,
// recursive flag and project-resource criteria.
Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
_accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
Long domainId = domainIdRecursiveListProject.first();
Boolean isRecursive = domainIdRecursiveListProject.second();
ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
// Newest events first.
Filter searchFilter = new Filter(EventVO.class, "createDate", false, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchBuilder<EventVO> sb = _eventDao.createSearchBuilder();
sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
if (((permittedAccounts.isEmpty()) && (domainId != null) && isRecursive)) {
// if accountId isn't specified, we can do a domain match for the admin case if isRecursive is true
SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
}
// Join against accounts to include only (or exclude) project-owned events.
if (listProjectResourcesCriteria != null) {
SearchBuilder<AccountVO> accountSearch = _accountDao.createSearchBuilder();
if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.ListProjectResourcesOnly) {
accountSearch.and("accountType", accountSearch.entity().getType(), SearchCriteria.Op.EQ);
sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
} else if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.SkipProjectResources) {
accountSearch.and("accountType", accountSearch.entity().getType(), SearchCriteria.Op.NEQ);
sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
}
}
// Declare all optional filter conditions up front; only the ones that get
// parameters bound below take effect.
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("levelL", sb.entity().getLevel(), SearchCriteria.Op.LIKE);
sb.and("levelEQ", sb.entity().getLevel(), SearchCriteria.Op.EQ);
sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
sb.and("createDateB", sb.entity().getCreateDate(), SearchCriteria.Op.BETWEEN);
sb.and("createDateG", sb.entity().getCreateDate(), SearchCriteria.Op.GTEQ);
sb.and("createDateL", sb.entity().getCreateDate(), SearchCriteria.Op.LTEQ);
sb.and("state", sb.entity().getState(), SearchCriteria.Op.NEQ);
sb.and("startId", sb.entity().getStartId(), SearchCriteria.Op.EQ);
sb.and("createDate", sb.entity().getCreateDate(), SearchCriteria.Op.BETWEEN);
SearchCriteria<EventVO> sc = sb.create();
if (listProjectResourcesCriteria != null) {
sc.setJoinParameters("accountSearch", "accountType", Account.ACCOUNT_TYPE_PROJECT);
}
if (!permittedAccounts.isEmpty()) {
sc.setParameters("accountIdIN", permittedAccounts.toArray());
} else if (domainId != null) {
DomainVO domain = _domainDao.findById(domainId);
if (isRecursive) {
// Path prefix match covers the whole domain subtree.
sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
} else {
sc.setParameters("domainId", domainId);
}
}
if (id != null) {
sc.setParameters("id", id);
}
if (keyword != null) {
SearchCriteria<EventVO> ssc = _eventDao.createSearchCriteria();
ssc.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("level", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("level", SearchCriteria.Op.SC, ssc);
}
if (level != null) {
sc.setParameters("levelEQ", level);
}
if (type != null) {
sc.setParameters("type", type);
}
if (startDate != null && endDate != null) {
sc.setParameters("createDateB", startDate, endDate);
} else if (startDate != null) {
sc.setParameters("createDateG", startDate);
} else if (endDate != null) {
sc.setParameters("createDateL", endDate);
}
if ((entryTime != null) && (duration != null)) {
if (entryTime <= duration) {
throw new InvalidParameterValueException("Entry time must be greater than duration");
}
// Window: events created between (now - entryTime) and (now - duration).
Calendar calMin = Calendar.getInstance();
Calendar calMax = Calendar.getInstance();
calMin.add(Calendar.SECOND, -entryTime);
calMax.add(Calendar.SECOND, -duration);
Date minTime = calMin.getTime();
Date maxTime = calMax.getTime();
sc.setParameters("state", com.cloud.event.Event.State.Completed);
sc.setParameters("startId", 0);
sc.setParameters("createDate", minTime, maxTime);
List<EventVO> startedEvents = _eventDao.searchAllEvents(sc, searchFilter);
// Keep only started events with no corresponding completion event.
List<EventVO> pendingEvents = new ArrayList<EventVO>();
for (EventVO event : startedEvents) {
EventVO completedEvent = _eventDao.findCompletedEvent(event.getId());
if (completedEvent == null) {
pendingEvents.add(event);
}
}
return pendingEvents;
} else {
return _eventDao.searchAllEvents(sc, searchFilter);
}
}
/**
 * Lists domain routers visible to the caller, filtered by id, name, state,
 * zone, pod, host, network, or a fuzzy keyword on hostName/instanceName/state.
 * ACL scoping (accounts / domain tree / project criteria) is applied via the
 * account manager helpers.
 */
@Override
public List<DomainRouterVO> searchForRouters(ListRoutersCmd cmd) {
Long id = cmd.getId();
String name = cmd.getRouterName();
String state = cmd.getState();
Long zone = cmd.getZoneId();
Long pod = cmd.getPodId();
Long hostId = cmd.getHostId();
String keyword = cmd.getKeyword();
Long networkId = cmd.getNetworkId();
Account caller = UserContext.current().getCaller();
List<Long> permittedAccounts = new ArrayList<Long>();
// Resolve the caller's ACL scope.
Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
_accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
Long domainId = domainIdRecursiveListProject.first();
Boolean isRecursive = domainIdRecursiveListProject.second();
ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
Filter searchFilter = new Filter(DomainRouterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchBuilder<DomainRouterVO> sb = _routerDao.createSearchBuilder();
_accountMgr.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
sb.and("name", sb.entity().getHostName(), SearchCriteria.Op.LIKE);
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.IN);
sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
sb.and("dataCenterId", sb.entity().getDataCenterIdToDeployIn(), SearchCriteria.Op.EQ);
sb.and("podId", sb.entity().getPodIdToDeployIn(), SearchCriteria.Op.EQ);
sb.and("hostId", sb.entity().getHostId(), SearchCriteria.Op.EQ);
// Network filter requires joining through the router's NICs.
if (networkId != null) {
SearchBuilder<NicVO> nicSearch = _nicDao.createSearchBuilder();
nicSearch.and("networkId", nicSearch.entity().getNetworkId(), SearchCriteria.Op.EQ);
SearchBuilder<NetworkVO> networkSearch = _networkDao.createSearchBuilder();
networkSearch.and("networkId", networkSearch.entity().getId(), SearchCriteria.Op.EQ);
nicSearch.join("networkSearch", networkSearch, nicSearch.entity().getNetworkId(), networkSearch.entity().getId(), JoinBuilder.JoinType.INNER);
sb.join("nicSearch", nicSearch, sb.entity().getId(), nicSearch.entity().getInstanceId(), JoinBuilder.JoinType.INNER);
}
SearchCriteria<DomainRouterVO> sc = sb.create();
_accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
if (keyword != null) {
SearchCriteria<DomainRouterVO> ssc = _routerDao.createSearchCriteria();
ssc.addOr("hostName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("instanceName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("hostName", SearchCriteria.Op.SC, ssc);
}
if (name != null) {
sc.setParameters("name", "%" + name + "%");
}
if (id != null) {
sc.setParameters("id", id);
}
if (state != null) {
sc.setParameters("state", state);
}
if (zone != null) {
sc.setParameters("dataCenterId", zone);
}
if (pod != null) {
sc.setParameters("podId", pod);
}
if (hostId != null) {
sc.setParameters("hostId", hostId);
}
if (networkId != null) {
sc.setJoinParameters("nicSearch", "networkId", networkId);
}
return _routerDao.search(sc, searchFilter);
}
/**
 * Lists public IP addresses visible to the caller, filtered by zone, address,
 * VLAN, network association, source/static NAT flags, physical network, or a
 * fuzzy keyword on the address. Defaults to allocated addresses of the
 * VirtualNetwork VLAN type, and for that combination hides addresses with no
 * associated network (i.e. SSVM/CPVM system IPs).
 */
@Override
public List<IPAddressVO> searchForIPAddresses(ListPublicIpAddressesCmd cmd) {
Object keyword = cmd.getKeyword();
Long physicalNetworkId = cmd.getPhysicalNetworkId();
Long associatedNetworkId = cmd.getAssociatedNetworkId();
Long zone = cmd.getZoneId();
String address = cmd.getIpAddress();
Long vlan = cmd.getVlanId();
Boolean forVirtualNetwork = cmd.isForVirtualNetwork();
Boolean forLoadBalancing = cmd.isForLoadBalancing();
Long ipId = cmd.getId();
Boolean sourceNat = cmd.getIsSourceNat();
Boolean staticNat = cmd.getIsStaticNat();
Account caller = UserContext.current().getCaller();
List<Long> permittedAccounts = new ArrayList<Long>();
// Default to listing only allocated addresses.
Boolean isAllocated = cmd.isAllocatedOnly();
if (isAllocated == null) {
isAllocated = Boolean.TRUE;
}
// Resolve the caller's ACL scope.
Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
_accountMgr.buildACLSearchParameters(caller, cmd.getId(), cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
Long domainId = domainIdRecursiveListProject.first();
Boolean isRecursive = domainIdRecursiveListProject.second();
ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
Filter searchFilter = new Filter(IPAddressVO.class, "address", false, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchBuilder<IPAddressVO> sb = _publicIpAddressDao.createSearchBuilder();
_accountMgr.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
sb.and("address", sb.entity().getAddress(), SearchCriteria.Op.EQ);
sb.and("vlanDbId", sb.entity().getVlanId(), SearchCriteria.Op.EQ);
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("physicalNetworkId", sb.entity().getPhysicalNetworkId(), SearchCriteria.Op.EQ);
sb.and("associatedNetworkIdEq", sb.entity().getAssociatedWithNetworkId(), SearchCriteria.Op.EQ);
sb.and("isSourceNat", sb.entity().isSourceNat(), SearchCriteria.Op.EQ);
sb.and("isStaticNat", sb.entity().isOneToOneNat(), SearchCriteria.Op.EQ);
// Restrict to addresses used by a load balancer via an inner join.
if (forLoadBalancing != null && (Boolean) forLoadBalancing) {
SearchBuilder<LoadBalancerVO> lbSearch = _loadbalancerDao.createSearchBuilder();
sb.join("lbSearch", lbSearch, sb.entity().getId(), lbSearch.entity().getSourceIpAddressId(), JoinType.INNER);
sb.groupBy(sb.entity().getId());
}
// Keyword only applies when no exact address was requested.
if (keyword != null && address == null) {
sb.and("addressLIKE", sb.entity().getAddress(), SearchCriteria.Op.LIKE);
}
// Always join the VLAN table to filter by VLAN type below.
SearchBuilder<VlanVO> vlanSearch = _vlanDao.createSearchBuilder();
vlanSearch.and("vlanType", vlanSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
sb.join("vlanSearch", vlanSearch, sb.entity().getVlanId(), vlanSearch.entity().getId(), JoinBuilder.JoinType.INNER);
boolean allocatedOnly = false;
if ((isAllocated != null) && (isAllocated == true)) {
sb.and("allocated", sb.entity().getAllocatedTime(), SearchCriteria.Op.NNULL);
allocatedOnly = true;
}
VlanType vlanType = null;
if (forVirtualNetwork != null) {
vlanType = (Boolean) forVirtualNetwork ? VlanType.VirtualNetwork : VlanType.DirectAttached;
} else {
vlanType = VlanType.VirtualNetwork;
}
// don't show SSVM/CPVM ips
if (vlanType == VlanType.VirtualNetwork && (allocatedOnly)) {
sb.and("associatedNetworkId", sb.entity().getAssociatedWithNetworkId(), SearchCriteria.Op.NNULL);
}
SearchCriteria<IPAddressVO> sc = sb.create();
_accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
sc.setJoinParameters("vlanSearch", "vlanType", vlanType);
if (zone != null) {
sc.setParameters("dataCenterId", zone);
}
if (ipId != null) {
sc.setParameters("id", ipId);
}
if (sourceNat != null) {
sc.setParameters("isSourceNat", sourceNat);
}
if (staticNat != null) {
sc.setParameters("isStaticNat", staticNat);
}
if (address == null && keyword != null) {
sc.setParameters("addressLIKE", "%" + keyword + "%");
}
if (address != null) {
sc.setParameters("address", address);
}
if (vlan != null) {
sc.setParameters("vlanDbId", vlan);
}
if (physicalNetworkId != null) {
sc.setParameters("physicalNetworkId", physicalNetworkId);
}
if (associatedNetworkId != null) {
sc.setParameters("associatedNetworkIdEq", associatedNetworkId);
}
return _publicIpAddressDao.search(sc, searchFilter);
}
/**
 * Lists guest OS entries, optionally filtered by id and/or OS category id.
 * Results are paged and ordered by display name ascending.
 */
@Override
public List<GuestOSVO> listGuestOSByCriteria(ListGuestOsCmd cmd) {
    Filter pageFilter = new Filter(GuestOSVO.class, "displayName", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Long id = cmd.getId();
    Long osCategoryId = cmd.getOsCategoryId();

    SearchBuilder<GuestOSVO> builder = _guestOSDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);
    builder.and("categoryId", builder.entity().getCategoryId(), SearchCriteria.Op.EQ);

    SearchCriteria<GuestOSVO> criteria = builder.create();
    if (id != null) {
        criteria.setParameters("id", id);
    }
    if (osCategoryId != null) {
        criteria.setParameters("categoryId", osCategoryId);
    }

    return _guestOSDao.search(criteria, pageFilter);
}
/**
 * Lists guest OS categories, optionally filtered by id.
 * Results are paged and ordered by id ascending.
 */
@Override
public List<GuestOSCategoryVO> listGuestOSCategoriesByCriteria(ListGuestOsCategoriesCmd cmd) {
    Filter pageFilter = new Filter(GuestOSCategoryVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Long id = cmd.getId();

    SearchBuilder<GuestOSCategoryVO> builder = _guestOSCategoryDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);

    SearchCriteria<GuestOSCategoryVO> criteria = builder.create();
    if (id != null) {
        criteria.setParameters("id", id);
    }

    return _guestOSCategoryDao.search(criteria, pageFilter);
}
/**
 * Assigns (or reuses) a console proxy for the given user VM in the zone.
 * Delegates to the console proxy manager.
 */
@Override
public ConsoleProxyInfo getConsoleProxyForVm(long dataCenterId, long userVmId) {
return _consoleProxyMgr.assignProxy(dataCenterId, userVmId);
}
/**
 * Starts the console proxy VM with the given instance id.
 * Wrapped in an async action event for auditing.
 */
@ActionEvent(eventType = EventTypes.EVENT_PROXY_START, eventDescription = "starting console proxy Vm", async = true)
private ConsoleProxyVO startConsoleProxy(long instanceId) {
return _consoleProxyMgr.startProxy(instanceId);
}
/**
 * Stops the given console proxy system VM on behalf of the current caller.
 *
 * @param systemVm the console proxy instance to stop
 * @param isForced whether to force the stop
 * @return the proxy row when the stop succeeded, {@code null} otherwise
 */
@ActionEvent(eventType = EventTypes.EVENT_PROXY_STOP, eventDescription = "stopping console proxy Vm", async = true)
private ConsoleProxyVO stopConsoleProxy(VMInstanceVO systemVm, boolean isForced) throws ResourceUnavailableException, OperationTimedoutException, ConcurrentOperationException {
User caller = _userDao.findById(UserContext.current().getCallerUserId());
if (_itMgr.advanceStop(systemVm, isForced, caller, UserContext.current().getCaller())) {
return _consoleProxyDao.findById(systemVm.getId());
}
return null;
}
/**
 * Reboots the console proxy VM with the given instance id and returns its
 * (re-read) database row.
 */
@ActionEvent(eventType = EventTypes.EVENT_PROXY_REBOOT, eventDescription = "rebooting console proxy Vm", async = true)
private ConsoleProxyVO rebootConsoleProxy(long instanceId) {
_consoleProxyMgr.rebootProxy(instanceId);
return _consoleProxyDao.findById(instanceId);
}
/**
 * Destroys the console proxy VM with the given instance id.
 *
 * @return the proxy's database row (captured before destruction) on success,
 *         {@code null} if the destroy failed
 */
@ActionEvent(eventType = EventTypes.EVENT_PROXY_DESTROY, eventDescription = "destroying console proxy Vm", async = true)
public ConsoleProxyVO destroyConsoleProxy(long instanceId) {
    // Capture the record first so it can still be returned afterwards.
    ConsoleProxyVO destroyed = _consoleProxyDao.findById(instanceId);
    if (!_consoleProxyMgr.destroyProxy(instanceId)) {
        return null;
    }
    return destroyed;
}
/**
 * Returns the console proxy image URL root for the given VM, or {@code null}
 * when the VM does not exist or no proxy could be assigned.
 */
@Override
public String getConsoleAccessUrlRoot(long vmId) {
    // Guard-clause style: bail out as soon as a lookup fails.
    VMInstanceVO vm = _vmInstanceDao.findById(vmId);
    if (vm == null) {
        return null;
    }
    ConsoleProxyInfo proxy = getConsoleProxyForVm(vm.getDataCenterIdToDeployIn(), vmId);
    return proxy == null ? null : proxy.getProxyImageUrl();
}
/**
 * Asks the agent on the VM's host for the VNC console address and port.
 * Returns (null, -1) when the VM is not placed on a host or the agent call
 * fails or reports a negative result.
 */
@Override
public Pair<String, Integer> getVncPort(VirtualMachine vm) {
    if (vm.getHostId() == null) {
        // A VM that is not placed on any host cannot have a VNC port.
        s_logger.warn("VM " + vm.getHostName() + " does not have host, return -1 for its VNC port");
        return new Pair<String, Integer>(null, -1);
    }
    if (s_logger.isTraceEnabled()) {
        s_logger.trace("Trying to retrieve VNC port from agent about VM " + vm.getHostName());
    }
    final GetVncPortCommand command = new GetVncPortCommand(vm.getId(), vm.getInstanceName());
    final GetVncPortAnswer answer = (GetVncPortAnswer) _agentMgr.easySend(vm.getHostId(), command);
    if (answer == null || !answer.getResult()) {
        return new Pair<String, Integer>(null, -1);
    }
    return new Pair<String, Integer>(answer.getAddress(), answer.getPort());
}
/**
 * Renames a domain and/or updates its network domain.
 *
 * Validation: the domain must exist, the ROOT domain (parent == null) cannot
 * be renamed, the caller must have access to the domain, a new name must not
 * collide with another existing domain (names are unique cloud-wide), and the
 * network domain must pass DNS-label validation.
 *
 * @param cmd carries the domain id, optional new name, optional network domain
 * @return the freshly re-read domain row after the update
 * @throws InvalidParameterValueException on missing domain, ROOT rename,
 *         duplicate name, or malformed network domain
 */
@Override
@ActionEvent(eventType = EventTypes.EVENT_DOMAIN_UPDATE, eventDescription = "updating Domain")
@DB
public DomainVO updateDomain(UpdateDomainCmd cmd) {
Long domainId = cmd.getId();
String domainName = cmd.getDomainName();
String networkDomain = cmd.getNetworkDomain();
// check if domain exists in the system
DomainVO domain = _domainDao.findById(domainId);
if (domain == null) {
throw new InvalidParameterValueException("Unable to find domain " + domainId);
} else if (domain.getParent() == null && domainName != null) {
// check if domain is ROOT domain - and deny to edit it with the new name
throw new InvalidParameterValueException("ROOT domain can not be edited with a new name");
}
// check permissions
Account caller = UserContext.current().getCaller();
_accountMgr.checkAccess(caller, domain);
// domain name is unique in the cloud
if (domainName != null) {
SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
sc.addAnd("name", SearchCriteria.Op.EQ, domainName);
List<DomainVO> domains = _domainDao.search(sc, null);
// a single hit that is this very domain means the name is unchanged - allowed
boolean sameDomain = (domains.size() == 1 && domains.get(0).getId() == domainId);
if (!domains.isEmpty() && !sameDomain) {
InvalidParameterValueException ex = new InvalidParameterValueException("Failed to update specified domain id with name '" + domainName + "' since it already exists in the system");
// Get the domainVO object's table name.
String tablename = AnnotationHelper.getTableName(domain);
if (tablename != null) {
ex.addProxyObject(tablename, domainId, "domainId");
} else {
s_logger.info("\nCould not retrieve table name (annotation) from domainVO proxy cglib object\n");
}
throw ex;
}
}
// validate network domain
if (networkDomain != null && !networkDomain.isEmpty()) {
if (!NetUtils.verifyDomainName(networkDomain)) {
throw new InvalidParameterValueException(
"Invalid network domain. Total length shouldn't exceed 190 chars. Each domain label must be between 1 and 63 characters long, can contain ASCII letters 'a' through 'z', the digits '0' through '9', "
+ "and the hyphen ('-'); can't start or end with \"-\"");
}
}
// rename and child-path rewrite happen inside one transaction so the tree stays consistent
Transaction txn = Transaction.currentTxn();
txn.start();
if (domainName != null) {
// rewrite descendants' paths BEFORE this row's path changes: the rewrite
// uses the current (old) path as the prefix to replace
String updatedDomainPath = getUpdatedDomainPath(domain.getPath(), domainName);
updateDomainChildren(domain, updatedDomainPath);
domain.setName(domainName);
domain.setPath(updatedDomainPath);
}
if (networkDomain != null) {
// an explicitly empty string clears the stored network domain
if (networkDomain.isEmpty()) {
domain.setNetworkDomain(null);
} else {
domain.setNetworkDomain(networkDomain);
}
}
_domainDao.update(domainId, domain);
txn.commit();
return _domainDao.findById(domainId);
}
/**
 * Replaces the last component of a domain path with the new domain name,
 * keeping the trailing-slash convention (e.g. "/a/b/" + "c" -> "/a/c/").
 */
private String getUpdatedDomainPath(String oldPath, String newName) {
    final String[] components = oldPath.split("/");
    components[components.length - 1] = newName;
    final StringBuilder rebuilt = new StringBuilder();
    for (int i = 0; i < components.length; i++) {
        rebuilt.append(components[i]).append("/");
    }
    return rebuilt.toString();
}
/**
 * Rewrites the stored path of every descendant of the given domain so that it
 * starts with the updated prefix instead of the current one.
 */
private void updateDomainChildren(DomainVO domain, String updatedDomainPrefix) {
    final String oldPrefix = domain.getPath();
    for (DomainVO child : _domainDao.findAllChildren(oldPrefix, domain.getId())) {
        child.setPath(child.getPath().replaceFirst(oldPrefix, updatedDomainPrefix));
        _domainDao.update(child.getId(), child);
    }
}
/**
 * Lists alerts, newest first (ordered by lastSent descending), optionally
 * filtered by id, type, zone authority and a subject keyword.
 */
@Override
public List<? extends Alert> searchForAlerts(ListAlertsCmd cmd) {
    final Filter pageFilter = new Filter(AlertVO.class, "lastSent", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    final SearchCriteria<AlertVO> criteria = _alertDao.createSearchCriteria();

    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), null);
    final Object id = cmd.getId();
    final Object type = cmd.getType();
    final Object keyword = cmd.getKeyword();

    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (zoneId != null) {
        criteria.addAnd("data_center_id", SearchCriteria.Op.EQ, zoneId);
    }
    if (keyword != null) {
        // keyword matches against the alert subject
        final SearchCriteria<AlertVO> keywordCriteria = _alertDao.createSearchCriteria();
        keywordCriteria.addOr("subject", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("subject", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (type != null) {
        criteria.addAnd("type", SearchCriteria.Op.EQ, type);
    }
    return _alertDao.search(criteria, pageFilter);
}
/**
 * Returns capacity entries aggregated at zone, pod and cluster level (plus
 * live secondary-storage usage), sorted by percent-used descending and
 * truncated to a single page.
 *
 * @param cmd optional filters: capacity type, zone, pod; clusterId is rejected
 * @return one page of CapacityVO entries, most-consumed first
 * @throws InvalidParameterValueException when clusterId is supplied or the
 *         page size does not fit in an Integer
 */
@Override
public List<CapacityVO> listTopConsumedResources(ListCapacityCmd cmd) {
Integer capacityType = cmd.getType();
Long zoneId = cmd.getZoneId();
Long podId = cmd.getPodId();
Long clusterId = cmd.getClusterId();
// cluster-level filtering is explicitly unsupported by this API
if (clusterId != null) {
throw new InvalidParameterValueException("Currently clusterId param is not suppoerted");
}
zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), zoneId);
List<SummedCapacity> summedCapacities = new ArrayList<SummedCapacity>();
if (zoneId == null && podId == null) {// Group by Zone, capacity type
List<SummedCapacity> summedCapacitiesAtZone = _capacityDao.listCapacitiesGroupedByLevelAndType(capacityType, zoneId, podId, clusterId, 1, cmd.getPageSizeVal());
if (summedCapacitiesAtZone != null) {
summedCapacities.addAll(summedCapacitiesAtZone);
}
}
if (podId == null) {// Group by Pod, capacity type
List<SummedCapacity> summedCapacitiesAtPod = _capacityDao.listCapacitiesGroupedByLevelAndType(capacityType, zoneId, podId, clusterId, 2, cmd.getPageSizeVal());
if (summedCapacitiesAtPod != null) {
summedCapacities.addAll(summedCapacitiesAtPod);
}
// secondary storage usage is computed live rather than read from the capacity table
List<SummedCapacity> summedCapacitiesForSecStorage = getSecStorageUsed(zoneId, capacityType);
if (summedCapacitiesForSecStorage != null) {
summedCapacities.addAll(summedCapacitiesForSecStorage);
}
}
// Group by Cluster, capacity type
List<SummedCapacity> summedCapacitiesAtCluster = _capacityDao.listCapacitiesGroupedByLevelAndType(capacityType, zoneId, podId, clusterId, 3, cmd.getPageSizeVal());
if (summedCapacitiesAtCluster != null) {
summedCapacities.addAll(summedCapacitiesAtCluster);
}
// Sort Capacities
// Descending order of percent used.
// NOTE(review): if getPercentUsed() returns a boxed Float the '==' below
// compares object identity, not value - confirm it is a primitive float.
Collections.sort(summedCapacities, new Comparator<SummedCapacity>() {
@Override
public int compare(SummedCapacity arg0, SummedCapacity arg1) {
if (arg0.getPercentUsed() < arg1.getPercentUsed()) {
return 1;
} else if (arg0.getPercentUsed() == arg1.getPercentUsed()) {
return 0;
}
return -1;
}
});
List<CapacityVO> capacities = new ArrayList<CapacityVO>();
Integer pageSize = null;
try {
// the page size arrives as a Long; NumberFormatException (a subclass of
// IllegalArgumentException) is thrown when it exceeds Integer range
pageSize = Integer.valueOf(cmd.getPageSizeVal().toString());
} catch (IllegalArgumentException e) {
throw new InvalidParameterValueException("pageSize " + cmd.getPageSizeVal() + " is out of Integer range is not supported for this call");
}
// keep only the first page of the sorted result
summedCapacities = summedCapacities.subList(0, summedCapacities.size() < cmd.getPageSizeVal() ? summedCapacities.size() : pageSize);
for (SummedCapacity summedCapacity : summedCapacities) {
CapacityVO capacity = new CapacityVO(summedCapacity.getDataCenterId(), summedCapacity.getPodId(), summedCapacity.getClusterId(),
summedCapacity.getCapacityType(), summedCapacity.getPercentUsed());
capacity.setUsedCapacity(summedCapacity.getUsedCapacity());
capacity.setTotalCapacity(summedCapacity.getTotalCapacity());
capacities.add(capacity);
}
return capacities;
}
/**
 * Computes secondary-storage usage, either for one specific zone or for every
 * enabled zone.
 *
 * Returns null when a non-null capacityType other than secondary storage is
 * requested, or when the requested zone is missing or disabled.
 *
 * Bug fix: the single-zone branch previously computed the used percentage
 * with integer division (used / total truncates to 0 for any usage below
 * 100%); it now casts to float first, matching the all-zones branch.
 */
List<SummedCapacity> getSecStorageUsed(Long zoneId, Integer capacityType) {
    if (capacityType == null || capacityType == Capacity.CAPACITY_TYPE_SECONDARY_STORAGE) {
        List<SummedCapacity> list = new ArrayList<SummedCapacity>();
        if (zoneId != null) {
            DataCenterVO zone = ApiDBUtils.findZoneById(zoneId);
            if (zone == null || zone.getAllocationState() == AllocationState.Disabled) {
                return null;
            }
            CapacityVO capacity = _storageMgr.getSecondaryStorageUsedStats(null, zoneId);
            if (capacity.getTotalCapacity() != 0) {
                // cast to float so the division is not truncated to an integer
                capacity.setUsedPercentage((float) capacity.getUsedCapacity() / capacity.getTotalCapacity());
            } else {
                capacity.setUsedPercentage(0);
            }
            SummedCapacity summedCapacity = new SummedCapacity(capacity.getUsedCapacity(), capacity.getTotalCapacity(), capacity.getUsedPercentage(), capacity.getCapacityType(), capacity.getDataCenterId(),
                    capacity.getPodId(), capacity.getClusterId());
            list.add(summedCapacity);
        } else {
            // aggregate live stats across every enabled zone
            List<DataCenterVO> dcList = _dcDao.listEnabledZones();
            for (DataCenterVO dc : dcList) {
                CapacityVO capacity = _storageMgr.getSecondaryStorageUsedStats(null, dc.getId());
                if (capacity.getTotalCapacity() != 0) {
                    capacity.setUsedPercentage((float) capacity.getUsedCapacity() / capacity.getTotalCapacity());
                } else {
                    capacity.setUsedPercentage(0);
                }
                SummedCapacity summedCapacity = new SummedCapacity(capacity.getUsedCapacity(), capacity.getTotalCapacity(), capacity.getUsedPercentage(), capacity.getCapacityType(), capacity.getDataCenterId(),
                        capacity.getPodId(), capacity.getClusterId());
                list.add(summedCapacity);
            }// End of for
        }
        return list;
    }
    return null;
}
/**
 * Lists allocated capacity, optionally filtered by type/zone/pod/cluster.
 * The op_host_capacity table holds allocated stats only; primary-storage and
 * secondary-storage usage are computed live and appended afterwards.
 * fetchLatest triggers a full capacity recalculation first.
 */
@Override
public List<CapacityVO> listCapacities(ListCapacityCmd cmd) {
Integer capacityType = cmd.getType();
Long zoneId = cmd.getZoneId();
Long podId = cmd.getPodId();
Long clusterId = cmd.getClusterId();
Boolean fetchLatest = cmd.getFetchLatest();
zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), zoneId);
if (fetchLatest != null && fetchLatest) {
_alertMgr.recalculateCapacity();
}
List<SummedCapacity> summedCapacities = _capacityDao.findCapacityBy(capacityType, zoneId, podId, clusterId);
List<CapacityVO> capacities = new ArrayList<CapacityVO>();
for (SummedCapacity summedCapacity : summedCapacities) {
CapacityVO capacity = new CapacityVO(null, summedCapacity.getDataCenterId(), podId, clusterId,
summedCapacity.getUsedCapacity() + summedCapacity.getReservedCapacity(),
summedCapacity.getTotalCapacity(), summedCapacity.getCapacityType());
// CPU totals are scaled by the configured over-provisioning factor
if (summedCapacity.getCapacityType() == Capacity.CAPACITY_TYPE_CPU) {
capacity.setTotalCapacity((long) (summedCapacity.getTotalCapacity() * ApiDBUtils.getCpuOverprovisioningFactor()));
}
capacities.add(capacity);
}
// op_host_Capacity contains only allocated stats and the real time stats are stored "in memory".
// Show Sec. Storage only when the api is invoked for the zone layer.
List<DataCenterVO> dcList = new ArrayList<DataCenterVO>();
if (zoneId == null && podId == null && clusterId == null) {
dcList = ApiDBUtils.listZones();
} else if (zoneId != null) {
dcList.add(ApiDBUtils.findZoneById(zoneId));
} else {
// pod/cluster scoped call: only primary storage stats are appended
if (capacityType == null || capacityType == Capacity.CAPACITY_TYPE_STORAGE) {
capacities.add(_storageMgr.getStoragePoolUsedStats(null, clusterId, podId, zoneId));
}
}
for (DataCenterVO zone : dcList) {
zoneId = zone.getId();
if ((capacityType == null || capacityType == Capacity.CAPACITY_TYPE_SECONDARY_STORAGE) && podId == null && clusterId == null) {
capacities.add(_storageMgr.getSecondaryStorageUsedStats(null, zoneId));
}
if (capacityType == null || capacityType == Capacity.CAPACITY_TYPE_STORAGE) {
capacities.add(_storageMgr.getStoragePoolUsedStats(null, clusterId, podId, zoneId));
}
}
return capacities;
}
/**
 * Returns the allocated (reserved + used) capacity of the given type on the
 * given host, or 0 when no capacity row exists for that host/type.
 */
@Override
public long getMemoryOrCpuCapacityByHost(Long hostId, short capacityType) {
    final CapacityVO capacity = _capacityDao.findByHostIdType(hostId, capacityType);
    if (capacity == null) {
        return 0;
    }
    return capacity.getReservedCapacity() + capacity.getUsedCapacity();
}
/**
 * Returns true when the account type is any administrator flavor:
 * full admin, resource-domain admin, domain admin, or read-only admin.
 */
public static boolean isAdmin(short accountType) {
return ((accountType == Account.ACCOUNT_TYPE_ADMIN) || (accountType == Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN) || (accountType == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) || (accountType == Account.ACCOUNT_TYPE_READ_ONLY_ADMIN));
}
/**
 * Lists disk offerings visible to a non-root account: offerings of the
 * account's own domain plus every ancestor domain up to ROOT, walking the
 * domain tree one level per loop iteration.
 *
 * Public offerings are appended at the end ONLY when no name/id/keyword
 * filter was supplied (any filter flips includePublicOfferings to false).
 *
 * @throws CloudAuthenticationException when the account's domain cannot be resolved
 */
private List<DiskOfferingVO> searchDiskOfferingsInternal(Account account, Object name, Object id, Object keyword, Filter searchFilter) {
// it was decided to return all offerings for the user's domain, and everything above till root (for normal user
// or
// domain admin)
// list all offerings belonging to this domain, and all of its parents
// check the parent, if not null, add offerings for that parent to list
List<DiskOfferingVO> dol = new ArrayList<DiskOfferingVO>();
DomainVO domainRecord = _domainDao.findById(account.getDomainId());
boolean includePublicOfferings = true;
if (domainRecord != null) {
// walk from the account's domain up to ROOT, collecting offerings per level
while (true) {
SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NULL);
SearchCriteria<DiskOfferingVO> sc = sb.create();
if (keyword != null) {
includePublicOfferings = false;
SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (name != null) {
includePublicOfferings = false;
sc.setParameters("name", "%" + name + "%");
}
if (id != null) {
includePublicOfferings = false;
sc.setParameters("id", id);
}
// for this domain
sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
// search and add for this domain
dol.addAll(_diskOfferingDao.search(sc, searchFilter));
// try and move on to the next domain
if (domainRecord.getParent() != null) {
domainRecord = _domainDao.findById(domainRecord.getParent());
} else {
break;// now we got all the offerings for this user/dom adm
}
}
} else {
s_logger.error("Could not find the domainId for account:" + account.getAccountName());
throw new CloudAuthenticationException("Could not find the domainId for account:" + account.getAccountName());
}
// add all the public offerings to the sol list before returning
if (includePublicOfferings) {
dol.addAll(_diskOfferingDao.findPublicDiskOfferings());
}
return dol;
}
/**
 * Lists disk offerings visible to the caller.
 *
 * Visibility rules:
 *  - explicit domainId: root admin sees that domain's offerings unchecked;
 *    others only when their own domain is in the same hierarchy
 *  - normal users / (resource-)domain admins: offerings of their domain and
 *    all ancestors (see searchDiskOfferingsInternal)
 *  - root admin without domainId: plain filtered search
 *
 * Bug fix: Boolean.parseBoolean never returns null, so the old null-check
 * meant to default the sort order to ascending was dead code — an unset
 * "sortkey.algorithm" config entry silently sorted descending. The default
 * is now applied on the raw config value before parsing.
 */
@Override
public List<DiskOfferingVO> searchForDiskOfferings(ListDiskOfferingsCmd cmd) {
    // Note
    // The list method for offerings is being modified in accordance with discussion with Will/Kevin
    // For now, we will be listing the following based on the usertype
    // 1. For root, we will list all offerings
    // 2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way
    // till
    // root
    String sortConfig = _configDao.getValue("sortkey.algorithm");
    // default to ascending when the config entry is absent
    Boolean isAscending = (sortConfig == null) ? Boolean.TRUE : Boolean.parseBoolean(sortConfig);
    Filter searchFilter = new Filter(DiskOfferingVO.class, "sortKey", isAscending, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
    // SearchBuilder and SearchCriteria are now flexible so that the search builder can be built with all possible
    // search terms and only those with criteria can be set. The proper SQL should be generated as a result.
    Account account = UserContext.current().getCaller();
    Object name = cmd.getDiskOfferingName();
    Object id = cmd.getId();
    Object keyword = cmd.getKeyword();
    Long domainId = cmd.getDomainId();
    // Keeping this logic consistent with domain specific zones
    // if a domainId is provided, we just return the disk offering associated with this domain
    if (domainId != null) {
        if (account.getType() == Account.ACCOUNT_TYPE_ADMIN) {
            return _diskOfferingDao.listByDomainId(domainId);// no perm check
        } else {
            // check if the user's domain == do's domain || user's domain is a child of so's domain
            if (isPermissible(account.getDomainId(), domainId)) {
                // perm check succeeded
                return _diskOfferingDao.listByDomainId(domainId);
            } else {
                throw new PermissionDeniedException("The account:" + account.getAccountName() + " does not fall in the same domain hierarchy as the disk offering");
            }
        }
    }
    // For non-root users
    if ((account.getType() == Account.ACCOUNT_TYPE_NORMAL || account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) || account.getType() == Account.ACCOUNT_TYPE_RESOURCE_DOMAIN_ADMIN) {
        return searchDiskOfferingsInternal(account, name, id, keyword, searchFilter);
    }
    // For root users, preserving existing flow
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NULL);
    // FIXME: disk offerings should search back up the hierarchy for available disk offerings...
    /*
     * sb.addAnd("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ); if (domainId != null) {
     * SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder(); domainSearch.addAnd("path",
     * domainSearch.entity().getPath(), SearchCriteria.Op.LIKE); sb.join("domainSearch", domainSearch,
     * sb.entity().getDomainId(), domainSearch.entity().getId()); }
     */
    SearchCriteria<DiskOfferingVO> sc = sb.create();
    if (keyword != null) {
        SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
        ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    // FIXME: disk offerings should search back up the hierarchy for available disk offerings...
    /*
     * if (domainId != null) { sc.setParameters("domainId", domainId); // //DomainVO domain =
     * _domainDao.findById((Long)domainId); // // I want to join on user_vm.domain_id = domain.id where domain.path
     * like
     * 'foo%' //sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%"); // }
     */
    return _diskOfferingDao.search(sc, searchFilter);
}
/** Returns the properties files enumerating the API commands this server exposes. */
@Override
public String[] getApiConfig() {
    final String[] apiConfigFiles = { "commands.properties" };
    return apiConfigFiles;
}
/**
 * Periodic task that deletes events older than the configured purge delay.
 * Serialized cloud-wide via the "EventPurge" global lock so only one
 * management server purges at a time.
 */
protected class EventPurgeTask implements Runnable {
    @Override
    public void run() {
        try {
            GlobalLock lock = GlobalLock.getInternLock("EventPurge");
            if (lock == null) {
                s_logger.debug("Couldn't get the global lock");
                return;
            }
            if (!lock.lock(30)) {
                // another server holds the lock; skip this cycle
                s_logger.debug("Couldn't lock the db");
                return;
            }
            try {
                purgeOldEvents();
            } catch (Exception e) {
                s_logger.error("Exception ", e);
            } finally {
                lock.unlock();
            }
        } catch (Exception e) {
            s_logger.error("Exception ", e);
        }
    }

    /** Expunges every event created more than _purgeDelay days ago. */
    private void purgeOldEvents() {
        final Calendar purgeCal = Calendar.getInstance();
        purgeCal.add(Calendar.DAY_OF_YEAR, -_purgeDelay);
        final Date purgeTime = purgeCal.getTime();
        s_logger.debug("Deleting events older than: " + purgeTime.toString());
        final List<EventVO> oldEvents = _eventDao.listOlderEvents(purgeTime);
        s_logger.debug("Found " + oldEvents.size() + " events to be purged");
        for (EventVO event : oldEvents) {
            _eventDao.expunge(event.getId());
        }
    }
}
/**
 * Lists storage pools matching the API command's filters.
 * Authorizes/resolves the zone first, then translates the command into a
 * generic Criteria object and delegates to the criteria-based overload.
 */
@Override
public List<? extends StoragePoolVO> searchForStoragePools(ListStoragePoolsCmd cmd) {
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());

    final Criteria criteria = new Criteria("id", Boolean.TRUE, cmd.getStartIndex(), cmd.getPageSizeVal());
    criteria.addCriteria(Criteria.ID, cmd.getId());
    criteria.addCriteria(Criteria.NAME, cmd.getStoragePoolName());
    criteria.addCriteria(Criteria.CLUSTERID, cmd.getClusterId());
    criteria.addCriteria(Criteria.ADDRESS, cmd.getIpAddress());
    criteria.addCriteria(Criteria.KEYWORD, cmd.getKeyword());
    criteria.addCriteria(Criteria.PATH, cmd.getPath());
    criteria.addCriteria(Criteria.PODID, cmd.getPodId());
    criteria.addCriteria(Criteria.DATACENTERID, zoneId);
    return searchForStoragePools(criteria);
}
/**
 * Lists storage pools matching the supplied generic search criteria; paging
 * and ordering are taken straight from the Criteria object.
 */
@Override
public List<? extends StoragePoolVO> searchForStoragePools(Criteria c) {
    final Filter pageFilter = new Filter(StoragePoolVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    final SearchCriteria<StoragePoolVO> criteria = _poolDao.createSearchCriteria();

    final Object id = c.getCriteria(Criteria.ID);
    final Object name = c.getCriteria(Criteria.NAME);
    final Object host = c.getCriteria(Criteria.HOST);
    final Object path = c.getCriteria(Criteria.PATH);
    final Object zone = c.getCriteria(Criteria.DATACENTERID);
    final Object pod = c.getCriteria(Criteria.PODID);
    final Object cluster = c.getCriteria(Criteria.CLUSTERID);
    final Object address = c.getCriteria(Criteria.ADDRESS);
    final Object keyword = c.getCriteria(Criteria.KEYWORD);

    if (keyword != null) {
        // keyword matches against either the pool name or its type
        final SearchCriteria<StoragePoolVO> keywordCriteria = _poolDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("poolType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (name != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (host != null) {
        criteria.addAnd("host", SearchCriteria.Op.EQ, host);
    }
    if (path != null) {
        criteria.addAnd("path", SearchCriteria.Op.EQ, path);
    }
    if (zone != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
    }
    if (pod != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, pod);
    }
    if (address != null) {
        criteria.addAnd("hostAddress", SearchCriteria.Op.EQ, address);
    }
    if (cluster != null) {
        criteria.addAnd("clusterId", SearchCriteria.Op.EQ, cluster);
    }
    return _poolDao.search(criteria, pageFilter);
}
/**
 * Lists async jobs visible to the caller, honoring the standard ACL rules:
 * explicit account list, recursive/non-recursive domain scoping, and
 * project-resource inclusion/exclusion criteria. Joins to the account (and,
 * for domain scoping, domain) tables are only built when actually needed.
 */
@Override
public List<AsyncJobVO> searchForAsyncJobs(ListAsyncJobsCmd cmd) {
Account caller = UserContext.current().getCaller();
List<Long> permittedAccounts = new ArrayList<Long>();
Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
// resolves the effective domain/recursion/project criteria for this caller
_accountMgr.buildACLSearchParameters(caller, null, cmd.getAccountName(), null, permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
Long domainId = domainIdRecursiveListProject.first();
Boolean isRecursive = domainIdRecursiveListProject.second();
ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
Filter searchFilter = new Filter(AsyncJobVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchBuilder<AsyncJobVO> sb = _jobDao.createSearchBuilder();
sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
SearchBuilder<AccountVO> accountSearch = null;
boolean accountJoinIsDone = false;
// domain scoping (no explicit accounts): join job -> account -> domain
if (permittedAccounts.isEmpty() && domainId != null) {
accountSearch = _accountDao.createSearchBuilder();
// if accountId isn't specified, we can do a domain match for the admin case if isRecursive is true
SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
domainSearch.and("domainId", domainSearch.entity().getId(), SearchCriteria.Op.EQ);
domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
accountJoinIsDone = true;
accountSearch.join("domainSearch", domainSearch, accountSearch.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
}
// project filter: include-only or exclude project accounts via account type
if (listProjectResourcesCriteria != null) {
if (accountSearch == null) {
accountSearch = _accountDao.createSearchBuilder();
}
if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.ListProjectResourcesOnly) {
accountSearch.and("type", accountSearch.entity().getType(), SearchCriteria.Op.EQ);
} else if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.SkipProjectResources) {
accountSearch.and("type", accountSearch.entity().getType(), SearchCriteria.Op.NEQ);
}
// avoid joining the account table twice when the domain branch already did
if (!accountJoinIsDone) {
sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinBuilder.JoinType.INNER);
}
}
Object keyword = cmd.getKeyword();
Object startDate = cmd.getStartDate();
SearchCriteria<AsyncJobVO> sc = sb.create();
if (listProjectResourcesCriteria != null) {
sc.setJoinParameters("accountSearch", "type", Account.ACCOUNT_TYPE_PROJECT);
}
if (!permittedAccounts.isEmpty()) {
sc.setParameters("accountIdIN", permittedAccounts.toArray());
} else if (domainId != null) {
DomainVO domain = _domainDao.findById(domainId);
// recursive: prefix match on the domain path; otherwise exact domain id
if (isRecursive) {
sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
} else {
sc.setJoinParameters("domainSearch", "domainId", domainId);
}
}
if (keyword != null) {
sc.addAnd("cmd", SearchCriteria.Op.LIKE, "%" + keyword + "%");
}
if (startDate != null) {
sc.addAnd("created", SearchCriteria.Op.GTEQ, startDate);
}
return _jobDao.search(sc, searchFilter);
}
/**
 * Starts the secondary storage VM with the given instance id; the async
 * action event records the start. Thin delegate to the SSVM manager.
 */
@ActionEvent(eventType = EventTypes.EVENT_SSVM_START, eventDescription = "starting secondary storage Vm", async = true)
public SecondaryStorageVmVO startSecondaryStorageVm(long instanceId) {
return _secStorageVmMgr.startSecStorageVm(instanceId);
}
/**
 * Stops a secondary storage system VM via the instance manager.
 *
 * @param systemVm the SSVM instance to stop
 * @param isForced whether to force the stop
 * @return the SSVM row after a successful stop, or null when advanceStop reports failure
 */
@ActionEvent(eventType = EventTypes.EVENT_SSVM_STOP, eventDescription = "stopping secondary storage Vm", async = true)
private SecondaryStorageVmVO stopSecondaryStorageVm(VMInstanceVO systemVm, boolean isForced) throws ResourceUnavailableException, OperationTimedoutException, ConcurrentOperationException {
User caller = _userDao.findById(UserContext.current().getCallerUserId());
if (_itMgr.advanceStop(systemVm, isForced, caller, UserContext.current().getCaller())) {
return _secStorageVmDao.findById(systemVm.getId());
}
return null;
}
/**
 * Reboots the secondary storage VM and returns its (re-read) database row.
 */
@ActionEvent(eventType = EventTypes.EVENT_SSVM_REBOOT, eventDescription = "rebooting secondary storage Vm", async = true)
public SecondaryStorageVmVO rebootSecondaryStorageVm(long instanceId) {
_secStorageVmMgr.rebootSecStorageVm(instanceId);
return _secStorageVmDao.findById(instanceId);
}
/**
 * Destroys the secondary storage VM.
 * The row is fetched BEFORE destruction so it can still be returned once the
 * VM is gone; returns null when destruction fails.
 */
@ActionEvent(eventType = EventTypes.EVENT_SSVM_DESTROY, eventDescription = "destroying secondary storage Vm", async = true)
public SecondaryStorageVmVO destroySecondaryStorageVm(long instanceId) {
SecondaryStorageVmVO secStorageVm = _secStorageVmDao.findById(instanceId);
if (_secStorageVmMgr.destroySecStorageVm(instanceId)) {
return secStorageVm;
}
return null;
}
/**
 * Lists system VMs (console proxies and secondary storage VMs), filtered by
 * the command's optional id/name/state/zone/pod/host/type parameters. When no
 * explicit type is given, results are restricted to the two system VM types.
 */
@Override
public List<? extends VMInstanceVO> searchForSystemVm(ListSystemVMsCmd cmd) {
    final Long zoneId = _accountMgr.checkAccessAndSpecifyAuthority(UserContext.current().getCaller(), cmd.getZoneId());
    final String type = cmd.getSystemVmType();
    final Long id = cmd.getId();
    final String name = cmd.getSystemVmName();
    final String state = cmd.getState();
    final String keyword = cmd.getKeyword();
    final Long podId = cmd.getPodId();
    final Long hostId = cmd.getHostId();

    // Page by id ascending.
    final Filter pageFilter = new Filter(VMInstanceVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());

    final SearchBuilder<VMInstanceVO> builder = _vmInstanceDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);
    builder.and("hostName", builder.entity().getHostName(), SearchCriteria.Op.LIKE);
    builder.and("state", builder.entity().getState(), SearchCriteria.Op.EQ);
    builder.and("dataCenterId", builder.entity().getDataCenterIdToDeployIn(), SearchCriteria.Op.EQ);
    builder.and("podId", builder.entity().getPodIdToDeployIn(), SearchCriteria.Op.EQ);
    builder.and("hostId", builder.entity().getHostId(), SearchCriteria.Op.EQ);
    builder.and("type", builder.entity().getType(), SearchCriteria.Op.EQ);
    builder.and("nulltype", builder.entity().getType(), SearchCriteria.Op.IN);

    final SearchCriteria<VMInstanceVO> criteria = builder.create();
    if (keyword != null) {
        // keyword matches against either host name or state
        final SearchCriteria<VMInstanceVO> keywordCriteria = _vmInstanceDao.createSearchCriteria();
        keywordCriteria.addOr("hostName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("hostName", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.setParameters("id", id);
    }
    if (name != null) {
        criteria.setParameters("hostName", name);
    }
    if (state != null) {
        criteria.setParameters("state", state);
    }
    if (zoneId != null) {
        criteria.setParameters("dataCenterId", zoneId);
    }
    if (podId != null) {
        criteria.setParameters("podId", podId);
    }
    if (hostId != null) {
        criteria.setParameters("hostId", hostId);
    }
    if (type != null) {
        criteria.setParameters("type", type);
    } else {
        criteria.setParameters("nulltype", VirtualMachine.Type.SecondaryStorageVm, VirtualMachine.Type.ConsoleProxy);
    }
    return _vmInstanceDao.search(criteria, pageFilter);
}
/**
 * Resolves the type (console proxy or SSVM) of the system VM with the given
 * id, or throws when no such system VM exists.
 */
@Override
public VirtualMachine.Type findSystemVMTypeById(long instanceId) {
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(instanceId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm != null) {
        return systemVm.getType();
    }
    throw new InvalidParameterValueException("Unable to find a system vm: " + instanceId);
}
/**
 * Starts a system VM (console proxy or SSVM) by id, dispatching to the
 * type-specific start path.
 */
@Override
public VirtualMachine startSystemVM(long vmId) {
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new InvalidParameterValueException("unable to find a system vm with id " + vmId);
    }
    switch (systemVm.getType()) {
    case ConsoleProxy:
        return startConsoleProxy(vmId);
    case SecondaryStorageVm:
        return startSecondaryStorageVm(vmId);
    default:
        throw new InvalidParameterValueException("Unable to find a system vm: " + vmId);
    }
}
/**
 * Stops a system VM (console proxy or SSVM) by id.
 * Returns null when the VM's type is neither of the two (unreachable in
 * practice since the lookup is already type-restricted); a stop timeout is
 * rethrown as CloudRuntimeException.
 */
@Override
public VMInstanceVO stopSystemVM(StopSystemVmCmd cmd) throws ResourceUnavailableException, ConcurrentOperationException {
    final Long id = cmd.getId();
    // verify parameters
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(id, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new InvalidParameterValueException("unable to find a system vm with id " + id);
    }
    try {
        switch (systemVm.getType()) {
        case ConsoleProxy:
            return stopConsoleProxy(systemVm, cmd.isForced());
        case SecondaryStorageVm:
            return stopSecondaryStorageVm(systemVm, cmd.isForced());
        default:
            return null;
        }
    } catch (OperationTimedoutException e) {
        throw new CloudRuntimeException("Unable to stop " + systemVm, e);
    }
}
/**
 * Reboots a system VM by id: console proxies via the proxy manager, anything
 * else (the SSVM) via the secondary storage manager.
 */
@Override
public VMInstanceVO rebootSystemVM(RebootSystemVmCmd cmd) {
    final Long id = cmd.getId();
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(id, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new InvalidParameterValueException("unable to find a system vm with id " + id);
    }
    return systemVm.getType().equals(VirtualMachine.Type.ConsoleProxy)
            ? rebootConsoleProxy(id)
            : rebootSecondaryStorageVm(id);
}
/**
 * Destroys a system VM by id: console proxies via the proxy manager, anything
 * else (the SSVM) via the secondary storage manager.
 */
@Override
public VMInstanceVO destroySystemVM(DestroySystemVmCmd cmd) {
    final Long id = cmd.getId();
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(id, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new InvalidParameterValueException("unable to find a system vm with id " + id);
    }
    return systemVm.getType().equals(VirtualMachine.Type.ConsoleProxy)
            ? destroyConsoleProxy(id)
            : destroySecondaryStorageVm(id);
}
/**
 * Computes the HmacSHA1 signature of the request string with the given secret
 * key and returns it Base64-encoded; returns null when either argument is
 * null or signing fails.
 *
 * Security fix: the secret key is no longer written to the log — logging
 * credentials at INFO level leaks them to anyone with log access.
 */
private String signRequest(String request, String key) {
    try {
        s_logger.info("Request: " + request);
        if (key != null && request != null) {
            Mac mac = Mac.getInstance("HmacSHA1");
            SecretKeySpec keySpec = new SecretKeySpec(key.getBytes(), "HmacSHA1");
            mac.init(keySpec);
            mac.update(request.getBytes());
            byte[] encryptedBytes = mac.doFinal();
            return new String((Base64.encodeBase64(encryptedBytes)));
        }
    } catch (Exception ex) {
        s_logger.error("unable to sign request", ex);
    }
    return null;
}
/**
 * Returns a two-element list [cloudIdentifier, signature] for the given user:
 * the "cloud.identifier" config value signed with the user's secret key.
 * The identifier defaults to "" when unset, and the signature is "" when
 * signing fails (e.g. the user has no secret key).
 *
 * @throws InvalidParameterValueException when the user does not exist or was removed
 */
@Override
public ArrayList<String> getCloudIdentifierResponse(long userId) {
Account caller = UserContext.current().getCaller();
// verify that user exists
User user = _accountMgr.getUserIncludingRemoved(userId);
if ((user == null) || (user.getRemoved() != null)) {
throw new InvalidParameterValueException("Unable to find active user by id " + userId);
}
// check permissions
_accountMgr.checkAccess(caller, null, true, _accountMgr.getAccount(user.getAccountId()));
String cloudIdentifier = _configDao.getValue("cloud.identifier");
if (cloudIdentifier == null) {
cloudIdentifier = "";
}
String signature = "";
try {
// get the user obj to get his secret key
user = _accountMgr.getActiveUser(userId);
String secretKey = user.getSecretKey();
String input = cloudIdentifier;
signature = signRequest(input, secretKey);
} catch (Exception e) {
s_logger.warn("Exception whilst creating a signature:" + e);
}
ArrayList<String> cloudParams = new ArrayList<String>();
cloudParams.add(cloudIdentifier);
cloudParams.add(signature);
return cloudParams;
}
@Override
public Map<String, Object> listCapabilities(ListCapabilitiesCmd cmd) {
Map<String, Object> capabilities = new HashMap<String, Object>();
boolean securityGroupsEnabled = false;
boolean elasticLoadBalancerEnabled = false;
String supportELB = "false";
List<NetworkVO> networks = _networkDao.listSecurityGroupEnabledNetworks();
if (networks != null && !networks.isEmpty()) {
securityGroupsEnabled = true;
String elbEnabled = _configDao.getValue(Config.ElasticLoadBalancerEnabled.key());
elasticLoadBalancerEnabled = elbEnabled == null ? false : Boolean.parseBoolean(elbEnabled);
if (elasticLoadBalancerEnabled) {
String networkType = _configDao.getValue(Config.ElasticLoadBalancerNetwork.key());
if (networkType != null)
supportELB = networkType;
}
}
String userPublicTemplateEnabled = _configs.get(Config.AllowPublicUserTemplates.key());
capabilities.put("securityGroupsEnabled", securityGroupsEnabled);
capabilities.put("userPublicTemplateEnabled", (userPublicTemplateEnabled == null || userPublicTemplateEnabled.equals("false") ? false : true));
capabilities.put("cloudStackVersion", getVersion());
capabilities.put("supportELB", supportELB);
capabilities.put("projectInviteRequired", _projectMgr.projectInviteRequired());
capabilities.put("allowusercreateprojects", _projectMgr.allowUserToCreateProject());
return capabilities;
}
@Override
public GuestOSVO getGuestOs(Long guestOsId) {
return _guestOSDao.findById(guestOsId);
}
    /**
     * Extracts (exports) a volume: copies it from its primary storage pool to
     * secondary storage, then either creates an HTTP download URL for it or
     * uploads it to an external FTP server, depending on the requested mode.
     *
     * Preconditions enforced here: extraction enabled (or root-admin caller),
     * volume exists and is accessible, zone is valid, volume sits on a storage
     * pool, and its VM (if attached) is stopped. Non-DATADISK volumes are also
     * checked against their template's extractable flag.
     *
     * @return the id of the upload job tracking the extraction
     * @throws URISyntaxException if the FTP upload URL is malformed
     */
    @Override
    @ActionEvent(eventType = EventTypes.EVENT_VOLUME_EXTRACT, eventDescription = "extracting volume", async = true)
    public Long extractVolume(ExtractVolumeCmd cmd) throws URISyntaxException {
        Long volumeId = cmd.getId();
        String url = cmd.getUrl();
        Long zoneId = cmd.getZoneId();
        AsyncJobVO job = null; // FIXME: cmd.getJob();
        String mode = cmd.getMode();
        Account account = UserContext.current().getCaller();
        // Only root admins may extract when extraction is globally disabled.
        if (!_accountMgr.isRootAdmin(account.getType()) && ApiDBUtils.isExtractionDisabled()) {
            throw new PermissionDeniedException("Extraction has been disabled by admin");
        }
        VolumeVO volume = _volumeDao.findById(volumeId);
        if (volume == null) {
            throw new InvalidParameterValueException("Unable to find volume with id " + volumeId);
        }
        // perform permission check
        _accountMgr.checkAccess(account, null, true, volume);
        if (_dcDao.findById(zoneId) == null) {
            throw new InvalidParameterValueException("Please specify a valid zone.");
        }
        if (volume.getPoolId() == null) {
            throw new InvalidParameterValueException("The volume doesnt belong to a storage pool so cant extract it");
        }
        // Extract activity only for detached volumes or for volumes whose instance is stopped
        // NOTE(review): findVMInstanceById is dereferenced without a null
        // check; assumes the instance row always exists — TODO confirm.
        if (volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped) {
            s_logger.debug("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
            throw new PermissionDeniedException("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
        }
        if (volume.getVolumeType() != Volume.Type.DATADISK) { // Datadisk dont have any template dependence.
            VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId());
            if (template != null) { // For ISO based volumes template = null and we allow extraction of all ISO based
                // volumes
                boolean isExtractable = template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
                if (!isExtractable && account != null && account.getType() != Account.ACCOUNT_TYPE_ADMIN) { // Global
                    // admins are always allowed to extract
                    throw new PermissionDeniedException("The volume:" + volumeId + " is not allowed to be extracted");
                }
            }
        }
        // Validate and resolve the extraction mode string.
        Upload.Mode extractMode;
        if (mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString()))) {
            throw new InvalidParameterValueException("Please specify a valid extract Mode ");
        } else {
            extractMode = mode.equals(Upload.Mode.FTP_UPLOAD.toString()) ? Upload.Mode.FTP_UPLOAD : Upload.Mode.HTTP_DOWNLOAD;
        }
        // If mode is upload perform extra checks on url and also see if there is an ongoing upload on the same.
        if (extractMode == Upload.Mode.FTP_UPLOAD) {
            URI uri = new URI(url);
            if ((uri.getScheme() == null) || (!uri.getScheme().equalsIgnoreCase("ftp"))) {
                throw new IllegalArgumentException("Unsupported scheme for url: " + url);
            }
            String host = uri.getHost();
            // Reject local/link-local/loopback/multicast and IPv6 targets to
            // avoid uploading into the management network itself.
            try {
                InetAddress hostAddr = InetAddress.getByName(host);
                if (hostAddr.isAnyLocalAddress() || hostAddr.isLinkLocalAddress() || hostAddr.isLoopbackAddress() || hostAddr.isMulticastAddress()) {
                    throw new IllegalArgumentException("Illegal host specified in url");
                }
                if (hostAddr instanceof Inet6Address) {
                    throw new IllegalArgumentException("IPV6 addresses not supported (" + hostAddr.getHostAddress() + ")");
                }
            } catch (UnknownHostException uhe) {
                throw new IllegalArgumentException("Unable to resolve " + host);
            }
            if (_uploadMonitor.isTypeUploadInProgress(volumeId, Upload.Type.VOLUME)) {
                throw new IllegalArgumentException(volume.getName() + " upload is in progress. Please wait for some time to schedule another upload for the same");
            }
        }
        long accountId = volume.getAccountId();
        String secondaryStorageURL = _storageMgr.getSecondaryStorageURL(zoneId);
        StoragePoolVO srcPool = _poolDao.findById(volume.getPoolId());
        // NOTE(review): get(0) assumes at least one secondary storage host
        // exists in the zone; an empty list would throw — TODO confirm.
        List<HostVO> storageServers = _resourceMgr.listAllHostsInOneZoneByType(Host.Type.SecondaryStorage, zoneId);
        HostVO sserver = storageServers.get(0);
        List<UploadVO> extractURLList = _uploadDao.listByTypeUploadStatus(volumeId, Upload.Type.VOLUME, UploadVO.Status.DOWNLOAD_URL_CREATED);
        if (extractMode == Upload.Mode.HTTP_DOWNLOAD && extractURLList.size() > 0) {
            return extractURLList.get(0).getId(); // If download url already exists then return
        } else {
            // Record a new upload entry to track the copy/extract progress.
            UploadVO uploadJob = _uploadMonitor.createNewUploadEntry(sserver.getId(), volumeId, UploadVO.Status.COPY_IN_PROGRESS, Upload.Type.VOLUME, url, extractMode);
            s_logger.debug("Extract Mode - " + uploadJob.getMode());
            uploadJob = _uploadDao.createForUpdate(uploadJob.getId());
            // Update the async Job
            ExtractResponse resultObj = new ExtractResponse(volumeId, volume.getName(), accountId, UploadVO.Status.COPY_IN_PROGRESS.toString(), uploadJob.getId());
            resultObj.setResponseName(cmd.getCommandName());
            AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor.getCurrentExecutor();
            if (asyncExecutor != null) {
                job = asyncExecutor.getJob();
                _asyncMgr.updateAsyncJobAttachment(job.getId(), Upload.Type.VOLUME.toString(), volumeId);
                _asyncMgr.updateAsyncJobStatus(job.getId(), AsyncJobResult.STATUS_IN_PROGRESS, resultObj);
            }
            String value = _configs.get(Config.CopyVolumeWait.toString());
            int copyvolumewait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CopyVolumeWait.getDefaultValue()));
            // Copy the volume from the source storage pool to secondary storage
            CopyVolumeCommand cvCmd = new CopyVolumeCommand(volume.getId(), volume.getPath(), srcPool, secondaryStorageURL, true, copyvolumewait);
            CopyVolumeAnswer cvAnswer = null;
            try {
                cvAnswer = (CopyVolumeAnswer) _storageMgr.sendToPool(srcPool, cvCmd);
            } catch (StorageUnavailableException e) {
                s_logger.debug("Storage unavailable");
            }
            // Check if you got a valid answer.
            if (cvAnswer == null || !cvAnswer.getResult()) {
                String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage.";
                // Update the async job.
                resultObj.setResultString(errorString);
                resultObj.setUploadStatus(UploadVO.Status.COPY_ERROR.toString());
                if (asyncExecutor != null) {
                    _asyncMgr.completeAsyncJob(job.getId(), AsyncJobResult.STATUS_FAILED, 0, resultObj);
                }
                // Update the DB that volume couldn't be copied
                uploadJob.setUploadState(UploadVO.Status.COPY_ERROR);
                uploadJob.setErrorString(errorString);
                uploadJob.setLastUpdated(new Date());
                _uploadDao.update(uploadJob.getId(), uploadJob);
                throw new CloudRuntimeException(errorString);
            }
            String volumeLocalPath = "volumes/" + volume.getId() + "/" + cvAnswer.getVolumePath() + "." + getFormatForPool(srcPool);
            // Update the DB that volume is copied and volumePath
            uploadJob.setUploadState(UploadVO.Status.COPY_COMPLETE);
            uploadJob.setLastUpdated(new Date());
            uploadJob.setInstallPath(volumeLocalPath);
            _uploadDao.update(uploadJob.getId(), uploadJob);
            if (extractMode == Mode.FTP_UPLOAD) { // Now that the volume is copied perform the actual uploading
                // NOTE(review): job.getId() below NPEs when no async executor
                // was present (job stays null) — TODO confirm FTP extraction is
                // only ever invoked from an async job context.
                _uploadMonitor.extractVolume(uploadJob, sserver, volume, url, zoneId, volumeLocalPath, cmd.getStartEventId(), job.getId(), _asyncMgr);
                return uploadJob.getId();
            } else { // Volume is copied now make it visible under apache and create a URL.
                _uploadMonitor.createVolumeDownloadURL(volumeId, volumeLocalPath, Upload.Type.VOLUME, zoneId, uploadJob.getId());
                return uploadJob.getId();
            }
        }
    }
private String getFormatForPool(StoragePoolVO pool) {
ClusterVO cluster = ApiDBUtils.findClusterById(pool.getClusterId());
if (cluster.getHypervisorType() == HypervisorType.XenServer) {
return "vhd";
} else if (cluster.getHypervisorType() == HypervisorType.KVM) {
return "qcow2";
} else if (cluster.getHypervisorType() == HypervisorType.VMware) {
return "ova";
} else if (cluster.getHypervisorType() == HypervisorType.Ovm) {
return "raw";
} else {
return null;
}
}
@Override
public InstanceGroupVO updateVmGroup(UpdateVMGroupCmd cmd) {
Account caller = UserContext.current().getCaller();
Long groupId = cmd.getId();
String groupName = cmd.getGroupName();
// Verify input parameters
InstanceGroupVO group = _vmGroupDao.findById(groupId.longValue());
if (group == null) {
throw new InvalidParameterValueException("unable to find a vm group with id " + groupId);
}
_accountMgr.checkAccess(caller, null, true, group);
// Check if name is already in use by this account (exclude this group)
boolean isNameInUse = _vmGroupDao.isNameInUse(group.getAccountId(), groupName);
if (isNameInUse && !group.getName().equals(groupName)) {
throw new InvalidParameterValueException("Unable to update vm group, a group with name " + groupName + " already exists for account");
}
if (groupName != null) {
_vmGroupDao.updateVmGroup(groupId, groupName);
}
return _vmGroupDao.findById(groupId);
}
@Override
public List<InstanceGroupVO> searchForVmGroups(ListVMGroupsCmd cmd) {
Long id = cmd.getId();
String name = cmd.getGroupName();
String keyword = cmd.getKeyword();
Account caller = UserContext.current().getCaller();
List<Long> permittedAccounts = new ArrayList<Long>();
Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
_accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
Long domainId = domainIdRecursiveListProject.first();
Boolean isRecursive = domainIdRecursiveListProject.second();
ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
Filter searchFilter = new Filter(InstanceGroupVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchBuilder<InstanceGroupVO> sb = _vmGroupDao.createSearchBuilder();
sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
if (((permittedAccounts.isEmpty()) && (domainId != null) && isRecursive)) {
// if accountId isn't specified, we can do a domain match for the admin case if isRecursive is true
SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
}
if (listProjectResourcesCriteria != null) {
if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.ListProjectResourcesOnly) {
sb.and("accountType", sb.entity().getAccountType(), SearchCriteria.Op.EQ);
} else if (listProjectResourcesCriteria == Project.ListProjectResourcesCriteria.SkipProjectResources) {
sb.and("accountType", sb.entity().getAccountType(), SearchCriteria.Op.NEQ);
}
}
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
SearchCriteria<InstanceGroupVO> sc = sb.create();
if (listProjectResourcesCriteria != null) {
sc.setParameters("accountType", Account.ACCOUNT_TYPE_PROJECT);
}
if (!permittedAccounts.isEmpty()) {
sc.setParameters("accountIdIN", permittedAccounts.toArray());
} else if (domainId != null) {
DomainVO domain = _domainDao.findById(domainId);
if (isRecursive) {
sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
} else {
sc.setParameters("domainId", domainId);
}
}
if (keyword != null) {
SearchCriteria<InstanceGroupVO> ssc = _vmGroupDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
}
if (id != null) {
sc.setParameters("id", id);
}
if (name != null) {
sc.setParameters("name", "%" + name + "%");
}
return _vmGroupDao.search(sc, searchFilter);
}
@Override
public String getVersion() {
final Class<?> c = ManagementServer.class;
String fullVersion = c.getPackage().getImplementationVersion();
if (fullVersion.length() > 0) {
return fullVersion;
}
return "unknown";
}
@Override
public Long saveStartedEvent(Long userId, Long accountId, String type, String description, long startEventId) {
return EventUtils.saveStartedEvent(userId, accountId, type, description, startEventId);
}
@Override
public Long saveCompletedEvent(Long userId, Long accountId, String level, String type, String description, long startEventId) {
return EventUtils.saveEvent(userId, accountId, level, type, description, startEventId);
}
@Override
@DB
public String uploadCertificate(UploadCustomCertificateCmd cmd) {
if (cmd.getPrivateKey() != null && cmd.getAlias() != null) {
throw new InvalidParameterValueException("Can't change the alias for private key certification");
}
if (cmd.getPrivateKey() == null) {
if (cmd.getAlias() == null) {
throw new InvalidParameterValueException("alias can't be empty, if it's a certification chain");
}
if (cmd.getCertIndex() == null) {
throw new InvalidParameterValueException("index can't be empty, if it's a certifciation chain");
}
}
if (cmd.getPrivateKey() != null && !_ksMgr.validateCertificate(cmd.getCertificate(), cmd.getPrivateKey(), cmd.getDomainSuffix())) {
throw new InvalidParameterValueException("Failed to pass certificate validation check");
}
if (cmd.getPrivateKey() != null) {
_ksMgr.saveCertificate(ConsoleProxyManager.CERTIFICATE_NAME, cmd.getCertificate(), cmd.getPrivateKey(), cmd.getDomainSuffix());
} else {
_ksMgr.saveCertificate(cmd.getAlias(), cmd.getCertificate(), cmd.getCertIndex(), cmd.getDomainSuffix());
}
_consoleProxyMgr.setManagementState(ConsoleProxyManagementState.ResetSuspending);
return "Certificate has been updated, we will stop all running console proxy VMs to propagate the new certificate, please give a few minutes for console access service to be up again";
}
@Override
public List<String> getHypervisors(Long zoneId) {
List<String> result = new ArrayList<String>();
String hypers = _configDao.getValue(Config.HypervisorList.key());
String[] hypervisors = hypers.split(",");
if (zoneId != null) {
if (zoneId.longValue() == -1L) {
List<DataCenterVO> zones = _dcDao.listAll();
for (String hypervisor : hypervisors) {
int hyperCount = 0;
for (DataCenterVO zone : zones) {
List<ClusterVO> clusters = _clusterDao.listByDcHyType(zone.getId(), hypervisor);
if (!clusters.isEmpty()) {
hyperCount++;
}
}
if (hyperCount == zones.size()) {
result.add(hypervisor);
}
}
} else {
List<ClusterVO> clustersForZone = _clusterDao.listByZoneId(zoneId);
for (ClusterVO cluster : clustersForZone) {
result.add(cluster.getHypervisorType().toString());
}
}
} else {
return Arrays.asList(hypervisors);
}
return result;
}
@Override
public String getHashKey() {
// although we may have race conditioning here, database transaction serialization should
// give us the same key
if (_hashKey == null) {
_hashKey = _configDao.getValueAndInitIfNotExist(Config.HashKey.key(), Config.HashKey.getCategory(), UUID.randomUUID().toString());
}
return _hashKey;
}
@Override
public SSHKeyPair createSSHKeyPair(CreateSSHKeyPairCmd cmd) {
Account caller = UserContext.current().getCaller();
String accountName = cmd.getAccountName();
Long domainId = cmd.getDomainId();
Long projectId = cmd.getProjectId();
Account owner = _accountMgr.finalizeOwner(caller, accountName, domainId, projectId);
SSHKeyPairVO s = _sshKeyPairDao.findByName(owner.getAccountId(), owner.getDomainId(), cmd.getName());
if (s != null) {
throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
}
SSHKeysHelper keys = new SSHKeysHelper();
String name = cmd.getName();
String publicKey = keys.getPublicKey();
String fingerprint = keys.getPublicKeyFingerPrint();
String privateKey = keys.getPrivateKey();
return createAndSaveSSHKeyPair(name, fingerprint, publicKey, privateKey, owner);
}
@Override
public boolean deleteSSHKeyPair(DeleteSSHKeyPairCmd cmd) {
Account caller = UserContext.current().getCaller();
String accountName = cmd.getAccountName();
Long domainId = cmd.getDomainId();
Long projectId = cmd.getProjectId();
Account owner = _accountMgr.finalizeOwner(caller, accountName, domainId, projectId);
SSHKeyPairVO s = _sshKeyPairDao.findByName(owner.getAccountId(), owner.getDomainId(), cmd.getName());
if (s == null) {
throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' does not exist for account " + owner.getAccountName() + " in domain id=" + owner.getDomainId());
}
return _sshKeyPairDao.deleteByName(caller.getAccountId(), caller.getDomainId(), cmd.getName());
}
@Override
public List<? extends SSHKeyPair> listSSHKeyPairs(ListSSHKeyPairsCmd cmd) {
String name = cmd.getName();
String fingerPrint = cmd.getFingerprint();
Account caller = UserContext.current().getCaller();
List<Long> permittedAccounts = new ArrayList<Long>();
Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
_accountMgr.buildACLSearchParameters(caller, null, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
Long domainId = domainIdRecursiveListProject.first();
Boolean isRecursive = domainIdRecursiveListProject.second();
ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
SearchBuilder<SSHKeyPairVO> sb = _sshKeyPairDao.createSearchBuilder();
_accountMgr.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
Filter searchFilter = new Filter(SSHKeyPairVO.class, "id", false, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<SSHKeyPairVO> sc = sb.create();
_accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.EQ, name);
}
if (fingerPrint != null) {
sc.addAnd("fingerprint", SearchCriteria.Op.EQ, fingerPrint);
}
return _sshKeyPairDao.search(sc, searchFilter);
}
@Override
public SSHKeyPair registerSSHKeyPair(RegisterSSHKeyPairCmd cmd) {
Account caller = UserContext.current().getCaller();
Account owner = _accountMgr.finalizeOwner(caller, cmd.getAccountName(), cmd.getDomainId(), cmd.getProjectId());
SSHKeyPairVO s = _sshKeyPairDao.findByName(owner.getAccountId(), owner.getDomainId(), cmd.getName());
if (s != null) {
throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
}
String name = cmd.getName();
String publicKey = SSHKeysHelper.getPublicKeyFromKeyMaterial(cmd.getPublicKey());
if (publicKey == null) {
throw new InvalidParameterValueException("Public key is invalid");
}
String fingerprint = SSHKeysHelper.getPublicKeyFingerprint(publicKey);
return createAndSaveSSHKeyPair(name, fingerprint, publicKey, null, owner);
}
private SSHKeyPair createAndSaveSSHKeyPair(String name, String fingerprint, String publicKey, String privateKey, Account owner) {
SSHKeyPairVO newPair = new SSHKeyPairVO();
newPair.setAccountId(owner.getAccountId());
newPair.setDomainId(owner.getDomainId());
newPair.setName(name);
newPair.setFingerprint(fingerprint);
newPair.setPublicKey(publicKey);
newPair.setPrivateKey(privateKey); // transient; not saved.
_sshKeyPairDao.persist(newPair);
return newPair;
}
@Override
public String getVMPassword(GetVMPasswordCmd cmd) {
Account caller = UserContext.current().getCaller();
UserVmVO vm = _userVmDao.findById(cmd.getId());
if (vm == null) {
throw new InvalidParameterValueException("No VM with id '" + cmd.getId() + "' found.");
}
// make permission check
_accountMgr.checkAccess(caller, null, true, vm);
_userVmDao.loadDetails(vm);
String password = vm.getDetail("Encrypted.Password");
if (password == null || password.equals("")) {
throw new InvalidParameterValueException("No password for VM with id '" + cmd.getId() + "' found.");
}
return password;
}
@Override
@DB
public boolean updateHostPassword(UpdateHostPasswordCmd cmd) {
if (cmd.getClusterId() == null && cmd.getHostId() == null) {
throw new InvalidParameterValueException("You should provide one of cluster id or a host id.");
} else if (cmd.getClusterId() == null) {
HostVO host = _hostDao.findById(cmd.getHostId());
if (host != null && host.getHypervisorType() == HypervisorType.XenServer) {
throw new InvalidParameterValueException("You should provide cluster id for Xenserver cluster.");
} else {
throw new InvalidParameterValueException("This operation is not supported for this hypervisor type");
}
} else {
ClusterVO cluster = ApiDBUtils.findClusterById(cmd.getClusterId());
if (cluster == null || cluster.getHypervisorType() != HypervisorType.XenServer) {
throw new InvalidParameterValueException("This operation is not supported for this hypervisor type");
}
// get all the hosts in this cluster
List<HostVO> hosts = _resourceMgr.listAllHostsInCluster(cmd.getClusterId());
Transaction txn = Transaction.currentTxn();
try {
txn.start();
for (HostVO h : hosts) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Changing password for host name = " + h.getName());
}
// update password for this host
DetailVO nv = _detailsDao.findDetail(h.getId(), ApiConstants.USERNAME);
if (nv.getValue().equals(cmd.getUsername())) {
DetailVO nvp = _detailsDao.findDetail(h.getId(), ApiConstants.PASSWORD);
nvp.setValue(DBEncryptionUtil.encrypt(cmd.getPassword()));
_detailsDao.persist(nvp);
} else {
// if one host in the cluster has diff username then rollback to maintain consistency
txn.rollback();
throw new InvalidParameterValueException("The username is not same for all hosts, please modify passwords for individual hosts.");
}
}
txn.commit();
// if hypervisor is xenserver then we update it in CitrixResourceBase
} catch (Exception e) {
txn.rollback();
throw new CloudRuntimeException("Failed to update password " + e.getMessage());
}
}
return true;
}
@Override
public String[] listEventTypes() {
Object eventObj = new EventTypes();
Class<EventTypes> c = EventTypes.class;
Field[] fields = c.getDeclaredFields();
String[] eventTypes = new String[fields.length];
try {
int i = 0;
for (Field field : fields) {
eventTypes[i++] = field.get(eventObj).toString();
}
return eventTypes;
} catch (IllegalArgumentException e) {
s_logger.error("Error while listing Event Types", e);
} catch (IllegalAccessException e) {
s_logger.error("Error while listing Event Types", e);
}
return null;
}
@Override
public List<HypervisorCapabilitiesVO> listHypervisorCapabilities(Long id, HypervisorType hypervisorType, String keyword, Long startIndex, Long pageSizeVal) {
Filter searchFilter = new Filter(HypervisorCapabilitiesVO.class, "id", true, startIndex, pageSizeVal);
SearchCriteria<HypervisorCapabilitiesVO> sc = _hypervisorCapabilitiesDao.createSearchCriteria();
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (hypervisorType != null) {
sc.addAnd("hypervisorType", SearchCriteria.Op.EQ, hypervisorType);
}
if (keyword != null) {
SearchCriteria<HypervisorCapabilitiesVO> ssc = _hypervisorCapabilitiesDao.createSearchCriteria();
ssc.addOr("hypervisorType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("hypervisorType", SearchCriteria.Op.SC, ssc);
}
return _hypervisorCapabilitiesDao.search(sc, searchFilter);
}
@Override
public HypervisorCapabilities updateHypervisorCapabilities(Long id, Long maxGuestsLimit, Boolean securityGroupEnabled) {
HypervisorCapabilitiesVO hpvCapabilities = _hypervisorCapabilitiesDao.findById(id, true);
if (hpvCapabilities == null) {
throw new InvalidParameterValueException("unable to find the hypervisor capabilities " + id);
}
boolean updateNeeded = (maxGuestsLimit != null || securityGroupEnabled != null);
if (!updateNeeded) {
return hpvCapabilities;
}
hpvCapabilities = _hypervisorCapabilitiesDao.createForUpdate(id);
if (maxGuestsLimit != null) {
hpvCapabilities.setMaxGuestsLimit(maxGuestsLimit);
}
if (securityGroupEnabled != null) {
hpvCapabilities.setSecurityGroupEnabled(securityGroupEnabled);
}
if (_hypervisorCapabilitiesDao.update(id, hpvCapabilities)) {
hpvCapabilities = _hypervisorCapabilitiesDao.findById(id);
UserContext.current().setEventDetails("Hypervisor Capabilities id=" + hpvCapabilities.getId());
return hpvCapabilities;
} else {
return null;
}
}
}
| Bug 13127: API error text refer to database ids instead of uuids
Description:
Replace database ids in exception messages with uuids
in the file ManagementServerImpl.java.
Since a large number of files throw exceptions containing
db ids and all of them need to be changed, we will make
the changes in multiple commits.
| server/src/com/cloud/server/ManagementServerImpl.java | Bug 13127: API error text refer to database ids instead of uuids | <ide><path>erver/src/com/cloud/server/ManagementServerImpl.java
<ide> } else if (vmId != null) {
<ide> UserVmVO vmInstance = _userVmDao.findById(vmId);
<ide> if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
<del> throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a virtual machine with specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(vmInstance);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, vmId, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> _accountMgr.checkAccess(caller, null, true, vmInstance);
<ide> } else if (vmId != null) {
<ide> UserVmVO vmInstance = _userVmDao.findById(vmId);
<ide> if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
<del> throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(vmInstance);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, vmId, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> _accountMgr.checkAccess(caller, null, false, vmInstance);
<ide>
<ide> VMInstanceVO vm = _vmInstanceDao.findById(vmId);
<ide> if (vm == null) {
<del> throw new InvalidParameterValueException("Unable to find the VM by id=" + vmId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find the VM with specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(vm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, vmId, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide> // business logic
<ide> if (vm.getState() != State.Running) {
<ide> if (s_logger.isDebugEnabled()) {
<del> s_logger.debug("VM is not Running, unable to migrate the vm " + vm);
<del> }
<del> throw new InvalidParameterValueException("VM is not Running, unable to migrate the vm " + vm);
<add> s_logger.debug("VM is not Running, unable to migrate the vm" + vm);
<add> }
<add> InvalidParameterValueException ex = new InvalidParameterValueException("VM is not Running, unable to migrate the vm with specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(vm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, vmId, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> if (!vm.getHypervisorType().equals(HypervisorType.XenServer) && !vm.getHypervisorType().equals(HypervisorType.VMware) && !vm.getHypervisorType().equals(HypervisorType.KVM)
<ide> throw new InvalidParameterValueException("Unsupported operation, VM uses Local storage, cannot migrate");
<ide> }
<ide> long srcHostId = vm.getHostId();
<add> // why is this not HostVO?
<ide> Host srcHost = _hostDao.findById(srcHostId);
<ide> if (srcHost == null) {
<ide> if (s_logger.isDebugEnabled()) {
<ide> s_logger.debug("Unable to find the host with id: " + srcHostId + " of this VM:" + vm);
<ide> }
<del> throw new InvalidParameterValueException("Unable to find the host with id: " + srcHostId + " of this VM:" + vm);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find the host (with specified id) of VM with specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(srcHost);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, srcHostId, "hostId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> tablename = AnnotationHelper.getTableName(vm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, vmId, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide> Long cluster = srcHost.getClusterId();
<ide> Type hostType = srcHost.getType();
<ide> }
<ide> Account account = _accountDao.findActiveAccount(accountName, domainId);
<ide> if (account == null) {
<del> throw new InvalidParameterValueException("Unable to find account " + accountName + " in domain " + domainId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find account " + accountName + " in specified domain");
<add> // Since we don't have a DomainVO object here, we directly set tablename to "domain".
<add> String tablename = "domain";
<add> ex.addProxyObject(tablename, domainId, "domainId");
<add> throw ex;
<ide> } else {
<ide> accountId = account.getId();
<ide> }
<ide> if (projectId != null) {
<ide> Project project = _projectMgr.getProject(projectId);
<ide> if (project == null) {
<del> throw new InvalidParameterValueException("Unable to find project by id " + projectId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find project by id " + projectId);
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(project);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, projectId, "projectId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide> accountId = project.getProjectAccountId();
<ide> }
<ide> }// If ISO requested then it should be ISO.
<ide> if (isIso && template.getFormat() != ImageFormat.ISO) {
<ide> s_logger.error("Template Id " + templateId + " is not an ISO");
<del> throw new InvalidParameterValueException("Template Id " + templateId + " is not an ISO");
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Specified Template Id is not an ISO");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(template);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, templateId, "templateId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }// If ISO not requested then it shouldn't be an ISO.
<ide> if (!isIso && template.getFormat() == ImageFormat.ISO) {
<ide> s_logger.error("Incorrect format of the template id " + templateId);
<del> throw new InvalidParameterValueException("Incorrect format " + template.getFormat() + " of the template id " + templateId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Incorrect format " + template.getFormat() + " of the specified template id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(template);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, templateId, "templateId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide> }
<ide>
<ide> // verify that template exists
<ide> VMTemplateVO template = _templateDao.findById(id);
<ide> if (template == null || template.getRemoved() != null) {
<del> throw new InvalidParameterValueException("unable to find template/iso with id " + id);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find template/iso with specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(template);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, id, "templateId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> // Don't allow to modify system template
<ide> if (id == Long.valueOf(1)) {
<del> throw new InvalidParameterValueException("Unable to update template/iso with id " + id);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to update template/iso of specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(template);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, id, "templateId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> // do a permission check
<ide> // check if domain exists in the system
<ide> DomainVO domain = _domainDao.findById(domainId);
<ide> if (domain == null) {
<del> throw new InvalidParameterValueException("Unable to find domain " + domainId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find domain with specified domain id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(domain);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, domainId, "domainId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> } else if (domain.getParent() == null && domainName != null) {
<ide> // check if domain is ROOT domain - and deny to edit it with the new name
<ide> throw new InvalidParameterValueException("ROOT domain can not be edited with a new name");
<ide> public VirtualMachine.Type findSystemVMTypeById(long instanceId) {
<ide> VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(instanceId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
<ide> if (systemVm == null) {
<del> throw new InvalidParameterValueException("Unable to find a system vm: " + instanceId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find a system vm of specified instanceId");
<add> // Get the VMInstanceVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(systemVm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, instanceId, "instanceId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy cglib object\n");
<add> }
<add> throw ex;
<ide> }
<ide> return systemVm.getType();
<ide> }
<ide>
<ide> VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
<ide> if (systemVm == null) {
<del> throw new InvalidParameterValueException("unable to find a system vm with id " + vmId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a system vm with specified vmId");
<add> // Get the VMInstanceVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(systemVm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, vmId, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy cglib object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> if (systemVm.getType() == VirtualMachine.Type.ConsoleProxy) {
<ide> } else if (systemVm.getType() == VirtualMachine.Type.SecondaryStorageVm) {
<ide> return startSecondaryStorageVm(vmId);
<ide> } else {
<del> throw new InvalidParameterValueException("Unable to find a system vm: " + vmId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find a system vm with specified vmId");
<add> // Get the VMInstanceVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(systemVm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, vmId, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy cglib object\n");
<add> }
<add> throw ex;
<ide> }
<ide> }
<ide>
<ide> // verify parameters
<ide> VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(id, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
<ide> if (systemVm == null) {
<del> throw new InvalidParameterValueException("unable to find a system vm with id " + id);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a system vm with specified vmId");
<add> // Get the VMInstanceVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(systemVm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, id, "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> try {
<ide> VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(cmd.getId(), VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
<ide>
<ide> if (systemVm == null) {
<del> throw new InvalidParameterValueException("unable to find a system vm with id " + cmd.getId());
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a system vm with specified vmId");
<add> // Get the VMInstanceVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(systemVm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, cmd.getId(), "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> if (systemVm.getType().equals(VirtualMachine.Type.ConsoleProxy)) {
<ide> VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(cmd.getId(), VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
<ide>
<ide> if (systemVm == null) {
<del> throw new InvalidParameterValueException("unable to find a system vm with id " + cmd.getId());
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a system vm with specified vmId");
<add> // Get the VMInstanceVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(systemVm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, cmd.getId(), "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VMInstanceVO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> if (systemVm.getType().equals(VirtualMachine.Type.ConsoleProxy)) {
<ide> // verify that user exists
<ide> User user = _accountMgr.getUserIncludingRemoved(userId);
<ide> if ((user == null) || (user.getRemoved() != null)) {
<del> throw new InvalidParameterValueException("Unable to find active user by id " + userId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find active user of specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(user);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, userId, "userId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> // check permissions
<ide>
<ide> VolumeVO volume = _volumeDao.findById(volumeId);
<ide> if (volume == null) {
<del> throw new InvalidParameterValueException("Unable to find volume with id " + volumeId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find volume with specified volumeId");
<add> // Get the VolumeVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(volume);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, volumeId, "volumeId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VolumeVO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> // perform permission check
<ide> // Extract activity only for detached volumes or for volumes whose instance is stopped
<ide> if (volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped) {
<ide> s_logger.debug("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
<del> throw new PermissionDeniedException("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
<add> PermissionDeniedException ex = new PermissionDeniedException("Invalid state of the volume with specified ID. It should be either detached or the VM should be in stopped state.");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(volume);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, volumeId, "volumeId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VolumeVO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> if (volume.getVolumeType() != Volume.Type.DATADISK) { // Datadisk dont have any template dependence.
<ide> boolean isExtractable = template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
<ide> if (!isExtractable && account != null && account.getType() != Account.ACCOUNT_TYPE_ADMIN) { // Global
<ide> // admins are always allowed to extract
<del> throw new PermissionDeniedException("The volume:" + volumeId + " is not allowed to be extracted");
<add> PermissionDeniedException ex = new PermissionDeniedException("The volume with specified volumeId is not allowed to be extracted");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(volume);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, volumeId, "volumeId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from VolumeVO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide> }
<ide> }
<ide> // Verify input parameters
<ide> InstanceGroupVO group = _vmGroupDao.findById(groupId.longValue());
<ide> if (group == null) {
<del> throw new InvalidParameterValueException("unable to find a vm group with id " + groupId);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find a vm group with specified groupId");
<add> // Get the VolumeVO object's table name.
<add> String tablename = AnnotationHelper.getTableName(group);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, groupId, "groupId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from InstanceGroupVO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> _accountMgr.checkAccess(caller, null, true, group);
<ide>
<ide> SSHKeyPairVO s = _sshKeyPairDao.findByName(owner.getAccountId(), owner.getDomainId(), cmd.getName());
<ide> if (s == null) {
<del> throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' does not exist for account " + owner.getAccountName() + " in domain id=" + owner.getDomainId());
<add> InvalidParameterValueException ex = new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' does not exist for account " + owner.getAccountName() + " in specified domain id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(owner);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, owner.getDomainId(), "domainId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> return _sshKeyPairDao.deleteByName(caller.getAccountId(), caller.getDomainId(), cmd.getName());
<ide>
<ide> UserVmVO vm = _userVmDao.findById(cmd.getId());
<ide> if (vm == null) {
<del> throw new InvalidParameterValueException("No VM with id '" + cmd.getId() + "' found.");
<add> InvalidParameterValueException ex = new InvalidParameterValueException("No VM with specified id found.");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(vm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, cmd.getId(), "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> // make permission check
<ide> _userVmDao.loadDetails(vm);
<ide> String password = vm.getDetail("Encrypted.Password");
<ide> if (password == null || password.equals("")) {
<del> throw new InvalidParameterValueException("No password for VM with id '" + cmd.getId() + "' found.");
<add> InvalidParameterValueException ex = new InvalidParameterValueException("No password for VM with specified id found.");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(vm);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, cmd.getId(), "vmId");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> return password;
<ide> HypervisorCapabilitiesVO hpvCapabilities = _hypervisorCapabilitiesDao.findById(id, true);
<ide>
<ide> if (hpvCapabilities == null) {
<del> throw new InvalidParameterValueException("unable to find the hypervisor capabilities " + id);
<add> InvalidParameterValueException ex = new InvalidParameterValueException("unable to find the hypervisor capabilities for specified id");
<add> // Get the VO object's table name.
<add> String tablename = AnnotationHelper.getTableName(hpvCapabilities);
<add> if (tablename != null) {
<add> ex.addProxyObject(tablename, id, "Id");
<add> } else {
<add> s_logger.info("\nCould not retrieve table name (annotation) from " + tablename + " VO proxy object\n");
<add> }
<add> throw ex;
<ide> }
<ide>
<ide> boolean updateNeeded = (maxGuestsLimit != null || securityGroupEnabled != null); |
|
Java | mit | 947c4131906b8023eb94f30e7392d8438379477d | 0 | liorhson/jenkins,abayer/jenkins,jenkinsci/jenkins,samatdav/jenkins,nandan4/Jenkins,thomassuckow/jenkins,maikeffi/hudson,liorhson/jenkins,KostyaSha/jenkins,ChrisA89/jenkins,intelchen/jenkins,albers/jenkins,dennisjlee/jenkins,vivek/hudson,my7seven/jenkins,mrooney/jenkins,huybrechts/hudson,lindzh/jenkins,lvotypko/jenkins3,MarkEWaite/jenkins,vivek/hudson,hashar/jenkins,viqueen/jenkins,pjanouse/jenkins,dbroady1/jenkins,vlajos/jenkins,292388900/jenkins,SenolOzer/jenkins,SebastienGllmt/jenkins,ErikVerheul/jenkins,singh88/jenkins,mcanthony/jenkins,varmenise/jenkins,aduprat/jenkins,alvarolobato/jenkins,albers/jenkins,v1v/jenkins,amruthsoft9/Jenkis,Jimilian/jenkins,ikedam/jenkins,wangyikai/jenkins,guoxu0514/jenkins,Krasnyanskiy/jenkins,paulmillar/jenkins,mcanthony/jenkins,jk47/jenkins,tastatur/jenkins,damianszczepanik/jenkins,iqstack/jenkins,ajshastri/jenkins,synopsys-arc-oss/jenkins,vijayto/jenkins,tfennelly/jenkins,elkingtonmcb/jenkins,brunocvcunha/jenkins,jzjzjzj/jenkins,Krasnyanskiy/jenkins,MarkEWaite/jenkins,vjuranek/jenkins,jpbriend/jenkins,christ66/jenkins,huybrechts/hudson,khmarbaise/jenkins,Wilfred/jenkins,6WIND/jenkins,v1v/jenkins,daspilker/jenkins,luoqii/jenkins,amruthsoft9/Jenkis,pselle/jenkins,duzifang/my-jenkins,viqueen/jenkins,kohsuke/hudson,mattclark/jenkins,ns163/jenkins,msrb/jenkins,elkingtonmcb/jenkins,arcivanov/jenkins,MichaelPranovich/jenkins_sc,FTG-003/jenkins,azweb76/jenkins,damianszczepanik/jenkins,Vlatombe/jenkins,bpzhang/jenkins,jhoblitt/jenkins,viqueen/jenkins,vlajos/jenkins,godfath3r/jenkins,yonglehou/jenkins,hudson/hudson-2.x,khmarbaise/jenkins,soenter/jenkins,jpederzolli/jenkins-1,lvotypko/jenkins3,jenkinsci/jenkins,samatdav/jenkins,oleg-nenashev/jenkins,ikedam/jenkins,Jochen-A-Fuerbacher/jenkins,vlajos/jenkins,dennisjlee/jenkins,albers/jenkins,goldchang/jenkins,jenkinsci/jenkins,kzantow/jenkins,amuniz/jenkins,amruthsoft9/Jenkis,jk47/jenkins,paulmillar/jenkins,noikiy/jenkins,ndeloof/jenk
ins,jhoblitt/jenkins,alvarolobato/jenkins,bkmeneguello/jenkins,recena/jenkins,hplatou/jenkins,vlajos/jenkins,guoxu0514/jenkins,bkmeneguello/jenkins,lvotypko/jenkins,CodeShane/jenkins,arunsingh/jenkins,gusreiber/jenkins,soenter/jenkins,oleg-nenashev/jenkins,gorcz/jenkins,aquarellian/jenkins,fbelzunc/jenkins,amuniz/jenkins,Vlatombe/jenkins,jhoblitt/jenkins,petermarcoen/jenkins,rsandell/jenkins,kohsuke/hudson,aduprat/jenkins,SenolOzer/jenkins,ydubreuil/jenkins,azweb76/jenkins,everyonce/jenkins,MadsNielsen/jtemp,6WIND/jenkins,ydubreuil/jenkins,1and1/jenkins,svanoort/jenkins,deadmoose/jenkins,liorhson/jenkins,mpeltonen/jenkins,maikeffi/hudson,kohsuke/hudson,jk47/jenkins,sathiya-mit/jenkins,iqstack/jenkins,escoem/jenkins,vvv444/jenkins,christ66/jenkins,hudson/hudson-2.x,rashmikanta-1984/jenkins,arcivanov/jenkins,mattclark/jenkins,olivergondza/jenkins,jglick/jenkins,duzifang/my-jenkins,aduprat/jenkins,tfennelly/jenkins,DoctorQ/jenkins,NehemiahMi/jenkins,FTG-003/jenkins,stephenc/jenkins,Vlatombe/jenkins,lvotypko/jenkins2,ns163/jenkins,ChrisA89/jenkins,synopsys-arc-oss/jenkins,mdonohue/jenkins,varmenise/jenkins,lindzh/jenkins,stefanbrausch/hudson-main,deadmoose/jenkins,aldaris/jenkins,1and1/jenkins,elkingtonmcb/jenkins,batmat/jenkins,Ykus/jenkins,vjuranek/jenkins,bpzhang/jenkins,daspilker/jenkins,paulwellnerbou/jenkins,csimons/jenkins,goldchang/jenkins,tfennelly/jenkins,hemantojhaa/jenkins,jcarrothers-sap/jenkins,Krasnyanskiy/jenkins,gitaccountforprashant/gittest,jpederzolli/jenkins-1,mdonohue/jenkins,DanielWeber/jenkins,mattclark/jenkins,viqueen/jenkins,aduprat/jenkins,vijayto/jenkins,6WIND/jenkins,huybrechts/hudson,Jochen-A-Fuerbacher/jenkins,alvarolobato/jenkins,Jochen-A-Fuerbacher/jenkins,godfath3r/jenkins,1and1/jenkins,protazy/jenkins,Jimilian/jenkins,jcsirot/jenkins,chbiel/jenkins,rlugojr/jenkins,wangyikai/jenkins,shahharsh/jenkins,rlugojr/jenkins,samatdav/jenkins,escoem/jenkins,recena/jenkins,paulmillar/jenkins,daniel-beck/jenkins,akshayabd/jenkins,tangkun75/jenkins,n
s163/jenkins,aldaris/jenkins,ikedam/jenkins,FarmGeek4Life/jenkins,h4ck3rm1k3/jenkins,ns163/jenkins,protazy/jenkins,KostyaSha/jenkins,rlugojr/jenkins,Vlatombe/jenkins,rlugojr/jenkins,DanielWeber/jenkins,chbiel/jenkins,pjanouse/jenkins,stephenc/jenkins,h4ck3rm1k3/jenkins,lvotypko/jenkins,fbelzunc/jenkins,SebastienGllmt/jenkins,DanielWeber/jenkins,kzantow/jenkins,lvotypko/jenkins2,KostyaSha/jenkins,daniel-beck/jenkins,intelchen/jenkins,goldchang/jenkins,thomassuckow/jenkins,tfennelly/jenkins,ikedam/jenkins,liupugong/jenkins,yonglehou/jenkins,csimons/jenkins,hashar/jenkins,DoctorQ/jenkins,iterate/coding-dojo,stefanbrausch/hudson-main,aheritier/jenkins,gusreiber/jenkins,MarkEWaite/jenkins,azweb76/jenkins,dariver/jenkins,aheritier/jenkins,mdonohue/jenkins,Jochen-A-Fuerbacher/jenkins,MichaelPranovich/jenkins_sc,jenkinsci/jenkins,arcivanov/jenkins,AustinKwang/jenkins,lvotypko/jenkins,morficus/jenkins,jzjzjzj/jenkins,msrb/jenkins,lvotypko/jenkins3,rashmikanta-1984/jenkins,my7seven/jenkins,lilyJi/jenkins,mcanthony/jenkins,rsandell/jenkins,jglick/jenkins,SenolOzer/jenkins,hplatou/jenkins,NehemiahMi/jenkins,vijayto/jenkins,huybrechts/hudson,seanlin816/jenkins,escoem/jenkins,FTG-003/jenkins,mrooney/jenkins,github-api-test-org/jenkins,jglick/jenkins,nandan4/Jenkins,damianszczepanik/jenkins,paulwellnerbou/jenkins,jk47/jenkins,duzifang/my-jenkins,vlajos/jenkins,6WIND/jenkins,kzantow/jenkins,pantheon-systems/jenkins,KostyaSha/jenkins,soenter/jenkins,lvotypko/jenkins2,CodeShane/jenkins,singh88/jenkins,Krasnyanskiy/jenkins,6WIND/jenkins,noikiy/jenkins,jglick/jenkins,recena/jenkins,keyurpatankar/hudson,liorhson/jenkins,1and1/jenkins,mattclark/jenkins,evernat/jenkins,jtnord/jenkins,seanlin816/jenkins,batmat/jenkins,patbos/jenkins,abayer/jenkins,jpbriend/jenkins,rsandell/jenkins,bkmeneguello/jenkins,petermarcoen/jenkins,SenolOzer/jenkins,shahharsh/jenkins,stefanbrausch/hudson-main,MadsNielsen/jtemp,lilyJi/jenkins,lindzh/jenkins,mrooney/jenkins,seanlin816/jenkins,MadsNielsen/jtemp,svanoor
t/jenkins,my7seven/jenkins,maikeffi/hudson,protazy/jenkins,maikeffi/hudson,seanlin816/jenkins,jzjzjzj/jenkins,brunocvcunha/jenkins,brunocvcunha/jenkins,ikedam/jenkins,soenter/jenkins,evernat/jenkins,jcsirot/jenkins,patbos/jenkins,shahharsh/jenkins,jcsirot/jenkins,andresrc/jenkins,mdonohue/jenkins,mrobinet/jenkins,mpeltonen/jenkins,morficus/jenkins,nandan4/Jenkins,pantheon-systems/jenkins,damianszczepanik/jenkins,Jimilian/jenkins,stefanbrausch/hudson-main,godfath3r/jenkins,DoctorQ/jenkins,lordofthejars/jenkins,daniel-beck/jenkins,ikedam/jenkins,morficus/jenkins,MarkEWaite/jenkins,jglick/jenkins,ndeloof/jenkins,Krasnyanskiy/jenkins,aheritier/jenkins,shahharsh/jenkins,everyonce/jenkins,Wilfred/jenkins,recena/jenkins,lilyJi/jenkins,csimons/jenkins,liupugong/jenkins,olivergondza/jenkins,csimons/jenkins,recena/jenkins,intelchen/jenkins,MichaelPranovich/jenkins_sc,msrb/jenkins,khmarbaise/jenkins,tangkun75/jenkins,mpeltonen/jenkins,thomassuckow/jenkins,khmarbaise/jenkins,iterate/coding-dojo,NehemiahMi/jenkins,aquarellian/jenkins,evernat/jenkins,dbroady1/jenkins,github-api-test-org/jenkins,FTG-003/jenkins,pjanouse/jenkins,stephenc/jenkins,elkingtonmcb/jenkins,hudson/hudson-2.x,huybrechts/hudson,luoqii/jenkins,mattclark/jenkins,azweb76/jenkins,MadsNielsen/jtemp,tangkun75/jenkins,Jimilian/jenkins,kohsuke/hudson,AustinKwang/jenkins,hemantojhaa/jenkins,DoctorQ/jenkins,ajshastri/jenkins,batmat/jenkins,amruthsoft9/Jenkis,my7seven/jenkins,rsandell/jenkins,goldchang/jenkins,aheritier/jenkins,daspilker/jenkins,vivek/hudson,ndeloof/jenkins,jtnord/jenkins,tastatur/jenkins,6WIND/jenkins,jpederzolli/jenkins-1,Vlatombe/jenkins,jcarrothers-sap/jenkins,guoxu0514/jenkins,abayer/jenkins,protazy/jenkins,rashmikanta-1984/jenkins,msrb/jenkins,mrooney/jenkins,ydubreuil/jenkins,jcarrothers-sap/jenkins,vivek/hudson,brunocvcunha/jenkins,Krasnyanskiy/jenkins,CodeShane/jenkins,alvarolobato/jenkins,pantheon-systems/jenkins,huybrechts/hudson,damianszczepanik/jenkins,luoqii/jenkins,arunsingh/jenkins,maik
effi/hudson,pantheon-systems/jenkins,svanoort/jenkins,noikiy/jenkins,v1v/jenkins,jpederzolli/jenkins-1,christ66/jenkins,svanoort/jenkins,everyonce/jenkins,aduprat/jenkins,abayer/jenkins,akshayabd/jenkins,gorcz/jenkins,keyurpatankar/hudson,jk47/jenkins,rashmikanta-1984/jenkins,soenter/jenkins,aquarellian/jenkins,andresrc/jenkins,AustinKwang/jenkins,amuniz/jenkins,luoqii/jenkins,lvotypko/jenkins2,brunocvcunha/jenkins,svanoort/jenkins,daspilker/jenkins,christ66/jenkins,svanoort/jenkins,gorcz/jenkins,dbroady1/jenkins,tfennelly/jenkins,MarkEWaite/jenkins,lvotypko/jenkins,arcivanov/jenkins,oleg-nenashev/jenkins,daspilker/jenkins,DoctorQ/jenkins,pselle/jenkins,SebastienGllmt/jenkins,khmarbaise/jenkins,gusreiber/jenkins,verbitan/jenkins,maikeffi/hudson,dennisjlee/jenkins,daspilker/jenkins,fbelzunc/jenkins,pjanouse/jenkins,albers/jenkins,SebastienGllmt/jenkins,ns163/jenkins,lilyJi/jenkins,stephenc/jenkins,ChrisA89/jenkins,duzifang/my-jenkins,jenkinsci/jenkins,ChrisA89/jenkins,everyonce/jenkins,intelchen/jenkins,alvarolobato/jenkins,wangyikai/jenkins,dennisjlee/jenkins,batmat/jenkins,noikiy/jenkins,iterate/coding-dojo,kzantow/jenkins,MadsNielsen/jtemp,jcarrothers-sap/jenkins,stephenc/jenkins,nandan4/Jenkins,292388900/jenkins,ajshastri/jenkins,shahharsh/jenkins,liupugong/jenkins,ChrisA89/jenkins,MichaelPranovich/jenkins_sc,gorcz/jenkins,mrobinet/jenkins,ydubreuil/jenkins,keyurpatankar/hudson,jzjzjzj/jenkins,hashar/jenkins,rsandell/jenkins,jtnord/jenkins,lordofthejars/jenkins,ydubreuil/jenkins,wuwen5/jenkins,noikiy/jenkins,seanlin816/jenkins,wangyikai/jenkins,mrobinet/jenkins,hplatou/jenkins,msrb/jenkins,tfennelly/jenkins,arcivanov/jenkins,github-api-test-org/jenkins,daniel-beck/jenkins,luoqii/jenkins,bpzhang/jenkins,bpzhang/jenkins,scoheb/jenkins,evernat/jenkins,Jimilian/jenkins,mpeltonen/jenkins,damianszczepanik/jenkins,mattclark/jenkins,batmat/jenkins,verbitan/jenkins,andresrc/jenkins,yonglehou/jenkins,luoqii/jenkins,sathiya-mit/jenkins,KostyaSha/jenkins,thomassuckow/jenkins
,lindzh/jenkins,scoheb/jenkins,verbitan/jenkins,MarkEWaite/jenkins,akshayabd/jenkins,lvotypko/jenkins,vijayto/jenkins,MadsNielsen/jtemp,Jochen-A-Fuerbacher/jenkins,Wilfred/jenkins,github-api-test-org/jenkins,hemantojhaa/jenkins,wuwen5/jenkins,KostyaSha/jenkins,h4ck3rm1k3/jenkins,hemantojhaa/jenkins,iterate/coding-dojo,petermarcoen/jenkins,hplatou/jenkins,MichaelPranovich/jenkins_sc,Vlatombe/jenkins,lvotypko/jenkins3,lvotypko/jenkins3,pselle/jenkins,ajshastri/jenkins,evernat/jenkins,vijayto/jenkins,vjuranek/jenkins,noikiy/jenkins,SenolOzer/jenkins,Ykus/jenkins,msrb/jenkins,Wilfred/jenkins,jtnord/jenkins,gusreiber/jenkins,andresrc/jenkins,abayer/jenkins,jpbriend/jenkins,paulwellnerbou/jenkins,vvv444/jenkins,jcsirot/jenkins,olivergondza/jenkins,kohsuke/hudson,h4ck3rm1k3/jenkins,aldaris/jenkins,albers/jenkins,arunsingh/jenkins,MadsNielsen/jtemp,singh88/jenkins,stefanbrausch/hudson-main,arunsingh/jenkins,dennisjlee/jenkins,deadmoose/jenkins,deadmoose/jenkins,vjuranek/jenkins,ndeloof/jenkins,Wilfred/jenkins,keyurpatankar/hudson,chbiel/jenkins,gitaccountforprashant/gittest,morficus/jenkins,SebastienGllmt/jenkins,jk47/jenkins,nandan4/Jenkins,protazy/jenkins,pselle/jenkins,AustinKwang/jenkins,rlugojr/jenkins,vivek/hudson,292388900/jenkins,pantheon-systems/jenkins,FarmGeek4Life/jenkins,tangkun75/jenkins,keyurpatankar/hudson,NehemiahMi/jenkins,akshayabd/jenkins,thomassuckow/jenkins,gorcz/jenkins,dbroady1/jenkins,aldaris/jenkins,ajshastri/jenkins,evernat/jenkins,wuwen5/jenkins,duzifang/my-jenkins,dbroady1/jenkins,kohsuke/hudson,bpzhang/jenkins,chbiel/jenkins,SebastienGllmt/jenkins,duzifang/my-jenkins,CodeShane/jenkins,jenkinsci/jenkins,protazy/jenkins,hashar/jenkins,fbelzunc/jenkins,paulwellnerbou/jenkins,lilyJi/jenkins,patbos/jenkins,gusreiber/jenkins,aheritier/jenkins,morficus/jenkins,jpbriend/jenkins,scoheb/jenkins,mrobinet/jenkins,godfath3r/jenkins,dbroady1/jenkins,vvv444/jenkins,patbos/jenkins,goldchang/jenkins,Ykus/jenkins,fbelzunc/jenkins,ns163/jenkins,godfath3r/jenkins,
v1v/jenkins,bkmeneguello/jenkins,NehemiahMi/jenkins,h4ck3rm1k3/jenkins,verbitan/jenkins,pselle/jenkins,stephenc/jenkins,DoctorQ/jenkins,mrooney/jenkins,elkingtonmcb/jenkins,azweb76/jenkins,rsandell/jenkins,DanielWeber/jenkins,synopsys-arc-oss/jenkins,ndeloof/jenkins,seanlin816/jenkins,wuwen5/jenkins,kohsuke/hudson,yonglehou/jenkins,yonglehou/jenkins,singh88/jenkins,gitaccountforprashant/gittest,jglick/jenkins,ajshastri/jenkins,dariver/jenkins,tangkun75/jenkins,scoheb/jenkins,ndeloof/jenkins,aldaris/jenkins,hplatou/jenkins,292388900/jenkins,olivergondza/jenkins,everyonce/jenkins,gorcz/jenkins,hudson/hudson-2.x,jcsirot/jenkins,Wilfred/jenkins,iqstack/jenkins,hashar/jenkins,petermarcoen/jenkins,rlugojr/jenkins,ErikVerheul/jenkins,pjanouse/jenkins,olivergondza/jenkins,intelchen/jenkins,nandan4/Jenkins,MarkEWaite/jenkins,samatdav/jenkins,gorcz/jenkins,hplatou/jenkins,morficus/jenkins,mdonohue/jenkins,gitaccountforprashant/gittest,amuniz/jenkins,shahharsh/jenkins,daniel-beck/jenkins,MichaelPranovich/jenkins_sc,mpeltonen/jenkins,liupugong/jenkins,FarmGeek4Life/jenkins,AustinKwang/jenkins,petermarcoen/jenkins,pantheon-systems/jenkins,tastatur/jenkins,hemantojhaa/jenkins,jzjzjzj/jenkins,singh88/jenkins,oleg-nenashev/jenkins,goldchang/jenkins,akshayabd/jenkins,verbitan/jenkins,rashmikanta-1984/jenkins,pjanouse/jenkins,github-api-test-org/jenkins,1and1/jenkins,github-api-test-org/jenkins,synopsys-arc-oss/jenkins,jenkinsci/jenkins,tangkun75/jenkins,Jochen-A-Fuerbacher/jenkins,lvotypko/jenkins2,292388900/jenkins,paulwellnerbou/jenkins,my7seven/jenkins,dariver/jenkins,jhoblitt/jenkins,paulmillar/jenkins,hudson/hudson-2.x,Wilfred/jenkins,gitaccountforprashant/gittest,intelchen/jenkins,ikedam/jenkins,mrobinet/jenkins,jcarrothers-sap/jenkins,FTG-003/jenkins,amruthsoft9/Jenkis,DanielWeber/jenkins,jpbriend/jenkins,ajshastri/jenkins,paulmillar/jenkins,chbiel/jenkins,chbiel/jenkins,jhoblitt/jenkins,bkmeneguello/jenkins,iterate/coding-dojo,samatdav/jenkins,mrobinet/jenkins,duzifang/my-je
nkins,abayer/jenkins,jpederzolli/jenkins-1,escoem/jenkins,ndeloof/jenkins,yonglehou/jenkins,daniel-beck/jenkins,liorhson/jenkins,github-api-test-org/jenkins,vlajos/jenkins,vvv444/jenkins,6WIND/jenkins,samatdav/jenkins,pjanouse/jenkins,mcanthony/jenkins,khmarbaise/jenkins,guoxu0514/jenkins,ErikVerheul/jenkins,escoem/jenkins,ErikVerheul/jenkins,MarkEWaite/jenkins,deadmoose/jenkins,aheritier/jenkins,iterate/coding-dojo,lvotypko/jenkins3,lvotypko/jenkins3,paulmillar/jenkins,FTG-003/jenkins,lordofthejars/jenkins,dennisjlee/jenkins,akshayabd/jenkins,patbos/jenkins,soenter/jenkins,keyurpatankar/hudson,deadmoose/jenkins,KostyaSha/jenkins,vvv444/jenkins,mcanthony/jenkins,vijayto/jenkins,elkingtonmcb/jenkins,recena/jenkins,jpbriend/jenkins,varmenise/jenkins,ChrisA89/jenkins,jk47/jenkins,SenolOzer/jenkins,oleg-nenashev/jenkins,aduprat/jenkins,keyurpatankar/hudson,amuniz/jenkins,292388900/jenkins,lvotypko/jenkins,Ykus/jenkins,v1v/jenkins,gorcz/jenkins,synopsys-arc-oss/jenkins,batmat/jenkins,sathiya-mit/jenkins,mrobinet/jenkins,amruthsoft9/Jenkis,AustinKwang/jenkins,noikiy/jenkins,singh88/jenkins,github-api-test-org/jenkins,ErikVerheul/jenkins,csimons/jenkins,petermarcoen/jenkins,guoxu0514/jenkins,wangyikai/jenkins,dbroady1/jenkins,arunsingh/jenkins,ErikVerheul/jenkins,jzjzjzj/jenkins,sathiya-mit/jenkins,guoxu0514/jenkins,tastatur/jenkins,vvv444/jenkins,mcanthony/jenkins,jhoblitt/jenkins,tastatur/jenkins,maikeffi/hudson,Jochen-A-Fuerbacher/jenkins,Jimilian/jenkins,lilyJi/jenkins,aquarellian/jenkins,FarmGeek4Life/jenkins,arunsingh/jenkins,liorhson/jenkins,tastatur/jenkins,gusreiber/jenkins,h4ck3rm1k3/jenkins,viqueen/jenkins,jcarrothers-sap/jenkins,escoem/jenkins,albers/jenkins,guoxu0514/jenkins,292388900/jenkins,bkmeneguello/jenkins,alvarolobato/jenkins,FTG-003/jenkins,FarmGeek4Life/jenkins,gusreiber/jenkins,dariver/jenkins,scoheb/jenkins,iterate/coding-dojo,azweb76/jenkins,abayer/jenkins,stefanbrausch/hudson-main,verbitan/jenkins,ErikVerheul/jenkins,fbelzunc/jenkins,arunsingh/je
nkins,iqstack/jenkins,Jimilian/jenkins,kzantow/jenkins,1and1/jenkins,vivek/hudson,rlugojr/jenkins,samatdav/jenkins,kzantow/jenkins,aquarellian/jenkins,sathiya-mit/jenkins,MichaelPranovich/jenkins_sc,vjuranek/jenkins,CodeShane/jenkins,viqueen/jenkins,paulwellnerbou/jenkins,mpeltonen/jenkins,jpederzolli/jenkins-1,andresrc/jenkins,akshayabd/jenkins,hudson/hudson-2.x,damianszczepanik/jenkins,scoheb/jenkins,liupugong/jenkins,jtnord/jenkins,dennisjlee/jenkins,Ykus/jenkins,pantheon-systems/jenkins,lvotypko/jenkins,pselle/jenkins,AustinKwang/jenkins,vijayto/jenkins,shahharsh/jenkins,dariver/jenkins,godfath3r/jenkins,mpeltonen/jenkins,KostyaSha/jenkins,petermarcoen/jenkins,vivek/hudson,rsandell/jenkins,liupugong/jenkins,paulwellnerbou/jenkins,wuwen5/jenkins,csimons/jenkins,albers/jenkins,Ykus/jenkins,hashar/jenkins,ikedam/jenkins,jcsirot/jenkins,tfennelly/jenkins,jcarrothers-sap/jenkins,thomassuckow/jenkins,ChrisA89/jenkins,brunocvcunha/jenkins,oleg-nenashev/jenkins,patbos/jenkins,lordofthejars/jenkins,lindzh/jenkins,sathiya-mit/jenkins,vivek/hudson,batmat/jenkins,jcarrothers-sap/jenkins,FarmGeek4Life/jenkins,seanlin816/jenkins,paulmillar/jenkins,andresrc/jenkins,keyurpatankar/hudson,jzjzjzj/jenkins,iqstack/jenkins,patbos/jenkins,synopsys-arc-oss/jenkins,bpzhang/jenkins,tastatur/jenkins,arcivanov/jenkins,SenolOzer/jenkins,hashar/jenkins,viqueen/jenkins,ydubreuil/jenkins,mrooney/jenkins,chbiel/jenkins,sathiya-mit/jenkins,Vlatombe/jenkins,aldaris/jenkins,olivergondza/jenkins,christ66/jenkins,FarmGeek4Life/jenkins,varmenise/jenkins,iqstack/jenkins,synopsys-arc-oss/jenkins,goldchang/jenkins,v1v/jenkins,pselle/jenkins,intelchen/jenkins,lordofthejars/jenkins,varmenise/jenkins,NehemiahMi/jenkins,mdonohue/jenkins,lindzh/jenkins,dariver/jenkins,DoctorQ/jenkins,azweb76/jenkins,aduprat/jenkins,jtnord/jenkins,escoem/jenkins,amuniz/jenkins,mcanthony/jenkins,huybrechts/hudson,everyonce/jenkins,oleg-nenashev/jenkins,vlajos/jenkins,bpzhang/jenkins,arcivanov/jenkins,varmenise/jenkins,DanielW
eber/jenkins,singh88/jenkins,bkmeneguello/jenkins,gitaccountforprashant/gittest,protazy/jenkins,csimons/jenkins,amuniz/jenkins,daniel-beck/jenkins,evernat/jenkins,rashmikanta-1984/jenkins,ydubreuil/jenkins,NehemiahMi/jenkins,lordofthejars/jenkins,jpbriend/jenkins,aquarellian/jenkins,khmarbaise/jenkins,rashmikanta-1984/jenkins,andresrc/jenkins,rsandell/jenkins,damianszczepanik/jenkins,mdonohue/jenkins,CodeShane/jenkins,scoheb/jenkins,v1v/jenkins,maikeffi/hudson,jzjzjzj/jenkins,kzantow/jenkins,yonglehou/jenkins,morficus/jenkins,varmenise/jenkins,vvv444/jenkins,msrb/jenkins,lordofthejars/jenkins,lindzh/jenkins,jglick/jenkins,stephenc/jenkins,wangyikai/jenkins,brunocvcunha/jenkins,svanoort/jenkins,aquarellian/jenkins,deadmoose/jenkins,lvotypko/jenkins2,wuwen5/jenkins,christ66/jenkins,liorhson/jenkins,ns163/jenkins,nandan4/Jenkins,mattclark/jenkins,DoctorQ/jenkins,lvotypko/jenkins2,goldchang/jenkins,vjuranek/jenkins,daniel-beck/jenkins,vjuranek/jenkins,SebastienGllmt/jenkins,lilyJi/jenkins,soenter/jenkins,daspilker/jenkins,hemantojhaa/jenkins,1and1/jenkins,DanielWeber/jenkins,Krasnyanskiy/jenkins,jtnord/jenkins,recena/jenkins,liupugong/jenkins,wangyikai/jenkins,jhoblitt/jenkins,mrooney/jenkins,thomassuckow/jenkins,hplatou/jenkins,verbitan/jenkins,alvarolobato/jenkins,luoqii/jenkins,dariver/jenkins,tangkun75/jenkins,my7seven/jenkins,fbelzunc/jenkins,jenkinsci/jenkins,amruthsoft9/Jenkis,olivergondza/jenkins,aheritier/jenkins,iqstack/jenkins,stefanbrausch/hudson-main,shahharsh/jenkins,christ66/jenkins,Ykus/jenkins,hemantojhaa/jenkins,elkingtonmcb/jenkins,jcsirot/jenkins,h4ck3rm1k3/jenkins,wuwen5/jenkins,everyonce/jenkins,CodeShane/jenkins,godfath3r/jenkins,gitaccountforprashant/gittest,my7seven/jenkins,jpederzolli/jenkins-1,aldaris/jenkins,kohsuke/hudson | /*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Luca Domenico Milanesio
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.tasks;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.ExtensionListView;
import hudson.ExtensionPoint;
import hudson.model.Hudson;
import hudson.model.User;
import java.util.List;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Infers e-mail addresses for the user when none is specified.
*
* <p>
* This is an extension point of Hudson. Plugins tha contribute new implementation
* of this class should put {@link Extension} on your implementation class, like this:
*
* <pre>
* @Extension
* class MyMailAddressResolver extends {@link MailAddressResolver} {
* ...
* }
* </pre>
*
* @author Kohsuke Kawaguchi
* @since 1.192
*/
public abstract class MailAddressResolver implements ExtensionPoint {
/**
* Infers e-mail address of the given user.
*
* <p>
* This method is called when a {@link User} without explicitly configured e-mail
* address is used, as an attempt to infer e-mail address.
*
* <p>
* The normal strategy is to look at {@link User#getProjects() the projects that the user
* is participating}, then use the repository information to infer the e-mail address.
*
* <p>
* When multiple resolvers are installed, they are consulted in order and
* the search will be over when an address is inferred by someone.
*
* <p>
* Since {@link MailAddressResolver} is singleton, this method can be invoked concurrently
* from multiple threads.
*
* @return
* null if the inference failed.
*/
public abstract String findMailAddressFor(User u);
public static String resolve(User u) {
LOGGER.fine("Resolving e-mail address for \""+u+"\" ID="+u.getId());
for (MailAddressResolver r : all()) {
String email = r.findMailAddressFor(u);
if(email!=null) {
LOGGER.fine(r+" resolved "+u.getId()+" to "+email);
return email;
}
}
// fall back logic
String extractedAddress = extractAddressFromId(u.getFullName());
if (extractedAddress != null)
return extractedAddress;
if(u.getFullName().contains("@"))
// this already looks like an e-mail ID
return u.getFullName();
String ds = Mailer.descriptor().getDefaultSuffix();
if(ds!=null) {
// another common pattern is "DOMAIN\person" in Windows. Only
// do this when this full name is not manually set. see HUDSON-5164
Matcher m = WINDOWS_DOMAIN_REGEXP.matcher(u.getFullName());
if (m.matches() && u.getFullName().replace('\\','_').equals(u.getId()))
return m.group(1)+ds; // user+defaultSuffix
return u.getId()+ds;
} else
return null;
}
/**
* Tries to extract an email address from the user id, or returns null
*/
private static String extractAddressFromId(String id) {
Matcher m = EMAIL_ADDRESS_REGEXP.matcher(id);
if(m.matches())
return m.group(1);
return null;
}
/**
* Matches strings like "Kohsuke Kawaguchi <[email protected]>"
* @see #extractAddressFromId(String)
*/
private static final Pattern EMAIL_ADDRESS_REGEXP = Pattern.compile("^.*<([^>]+)>.*$");
/**
* Matches something like "DOMAIN\person"
*/
private static final Pattern WINDOWS_DOMAIN_REGEXP = Pattern.compile("[^\\\\ ]+\\\\([^\\\\ ]+)");
/**
* All registered {@link MailAddressResolver} implementations.
*
* @deprecated as of 1.286
* Use {@link #all()} for read access and {@link Extension} for registration.
*/
public static final List<MailAddressResolver> LIST = ExtensionListView.createList(MailAddressResolver.class);
/**
* Returns all the registered {@link MailAddressResolver} descriptors.
*/
public static ExtensionList<MailAddressResolver> all() {
return Hudson.getInstance().getExtensionList(MailAddressResolver.class);
}
private static final Logger LOGGER = Logger.getLogger(MailAddressResolver.class.getName());
}
| core/src/main/java/hudson/tasks/MailAddressResolver.java | /*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Luca Domenico Milanesio
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.tasks;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.ExtensionListView;
import hudson.ExtensionPoint;
import hudson.model.Hudson;
import hudson.model.User;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Infers e-mail addresses for the user when none is specified.
*
* <p>
* This is an extension point of Hudson. Plugins tha contribute new implementation
* of this class should put {@link Extension} on your implementation class, like this:
*
* <pre>
* @Extension
* class MyMailAddressResolver extends {@link MailAddressResolver} {
* ...
* }
* </pre>
*
* @author Kohsuke Kawaguchi
* @since 1.192
*/
public abstract class MailAddressResolver implements ExtensionPoint {
/**
* Infers e-mail address of the given user.
*
* <p>
* This method is called when a {@link User} without explicitly configured e-mail
* address is used, as an attempt to infer e-mail address.
*
* <p>
* The normal strategy is to look at {@link User#getProjects() the projects that the user
* is participating}, then use the repository information to infer the e-mail address.
*
* <p>
* When multiple resolvers are installed, they are consulted in order and
* the search will be over when an address is inferred by someone.
*
* <p>
* Since {@link MailAddressResolver} is singleton, this method can be invoked concurrently
* from multiple threads.
*
* @return
* null if the inference failed.
*/
public abstract String findMailAddressFor(User u);
public static String resolve(User u) {
for (MailAddressResolver r : all()) {
String email = r.findMailAddressFor(u);
if(email!=null) return email;
}
// fall back logic
String extractedAddress = extractAddressFromId(u.getFullName());
if (extractedAddress != null)
return extractedAddress;
if(u.getFullName().contains("@"))
// this already looks like an e-mail ID
return u.getFullName();
String ds = Mailer.descriptor().getDefaultSuffix();
if(ds!=null) {
// another common pattern is "DOMAIN\person" in Windows. Only
// do this when this full name is not manually set. see HUDSON-5164
Matcher m = WINDOWS_DOMAIN_REGEXP.matcher(u.getFullName());
if (m.matches() && u.getFullName().replace('\\','_').equals(u.getId()))
return m.group(1)+ds; // user+defaultSuffix
return u.getId()+ds;
} else
return null;
}
/**
* Tries to extract an email address from the user id, or returns null
*/
private static String extractAddressFromId(String id) {
Matcher m = EMAIL_ADDRESS_REGEXP.matcher(id);
if(m.matches())
return m.group(1);
return null;
}
/**
* Matches strings like "Kohsuke Kawaguchi <[email protected]>"
* @see #extractAddressFromId(String)
*/
private static final Pattern EMAIL_ADDRESS_REGEXP = Pattern.compile("^.*<([^>]+)>.*$");
/**
* Matches something like "DOMAIN\person"
*/
private static final Pattern WINDOWS_DOMAIN_REGEXP = Pattern.compile("[^\\\\ ]+\\\\([^\\\\ ]+)");
/**
* All registered {@link MailAddressResolver} implementations.
*
* @deprecated as of 1.286
* Use {@link #all()} for read access and {@link Extension} for registration.
*/
public static final List<MailAddressResolver> LIST = ExtensionListView.createList(MailAddressResolver.class);
/**
* Returns all the registered {@link MailAddressResolver} descriptors.
*/
public static ExtensionList<MailAddressResolver> all() {
return Hudson.getInstance().getExtensionList(MailAddressResolver.class);
}
}
| [HUDSON-5403] adding more logging in MailAddressResolver to help us figure out what's going on.
git-svn-id: 28f34f9aa52bc55a5ddd5be9e183c5cccadc6ee4@27623 71c3de6d-444a-0410-be80-ed276b4c234a
| core/src/main/java/hudson/tasks/MailAddressResolver.java | [HUDSON-5403] adding more logging in MailAddressResolver to help us figure out what's going on. | <ide><path>ore/src/main/java/hudson/tasks/MailAddressResolver.java
<ide> import hudson.model.User;
<ide>
<ide> import java.util.List;
<add>import java.util.logging.Logger;
<ide> import java.util.regex.Matcher;
<ide> import java.util.regex.Pattern;
<ide>
<ide> public abstract String findMailAddressFor(User u);
<ide>
<ide> public static String resolve(User u) {
<add> LOGGER.fine("Resolving e-mail address for \""+u+"\" ID="+u.getId());
<add>
<ide> for (MailAddressResolver r : all()) {
<ide> String email = r.findMailAddressFor(u);
<del> if(email!=null) return email;
<add> if(email!=null) {
<add> LOGGER.fine(r+" resolved "+u.getId()+" to "+email);
<add> return email;
<add> }
<ide> }
<ide>
<ide> // fall back logic
<ide> String extractedAddress = extractAddressFromId(u.getFullName());
<ide> if (extractedAddress != null)
<del> return extractedAddress;
<add> return extractedAddress;
<ide>
<ide> if(u.getFullName().contains("@"))
<ide> // this already looks like an e-mail ID
<ide> public static ExtensionList<MailAddressResolver> all() {
<ide> return Hudson.getInstance().getExtensionList(MailAddressResolver.class);
<ide> }
<add>
<add> private static final Logger LOGGER = Logger.getLogger(MailAddressResolver.class.getName());
<ide> } |
|
JavaScript | mit | db1e558c943d953df0d45eca5770ea42c1e56f76 | 0 | uberVU/elasticboard,uberVU/elasticboard,uberVU/elasticboard | function drawIssuesActivity() {
$.getJSON(API_BASE + '/issues_activity')
.done(function (json) {
data = json.data;
var opened = data.opened;
var closed = data.closed;
$('#issues-activity').highcharts({
chart: {
type: 'areaspline'
},
title: {
text: 'Issues Burndown'
},
subtitle: {
text: '# of issues opened vs closed, monthly'
},
xAxis: {
categories: opened.map(function(e) { return e.month; })
},
yAxis: {
min: 0,
title: {
text: 'Events'
}
},
legend: {
enabled: false
},
series: [
{
name: 'Opened',
data: opened.map(function(e) { return e.value; }),
lineColor: '#FF4E50',
color: '#FF4E50'
},
{
name: 'Closed',
data: closed.map(function(e) { return e.value; }),
lineColor: '#88C425',
color: '#88C425'
}
]
});
})
.fail(logFailure);
}
var issuesListTemplate = Handlebars.compile($('#issues-list-template').html());
function drawUntouchedIssues() {
$.getJSON(API_BASE + '/untouched_issues')
.done(function(json) {
var data = json.data;
var context = {
issues: data,
title: "Untouched Issues"
}
var $list = $(issuesListTemplate(context));
$('#untouched-issues').empty().append($list);
})
.fail(logFailure);
}
function drawInactiveIssues() {
$.getJSON(API_BASE + '/inactive_issues')
.done(function(json) {
var data = json.data;
var context = {
issues: data,
title: "Inactive Issues (2 weeks)"
}
var $list = $(issuesListTemplate(context));
$('#inactive-issues').empty().append($list);
})
.fail(logFailure);
}
function drawAvgIssueTime() {
makeXYGraph('#avg-issue-time', {
endpoint: '/avg_issue_time',
type: 'spline',
title: "Average Issue Time",
subtitle: "From the time it's opened until it's closed",
keyName: 'month',
valueName: function(e) {
var m = moment.duration(e.value, 'seconds');
return {
name: m.humanize(),
y: Math.ceil(e.value / 3600)
};
},
yTitle: 'Hours',
label: 'hours'
});
}
function makeD3Graph(issues_data) {
var nodes = [];
var links = [];
for (number in issues_data) {
var user_nodes = [];
var issue = issues_data[number];
issue.users.forEach(function(user) {
var node = {
type: 'user',
login: user.login
}
nodes.push(node);
user_nodes.push(node);
});
var issue_node = {
type: 'issue',
number: issue.issue.number
}
nodes.push(issue_node);
// make links
user_nodes.forEach(function(node) {
var link = {
source: node,
target: issue_node
};
links.push(link);
});
}
return {
nodes: nodes,
links: links
}
}
function drawIssuesInvolvement() {
$.getJSON(API_BASE + '/issues_involvement')
.done(function(json) {
var $container = $('#issues-involvement-graph-container');
var width = $container.width();
var height = $container.height();
var radius = 25;
var ratio = 1.5;
var graph_data = makeD3Graph(json.data);
var nodes = graph_data.nodes;
var links = graph_data.links;
var force = d3.layout.force()
.charge(-350)
.linkDistance(80)
.gravity(0.05)
.size([width, height]);
var svg = d3.select('#issues-involvement-graph-container').append('svg')
.attr('width', width)
.attr('height', height);
force.nodes(nodes)
.links(links)
.start();
var link = svg.selectAll(".link")
.data(links)
.enter().append("line")
.attr("class", "link")
.style("stroke-width", 2);
var gnodes = svg.selectAll('g.gnode')
.data(nodes)
.enter()
.append('g')
.classed('gnode', true)
var nodes = gnodes.append('circle')
.attr('class', 'node')
.attr("r", function(d) {
if (d.type == 'issue') {
return radius;
}
return radius / ratio;
})
.style("fill", function(d) {
if (d.type == 'issue') {
return '#FF4E50';
}
return '#88C425';
})
.call(force.drag);
var labels = gnodes.append('text')
.attr('dx', function (d) {
if (d.type == 'issue') {
return radius + 3;
}
return radius / ratio + 3;
})
.attr('dy', 6)
.style('fill', 'white')
.text(function(d) {
if (d.type == 'issue') {
return '#' + d.number;
}
return d.login;
});
force.on("tick", function() {
link.attr("x1", function(d) { return d.source.x; })
.attr("y1", function(d) { return d.source.y; })
.attr("x2", function(d) { return d.target.x; })
.attr("y2", function(d) { return d.target.y; });
gnodes.attr("transform", function(d) {
return 'translate(' + [d.x, d.y] + ')';
});
// http://mbostock.github.io/d3/talk/20110921/bounding.html
var rl = radius + 5;
gnodes.attr("cx", function(d) { return d.x = Math.max(rl, Math.min(width - rl, d.x)); })
.attr("cy", function(d) { return d.y = Math.max(rl, Math.min(height - rl, d.y)); });
});
})
.fail(logFailure);
}
function drawInsights () {
drawIssuesActivity();
drawUntouchedIssues();
drawInactiveIssues();
drawAvgIssueTime();
drawIssuesInvolvement();
}
| dashboard/js/insights.js | function drawIssuesActivity() {
$.getJSON(API_BASE + '/issues_activity')
.done(function (json) {
data = json.data;
var opened = data.opened;
var closed = data.closed;
$('#issues-activity').highcharts({
chart: {
type: 'areaspline'
},
title: {
text: 'Issues Burndown'
},
subtitle: {
text: '# of issues opened vs closed, monthly'
},
xAxis: {
categories: opened.map(function(e) { return e.month; })
},
yAxis: {
min: 0,
title: {
text: 'Events'
}
},
legend: {
enabled: false
},
series: [
{
name: 'Opened',
data: opened.map(function(e) { return e.value; }),
lineColor: '#FF4E50',
color: '#FF4E50'
},
{
name: 'Closed',
data: closed.map(function(e) { return e.value; }),
lineColor: '#88C425',
color: '#88C425'
}
]
});
})
.fail(logFailure);
}
var issuesListTemplate = Handlebars.compile($('#issues-list-template').html());
function drawUntouchedIssues() {
$.getJSON(API_BASE + '/untouched_issues')
.done(function(json) {
var data = json.data;
var context = {
issues: data,
title: "Untouched Issues"
}
var $list = $(issuesListTemplate(context));
$('#untouched-issues').empty().append($list);
})
.fail(logFailure);
}
function drawInactiveIssues() {
$.getJSON(API_BASE + '/inactive_issues')
.done(function(json) {
var data = json.data;
var context = {
issues: data,
title: "Inactive Issues (2 weeks)"
}
var $list = $(issuesListTemplate(context));
$('#inactive-issues').empty().append($list);
})
.fail(logFailure);
}
function drawAvgIssueTime() {
makeXYGraph('#avg-issue-time', {
endpoint: '/avg_issue_time',
type: 'spline',
title: "Average Issue Time",
subtitle: "From the time it's opened until it's closed",
keyName: 'month',
valueName: function(e) {
var m = moment.duration(e.value, 'seconds');
return {
name: m.humanize(),
y: Math.ceil(e.value / 3600)
};
},
yTitle: 'Hours',
label: 'hours'
});
}
function makeD3Graph(issues_data) {
var nodes = [];
var links = [];
for (number in issues_data) {
var user_nodes = [];
var issue = issues_data[number];
issue.users.forEach(function(user) {
var node = {
type: 'user',
login: user.login
}
nodes.push(node);
user_nodes.push(node);
});
var issue_node = {
type: 'issue',
number: issue.issue.number
}
nodes.push(issue_node);
// make links
user_nodes.forEach(function(node) {
var link = {
source: node,
target: issue_node
};
links.push(link);
});
}
return {
nodes: nodes,
links: links
}
}
function drawIssuesInvolvement() {
$.getJSON(API_BASE + '/issues_involvement')
.done(function(json) {
var $container = $('#issues-involvement-graph-container');
var width = $container.width();
var height = $container.height();
var radius = 25;
var ratio = 1.5;
var graph_data = makeD3Graph(json.data);
var nodes = graph_data.nodes;
var links = graph_data.links;
var force = d3.layout.force()
.charge(-350)
.linkDistance(80)
.gravity(0.05)
.size([width, height]);
var svg = d3.select('#issues-involvement-graph-container').append('svg')
.attr('width', width)
.attr('height', height);
force.nodes(nodes)
.links(links)
.start();
var link = svg.selectAll(".link")
.data(links)
.enter().append("line")
.attr("class", "link")
.style("stroke-width", 2);
var gnodes = svg.selectAll('g.gnode')
.data(nodes)
.enter()
.append('g')
.classed('gnode', true)
var nodes = gnodes.append('circle')
.attr('class', 'node')
.attr("r", function(d) {
if (d.type == 'issue') {
return radius;
}
return radius / ratio;
})
.style("fill", function(d) {
if (d.type == 'issue') {
return '#FF4E50';
}
return '#88C425';
})
.call(force.drag);
var labels = gnodes.append('text')
.attr('dx', function (d) {
if (d.type == 'issue') {
return radius + 3;
}
return radius / ratio + 3;
})
.attr('dy', 6)
.style('fill', 'white')
.text(function(d) {
if (d.type == 'issue') {
return '#' + d.number;
}
return d.login;
});
force.on("tick", function() {
link.attr("x1", function(d) { return d.source.x; })
.attr("y1", function(d) { return d.source.y; })
.attr("x2", function(d) { return d.target.x; })
.attr("y2", function(d) { return d.target.y; });
gnodes.attr("transform", function(d) {
return 'translate(' + [d.x, d.y] + ')';
});
var rl = radius + 5;
gnodes.attr("cx", function(d) { return d.x = Math.max(rl, Math.min(width - rl, d.x)); })
.attr("cy", function(d) { return d.y = Math.max(rl, Math.min(height - rl, d.y)); });
});
})
.fail(logFailure);
}
function drawInsights () {
drawIssuesActivity();
drawUntouchedIssues();
drawInactiveIssues();
drawAvgIssueTime();
drawIssuesInvolvement();
}
| mention bounding box inspiration
| dashboard/js/insights.js | mention bounding box inspiration | <ide><path>ashboard/js/insights.js
<ide> });
<ide>
<ide>
<add> // http://mbostock.github.io/d3/talk/20110921/bounding.html
<ide> var rl = radius + 5;
<ide> gnodes.attr("cx", function(d) { return d.x = Math.max(rl, Math.min(width - rl, d.x)); })
<ide> .attr("cy", function(d) { return d.y = Math.max(rl, Math.min(height - rl, d.y)); }); |
|
Java | bsd-3-clause | 7ce726825ed50270b08458dcc5d16b4def1d2318 | 0 | ccagnoli/gremlin,ccagnoli/gremlin,tinkerpop/gremlin,cesarmarinhorj/gremlin,samanalysis/gremlin,cesarmarinhorj/gremlin,samanalysis/gremlin,tinkerpop/gremlin | package com.tinkerpop.gremlin.java;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Query;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.pipes.filter.IntervalFilterPipe;
import com.tinkerpop.gremlin.pipes.filter.PropertyFilterPipe;
import com.tinkerpop.gremlin.pipes.transform.EdgesVerticesPipe;
import com.tinkerpop.gremlin.pipes.transform.QueryPipe;
import com.tinkerpop.gremlin.pipes.transform.VerticesEdgesPipe;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.filter.FilterPipe;
import com.tinkerpop.pipes.util.FluentUtility;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author Marko A. Rodriguez (http://markorodriguez.com)
*/
public class GremlinFluentUtility extends FluentUtility {
public static List<Pipe> removeEdgeQueryOptimizationPipes(final GremlinPipeline pipeline) {
int numberedStep = -1;
int pipelineSize = pipeline.size();
for (int i = pipelineSize - 1; i >= 0; i--) {
final Pipe pipe = pipeline.get(i);
if (pipe instanceof VerticesEdgesPipe) {
numberedStep = pipelineSize - i;
break;
} else if (pipe instanceof PropertyFilterPipe || pipe instanceof IntervalFilterPipe || pipe instanceof EdgesVerticesPipe) {
continue;
} else {
break;
}
}
if (numberedStep != -1)
return FluentUtility.removePreviousPipes(pipeline, numberedStep);
else
return Collections.emptyList();
}
public static GremlinPipeline optimizePipelineForVertexQueries(final GremlinPipeline pipeline) {
final List<QueryPipe.HasContainer> hasContainers = new ArrayList<QueryPipe.HasContainer>();
final List<QueryPipe.IntervalContainer> intervalContainers = new ArrayList<QueryPipe.IntervalContainer>();
String[] labels = new String[]{};
Direction direction = Direction.BOTH;
Class elementClass = Edge.class;
List<Pipe> removedPipes = removeEdgeQueryOptimizationPipes(pipeline);
for (final Pipe pipe : removedPipes) {
if (pipe instanceof PropertyFilterPipe) {
final PropertyFilterPipe temp = (PropertyFilterPipe) pipe;
hasContainers.add(new QueryPipe.HasContainer(temp.getKey(), temp.getValue(), convertFromFilter(temp.getFilter())));
} else if (pipe instanceof IntervalFilterPipe) {
final IntervalFilterPipe temp = (IntervalFilterPipe) pipe;
intervalContainers.add(new QueryPipe.IntervalContainer(temp.getKey(), temp.getStartValue(), temp.getEndValue()));
} else if (pipe instanceof VerticesEdgesPipe) {
labels = ((VerticesEdgesPipe) pipe).getLabels();
direction = ((VerticesEdgesPipe) pipe).getDirection();
} else if (pipe instanceof EdgesVerticesPipe) {
elementClass = Vertex.class;
}
}
if (removedPipes.size() > 0)
pipeline.addPipe(new QueryPipe(elementClass, direction, hasContainers, intervalContainers, labels));
return pipeline;
}
private static Query.Compare convertFromFilter(final FilterPipe.Filter filter) {
if (filter.equals(FilterPipe.Filter.EQUAL))
return Query.Compare.EQUAL;
else if (filter.equals(FilterPipe.Filter.GREATER_THAN))
return Query.Compare.GREATER_THAN;
else if (filter.equals(FilterPipe.Filter.GREATER_THAN_EQUAL))
return Query.Compare.GREATER_THAN_EQUAL;
else if (filter.equals(FilterPipe.Filter.LESS_THAN))
return Query.Compare.LESS_THAN;
else if (filter.equals(FilterPipe.Filter.LESS_THAN_EQUAL))
return Query.Compare.LESS_THAN_EQUAL;
else if (filter.equals(FilterPipe.Filter.NOT_EQUAL))
return Query.Compare.NOT_EQUAL;
else
throw new IllegalStateException("The provided filter is not a legal filter: " + filter);
}
}
| gremlin-java/src/main/java/com/tinkerpop/gremlin/java/GremlinFluentUtility.java | package com.tinkerpop.gremlin.java;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Query;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.pipes.filter.IntervalFilterPipe;
import com.tinkerpop.gremlin.pipes.filter.PropertyFilterPipe;
import com.tinkerpop.gremlin.pipes.transform.EdgesVerticesPipe;
import com.tinkerpop.gremlin.pipes.transform.QueryPipe;
import com.tinkerpop.gremlin.pipes.transform.VerticesEdgesPipe;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.filter.FilterPipe;
import com.tinkerpop.pipes.util.FluentUtility;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author Marko A. Rodriguez (http://markorodriguez.com)
*/
public class GremlinFluentUtility extends FluentUtility {
public static List<Pipe> removeEdgeQueryOptimizationPipes(final GremlinPipeline pipeline) {
int numberedStep = -1;
int pipelineSize = pipeline.size();
for (int i = pipelineSize - 1; i >= 0; i--) {
final Pipe pipe = pipeline.get(i);
if (pipe instanceof VerticesEdgesPipe) {
numberedStep = pipelineSize - i;
break;
} else if (pipe instanceof PropertyFilterPipe || pipe instanceof EdgesVerticesPipe) {
continue;
} else {
break;
}
}
if (numberedStep != -1)
return FluentUtility.removePreviousPipes(pipeline, numberedStep);
else
return Collections.emptyList();
}
public static GremlinPipeline optimizePipelineForVertexQueries(final GremlinPipeline pipeline) {
final List<QueryPipe.HasContainer> hasContainers = new ArrayList<QueryPipe.HasContainer>();
final List<QueryPipe.IntervalContainer> intervalContainers = new ArrayList<QueryPipe.IntervalContainer>();
String[] labels = new String[]{};
Direction direction = Direction.BOTH;
Class elementClass = Edge.class;
List<Pipe> removedPipes = removeEdgeQueryOptimizationPipes(pipeline);
for (final Pipe pipe : removedPipes) {
if (pipe instanceof PropertyFilterPipe) {
final PropertyFilterPipe temp = (PropertyFilterPipe) pipe;
hasContainers.add(new QueryPipe.HasContainer(temp.getKey(), temp.getValue(), convertFromFilter(temp.getFilter())));
} else if (pipe instanceof IntervalFilterPipe) {
final IntervalFilterPipe temp = (IntervalFilterPipe) pipe;
intervalContainers.add(new QueryPipe.IntervalContainer(temp.getKey(), temp.getStartValue(), temp.getEndValue()));
} else if (pipe instanceof VerticesEdgesPipe) {
labels = ((VerticesEdgesPipe) pipe).getLabels();
direction = ((VerticesEdgesPipe) pipe).getDirection();
} else if (pipe instanceof EdgesVerticesPipe) {
elementClass = Vertex.class;
}
}
if (removedPipes.size() > 0)
pipeline.addPipe(new QueryPipe(elementClass, direction, hasContainers, intervalContainers, labels));
return pipeline;
}
private static Query.Compare convertFromFilter(final FilterPipe.Filter filter) {
if (filter.equals(FilterPipe.Filter.EQUAL))
return Query.Compare.EQUAL;
else if (filter.equals(FilterPipe.Filter.GREATER_THAN))
return Query.Compare.GREATER_THAN;
else if (filter.equals(FilterPipe.Filter.GREATER_THAN_EQUAL))
return Query.Compare.GREATER_THAN_EQUAL;
else if (filter.equals(FilterPipe.Filter.LESS_THAN))
return Query.Compare.LESS_THAN;
else if (filter.equals(FilterPipe.Filter.LESS_THAN_EQUAL))
return Query.Compare.LESS_THAN_EQUAL;
else if (filter.equals(FilterPipe.Filter.NOT_EQUAL))
return Query.Compare.NOT_EQUAL;
else
throw new IllegalStateException("The provided filter is not a legal filter: " + filter);
}
}
| fixed IntervalPipe bug.
| gremlin-java/src/main/java/com/tinkerpop/gremlin/java/GremlinFluentUtility.java | fixed IntervalPipe bug. | <ide><path>remlin-java/src/main/java/com/tinkerpop/gremlin/java/GremlinFluentUtility.java
<ide> if (pipe instanceof VerticesEdgesPipe) {
<ide> numberedStep = pipelineSize - i;
<ide> break;
<del> } else if (pipe instanceof PropertyFilterPipe || pipe instanceof EdgesVerticesPipe) {
<add> } else if (pipe instanceof PropertyFilterPipe || pipe instanceof IntervalFilterPipe || pipe instanceof EdgesVerticesPipe) {
<ide> continue;
<ide> } else {
<ide> break; |
|
JavaScript | mit | de8005f52f3e69d8bcc5f6fda00c49e63afaa798 | 0 | papandreou/html5,Chiens/html5,aredridel/html5,brianmcd/cloudbrowser-html5 | var HTML5 = exports.HTML5 = require('../html5');
var events = require('events');
require('./treebuilder');
require('./tokenizer');
var Phase = require('./parser/phase').Phase;
var Parser = HTML5.Parser = function HTML5Parser(options) {
events.EventEmitter.apply(this);
this.strict = false;
this.errors = [];
var phase;
this.__defineSetter__('phase', function(p) {
phase = p;
if(!p) throw( new Error("Can't leave phase undefined"));
if(!p instanceof Function) throw( new Error("Not a function"));
});
this.__defineGetter__('phase', function() {
return phase;
});
if(options) for(o in options) {
this[o] = options[o];
}
if(!this.document) {
var l2 = require('jsdom/level2/core').dom.level2.core;
var browser = require('jsdom/browser')
var DOM = browser.browserAugmentation(l2)
this.document = new DOM.Document('html');
}
this.tree = new HTML5.TreeBuilder(this.document);
}
Parser.prototype = new events.EventEmitter;
Parser.prototype.parse = function(source) {
if(!source) throw(new Error("No source to parse"));
HTML5.debug('parser.parse', source)
this.tokenizer = new HTML5.Tokenizer(source, this.document);
this.setup();
this.tokenizer.tokenize();
}
Parser.prototype.parse_fragment = function(source, element) {
HTML5.debug('parser.parse_fragment', source, element)
// FIXME: Check to make sure element is inside document
this.tokenizer = new HTML5.Tokenizer(source, this.document);
if(element && element.ownerDocument) {
this.setup(element.tagName, null);
this.tree.open_elements.push(element);
this.tree.root_pointer = element;
} else if(element) {
this.setup(element, null);
this.tree.open_elements.push(this.tree.html_pointer);
this.tree.open_elements.push(this.tree.body_pointer);
this.tree.root_pointer = this.tree.body_pointer;
} else {
this.setup('div', null);
this.tree.open_elements.push(this.tree.html_pointer);
this.tree.open_elements.push(this.tree.body_pointer);
this.tree.root_pointer = this.tree.body_pointer;
}
this.tokenizer.tokenize();
}
Object.defineProperty(Parser.prototype, 'fragment', {
get: function() {
return this.tree.getFragment();
}
});
Parser.prototype.newPhase = function(name) {
this.phase = new PHASES[name](this, this.tree);
HTML5.debug('parser.newPhase', name)
this.phaseName = name;
}
Parser.prototype.do_token = function(token) {
var method = 'process' + token.type;
switch(token.type) {
case 'Characters':
case 'SpaceCharacters':
case 'Comment':
this.phase[method](token.data);
break;
case 'StartTag':
this.phase[method](token.name, token.data, token.self_closing);
break;
case 'EndTag':
this.phase[method](token.name);
break;
case 'Doctype':
this.phase[method](token.name, token.publicId, token.systemId, token.correct);
break;
case 'EOF':
this.phase[method]();
break;
default:
this.parse_error(token.data, token.datavars)
}
}
Parser.prototype.setup = function(container, encoding) {
this.tokenizer.addListener('token', function(t) {
return function(token) { t.do_token(token); };
}(this));
this.tokenizer.addListener('end', function(t) {
return function() { t.emit('end'); };
}(this));
this.emit('setup', this);
var inner_html = !!container;
container = container || 'div';
this.tree.reset();
this.first_start_tag = false;
this.errors = [];
// FIXME: instantiate tokenizer and plumb. Pass lowercasing options.
if(inner_html) {
this.inner_html = container.toLowerCase();
switch(this.inner_html) {
case 'title':
case 'textarea':
this.tokenizer.content_model = HTML5.Models.RCDATA;
break;
case 'style':
case 'script':
case 'xmp':
case 'iframe':
case 'noembed':
case 'noframes':
case 'noscript':
this.tokenizer.content_model = HTML5.Models.CDATA;
break;
case 'plaintext':
this.tokenizer.content_model = HTML5.Models.PLAINTEXT;
break;
default:
this.tokenizer.content_model = HTML5.Models.PCDATA;
}
this.tree.create_structure_elements(inner_html);
switch(inner_html) {
case 'html':
this.newPhase('afterHtml')
break;
case 'head':
this.newPhase('inHead')
break;
default:
this.newPhase('inBody')
}
this.reset_insertion_mode(this.inner_html);
} else {
this.inner_html = false;
this.newPhase('initial');
}
this.last_phase = null;
}
Parser.prototype.parse_error = function(code, data) {
// FIXME: this.errors.push([this.tokenizer.position, code, data]);
this.errors.push([code, data]);
if(this.strict) throw(this.errors.last());
}
Parser.prototype.reset_insertion_mode = function(context) {
var last = false;
var node_name;
for(var i = this.tree.open_elements.length - 1; i >= 0; i--) {
var node = this.tree.open_elements[i]
node_name = node.tagName.toLowerCase()
if(node == this.tree.open_elements[0]) {
last = true
if(node_name != 'th' && node_name != 'td') {
// XXX
// assert.ok(this.inner_html);
node_name = context.tagName;
}
}
if(!(node_name == 'select' || node_name == 'colgroup' || node_name == 'head' || node_name == 'frameset')) {
// XXX
// assert.ok(this.inner_html)
}
if(HTML5.TAGMODES[node_name]) {
this.newPhase(HTML5.TAGMODES[node_name]);
} else if(node_name == 'html') {
this.newPhase(this.tree.head_pointer ? 'afterHead' : 'beforeHead');
} else if(last) {
this.newPhase('inBody');
} else {
continue;
}
break;
}
}
Parser.prototype._ = function(str) {
return(str);
}
| lib/html5/parser.js | var HTML5 = exports.HTML5 = require('../html5');
var events = require('events');
require('./treebuilder');
require('./tokenizer');
var Phase = require('./parser/phase').Phase;
var Parser = HTML5.Parser = function HTML5Parser(options) {
events.EventEmitter.apply(this);
this.strict = false;
this.errors = [];
var phase;
this.__defineSetter__('phase', function(p) {
phase = p;
if(!p) throw( new Error("Can't leave phase undefined"));
if(!p instanceof Function) throw( new Error("Not a function"));
});
this.__defineGetter__('phase', function() {
return phase;
});
if(options) for(o in options) {
this[o] = options[o];
}
if(!this.document) {
var l2 = require('jsdom/level2/core').dom.level2.core;
var browser = require('jsdom/browser')
var DOM = browser.browserAugmentation(l2)
this.document = new DOM.Document('html');
}
this.tree = new HTML5.TreeBuilder(this.document);
}
Parser.prototype = new events.EventEmitter;
Parser.prototype.parse = function(source) {
if(!source) throw(new Error("No source to parse"));
HTML5.debug('parser.parse', source)
this.tokenizer = new HTML5.Tokenizer(source, this.document);
this.setup();
this.tokenizer.tokenize();
}
Parser.prototype.parse_fragment = function(source, element) {
HTML5.debug('parser.parse_fragment', source, element)
// FIXME: Check to make sure element is inside document
this.tokenizer = new HTML5.Tokenizer(source, this.document);
if(element && element.ownerDocument) {
this.setup(element.tagName, null);
this.tree.open_elements.push(element);
this.tree.root_pointer = element;
} else if(element) {
this.setup(element, null);
this.tree.open_elements.push(this.tree.html_pointer);
this.tree.open_elements.push(this.tree.body_pointer);
this.tree.root_pointer = this.tree.body_pointer;
} else {
this.setup('div', null);
this.tree.open_elements.push(this.tree.html_pointer);
this.tree.open_elements.push(this.tree.body_pointer);
this.tree.root_pointer = this.tree.body_pointer;
}
this.tokenizer.tokenize();
}
Object.defineProperty(Parser.prototype, 'fragment', {
get: function() {
return this.tree.getFragment();
}
});
Parser.prototype.newPhase = function(name) {
this.phase = new PHASES[name](this, this.tree);
HTML5.debug('parser.newPhase', name)
this.phaseName = name;
}
Parser.prototype.do_token = function(token) {
var method = 'process' + token.type;
switch(token.type) {
case 'Characters':
case 'SpaceCharacters':
case 'Comment':
this.phase[method](token.data);
break;
case 'StartTag':
this.phase[method](token.name, token.data, token.self_closing);
break;
case 'EndTag':
this.phase[method](token.name);
break;
case 'Doctype':
this.phase[method](token.name, token.publicId, token.systemId, token.correct);
break;
case 'EOF':
this.phase[method]();
break;
default:
this.parse_error(token.data, token.datavars)
}
}
Parser.prototype.setup = function(container, encoding) {
this.tokenizer.addListener('token', function(t) {
return function(token) { t.do_token(token); };
}(this));
this.tokenizer.addListener('end', function(t) {
return function() { t.emit('end'); };
}(this));
var inner_html = !!container;
container = container || 'div';
this.tree.reset();
this.first_start_tag = false;
this.errors = [];
// FIXME: instantiate tokenizer and plumb. Pass lowercasing options.
if(inner_html) {
this.inner_html = container.toLowerCase();
switch(this.inner_html) {
case 'title':
case 'textarea':
this.tokenizer.content_model = HTML5.Models.RCDATA;
break;
case 'style':
case 'script':
case 'xmp':
case 'iframe':
case 'noembed':
case 'noframes':
case 'noscript':
this.tokenizer.content_model = HTML5.Models.CDATA;
break;
case 'plaintext':
this.tokenizer.content_model = HTML5.Models.PLAINTEXT;
break;
default:
this.tokenizer.content_model = HTML5.Models.PCDATA;
}
this.tree.create_structure_elements(inner_html);
switch(inner_html) {
case 'html':
this.newPhase('afterHtml')
break;
case 'head':
this.newPhase('inHead')
break;
default:
this.newPhase('inBody')
}
this.reset_insertion_mode(this.inner_html);
} else {
this.inner_html = false;
this.newPhase('initial');
}
this.last_phase = null;
}
Parser.prototype.parse_error = function(code, data) {
// FIXME: this.errors.push([this.tokenizer.position, code, data]);
this.errors.push([code, data]);
if(this.strict) throw(this.errors.last());
}
Parser.prototype.reset_insertion_mode = function(context) {
var last = false;
var node_name;
for(var i = this.tree.open_elements.length - 1; i >= 0; i--) {
var node = this.tree.open_elements[i]
node_name = node.tagName.toLowerCase()
if(node == this.tree.open_elements[0]) {
last = true
if(node_name != 'th' && node_name != 'td') {
// XXX
// assert.ok(this.inner_html);
node_name = context.tagName;
}
}
if(!(node_name == 'select' || node_name == 'colgroup' || node_name == 'head' || node_name == 'frameset')) {
// XXX
// assert.ok(this.inner_html)
}
if(HTML5.TAGMODES[node_name]) {
this.newPhase(HTML5.TAGMODES[node_name]);
} else if(node_name == 'html') {
this.newPhase(this.tree.head_pointer ? 'afterHead' : 'beforeHead');
} else if(last) {
this.newPhase('inBody');
} else {
continue;
}
break;
}
}
Parser.prototype._ = function(str) {
return(str);
}
| Emit a 'setup' event so one can attach to the tokenizer
| lib/html5/parser.js | Emit a 'setup' event so one can attach to the tokenizer | <ide><path>ib/html5/parser.js
<ide> this.tokenizer.addListener('end', function(t) {
<ide> return function() { t.emit('end'); };
<ide> }(this));
<add> this.emit('setup', this);
<ide>
<ide> var inner_html = !!container;
<ide> container = container || 'div'; |
|
Java | lgpl-2.1 | 474fb8d4c3a41784752b7b46c2e74186872208be | 0 | xasx/wildfly,rhusar/wildfly,wildfly/wildfly,wildfly/wildfly,wildfly/wildfly,golovnin/wildfly,rhusar/wildfly,tadamski/wildfly,iweiss/wildfly,pferraro/wildfly,99sono/wildfly,jstourac/wildfly,pferraro/wildfly,iweiss/wildfly,pferraro/wildfly,tadamski/wildfly,99sono/wildfly,golovnin/wildfly,tadamski/wildfly,jstourac/wildfly,xasx/wildfly,pferraro/wildfly,99sono/wildfly,iweiss/wildfly,iweiss/wildfly,xasx/wildfly,rhusar/wildfly,tomazzupan/wildfly,tomazzupan/wildfly,wildfly/wildfly,jstourac/wildfly,tomazzupan/wildfly,golovnin/wildfly,jstourac/wildfly,rhusar/wildfly | /*
* JBoss, Home of Professional Open Source
* Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.jboss.as.controller.operations.common;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.NAME;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.PATH;
import static org.jboss.as.controller.descriptions.common.PathDescription.RELATIVE_TO;
import java.util.List;
import java.util.Locale;
import org.jboss.as.controller.AbstractAddStepHandler;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.ServiceVerificationHandler;
import org.jboss.as.controller.descriptions.DescriptionProvider;
import org.jboss.as.controller.descriptions.common.PathDescription;
import org.jboss.as.controller.operations.validation.ModelTypeValidator;
import org.jboss.as.controller.operations.validation.ParametersValidator;
import org.jboss.as.controller.operations.validation.StringLengthValidator;
import org.jboss.as.controller.services.path.AbsolutePathService;
import org.jboss.as.controller.services.path.RelativePathService;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.ModelType;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceTarget;
/**
* Handler for the path resource add operation.
*
* @author Brian Stansberry (c) 2011 Red Hat Inc.
*/
public class PathAddHandler extends AbstractAddStepHandler implements DescriptionProvider {
public static final String OPERATION_NAME = ADD;
public static ModelNode getAddPathOperation(ModelNode address, ModelNode path, ModelNode relativeTo) {
ModelNode op = new ModelNode();
op.get(OP).set(OPERATION_NAME);
op.get(OP_ADDR).set(address);
if (path.isDefined()) {
op.get(PATH).set(path);
}
if (relativeTo.isDefined()) {
op.get(RELATIVE_TO).set(relativeTo);
}
return op;
}
public static final PathAddHandler NAMED_INSTANCE = new PathAddHandler(false, false);
public static final PathAddHandler SPECIFIED_INSTANCE = new PathAddHandler(true, true);
public static final PathAddHandler SPECIFIED_NO_SERVICES_INSTANCE = new PathAddHandler(true, false);
private final boolean specified;
private final boolean services;
private final ParametersValidator validator = new ParametersValidator();
/**
* Create the PathAddHandler
*/
protected PathAddHandler(boolean specified, boolean services) {
this.specified = specified;
this.services = services;
this.validator.registerValidator(PATH, new StringLengthValidator(1, !specified));
this.validator.registerValidator(RELATIVE_TO, new ModelTypeValidator(ModelType.STRING, true));
}
protected void populateModel(ModelNode operation, ModelNode model) throws OperationFailedException {
validator.validate(operation);
PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR));
String name = address.getLastElement().getValue();
model.get(NAME).set(name);
model.get(PATH).set(operation.get(PATH));
model.get(RELATIVE_TO).set(operation.get(RELATIVE_TO));
}
@Override
protected boolean requiresRuntime(OperationContext context) {
return services;
}
protected void performRuntime(OperationContext context, ModelNode operation, ModelNode model,
ServiceVerificationHandler verificationHandler, List<ServiceController<?>> newControllers) {
//This will only get called for the services case
PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR));
String name = address.getLastElement().getValue();
ModelNode pathNode = operation.get(PATH);
ModelNode relNode = operation.get(RELATIVE_TO);
String path = pathNode.isDefined() ? pathNode.asString() : null;
String relativeTo = relNode.isDefined() ? relNode.asString() : null;
final ServiceTarget target = context.getServiceTarget();
if (relativeTo == null) {
newControllers.add(AbsolutePathService.addService(name, path, target, newControllers, verificationHandler));
} else {
newControllers.add(RelativePathService.addService(name, path, relativeTo, target, newControllers, verificationHandler));
}
}
protected boolean requiresRuntimeVerification() {
return false;
}
@Override
public ModelNode getModelDescription(Locale locale) {
return specified ? PathDescription.getSpecifiedPathAddOperation(locale) : PathDescription.getNamedPathAddOperation(locale);
}
}
| controller/src/main/java/org/jboss/as/controller/operations/common/PathAddHandler.java | /*
* JBoss, Home of Professional Open Source
* Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.jboss.as.controller.operations.common;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.NAME;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.PATH;
import static org.jboss.as.controller.descriptions.common.PathDescription.RELATIVE_TO;
import java.util.List;
import java.util.Locale;
import org.jboss.as.controller.AbstractAddStepHandler;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.ServiceVerificationHandler;
import org.jboss.as.controller.descriptions.DescriptionProvider;
import org.jboss.as.controller.descriptions.common.PathDescription;
import org.jboss.as.controller.operations.validation.ModelTypeValidator;
import org.jboss.as.controller.operations.validation.ParametersValidator;
import org.jboss.as.controller.operations.validation.StringLengthValidator;
import org.jboss.as.controller.services.path.AbsolutePathService;
import org.jboss.as.controller.services.path.RelativePathService;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.ModelType;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceTarget;
/**
* Handler for the path resource add operation.
*
* @author Brian Stansberry (c) 2011 Red Hat Inc.
*/
public class PathAddHandler extends AbstractAddStepHandler implements DescriptionProvider {
public static final String OPERATION_NAME = ADD;
public static ModelNode getAddPathOperation(ModelNode address, ModelNode path, ModelNode relativeTo) {
ModelNode op = new ModelNode();
op.get(OP).set(OPERATION_NAME);
op.get(OP_ADDR).set(address);
if (path.isDefined()) {
op.get(PATH).set(path);
}
if (relativeTo.isDefined()) {
op.get(RELATIVE_TO).set(relativeTo);
}
return op;
}
public static final PathAddHandler NAMED_INSTANCE = new PathAddHandler(false, false);
public static final PathAddHandler SPECIFIED_INSTANCE = new PathAddHandler(true, true);
public static final PathAddHandler SPECIFIED_NO_SERVICES_INSTANCE = new PathAddHandler(true, false);
private final boolean specified;
private final boolean services;
private final ParametersValidator validator = new ParametersValidator();
/**
* Create the PathAddHandler
*/
protected PathAddHandler(boolean specified, boolean services) {
this.specified = specified;
this.services = services;
this.validator.registerValidator(PATH, new StringLengthValidator(1, !specified));
this.validator.registerValidator(RELATIVE_TO, new ModelTypeValidator(ModelType.STRING, true));
}
protected void populateModel(ModelNode operation, ModelNode model) throws OperationFailedException {
validator.validate(operation);
PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR));
String name = address.getLastElement().getValue();
model.get(NAME).set(name);
model.get(PATH).set(operation.get(PATH));
model.get(RELATIVE_TO).set(operation.get(RELATIVE_TO));
}
@Override
protected boolean requiresRuntime(OperationContext context) {
return services;
}
protected void performRuntime(OperationContext context, ModelNode operation, ModelNode model,
ServiceVerificationHandler verificationHandler, List<ServiceController<?>> newControllers) {
//This will only get called for the services case
System.out.println("----> Performing runtime for " + operation);
PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR));
String name = address.getLastElement().getValue();
ModelNode pathNode = operation.get(PATH);
ModelNode relNode = operation.get(RELATIVE_TO);
String path = pathNode.isDefined() ? pathNode.asString() : null;
String relativeTo = relNode.isDefined() ? relNode.asString() : null;
final ServiceTarget target = context.getServiceTarget();
if (relativeTo == null) {
newControllers.add(AbsolutePathService.addService(name, path, target, newControllers, verificationHandler));
} else {
newControllers.add(RelativePathService.addService(name, path, relativeTo, target, newControllers, verificationHandler));
}
}
protected boolean requiresRuntimeVerification() {
return false;
}
@Override
public ModelNode getModelDescription(Locale locale) {
return specified ? PathDescription.getSpecifiedPathAddOperation(locale) : PathDescription.getNamedPathAddOperation(locale);
}
}
| [AS7-3856] System.out.println
| controller/src/main/java/org/jboss/as/controller/operations/common/PathAddHandler.java | [AS7-3856] System.out.println | <ide><path>ontroller/src/main/java/org/jboss/as/controller/operations/common/PathAddHandler.java
<ide> protected void performRuntime(OperationContext context, ModelNode operation, ModelNode model,
<ide> ServiceVerificationHandler verificationHandler, List<ServiceController<?>> newControllers) {
<ide> //This will only get called for the services case
<del>
<del> System.out.println("----> Performing runtime for " + operation);
<del>
<ide> PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR));
<ide> String name = address.getLastElement().getValue();
<ide> ModelNode pathNode = operation.get(PATH); |
|
Java | mit | 3ae41a9f631df81f4cb11cd2d9dfc0dd5dd5f524 | 0 | Alanyanbc/sharebook,Alanyanbc/sharebook | package com.activeandroid.util;
/*
* Copyright (C) 2010 Michael Pardo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import android.database.Cursor;
import android.os.Build;
import android.text.TextUtils;
import com.activeandroid.Cache;
import com.activeandroid.Model;
import com.activeandroid.TableInfo;
import com.activeandroid.annotation.Column;
import com.activeandroid.serializer.TypeSerializer;
public final class SQLiteUtils {
//////////////////////////////////////////////////////////////////////////////////////
// ENUMERATIONS
//////////////////////////////////////////////////////////////////////////////////////
public enum SQLiteType {
INTEGER, REAL, TEXT, BLOB
}
//////////////////////////////////////////////////////////////////////////////////////
// PUBLIC CONSTANTS
//////////////////////////////////////////////////////////////////////////////////////
public static final boolean FOREIGN_KEYS_SUPPORTED = Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO;
//////////////////////////////////////////////////////////////////////////////////////
// PRIVATE CONTSANTS
//////////////////////////////////////////////////////////////////////////////////////
@SuppressWarnings("serial")
private static final HashMap<Class<?>, SQLiteType> TYPE_MAP = new HashMap<Class<?>, SQLiteType>() {
{
put(byte.class, SQLiteType.INTEGER);
put(short.class, SQLiteType.INTEGER);
put(int.class, SQLiteType.INTEGER);
put(long.class, SQLiteType.INTEGER);
put(float.class, SQLiteType.REAL);
put(double.class, SQLiteType.REAL);
put(boolean.class, SQLiteType.INTEGER);
put(char.class, SQLiteType.TEXT);
put(byte[].class, SQLiteType.BLOB);
put(Byte.class, SQLiteType.INTEGER);
put(Short.class, SQLiteType.INTEGER);
put(Integer.class, SQLiteType.INTEGER);
put(Long.class, SQLiteType.INTEGER);
put(Float.class, SQLiteType.REAL);
put(Double.class, SQLiteType.REAL);
put(Boolean.class, SQLiteType.INTEGER);
put(Character.class, SQLiteType.TEXT);
put(String.class, SQLiteType.TEXT);
put(Byte[].class, SQLiteType.BLOB);
}
};
//////////////////////////////////////////////////////////////////////////////////////
// PUBLIC METHODS
//////////////////////////////////////////////////////////////////////////////////////
public static void execSql(String sql) {
Cache.openDatabase().execSQL(sql);
}
public static void execSql(String sql, Object[] bindArgs) {
Cache.openDatabase().execSQL(sql, bindArgs);
}
public static <T extends Model> List<T> rawQuery(Class<? extends Model> type, String sql, String[] selectionArgs) {
Cursor cursor = Cache.openDatabase().rawQuery(sql, selectionArgs);
List<T> entities = processCursor(type, cursor);
cursor.close();
return entities;
}
public static <T extends Model> T rawQuerySingle(Class<? extends Model> type, String sql, String[] selectionArgs) {
List<T> entities = rawQuery(type, sql, selectionArgs);
if (entities.size() > 0) {
return entities.get(0);
}
return null;
}
// Database creation
public static String createTableDefinition(TableInfo tableInfo) {
final ArrayList<String> definitions = new ArrayList<String>();
for (Field field : tableInfo.getFields()) {
String definition = createColumnDefinition(tableInfo, field);
if (!TextUtils.isEmpty(definition)) {
definitions.add(definition);
}
}
return String.format("CREATE TABLE IF NOT EXISTS %s (%s);", tableInfo.getTableName(),
TextUtils.join(", ", definitions));
}
@SuppressWarnings("unchecked")
public static String createColumnDefinition(TableInfo tableInfo, Field field) {
String definition = null;
Class<?> type = field.getType();
final String name = tableInfo.getColumnName(field);
final TypeSerializer typeSerializer = Cache.getParserForType(field.getType());
final Column column = field.getAnnotation(Column.class);
if (typeSerializer != null) {
type = typeSerializer.getSerializedType();
}
if (TYPE_MAP.containsKey(type)) {
definition = name + " " + TYPE_MAP.get(type).toString();
}
else if (ReflectionUtils.isModel(type)) {
definition = name + " " + SQLiteType.INTEGER.toString();
}
else if (ReflectionUtils.isSubclassOf(type, Enum.class)) {
definition = name + " " + SQLiteType.TEXT.toString();
}
if (definition != null) {
if (column.length() > -1) {
definition += "(" + column.length() + ")";
}
if (name.equals("Id")) {
definition += " PRIMARY KEY AUTOINCREMENT";
}
if (column.notNull()) {
definition += " NOT NULL ON CONFLICT " + column.onNullConflict().toString();
}
if (column.unique()) {
definition += " UNIQUE ON CONFLICT " + column.onUniqueConflict().toString();
}
if (FOREIGN_KEYS_SUPPORTED && ReflectionUtils.isModel(type)) {
definition += " REFERENCES " + Cache.getTableInfo((Class<? extends Model>) type).getTableName() + "(Id)";
definition += " ON DELETE " + column.onDelete().toString().replace("_", " ");
definition += " ON UPDATE " + column.onUpdate().toString().replace("_", " ");
}
}
else {
Log.e("No type mapping for: " + type.toString());
}
return definition;
}
@SuppressWarnings("unchecked")
public static <T extends Model> List<T> processCursor(Class<? extends Model> type, Cursor cursor) {
final List<T> entities = new ArrayList<T>();
try {
Constructor<?> entityConstructor = type.getConstructor();
if (cursor.moveToFirst()) {
do {
// TODO: Investigate entity cache leak
T entity = (T) entityConstructor.newInstance();
((Model) entity).loadFromCursor(type, cursor);
entities.add(entity);
}
while (cursor.moveToNext());
}
}
catch (Exception e) {
Log.e("Failed to process cursor.", e);
}
return entities;
}
} | src/com/activeandroid/util/SQLiteUtils.java | package com.activeandroid.util;
/*
* Copyright (C) 2010 Michael Pardo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import android.database.Cursor;
import android.os.Build;
import android.text.TextUtils;
import com.activeandroid.Cache;
import com.activeandroid.Model;
import com.activeandroid.TableInfo;
import com.activeandroid.annotation.Column;
import com.activeandroid.serializer.TypeSerializer;
public final class SQLiteUtils {
//////////////////////////////////////////////////////////////////////////////////////
// ENUMERATIONS
//////////////////////////////////////////////////////////////////////////////////////
public enum SQLiteType {
INTEGER, REAL, TEXT, BLOB
}
//////////////////////////////////////////////////////////////////////////////////////
// PUBLIC CONSTANTS
//////////////////////////////////////////////////////////////////////////////////////
public static final boolean FOREIGN_KEYS_SUPPORTED = Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO;
//////////////////////////////////////////////////////////////////////////////////////
// PRIVATE CONTSANTS
//////////////////////////////////////////////////////////////////////////////////////
@SuppressWarnings("serial")
private static final HashMap<Class<?>, SQLiteType> TYPE_MAP = new HashMap<Class<?>, SQLiteType>() {
{
put(byte.class, SQLiteType.INTEGER);
put(short.class, SQLiteType.INTEGER);
put(int.class, SQLiteType.INTEGER);
put(long.class, SQLiteType.INTEGER);
put(float.class, SQLiteType.REAL);
put(double.class, SQLiteType.REAL);
put(boolean.class, SQLiteType.INTEGER);
put(char.class, SQLiteType.TEXT);
put(byte[].class, SQLiteType.BLOB);
put(Byte.class, SQLiteType.INTEGER);
put(Short.class, SQLiteType.INTEGER);
put(Integer.class, SQLiteType.INTEGER);
put(Long.class, SQLiteType.INTEGER);
put(Float.class, SQLiteType.REAL);
put(Double.class, SQLiteType.REAL);
put(Boolean.class, SQLiteType.INTEGER);
put(Character.class, SQLiteType.TEXT);
put(String.class, SQLiteType.TEXT);
put(Byte[].class, SQLiteType.BLOB);
}
};
//////////////////////////////////////////////////////////////////////////////////////
// PUBLIC METHODS
//////////////////////////////////////////////////////////////////////////////////////
public static void execSql(String sql) {
Cache.openDatabase().execSQL(sql);
}
public static void execSql(String sql, Object[] bindArgs) {
Cache.openDatabase().execSQL(sql, bindArgs);
}
public static <T extends Model> List<T> rawQuery(Class<? extends Model> type, String sql, String[] selectionArgs) {
Cursor cursor = Cache.openDatabase().rawQuery(sql, selectionArgs);
List<T> entities = processCursor(type, cursor);
cursor.close();
return entities;
}
public static <T extends Model> T rawQuerySingle(Class<? extends Model> type, String sql, String[] selectionArgs) {
List<T> entities = rawQuery(type, sql, selectionArgs);
if (entities.size() > 0) {
return entities.get(0);
}
return null;
}
// Database creation
public static String createTableDefinition(TableInfo tableInfo) {
final ArrayList<String> definitions = new ArrayList<String>();
for (Field field : tableInfo.getFields()) {
String definition = createColumnDefinition(tableInfo, field);
if (!TextUtils.isEmpty(definition)) {
definitions.add(definition);
}
}
return String.format("CREATE TABLE IF NOT EXISTS %s (%s);", tableInfo.getTableName(),
TextUtils.join(", ", definitions));
}
public static String createColumnDefinition(TableInfo tableInfo, Field field) {
String definition = null;
Class<?> type = field.getType();
final String name = tableInfo.getColumnName(field);
final TypeSerializer typeSerializer = Cache.getParserForType(field.getType());
final Column column = field.getAnnotation(Column.class);
if (typeSerializer != null) {
type = typeSerializer.getSerializedType();
}
if (TYPE_MAP.containsKey(type)) {
definition = name + " " + TYPE_MAP.get(type).toString();
}
else if (ReflectionUtils.isModel(type)) {
definition = name + " " + SQLiteType.INTEGER.toString();
}
else if (ReflectionUtils.isSubclassOf(type, Enum.class)) {
definition = name + " " + SQLiteType.TEXT.toString();
}
if (definition != null) {
if (column.length() > -1) {
definition += "(" + column.length() + ")";
}
if (name.equals("Id")) {
definition += " PRIMARY KEY AUTOINCREMENT";
}
if (column.notNull()) {
definition += " NOT NULL ON CONFLICT " + column.onNullConflict().toString();
}
if (column.unique()) {
definition += " UNIQUE ON CONFLICT " + column.onUniqueConflict().toString();
}
if (FOREIGN_KEYS_SUPPORTED && ReflectionUtils.isModel(type)) {
definition += " REFERENCES " + tableInfo.getTableName() + "(Id)";
definition += " ON DELETE " + column.onDelete().toString().replace("_", " ");
definition += " ON UPDATE " + column.onUpdate().toString().replace("_", " ");
}
}
else {
Log.e("No type mapping for: " + type.toString());
}
return definition;
}
@SuppressWarnings("unchecked")
public static <T extends Model> List<T> processCursor(Class<? extends Model> type, Cursor cursor) {
final List<T> entities = new ArrayList<T>();
try {
Constructor<?> entityConstructor = type.getConstructor();
if (cursor.moveToFirst()) {
do {
// TODO: Investigate entity cache leak
T entity = (T) entityConstructor.newInstance();
((Model) entity).loadFromCursor(type, cursor);
entities.add(entity);
}
while (cursor.moveToNext());
}
}
catch (Exception e) {
Log.e("Failed to process cursor.", e);
}
return entities;
}
} | Reference correct table
| src/com/activeandroid/util/SQLiteUtils.java | Reference correct table | <ide><path>rc/com/activeandroid/util/SQLiteUtils.java
<ide> TextUtils.join(", ", definitions));
<ide> }
<ide>
<add> @SuppressWarnings("unchecked")
<ide> public static String createColumnDefinition(TableInfo tableInfo, Field field) {
<ide> String definition = null;
<ide>
<ide> }
<ide>
<ide> if (FOREIGN_KEYS_SUPPORTED && ReflectionUtils.isModel(type)) {
<del> definition += " REFERENCES " + tableInfo.getTableName() + "(Id)";
<add> definition += " REFERENCES " + Cache.getTableInfo((Class<? extends Model>) type).getTableName() + "(Id)";
<ide> definition += " ON DELETE " + column.onDelete().toString().replace("_", " ");
<ide> definition += " ON UPDATE " + column.onUpdate().toString().replace("_", " ");
<ide> } |
|
Java | apache-2.0 | f9f360f507121185707ae48cb6c6f76c0f31b84e | 0 | phax/ph-commons | /**
* Copyright (C) 2014-2015 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.commons.xml;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import javax.xml.XMLConstants;
import javax.xml.namespace.QName;
import org.w3c.dom.Attr;
import org.w3c.dom.CharacterData;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.EntityReference;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import com.helger.commons.ValueEnforcer;
import com.helger.commons.annotations.Nonempty;
import com.helger.commons.annotations.PresentForCodeCoverage;
import com.helger.commons.collections.ArrayHelper;
import com.helger.commons.collections.ContainerHelper;
import com.helger.commons.collections.iterate.IIterableIterator;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* This class contains multiple XML utility methods.
*
* @author Philip Helger
*/
@Immutable
public final class XMLHelper
{
@PresentForCodeCoverage
@SuppressWarnings ("unused")
private static final XMLHelper s_aInstance = new XMLHelper ();
private XMLHelper ()
{}
/**
* Get the first direct child element of the passed element.
*
* @param aStartNode
* The element to start searching.
* @return <code>null</code> if the passed element does not have any direct
* child element.
*/
@Nullable
public static Element getFirstChildElement (@Nonnull final Node aStartNode)
{
final NodeList aNodeList = aStartNode.getChildNodes ();
final int nLen = aNodeList.getLength ();
for (int i = 0; i < nLen; ++i)
{
final Node aNode = aNodeList.item (i);
if (aNode.getNodeType () == Node.ELEMENT_NODE)
return (Element) aNode;
}
return null;
}
/**
* Check if the passed node has at least one direct child element or not.
*
* @param aStartNode
* The parent element to be searched. May not be <code>null</code>.
* @return <code>true</code> if the passed node has at least one child
* element, <code>false</code> otherwise.
*/
public static boolean hasChildElementNodes (@Nonnull final Node aStartNode)
{
return getFirstChildElement (aStartNode) != null;
}
/**
* Search all child nodes of the given for the first element that has the
* specified tag name.
*
* @param aStartNode
* The parent element to be searched. May not be <code>null</code>.
* @param sName
* The tag name to search.
* @return <code>null</code> if the parent element has no such child element.
*/
@Nullable
public static Element getFirstChildElementOfName (@Nonnull final Node aStartNode, @Nullable final String sName)
{
final NodeList aNodeList = aStartNode.getChildNodes ();
final int nLen = aNodeList.getLength ();
for (int i = 0; i < nLen; ++i)
{
final Node aNode = aNodeList.item (i);
if (aNode.getNodeType () == Node.ELEMENT_NODE)
{
final Element aElement = (Element) aNode;
if (aElement.getTagName ().equals (sName))
return aElement;
}
}
return null;
}
/**
* Get the owner document of the passed node. If the node itself is a
* document, only a cast is performed.
*
* @param aNode
* The node to get the document from. May be <code>null</code>.
* @return <code>null</code> if the passed node was <code>null</code>.
*/
@Nullable
public static Document getOwnerDocument (@Nullable final Node aNode)
{
return aNode == null ? null : aNode instanceof Document ? (Document) aNode : aNode.getOwnerDocument ();
}
@Nonnull
public static Node append (@Nonnull final Node aParentNode, @Nullable final Object aChild)
{
ValueEnforcer.notNull (aParentNode, "ParentNode");
if (aChild != null)
if (aChild instanceof Document)
{
// Special handling for Document comes first, as this is a special case
// of "Node"
// Cannot add complete documents!
append (aParentNode, ((Document) aChild).getDocumentElement ());
}
else
if (aChild instanceof Node)
{
// directly append Node
final Node aChildNode = (Node) aChild;
final Document aParentDoc = getOwnerDocument (aParentNode);
if (getOwnerDocument (aChildNode).equals (aParentDoc))
{
// Nodes have the same parent
aParentNode.appendChild (aChildNode);
}
else
{
// Node to be added belongs to a different document
aParentNode.appendChild (aParentDoc.adoptNode (aChildNode.cloneNode (true)));
}
}
else
if (aChild instanceof String)
{
// append a string node
aParentNode.appendChild (getOwnerDocument (aParentNode).createTextNode ((String) aChild));
}
else
if (aChild instanceof Iterable <?>)
{
// it's a nested collection -> recursion
for (final Object aSubChild : (Iterable <?>) aChild)
append (aParentNode, aSubChild);
}
else
if (ArrayHelper.isArray (aChild))
{
// it's a nested collection -> recursion
for (final Object aSubChild : (Object []) aChild)
append (aParentNode, aSubChild);
}
else
{
// unsupported type
throw new IllegalArgumentException ("Passed object cannot be appended to a DOMNode (type=" +
aChild.getClass ().getName () +
".");
}
return aParentNode;
}
public static void append (@Nonnull final Node aSrcNode, @Nonnull final Collection <?> aNodesToAppend)
{
for (final Object aNode : aNodesToAppend)
append (aSrcNode, aNode);
}
@Nonnegative
public static int getDirectChildElementCountNoNS (@Nullable final Element aParent)
{
return aParent == null ? 0 : ContainerHelper.getSize (getChildElementIteratorNoNS (aParent));
}
@Nonnegative
public static int getDirectChildElementCountNoNS (@Nullable final Element aParent,
@Nonnull @Nonempty final String sTagName)
{
return aParent == null ? 0 : ContainerHelper.getSize (getChildElementIteratorNoNS (aParent, sTagName));
}
@Nonnegative
public static int getDirectChildElementCountNS (@Nullable final Element aParent, @Nullable final String sNamespaceURI)
{
return aParent == null ? 0 : ContainerHelper.getSize (getChildElementIteratorNS (aParent, sNamespaceURI));
}
@Nonnegative
public static int getDirectChildElementCountNS (@Nullable final Element aParent,
@Nullable final String sNamespaceURI,
@Nonnull @Nonempty final String sLocalName)
{
return aParent == null ? 0
: ContainerHelper.getSize (getChildElementIteratorNS (aParent, sNamespaceURI, sLocalName));
}
/**
* Get an iterator over all child elements that have no namespace.
*
* @param aStartNode
* the parent element
* @return a non-null Iterator
*/
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNoNS (@Nonnull final Node aStartNode)
{
return new ChildElementIterator (aStartNode, FilterElementWithoutNamespace.getInstance ());
}
/**
* Get an iterator over all child elements that have no namespace and the
* desired tag name.
*
* @param aStartNode
* the parent element
* @param sTagName
* the name of the tag that is desired
* @return a non-null Iterator
* @throws IllegalArgumentException
* if the passed tag name is null or empty
*/
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNoNS (@Nonnull final Node aStartNode,
@Nonnull @Nonempty final String sTagName)
{
ValueEnforcer.notEmpty (sTagName, "TagName");
return new ChildElementIterator (aStartNode, new FilterElementWithTagName (sTagName));
}
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNS (@Nonnull final Node aStartNode,
@Nullable final String sNamespaceURI)
{
return new ChildElementIterator (aStartNode, new FilterElementWithNamespace (sNamespaceURI));
}
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNS (@Nonnull final Node aStartNode,
@Nullable final String sNamespaceURI,
@Nonnull @Nonempty final String sLocalName)
{
ValueEnforcer.notEmpty (sLocalName, "LocalName");
return new ChildElementIterator (aStartNode, new FilterElementWithNamespaceAndLocalName (sNamespaceURI, sLocalName));
}
public static boolean hasNamespaceURI (@Nullable final Node aNode, @Nullable final String sNamespaceURI)
{
final String sNSURI = aNode == null ? null : aNode.getNamespaceURI ();
return sNSURI != null && sNSURI.equals (sNamespaceURI);
}
/**
* Shortcut for {@link #getPathToNode(Node, String)} using "/" as the
* separator.
*
* @param aNode
* The node to check.
* @return A non-<code>null</code> path.
*/
@Nonnull
public static String getPathToNode (@Nonnull final Node aNode)
{
return getPathToNode (aNode, "/");
}
/**
* Get the path from root node to the passed node. This includes all nodes up
* to the document node!
*
* @param aNode
* The node to start. May not be <code>null</code>.
* @param sSep
* The separator string to use. May not be <code>null</code>.
* @return The path to the node.
*/
@Nonnull
@SuppressFBWarnings ("IL_INFINITE_LOOP")
public static String getPathToNode (@Nonnull final Node aNode, @Nonnull final String sSep)
{
ValueEnforcer.notNull (aNode, "Node");
ValueEnforcer.notNull (sSep, "Separator");
final StringBuilder aRet = new StringBuilder ();
Node aCurNode = aNode;
while (aCurNode != null)
{
final StringBuilder aName = new StringBuilder (aCurNode.getNodeName ());
if (aCurNode.getNodeType () == Node.ELEMENT_NODE && aCurNode.getParentNode () != null)
{
// get index of my current element
final Element aCurElement = (Element) aCurNode;
int nIndex = 0;
// For all elements of the parent node
for (final Element x : new ChildElementIterator (aCurNode.getParentNode ()))
{
if (x == aCurNode)// NOPMD
break;
if (x.getTagName ().equals (aCurElement.getTagName ()))
++nIndex;
}
aName.append ('[').append (nIndex).append (']');
}
aRet.insert (0, sSep).insert (0, aName);
// goto parent
aCurNode = aCurNode.getParentNode ();
}
return aRet.toString ();
}
/**
* Shortcut for {@link #getPathToNode2(Node,String)} using "/" as the
* separator.
*
* @param aNode
* The node to check.
* @return A non-<code>null</code> path.
*/
@Nonnull
public static String getPathToNode2 (@Nonnull final Node aNode)
{
return getPathToNode2 (aNode, "/");
}
/**
* Get the path from root node to the passed node. This includes all nodes but
* excluding the document node!
*
* @param aNode
* The node to start. May not be <code>null</code>.
* @param sSep
* The separator string to use. May not be <code>null</code>.
* @return The path to the node.
*/
@Nonnull
public static String getPathToNode2 (@Nonnull final Node aNode, @Nonnull final String sSep)
{
ValueEnforcer.notNull (aNode, "Node");
ValueEnforcer.notNull (sSep, "Separator");
final StringBuilder aRet = new StringBuilder ();
Node aCurNode = aNode;
while (aCurNode != null)
{
if (aCurNode.getNodeType () == Node.DOCUMENT_NODE && aRet.length () > 0)
{
// Avoid printing the content of the document node, if something else is
// already present
// Add leading separator
aRet.insert (0, sSep);
break;
}
final StringBuilder aName = new StringBuilder (aCurNode.getNodeName ());
if (aCurNode.getNodeType () == Node.ELEMENT_NODE &&
aCurNode.getParentNode () != null &&
aCurNode.getParentNode ().getNodeType () == Node.ELEMENT_NODE)
{
// get index of current element in parent element
final Element aCurElement = (Element) aCurNode;
int nIndex = 0;
int nMatchingIndex = -1;
for (final Element x : new ChildElementIterator (aCurNode.getParentNode ()))
{
if (x == aCurNode)// NOPMD
nMatchingIndex = nIndex;
if (x.getTagName ().equals (aCurElement.getTagName ()))
++nIndex;
}
if (nMatchingIndex < 0)
throw new IllegalStateException ("Failed to find Node at parent");
if (nIndex > 1)
{
// Append index only, if more than one element is present
aName.append ('[').append (nMatchingIndex).append (']');
}
}
if (aRet.length () > 0)
{
// Avoid trailing separator
aRet.insert (0, sSep);
}
aRet.insert (0, aName);
// goto parent
aCurNode = aCurNode.getParentNode ();
}
return aRet.toString ();
}
/**
* Remove all child nodes of the given node.
*
* @param aElement
* The element whose children are to be removed.
*/
public static void removeAllChildElements (@Nonnull final Element aElement)
{
while (aElement.getChildNodes ().getLength () > 0)
aElement.removeChild (aElement.getChildNodes ().item (0));
}
/**
* Check if the passed node is a text node. This includes all nodes derived
* from {@link CharacterData} which are not {@link Comment} nodes as well as
* {@link EntityReference} nodes.
*
* @param aNode
* The node to be checked.
* @return <code>true</code> if the passed node is a text node,
* <code>false</code> otherwise.
*/
public static boolean isTextNode (@Nullable final Node aNode)
{
return (aNode instanceof CharacterData && !(aNode instanceof Comment)) || aNode instanceof EntityReference;
}
/**
* Get the content of the first Text child element of the passed element.
*
* @param aStartNode
* the element to scan for a TextNode child
* @return <code>null</code> if the element contains no text node as child
*/
@Nullable
public static String getFirstChildText (@Nullable final Node aStartNode)
{
if (aStartNode != null)
{
final NodeList aNodeList = aStartNode.getChildNodes ();
final int nLen = aNodeList.getLength ();
for (int i = 0; i < nLen; ++i)
{
final Node aNode = aNodeList.item (i);
if (aNode instanceof Text)
{
final Text aText = (Text) aNode;
// ignore whitespace-only content
if (!aText.isElementContentWhitespace ())
return aText.getData ();
}
}
}
return null;
}
/**
* The latest version of XercesJ 2.9 returns an empty string for non existing
* attributes. To differentiate between empty attributes and non-existing
* attributes, this method returns null for non existing attributes.
*
* @param aElement
* the source element to get the attribute from
* @param sAttrName
* the name of the attribute to query
* @return <code>null</code> if the attribute does not exists, the string
* value otherwise
*/
@Nullable
public static String getAttributeValue (@Nonnull final Element aElement, @Nonnull final String sAttrName)
{
return getAttributeValue (aElement, sAttrName, null);
}
/**
* The latest version of XercesJ 2.9 returns an empty string for non existing
* attributes. To differentiate between empty attributes and non-existing
* attributes, this method returns a default value for non existing
* attributes.
*
* @param aElement
* the source element to get the attribute from. May not be
* <code>null</code>.
* @param sAttrName
* the name of the attribute to query. May not be <code>null</code>.
* @param sDefault
* the value to be returned if the attribute is not present.
* @return the default value if the attribute does not exists, the string
* value otherwise
*/
@Nullable
public static String getAttributeValue (@Nonnull final Element aElement,
@Nonnull final String sAttrName,
@Nullable final String sDefault)
{
final Attr aAttr = aElement.getAttributeNode (sAttrName);
return aAttr == null ? sDefault : aAttr.getValue ();
}
@Nullable
public static Map <String, String> getAllAttributesAsMap (@Nullable final Element aSrcNode)
{
if (aSrcNode != null)
{
final NamedNodeMap aNNM = aSrcNode.getAttributes ();
if (aNNM != null)
{
final Map <String, String> aMap = new LinkedHashMap <String, String> (aNNM.getLength ());
final int nMax = aNNM.getLength ();
for (int i = 0; i < nMax; ++i)
{
final Attr aAttr = (Attr) aNNM.item (i);
aMap.put (aAttr.getName (), aAttr.getValue ());
}
return aMap;
}
}
return null;
}
/**
* Get the full qualified attribute name to use for the given namespace
* prefix. The result will e.g. be <code>xmlns</code> or
* <code>xmlns:foo</code>.
*
* @param sNSPrefix
* The namespace prefix to build the attribute name from. May be
* <code>null</code> or empty.
* @return If the namespace prefix is empty (if it equals
* {@link XMLConstants#DEFAULT_NS_PREFIX} or <code>null</code>) than
* "xmlns" is returned, else "xmlns:<i>prefix</i>" is returned.
*/
@Nonnull
@Deprecated
public static String getXMLNSAttrName (@Nullable final String sNSPrefix)
{
if (sNSPrefix != null && sNSPrefix.contains (CXML.XML_PREFIX_NAMESPACE_SEP_STR))
throw new IllegalArgumentException ("prefix is invalid: " + sNSPrefix);
if (sNSPrefix == null || sNSPrefix.equals (XMLConstants.DEFAULT_NS_PREFIX))
return CXML.XML_ATTR_XMLNS;
return CXML.XML_ATTR_XMLNS_WITH_SEP + sNSPrefix;
}
/**
* Get the full qualified attribute name to use for the given namespace
* prefix. The result will e.g. be <code>xmlns</code> or
* <code>xmlns:foo</code>.
*
* @param sNSPrefix
* The namespace prefix to build the attribute name from. May be
* <code>null</code> or empty.
* @return If the namespace prefix is empty (if it equals
* {@link XMLConstants#DEFAULT_NS_PREFIX} or <code>null</code>) than
* "xmlns" is returned, else "xmlns:<i>prefix</i>" is returned.
*/
@Nonnull
public static QName getXMLNSAttrQName (@Nullable final String sNSPrefix)
{
if (sNSPrefix != null && sNSPrefix.contains (CXML.XML_PREFIX_NAMESPACE_SEP_STR))
throw new IllegalArgumentException ("prefix is invalid: " + sNSPrefix);
if (sNSPrefix == null || sNSPrefix.equals (XMLConstants.DEFAULT_NS_PREFIX))
return new QName (CXML.XML_ATTR_XMLNS);
return new QName (XMLConstants.XMLNS_ATTRIBUTE_NS_URI, sNSPrefix, CXML.XML_ATTR_XMLNS);
}
@Nullable
public static String getNamespaceURI (@Nullable final Node aNode)
{
if (aNode instanceof Document)
return getNamespaceURI (((Document) aNode).getDocumentElement ());
if (aNode != null)
return aNode.getNamespaceURI ();
return null;
}
@Nullable
public static String getElementName (@Nullable final Node aNode)
{
if (aNode instanceof Document)
return getElementName (((Document) aNode).getDocumentElement ());
if (aNode instanceof Element)
{
String ret = aNode.getLocalName ();
if (ret == null)
ret = ((Element) aNode).getTagName ();
return ret;
}
return null;
}
@Nonnegative
public static int getLength (@Nullable final NodeList aNL)
{
return aNL == null ? 0 : aNL.getLength ();
}
public static boolean isEmpty (@Nullable final NodeList aNL)
{
return getLength (aNL) == 0;
}
}
| src/main/java/com/helger/commons/xml/XMLHelper.java | /**
* Copyright (C) 2014-2015 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.commons.xml;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import javax.xml.XMLConstants;
import javax.xml.namespace.QName;
import org.w3c.dom.Attr;
import org.w3c.dom.CharacterData;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.EntityReference;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import com.helger.commons.ValueEnforcer;
import com.helger.commons.annotations.Nonempty;
import com.helger.commons.annotations.PresentForCodeCoverage;
import com.helger.commons.collections.ArrayHelper;
import com.helger.commons.collections.ContainerHelper;
import com.helger.commons.collections.iterate.IIterableIterator;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* This class contains multiple XML utility methods.
*
* @author Philip Helger
*/
@Immutable
public final class XMLHelper
{
@PresentForCodeCoverage
@SuppressWarnings ("unused")
private static final XMLHelper s_aInstance = new XMLHelper ();
private XMLHelper ()
{}
/**
* Get the first direct child element of the passed element.
*
* @param aStartNode
* The element to start searching.
* @return <code>null</code> if the passed element does not have any direct
* child element.
*/
@Nullable
public static Element getFirstChildElement (@Nonnull final Node aStartNode)
{
final NodeList aNodeList = aStartNode.getChildNodes ();
final int nLen = aNodeList.getLength ();
for (int i = 0; i < nLen; ++i)
{
final Node aNode = aNodeList.item (i);
if (aNode.getNodeType () == Node.ELEMENT_NODE)
return (Element) aNode;
}
return null;
}
/**
* Check if the passed node has at least one direct child element or not.
*
* @param aStartNode
* The parent element to be searched. May not be <code>null</code>.
* @return <code>true</code> if the passed node has at least one child
* element, <code>false</code> otherwise.
*/
public static boolean hasChildElementNodes (@Nonnull final Node aStartNode)
{
return getFirstChildElement (aStartNode) != null;
}
/**
* Search all child nodes of the given for the first element that has the
* specified tag name.
*
* @param aStartNode
* The parent element to be searched. May not be <code>null</code>.
* @param sName
* The tag name to search.
* @return <code>null</code> if the parent element has no such child element.
*/
@Nullable
public static Element getFirstChildElementOfName (@Nonnull final Node aStartNode, @Nullable final String sName)
{
final NodeList aNodeList = aStartNode.getChildNodes ();
final int nLen = aNodeList.getLength ();
for (int i = 0; i < nLen; ++i)
{
final Node aNode = aNodeList.item (i);
if (aNode.getNodeType () == Node.ELEMENT_NODE)
{
final Element aElement = (Element) aNode;
if (aElement.getTagName ().equals (sName))
return aElement;
}
}
return null;
}
/**
* Get the owner document of the passed node. If the node itself is a
* document, only a cast is performed.
*
* @param aNode
* The node to get the document from. May be <code>null</code>.
* @return <code>null</code> if the passed node was <code>null</code>.
*/
@Nullable
public static Document getOwnerDocument (@Nullable final Node aNode)
{
return aNode == null ? null : aNode instanceof Document ? (Document) aNode : aNode.getOwnerDocument ();
}
@Nonnull
public static Node append (@Nonnull final Node aParentNode, @Nullable final Object aChild)
{
ValueEnforcer.notNull (aParentNode, "ParentNode");
if (aChild != null)
if (aChild instanceof Document)
{
// Special handling for Document comes first, as this is a special case
// of "Node"
// Cannot add complete documents!
append (aParentNode, ((Document) aChild).getDocumentElement ());
}
else
if (aChild instanceof Node)
{
// directly append Node
final Node aChildNode = (Node) aChild;
final Document aParentDoc = getOwnerDocument (aParentNode);
if (getOwnerDocument (aChildNode).equals (aParentDoc))
{
// Nodes have the same parent
aParentNode.appendChild (aChildNode);
}
else
{
// Node to be added belongs to a different document
aParentNode.appendChild (aParentDoc.adoptNode (aChildNode.cloneNode (true)));
}
}
else
if (aChild instanceof String)
{
// append a string node
aParentNode.appendChild (getOwnerDocument (aParentNode).createTextNode ((String) aChild));
}
else
if (aChild instanceof Iterable <?>)
{
// it's a nested collection -> recursion
for (final Object aSubChild : (Iterable <?>) aChild)
append (aParentNode, aSubChild);
}
else
if (ArrayHelper.isArray (aChild))
{
// it's a nested collection -> recursion
for (final Object aSubChild : (Object []) aChild)
append (aParentNode, aSubChild);
}
else
{
// unsupported type
throw new IllegalArgumentException ("Passed object cannot be appended to a DOMNode (type=" +
aChild.getClass ().getName () +
".");
}
return aParentNode;
}
public static void append (@Nonnull final Node aSrcNode, @Nonnull final Collection <?> aNodesToAppend)
{
for (final Object aNode : aNodesToAppend)
append (aSrcNode, aNode);
}
@Nonnegative
public static int getDirectChildElementCountNoNS (@Nullable final Element aParent)
{
return aParent == null ? 0 : ContainerHelper.getSize (getChildElementIteratorNoNS (aParent));
}
@Nonnegative
public static int getDirectChildElementCountNoNS (@Nullable final Element aParent,
@Nonnull @Nonempty final String sTagName)
{
return aParent == null ? 0 : ContainerHelper.getSize (getChildElementIteratorNoNS (aParent, sTagName));
}
@Nonnegative
public static int getDirectChildElementCountNS (@Nullable final Element aParent, @Nullable final String sNamespaceURI)
{
return aParent == null ? 0 : ContainerHelper.getSize (getChildElementIteratorNS (aParent, sNamespaceURI));
}
@Nonnegative
public static int getDirectChildElementCountNS (@Nullable final Element aParent,
@Nullable final String sNamespaceURI,
@Nonnull @Nonempty final String sLocalName)
{
return aParent == null ? 0
: ContainerHelper.getSize (getChildElementIteratorNS (aParent, sNamespaceURI, sLocalName));
}
/**
* Get an iterator over all child elements that have no namespace.
*
* @param aStartNode
* the parent element
* @return a non-null Iterator
*/
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNoNS (@Nonnull final Node aStartNode)
{
return new ChildElementIterator (aStartNode, FilterElementWithoutNamespace.getInstance ());
}
/**
* Get an iterator over all child elements that have no namespace and the
* desired tag name.
*
* @param aStartNode
* the parent element
* @param sTagName
* the name of the tag that is desired
* @return a non-null Iterator
* @throws IllegalArgumentException
* if the passed tag name is null or empty
*/
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNoNS (@Nonnull final Node aStartNode,
@Nonnull @Nonempty final String sTagName)
{
ValueEnforcer.notEmpty (sTagName, "TagName");
return new ChildElementIterator (aStartNode, new FilterElementWithTagName (sTagName));
}
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNS (@Nonnull final Node aStartNode,
@Nullable final String sNamespaceURI)
{
return new ChildElementIterator (aStartNode, new FilterElementWithNamespace (sNamespaceURI));
}
@Nonnull
public static IIterableIterator <Element> getChildElementIteratorNS (@Nonnull final Node aStartNode,
@Nullable final String sNamespaceURI,
@Nonnull @Nonempty final String sLocalName)
{
ValueEnforcer.notEmpty (sLocalName, "LocalName");
return new ChildElementIterator (aStartNode, new FilterElementWithNamespaceAndLocalName (sNamespaceURI, sLocalName));
}
public static boolean hasNamespaceURI (@Nullable final Node aNode, @Nullable final String sNamespaceURI)
{
final String sNSURI = aNode == null ? null : aNode.getNamespaceURI ();
return sNSURI != null && sNSURI.equals (sNamespaceURI);
}
/**
* Shortcut for {@link #getPathToNode(Node, String)} using "/" as the
* separator.
*
* @param aNode
* The node to check.
* @return A non-<code>null</code> path.
*/
@Nonnull
public static String getPathToNode (@Nonnull final Node aNode)
{
return getPathToNode (aNode, "/");
}
/**
* Get the path from root node to the passed node. This includes all nodes up
* to the document node!
*
* @param aNode
* The node to start. May not be <code>null</code>.
* @param sSep
* The separator string to use. May not be <code>null</code>.
* @return The path to the node.
*/
@Nonnull
@SuppressFBWarnings ("IL_INFINITE_LOOP")
public static String getPathToNode (@Nonnull final Node aNode, @Nonnull final String sSep)
{
ValueEnforcer.notNull (aNode, "Node");
ValueEnforcer.notNull (sSep, "Separator");
final StringBuilder aRet = new StringBuilder ();
Node aCurNode = aNode;
while (aCurNode != null)
{
final StringBuilder aName = new StringBuilder (aCurNode.getNodeName ());
if (aCurNode.getNodeType () == Node.ELEMENT_NODE && aCurNode.getParentNode () != null)
{
// get index of my current element
final Element aCurElement = (Element) aCurNode;
int nIndex = 0;
// For all elements of the parent node
for (final Element x : new ChildElementIterator (aCurNode.getParentNode ()))
{
if (x == aCurNode)// NOPMD
break;
if (x.getTagName ().equals (aCurElement.getTagName ()))
++nIndex;
}
aName.append ('[').append (nIndex).append (']');
}
aRet.insert (0, sSep).insert (0, aName);
// goto parent
aCurNode = aCurNode.getParentNode ();
}
return aRet.toString ();
}
/**
* Shortcut for {@link #getPathToNode2(Node,String)} using "/" as the
* separator.
*
* @param aNode
* The node to check.
* @return A non-<code>null</code> path.
*/
@Nonnull
public static String getPathToNode2 (@Nonnull final Node aNode)
{
return getPathToNode2 (aNode, "/");
}
/**
* Get the path from root node to the passed node. This includes all nodes but
* excluding the document node!
*
* @param aNode
* The node to start. May not be <code>null</code>.
* @param sSep
* The separator string to use. May not be <code>null</code>.
* @return The path to the node.
*/
@Nonnull
public static String getPathToNode2 (@Nonnull final Node aNode, @Nonnull final String sSep)
{
ValueEnforcer.notNull (aNode, "Node");
ValueEnforcer.notNull (sSep, "Separator");
final StringBuilder aRet = new StringBuilder ();
Node aCurNode = aNode;
while (aCurNode != null)
{
if (aCurNode.getNodeType () == Node.DOCUMENT_NODE && aRet.length () > 0)
{
// Avoid printing the content of the document node, if something else is
// already present
// Add leading separator
aRet.insert (0, sSep);
break;
}
final StringBuilder aName = new StringBuilder (aCurNode.getNodeName ());
if (aCurNode.getNodeType () == Node.ELEMENT_NODE &&
aCurNode.getParentNode () != null &&
aCurNode.getParentNode ().getNodeType () == Node.ELEMENT_NODE)
{
// get index of current element in parent element
final Element aCurElement = (Element) aCurNode;
int nIndex = 0;
int nMatchingIndex = -1;
for (final Element x : new ChildElementIterator (aCurNode.getParentNode ()))
{
if (x == aCurNode)// NOPMD
nMatchingIndex = nIndex;
if (x.getTagName ().equals (aCurElement.getTagName ()))
++nIndex;
}
if (nMatchingIndex < 0)
throw new IllegalStateException ("Failed to find Node at parent");
if (nIndex > 1)
{
// Append index only, if more than one element is present
aName.append ('[').append (nMatchingIndex).append (']');
}
}
if (aRet.length () > 0)
{
// Avoid trailing separator
aRet.insert (0, sSep);
}
aRet.insert (0, aName);
// goto parent
aCurNode = aCurNode.getParentNode ();
}
return aRet.toString ();
}
/**
* Remove all child nodes of the given node.
*
* @param aElement
* The element whose children are to be removed.
*/
public static void removeAllChildElements (@Nonnull final Element aElement)
{
while (aElement.getChildNodes ().getLength () > 0)
aElement.removeChild (aElement.getChildNodes ().item (0));
}
/**
* Check if the passed node is a text node. This includes all nodes derived
* from {@link CharacterData} which are not {@link Comment} nodes as well as
* {@link EntityReference} nodes.
*
* @param aNode
* The node to be checked.
* @return <code>true</code> if the passed node is a text node,
* <code>false</code> otherwise.
*/
public static boolean isTextNode (@Nullable final Node aNode)
{
return (aNode instanceof CharacterData && !(aNode instanceof Comment)) || aNode instanceof EntityReference;
}
/**
* Get the content of the first Text child element of the passed element.
*
* @param aStartNode
* the element to scan for a TextNode child
* @return <code>null</code> if the element contains no text node as child
*/
@Nullable
public static String getFirstChildText (@Nullable final Node aStartNode)
{
if (aStartNode != null)
{
final NodeList aNodeList = aStartNode.getChildNodes ();
final int nLen = aNodeList.getLength ();
for (int i = 0; i < nLen; ++i)
{
final Node aNode = aNodeList.item (i);
if (aNode instanceof Text)
{
final Text aText = (Text) aNode;
// ignore whitespace-only content
if (!aText.isElementContentWhitespace ())
return aText.getData ();
}
}
}
return null;
}
/**
* The latest version of XercesJ 2.9 returns an empty string for non existing
* attributes. To differentiate between empty attributes and non-existing
* attributes, this method returns null for non existing attributes.
*
* @param aElement
* the source element to get the attribute from
* @param sAttrName
* the name of the attribute to query
* @return <code>null</code> if the attribute does not exists, the string
* value otherwise
*/
@Nullable
public static String getAttributeValue (@Nonnull final Element aElement, @Nonnull final String sAttrName)
{
return getAttributeValue (aElement, sAttrName, null);
}
/**
* The latest version of XercesJ 2.9 returns an empty string for non existing
* attributes. To differentiate between empty attributes and non-existing
* attributes, this method returns a default value for non existing
* attributes.
*
* @param aElement
* the source element to get the attribute from. May not be
* <code>null</code>.
* @param sAttrName
* the name of the attribute to query. May not be <code>null</code>.
* @param sDefault
* the value to be returned if the attribute is not present.
* @return the default value if the attribute does not exists, the string
* value otherwise
*/
@Nullable
public static String getAttributeValue (@Nonnull final Element aElement,
@Nonnull final String sAttrName,
@Nullable final String sDefault)
{
final Attr aAttr = aElement.getAttributeNode (sAttrName);
return aAttr == null ? sDefault : aAttr.getValue ();
}
@Nullable
public static Map <String, String> getAllAttributesAsMap (@Nullable final Element aSrcNode)
{
if (aSrcNode != null)
{
final NamedNodeMap aNNM = aSrcNode.getAttributes ();
if (aNNM != null)
{
final Map <String, String> aMap = new LinkedHashMap <String, String> (aNNM.getLength ());
final int nMax = aNNM.getLength ();
for (int i = 0; i < nMax; ++i)
{
final Attr aAttr = (Attr) aNNM.item (i);
aMap.put (aAttr.getName (), aAttr.getValue ());
}
return aMap;
}
}
return null;
}
/**
* Get the full qualified attribute name to use for the given namespace
* prefix. The result will e.g. be <code>xmlns</code> or
* <code>xmlns:foo</code>.
*
* @param sNSPrefix
* The namespace prefix to build the attribute name from. May be
* <code>null</code> or empty.
* @return If the namespace prefix is empty (if it equals
* {@link XMLConstants#DEFAULT_NS_PREFIX} or <code>null</code>) than
* "xmlns" is returned, else "xmlns:<i>prefix</i>" is returned.
*/
@Nonnull
@Deprecated
public static String getXMLNSAttrName (@Nullable final String sNSPrefix)
{
if (sNSPrefix != null && sNSPrefix.contains (CXML.XML_PREFIX_NAMESPACE_SEP_STR))
throw new IllegalArgumentException ("prefix is invalid: " + sNSPrefix);
if (sNSPrefix == null || sNSPrefix.equals (XMLConstants.DEFAULT_NS_PREFIX))
return CXML.XML_ATTR_XMLNS;
return CXML.XML_ATTR_XMLNS_WITH_SEP + sNSPrefix;
}
/**
* Get the full qualified attribute name to use for the given namespace
* prefix. The result will e.g. be <code>xmlns</code> or
* <code>xmlns:foo</code>.
*
* @param sNSPrefix
* The namespace prefix to build the attribute name from. May be
* <code>null</code> or empty.
* @return If the namespace prefix is empty (if it equals
* {@link XMLConstants#DEFAULT_NS_PREFIX} or <code>null</code>) than
* "xmlns" is returned, else "xmlns:<i>prefix</i>" is returned.
*/
@Nonnull
public static QName getXMLNSAttrQName (@Nullable final String sNSPrefix)
{
if (sNSPrefix != null && sNSPrefix.contains (CXML.XML_PREFIX_NAMESPACE_SEP_STR))
throw new IllegalArgumentException ("prefix is invalid: " + sNSPrefix);
if (sNSPrefix == null || sNSPrefix.equals (XMLConstants.DEFAULT_NS_PREFIX))
return new QName (CXML.XML_ATTR_XMLNS);
return new QName (XMLConstants.XMLNS_ATTRIBUTE_NS_URI, sNSPrefix, CXML.XML_ATTR_XMLNS);
}
@Nullable
public static String getNamespaceURI (@Nullable final Node aNode)
{
if (aNode instanceof Document)
return getNamespaceURI (((Document) aNode).getDocumentElement ());
if (aNode != null)
return aNode.getNamespaceURI ();
return null;
}
@Nullable
public static String getElementName (@Nullable final Node aNode)
{
if (aNode instanceof Document)
return getElementName (((Document) aNode).getDocumentElement ());
if (aNode != null)
{
String ret = aNode.getLocalName ();
if (ret == null)
ret = aNode.getNodeName ();
return ret;
}
return null;
}
@Nonnegative
public static int getLength (@Nullable final NodeList aNL)
{
return aNL == null ? 0 : aNL.getLength ();
}
public static boolean isEmpty (@Nullable final NodeList aNL)
{
return getLength (aNL) == 0;
}
}
| Handle only Element | src/main/java/com/helger/commons/xml/XMLHelper.java | Handle only Element | <ide><path>rc/main/java/com/helger/commons/xml/XMLHelper.java
<ide> {
<ide> if (aNode instanceof Document)
<ide> return getElementName (((Document) aNode).getDocumentElement ());
<del> if (aNode != null)
<add> if (aNode instanceof Element)
<ide> {
<ide> String ret = aNode.getLocalName ();
<ide> if (ret == null)
<del> ret = aNode.getNodeName ();
<add> ret = ((Element) aNode).getTagName ();
<ide> return ret;
<ide> }
<ide> return null; |
|
Java | bsd-2-clause | 9e682a5eff4301e894eb4de6805f4c904999ec35 | 0 | pgjdbc/pgjdbc,alexismeneses/pgjdbc,jkutner/pgjdbc,ekoontz/pgjdbc,jorsol/pgjdbc,rjmac/pgjdbc,zemian/pgjdbc,zemian/pgjdbc,pgjdbc/pgjdbc,sehrope/pgjdbc,lonnyj/pgjdbc,tivv/pgjdbc,Gordiychuk/pgjdbc,underyx/pgjdbc,amozhenin/pgjdbc,golovnin/pgjdbc,thkoch2001/libpostgresql-jdbc-java,underyx/pgjdbc,Gordiychuk/pgjdbc,marschall/pgjdbc,AlexElin/pgjdbc,lordnelson/pgjdbc,whitingjr/pgjdbc,phillipross/pgjdbc,lordnelson/pgjdbc,thkoch2001/libpostgresql-jdbc-java,schlosna/pgjdbc,zapov/pgjdbc,pgjdbc/pgjdbc,pgjdbc/pgjdbc,AlexElin/pgjdbc,davecramer/pgjdbc,phillipross/pgjdbc,alexismeneses/pgjdbc,thkoch2001/libpostgresql-jdbc-java,AlexElin/pgjdbc,marschall/pgjdbc,sehrope/pgjdbc,jkutner/pgjdbc,golovnin/pgjdbc,jorsol/pgjdbc,rjmac/pgjdbc,sehrope/pgjdbc,panchenko/pgjdbc,lonnyj/pgjdbc,lordnelson/pgjdbc,marschall/pgjdbc,golovnin/pgjdbc,sehrope/pgjdbc,zemian/pgjdbc,tivv/pgjdbc,rjmac/pgjdbc,zapov/pgjdbc,bocap/pgjdbc,jorsol/pgjdbc,amozhenin/pgjdbc,jamesthomp/pgjdbc,phillipross/pgjdbc,schlosna/pgjdbc,panchenko/pgjdbc,jamesthomp/pgjdbc,bocap/pgjdbc,jamesthomp/pgjdbc,marschall/pgjdbc,whitingjr/pgjdbc,panchenko/pgjdbc,whitingjr/pgjdbc,bocap/pgjdbc,ekoontz/pgjdbc,jorsol/pgjdbc,underyx/pgjdbc,amozhenin/pgjdbc,davecramer/pgjdbc,jkutner/pgjdbc,zapov/pgjdbc,davecramer/pgjdbc,Gordiychuk/pgjdbc,davecramer/pgjdbc | package org.postgresql.jdbc1;
import java.math.BigDecimal;
import java.io.*;
import java.sql.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Vector;
import org.postgresql.Field;
import org.postgresql.core.Encoding;
import org.postgresql.largeobject.*;
import org.postgresql.util.PGbytea;
import org.postgresql.util.PSQLException;
/* $Header$
* This class defines methods of the jdbc1 specification. This class is
* extended by org.postgresql.jdbc2.AbstractJdbc2ResultSet which adds the jdbc2
* methods. The real ResultSet class (for jdbc1) is org.postgresql.jdbc1.Jdbc1ResultSet
*/
public abstract class AbstractJdbc1ResultSet
{
protected Vector rows; // The results
protected Statement statement;
protected Field fields[]; // The field descriptions
protected String status; // Status of the result
protected boolean binaryCursor = false; // is the data binary or Strings
protected int updateCount; // How many rows did we get back?
protected long insertOID; // The oid of an inserted row
protected int current_row; // Our pointer to where we are at
protected byte[][] this_row; // the current row result
protected org.postgresql.PGConnection connection; // the connection which we returned from
protected SQLWarning warnings = null; // The warning chain
protected boolean wasNullFlag = false; // the flag for wasNull()
// We can chain multiple resultSets together - this points to
// next resultSet in the chain.
protected ResultSet next = null;
protected StringBuffer sbuf = null;
public byte[][] rowBuffer = null;
public AbstractJdbc1ResultSet(org.postgresql.PGConnection conn, Statement statement, Field[] fields, Vector tuples, String status, int updateCount, long insertOID, boolean binaryCursor)
{
this.connection = conn;
this.statement = statement;
this.fields = fields;
this.rows = tuples;
this.status = status;
this.updateCount = updateCount;
this.insertOID = insertOID;
this.this_row = null;
this.current_row = -1;
this.binaryCursor = binaryCursor;
}
public boolean next() throws SQLException
{
if (rows == null)
throw new PSQLException("postgresql.con.closed");
if (++current_row >= rows.size())
return false;
this_row = (byte [][])rows.elementAt(current_row);
rowBuffer = new byte[this_row.length][];
System.arraycopy(this_row, 0, rowBuffer, 0, this_row.length);
return true;
}
public void close() throws SQLException
{
//release resources held (memory for tuples)
if (rows != null)
{
rows = null;
}
}
public boolean wasNull() throws SQLException
{
return wasNullFlag;
}
public String getString(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
Encoding encoding = connection.getEncoding();
return encoding.decode(this_row[columnIndex - 1]);
}
public boolean getBoolean(int columnIndex) throws SQLException
{
return toBoolean( getString(columnIndex) );
}
public byte getByte(int columnIndex) throws SQLException
{
String s = getString(columnIndex);
if (s != null)
{
try
{
return Byte.parseByte(s);
}
catch (NumberFormatException e)
{
throw new PSQLException("postgresql.res.badbyte", s);
}
}
return 0; // SQL NULL
}
public short getShort(int columnIndex) throws SQLException
{
String s = getFixedString(columnIndex);
if (s != null)
{
try
{
return Short.parseShort(s);
}
catch (NumberFormatException e)
{
throw new PSQLException("postgresql.res.badshort", s);
}
}
return 0; // SQL NULL
}
public int getInt(int columnIndex) throws SQLException
{
return toInt( getFixedString(columnIndex) );
}
public long getLong(int columnIndex) throws SQLException
{
return toLong( getFixedString(columnIndex) );
}
public float getFloat(int columnIndex) throws SQLException
{
return toFloat( getFixedString(columnIndex) );
}
public double getDouble(int columnIndex) throws SQLException
{
return toDouble( getFixedString(columnIndex) );
}
public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException
{
return toBigDecimal( getFixedString(columnIndex), scale );
}
/*
* Get the value of a column in the current row as a Java byte array.
*
* <p>In normal use, the bytes represent the raw values returned by the
* backend. However, if the column is an OID, then it is assumed to
* refer to a Large Object, and that object is returned as a byte array.
*
* <p><b>Be warned</b> If the large object is huge, then you may run out
* of memory.
*
* @param columnIndex the first column is 1, the second is 2, ...
* @return the column value; if the value is SQL NULL, the result
* is null
* @exception SQLException if a database access error occurs
*/
public byte[] getBytes(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (!wasNullFlag)
{
if (binaryCursor)
{
//If the data is already binary then just return it
return this_row[columnIndex - 1];
}
else if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports the bytea datatype for byte arrays
if (fields[columnIndex - 1].getPGType().equals("bytea"))
{
return PGbytea.toBytes(this_row[columnIndex - 1]);
}
else
{
return this_row[columnIndex - 1];
}
}
else
{
//Version 7.1 and earlier supports LargeObjects for byte arrays
// Handle OID's as BLOBS
if ( fields[columnIndex - 1].getOID() == 26)
{
LargeObjectManager lom = connection.getLargeObjectAPI();
LargeObject lob = lom.open(getInt(columnIndex));
byte buf[] = lob.read(lob.size());
lob.close();
return buf;
}
else
{
return this_row[columnIndex - 1];
}
}
}
return null;
}
public java.sql.Date getDate(int columnIndex) throws SQLException
{
return toDate( getString(columnIndex) );
}
public Time getTime(int columnIndex) throws SQLException
{
return toTime( getString(columnIndex), (java.sql.ResultSet)this, fields[columnIndex - 1].getPGType() );
}
public Timestamp getTimestamp(int columnIndex) throws SQLException
{
return toTimestamp( getString(columnIndex), (java.sql.ResultSet)this, fields[columnIndex - 1].getPGType() );
}
public InputStream getAsciiStream(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports AsciiStream for all the PG text types
//As the spec/javadoc for this method indicate this is to be used for
//large text values (i.e. LONGVARCHAR) PG doesn't have a separate
//long string datatype, but with toast the text datatype is capable of
//handling very large values. Thus the implementation ends up calling
//getString() since there is no current way to stream the value from the server
try
{
return new ByteArrayInputStream(getString(columnIndex).getBytes("ASCII"));
}
catch (UnsupportedEncodingException l_uee)
{
throw new PSQLException("postgresql.unusual", l_uee);
}
}
else
{
// In 7.1 Handle as BLOBS so return the LargeObject input stream
return getBinaryStream(columnIndex);
}
}
public InputStream getUnicodeStream(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports AsciiStream for all the PG text types
//As the spec/javadoc for this method indicate this is to be used for
//large text values (i.e. LONGVARCHAR) PG doesn't have a separate
//long string datatype, but with toast the text datatype is capable of
//handling very large values. Thus the implementation ends up calling
//getString() since there is no current way to stream the value from the server
try
{
return new ByteArrayInputStream(getString(columnIndex).getBytes("UTF-8"));
}
catch (UnsupportedEncodingException l_uee)
{
throw new PSQLException("postgresql.unusual", l_uee);
}
}
else
{
// In 7.1 Handle as BLOBS so return the LargeObject input stream
return getBinaryStream(columnIndex);
}
}
public InputStream getBinaryStream(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports BinaryStream for all PG bytea type
//As the spec/javadoc for this method indicate this is to be used for
//large binary values (i.e. LONGVARBINARY) PG doesn't have a separate
//long binary datatype, but with toast the bytea datatype is capable of
//handling very large values. Thus the implementation ends up calling
//getBytes() since there is no current way to stream the value from the server
byte b[] = getBytes(columnIndex);
if (b != null)
return new ByteArrayInputStream(b);
}
else
{
// In 7.1 Handle as BLOBS so return the LargeObject input stream
if ( fields[columnIndex - 1].getOID() == 26)
{
LargeObjectManager lom = connection.getLargeObjectAPI();
LargeObject lob = lom.open(getInt(columnIndex));
return lob.getInputStream();
}
}
return null;
}
public String getString(String columnName) throws SQLException
{
return getString(findColumn(columnName));
}
public boolean getBoolean(String columnName) throws SQLException
{
return getBoolean(findColumn(columnName));
}
public byte getByte(String columnName) throws SQLException
{
return getByte(findColumn(columnName));
}
public short getShort(String columnName) throws SQLException
{
return getShort(findColumn(columnName));
}
public int getInt(String columnName) throws SQLException
{
return getInt(findColumn(columnName));
}
public long getLong(String columnName) throws SQLException
{
return getLong(findColumn(columnName));
}
public float getFloat(String columnName) throws SQLException
{
return getFloat(findColumn(columnName));
}
public double getDouble(String columnName) throws SQLException
{
return getDouble(findColumn(columnName));
}
public BigDecimal getBigDecimal(String columnName, int scale) throws SQLException
{
return getBigDecimal(findColumn(columnName), scale);
}
public byte[] getBytes(String columnName) throws SQLException
{
return getBytes(findColumn(columnName));
}
public java.sql.Date getDate(String columnName) throws SQLException
{
return getDate(findColumn(columnName));
}
public Time getTime(String columnName) throws SQLException
{
return getTime(findColumn(columnName));
}
public Timestamp getTimestamp(String columnName) throws SQLException
{
return getTimestamp(findColumn(columnName));
}
public InputStream getAsciiStream(String columnName) throws SQLException
{
return getAsciiStream(findColumn(columnName));
}
public InputStream getUnicodeStream(String columnName) throws SQLException
{
return getUnicodeStream(findColumn(columnName));
}
public InputStream getBinaryStream(String columnName) throws SQLException
{
return getBinaryStream(findColumn(columnName));
}
public SQLWarning getWarnings() throws SQLException
{
return warnings;
}
public void clearWarnings() throws SQLException
{
warnings = null;
}
public void addWarnings(SQLWarning warnings)
{
if ( this.warnings != null )
this.warnings.setNextWarning(warnings);
else
this.warnings = warnings;
}
public String getCursorName() throws SQLException
{
return ((AbstractJdbc1Connection)connection).getCursorName();
}
/*
* Get the value of a column in the current row as a Java object
*
* <p>This method will return the value of the given column as a
* Java object. The type of the Java object will be the default
* Java Object type corresponding to the column's SQL type, following
* the mapping specified in the JDBC specification.
*
* <p>This method may also be used to read database specific abstract
* data types.
*
* @param columnIndex the first column is 1, the second is 2...
* @return a Object holding the column value
* @exception SQLException if a database access error occurs
*/
public Object getObject(int columnIndex) throws SQLException
{
Field field;
if (columnIndex < 1 || columnIndex > fields.length)
throw new PSQLException("postgresql.res.colrange");
field = fields[columnIndex - 1];
// some fields can be null, mainly from those returned by MetaData methods
if (field == null)
{
wasNullFlag = true;
return null;
}
switch (field.getSQLType())
{
case Types.BIT:
return getBoolean(columnIndex) ? Boolean.TRUE : Boolean.FALSE;
case Types.SMALLINT:
return new Short(getShort(columnIndex));
case Types.INTEGER:
return new Integer(getInt(columnIndex));
case Types.BIGINT:
return new Long(getLong(columnIndex));
case Types.NUMERIC:
return getBigDecimal
(columnIndex, (field.getMod() == -1) ? -1 : ((field.getMod() - 4) & 0xffff));
case Types.REAL:
return new Float(getFloat(columnIndex));
case Types.DOUBLE:
return new Double(getDouble(columnIndex));
case Types.CHAR:
case Types.VARCHAR:
return getString(columnIndex);
case Types.DATE:
return getDate(columnIndex);
case Types.TIME:
return getTime(columnIndex);
case Types.TIMESTAMP:
return getTimestamp(columnIndex);
case Types.BINARY:
case Types.VARBINARY:
return getBytes(columnIndex);
default:
String type = field.getPGType();
// if the backend doesn't know the type then coerce to String
if (type.equals("unknown"))
{
return getString(columnIndex);
}
else
{
return connection.getObject(field.getPGType(), getString(columnIndex));
}
}
}
public Object getObject(String columnName) throws SQLException
{
return getObject(findColumn(columnName));
}
/*
* Map a ResultSet column name to a ResultSet column index
*/
public int findColumn(String columnName) throws SQLException
{
int i;
final int flen = fields.length;
for (i = 0 ; i < flen; ++i)
if (fields[i].getName().equalsIgnoreCase(columnName))
return (i + 1);
throw new PSQLException ("postgresql.res.colname", columnName);
}
/*
* We at times need to know if the resultSet we are working
* with is the result of an UPDATE, DELETE or INSERT (in which
* case, we only have a row count), or of a SELECT operation
* (in which case, we have multiple fields) - this routine
* tells us.
*/
public boolean reallyResultSet()
{
return (fields != null);
}
/*
* Since ResultSets can be chained, we need some method of
* finding the next one in the chain. The method getNext()
* returns the next one in the chain.
*
* @return the next ResultSet, or null if there are none
*/
public java.sql.ResultSet getNext()
{
return (java.sql.ResultSet)next;
}
/*
* This following method allows us to add a ResultSet object
* to the end of the current chain.
*/
public void append(AbstractJdbc1ResultSet r)
{
if (next == null)
next = (java.sql.ResultSet)r;
else
((AbstractJdbc1ResultSet)next).append(r);
}
/*
* If we are just a place holder for results, we still need
* to get an updateCount. This method returns it.
*/
public int getResultCount()
{
return updateCount;
}
/*
* We also need to provide a couple of auxiliary functions for
* the implementation of the ResultMetaData functions. In
* particular, we need to know the number of rows and the
* number of columns. Rows are also known as Tuples
*/
public int getTupleCount()
{
return rows.size();
}
/*
* getColumnCount returns the number of columns
*/
public int getColumnCount()
{
return fields.length;
}
/*
 * Returns the status message from the backend.<p>
 * It is used internally by the driver.
 *
 * @return the backend status string for the query that produced this result
 */
public String getStatusString()
{
    return status;
}
/*
 * returns the OID of a field.<p>
 * It is used internally by the driver.
 *
 * @param field the 1-based column index
 * @return the type OID of that column's field description
 */
public int getColumnOID(int field)
{
    return fields[field - 1].getOID();
}
/*
 * returns the OID of the last inserted row. Deprecated in 7.2 because
 * range for OID values is greater than java signed int.
 * @deprecated Replaced by getLastOID() in 7.2
 */
public int getInsertedOID()
{
    // Narrowing cast: OID values above Integer.MAX_VALUE will wrap;
    // callers should migrate to getLastOID().
    return (int) getLastOID();
}
/*
 * returns the OID of the last inserted row
 * @since 7.2
 *
 * @return the OID of the last inserted row
 */
public long getLastOID()
{
    return insertOID;
}
/*
 * This is used to fix get*() methods on Money fields. It should only be
 * used by those methods!
 *
 * It converts ($##.##) to -##.## and $##.## to ##.##
 *
 * @param col the 1-based column index
 * @return the column text with any money formatting stripped, or null
 *         for SQL NULL
 */
public String getFixedString(int col) throws SQLException
{
    String s = getString(col);

    // Handle SQL Null
    wasNullFlag = (this_row[col - 1] == null);
    if (wasNullFlag)
        return null;

    // if we don't have at least 2 characters it can't be money.
    if (s.length() < 2)
        return s;

    // Handle Money
    if (s.charAt(0) == '(')
    {
        // Accounting style: "($12.34)" denotes a negative amount.
        s = "-" + org.postgresql.util.PGtokenizer.removePara(s).substring(1);
    }
    if (s.charAt(0) == '$')
    {
        s = s.substring(1);
    }
    else if (s.charAt(0) == '-' && s.charAt(1) == '$')
    {
        // Negative money printed as "-$12.34".
        s = "-" + s.substring(2);
    }

    return s;
}
/*
 * Sanity checks shared by the get*() methods: a row must be current
 * (next() has succeeded) and the column index must be within the
 * 1-based range of available fields.
 *
 * @param column the 1-based column index to validate
 * @exception SQLException if no row is current or the index is out of range
 */
protected void checkResultSet( int column ) throws SQLException
{
    if ( this_row == null )
        throw new PSQLException("postgresql.res.nextrequired");
    if ( column < 1 || column > fields.length )
        throw new PSQLException("postgresql.res.colrange" );
}
//----------------- Formatting Methods -------------------

/*
 * Interpret a backend string as a boolean: 't', 'T' and '1' are true;
 * everything else - including SQL NULL and an empty string - is false.
 *
 * @param s the backend string, or null for SQL NULL
 * @return the boolean value of s
 */
public static boolean toBoolean(String s)
{
    // Guard against an empty string as well as null; charAt(0) on ""
    // would throw StringIndexOutOfBoundsException.
    if (s != null && s.length() > 0)
    {
        int c = s.charAt(0);
        return ((c == 't') || (c == 'T') || (c == '1'));
    }
    return false; // SQL NULL
}
/*
 * Convert a backend string to an int, treating SQL NULL as 0.
 */
public static int toInt(String s) throws SQLException
{
    if (s == null)
        return 0; // SQL NULL

    try
    {
        return Integer.parseInt(s);
    }
    catch (NumberFormatException e)
    {
        throw new PSQLException ("postgresql.res.badint", s);
    }
}
/*
 * Convert a backend string to a long, treating SQL NULL as 0.
 */
public static long toLong(String s) throws SQLException
{
    if (s == null)
        return 0; // SQL NULL

    try
    {
        return Long.parseLong(s);
    }
    catch (NumberFormatException e)
    {
        throw new PSQLException ("postgresql.res.badlong", s);
    }
}
/*
 * Convert a backend string to a BigDecimal, treating SQL NULL as null.
 * A scale of -1 leaves the value's natural scale untouched; any other
 * scale is applied with setScale (no rounding mode, so an inexact
 * rescale raises an error).
 */
public static BigDecimal toBigDecimal(String s, int scale) throws SQLException
{
    if (s == null)
        return null; // SQL NULL

    BigDecimal val;
    try
    {
        val = new BigDecimal(s);
    }
    catch (NumberFormatException e)
    {
        throw new PSQLException ("postgresql.res.badbigdec", s);
    }

    if (scale == -1)
        return val;

    try
    {
        return val.setScale(scale);
    }
    catch (ArithmeticException e)
    {
        throw new PSQLException ("postgresql.res.badbigdec", s);
    }
}
/*
 * Convert a backend string to a float, treating SQL NULL as 0.
 */
public static float toFloat(String s) throws SQLException
{
    if (s == null)
        return 0; // SQL NULL

    try
    {
        return Float.valueOf(s).floatValue();
    }
    catch (NumberFormatException e)
    {
        throw new PSQLException ("postgresql.res.badfloat", s);
    }
}
/*
 * Convert a backend string to a double, treating SQL NULL as 0.
 */
public static double toDouble(String s) throws SQLException
{
    if (s == null)
        return 0; // SQL NULL

    try
    {
        return Double.valueOf(s).doubleValue();
    }
    catch (NumberFormatException e)
    {
        throw new PSQLException ("postgresql.res.baddouble", s);
    }
}
/*
 * Convert a backend string to a java.sql.Date, treating SQL NULL as null.
 */
public static java.sql.Date toDate(String s) throws SQLException
{
    if (s == null)
        return null;

    // A 10 character string is a bare SQL date; anything longer is an
    // ISO timestamp (assumes PGDATESTYLE=ISO) whose first 10 characters
    // hold the date part.
    String datePart = s;
    if (s.length() != 10)
        datePart = s.substring(0, 10);

    try
    {
        return java.sql.Date.valueOf(datePart);
    }
    catch (NumberFormatException e)
    {
        throw new PSQLException("postgresql.res.baddate", s);
    }
}
/*
 * Convert a backend time or timestamp string into a java.sql.Time.
 *
 * Accepts "HH:mm:ss", "HH:mm:ss.f[f...]" (fractional seconds), or a
 * full timestamp, which is delegated to toTimestamp().
 *
 * @param s the backend string, or null for SQL NULL
 * @param resultSet the ResultSet this value belongs to (needed by toTimestamp)
 * @param pgDataType the postgres type name of the column
 * @return the parsed Time, or null if s is null
 * @exception SQLException if the string cannot be parsed
 */
public static Time toTime(String s, java.sql.ResultSet resultSet, String pgDataType) throws SQLException
{
    if (s == null)
        return null; // SQL NULL
    try
    {
        if (s.length() == 8)
        {
            //value is a time value
            return java.sql.Time.valueOf(s);
        }
        else if (s.indexOf(".") == 8)
        {
            //value is a time value with fractional seconds
            java.sql.Time l_time = java.sql.Time.valueOf(s.substring(0, 8));
            String l_strMillis = s.substring(9);
            if (l_strMillis.length() > 3)
                l_strMillis = l_strMillis.substring(0, 3);
            int l_millis = Integer.parseInt(l_strMillis);
            // Scale by the number of fractional digits actually present:
            // ".5" is 500ms and ".05" is 50ms. The previous code scaled
            // by the parsed value instead, which mis-read fractions with
            // leading zeros (".05" became 500ms).
            if (l_strMillis.length() == 1)
            {
                l_millis = l_millis * 100;
            }
            else if (l_strMillis.length() == 2)
            {
                l_millis = l_millis * 10;
            }
            return new java.sql.Time(l_time.getTime() + l_millis);
        }
        else
        {
            //value is a timestamp
            return new java.sql.Time(toTimestamp(s, resultSet, pgDataType).getTime());
        }
    }
    catch (NumberFormatException e)
    {
        throw new PSQLException("postgresql.res.badtime", s);
    }
}
/**
 * Parse a string and return a timestamp representing its value.
 *
 * The driver is set to return ISO date formated strings. We modify this
 * string from the ISO format to a format that Java can understand. Java
 * expects timezone info as 'GMT+09:00' where as ISO gives '+09'.
 * Java also expects fractional seconds to 3 places where postgres
 * will give, none, 2 or 6 depending on the time and postgres version.
 * From version 7.2 postgres returns fractional seconds to 6 places.
 * If available, we drop the last 3 digits.
 *
 * @param s The ISO formated date string to parse.
 * @param resultSet The ResultSet this date is part of.
 * @param pgDataType The postgres type name of the column; "timestamptz"
 *        values are interpreted in GMT, everything else in the local zone.
 *
 * @return null if s is null or a timestamp of the parsed string s.
 *
 * @throws SQLException if there is a problem parsing s.
 **/
public static Timestamp toTimestamp(String s, java.sql.ResultSet resultSet, String pgDataType)
throws SQLException
{
    AbstractJdbc1ResultSet rs = (AbstractJdbc1ResultSet)resultSet;
    if (s == null)
        return null;

    // We must be synchronized here incase more theads access the ResultSet
    // bad practice but possible. Anyhow this is to protect sbuf and
    // SimpleDateFormat objects
    synchronized (rs)
    {
        // NOTE(review): a fresh SimpleDateFormat is built per call since
        // instances are not thread-safe and cannot be shared without locking.
        SimpleDateFormat df = null;
        if ( org.postgresql.Driver.logDebug )
            org.postgresql.Driver.debug("the data from the DB is " + s);

        // If first time, create the buffer, otherwise clear it.
        if (rs.sbuf == null)
            rs.sbuf = new StringBuffer(32);
        else
        {
            rs.sbuf.setLength(0);
        }

        // Copy s into sbuf for parsing.
        rs.sbuf.append(s);
        int slen = s.length();

        if (slen > 19)
        {
            // The len of the ISO string to the second value is 19 chars. If
            // greater then 19, there may be tz info and perhaps fractional
            // second info which we need to change to java to read it.

            // cut the copy to second value "2001-12-07 16:29:22"
            int i = 19;
            rs.sbuf.setLength(i);

            char c = s.charAt(i++);
            if (c == '.')
            {
                // Found a fractional value. Append up to 3 digits including
                // the leading '.'
                do
                {
                    if (i < 24)
                        rs.sbuf.append(c);
                    c = s.charAt(i++);
                }
                while (i < slen && Character.isDigit(c));

                // If there wasn't at least 3 digits we should add some zeros
                // to make up the 3 digits we tell java to expect.
                for (int j = i; j < 24; j++)
                    rs.sbuf.append('0');
            }
            else
            {
                // No fractional seconds, lets add some.
                rs.sbuf.append(".000");
            }

            if (i < slen)
            {
                // There is still text left: a timezone offset such as "+09".
                // prepend the GMT part and then add the remaining bit of
                // the string.
                rs.sbuf.append(" GMT");
                rs.sbuf.append(c);
                rs.sbuf.append(s.substring(i, slen));

                // Lastly, if the tz part doesn't specify the :MM part then
                // we add ":00" for java.
                if (slen - i < 5)
                    rs.sbuf.append(":00");

                // we'll use this dateformat string to parse the result.
                df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS z");
            }
            else
            {
                // Just found fractional seconds but no timezone.
                //If timestamptz then we use GMT, else local timezone
                if (pgDataType.equals("timestamptz"))
                {
                    rs.sbuf.append(" GMT");
                    df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS z");
                }
                else
                {
                    df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
                }
            }
        }
        else if (slen == 19)
        {
            // No tz or fractional second info.
            //If timestamptz then we use GMT, else local timezone
            if (pgDataType.equals("timestamptz"))
            {
                rs.sbuf.append(" GMT");
                df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
            }
            else
            {
                df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            }
        }
        else
        {
            if (slen == 8 && s.equals("infinity"))
                //java doesn't have a concept of postgres's infinity
                //so set to an arbitrary future date
                s = "9999-01-01";
            if (slen == 9 && s.equals("-infinity"))
                //java doesn't have a concept of postgres's infinity
                //so set to an arbitrary old date
                s = "0001-01-01";
            // We must just have a date. This case is
            // needed if this method is called on a date
            // column
            df = new SimpleDateFormat("yyyy-MM-dd");
        }

        try
        {
            // All that's left is to parse the string and return the ts.
            if ( org.postgresql.Driver.logDebug )
                org.postgresql.Driver.debug( "" + df.parse(rs.sbuf.toString()).getTime() );

            return new Timestamp(df.parse(rs.sbuf.toString()).getTime());
        }
        catch (ParseException e)
        {
            throw new PSQLException("postgresql.res.badtimestamp", new Integer(e.getErrorOffset()), s);
        }
    }
}
}
| org/postgresql/jdbc1/AbstractJdbc1ResultSet.java | package org.postgresql.jdbc1;
import java.math.BigDecimal;
import java.io.*;
import java.sql.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Vector;
import org.postgresql.Field;
import org.postgresql.core.Encoding;
import org.postgresql.largeobject.*;
import org.postgresql.util.PGbytea;
import org.postgresql.util.PSQLException;
/* $Header$
* This class defines methods of the jdbc1 specification. This class is
* extended by org.postgresql.jdbc2.AbstractJdbc2ResultSet which adds the jdbc2
* methods. The real ResultSet class (for jdbc1) is org.postgresql.jdbc1.Jdbc1ResultSet
*/
public abstract class AbstractJdbc1ResultSet
{
protected Vector rows; // The results
protected Statement statement;
protected Field fields[]; // The field descriptions
protected String status; // Status of the result
protected boolean binaryCursor = false; // is the data binary or Strings
protected int updateCount; // How many rows did we get back?
protected long insertOID; // The oid of an inserted row
protected int current_row; // Our pointer to where we are at
protected byte[][] this_row; // the current row result
protected org.postgresql.PGConnection connection; // the connection which we returned from
protected SQLWarning warnings = null; // The warning chain
protected boolean wasNullFlag = false; // the flag for wasNull()
// We can chain multiple resultSets together - this points to
// next resultSet in the chain.
protected ResultSet next = null;
protected StringBuffer sbuf = null;
public byte[][] rowBuffer = null;
public AbstractJdbc1ResultSet(org.postgresql.PGConnection conn, Statement statement, Field[] fields, Vector tuples, String status, int updateCount, long insertOID, boolean binaryCursor)
{
this.connection = conn;
this.statement = statement;
this.fields = fields;
this.rows = tuples;
this.status = status;
this.updateCount = updateCount;
this.insertOID = insertOID;
this.this_row = null;
this.current_row = -1;
this.binaryCursor = binaryCursor;
}
public boolean next() throws SQLException
{
if (rows == null)
throw new PSQLException("postgresql.con.closed");
if (++current_row >= rows.size())
return false;
this_row = (byte [][])rows.elementAt(current_row);
rowBuffer = new byte[this_row.length][];
System.arraycopy(this_row, 0, rowBuffer, 0, this_row.length);
return true;
}
public void close() throws SQLException
{
//release resources held (memory for tuples)
if (rows != null)
{
rows = null;
}
}
public boolean wasNull() throws SQLException
{
return wasNullFlag;
}
public String getString(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
Encoding encoding = connection.getEncoding();
return encoding.decode(this_row[columnIndex - 1]);
}
public boolean getBoolean(int columnIndex) throws SQLException
{
return toBoolean( getString(columnIndex) );
}
public byte getByte(int columnIndex) throws SQLException
{
String s = getString(columnIndex);
if (s != null)
{
try
{
return Byte.parseByte(s);
}
catch (NumberFormatException e)
{
throw new PSQLException("postgresql.res.badbyte", s);
}
}
return 0; // SQL NULL
}
public short getShort(int columnIndex) throws SQLException
{
String s = getFixedString(columnIndex);
if (s != null)
{
try
{
return Short.parseShort(s);
}
catch (NumberFormatException e)
{
throw new PSQLException("postgresql.res.badshort", s);
}
}
return 0; // SQL NULL
}
public int getInt(int columnIndex) throws SQLException
{
return toInt( getFixedString(columnIndex) );
}
public long getLong(int columnIndex) throws SQLException
{
return toLong( getFixedString(columnIndex) );
}
public float getFloat(int columnIndex) throws SQLException
{
return toFloat( getFixedString(columnIndex) );
}
public double getDouble(int columnIndex) throws SQLException
{
return toDouble( getFixedString(columnIndex) );
}
public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException
{
return toBigDecimal( getFixedString(columnIndex), scale );
}
/*
* Get the value of a column in the current row as a Java byte array.
*
* <p>In normal use, the bytes represent the raw values returned by the
* backend. However, if the column is an OID, then it is assumed to
* refer to a Large Object, and that object is returned as a byte array.
*
* <p><b>Be warned</b> If the large object is huge, then you may run out
* of memory.
*
* @param columnIndex the first column is 1, the second is 2, ...
* @return the column value; if the value is SQL NULL, the result
* is null
* @exception SQLException if a database access error occurs
*/
public byte[] getBytes(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (!wasNullFlag)
{
if (binaryCursor)
{
//If the data is already binary then just return it
return this_row[columnIndex - 1];
}
else if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports the bytea datatype for byte arrays
if (fields[columnIndex - 1].getPGType().equals("bytea"))
{
return PGbytea.toBytes(this_row[columnIndex - 1]);
}
else
{
return this_row[columnIndex - 1];
}
}
else
{
//Version 7.1 and earlier supports LargeObjects for byte arrays
// Handle OID's as BLOBS
if ( fields[columnIndex - 1].getOID() == 26)
{
LargeObjectManager lom = connection.getLargeObjectAPI();
LargeObject lob = lom.open(getInt(columnIndex));
byte buf[] = lob.read(lob.size());
lob.close();
return buf;
}
else
{
return this_row[columnIndex - 1];
}
}
}
return null;
}
public java.sql.Date getDate(int columnIndex) throws SQLException
{
return toDate( getString(columnIndex) );
}
public Time getTime(int columnIndex) throws SQLException
{
return toTime( getString(columnIndex), (java.sql.ResultSet)this, fields[columnIndex - 1].getPGType() );
}
public Timestamp getTimestamp(int columnIndex) throws SQLException
{
return toTimestamp( getString(columnIndex), (java.sql.ResultSet)this, fields[columnIndex - 1].getPGType() );
}
public InputStream getAsciiStream(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports AsciiStream for all the PG text types
//As the spec/javadoc for this method indicate this is to be used for
//large text values (i.e. LONGVARCHAR) PG doesn't have a separate
//long string datatype, but with toast the text datatype is capable of
//handling very large values. Thus the implementation ends up calling
//getString() since there is no current way to stream the value from the server
try
{
return new ByteArrayInputStream(getString(columnIndex).getBytes("ASCII"));
}
catch (UnsupportedEncodingException l_uee)
{
throw new PSQLException("postgresql.unusual", l_uee);
}
}
else
{
// In 7.1 Handle as BLOBS so return the LargeObject input stream
return getBinaryStream(columnIndex);
}
}
public InputStream getUnicodeStream(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports AsciiStream for all the PG text types
//As the spec/javadoc for this method indicate this is to be used for
//large text values (i.e. LONGVARCHAR) PG doesn't have a separate
//long string datatype, but with toast the text datatype is capable of
//handling very large values. Thus the implementation ends up calling
//getString() since there is no current way to stream the value from the server
try
{
return new ByteArrayInputStream(getString(columnIndex).getBytes("UTF-8"));
}
catch (UnsupportedEncodingException l_uee)
{
throw new PSQLException("postgresql.unusual", l_uee);
}
}
else
{
// In 7.1 Handle as BLOBS so return the LargeObject input stream
return getBinaryStream(columnIndex);
}
}
public InputStream getBinaryStream(int columnIndex) throws SQLException
{
checkResultSet( columnIndex );
wasNullFlag = (this_row[columnIndex - 1] == null);
if (wasNullFlag)
return null;
if (((AbstractJdbc1Connection)connection).haveMinimumCompatibleVersion("7.2"))
{
//Version 7.2 supports BinaryStream for all PG bytea type
//As the spec/javadoc for this method indicate this is to be used for
//large binary values (i.e. LONGVARBINARY) PG doesn't have a separate
//long binary datatype, but with toast the bytea datatype is capable of
//handling very large values. Thus the implementation ends up calling
//getBytes() since there is no current way to stream the value from the server
byte b[] = getBytes(columnIndex);
if (b != null)
return new ByteArrayInputStream(b);
}
else
{
// In 7.1 Handle as BLOBS so return the LargeObject input stream
if ( fields[columnIndex - 1].getOID() == 26)
{
LargeObjectManager lom = connection.getLargeObjectAPI();
LargeObject lob = lom.open(getInt(columnIndex));
return lob.getInputStream();
}
}
return null;
}
public String getString(String columnName) throws SQLException
{
return getString(findColumn(columnName));
}
public boolean getBoolean(String columnName) throws SQLException
{
return getBoolean(findColumn(columnName));
}
public byte getByte(String columnName) throws SQLException
{
return getByte(findColumn(columnName));
}
public short getShort(String columnName) throws SQLException
{
return getShort(findColumn(columnName));
}
public int getInt(String columnName) throws SQLException
{
return getInt(findColumn(columnName));
}
public long getLong(String columnName) throws SQLException
{
return getLong(findColumn(columnName));
}
public float getFloat(String columnName) throws SQLException
{
return getFloat(findColumn(columnName));
}
public double getDouble(String columnName) throws SQLException
{
return getDouble(findColumn(columnName));
}
public BigDecimal getBigDecimal(String columnName, int scale) throws SQLException
{
return getBigDecimal(findColumn(columnName), scale);
}
public byte[] getBytes(String columnName) throws SQLException
{
return getBytes(findColumn(columnName));
}
public java.sql.Date getDate(String columnName) throws SQLException
{
return getDate(findColumn(columnName));
}
public Time getTime(String columnName) throws SQLException
{
return getTime(findColumn(columnName));
}
public Timestamp getTimestamp(String columnName) throws SQLException
{
return getTimestamp(findColumn(columnName));
}
public InputStream getAsciiStream(String columnName) throws SQLException
{
return getAsciiStream(findColumn(columnName));
}
public InputStream getUnicodeStream(String columnName) throws SQLException
{
return getUnicodeStream(findColumn(columnName));
}
public InputStream getBinaryStream(String columnName) throws SQLException
{
return getBinaryStream(findColumn(columnName));
}
public SQLWarning getWarnings() throws SQLException
{
return warnings;
}
public void clearWarnings() throws SQLException
{
warnings = null;
}
public void addWarnings(SQLWarning warnings)
{
if ( this.warnings != null )
this.warnings.setNextWarning(warnings);
else
this.warnings = warnings;
}
public String getCursorName() throws SQLException
{
return ((AbstractJdbc1Connection)connection).getCursorName();
}
/*
* Get the value of a column in the current row as a Java object
*
* <p>This method will return the value of the given column as a
* Java object. The type of the Java object will be the default
* Java Object type corresponding to the column's SQL type, following
* the mapping specified in the JDBC specification.
*
* <p>This method may also be used to read database specific abstract
* data types.
*
* @param columnIndex the first column is 1, the second is 2...
* @return a Object holding the column value
* @exception SQLException if a database access error occurs
*/
public Object getObject(int columnIndex) throws SQLException
{
Field field;
if (columnIndex < 1 || columnIndex > fields.length)
throw new PSQLException("postgresql.res.colrange");
field = fields[columnIndex - 1];
// some fields can be null, mainly from those returned by MetaData methods
if (field == null)
{
wasNullFlag = true;
return null;
}
switch (field.getSQLType())
{
case Types.BIT:
return getBoolean(columnIndex) ? Boolean.TRUE : Boolean.FALSE;
case Types.SMALLINT:
return new Short(getShort(columnIndex));
case Types.INTEGER:
return new Integer(getInt(columnIndex));
case Types.BIGINT:
return new Long(getLong(columnIndex));
case Types.NUMERIC:
return getBigDecimal
(columnIndex, (field.getMod() == -1) ? -1 : ((field.getMod() - 4) & 0xffff));
case Types.REAL:
return new Float(getFloat(columnIndex));
case Types.DOUBLE:
return new Double(getDouble(columnIndex));
case Types.CHAR:
case Types.VARCHAR:
return getString(columnIndex);
case Types.DATE:
return getDate(columnIndex);
case Types.TIME:
return getTime(columnIndex);
case Types.TIMESTAMP:
return getTimestamp(columnIndex);
case Types.BINARY:
case Types.VARBINARY:
return getBytes(columnIndex);
default:
String type = field.getPGType();
// if the backend doesn't know the type then coerce to String
if (type.equals("unknown"))
{
return getString(columnIndex);
}
else
{
return connection.getObject(field.getPGType(), getString(columnIndex));
}
}
}
public Object getObject(String columnName) throws SQLException
{
return getObject(findColumn(columnName));
}
/*
* Map a ResultSet column name to a ResultSet column index
*/
public int findColumn(String columnName) throws SQLException
{
int i;
final int flen = fields.length;
for (i = 0 ; i < flen; ++i)
if (fields[i].getName().equalsIgnoreCase(columnName))
return (i + 1);
throw new PSQLException ("postgresql.res.colname", columnName);
}
/*
* We at times need to know if the resultSet we are working
* with is the result of an UPDATE, DELETE or INSERT (in which
* case, we only have a row count), or of a SELECT operation
* (in which case, we have multiple fields) - this routine
* tells us.
*/
public boolean reallyResultSet()
{
return (fields != null);
}
/*
* Since ResultSets can be chained, we need some method of
* finding the next one in the chain. The method getNext()
* returns the next one in the chain.
*
* @return the next ResultSet, or null if there are none
*/
public java.sql.ResultSet getNext()
{
return (java.sql.ResultSet)next;
}
/*
* This following method allows us to add a ResultSet object
* to the end of the current chain.
*/
public void append(AbstractJdbc1ResultSet r)
{
if (next == null)
next = (java.sql.ResultSet)r;
else
((AbstractJdbc1ResultSet)next).append(r);
}
/*
* If we are just a place holder for results, we still need
* to get an updateCount. This method returns it.
*/
public int getResultCount()
{
return updateCount;
}
/*
* We also need to provide a couple of auxiliary functions for
* the implementation of the ResultMetaData functions. In
* particular, we need to know the number of rows and the
* number of columns. Rows are also known as Tuples
*/
public int getTupleCount()
{
return rows.size();
}
/*
* getColumnCount returns the number of columns
*/
public int getColumnCount()
{
return fields.length;
}
/*
* Returns the status message from the backend.<p>
* It is used internally by the driver.
*/
public String getStatusString()
{
return status;
}
/*
* returns the OID of a field.<p>
* It is used internally by the driver.
*/
public int getColumnOID(int field)
{
return fields[field -1].getOID();
}
/*
* returns the OID of the last inserted row. Deprecated in 7.2 because
* range for OID values is greater than java signed int.
* @deprecated Replaced by getLastOID() in 7.2
*/
public int getInsertedOID()
{
return (int) getLastOID();
}
/*
* returns the OID of the last inserted row
* @since 7.2
*/
public long getLastOID()
{
return insertOID;
}
/*
* This is used to fix get*() methods on Money fields. It should only be
* used by those methods!
*
* It converts ($##.##) to -##.## and $##.## to ##.##
*/
public String getFixedString(int col) throws SQLException
{
String s = getString(col);
// Handle SQL Null
wasNullFlag = (this_row[col - 1] == null);
if (wasNullFlag)
return null;
// Handle Money
if (s.charAt(0) == '(')
{
s = "-" + org.postgresql.util.PGtokenizer.removePara(s).substring(1);
}
if (s.charAt(0) == '$')
{
s = s.substring(1);
}
return s;
}
protected void checkResultSet( int column ) throws SQLException
{
if ( this_row == null )
throw new PSQLException("postgresql.res.nextrequired");
if ( column < 1 || column > fields.length )
throw new PSQLException("postgresql.res.colrange" );
}
//----------------- Formatting Methods -------------------
public static boolean toBoolean(String s)
{
if (s != null)
{
int c = s.charAt(0);
return ((c == 't') || (c == 'T') || (c == '1'));
}
return false; // SQL NULL
}
public static int toInt(String s) throws SQLException
{
if (s != null)
{
try
{
return Integer.parseInt(s);
}
catch (NumberFormatException e)
{
throw new PSQLException ("postgresql.res.badint", s);
}
}
return 0; // SQL NULL
}
public static long toLong(String s) throws SQLException
{
if (s != null)
{
try
{
return Long.parseLong(s);
}
catch (NumberFormatException e)
{
throw new PSQLException ("postgresql.res.badlong", s);
}
}
return 0; // SQL NULL
}
public static BigDecimal toBigDecimal(String s, int scale) throws SQLException
{
BigDecimal val;
if (s != null)
{
try
{
val = new BigDecimal(s);
}
catch (NumberFormatException e)
{
throw new PSQLException ("postgresql.res.badbigdec", s);
}
if (scale == -1)
return val;
try
{
return val.setScale(scale);
}
catch (ArithmeticException e)
{
throw new PSQLException ("postgresql.res.badbigdec", s);
}
}
return null; // SQL NULL
}
public static float toFloat(String s) throws SQLException
{
if (s != null)
{
try
{
return Float.valueOf(s).floatValue();
}
catch (NumberFormatException e)
{
throw new PSQLException ("postgresql.res.badfloat", s);
}
}
return 0; // SQL NULL
}
public static double toDouble(String s) throws SQLException
{
if (s != null)
{
try
{
return Double.valueOf(s).doubleValue();
}
catch (NumberFormatException e)
{
throw new PSQLException ("postgresql.res.baddouble", s);
}
}
return 0; // SQL NULL
}
public static java.sql.Date toDate(String s) throws SQLException
{
if (s == null)
return null;
// length == 10: SQL Date
// length > 10: SQL Timestamp, assumes PGDATESTYLE=ISO
try
{
return java.sql.Date.valueOf((s.length() == 10) ? s : s.substring(0, 10));
}
catch (NumberFormatException e)
{
throw new PSQLException("postgresql.res.baddate", s);
}
}
public static Time toTime(String s, java.sql.ResultSet resultSet, String pgDataType) throws SQLException
{
if (s == null)
return null; // SQL NULL
try
{
if (s.length() == 8)
{
//value is a time value
return java.sql.Time.valueOf(s);
}
else if (s.indexOf(".") == 8)
{
//value is a time value with fractional seconds
java.sql.Time l_time = java.sql.Time.valueOf(s.substring(0, 8));
String l_strMillis = s.substring(9);
if (l_strMillis.length() > 3)
l_strMillis = l_strMillis.substring(0, 3);
int l_millis = Integer.parseInt(l_strMillis);
if (l_millis < 10)
{
l_millis = l_millis * 100;
}
else if (l_millis < 100)
{
l_millis = l_millis * 10;
}
return new java.sql.Time(l_time.getTime() + l_millis);
}
else
{
//value is a timestamp
return new java.sql.Time(toTimestamp(s, resultSet, pgDataType).getTime());
}
}
catch (NumberFormatException e)
{
throw new PSQLException("postgresql.res.badtime", s);
}
}
/**
* Parse a string and return a timestamp representing its value.
*
* The driver is set to return ISO date formated strings. We modify this
* string from the ISO format to a format that Java can understand. Java
* expects timezone info as 'GMT+09:00' where as ISO gives '+09'.
* Java also expects fractional seconds to 3 places where postgres
* will give, none, 2 or 6 depending on the time and postgres version.
* From version 7.2 postgres returns fractional seconds to 6 places.
* If available, we drop the last 3 digits.
*
* @param s The ISO formated date string to parse.
* @param resultSet The ResultSet this date is part of.
*
* @return null if s is null or a timestamp of the parsed string s.
*
* @throws SQLException if there is a problem parsing s.
**/
public static Timestamp toTimestamp(String s, java.sql.ResultSet resultSet, String pgDataType)
throws SQLException
{
AbstractJdbc1ResultSet rs = (AbstractJdbc1ResultSet)resultSet;
if (s == null)
return null;
// We must be synchronized here incase more theads access the ResultSet
// bad practice but possible. Anyhow this is to protect sbuf and
// SimpleDateFormat objects
synchronized (rs)
{
SimpleDateFormat df = null;
if ( org.postgresql.Driver.logDebug )
org.postgresql.Driver.debug("the data from the DB is " + s);
// If first time, create the buffer, otherwise clear it.
if (rs.sbuf == null)
rs.sbuf = new StringBuffer(32);
else
{
rs.sbuf.setLength(0);
}
// Copy s into sbuf for parsing.
rs.sbuf.append(s);
int slen = s.length();
if (slen > 19)
{
// The len of the ISO string to the second value is 19 chars. If
// greater then 19, there may be tz info and perhaps fractional
// second info which we need to change to java to read it.
// cut the copy to second value "2001-12-07 16:29:22"
int i = 19;
rs.sbuf.setLength(i);
char c = s.charAt(i++);
if (c == '.')
{
// Found a fractional value. Append up to 3 digits including
// the leading '.'
do
{
if (i < 24)
rs.sbuf.append(c);
c = s.charAt(i++);
}
while (i < slen && Character.isDigit(c));
// If there wasn't at least 3 digits we should add some zeros
// to make up the 3 digits we tell java to expect.
for (int j = i; j < 24; j++)
rs.sbuf.append('0');
}
else
{
// No fractional seconds, lets add some.
rs.sbuf.append(".000");
}
if (i < slen)
{
// prepend the GMT part and then add the remaining bit of
// the string.
rs.sbuf.append(" GMT");
rs.sbuf.append(c);
rs.sbuf.append(s.substring(i, slen));
// Lastly, if the tz part doesn't specify the :MM part then
// we add ":00" for java.
if (slen - i < 5)
rs.sbuf.append(":00");
// we'll use this dateformat string to parse the result.
df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS z");
}
else
{
// Just found fractional seconds but no timezone.
//If timestamptz then we use GMT, else local timezone
if (pgDataType.equals("timestamptz"))
{
rs.sbuf.append(" GMT");
df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS z");
}
else
{
df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
}
}
}
else if (slen == 19)
{
// No tz or fractional second info.
//If timestamptz then we use GMT, else local timezone
if (pgDataType.equals("timestamptz"))
{
rs.sbuf.append(" GMT");
df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
}
else
{
df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
}
}
else
{
if (slen == 8 && s.equals("infinity"))
//java doesn't have a concept of postgres's infinity
//so set to an arbitrary future date
s = "9999-01-01";
if (slen == 9 && s.equals("-infinity"))
//java doesn't have a concept of postgres's infinity
//so set to an arbitrary old date
s = "0001-01-01";
// We must just have a date. This case is
// needed if this method is called on a date
// column
df = new SimpleDateFormat("yyyy-MM-dd");
}
try
{
// All that's left is to parse the string and return the ts.
if ( org.postgresql.Driver.logDebug )
org.postgresql.Driver.debug( "" + df.parse(rs.sbuf.toString()).getTime() );
return new Timestamp(df.parse(rs.sbuf.toString()).getTime());
}
catch (ParseException e)
{
throw new PSQLException("postgresql.res.badtimestamp", new Integer(e.getErrorOffset()), s);
}
}
}
}
| Applied patch submitted by Kris Jurka to result in a better error message
under some circumstances and handle negative money values better.
Modified Files:
jdbc/org/postgresql/jdbc1/AbstractJdbc1ResultSet.java
| org/postgresql/jdbc1/AbstractJdbc1ResultSet.java | Applied patch submitted by Kris Jurka to result in a better error message under some circumstances and handle negative money values better. | <ide><path>rg/postgresql/jdbc1/AbstractJdbc1ResultSet.java
<ide> if (wasNullFlag)
<ide> return null;
<ide>
<add> // if we don't have at least 2 characters it can't be money.
<add> if (s.length() < 2)
<add> return s;
<add>
<ide> // Handle Money
<ide> if (s.charAt(0) == '(')
<ide> {
<ide> if (s.charAt(0) == '$')
<ide> {
<ide> s = s.substring(1);
<add> }
<add> else if (s.charAt(0) == '-' && s.charAt(1) == '$')
<add> {
<add> s = "-" + s.substring(2);
<ide> }
<ide>
<ide> return s; |
|
Java | apache-2.0 | 64cb2a115e1349fcbdd3789fcb8708a24ac21942 | 0 | aravindc/databenecommons | /*
* (c) Copyright 2011-2013 by Volker Bergmann. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted under the terms of the
* GNU General Public License (GPL).
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* WITHOUT A WARRANTY OF ANY KIND. ALL EXPRESS OR IMPLIED CONDITIONS,
* REPRESENTATIONS AND WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE
* HEREBY EXCLUDED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.databene.commons;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides ZIP-related convenience methods.<br/><br/>
* Created: 20.10.2011 15:19:07
* @since 0.5.10
* @author Volker Bergmann
*/
public class ZipUtil {

    /** Size of the buffer used when copying file content into the archive. */
    private static final int BUFFER_SIZE = 2048;

    private static final Logger LOGGER = LoggerFactory.getLogger(ZipUtil.class);

    /**
     * Compresses a file or directory into a ZIP archive and deletes the
     * source after successful compression.
     * @param source the file or directory to compress
     * @param zipFile the ZIP archive to create
     * @throws RuntimeException if an I/O error occurs while compressing
     */
    public static void compressAndDelete(File source, File zipFile) {
        try {
            compress(source, zipFile);
            source.delete();
        } catch (IOException e) {
            // keep the original exception as cause for diagnosis
            throw new RuntimeException("Unexpected error", e);
        }
    }

    /**
     * Recursively compresses a file or directory into a ZIP archive.
     * Entries are stored with paths relative to the source root.
     * @param source the file or directory to compress
     * @param zipFile the ZIP archive to create
     * @throws IOException if the target archive cannot be created
     */
    public static void compress(File source, File zipFile) throws IOException {
        ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile)));
        out.setMethod(ZipOutputStream.DEFLATED);
        try {
            addFileOrDirectory(source, source, out);
            out.close();
        } catch (IOException e) {
            // preserve the cause instead of swallowing it
            throw new RuntimeException("Zipping the report failed", e);
        } finally {
            // close unconditionally so the stream is not leaked on failure;
            // a second close() on an already closed stream is a no-op
            IOUtil.close(out);
        }
    }

    /** Prints the names of all entries of the given ZIP file to stdout. */
    public static void printContent(File zipFile) {
        ZipInputStream in = null;
        try {
            in = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipFile)));
            ZipEntry entry;
            while ((entry = in.getNextEntry()) != null)
                System.out.println(entry.getName());
        } catch (IOException e) {
            LOGGER.error("Error listing archive content of file " + zipFile, e);
        } finally {
            IOUtil.close(in);
        }
    }

    // private helpers -------------------------------------------------------------------------------------------------

    /** Adds a file or, recursively, a directory to the archive. */
    private static void addFileOrDirectory(File source, File root, ZipOutputStream out) throws IOException {
        if (source.isFile())
            addFile(source, root, out);
        else if (source.isDirectory())
            addDirectory(source, root, out);
    }

    /** Recursively adds all children of a directory to the archive. */
    private static void addDirectory(File source, File root, ZipOutputStream out) throws IOException {
        File[] children = source.listFiles();
        if (children == null) // listFiles() returns null on I/O error or missing permission
            return;
        for (File file : children)
            addFileOrDirectory(file, root, out);
    }

    /** Adds a single file to the archive under its path relative to the root directory. */
    private static void addFile(File source, File root, ZipOutputStream out) throws IOException {
        byte[] buffer = new byte[BUFFER_SIZE];
        InputStream in = new BufferedInputStream(new FileInputStream(source));
        try {
            ZipEntry entry = new ZipEntry(FileUtil.relativePath(root, source));
            out.putNextEntry(entry);
            int count;
            while ((count = in.read(buffer, 0, BUFFER_SIZE)) != -1)
                out.write(buffer, 0, count);
        } finally {
            // release the file handle even if writing to the archive fails
            IOUtil.close(in);
        }
    }

}
| src/main/java/org/databene/commons/ZipUtil.java | /*
* (c) Copyright 2011 by Volker Bergmann. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted under the terms of the
* GNU General Public License (GPL).
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* WITHOUT A WARRANTY OF ANY KIND. ALL EXPRESS OR IMPLIED CONDITIONS,
* REPRESENTATIONS AND WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE
* HEREBY EXCLUDED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.databene.commons;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides ZIP-related convenience methods.<br/><br/>
* Created: 20.10.2011 15:19:07
* @since 0.5.10
* @author Volker Bergmann
*/
public class ZipUtil {

    /** Size of the buffer used when copying file content into the archive. */
    private static final int BUFFER_SIZE = 2048;

    private static final Logger LOGGER = LoggerFactory.getLogger(ZipUtil.class);

    /**
     * Recursively compresses a file or directory into a ZIP archive.
     * Entries are stored with paths relative to the source root.
     * @param source the file or directory to compress
     * @param zipFile the ZIP archive to create
     * @throws IOException if the target archive cannot be created
     */
    public static void compress(File source, File zipFile) throws IOException {
        ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile)));
        out.setMethod(ZipOutputStream.DEFLATED);
        try {
            addFileOrDirectory(source, source, out);
            out.close();
        } catch (IOException e) {
            // preserve the cause instead of swallowing it
            throw new RuntimeException("Zipping the report failed", e);
        } finally {
            // close unconditionally so the stream is not leaked on failure;
            // a second close() on an already closed stream is a no-op
            IOUtil.close(out);
        }
    }

    /** Prints the names of all entries of the given ZIP file to stdout. */
    public static void printContent(File zipFile) {
        ZipInputStream in = null;
        try {
            in = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipFile)));
            ZipEntry entry;
            while ((entry = in.getNextEntry()) != null)
                System.out.println(entry.getName());
        } catch (IOException e) {
            LOGGER.error("Error listing archive content of file " + zipFile, e);
        } finally {
            IOUtil.close(in);
        }
    }

    // private helpers -------------------------------------------------------------------------------------------------

    /** Adds a file or, recursively, a directory to the archive. */
    private static void addFileOrDirectory(File source, File root, ZipOutputStream out) throws IOException {
        if (source.isFile())
            addFile(source, root, out);
        else if (source.isDirectory())
            addDirectory(source, root, out);
    }

    /** Recursively adds all children of a directory to the archive. */
    private static void addDirectory(File source, File root, ZipOutputStream out) throws IOException {
        File[] children = source.listFiles();
        if (children == null) // listFiles() returns null on I/O error or missing permission
            return;
        for (File file : children)
            addFileOrDirectory(file, root, out);
    }

    /** Adds a single file to the archive under its path relative to the root directory. */
    private static void addFile(File source, File root, ZipOutputStream out) throws IOException {
        byte[] buffer = new byte[BUFFER_SIZE];
        InputStream in = new BufferedInputStream(new FileInputStream(source));
        try {
            ZipEntry entry = new ZipEntry(FileUtil.relativePath(root, source));
            out.putNextEntry(entry);
            int count;
            while ((count = in.read(buffer, 0, BUFFER_SIZE)) != -1)
                out.write(buffer, 0, count);
        } finally {
            // release the file handle even if writing to the archive fails
            IOUtil.close(in);
        }
    }

}
| new method compressAndDelete()
git-svn-id: 5d4207b98d7274194a51d188374a5d8ee1fe55c3@279 783448d1-dfc0-4521-9cb0-58c191f7d5bb
| src/main/java/org/databene/commons/ZipUtil.java | new method compressAndDelete() | <ide><path>rc/main/java/org/databene/commons/ZipUtil.java
<ide> /*
<del> * (c) Copyright 2011 by Volker Bergmann. All rights reserved.
<add> * (c) Copyright 2011-2013 by Volker Bergmann. All rights reserved.
<ide> *
<ide> * Redistribution and use in source and binary forms, with or without
<ide> * modification, is permitted under the terms of the
<ide> private static final int BUFFER_SIZE = 2048;
<ide>
<ide> private static final Logger LOGGER = LoggerFactory.getLogger(ZipUtil.class);
<del>
<add>
<add> public static void compressAndDelete(File source, File zipFile) {
<add> try {
<add> compress(source, zipFile);
<add> source.delete();
<add> } catch (IOException e) {
<add> throw new RuntimeException("Unexpected error", e);
<add> }
<add> }
<add>
<ide> public static void compress(File source, File zipFile) throws IOException {
<ide> ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile)));
<ide> out.setMethod(ZipOutputStream.DEFLATED); |
|
Java | mit | 5754490c29f2271803fd48ebc78f00c7dd35dffe | 0 | Aquerr/EagleFactions,Aquerr/EagleFactions | package io.github.aquerr.eaglefactions.common.integrations.placeholderapi;
import io.github.aquerr.eaglefactions.api.EagleFactions;
import io.github.aquerr.eaglefactions.api.entities.Faction;
import io.github.aquerr.eaglefactions.api.entities.FactionPlayer;
import io.github.aquerr.eaglefactions.api.managers.PowerManager;
import me.rojo8399.placeholderapi.*;
import org.spongepowered.api.data.manipulator.mutable.entity.JoinData;
import org.spongepowered.api.data.value.mutable.Value;
import org.spongepowered.api.entity.living.player.User;
import org.spongepowered.api.text.Text;
import javax.annotation.Nullable;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
public class EFPlaceholderService
{
    private static EFPlaceholderService INSTANCE = null;

    /** Format used for the player's "last_online" placeholder value. */
    private static final DateTimeFormatter LAST_ONLINE_FORMAT = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss");

    private final EagleFactions plugin;
    private final PlaceholderService placeholderService;

    /**
     * Returns the singleton instance, creating it and registering all
     * Eagle Factions placeholders on first invocation.
     *
     * @param plugin the Eagle Factions plugin instance
     * @param placeholderService the PlaceholderAPI service, passed as Object so this
     *                           class can be referenced without PlaceholderAPI present
     */
    public static EFPlaceholderService getInstance(final EagleFactions plugin, Object placeholderService)
    {
        if(INSTANCE == null) {
            INSTANCE = new EFPlaceholderService(plugin, (PlaceholderService) placeholderService);
        }
        return INSTANCE;
    }

    private EFPlaceholderService(final EagleFactions plugin, PlaceholderService placeholderService)
    {
        this.plugin = plugin;
        this.placeholderService = placeholderService;
        registerPlaceholders();
    }

    public PlaceholderService getPlaceholderService()
    {
        return placeholderService;
    }

    /**
     * Registers the "faction" and "factionplayer" placeholder expansions
     * with PlaceholderAPI.
     */
    private void registerPlaceholders()
    {
        placeholderService.loadAll(this, this.plugin).stream().map(builder -> {
            switch(builder.getId())
            {
                case "faction": {
                    try
                    {
                        return ((ExpansionBuilder) builder)
                                .tokens("name", "tag", "power", "maxpower", "last_online", "claims_count", "alliances",
                                        "enemies", "truce", "officers_count", "members_count", "recruits_count")
                                .description("Player's faction's placeholders.")
                                .url("https://github.com/Aquerr/EagleFactions")
                                .author("Aquerr (Nerdi)")
                                .version("1.0")
                                .plugin(this.plugin);
                    }
                    catch(Exception e)
                    {
                        e.printStackTrace();
                    }
                    // break is required: without it a failed "faction" build would
                    // fall through and be configured with "factionplayer" tokens
                    break;
                }
                case "factionplayer":
                {
                    try
                    {
                        return ((ExpansionBuilder) builder)
                                .tokens("power", "maxpower", "last_online")
                                .description("Player's placeholders.")
                                .url("https://github.com/Aquerr/EagleFactions")
                                .author("Aquerr (Nerdi)")
                                .version("1.0")
                                .plugin(this.plugin);
                    }
                    catch(Exception e)
                    {
                        e.printStackTrace();
                    }
                    break;
                }
            }
            return builder;
        }).map(builder -> builder.author("Aquerr (Nerdi)").version("1.0")).forEach(builder -> {
            try
            {
                builder.buildAndRegister();
            }
            catch(Exception e)
            {
                e.printStackTrace();
            }
        });
    }

    /**
     * Resolves a "faction" placeholder token against the given player's faction.
     * Returns an empty string for a null or unknown token.
     */
    @Placeholder(id = "faction")
    public Object faction(@Token(fix = true) @Nullable String token, @Nullable @Source User player)
    {
        if(token == null)
            return "";

        switch(token)
        {
            case "name":
                return getFactionName(player);
            case "tag":
                return getFactionTag(player);
            case "power":
                return getFactionPower(player);
            case "maxpower":
                return getFactionMaxPower(player);
            case "last_online":
                return getFactionLastOnline(player);
            case "claims_count":
                return getFactionClaimCount(player);
            case "officers_count":
                return getFactionOfficerCount(player);
            case "members_count":
                return getFactionMemberCount(player);
            case "recruits_count":
                return getFactionRecruitCount(player);
            case "alliances":
                return getFactionAlliances(player);
            case "enemies":
                return getFactionEnemies(player);
            case "truce":
                return getFactionTruce(player);
        }
        return "";
    }

    /**
     * Resolves a "factionplayer" placeholder token for the given player.
     * Returns an empty string for a null or unknown token.
     */
    @Placeholder(id = "factionplayer")
    public Object factionPlayer(@Token(fix = true) @Nullable String token, @Nullable @Source User player)
    {
        if(token == null)
            return "";

        switch(token)
        {
            case "power":
                return getPlayerPower(player);
            case "maxpower":
                return getPlayerMaxPower(player);
            case "last_online":
                return getPlayerLastOnline(player);
        }
        return "";
    }

    /** Returns the name of the player's faction, or "" if the player has none. */
    private String getFactionName(final User user)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId())
                .map(Faction::getName)
                .orElse("");
    }

    /** Returns the tag of the player's faction, or an empty Text if the player has none. */
    private Text getFactionTag(final User user)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId())
                .map(Faction::getTag)
                .orElse(Text.EMPTY);
    }

    /** Returns the current power of the player's faction, or 0 if the player has none. */
    private float getFactionPower(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(this.plugin.getPowerManager()::getFactionPower)
                .orElse(0F);
    }

    /** Returns the maximum power of the player's faction, or 0 if the player has none. */
    private float getFactionMaxPower(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(this.plugin.getPowerManager()::getFactionMaxPower)
                .orElse(0F);
    }

    /** Returns the faction's last-online instant; falls back to "now" when the player has no faction. */
    private Instant getFactionLastOnline(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getLastOnline)
                .orElse(Instant.now());
    }

    /** Returns the number of chunks claimed by the player's faction. */
    private int getFactionClaimCount(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getClaims)
                .map(Set::size)
                .orElse(0);
    }

    /** Returns the number of officers in the player's faction. */
    private int getFactionOfficerCount(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getOfficers)
                .map(Set::size)
                .orElse(0);
    }

    /** Returns the number of members in the player's faction. */
    private int getFactionMemberCount(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getMembers)
                .map(Set::size)
                .orElse(0);
    }

    /** Returns the number of recruits in the player's faction. */
    private int getFactionRecruitCount(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getRecruits)
                .map(Set::size)
                .orElse(0);
    }

    /** Returns the names of the allied factions, or an empty set if the player has no faction. */
    private Set<String> getFactionAlliances(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getAlliances)
                .orElse(Collections.emptySet());
    }

    /** Returns the names of the enemy factions, or an empty set if the player has no faction. */
    private Set<String> getFactionEnemies(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getEnemies)
                .orElse(Collections.emptySet());
    }

    /** Returns the names of the truce factions, or an empty set if the player has no faction. */
    private Set<String> getFactionTruce(final User player)
    {
        return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
                .map(Faction::getTruces)
                .orElse(Collections.emptySet());
    }

    //
    //FactionPlayer placeholder methods starts here.
    //

    /** Returns the player's own power, or 0 if unknown. */
    private float getPlayerPower(final User player)
    {
        return this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId())
                .map(FactionPlayer::getPower)
                .orElse(0F);
    }

    /** Returns the player's own maximum power, or 0 if unknown. */
    private float getPlayerMaxPower(final User player)
    {
        return this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId())
                .map(FactionPlayer::getMaxPower)
                .orElse(0F);
    }

    /** Returns the player's last join time formatted as "dd-MM-yyyy HH:mm:ss", or "" if unavailable. */
    private String getPlayerLastOnline(final User player)
    {
        return player.get(JoinData.class)
                .map(JoinData::lastPlayed)
                .map(Value::get)
                .map(instant -> LocalDateTime.ofInstant(instant, ZoneId.systemDefault()))
                .map(LAST_ONLINE_FORMAT::format)
                .orElse("");
    }
}
| common/src/main/java/io/github/aquerr/eaglefactions/common/integrations/placeholderapi/EFPlaceholderService.java | package io.github.aquerr.eaglefactions.common.integrations.placeholderapi;
import io.github.aquerr.eaglefactions.api.EagleFactions;
import io.github.aquerr.eaglefactions.api.entities.Faction;
import io.github.aquerr.eaglefactions.api.entities.FactionPlayer;
import me.rojo8399.placeholderapi.*;
import org.spongepowered.api.entity.living.player.User;
import org.spongepowered.api.text.Text;
import javax.annotation.Nullable;
import java.time.Instant;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
public class EFPlaceholderService
{
    private static EFPlaceholderService INSTANCE = null;

    private final EagleFactions plugin;
    private final PlaceholderService placeholderService;

    /**
     * Returns the singleton instance, creating it and registering all
     * Eagle Factions placeholders on first invocation.
     *
     * @param plugin the Eagle Factions plugin instance
     * @param placeholderService the PlaceholderAPI service, passed as Object so this
     *                           class can be referenced without PlaceholderAPI present
     */
    public static EFPlaceholderService getInstance(final EagleFactions plugin, Object placeholderService)
    {
        if(INSTANCE == null) {
            INSTANCE = new EFPlaceholderService(plugin, (PlaceholderService) placeholderService);
        }
        return INSTANCE;
    }

    private EFPlaceholderService(final EagleFactions plugin, PlaceholderService placeholderService)
    {
        this.plugin = plugin;
        this.placeholderService = placeholderService;
        registerPlaceholders();
    }

    public PlaceholderService getPlaceholderService()
    {
        return placeholderService;
    }

    /**
     * Registers the "faction" and "factionplayer" placeholder expansions
     * with PlaceholderAPI.
     */
    private void registerPlaceholders()
    {
        placeholderService.loadAll(this, this.plugin).stream().map(builder -> {
            switch(builder.getId())
            {
                case "faction": {
                    try
                    {
                        return ((ExpansionBuilder) builder)
                                .tokens("name", "tag", "power", "maxpower", "last_online", "claims_count", "alliances",
                                        "enemies", "truce", "officers_count", "members_count", "recruits_count")
                                .description("Player's faction's placeholders.")
                                .url("https://github.com/Aquerr/EagleFactions")
                                .author("Aquerr (Nerdi)")
                                .version("1.0")
                                .plugin(this.plugin);
                    }
                    catch(Exception e)
                    {
                        e.printStackTrace();
                    }
                    // break is required: without it a failed "faction" build would
                    // fall through and be configured with "factionplayer" tokens
                    break;
                }
                case "factionplayer":
                {
                    try
                    {
                        return ((ExpansionBuilder) builder)
                                .tokens("power", "maxpower", "last_online")
                                .description("Player's placeholders.")
                                .url("https://github.com/Aquerr/EagleFactions")
                                .author("Aquerr (Nerdi)")
                                .version("1.0")
                                .plugin(this.plugin);
                    }
                    catch(Exception e)
                    {
                        e.printStackTrace();
                    }
                    break;
                }
            }
            return builder;
        }).map(builder -> builder.author("Aquerr (Nerdi)").version("1.0")).forEach(builder -> {
            try
            {
                builder.buildAndRegister();
            }
            catch(Exception e)
            {
                e.printStackTrace();
            }
        });
    }

    /**
     * Resolves a "faction" placeholder token against the given player's faction.
     * Returns an empty string for a null or unknown token.
     */
    @Placeholder(id = "faction")
    public Object faction(@Token(fix = true) @Nullable String token, @Nullable @Source User player)
    {
        if(token == null)
            return "";

        switch(token)
        {
            case "name":
                return getFactionName(player);
            case "tag":
                return getFactionTag(player);
            case "power":
                return getFactionPower(player);
            case "maxpower":
                return getFactionMaxPower(player);
            case "last_online":
                return getFactionLastOnline(player);
            case "claims_count":
                return getFactionClaimCount(player);
            case "officers_count":
                return getFactionOfficerCount(player);
            case "members_count":
                return getFactionMemberCount(player);
            case "recruits_count":
                return getFactionRecruitCount(player);
            case "alliances":
                return getFactionAlliances(player);
            case "enemies":
                return getFactionEnemies(player);
            case "truce":
                return getFactionTruce(player);
        }
        return "";
    }

    /**
     * Resolves a "factionplayer" placeholder token for the given player.
     * Returns an empty string for a null or unknown token.
     */
    @Placeholder(id = "factionplayer")
    public Object factionPlayer(@Token(fix = true) @Nullable String token, @Nullable @Source User player)
    {
        if(token == null)
            return "";

        switch(token)
        {
            case "power":
                return getPlayerPower(player);
            case "maxpower":
                return getPlayerMaxPower(player);
            case "last_online":
                return getPlayerLastOnline(player);
        }
        return "";
    }

    /** Returns the name of the player's faction, or "" if the player has none. */
    private String getFactionName(final User user)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId());
        if(optionalFaction.isPresent())
        {
            return optionalFaction.get().getName();
        }
        else return "";
    }

    /** Returns the tag of the player's faction, or an empty Text if the player has none. */
    private Text getFactionTag(final User user)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId());
        if(optionalFaction.isPresent())
        {
            return optionalFaction.get().getTag();
        }
        else return Text.of("");
    }

    /** Returns the current power of the player's faction, or 0 if the player has none. */
    private float getFactionPower(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(faction -> this.plugin.getPowerManager().getFactionPower(faction)).orElse(0F);
    }

    /** Returns the maximum power of the player's faction, or 0 if the player has none. */
    private float getFactionMaxPower(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(faction -> this.plugin.getPowerManager().getFactionMaxPower(faction)).orElse(0F);
    }

    /** Returns the faction's last-online instant; falls back to "now" when the player has no faction. */
    private Instant getFactionLastOnline(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        if(optionalFaction.isPresent())
        {
            return optionalFaction.get().getLastOnline();
        }
        return Instant.now();
    }

    /** Returns the number of chunks claimed by the player's faction. */
    private int getFactionClaimCount(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(x->x.getClaims().size()).orElse(0);
    }

    /** Returns the number of officers in the player's faction. */
    private int getFactionOfficerCount(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(x->x.getOfficers().size()).orElse(0);
    }

    /** Returns the number of members in the player's faction. */
    private int getFactionMemberCount(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(x->x.getMembers().size()).orElse(0);
    }

    /** Returns the number of recruits in the player's faction. */
    private int getFactionRecruitCount(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(x->x.getRecruits().size()).orElse(0);
    }

    /** Returns the names of the allied factions, or an empty set if the player has no faction. */
    private Set<String> getFactionAlliances(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(Faction::getAlliances).orElse(new HashSet<>());
    }

    /** Returns the names of the enemy factions, or an empty set if the player has no faction. */
    private Set<String> getFactionEnemies(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(Faction::getEnemies).orElse(new HashSet<>());
    }

    /** Returns the names of the truce factions, or an empty set if the player has no faction. */
    private Set<String> getFactionTruce(final User player)
    {
        final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
        return optionalFaction.map(Faction::getTruces).orElse(new HashSet<>());
    }

    //
    //FactionPlayer placeholder methods starts here.
    //

    /** Returns the player's own power, or 0 if unknown. */
    private float getPlayerPower(final User player)
    {
        final Optional<FactionPlayer> optionalFactionPlayer = this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId());
        return optionalFactionPlayer.map(FactionPlayer::getPower).orElse(0f);
    }

    /** Returns the player's own maximum power, or 0 if unknown. */
    private float getPlayerMaxPower(final User player)
    {
        final Optional<FactionPlayer> optionalFactionPlayer = this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId());
        return optionalFactionPlayer.map(FactionPlayer::getMaxPower).orElse(0f);
    }

    /** Stub: always returns "" — last-online lookup for players is not implemented yet (TODO). */
    private String getPlayerLastOnline(final User player)
    {
        //TODO: resolve the player's last join time once the player manager exposes it
        return "";
    }
}
| Refactor methods for getting placeholder values from EF. Add player last online placeholder.
| common/src/main/java/io/github/aquerr/eaglefactions/common/integrations/placeholderapi/EFPlaceholderService.java | Refactor methods for getting placeholders values from EF. Add player last online placeholder. | <ide><path>ommon/src/main/java/io/github/aquerr/eaglefactions/common/integrations/placeholderapi/EFPlaceholderService.java
<ide> import io.github.aquerr.eaglefactions.api.EagleFactions;
<ide> import io.github.aquerr.eaglefactions.api.entities.Faction;
<ide> import io.github.aquerr.eaglefactions.api.entities.FactionPlayer;
<add>import io.github.aquerr.eaglefactions.api.managers.PowerManager;
<ide> import me.rojo8399.placeholderapi.*;
<add>import org.spongepowered.api.data.manipulator.mutable.entity.JoinData;
<add>import org.spongepowered.api.data.value.mutable.Value;
<ide> import org.spongepowered.api.entity.living.player.User;
<ide> import org.spongepowered.api.text.Text;
<ide>
<ide> import javax.annotation.Nullable;
<ide> import java.time.Instant;
<add>import java.time.LocalDateTime;
<add>import java.time.ZoneId;
<add>import java.time.format.DateTimeFormatter;
<add>import java.util.Collections;
<ide> import java.util.HashSet;
<ide> import java.util.Optional;
<ide> import java.util.Set;
<ide>
<ide> private String getFactionName(final User user)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId());
<del> if(optionalFaction.isPresent())
<del> {
<del> return optionalFaction.get().getName();
<del> }
<del> else return "";
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId())
<add> .map(Faction::getName)
<add> .orElse("");
<ide> }
<ide>
<ide> private Text getFactionTag(final User user)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId());
<del> if(optionalFaction.isPresent())
<del> {
<del> return optionalFaction.get().getTag();
<del> }
<del> else return Text.of("");
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(user.getUniqueId())
<add> .map(Faction::getTag)
<add> .orElse(Text.EMPTY);
<ide> }
<ide>
<ide> private float getFactionPower(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(faction -> this.plugin.getPowerManager().getFactionPower(faction)).orElse(0F);
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(this.plugin.getPowerManager()::getFactionPower)
<add> .orElse(0F);
<ide> }
<ide>
<ide> private float getFactionMaxPower(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(faction -> this.plugin.getPowerManager().getFactionMaxPower(faction)).orElse(0F);
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(this.plugin.getPowerManager()::getFactionMaxPower)
<add> .orElse(0F);
<ide> }
<ide>
<ide> private Instant getFactionLastOnline(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> if(optionalFaction.isPresent())
<del> {
<del>// final Date date = Date.from(optionalFaction.get().getLastOnline());
<del>// final SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");
<del>// return formatter.format(date);
<del> return optionalFaction.get().getLastOnline();
<del> }
<del> return Instant.now();
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getLastOnline)
<add> .orElse(Instant.now());
<ide> }
<ide>
<ide> private int getFactionClaimCount(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(x->x.getClaims().size()).orElse(0);
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getClaims)
<add> .map(Set::size)
<add> .orElse(0);
<ide> }
<ide>
<ide> private int getFactionOfficerCount(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(x->x.getOfficers().size()).orElse(0);
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getOfficers)
<add> .map(Set::size)
<add> .orElse(0);
<ide> }
<ide>
<ide> private int getFactionMemberCount(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(x->x.getMembers().size()).orElse(0);
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getMembers)
<add> .map(Set::size)
<add> .orElse(0);
<ide> }
<ide>
<ide> private int getFactionRecruitCount(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(x->x.getRecruits().size()).orElse(0);
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getRecruits)
<add> .map(Set::size)
<add> .orElse(0);
<ide> }
<ide>
<ide> private Set<String> getFactionAlliances(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(Faction::getAlliances).orElse(new HashSet<>());
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getAlliances)
<add> .orElse(Collections.emptySet());
<ide> }
<ide>
<ide> private Set<String> getFactionEnemies(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(Faction::getEnemies).orElse(new HashSet<>());
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getEnemies)
<add> .orElse(Collections.emptySet());
<ide> }
<ide>
<ide> private Set<String> getFactionTruce(final User player)
<ide> {
<del> final Optional<Faction> optionalFaction = this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId());
<del> return optionalFaction.map(Faction::getTruces).orElse(new HashSet<>());
<add> return this.plugin.getFactionLogic().getFactionByPlayerUUID(player.getUniqueId())
<add> .map(Faction::getTruces)
<add> .orElse(Collections.emptySet());
<ide> }
<ide>
<ide> //
<ide>
<ide> private float getPlayerPower(final User player)
<ide> {
<del> final Optional<FactionPlayer> optionalFactionPlayer = this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId());
<del> return optionalFactionPlayer.map(FactionPlayer::getPower).orElse(0f);
<add> return this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId())
<add> .map(FactionPlayer::getPower)
<add> .orElse(0F);
<ide> }
<ide>
<ide> private float getPlayerMaxPower(final User player)
<ide> {
<del> final Optional<FactionPlayer> optionalFactionPlayer = this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId());
<del> return optionalFactionPlayer.map(FactionPlayer::getMaxPower).orElse(0f);
<add> return this.plugin.getPlayerManager().getFactionPlayer(player.getUniqueId())
<add> .map(FactionPlayer::getMaxPower)
<add> .orElse(0F);
<ide> }
<ide>
<ide> private String getPlayerLastOnline(final User player)
<ide> {
<del> //TODO
<del>// this.plugin.getPlayerManager().get
<del> return "";
<add> return player.get(JoinData.class)
<add> .map(JoinData::lastPlayed)
<add> .map(Value::get)
<add> .map(instant -> LocalDateTime.ofInstant(instant, ZoneId.systemDefault()))
<add> .map(DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss")::format)
<add> .orElse("");
<ide> }
<ide> } |
|
Java | lgpl-2.1 | 7c788648b537c636c7d407ad670c256131def71b | 0 | certusoft/swingx,certusoft/swingx | /*
* $Id$
*
* Copyright 2006 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jdesktop.swingx.plaf.basic;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.FontMetrics;
import java.awt.Insets;
import java.awt.LayoutManager;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyVetoException;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import java.util.logging.Logger;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.Icon;
import javax.swing.InputMap;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFormattedTextField;
import javax.swing.JPopupMenu;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.JFormattedTextField.AbstractFormatter;
import javax.swing.JFormattedTextField.AbstractFormatterFactory;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.text.DefaultFormatterFactory;
import javax.swing.text.View;
import org.jdesktop.swingx.JXDatePicker;
import org.jdesktop.swingx.JXMonthView;
import org.jdesktop.swingx.calendar.CalendarUtils;
import org.jdesktop.swingx.calendar.DatePickerFormatter;
import org.jdesktop.swingx.calendar.DateSelectionModel;
import org.jdesktop.swingx.calendar.DatePickerFormatter.DatePickerFormatterUIResource;
import org.jdesktop.swingx.event.DateSelectionEvent;
import org.jdesktop.swingx.event.DateSelectionListener;
import org.jdesktop.swingx.event.DateSelectionEvent.EventType;
import org.jdesktop.swingx.plaf.DatePickerUI;
/**
* The basic implementation of a <code>DatePickerUI</code>.
* <p>
*
*
* @author Joshua Outwater
* @author Jeanette Winzenburg
*/
public class BasicDatePickerUI extends DatePickerUI {
    @SuppressWarnings("all")
    private static final Logger LOG = Logger.getLogger(BasicDatePickerUI.class
            .getName());
    // the component this delegate is installed to; null when uninstalled
    protected JXDatePicker datePicker;
    // arrow button toggling the popup; may be null if no button is created
    private JButton popupButton;
    // popup hosting the monthView; lazily created in toggleShowPopup,
    // nulled whenever the monthView or popup-related properties change
    private BasicDatePickerPopup popup;
    private Handler handler;
    /*
     * shared listeners
     */
    // property listener for the picker itself (also registered on popupButton)
    protected PropertyChangeListener propertyChangeListener;
    // focus listener shared by the picker and its editor
    private FocusListener focusListener;
    /*
     * listeners for the arrow button
     */
    protected MouseListener mouseListener;
    protected MouseMotionListener mouseMotionListener;
    /*
     * listeners for the picker's editor
     */
    private ActionListener editorActionListener;
    private EditorCancelAction editorCancelAction;
    private PropertyChangeListener editorPropertyListener;
    /*
     * listeners for the picker's monthview and its selection model
     */
    private DateSelectionListener monthViewSelectionListener;
    private ActionListener monthViewActionListener;
    private PropertyChangeListener monthViewPropertyListener;
@SuppressWarnings({"UnusedDeclaration"})
public static ComponentUI createUI(JComponent c) {
return new BasicDatePickerUI();
}
    /**
     * Installs this delegate on the given component: layout, children,
     * defaults, keyboard actions, listeners - in that order. Listeners
     * are wired last so the preceding configuration does not trigger them.
     *
     * @param c the JXDatePicker this delegate is installed to
     */
    @Override
    public void installUI(JComponent c) {
        datePicker = (JXDatePicker)c;
        datePicker.setLayout(createLayoutManager());
        installComponents();
        installDefaults();
        installKeyboardActions();
        installListeners();
    }
    /**
     * Uninstalls this delegate: tears down in reverse install order,
     * then releases the layout and the picker reference.
     *
     * @param c the JXDatePicker this delegate is uninstalled from
     */
    @Override
    public void uninstallUI(JComponent c) {
        uninstallListeners();
        uninstallKeyboardActions();
        uninstallDefaults();
        uninstallComponents();
        datePicker.setLayout(null);
        datePicker = null;
    }
    /**
     * Creates and adds the picker's children (editor and popup button).
     * Replaces the editor only if it is null or a UIResource (i.e. created
     * by a previous install), preserving any custom formats the old editor
     * carried. Also synchs the children's locale to the picker's.
     */
    protected void installComponents() {
        JFormattedTextField editor = datePicker.getEditor();
        if (editor == null || editor instanceof UIResource) {
            DateFormat[] formats = getCustomFormats(editor);
            // we are not yet listening ...
            datePicker.setEditor(createEditor());
            if (formats != null) {
                datePicker.setFormats(formats);
            }
        }
        updateFromEditorChanged(null, false);
        popupButton = createPopupButton();
        if (popupButton != null) {
            // this is a trick to get hold of the client prop which
            // prevents closing of the popup
            JComboBox box = new JComboBox();
            Object preventHide = box.getClientProperty("doNotCancelPopup");
            popupButton.putClientProperty("doNotCancelPopup", preventHide);
            datePicker.add(popupButton);
        }
        // JW: the condition is hacking around #681-swingx: overlapping rows in JXMonthView
        // in locales with first day of week monday if locale is set
        // this way the current brittle behaviour is at least not detoriated
        // (== default locales are okay)
        // if ((datePicker.getLocale() != null) &&
        // !datePicker.getLocale().equals(Locale.getDefault()))
        updateChildLocale(datePicker.getLocale());
    }
/**
* Checks and returns custom formats on the editor, if any.
*
* @param editor the editor to check
* @return the custom formats uses in the editor or null if it had
* used defaults as defined in the datepicker properties
*/
private DateFormat[] getCustomFormats(JFormattedTextField editor) {
DateFormat[] formats = null;
if (editor != null) {
AbstractFormatterFactory factory = editor.getFormatterFactory();
if (factory != null) {
AbstractFormatter formatter = factory.getFormatter(editor);
if (!(formatter instanceof DatePickerFormatterUIResource)) {
formats = ((DatePickerFormatter) formatter).getFormats();
}
}
}
return formats;
}
protected void uninstallComponents() {
JFormattedTextField editor = datePicker.getEditor();
if (editor != null) {
datePicker.remove(editor);
}
if (popupButton != null) {
datePicker.remove(popupButton);
popupButton = null;
}
}
    /**
     * Installs LF defaults on the picker. This implementation does nothing;
     * defaults are applied per-child in createEditor/createPopupButton.
     */
    protected void installDefaults() {
    }
    /**
     * Uninstalls LF defaults. Does nothing - counterpart of installDefaults.
     */
    protected void uninstallDefaults() {
    }
    /**
     * Registers the picker's actions (commit/cancel/home/toggle-popup) in
     * its ActionMap and binds ENTER, ESCAPE, F5, shift-F5 and SPACE to them
     * in the WHEN_ANCESTOR_OF_FOCUSED_COMPONENT InputMap. Finally mirrors
     * the home actions onto the linkPanel.
     */
    protected void installKeyboardActions() {
        // install picker's actions
        ActionMap pickerMap = datePicker.getActionMap();
        pickerMap.put(JXDatePicker.CANCEL_KEY, createCancelAction());
        pickerMap.put(JXDatePicker.COMMIT_KEY, createCommitAction());
        pickerMap.put(JXDatePicker.HOME_NAVIGATE_KEY, createHomeAction(false));
        pickerMap.put(JXDatePicker.HOME_COMMIT_KEY, createHomeAction(true));
        TogglePopupAction popupAction = createTogglePopupAction();
        pickerMap.put("TOGGLE_POPUP", popupAction);
        InputMap pickerInputMap = datePicker.getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
        pickerInputMap.put(KeyStroke.getKeyStroke("ENTER"), JXDatePicker.COMMIT_KEY);
        pickerInputMap.put(KeyStroke.getKeyStroke("ESCAPE"), JXDatePicker.CANCEL_KEY);
        // PENDING: get from LF
        pickerInputMap.put(KeyStroke.getKeyStroke("F5"), JXDatePicker.HOME_COMMIT_KEY);
        pickerInputMap.put(KeyStroke.getKeyStroke("shift F5"), JXDatePicker.HOME_NAVIGATE_KEY);
        pickerInputMap.put(KeyStroke.getKeyStroke("SPACE"), "TOGGLE_POPUP");
        installLinkPanelKeyboardActions();
    }
protected void uninstallKeyboardActions() {
uninstallLinkPanelKeyboardActions(datePicker.getLinkPanel());
}
/**
* Installs actions and key bindings on the datePicker's linkPanel. Does
* nothing if the linkPanel is null.
*
* PRE: keybindings installed on picker.
*/
protected void installLinkPanelKeyboardActions() {
if (datePicker.getLinkPanel() == null)
return;
ActionMap map = datePicker.getLinkPanel().getActionMap();
map.put(JXDatePicker.HOME_COMMIT_KEY, datePicker.getActionMap().get(
JXDatePicker.HOME_COMMIT_KEY));
map.put(JXDatePicker.HOME_NAVIGATE_KEY, datePicker.getActionMap().get(
JXDatePicker.HOME_NAVIGATE_KEY));
InputMap inputMap = datePicker.getLinkPanel().getInputMap(
JComponent.WHEN_IN_FOCUSED_WINDOW);
// PENDING: get from LF
inputMap.put(KeyStroke.getKeyStroke("F5"),
JXDatePicker.HOME_COMMIT_KEY);
inputMap.put(KeyStroke.getKeyStroke("shift F5"),
JXDatePicker.HOME_NAVIGATE_KEY);
}
/**
* Uninstalls actions and key bindings from linkPanel. Does nothing if the
* linkPanel is null.
*
* @param panel the component to uninstall
*
*/
protected void uninstallLinkPanelKeyboardActions(JComponent panel) {
if (panel == null) return;
ActionMap map = panel.getActionMap();
map.remove(JXDatePicker.HOME_COMMIT_KEY);
map.remove(JXDatePicker.HOME_NAVIGATE_KEY);
InputMap inputMap = panel.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW);
// PENDING: get from LF
inputMap.remove(KeyStroke.getKeyStroke("F5"));
inputMap.remove(KeyStroke.getKeyStroke("shift F5"));
}
    /**
     * Creates and installs all listeners on all components: the picker,
     * its popup button, its editor and its monthView. Also synchs
     * monthView-dependent state; relies on being called last in installUI.
     */
    protected void installListeners() {
        /*
         * create the listeners.
         */
        // propertyListener for datePicker
        propertyChangeListener = createPropertyChangeListener();
        // mouseListener (for popup button only) ?
        mouseListener = createMouseListener();
        mouseMotionListener = createMouseMotionListener();
        // shared focuslistener (installed to picker and editor)
        focusListener = createFocusListener();
        // editor related listeners
        editorActionListener = createEditorActionListener();
        editorPropertyListener = createEditorPropertyListener();
        // montheView related listeners
        monthViewSelectionListener = createMonthViewSelectionListener();
        monthViewActionListener = createMonthViewActionListener();
        monthViewPropertyListener = createMonthViewPropertyListener();
        /*
         * install the listeners
         */
        // picker
        datePicker.addPropertyChangeListener(propertyChangeListener);
        datePicker.addFocusListener(focusListener);
        if (popupButton != null) {
            // JW: which property do we want to monitor?
            popupButton.addPropertyChangeListener(propertyChangeListener);
            popupButton.addMouseListener(mouseListener);
            popupButton.addMouseMotionListener(mouseMotionListener);
        }
        updateEditorListeners(null);
        // JW the following does more than installing the listeners ..
        // synchs properties of datepicker to monthView's
        // prepares monthview for usage in popup
        // synch the date
        // Relies on being the last thing done in the install ..
        //
        updateFromMonthViewChanged(null);
    }
/**
* Uninstalls and nulls all listeners which had been installed
* by this delegate.
*
*/
protected void uninstallListeners() {
// datePicker
datePicker.removePropertyChangeListener(propertyChangeListener);
datePicker.removeFocusListener(focusListener);
// monthView
datePicker.getMonthView().getSelectionModel().removeDateSelectionListener(monthViewSelectionListener);
datePicker.getMonthView().removeActionListener(monthViewActionListener);
datePicker.getMonthView().removePropertyChangeListener(propertyChangeListener);
// JW: when can that be null?
// maybe in the very beginning? if some code calls ui.uninstall
// before ui.install? The editor is created by the ui.
if (datePicker.getEditor() != null) {
uninstallEditorListeners(datePicker.getEditor());
}
if (popupButton != null) {
popupButton.removePropertyChangeListener(propertyChangeListener);
popupButton.removeMouseListener(mouseListener);
popupButton.removeMouseMotionListener(mouseMotionListener);
}
propertyChangeListener = null;
mouseListener = null;
mouseMotionListener = null;
editorActionListener = null;
editorPropertyListener = null;
monthViewSelectionListener = null;
monthViewActionListener = null;
monthViewPropertyListener = null;
handler = null;
}
// --------------------- wiring listeners
    /**
     * Wires the picker's monthView related listening. Removes all
     * listeners from the given old view and adds the listeners to
     * the current monthView. Delegates the selection model re-wiring
     * to updateSelectionModelListeners. <p>
     *
     * @param oldMonthView the monthView before the change, may be null
     */
    protected void updateMonthViewListeners(JXMonthView oldMonthView) {
        DateSelectionModel oldModel = null;
        if (oldMonthView != null) {
            oldMonthView.removePropertyChangeListener(monthViewPropertyListener);
            oldMonthView.removeActionListener(monthViewActionListener);
            oldModel = oldMonthView.getSelectionModel();
        }
        datePicker.getMonthView().addPropertyChangeListener(monthViewPropertyListener);
        datePicker.getMonthView().addActionListener(monthViewActionListener);
        updateSelectionModelListeners(oldModel);
    }
    /**
     * Wires the picker's editor related listening and actions. Removes
     * listeners/actions from the old editor and adds them to
     * the new editor. Also installs a fresh EditorCancelAction wrapper
     * on the new editor's actionMap. <p>
     *
     * @param oldEditor the pickers editor before the change, may be null
     */
    protected void updateEditorListeners(JFormattedTextField oldEditor) {
        if (oldEditor != null) {
            uninstallEditorListeners(oldEditor);
        }
        datePicker.getEditor().addPropertyChangeListener(editorPropertyListener);
        datePicker.getEditor().addActionListener(editorActionListener);
        datePicker.getEditor().addFocusListener(focusListener);
        editorCancelAction = new EditorCancelAction(datePicker.getEditor());
    }
/**
* Uninstalls all listeners and actions which have been installed
* by this delegate from the given editor.
*
* @param oldEditor the editor to uninstall.
*/
private void uninstallEditorListeners(JFormattedTextField oldEditor) {
oldEditor.removePropertyChangeListener(editorPropertyListener);
oldEditor.removeActionListener(editorActionListener);
oldEditor.removeFocusListener(focusListener);
if (editorCancelAction != null) {
editorCancelAction.uninstall();
editorCancelAction = null;
}
}
/**
* Wires monthView's selection model listening. Removes the
* selection listener from the old model and add to the new model.
*
* @param oldModel the dateSelectionModel before the change, may be null.
*/
protected void updateSelectionModelListeners(DateSelectionModel oldModel) {
if (oldModel != null) {
oldModel.removeDateSelectionListener(monthViewSelectionListener);
}
datePicker.getMonthView().getSelectionModel()
.addDateSelectionListener(monthViewSelectionListener);
}
//---------------- component creation
/**
* Creates the editor used to edit the date selection. Subclasses should
* override this method if they want to substitute in their own editor.
*
* @return an instance of a JFormattedTextField
*/
protected JFormattedTextField createEditor() {
JFormattedTextField f = new DefaultEditor(new DatePickerFormatterUIResource(datePicker.getLocale()));
f.setName("dateField");
f.setColumns(UIManager.getInt("JXDatePicker.numColumns"));
f.setBorder(UIManager.getBorder("JXDatePicker.border"));
return f;
}
protected JButton createPopupButton() {
JButton b = new JButton();
b.setName("popupButton");
b.setRolloverEnabled(false);
b.setMargin(new Insets(0, 3, 0, 3));
Icon icon = UIManager.getIcon("JXDatePicker.arrowIcon");
if (icon == null) {
icon = (Icon)UIManager.get("Tree.expandedIcon");
}
b.setIcon(icon);
b.setFocusable(false);
return b;
}
    /**
     * Marker subclass: a JFormattedTextField tagged as UIResource so that
     * installComponents can distinguish an editor created by this delegate
     * from one installed by client code.
     */
    private class DefaultEditor extends JFormattedTextField implements UIResource {
        public DefaultEditor(AbstractFormatter formatter) {
            super(formatter);
        }
    }
// ---------------- Layout
/**
* {@inheritDoc}
*/
@Override
public Dimension getMinimumSize(JComponent c) {
return getPreferredSize(c);
}
/**
* {@inheritDoc}
*/
@Override
public Dimension getPreferredSize(JComponent c) {
Dimension dim = getEditorPreferredSize();
if (popupButton != null) {
dim.width += popupButton.getPreferredSize().width;
}
Insets insets = datePicker.getInsets();
dim.width += insets.left + insets.right;
dim.height += insets.top + insets.bottom;
return (Dimension)dim.clone();
}
    /**
     * Returns a preferred size for the editor. If the selected date
     * is null, returns a reasonable minimal width. <p>
     *
     * PENDING: how to find the "reasonable" width is open to discussion.
     * This implementation creates another datePicker, feeds it with
     * the formats and asks its prefWidth.
     *
     * @return the editor's preferred size
     */
    private Dimension getEditorPreferredSize() {
        Dimension dim = datePicker.getEditor().getPreferredSize();
        if (datePicker.getDate() == null) {
            // the editor tends to collapsing for empty values
            // JW: better do this in a custom editor?
            JXDatePicker picker = new JXDatePicker();
            picker.setFormats(datePicker.getFormats());
            dim.width = picker.getEditor().getPreferredSize().width;
        }
        return dim;
    }
    /**
     * Returns the text baseline of the editor, relative to the picker,
     * for the given picker size: centers the editor's field view
     * vertically inside the available height (after subtracting both the
     * editor's and the picker's insets) and adds the font ascent.
     * Returns -1 if the editor has no field view to derive a baseline from.
     *
     * @param width the width the baseline is requested for
     * @param height the height the baseline is requested for
     * @return the y-coordinate of the text baseline, or -1 if none
     */
    @Override
    public int getBaseline(int width, int height) {
        JFormattedTextField editor = datePicker.getEditor();
        View rootView = editor.getUI().getRootView(editor);
        if (rootView.getViewCount() > 0) {
            Insets insets = editor.getInsets();
            Insets insetsOut = datePicker.getInsets();
            int nh = height - insets.top - insets.bottom
                    - insetsOut.top - insetsOut.bottom;
            int y = insets.top + insetsOut.top;
            View fieldView = rootView.getView(0);
            int vspan = (int) fieldView.getPreferredSpan(View.Y_AXIS);
            // center the field view vertically when the available height
            // differs from its preferred span
            if (nh != vspan) {
                int slop = nh - vspan;
                y += slop / 2;
            }
            FontMetrics fm = editor.getFontMetrics(editor.getFont());
            y += fm.getAscent();
            return y;
        }
        return -1;
    }
//------------------------------- controller methods/classes
    /**
     * {@inheritDoc} <p>
     * Cleans up the given date (normalization by the out-of-view
     * cleanupDate helper), vetoes if the result equals the picker's
     * current date (one place to interrupt the update spiral) or if the
     * monthView reports it as unselectable.
     *
     * @param date the date to check, may be null
     * @return the cleaned-up selectable date, or null for a null input
     * @throws PropertyVetoException if the date is not selectable
     */
    @Override
    public Date getSelectableDate(Date date) throws PropertyVetoException {
        Date cleaned = date != null ? cleanupDate(date) : null;
        if (equalsDate(cleaned, datePicker.getDate())) {
            // one place to interrupt the update spiral
            throw new PropertyVetoException("date not selectable", null);
        }
        if (cleaned == null) return cleaned;
        if (datePicker.getMonthView().isUnselectableDate(cleaned)) {
            throw new PropertyVetoException("date not selectable", null);
        }
        return cleaned;
    }
//-------------------- update methods called from listeners
/**
* Updates internals after picker's date property changed.
*/
protected void updateFromDateChanged() {
datePicker.getEditor().setValue(datePicker.getDate());
}
    /**
     * Updates date related properties in picker/monthView
     * after a change in the editor's value. Reverts the
     * value if the new date is unselectable.
     *
     * @param oldDate the editor value before the change
     * @param newDate the editor value after the change
     */
    protected void updateFromValueChanged(Date oldDate, Date newDate) {
        if ((newDate != null) && datePicker.getMonthView().isUnselectableDate(newDate)) {
            revertValue(oldDate);
            return;
        }
        // the other place to interrupt the update spiral
        if (!equalsDate(newDate, datePicker.getMonthView().getSelectedDate())) {
            datePicker.getMonthView().setSelectedDate(newDate);
        }
        datePicker.setDate(newDate);
    }
    /**
     * Reverts the editor's value to the given old date.
     *
     * PENDING: currently this resets at once - but it's a no-no,
     * because it happens during notification
     *
     * @param oldDate the old date to revert to
     */
    private void revertValue(Date oldDate) {
        datePicker.getEditor().setValue(oldDate);
    }
/**
* Updates date related properties picker/editor
* after a change in the monthView's
* selection.
*
* Here: does nothing if the change is intermediate.
*
* @param eventType the type of the selection change
* @param adjusting flag to indicate whether the the selection change
* is intermediate
*/
protected void updateFromSelectionChanged(EventType eventType, boolean adjusting) {
if (adjusting) return;
updateEditorValue();
}
    /**
     * Updates internals after the picker's monthView has changed. <p>
     *
     * Cleans the popup. Wires the listeners. Updates date.
     * Updates formats' timezone.
     *
     * @param oldMonthView the picker's monthView before the change,
     *   may be null.
     */
    protected void updateFromMonthViewChanged(JXMonthView oldMonthView) {
        // drop the popup so it is lazily re-created around the new monthView
        popup = null;
        updateMonthViewListeners(oldMonthView);
        TimeZone oldTimeZone = null;
        if (oldMonthView != null) {
            oldMonthView.setComponentInputMapEnabled(false);
            oldTimeZone = oldMonthView.getTimeZone();
        }
        datePicker.getMonthView().setComponentInputMapEnabled(true);
        updateTimeZone(oldTimeZone);
//        updateFormatsFromTimeZone(datePicker.getTimeZone());
        updateEditorValue();
    }
    /**
     * Updates internals after the picker's editor property
     * has changed. <p>
     *
     * Updates the picker's children. Removes the old editor and
     * adds the new editor. Wires the editor listeners, if the flag is
     * set. Typically, this method is called during installing the
     * componentUI with the flag set to false and true at all other
     * moments.
     *
     *
     * @param oldEditor the picker's editor before the change,
     *    may be null.
     * @param updateListeners a flag to indicate whether the listeners
     *    are ready for usage.
     */
    protected void updateFromEditorChanged(JFormattedTextField oldEditor,
            boolean updateListeners) {
        if (oldEditor != null) {
            datePicker.remove(oldEditor);
            oldEditor.putClientProperty("doNotCancelPopup", null);
        }
        datePicker.add(datePicker.getEditor());
        // this is a trick to get hold of the client prop which
        // prevents closing of the popup
        JComboBox box = new JComboBox();
        Object preventHide = box.getClientProperty("doNotCancelPopup");
        datePicker.getEditor().putClientProperty("doNotCancelPopup", preventHide);
        updateEditorValue();
        if (updateListeners) {
            updateEditorListeners(oldEditor);
            datePicker.revalidate();
        }
    }
    /**
     * Updates internals after the selection model changed: re-wires the
     * selection listener first, then synchs the editor to the new model's
     * selection.
     *
     * @param oldModel the model before the change.
     */
    protected void updateFromSelectionModelChanged(DateSelectionModel oldModel) {
        updateSelectionModelListeners(oldModel);
        updateEditorValue();
    }
/**
* Sets the editor value to the model's selectedDate.
*/
private void updateEditorValue() {
datePicker.getEditor().setValue(datePicker.getMonthView().getSelectedDate());
}
//---------------------- updating other properties
/**
* Updates the picker's formats to the given TimeZone.
* @param zone the timezone to set on the formats.
*/
protected void updateFormatsFromTimeZone(TimeZone zone) {
for (DateFormat format : datePicker.getFormats()) {
format.setTimeZone(zone);
}
}
    /**
     * Updates picker's timezone dependent properties on change notification
     * from the associated monthView: the formats' timezone and the linkDate.
     *
     * PENDING JW: DatePicker needs to send notification on timezone change?
     *
     * @param old the timezone before the change.
     */
    protected void updateTimeZone(TimeZone old) {
        updateFormatsFromTimeZone(datePicker.getTimeZone());
        updateLinkDate();
//        datePicker.firePropertyChange(propertyName, oldValue, newValue)
    }
/**
* Updates the picker's linkDate to be in synch with monthView's today.
*/
protected void updateLinkDate() {
datePicker.setLinkDate(datePicker.getMonthView().getTodayInMillis());
}
/**
* Called form property listener, updates all components locale, formats
* etc.
*
* @author PeS
*/
protected void updateLocale() {
Locale locale = datePicker.getLocale();
updateFormatLocale(locale);
updateChildLocale(locale);
}
    /**
     * Re-installs a locale-aware default formatter factory on the editor,
     * but only if no custom formats are installed (custom formats are left
     * untouched). Does nothing for a null locale.
     *
     * @param locale the locale to install the default formats for, may be null
     */
    private void updateFormatLocale(Locale locale) {
        if (locale != null) {
            /*
             * FIXME: PeS: It should probably use this
             *
             * however that gets beyond my understanding of the inner workings.
             * It reaches to UiManagerExt for date formats? Therefore I am using
             * simply JRE defined formats
             */
            // JW: yes should do that - but only if we have no custom formats
            // installed.
            // PENDING: timezone?
            if (getCustomFormats(datePicker.getEditor()) == null) {
                datePicker.getEditor().setFormatterFactory(
                        new DefaultFormatterFactory(
                                new DatePickerFormatterUIResource(locale)));
            }
            // DateFormat[] formats = new DateFormat[3];
            // SimpleDateFormat f =
            // (SimpleDateFormat)DateFormat.getDateInstance(DateFormat.SHORT,
            // locale);
            // if (!f.toPattern().contains("E")) {
            // f.applyPattern("EE " + f.toPattern());
            // }
            // formats[0] = f;
            // formats[1] = DateFormat.getDateInstance(DateFormat.DEFAULT,
            // locale);
            // formats[2] = DateFormat.getDateInstance(DateFormat.MEDIUM,
            // locale);
            // datePicker.setFormats(formats);
        }
    }
private void updateChildLocale(Locale locale) {
if (locale != null) {
datePicker.getEditor().setLocale(locale);
datePicker.getLinkPanel().setLocale(locale);
datePicker.getMonthView().setLocale(locale);
}
}
    /**
     * Updates internals after the picker's linkPanel changed: moves the
     * keyboard actions from the old panel to the new one, lets the popup
     * (if any) swap the panel, and scrolls the monthView to the linkDate.
     *
     * @param oldLinkPanel the linkPanel before the change, may be null
     */
    protected void updateLinkPanel(JComponent oldLinkPanel) {
        if (oldLinkPanel != null) {
            uninstallLinkPanelKeyboardActions(oldLinkPanel);
        }
        installLinkPanelKeyboardActions();
        if (popup != null) {
            popup.updateLinkPanel(oldLinkPanel);
        }
        // PENDING: datepicker installs a new todayPanel if
        // any of the linkDate related properties changed.
        // should be less-rude - can set properties on the
        // panel? Fire more atomic changes.
        datePicker.getMonthView().setFirstDisplayedDate(datePicker.getLinkDate());
    }
//------------------- methods called by installed actions
    /**
     * Hides the popup and commits the picker's pending edit. A failed
     * parse is deliberately swallowed - there is nothing sensible to do
     * with an uncommittable value here.
     */
    protected void commit() {
        hidePopup();
        try {
            datePicker.commitEdit();
        } catch (ParseException ex) {
            // can't help it
        }
    }
    /**
     * Hides the popup and cancels the picker's pending edit.
     */
    protected void cancel() {
        hidePopup();
        datePicker.cancelEdit();
    }
/**
*
*/
private void hidePopup() {
if (popup != null) popup.setVisible(false);
}
    /**
     * Navigates to linkDate. If commit, the linkDate is selected
     * and committed. If not commit, the linkDate is scrolled to visible, if the
     * monthview is open, does nothing for invisible monthView.
     *
     * @param commit boolean to indicate whether the linkDate should be
     *   selected and committed
     */
    protected void home(boolean commit) {
        if (commit) {
            // select via the monthView's calendar so the linkDate millis are
            // normalized the same way as interactive selections
            Calendar cal = datePicker.getMonthView().getCalendar();
            cal.setTimeInMillis(datePicker.getLinkDate());
            datePicker.getMonthView().setSelectedDate(cal.getTime());
            datePicker.getMonthView().commitSelection();
        } else {
            datePicker.getMonthView().ensureDateVisible(datePicker.getLinkDate());
        }
    }
//---------------------- other stuff
/**
* Creates and returns the action for committing the picker's
* input.
*
* @return
*/
private Action createCommitAction() {
Action action = new AbstractAction() {
public void actionPerformed(ActionEvent e) {
commit();
}
};
return action;
}
/**
* Creates and returns the action for cancel the picker's
* edit.
*
* @return
*/
private Action createCancelAction() {
Action action = new AbstractAction() {
public void actionPerformed(ActionEvent e) {
cancel();
}
};
return action;
}
private Action createHomeAction(final boolean commit) {
Action action = new AbstractAction( ) {
public void actionPerformed(ActionEvent e) {
home(commit);
}
};
return action ;
}
    /**
     * The wrapper for the editor's cancel action: intercepts the text
     * component's "reset-field-edit" action, invokes the original, then
     * cancels the picker's edit (closing the popup as well).
     *
     * PENDING: Need to extend TestAction?
     *
     */
    public class EditorCancelAction extends AbstractAction {
        // the editor whose actionMap this wrapper is installed into
        private JFormattedTextField editor;
        // the editor's original cancel action, invoked before cancel()
        private Action cancelAction;
        public static final String TEXT_CANCEL_KEY = "reset-field-edit";
        public EditorCancelAction(JFormattedTextField field) {
            install(field);
        }
        /**
         * Resets the contained editors actionMap to original and
         * nulls all fields. <p>
         * NOTE: after calling this method the action must not be
         * used! Create a new one for the same or another editor.
         *
         */
        public void uninstall() {
            editor.getActionMap().remove(TEXT_CANCEL_KEY);
            cancelAction = null;
            editor = null;
        }
        /**
         * Stores the editor's original cancel action and replaces it
         * with this wrapper.
         *
         * @param editor the editor to install into
         */
        private void install(JFormattedTextField editor) {
            this.editor = editor;
            cancelAction = editor.getActionMap().get(TEXT_CANCEL_KEY);
            editor.getActionMap().put(TEXT_CANCEL_KEY, this);
        }
        public void actionPerformed(ActionEvent e) {
            cancelAction.actionPerformed(null);
            cancel();
        }
    }
/**
* @return
*/
protected TogglePopupAction createTogglePopupAction() {
return new TogglePopupAction();
}
    /**
     * Toggles the popups visibility after preparing internal state:
     * lazily creates the popup, and - when opening - moves focus to the
     * picker first and shows the popup on the EDT via invokeLater so the
     * focus transfer has settled before the popup appears.
     */
    public void toggleShowPopup() {
        if (popup == null) {
            popup = createMonthViewPopup();
        }
        if (popup.isVisible()) {
            popup.setVisible(false);
        } else {
            datePicker.requestFocusInWindow();
            SwingUtilities.invokeLater(new Runnable() {
                public void run() {
                    popup.show(datePicker,
                            0, datePicker.getHeight());
                }
            });
        }
    }
/**
*
*/
private BasicDatePickerPopup createMonthViewPopup() {
BasicDatePickerPopup popup = new BasicDatePickerPopup();
popup.setLightWeightPopupEnabled(datePicker.isLightWeightPopupEnabled());
return popup;
}
    /**
     * Action toggling the visibility of the month view popup.
     * Used by the keyboard bindings (SPACE) installed on the picker.
     */
    private class TogglePopupAction extends AbstractAction {
        public TogglePopupAction() {
            super("TogglePopup");
        }
        public void actionPerformed(ActionEvent ev) {
            toggleShowPopup();
        }
    }
    /**
     * Popup component that shows a JXMonthView component along with controlling
     * buttons to allow traversal of the months. Upon selection of a date the
     * popup will automatically hide itself and enter the selection into the
     * editable field of the JXDatePicker.
     *
     */
    protected class BasicDatePickerPopup extends JPopupMenu {

        public BasicDatePickerPopup() {
            setLayout(new BorderLayout());
            add(datePicker.getMonthView(), BorderLayout.CENTER);
            updateLinkPanel(null);
        }

        /**
         * Swaps the picker's linkPanel into the popup's SOUTH position,
         * removing the old panel if given.
         *
         * @param oldLinkPanel the panel to remove, may be null
         */
        public void updateLinkPanel(JComponent oldLinkPanel) {
            if (oldLinkPanel != null) {
                remove(oldLinkPanel);
            }
            if (datePicker.getLinkPanel() != null) {
                add(datePicker.getLinkPanel(), BorderLayout.SOUTH);
            }
        }
    }
/**
* PENDING: JW - I <b>really</b> hate the one-in-all. Wont touch
* it for now, maybe later. As long as we have it, the new
* listeners (dateSelection) are here too, for consistency.
* Adding the Layout here as well is ... , IMO.
*/
private class Handler implements LayoutManager, MouseListener, MouseMotionListener,
PropertyChangeListener, DateSelectionListener, ActionListener, FocusListener {
        //------------- implement Mouse/MotionListener
        // true while a press-drag gesture started on the popup button must
        // forward its release to the monthView
        private boolean _forwardReleaseEvent = false;

        public void mouseClicked(MouseEvent ev) {
        }

        public void mousePressed(MouseEvent ev) {
            if (!datePicker.isEnabled()) {
                return;
            }
            // PENDING JW: why do we need a mouseListener? the
            // arrowbutton should have the toggleAction installed?
            // Hmm... maybe doesn't ... check!
            // reason might be that we want to open on pressed
            // typically (or LF-dependent?),
            // the button's action is invoked on released.
            toggleShowPopup();
        }

        public void mouseReleased(MouseEvent ev) {
            if (!datePicker.isEnabled() || !datePicker.isEditable()) {
                return;
            }
            // Retarget mouse event to the month view.
            if (_forwardReleaseEvent) {
                JXMonthView monthView = datePicker.getMonthView();
                ev = SwingUtilities.convertMouseEvent(popupButton, ev,
                        monthView);
                monthView.dispatchEvent(ev);
                _forwardReleaseEvent = false;
            }
        }

        public void mouseEntered(MouseEvent ev) {
        }

        public void mouseExited(MouseEvent ev) {
        }

        public void mouseDragged(MouseEvent ev) {
            if (!datePicker.isEnabled() || !datePicker.isEditable()) {
                return;
            }
            // remember to forward the matching release to the monthView
            _forwardReleaseEvent = true;
            if (!popup.isShowing()) {
                return;
            }
            // Retarget mouse event to the month view.
            JXMonthView monthView = datePicker.getMonthView();
            ev = SwingUtilities.convertMouseEvent(popupButton, ev, monthView);
            monthView.dispatchEvent(ev);
        }

        public void mouseMoved(MouseEvent ev) {
        }
        //------------------ implement DateSelectionListener
        // delegates monthView selection changes to the UI's update method
        public void valueChanged(DateSelectionEvent ev) {
            updateFromSelectionChanged(ev.getEventType(), ev.isAdjusting());
        }
        //------------------ implement propertyChangeListener
        /**
         * {@inheritDoc} <p>
         * Dispatches to a per-source handler (picker, editor, monthView,
         * popup button). The trailing "value" branch is a sanity check:
         * editor value changes must arrive via the dedicated editor
         * property listener, never through this shared listener.
         */
        public void propertyChange(PropertyChangeEvent e) {
            if (e.getSource() == datePicker) {
                datePickerPropertyChange(e);
            } else
            if (e.getSource() == datePicker.getEditor()) {
                editorPropertyChange(e);
            } else
            if (e.getSource() == datePicker.getMonthView()) {
                monthViewPropertyChange(e);
            } else
            if (e.getSource() == popupButton) {
                buttonPropertyChange(e);
            } else
            // PENDING - move back, ...
            if ("value".equals(e.getPropertyName())) {
                throw new IllegalStateException(
                        "editor listening is moved to dedicated propertyChangeLisener");
            }
        }
        /**
         * Handles property changes from datepicker's editor: only the
         * "value" property is of interest.
         *
         * @param evt the PropertyChangeEvent object describing the event
         *    source and the property that has changed
         */
        private void editorPropertyChange(PropertyChangeEvent evt) {
            if ("value".equals(evt.getPropertyName())) {
                updateFromValueChanged((Date) evt.getOldValue(), (Date) evt
                        .getNewValue());
            }
        }
        /**
         * Handles property changes from DatePicker, dispatching per
         * property name to the UI's update methods or synching the
         * children directly (enabled/editable/toolTip).
         *
         * @param e the PropertyChangeEvent object describing the
         *     event source and the property that has changed
         */
        private void datePickerPropertyChange(PropertyChangeEvent e) {
            String property = e.getPropertyName();
            if ("date".equals(property)) {
                updateFromDateChanged();
            } else if ("enabled".equals(property)) {
                boolean isEnabled = datePicker.isEnabled();
                popupButton.setEnabled(isEnabled);
                datePicker.getEditor().setEnabled(isEnabled);
            } else if ("editable".equals(property)) {
                boolean isEditable = datePicker.isEditable();
                datePicker.getMonthView().setEnabled(isEditable);
                datePicker.getEditor().setEditable(isEditable);
            } else if (JComponent.TOOL_TIP_TEXT_KEY.equals(property)) {
                String tip = datePicker.getToolTipText();
                datePicker.getEditor().setToolTipText(tip);
                popupButton.setToolTipText(tip);
            } else if (JXDatePicker.MONTH_VIEW.equals(property)) {
                updateFromMonthViewChanged((JXMonthView) e.getOldValue());
            } else if (JXDatePicker.LINK_PANEL.equals(property)) {
                updateLinkPanel((JComponent) e.getOldValue());
            } else if (JXDatePicker.EDITOR.equals(property)) {
                updateFromEditorChanged((JFormattedTextField) e.getOldValue(), true);
            } else if ("componentOrientation".equals(property)) {
                datePicker.revalidate();
            } else if ("lightWeightPopupEnabled".equals(property)) {
                // Force recreation of the popup when this property changes.
                if (popup != null) {
                    popup.setVisible(false);
                }
                popup = null;
            } else if ("formats".equals(property)) {
                updateFormatsFromTimeZone(datePicker.getTimeZone());
            }
            else if ("locale".equals(property)) {
                updateLocale();
            }
        }
/**
* Handles propertyChanges from the picker's monthView.
*
* @param e the PropertyChangeEvent object describing the event source
* and the property that has changed
*/
private void monthViewPropertyChange(PropertyChangeEvent e) {
if ("selectionModel".equals(e.getPropertyName())) {
updateFromSelectionModelChanged((DateSelectionModel) e.getOldValue());
} else if ("timeZone".equals(e.getPropertyName())) {
// updateFormatsFromTimeZone((TimeZone) e.getNewValue());
updateTimeZone((TimeZone) e.getOldValue());
} else if ("todayInMillis".equals(e.getPropertyName())) {
updateLinkDate();
}
}
        /**
         * Handles propertyChanges from the picker's popupButton.
         *
         * PENDING: does nothing, kept while refactoring .. which
         * properties from the button do we want to handle?
         *
         * @param e the PropertyChangeEvent object describing the event source
         *     and the property that has changed.
         */
        private void buttonPropertyChange(PropertyChangeEvent e) {
        }
        //-------------- implement LayoutManager
        /** No-op: the picker's children are accessed directly, not tracked by name. */
        public void addLayoutComponent(String name, Component comp) { }
        /** No-op: the picker's children are accessed directly, not tracked by name. */
        public void removeLayoutComponent(Component comp) { }
        /** Delegates to the parent's preferred size (computed by the UI delegate). */
        public Dimension preferredLayoutSize(Container parent) {
            return parent.getPreferredSize();
        }
        /** Delegates to the parent's minimum size (computed by the UI delegate). */
        public Dimension minimumLayoutSize(Container parent) {
            return parent.getMinimumSize();
        }
public void layoutContainer(Container parent) {
Insets insets = datePicker.getInsets();
int width = datePicker.getWidth() - insets.left - insets.right;
int height = datePicker.getHeight() - insets.top - insets.bottom;
int popupButtonWidth = popupButton != null ? popupButton.getPreferredSize().width : 0;
boolean ltr = datePicker.getComponentOrientation().isLeftToRight();
datePicker.getEditor().setBounds(ltr ? insets.left : insets.left + popupButtonWidth,
insets.top,
width - popupButtonWidth,
height);
if (popupButton != null) {
popupButton.setBounds(ltr ? width - popupButtonWidth + insets.left : insets.left,
insets.top,
popupButtonWidth,
height);
}
}
// ------------- implement actionListener (listening to monthView actionEvent)
public void actionPerformed(ActionEvent e) {
if (e == null) return;
if (e.getSource() == datePicker.getMonthView()) {
monthViewActionPerformed(e);
} else if (e.getSource() == datePicker.getEditor()) {
editorActionPerformed(e);
}
}
        /**
         * Listening to actionEvents fired by the picker's editor.
         * An action from the editor means the user confirmed the input,
         * so the edit is committed via the picker.
         *
         * @param e the ActionEvent fired by the editor, unused
         */
        private void editorActionPerformed(ActionEvent e) {
            // pass the commit on to the picker.
            commit();
        }
        /**
         * Listening to actionEvents fired by the picker's monthView.
         * Translates the monthView's cancel/commit action commands into
         * the picker's cancel/commit operations.
         *
         * @param e the ActionEvent fired by the monthView
         */
        private void monthViewActionPerformed(ActionEvent e) {
            if (JXMonthView.CANCEL_KEY.equals(e.getActionCommand())) {
                cancel();
            } else if (JXMonthView.COMMIT_KEY.equals(e.getActionCommand())) {
                commit();
            }
        }
        //------------------- focusListener
        /**
         * Issue #573-swingx - F2 in table doesn't focus the editor.
         *
         * Do the same as combo: manually pass-on the focus to the editor.
         *
         * @param e the FocusEvent received by the picker or the editor
         */
        public void focusGained(FocusEvent e) {
            // ignore temporary focus transfers (e.g. during window switches)
            if (e.isTemporary()) return;
            if (e.getSource() == datePicker) {
                datePicker.getEditor().requestFocusInWindow();
            }
        }
        /**
         * #565-swingx: popup not hidden if clicked into combo.
         * The problem is that the combo uses the same trick as
         * this datepicker to prevent auto-closing of the popup
         * if focus is transfered back to the picker's editor.
         *
         * The idea is to hide the popup manually when the
         * permanentFocusOwner changes to somewhere else.
         *
         * JW: doesn't work - we only get the temporary lost,
         * but no permanent loss if the focus is transfered from
         * the focusOwner to a new permanentFocusOwner.
         *
         * OOOkaay ... looks like exclusively related to a combo:
         * we do get the expected focusLost if the focus is
         * transferred permanently from the temporary focusowner
         * to a new "normal" permanentFocusOwner (like a textfield),
         * we don't get it if transfered to a tricksing owner (like
         * a combo or picker)
         *
         * listen to keyboardFocusManager?
         *
         * @param e the FocusEvent, currently ignored (see note above)
         */
        public void focusLost(FocusEvent e) {
            // intentionally no-op; the attempted fix below did not work
            // LOG.info("lost - old " + e);
            // if (e.isTemporary()) return;
            // if (e.getSource() == datePicker.getEditor()) {
            // hidePopup();
            // }
        }
}
    // ------------------ listener creation
    /**
     * Creates and returns the property change listener for the
     * picker's monthView
     * @return the listener for monthView properties
     */
    protected PropertyChangeListener createMonthViewPropertyListener() {
        return getHandler();
    }
    /**
     * Creates and returns the focuslistener for picker and editor.
     * @return the focusListener
     */
    protected FocusListener createFocusListener() {
        return getHandler();
    }
    /**
     * Creates and returns the ActionListener for the picker's editor.
     * @return the Actionlistener for the editor.
     */
    protected ActionListener createEditorActionListener() {
        return getHandler();
    }
    /**
     * Creates and returns the ActionListener for the picker's monthView.
     *
     * @return the Actionlistener for the monthView.
     */
    protected ActionListener createMonthViewActionListener() {
        return getHandler();
    }
    /**
     * Returns the listener for the dateSelection.
     *
     * @return the date selection listener
     */
    protected DateSelectionListener createMonthViewSelectionListener() {
        return getHandler();
    }
    /**
     * @return a propertyChangeListener listening to
     *   editor property changes
     */
    protected PropertyChangeListener createEditorPropertyListener() {
        return getHandler();
    }
    /**
     * Lazily creates and returns the shared all-mighty listener of everything
     *
     * @return the shared listener.
     */
    private Handler getHandler() {
        if (handler == null) {
            handler = new Handler();
        }
        return handler;
    }
    /** @return the shared propertyChangeListener for the picker itself */
    protected PropertyChangeListener createPropertyChangeListener() {
        return getHandler();
    }
    /** @return the layoutManager for the picker's children */
    protected LayoutManager createLayoutManager() {
        return getHandler();
    }
    /** @return the mouseListener installed on the popup button */
    protected MouseListener createMouseListener() {
        return getHandler();
    }
    /** @return the mouseMotionListener installed on the popup button */
    protected MouseMotionListener createMouseMotionListener() {
        return getHandler();
    }
    //------------ utility methods
    // duplication!!
    /**
     * Returns a defensive copy of the given date with the time fields
     * set to the start of day, computed with the monthView's calendar.
     *
     * @param date the date to clean, must not be null
     * @return a new Date at the start of day of the given date
     */
    private Date cleanupDate(Date date) {
        // only modify defensive copies
        return new Date(cleanupDate(date.getTime(), datePicker.getMonthView().getCalendar()));
    }
    // duplication!!
    // PENDING: move to CalendarUtils ?
    /**
     * Returns the start-of-day of the given time in millis, as computed
     * with the given calendar. Note: the calendar's state is modified.
     *
     * @param date the time in millis to clean
     * @param cal the calendar to use for the computation
     * @return the time in millis at the start of the date's day
     */
    private long cleanupDate(long date, Calendar cal) {
        cal.setTimeInMillis(date);
        // We only want to compare the day, month and year
        CalendarUtils.startOfDay(cal);
        return cal.getTimeInMillis();
    }
/**
* Checks the given dates for being equal.
*
* PENDING: this should be a utility somewhere ...
*
* @param current one of the dates to compare
* @param date the otherr of the dates to compare
* @return true if the two given dates are equal, false otherwise.
*/
private boolean equalsDate(Date current, Date date) {
if ((date == null) && (current == null)) {
return true;
}
if ((date != null) && (date.equals(current))) {
return true;
}
return false;
}
}
| src/java/org/jdesktop/swingx/plaf/basic/BasicDatePickerUI.java | /*
* $Id$
*
* Copyright 2006 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jdesktop.swingx.plaf.basic;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.FontMetrics;
import java.awt.Insets;
import java.awt.LayoutManager;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyVetoException;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import java.util.logging.Logger;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.Icon;
import javax.swing.InputMap;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFormattedTextField;
import javax.swing.JPopupMenu;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.JFormattedTextField.AbstractFormatter;
import javax.swing.JFormattedTextField.AbstractFormatterFactory;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.text.DefaultFormatterFactory;
import javax.swing.text.View;
import org.jdesktop.swingx.JXDatePicker;
import org.jdesktop.swingx.JXMonthView;
import org.jdesktop.swingx.calendar.CalendarUtils;
import org.jdesktop.swingx.calendar.DatePickerFormatter;
import org.jdesktop.swingx.calendar.DateSelectionModel;
import org.jdesktop.swingx.calendar.DatePickerFormatter.DatePickerFormatterUIResource;
import org.jdesktop.swingx.event.DateSelectionEvent;
import org.jdesktop.swingx.event.DateSelectionListener;
import org.jdesktop.swingx.event.DateSelectionEvent.EventType;
import org.jdesktop.swingx.plaf.DatePickerUI;
/**
* The basic implementation of a <code>DatePickerUI</code>.
* <p>
*
*
* @author Joshua Outwater
* @author Jeanette Winzenburg
*/
public class BasicDatePickerUI extends DatePickerUI {
    @SuppressWarnings("all")
    private static final Logger LOG = Logger.getLogger(BasicDatePickerUI.class
            .getName());
    /** The picker this delegate is installed on, set in installUI. */
    protected JXDatePicker datePicker;
    /** The button toggling the popup, created in installComponents; may be null. */
    private JButton popupButton;
    /** The popup showing the monthView, created lazily in toggleShowPopup. */
    private BasicDatePickerPopup popup;
    /** Shared handler implementing the various listener interfaces, lazily created. */
    private Handler handler;
    /*
     * shared listeners
     */
    protected PropertyChangeListener propertyChangeListener;
    private FocusListener focusListener;
    /*
     * listener's for the arrow button
     */
    protected MouseListener mouseListener;
    protected MouseMotionListener mouseMotionListener;
    /*
     * listeners for the picker's editor
     */
    private ActionListener editorActionListener;
    private EditorCancelAction editorCancelAction;
    private PropertyChangeListener editorPropertyListener;
    /**
     * listeners for the picker's monthview
     */
    private DateSelectionListener monthViewSelectionListener;
    private ActionListener monthViewActionListener;
    private PropertyChangeListener monthViewPropertyListener;
    /**
     * Returns a new UI delegate instance for the given component.
     *
     * @param c the component to create the UI for (unused)
     * @return a new BasicDatePickerUI
     */
    @SuppressWarnings({"UnusedDeclaration"})
    public static ComponentUI createUI(JComponent c) {
        return new BasicDatePickerUI();
    }
    /**
     * Installs this delegate on the given picker: sets the layout, then
     * installs components, defaults, keyboard actions and listeners.
     *
     * @param c the JXDatePicker to install on
     */
    @Override
    public void installUI(JComponent c) {
        datePicker = (JXDatePicker)c;
        datePicker.setLayout(createLayoutManager());
        installComponents();
        installDefaults();
        installKeyboardActions();
        installListeners();
    }
    /**
     * Uninstalls this delegate in reverse order of installUI and
     * releases the reference to the picker.
     *
     * @param c the component to uninstall from (unused, datePicker is used)
     */
    @Override
    public void uninstallUI(JComponent c) {
        uninstallListeners();
        uninstallKeyboardActions();
        uninstallDefaults();
        uninstallComponents();
        datePicker.setLayout(null);
        datePicker = null;
    }
    /**
     * Creates and adds the picker's children: replaces a null or
     * UI-installed editor (preserving any custom formats found on it)
     * and adds the popup button, if one was created.
     */
    protected void installComponents() {
        JFormattedTextField editor = datePicker.getEditor();
        if (editor == null || editor instanceof UIResource) {
            DateFormat[] formats = getCustomFormats(editor);
            // we are not yet listening ...
            datePicker.setEditor(createEditor());
            if (formats != null) {
                datePicker.setFormats(formats);
            }
        }
        updateFromEditorChanged(null, false);
        popupButton = createPopupButton();
        if (popupButton != null) {
            // this is a trick to get hold of the client prop which
            // prevents closing of the popup
            JComboBox box = new JComboBox();
            Object preventHide = box.getClientProperty("doNotCancelPopup");
            popupButton.putClientProperty("doNotCancelPopup", preventHide);
            datePicker.add(popupButton);
        }
        // JW: the condition is hacking around #681-swingx: overlapping rows in JXMonthView
        // in locales with first day of week monday if locale is set
        // this way the current brittle behaviour is at least not detoriated
        // (== default locales are okay)
        // if ((datePicker.getLocale() != null) &&
        // !datePicker.getLocale().equals(Locale.getDefault()))
        updateChildLocale(datePicker.getLocale());
    }
/**
* Checks and returns custom formats on the editor, if any.
*
* @param editor the editor to check
* @return the custom formats uses in the editor or null if it had
* used defaults as defined in the datepicker properties
*/
private DateFormat[] getCustomFormats(JFormattedTextField editor) {
DateFormat[] formats = null;
if (editor != null) {
AbstractFormatterFactory factory = editor.getFormatterFactory();
if (factory != null) {
AbstractFormatter formatter = factory.getFormatter(editor);
if (!(formatter instanceof DatePickerFormatterUIResource)) {
formats = ((DatePickerFormatter) formatter).getFormats();
}
}
}
return formats;
}
    /**
     * Removes the editor and the popup button from the picker,
     * nulling the button reference.
     */
    protected void uninstallComponents() {
        JFormattedTextField editor = datePicker.getEditor();
        if (editor != null) {
            datePicker.remove(editor);
        }
        if (popupButton != null) {
            datePicker.remove(popupButton);
            popupButton = null;
        }
    }
    /** Installs look-and-feel defaults. Currently does nothing. */
    protected void installDefaults() {
    }
    /** Uninstalls look-and-feel defaults. Currently does nothing. */
    protected void uninstallDefaults() {
    }
    /**
     * Installs the picker's actions (commit, cancel, home navigate/commit,
     * popup toggle) and the corresponding key bindings, then installs the
     * bindings on the linkPanel.
     */
    protected void installKeyboardActions() {
        // install picker's actions
        ActionMap pickerMap = datePicker.getActionMap();
        pickerMap.put(JXDatePicker.CANCEL_KEY, createCancelAction());
        pickerMap.put(JXDatePicker.COMMIT_KEY, createCommitAction());
        pickerMap.put(JXDatePicker.HOME_NAVIGATE_KEY, createHomeAction(false));
        pickerMap.put(JXDatePicker.HOME_COMMIT_KEY, createHomeAction(true));
        TogglePopupAction popupAction = createTogglePopupAction();
        pickerMap.put("TOGGLE_POPUP", popupAction);
        InputMap pickerInputMap = datePicker.getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
        pickerInputMap.put(KeyStroke.getKeyStroke("ENTER"), JXDatePicker.COMMIT_KEY);
        pickerInputMap.put(KeyStroke.getKeyStroke("ESCAPE"), JXDatePicker.CANCEL_KEY);
        // PENDING: get from LF
        pickerInputMap.put(KeyStroke.getKeyStroke("F5"), JXDatePicker.HOME_COMMIT_KEY);
        pickerInputMap.put(KeyStroke.getKeyStroke("shift F5"), JXDatePicker.HOME_NAVIGATE_KEY);
        pickerInputMap.put(KeyStroke.getKeyStroke("SPACE"), "TOGGLE_POPUP");
        installLinkPanelKeyboardActions();
    }
    /**
     * Uninstalls the key bindings installed on the linkPanel.
     * NOTE(review): the picker's own actionMap/inputMap entries are not
     * removed here - confirm whether that is intentional.
     */
    protected void uninstallKeyboardActions() {
        uninstallLinkPanelKeyboardActions(datePicker.getLinkPanel());
    }
    /**
     * Installs actions and key bindings on the datePicker's linkPanel. Does
     * nothing if the linkPanel is null.
     *
     * PRE: keybindings installed on picker.
     */
    protected void installLinkPanelKeyboardActions() {
        if (datePicker.getLinkPanel() == null)
            return;
        // share the picker's home actions with the link panel
        ActionMap map = datePicker.getLinkPanel().getActionMap();
        map.put(JXDatePicker.HOME_COMMIT_KEY, datePicker.getActionMap().get(
                JXDatePicker.HOME_COMMIT_KEY));
        map.put(JXDatePicker.HOME_NAVIGATE_KEY, datePicker.getActionMap().get(
                JXDatePicker.HOME_NAVIGATE_KEY));
        InputMap inputMap = datePicker.getLinkPanel().getInputMap(
                JComponent.WHEN_IN_FOCUSED_WINDOW);
        // PENDING: get from LF
        inputMap.put(KeyStroke.getKeyStroke("F5"),
                JXDatePicker.HOME_COMMIT_KEY);
        inputMap.put(KeyStroke.getKeyStroke("shift F5"),
                JXDatePicker.HOME_NAVIGATE_KEY);
    }
    /**
     * Uninstalls actions and key bindings from linkPanel. Does nothing if the
     * linkPanel is null.
     *
     * @param panel the component to uninstall
     *
     */
    protected void uninstallLinkPanelKeyboardActions(JComponent panel) {
        if (panel == null) return;
        ActionMap map = panel.getActionMap();
        map.remove(JXDatePicker.HOME_COMMIT_KEY);
        map.remove(JXDatePicker.HOME_NAVIGATE_KEY);
        InputMap inputMap = panel.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW);
        // PENDING: get from LF
        inputMap.remove(KeyStroke.getKeyStroke("F5"));
        inputMap.remove(KeyStroke.getKeyStroke("shift F5"));
    }
    /**
     * Creates and installs all listeners to all components.
     * The listener fields are filled from the create* factory methods
     * (all of which default to the shared Handler), then wired to the
     * picker, the popup button, the editor and the monthView.
     */
    protected void installListeners() {
        /*
         * create the listeners.
         */
        // propertyListener for datePicker
        propertyChangeListener = createPropertyChangeListener();
        // mouseListener (for popup button only) ?
        mouseListener = createMouseListener();
        mouseMotionListener = createMouseMotionListener();
        // shared focuslistener (installed to picker and editor)
        focusListener = createFocusListener();
        // editor related listeners
        editorActionListener = createEditorActionListener();
        editorPropertyListener = createEditorPropertyListener();
        // montheView related listeners
        monthViewSelectionListener = createMonthViewSelectionListener();
        monthViewActionListener = createMonthViewActionListener();
        monthViewPropertyListener = createMonthViewPropertyListener();
        /*
         * install the listeners
         */
        // picker
        datePicker.addPropertyChangeListener(propertyChangeListener);
        datePicker.addFocusListener(focusListener);
        if (popupButton != null) {
            // JW: which property do we want to monitor?
            popupButton.addPropertyChangeListener(propertyChangeListener);
            popupButton.addMouseListener(mouseListener);
            popupButton.addMouseMotionListener(mouseMotionListener);
        }
        updateEditorListeners(null);
        // JW the following does more than installing the listeners ..
        // synchs properties of datepicker to monthView's
        // prepares monthview for usage in popup
        // synch the date
        // Relies on being the last thing done in the install ..
        //
        updateFromMonthViewChanged(null);
    }
/**
* Uninstalls and nulls all listeners which had been installed
* by this delegate.
*
*/
protected void uninstallListeners() {
// datePicker
datePicker.removePropertyChangeListener(propertyChangeListener);
datePicker.removeFocusListener(focusListener);
// monthView
datePicker.getMonthView().getSelectionModel().removeDateSelectionListener(monthViewSelectionListener);
datePicker.getMonthView().removeActionListener(monthViewActionListener);
datePicker.getMonthView().removePropertyChangeListener(propertyChangeListener);
// JW: when can that be null?
// maybe in the very beginning? if some code calls ui.uninstall
// before ui.install? The editor is created by the ui.
if (datePicker.getEditor() != null) {
uninstallEditorListeners(datePicker.getEditor());
}
if (popupButton != null) {
popupButton.removePropertyChangeListener(propertyChangeListener);
popupButton.removeMouseListener(mouseListener);
popupButton.removeMouseMotionListener(mouseMotionListener);
}
propertyChangeListener = null;
mouseListener = null;
mouseMotionListener = null;
editorActionListener = null;
editorPropertyListener = null;
monthViewSelectionListener = null;
monthViewActionListener = null;
monthViewPropertyListener = null;
handler = null;
}
    // --------------------- wiring listeners
    /**
     * Wires the picker's monthView related listening. Removes all
     * listeners from the given old view and adds the listeners to
     * the current monthView. <p>
     *
     * @param oldMonthView the monthView before the change, may be null
     */
    protected void updateMonthViewListeners(JXMonthView oldMonthView) {
        DateSelectionModel oldModel = null;
        if (oldMonthView != null) {
            oldMonthView.removePropertyChangeListener(monthViewPropertyListener);
            oldMonthView.removeActionListener(monthViewActionListener);
            // keep the old model so its selection listener can be moved too
            oldModel = oldMonthView.getSelectionModel();
        }
        datePicker.getMonthView().addPropertyChangeListener(monthViewPropertyListener);
        datePicker.getMonthView().addActionListener(monthViewActionListener);
        updateSelectionModelListeners(oldModel);
    }
    /**
     * Wires the picker's editor related listening and actions. Removes
     * listeners/actions from the old editor and adds them to
     * the new editor. <p>
     *
     * @param oldEditor the pickers editor before the change, may be null
     */
    protected void updateEditorListeners(JFormattedTextField oldEditor) {
        if (oldEditor != null) {
            uninstallEditorListeners(oldEditor);
        }
        datePicker.getEditor().addPropertyChangeListener(editorPropertyListener);
        datePicker.getEditor().addActionListener(editorActionListener);
        datePicker.getEditor().addFocusListener(focusListener);
        // wraps the editor's cancel action so the picker's cancel runs as well
        editorCancelAction = new EditorCancelAction(datePicker.getEditor());
    }
    /**
     * Uninstalls all listeners and actions which have been installed
     * by this delegate from the given editor.
     *
     * @param oldEditor the editor to uninstall.
     */
    private void uninstallEditorListeners(JFormattedTextField oldEditor) {
        oldEditor.removePropertyChangeListener(editorPropertyListener);
        oldEditor.removeActionListener(editorActionListener);
        oldEditor.removeFocusListener(focusListener);
        if (editorCancelAction != null) {
            editorCancelAction.uninstall();
            editorCancelAction = null;
        }
    }
    /**
     * Wires monthView's selection model listening. Removes the
     * selection listener from the old model and add to the new model.
     *
     * @param oldModel the dateSelectionModel before the change, may be null.
     */
    protected void updateSelectionModelListeners(DateSelectionModel oldModel) {
        if (oldModel != null) {
            oldModel.removeDateSelectionListener(monthViewSelectionListener);
        }
        datePicker.getMonthView().getSelectionModel()
            .addDateSelectionListener(monthViewSelectionListener);
    }
    //---------------- component creation
    /**
     * Creates the editor used to edit the date selection. Subclasses should
     * override this method if they want to substitute in their own editor.
     *
     * @return an instance of a JFormattedTextField
     */
    protected JFormattedTextField createEditor() {
        JFormattedTextField f = new DefaultEditor(new DatePickerFormatterUIResource(datePicker.getLocale()));
        f.setName("dateField");
        f.setColumns(UIManager.getInt("JXDatePicker.numColumns"));
        f.setBorder(UIManager.getBorder("JXDatePicker.border"));
        return f;
    }
    /**
     * Creates and returns the button which opens the popup, configured
     * from UIManager properties and not focusable. Callers guard against
     * a null return (subclasses may return null).
     *
     * @return the popup button
     */
    protected JButton createPopupButton() {
        JButton b = new JButton();
        b.setName("popupButton");
        b.setRolloverEnabled(false);
        b.setMargin(new Insets(0, 3, 0, 3));
        Icon icon = UIManager.getIcon("JXDatePicker.arrowIcon");
        if (icon == null) {
            // fall back to the LF's tree expansion icon
            icon = (Icon)UIManager.get("Tree.expandedIcon");
        }
        b.setIcon(icon);
        b.setFocusable(false);
        return b;
    }
    /**
     * Default editor: marks the UI-installed editor as a UIResource so
     * installComponents can distinguish it from a client-set editor.
     */
    private class DefaultEditor extends JFormattedTextField implements UIResource {
        public DefaultEditor(AbstractFormatter formatter) {
            super(formatter);
        }
    }
// ---------------- Layout
/**
* {@inheritDoc}
*/
@Override
public Dimension getMinimumSize(JComponent c) {
return getPreferredSize(c);
}
/**
* {@inheritDoc}
*/
@Override
public Dimension getPreferredSize(JComponent c) {
Dimension dim = getEditorPreferredSize();
if (popupButton != null) {
dim.width += popupButton.getPreferredSize().width;
}
Insets insets = datePicker.getInsets();
dim.width += insets.left + insets.right;
dim.height += insets.top + insets.bottom;
return (Dimension)dim.clone();
}
    /**
     * Returns a preferred size for the editor. If the selected date
     * is null, returns a reasonable minimal width. <p>
     *
     * PENDING: how to find the "reasonable" width is open to discussion.
     * This implementation creates another datePicker, feeds it with
     * the formats and asks its prefWidth.
     *
     * @return the editor's preferred size
     */
    private Dimension getEditorPreferredSize() {
        Dimension dim = datePicker.getEditor().getPreferredSize();
        if (datePicker.getDate() == null) {
            // the editor tends to collapsing for empty values
            // JW: better do this in a custom editor?
            JXDatePicker picker = new JXDatePicker();
            picker.setFormats(datePicker.getFormats());
            dim.width = picker.getEditor().getPreferredSize().width;
        }
        return dim;
    }
    /**
     * Returns the baseline (ascent of the editor's text, centered in the
     * available height) for the given picker size, or -1 if the editor
     * has no views to compute it from.
     *
     * @param width the width to compute the baseline for
     * @param height the height to compute the baseline for
     * @return the baseline y coordinate, or -1
     */
    @Override
    public int getBaseline(int width, int height) {
        JFormattedTextField editor = datePicker.getEditor();
        View rootView = editor.getUI().getRootView(editor);
        if (rootView.getViewCount() > 0) {
            Insets insets = editor.getInsets();
            Insets insetsOut = datePicker.getInsets();
            // height available inside both editor and picker insets
            int nh = height - insets.top - insets.bottom
                    - insetsOut.top - insetsOut.bottom;
            int y = insets.top + insetsOut.top;
            View fieldView = rootView.getView(0);
            int vspan = (int) fieldView.getPreferredSpan(View.Y_AXIS);
            // center the text vertically in any leftover space
            if (nh != vspan) {
                int slop = nh - vspan;
                y += slop / 2;
            }
            FontMetrics fm = editor.getFontMetrics(editor.getFont());
            y += fm.getAscent();
            return y;
        }
        return -1;
    }
    //------------------------------- controller methods/classes
    /**
     * {@inheritDoc} <p>
     * Returns the given date cleaned to the start of day; vetoes if the
     * cleaned date equals the picker's current date or is unselectable
     * in the monthView.
     */
    @Override
    public Date getSelectableDate(Date date) throws PropertyVetoException {
        Date cleaned = date != null ? cleanupDate(date) : null;
        if (equalsDate(cleaned, datePicker.getDate())) {
            // one place to interrupt the update spiral
            throw new PropertyVetoException("date not selectable", null);
        }
        if (cleaned == null) return cleaned;
        if (datePicker.getMonthView().isUnselectableDate(cleaned)) {
            throw new PropertyVetoException("date not selectable", null);
        }
        return cleaned;
    }
    //-------------------- update methods called from listeners
    /**
     * Updates internals after picker's date property changed:
     * pushes the picker's date into the editor.
     */
    protected void updateFromDateChanged() {
        datePicker.getEditor().setValue(datePicker.getDate());
    }
    /**
     * Updates date related properties in picker/monthView
     * after a change in the editor's value. Reverts the
     * value if the new date is unselectable.
     *
     * @param oldDate the editor value before the change
     * @param newDate the editor value after the change
     */
    protected void updateFromValueChanged(Date oldDate, Date newDate) {
        if ((newDate != null) && datePicker.getMonthView().isUnselectableDate(newDate)) {
            revertValue(oldDate);
            return;
        }
        // the other place to interrupt the update spiral
        if (!equalsDate(newDate, datePicker.getMonthView().getSelectedDate())) {
            datePicker.getMonthView().setSelectedDate(newDate);
        }
        datePicker.setDate(newDate);
    }
    /**
     * Sets the editor back to the given date.
     *
     * PENDING: currently this resets at once - but it's a no-no,
     * because it happens during notification
     *
     * @param oldDate the old date to revert to
     */
    private void revertValue(Date oldDate) {
        datePicker.getEditor().setValue(oldDate);
    }
    /**
     * Updates date related properties picker/editor
     * after a change in the monthView's
     * selection.
     *
     * Here: does nothing if the change is intermediate.
     *
     * @param eventType the type of the selection change
     * @param adjusting flag to indicate whether the the selection change
     *    is intermediate
     */
    protected void updateFromSelectionChanged(EventType eventType, boolean adjusting) {
        if (adjusting) return;
        updateEditorValue();
    }
    /**
     * Updates internals after the picker's monthView has changed. <p>
     *
     * Cleans to popup. Wires the listeners. Updates date.
     * Updates formats' timezone.
     *
     * @param oldMonthView the picker's monthView before the change,
     *    may be null.
     */
    protected void updateFromMonthViewChanged(JXMonthView oldMonthView) {
        // the popup is stale after a monthView change - recreate lazily
        popup = null;
        updateMonthViewListeners(oldMonthView);
        if (oldMonthView != null) {
            oldMonthView.setComponentInputMapEnabled(false);
        }
        datePicker.getMonthView().setComponentInputMapEnabled(true);
        updateFormatTimeZone(datePicker.getTimeZone());
        updateEditorValue();
    }
    /**
     * Updates internals after the picker's editor property
     * has changed. <p>
     *
     * Updates the picker's children. Removes the old editor and
     * adds the new editor. Wires the editor listeners, it the flag
     * set. Typically, this method is called during installing the
     * componentUI with the flag set to false and true at all other
     * moments.
     *
     * @param oldEditor the picker's editor before the change,
     *    may be null.
     * @param updateListeners a flag to indicate whether the listeners
     *    are ready for usage.
     */
    protected void updateFromEditorChanged(JFormattedTextField oldEditor,
            boolean updateListeners) {
        if (oldEditor != null) {
            datePicker.remove(oldEditor);
            oldEditor.putClientProperty("doNotCancelPopup", null);
        }
        datePicker.add(datePicker.getEditor());
        // this is a trick to get hold of the client prop which
        // prevents closing of the popup
        JComboBox box = new JComboBox();
        Object preventHide = box.getClientProperty("doNotCancelPopup");
        datePicker.getEditor().putClientProperty("doNotCancelPopup", preventHide);
        updateEditorValue();
        if (updateListeners) {
            updateEditorListeners(oldEditor);
            datePicker.revalidate();
        }
    }
    /**
     * Updates internals after the selection model changed:
     * rewires the selection listener and synchs the editor.
     *
     * @param oldModel the model before the change.
     */
    protected void updateFromSelectionModelChanged(DateSelectionModel oldModel) {
        updateSelectionModelListeners(oldModel);
        updateEditorValue();
    }
    /**
     * Sets the editor value to the model's selectedDate.
     */
    private void updateEditorValue() {
        datePicker.getEditor().setValue(datePicker.getMonthView().getSelectedDate());
    }
//---------------------- updating other properties
/**
* Updates the picker's formats to the given TimeZone.
* @param zone the timezone to set on the formats.
*/
protected void updateFormatTimeZone(TimeZone zone) {
for (DateFormat format : datePicker.getFormats()) {
format.setTimeZone(zone);
}
}
    /**
     * Called form property listener, updates all components locale, formats
     * etc.
     *
     * @author PeS
     */
    protected void updateLocale() {
        Locale locale = datePicker.getLocale();
        updateFormatLocale(locale);
        updateChildLocale(locale);
    }
    /**
     * Installs a locale-aware default formatter factory on the editor,
     * unless custom formats are installed. Does nothing for a null locale.
     *
     * @param locale the new locale, may be null
     */
    private void updateFormatLocale(Locale locale) {
        if (locale != null) {
            /*
             * FIXME: PeS: It should probably use this
             *
             * however that gets beyond my understanding of the inner workings.
             * It reaches to UiManagerExt for date formats? Therefore I am using
             * simply JRE defined formats
             */
            // JW: yes should do that - but only if we have no custom formats
            // installed.
            // PENDING: timezone?
            if (getCustomFormats(datePicker.getEditor()) == null) {
                datePicker.getEditor().setFormatterFactory(
                        new DefaultFormatterFactory(
                                new DatePickerFormatterUIResource(locale)));
            }
            // DateFormat[] formats = new DateFormat[3];
            // SimpleDateFormat f =
            // (SimpleDateFormat)DateFormat.getDateInstance(DateFormat.SHORT,
            // locale);
            // if (!f.toPattern().contains("E")) {
            // f.applyPattern("EE " + f.toPattern());
            // }
            // formats[0] = f;
            // formats[1] = DateFormat.getDateInstance(DateFormat.DEFAULT,
            // locale);
            // formats[2] = DateFormat.getDateInstance(DateFormat.MEDIUM,
            // locale);
            // datePicker.setFormats(formats);
        }
    }
    /**
     * Propagates the given locale to editor, linkPanel and monthView.
     * Does nothing for a null locale.
     *
     * @param locale the new locale, may be null
     */
    private void updateChildLocale(Locale locale) {
        if (locale != null) {
            datePicker.getEditor().setLocale(locale);
            datePicker.getLinkPanel().setLocale(locale);
            datePicker.getMonthView().setLocale(locale);
        }
    }
    /**
     * Updates state after the picker's linkPanel changed: moves the
     * keyboard actions from the old panel to the new one, lets the popup
     * update its content and scrolls the monthView to the linkDate.
     *
     * @param oldLinkPanel the linkPanel before the change, may be null
     */
    protected void updateLinkPanel(JComponent oldLinkPanel) {
        if (oldLinkPanel != null) {
            uninstallLinkPanelKeyboardActions(oldLinkPanel);
        }
        installLinkPanelKeyboardActions();
        if (popup != null) {
            popup.updateLinkPanel(oldLinkPanel);
        }
        // PENDING: datepicker installs a new todayPanel if
        // any of the linkDate related properties changed.
        // should be less-rude - can set properties on the
        // panel? Fire more atomic changes.
        datePicker.getMonthView().setFirstDisplayedDate(datePicker.getLinkDate());
    }
    //------------------- methods called by installed actions
    /**
     * Hides the popup and commits the editor's pending edit to the
     * picker. A ParseException from the commit is deliberately ignored
     * (best-effort).
     */
    protected void commit() {
        hidePopup();
        try {
            datePicker.commitEdit();
        } catch (ParseException ex) {
            // can't help it
        }
    }
    /**
     * Hides the popup and cancels the picker's pending edit.
     */
    protected void cancel() {
        hidePopup();
        datePicker.cancelEdit();
    }
    /**
     * Hides the popup, if it has been created.
     */
    private void hidePopup() {
        if (popup != null) popup.setVisible(false);
    }
    /**
     * Navigates to linkDate. If commit, the linkDate is selected
     * and committed. If not commit, the linkDate is scrolled to visible, if the
     * monthview is open, does nothing for invisible monthView.
     *
     * @param commit boolean to indicate whether the linkDate should be
     *    selected and committed
     */
    protected void home(boolean commit) {
        if (commit) {
            Calendar cal = datePicker.getMonthView().getCalendar();
            cal.setTimeInMillis(datePicker.getLinkDate());
            datePicker.getMonthView().setSelectedDate(cal.getTime());
            datePicker.getMonthView().commitSelection();
        } else {
            datePicker.getMonthView().ensureDateVisible(datePicker.getLinkDate());
        }
    }
//---------------------- other stuff
/**
* Creates and returns the action for committing the picker's
* input.
*
* @return
*/
private Action createCommitAction() {
Action action = new AbstractAction() {
public void actionPerformed(ActionEvent e) {
commit();
}
};
return action;
}
/**
* Creates and returns the action for cancel the picker's
* edit.
*
* @return
*/
private Action createCancelAction() {
Action action = new AbstractAction() {
public void actionPerformed(ActionEvent e) {
cancel();
}
};
return action;
}
/**
 * Creates and returns the action navigating to the linkDate.
 *
 * @param commit whether the linkDate should be selected and committed
 * @return an action delegating to {@link #home(boolean)}
 */
private Action createHomeAction(final boolean commit) {
    return new AbstractAction() {
        public void actionPerformed(ActionEvent e) {
            home(commit);
        }
    };
}
/**
 * The wrapper for the editor cancel action.
 *
 * Replaces the editor's action registered under {@code TEXT_CANCEL_KEY},
 * invoking the original action (if one was registered) before cancelling
 * the picker's edit.
 *
 * PENDING: Need to extend TestAction?
 */
public class EditorCancelAction extends AbstractAction {
    private JFormattedTextField editor;
    private Action cancelAction;
    public static final String TEXT_CANCEL_KEY = "reset-field-edit";
    /**
     * Instantiates the action and installs it into the given editor's
     * actionMap, remembering the action it replaces.
     *
     * @param field the editor to install into
     */
    public EditorCancelAction(JFormattedTextField field) {
        install(field);
    }
    /**
     * Resets the contained editors actionMap to original and
     * nulls all fields. <p>
     * NOTE: after calling this method the action must not be
     * used! Create a new one for the same or another editor.
     */
    public void uninstall() {
        // NOTE(review): remove() only drops this override; it assumes the
        // original action lives in a parent actionMap — TODO confirm.
        editor.getActionMap().remove(TEXT_CANCEL_KEY);
        cancelAction = null;
        editor = null;
    }
    /**
     * Remembers the editor and its currently registered cancel action,
     * then installs this action under the same key.
     *
     * @param editor the editor to install into
     */
    private void install(JFormattedTextField editor) {
        this.editor = editor;
        cancelAction = editor.getActionMap().get(TEXT_CANCEL_KEY);
        editor.getActionMap().put(TEXT_CANCEL_KEY, this);
    }
    public void actionPerformed(ActionEvent e) {
        // Guard against a missing original action: previously this threw a
        // NullPointerException when nothing was registered under
        // TEXT_CANCEL_KEY, which also skipped the cancel() below.
        if (cancelAction != null) {
            cancelAction.actionPerformed(null);
        }
        cancel();
    }
}
/**
 * Creates and returns the action toggling the popup's visibility.
 *
 * @return a TogglePopupAction
 */
protected TogglePopupAction createTogglePopupAction() {
    return new TogglePopupAction();
}
/**
 * Toggles the popups visibility after preparing internal state.
 *
 * The popup is created lazily on first use. Showing is deferred via
 * invokeLater so the picker has regained focus before the popup opens.
 */
public void toggleShowPopup() {
    if (popup == null) {
        popup = createMonthViewPopup();
    }
    if (popup.isVisible()) {
        popup.setVisible(false);
    } else {
        datePicker.requestFocusInWindow();
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                // anchor the popup directly below the picker
                popup.show(datePicker,
                        0, datePicker.getHeight());
            }
        });
    }
}
/**
 * Creates the popup hosting the monthView, configured with the picker's
 * lightweight-popup setting.
 *
 * @return a newly created BasicDatePickerPopup
 */
private BasicDatePickerPopup createMonthViewPopup() {
    BasicDatePickerPopup monthViewPopup = new BasicDatePickerPopup();
    monthViewPopup.setLightWeightPopupEnabled(datePicker.isLightWeightPopupEnabled());
    return monthViewPopup;
}
/**
 * Action that toggles the visibility of the picker's monthView popup.
 * This action is used by the keyboard bindings.
 * (The previous Javadoc incorrectly described it as committing the
 * editor's value.)
 */
private class TogglePopupAction extends AbstractAction {
    public TogglePopupAction() {
        super("TogglePopup");
    }
    public void actionPerformed(ActionEvent ev) {
        toggleShowPopup();
    }
}
/**
 * Popup component that shows a JXMonthView component along with controlling
 * buttons to allow traversal of the months. Upon selection of a date the
 * popup will automatically hide itself and enter the selection into the
 * editable field of the JXDatePicker.
 */
protected class BasicDatePickerPopup extends JPopupMenu {
    public BasicDatePickerPopup() {
        setLayout(new BorderLayout());
        add(datePicker.getMonthView(), BorderLayout.CENTER);
        // install the current link panel (null: nothing to remove yet)
        updateLinkPanel(null);
    }
    /**
     * Swaps the link panel shown at the popup's south edge.
     *
     * @param oldLinkPanel the panel to remove, may be null
     */
    public void updateLinkPanel(JComponent oldLinkPanel) {
        if (oldLinkPanel != null) {
            remove(oldLinkPanel);
        }
        if (datePicker.getLinkPanel() != null) {
            add(datePicker.getLinkPanel(), BorderLayout.SOUTH);
        }
    }
}
/**
 * PENDING: JW - I <b>really</b> hate the one-in-all. Wont touch
 * it for now, maybe later. As long as we have it, the new
 * listeners (dateSelection) are here too, for consistency.
 * Adding the Layout here as well is ... , IMO.
 *
 * NOTE(review): Issue #658-swingx — the linkPanel is not updated when the
 * monthView's timeZone changes; see monthViewPropertyChange below.
 */
private class Handler implements LayoutManager, MouseListener, MouseMotionListener,
        PropertyChangeListener, DateSelectionListener, ActionListener, FocusListener {
    //------------- implement Mouse/MotionListener
    // set while dragging from the popup button; causes the matching
    // mouseReleased to be retargeted to the monthView
    private boolean _forwardReleaseEvent = false;
    public void mouseClicked(MouseEvent ev) {
    }
    public void mousePressed(MouseEvent ev) {
        if (!datePicker.isEnabled()) {
            return;
        }
        // PENDING JW: why do we need a mouseListener? the
        // arrowbutton should have the toggleAction installed?
        // Hmm... maybe doesn't ... check!
        // reason might be that we want to open on pressed
        // typically (or LF-dependent?),
        // the button's action is invoked on released.
        toggleShowPopup();
    }
    public void mouseReleased(MouseEvent ev) {
        if (!datePicker.isEnabled() || !datePicker.isEditable()) {
            return;
        }
        // Retarget mouse event to the month view.
        if (_forwardReleaseEvent) {
            JXMonthView monthView = datePicker.getMonthView();
            ev = SwingUtilities.convertMouseEvent(popupButton, ev,
                    monthView);
            monthView.dispatchEvent(ev);
            _forwardReleaseEvent = false;
        }
    }
    public void mouseEntered(MouseEvent ev) {
    }
    public void mouseExited(MouseEvent ev) {
    }
    public void mouseDragged(MouseEvent ev) {
        if (!datePicker.isEnabled() || !datePicker.isEditable()) {
            return;
        }
        _forwardReleaseEvent = true;
        if (!popup.isShowing()) {
            return;
        }
        // Retarget mouse event to the month view.
        JXMonthView monthView = datePicker.getMonthView();
        ev = SwingUtilities.convertMouseEvent(popupButton, ev, monthView);
        monthView.dispatchEvent(ev);
    }
    public void mouseMoved(MouseEvent ev) {
    }
    //------------------ implement DateSelectionListener
    public void valueChanged(DateSelectionEvent ev) {
        updateFromSelectionChanged(ev.getEventType(), ev.isAdjusting());
    }
    //------------------ implement propertyChangeListener
    /**
     * {@inheritDoc}
     *
     * Dispatches to a per-source handler. The trailing "value" branch is a
     * guard: editor "value" changes must arrive via the dedicated editor
     * property listener, not through this method.
     */
    public void propertyChange(PropertyChangeEvent e) {
        if (e.getSource() == datePicker) {
            datePickerPropertyChange(e);
        } else
        if (e.getSource() == datePicker.getEditor()) {
            editorPropertyChange(e);
        } else
        if (e.getSource() == datePicker.getMonthView()) {
            monthViewPropertyChange(e);
        } else
        if (e.getSource() == popupButton) {
            buttonPropertyChange(e);
        } else
        // PENDING - move back, ...
        if ("value".equals(e.getPropertyName())) {
            throw new IllegalStateException(
                    "editor listening is moved to dedicated propertyChangeLisener");
        }
    }
    /**
     * Handles property changes from datepicker's editor.
     *
     * @param evt the PropertyChangeEvent object describing the event source
     *   and the property that has changed
     */
    private void editorPropertyChange(PropertyChangeEvent evt) {
        if ("value".equals(evt.getPropertyName())) {
            updateFromValueChanged((Date) evt.getOldValue(), (Date) evt
                    .getNewValue());
        }
    }
    /**
     * Handles property changes from DatePicker.
     *
     * @param e the PropertyChangeEvent object describing the
     *   event source and the property that has changed
     */
    private void datePickerPropertyChange(PropertyChangeEvent e) {
        String property = e.getPropertyName();
        if ("date".equals(property)) {
            updateFromDateChanged();
        } else if ("enabled".equals(property)) {
            boolean isEnabled = datePicker.isEnabled();
            popupButton.setEnabled(isEnabled);
            datePicker.getEditor().setEnabled(isEnabled);
        } else if ("editable".equals(property)) {
            boolean isEditable = datePicker.isEditable();
            datePicker.getMonthView().setEnabled(isEditable);
            datePicker.getEditor().setEditable(isEditable);
        } else if (JComponent.TOOL_TIP_TEXT_KEY.equals(property)) {
            String tip = datePicker.getToolTipText();
            datePicker.getEditor().setToolTipText(tip);
            popupButton.setToolTipText(tip);
        } else if (JXDatePicker.MONTH_VIEW.equals(property)) {
            updateFromMonthViewChanged((JXMonthView) e.getOldValue());
        } else if (JXDatePicker.LINK_PANEL.equals(property)) {
            updateLinkPanel((JComponent) e.getOldValue());
        } else if (JXDatePicker.EDITOR.equals(property)) {
            updateFromEditorChanged((JFormattedTextField) e.getOldValue(), true);
        } else if ("componentOrientation".equals(property)) {
            datePicker.revalidate();
        } else if ("lightWeightPopupEnabled".equals(property)) {
            // Force recreation of the popup when this property changes.
            if (popup != null) {
                popup.setVisible(false);
            }
            popup = null;
        } else if ("formats".equals(property)) {
            updateFormatTimeZone(datePicker.getTimeZone());
        }
        else if ("locale".equals(property)) {
            updateLocale();
        }
    }
    /**
     * Handles propertyChanges from the picker's monthView.
     *
     * NOTE(review): per Issue #658-swingx a timeZone change should also
     * refresh the linkDate/linkPanel — TODO confirm against the fix.
     *
     * @param e the PropertyChangeEvent object describing the event source
     *   and the property that has changed
     */
    private void monthViewPropertyChange(PropertyChangeEvent e) {
        if ("selectionModel".equals(e.getPropertyName())) {
            updateFromSelectionModelChanged((DateSelectionModel) e.getOldValue());
        } else if ("timeZone".equals(e.getPropertyName())) {
            updateFormatTimeZone((TimeZone) e.getNewValue());
        }
    }
    /**
     * Handles propertyChanges from the picker's popupButton.
     *
     * PENDING: does nothing, kept while refactoring .. which
     * properties from the button do we want to handle?
     *
     * @param e the PropertyChangeEvent object describing the event source
     *   and the property that has changed.
     */
    private void buttonPropertyChange(PropertyChangeEvent e) {
    }
    //-------------- implement LayoutManager
    public void addLayoutComponent(String name, Component comp) { }
    public void removeLayoutComponent(Component comp) { }
    public Dimension preferredLayoutSize(Container parent) {
        return parent.getPreferredSize();
    }
    public Dimension minimumLayoutSize(Container parent) {
        return parent.getMinimumSize();
    }
    // Lays out editor and popup button inside the picker, honoring component
    // orientation: the button trails the editor in LTR and leads it in RTL.
    public void layoutContainer(Container parent) {
        Insets insets = datePicker.getInsets();
        int width = datePicker.getWidth() - insets.left - insets.right;
        int height = datePicker.getHeight() - insets.top - insets.bottom;
        int popupButtonWidth = popupButton != null ? popupButton.getPreferredSize().width : 0;
        boolean ltr = datePicker.getComponentOrientation().isLeftToRight();
        datePicker.getEditor().setBounds(ltr ? insets.left : insets.left + popupButtonWidth,
                insets.top,
                width - popupButtonWidth,
                height);
        if (popupButton != null) {
            popupButton.setBounds(ltr ? width - popupButtonWidth + insets.left : insets.left,
                    insets.top,
                    popupButtonWidth,
                    height);
        }
    }
    // ------------- implement actionListener (listening to monthView actionEvent)
    public void actionPerformed(ActionEvent e) {
        if (e == null) return;
        if (e.getSource() == datePicker.getMonthView()) {
            monthViewActionPerformed(e);
        } else if (e.getSource() == datePicker.getEditor()) {
            editorActionPerformed(e);
        }
    }
    /**
     * Listening to actionEvents fired by the picker's editor.
     *
     * @param e the actionEvent fired by the editor
     */
    private void editorActionPerformed(ActionEvent e) {
        // pass the commit on to the picker.
        commit();
    }
    /**
     * Listening to actionEvents fired by the picker's monthView.
     *
     * @param e the actionEvent fired by the monthView
     */
    private void monthViewActionPerformed(ActionEvent e) {
        if (JXMonthView.CANCEL_KEY.equals(e.getActionCommand())) {
            cancel();
        } else if (JXMonthView.COMMIT_KEY.equals(e.getActionCommand())) {
            commit();
        }
    }
    //------------------- focusListener
    /**
     * Issue #573-swingx - F2 in table doesn't focus the editor.
     *
     * Do the same as combo: manually pass-on the focus to the editor.
     */
    public void focusGained(FocusEvent e) {
        if (e.isTemporary()) return;
        if (e.getSource() == datePicker) {
            datePicker.getEditor().requestFocusInWindow();
        }
    }
    /**
     * #565-swingx: popup not hidden if clicked into combo.
     * The problem is that the combo uses the same trick as
     * this datepicker to prevent auto-closing of the popup
     * if focus is transfered back to the picker's editor.
     *
     * The idea is to hide the popup manually when the
     * permanentFocusOwner changes to somewhere else.
     *
     * JW: doesn't work - we only get the temporary lost,
     * but no permanent loss if the focus is transfered from
     * the focusOwner to a new permanentFocusOwner.
     *
     * OOOkaay ... looks like exclusively related to a combo:
     * we do get the expected focusLost if the focus is
     * transferred permanently from the temporary focusowner
     * to a new "normal" permanentFocusOwner (like a textfield),
     * we don't get it if transfered to a tricksing owner (like
     * a combo or picker)
     *
     * listen to keyboardFocusManager?
     */
    public void focusLost(FocusEvent e) {
        // intentionally empty — see the discussion above; earlier attempt:
        // LOG.info("lost - old " + e);
        // if (e.isTemporary()) return;
        // if (e.getSource() == datePicker.getEditor()) {
        // hidePopup();
        // }
    }
}
// ------------------ listener creation
// All listeners are backed by the single shared Handler instance below.
/**
 * Creates and returns the property change listener for the
 * picker's monthView
 * @return the listener for monthView properties
 */
protected PropertyChangeListener createMonthViewPropertyListener() {
    return getHandler();
}
/**
 * Creates and returns the focuslistener for picker and editor.
 * @return the focusListener
 */
protected FocusListener createFocusListener() {
    return getHandler();
}
/**
 * Creates and returns the ActionListener for the picker's editor.
 * @return the Actionlistener for the editor.
 */
protected ActionListener createEditorActionListener() {
    return getHandler();
}
/**
 * Creates and returns the ActionListener for the picker's monthView.
 *
 * @return the Actionlistener for the monthView.
 */
protected ActionListener createMonthViewActionListener() {
    return getHandler();
}
/**
 * Returns the listener for the dateSelection.
 *
 * @return the date selection listener
 */
protected DateSelectionListener createMonthViewSelectionListener() {
    return getHandler();
}
/**
 * @return a propertyChangeListener listening to
 *   editor property changes
 */
protected PropertyChangeListener createEditorPropertyListener() {
    return getHandler();
}
/**
 * Lazily creates and returns the shared all-mighty listener of everything
 *
 * @return the shared listener.
 */
private Handler getHandler() {
    if (handler == null) {
        handler = new Handler();
    }
    return handler;
}
/** @return the shared Handler acting as the picker's PropertyChangeListener */
protected PropertyChangeListener createPropertyChangeListener() {
    return getHandler();
}
/** @return the shared Handler acting as the picker's LayoutManager */
protected LayoutManager createLayoutManager() {
    return getHandler();
}
/** @return the shared Handler acting as MouseListener */
protected MouseListener createMouseListener() {
    return getHandler();
}
/** @return the shared Handler acting as MouseMotionListener */
protected MouseMotionListener createMouseMotionListener() {
    return getHandler();
}
//------------ utility methods
// duplication!!
/**
 * Returns a copy of the given date with the time-of-day fields cleared,
 * resolved against the monthView's calendar (and thus its timeZone).
 *
 * @param date the date to clean, must not be null
 * @return a new Date at the start of the given date's day
 */
private Date cleanupDate(Date date) {
    // only modify defensive copies
    return new Date(cleanupDate(date.getTime(), datePicker.getMonthView().getCalendar()));
}
// duplication!!
// PENDING: move to CalendarUtils ?
/**
 * Returns the start-of-day of the given time, resolved in the given
 * calendar. NOTE: mutates the passed-in calendar as a side effect.
 *
 * @param date the time in millis to clean
 * @param cal the calendar defining timeZone and day boundaries
 * @return the start of the day containing {@code date}, in millis
 */
private long cleanupDate(long date, Calendar cal) {
    cal.setTimeInMillis(date);
    // We only want to compare the day, month and year
    CalendarUtils.startOfDay(cal);
    return cal.getTimeInMillis();
}
/**
 * Checks the given dates for being equal, treating two nulls as equal.
 *
 * PENDING: this should be a utility somewhere ...
 *
 * @param current one of the dates to compare
 * @param date the other of the dates to compare
 * @return true if the two given dates are equal, false otherwise.
 */
private boolean equalsDate(Date current, Date date) {
    if (date == null) {
        return current == null;
    }
    return date.equals(current);
}
}
| Issue #658-swingx: DatePicker's linkPanel not updated on setTimeZone
forgot to commit the changed BasicDatePickerUI
| src/java/org/jdesktop/swingx/plaf/basic/BasicDatePickerUI.java | Issue #658-swingx: DatePicker's linkPanel not updated on setTimeZone | <ide><path>rc/java/org/jdesktop/swingx/plaf/basic/BasicDatePickerUI.java
<ide> protected void updateFromMonthViewChanged(JXMonthView oldMonthView) {
<ide> popup = null;
<ide> updateMonthViewListeners(oldMonthView);
<add> TimeZone oldTimeZone = null;
<ide> if (oldMonthView != null) {
<ide> oldMonthView.setComponentInputMapEnabled(false);
<add> oldTimeZone = oldMonthView.getTimeZone();
<ide> }
<ide> datePicker.getMonthView().setComponentInputMapEnabled(true);
<del> updateFormatTimeZone(datePicker.getTimeZone());
<add> updateTimeZone(oldTimeZone);
<add>// updateFormatsFromTimeZone(datePicker.getTimeZone());
<ide> updateEditorValue();
<ide> }
<ide>
<ide> * Updates the picker's formats to the given TimeZone.
<ide> * @param zone the timezone to set on the formats.
<ide> */
<del> protected void updateFormatTimeZone(TimeZone zone) {
<add> protected void updateFormatsFromTimeZone(TimeZone zone) {
<ide> for (DateFormat format : datePicker.getFormats()) {
<ide> format.setTimeZone(zone);
<ide> }
<add> }
<add>
<add> /**
<add> * Updates picker's timezone dependent properties on change notification
<add> * from the associated monthView.
<add> *
<add> * PENDING JW: DatePicker needs to send notification on timezone change?
<add> *
<add> * @param old the timezone before the change.
<add> */
<add> protected void updateTimeZone(TimeZone old) {
<add> updateFormatsFromTimeZone(datePicker.getTimeZone());
<add> updateLinkDate();
<add>
<add>// datePicker.firePropertyChange(propertyName, oldValue, newValue)
<add> }
<add>
<add> /**
<add> * Updates the picker's linkDate to be in synch with monthView's today.
<add> */
<add> protected void updateLinkDate() {
<add> datePicker.setLinkDate(datePicker.getMonthView().getTodayInMillis());
<ide> }
<ide>
<ide> /**
<ide> }
<ide> popup = null;
<ide> } else if ("formats".equals(property)) {
<del> updateFormatTimeZone(datePicker.getTimeZone());
<add> updateFormatsFromTimeZone(datePicker.getTimeZone());
<ide> }
<ide> else if ("locale".equals(property)) {
<ide> updateLocale();
<ide> if ("selectionModel".equals(e.getPropertyName())) {
<ide> updateFromSelectionModelChanged((DateSelectionModel) e.getOldValue());
<ide> } else if ("timeZone".equals(e.getPropertyName())) {
<del> updateFormatTimeZone((TimeZone) e.getNewValue());
<add>// updateFormatsFromTimeZone((TimeZone) e.getNewValue());
<add> updateTimeZone((TimeZone) e.getOldValue());
<add> } else if ("todayInMillis".equals(e.getPropertyName())) {
<add> updateLinkDate();
<ide> }
<ide> }
<ide> |
|
JavaScript | mit | d13a0427550b04652d8b0af56c9e3a06010910e3 | 0 | MyEtherWallet/MyEtherWallet,MyEtherWallet/MyEtherWallet,MyEtherWallet/MyEtherWallet | import { isObject } from 'lodash';
const mewApiError = 'Websocket connection failed';
// Maps known error-message fragments to the user-facing message shown in
// their place. Keys are matched as substrings of the runtime error message
// (see handleError), so they must stay byte-exact with what the underlying
// libraries emit.
export const knownErrors = {
  'Can\'t assign to property "request"': mewApiError,
  "Cannot create property 'request'": mewApiError,
  'GATT Server is disconnected. Cannot perform GATT operations.': mewApiError,
  "'CONNECTION ERROR: Couldn't connect to node on WS.'": mewApiError,
  // fixed user-facing typo: 'deveice' -> 'device'
  'Interaction timeout': 'Please unlock your device',
  "CONNECTION ERROR: Couldn't connect to node on WS":
    "Couldn't connect to WS node. Please refresh and try again.",
  'connection not open on send': mewApiError,
  'Ledger device: Condition of use not satisfied': mewApiError,
  'Provided address null is invalid': 'No address provided',
  'transaction underpriced': 'Transaction gas price too low',
  'invalid remainder': 'invalid remainder',
  'Internal JSON-RPC error': 'Internal JSON-RPC error. Execution Reverted',
  'Transaction has been reverted by the EVM':
    'Transaction has been reverted by the EVM',
  'TypeError: Failed to fetch': 'Request Failed. Please refresh and try again.',
  "TypeError: Cannot read properties of null (reading 'errorHandler')":
    'There was an error signing transaction with this wallet',
  "Returned values aren't valid, did it run Out of Gas?":
    "Returned values aren't valid, did it run Out of Gas?" +
    'You might also see this error if you are not using the ' +
    'correct ABI for the contract you are retrieving data from, ' +
    'requesting data from a block number that does not exist, ' +
    'or querying a node which is not fully synced.',
  '未能完成该操作。无效的自变量':
    'The operation could not be completed. Invalid argument.',
  "Cannot read properties of undefined (reading 'click')": ''
};
/**
 * Returns true when the given error matches one of the `knownErrors` keys.
 *
 * `err` may be a raw string, an Error-like object, or an object whose
 * `message` is itself an object carrying the real message (e.g. JSON-RPC
 * style errors).
 *
 * @param {*} err the raw error to classify
 * @returns {boolean} whether the error message contains a known fragment
 */
const handleError = err => {
  // Normalize the incoming error to its message payload.
  const message =
    err && err.message
      ? isObject(err.message)
        ? err.message.message
        : err.message
      : err;
  // Only string messages can be matched against the known-error keys.
  // Previously a truthy non-string message (number, array, ...) either
  // threw or matched incorrectly via a non-string `includes`.
  if (typeof message !== 'string') return false;
  return Object.keys(knownErrors).some(item => message.includes(item));
};
export default handleError;
| src/main/errorHandler.js | import { isObject } from 'lodash';
const mewApiError = 'Websocket connection failed';
export const knownErrors = {
'Can\'t assign to property "request"': mewApiError,
"Cannot create property 'request'": mewApiError,
'GATT Server is disconnected. Cannot perform GATT operations.': mewApiError,
"'CONNECTION ERROR: Couldn't connect to node on WS.'": mewApiError,
'Interaction timeout': 'Please unlock your deveice',
"CONNECTION ERROR: Couldn't connect to node on WS":
"Couldn't connect to WS node. Please refresh and try again.",
'connection not open on send': mewApiError,
'Ledger device: Condition of use not satisfied': mewApiError,
'Provided address null is invalid': 'No address provided',
'transaction underpriced': 'Transaction gas price too low',
'invalid remainder': 'invalid remainder',
'Internal JSON-RPC error': 'Internal JSON-RPC error. Execution Reverted',
'Transaction has been reverted by the EVM':
'Transaction has been reverted by the EVM',
'TypeError: Failed to fetch': 'Request Failed. Please refresh and try again.',
"TypeError: Cannot read properties of null (reading 'errorHandler')":
'There was an error signing transaction with this wallet',
"Returned values aren't valid, did it run Out of Gas?":
"Returned values aren't valid, did it run Out of Gas?" +
'You might also see this error if you are not using the ' +
'correct ABI for the contract you are retrieving data from, ' +
'requesting data from a block number that does not exist, ' +
'or querying a node which is not fully synced.',
'未能完成该操作。无效的自变量':
'The operation could not be completed. Invalid argument.'
};
const handleError = err => {
const errorValues = Object.keys(knownErrors);
const foundError = errorValues.find(item => {
const message =
err && err.message
? isObject(err.message)
? err.message.message
: err.message
: err;
if (!message) return false;
return message.includes(item);
});
return foundError ? true : false;
};
export default handleError;
| devop: �� added error to error handler
| src/main/errorHandler.js | devop: �� added error to error handler | <ide><path>rc/main/errorHandler.js
<ide> 'requesting data from a block number that does not exist, ' +
<ide> 'or querying a node which is not fully synced.',
<ide> '未能完成该操作。无效的自变量':
<del> 'The operation could not be completed. Invalid argument.'
<add> 'The operation could not be completed. Invalid argument.',
<add> "Cannot read properties of undefined (reading 'click')": ''
<ide> };
<ide>
<ide> const handleError = err => { |
|
JavaScript | apache-2.0 | 6ddbfb8a63e04fcb05a1603858f4584ed7f7da60 | 0 | mongodb/node-mongodb-native,mongodb/node-mongodb-native,mongodb/node-mongodb-native,mongodb/node-mongodb-native | 'use strict';
const net = require('net');
const tls = require('tls');
const Connection = require('./connection');
const Query = require('./commands').Query;
const createClientInfo = require('../topologies/shared').createClientInfo;
const MongoError = require('../error').MongoError;
/**
 * Opens a socket to the server described by `options` and performs the
 * initial ismaster handshake, yielding a ready-to-use Connection.
 *
 * When `options.family` is set only that IP family is attempted. Otherwise
 * IPv6 is tried first and, on any failure, IPv4 is attempted; note that in
 * the fallback case the original IPv6 error is discarded and only the IPv4
 * attempt's error is reported.
 *
 * @param {Object} options connection options (host, port, family, ssl, ...)
 * @param {Function} callback invoked with (err, connection); on error the
 *   second argument is the name of the socket event that failed
 */
function connect(options, callback) {
  if (options.family !== void 0) {
    makeConnection(options.family, options, (err, socket) => {
      if (err) {
        callback(err, socket); // in the error case, `socket` is the originating error event name
        return;
      }

      performInitialHandshake(new Connection(socket, options), options, callback);
    });

    return;
  }

  return makeConnection(6, options, (err, ipv6Socket) => {
    if (err) {
      makeConnection(4, options, (err, ipv4Socket) => {
        if (err) {
          callback(err, ipv4Socket); // in the error case, `ipv4Socket` is the originating error event name
          return;
        }

        performInitialHandshake(new Connection(ipv4Socket, options), options, callback);
      });

      return;
    }

    performInitialHandshake(new Connection(ipv6Socket, options), options, callback);
  });
}
/**
 * Returns whether the server's ismaster response reports a wire protocol
 * version the driver supports (v2 / MongoDB 2.6 or newer).
 *
 * @param {Object} ismaster the server's ismaster response (may be null)
 * @returns {boolean} true when the server is supported
 */
function isSupportedServer(ismaster) {
  // Boolean() guards against a null/undefined ismaster leaking through as
  // the return value (the previous code returned `null` in that case).
  return Boolean(
    ismaster && typeof ismaster.maxWireVersion === 'number' && ismaster.maxWireVersion >= 2
  );
}
/**
 * Builds the `saslSupportedMechs` handshake field for default-auth users.
 *
 * Returns `{ saslSupportedMechs: '<source>.<user>' }` when credentials are
 * present, a username is known and the mechanism is unset or 'DEFAULT';
 * otherwise returns an empty object so nothing is merged into the
 * handshake document.
 *
 * @param {Object} options connection options, possibly carrying credentials
 * @returns {Object} either an empty object or the saslSupportedMechs field
 */
function getSaslSupportedMechs(options) {
  const credentials = options && options.credentials;
  if (!credentials) {
    return {};
  }

  // TODO: revisit whether or not items like `options.user` and `options.dbName` should be checked here
  const mechanism = credentials.mechanism;
  const source = credentials.source || options.dbName || 'admin';
  const username = credentials.username || options.user;

  const isDefaultMechanism =
    typeof mechanism !== 'string' || mechanism.toUpperCase() === 'DEFAULT';
  if (!isDefaultMechanism || !username) {
    return {};
  }

  return { saslSupportedMechs: `${source}.${username}` };
}
/**
 * Runs the `ismaster` handshake on a freshly connected Connection: sends
 * client metadata, negotiates compression, and (for default auth) asks the
 * server for its SASL mechanisms. On success the ismaster response and the
 * handshake round-trip time are attached to the connection.
 *
 * @param {Connection} conn the connection to handshake on
 * @param {Object} options connection options (compression, credentials, ...)
 * @param {Function} callback invoked with (err, conn)
 */
function performInitialHandshake(conn, options, callback) {
  let compressors = [];
  if (options.compression && options.compression.compressors) {
    compressors = options.compression.compressors;
  }

  const handshakeDoc = Object.assign(
    {
      ismaster: true,
      client: createClientInfo(options),
      compression: compressors
    },
    getSaslSupportedMechs(options)
  );

  const start = new Date().getTime();
  runCommand(conn, 'admin.$cmd', handshakeDoc, options, (err, ismaster) => {
    if (err) {
      callback(err, null);
      return;
    }

    if (ismaster.ok === 0) {
      callback(new MongoError(ismaster), null);
      return;
    }

    if (!isSupportedServer(ismaster)) {
      const latestSupportedVersion = '2.6';
      const latestSupportedMaxWireVersion = 2;
      const message =
        'Server at ' +
        options.host +
        ':' +
        options.port +
        ' reports wire version ' +
        (ismaster.maxWireVersion || 0) +
        ', but this version of Node.js Driver requires at least ' +
        latestSupportedMaxWireVersion +
        // fixed: was "' (MongoDB' +" which rendered as "(MongoDB2.6)"
        ' (MongoDB ' +
        latestSupportedVersion +
        ').';

      callback(new MongoError(message), null);
      return;
    }

    // resolve compression: the first requested compressor that the server
    // also supports wins
    if (ismaster.compression) {
      const agreedCompressors = compressors.filter(
        compressor => ismaster.compression.indexOf(compressor) !== -1
      );

      if (agreedCompressors.length) {
        conn.agreedCompressor = agreedCompressors[0];
      }

      if (options.compression && options.compression.zlibCompressionLevel) {
        conn.zlibCompressionLevel = options.compression.zlibCompressionLevel;
      }
    }

    // NOTE: This is metadata attached to the connection while porting away from
    //       handshake being done in the `Server` class. Likely, it should be
    //       relocated, or at very least restructured.
    conn.ismaster = ismaster;
    conn.lastIsMasterMS = new Date().getTime() - start;

    callback(null, conn);
  });
}
// Whitelist of TLS socket options that may be passed straight through to
// `tls.connect`; anything else on the options object is stripped by
// `parseSslOptions` below.
const LEGAL_SSL_SOCKET_OPTIONS = [
  'pfx',
  'key',
  'passphrase',
  'cert',
  'ca',
  'ciphers',
  'NPNProtocols',
  'ALPNProtocols',
  'servername',
  'ecdhCurve',
  'secureProtocol',
  'secureContext',
  'session',
  'minDHSize',
  'crl',
  'rejectUnauthorized'
];
/**
 * Derives the basic socket connect options from the driver options.
 * A host containing a slash is treated as a UNIX domain socket path.
 *
 * @param {number} family the IP family (4 or 6)
 * @param {Object} options the raw connection options
 * @returns {Object} options suitable for `net.createConnection`
 */
function parseConnectOptions(family, options) {
  const host = typeof options.host === 'string' ? options.host : 'localhost';
  if (host.indexOf('/') !== -1) {
    return { path: host };
  }

  const port = typeof options.port === 'number' ? options.port : 27017;
  return { family, host, port, rejectUnauthorized: false };
}
/**
 * Builds the options object passed to `tls.connect`: the basic connect
 * options plus any whitelisted TLS options present on `options`.
 *
 * @param {number} family the IP family (4 or 6)
 * @param {Object} options the raw connection options
 * @returns {Object} options suitable for `tls.connect`
 */
function parseSslOptions(family, options) {
  const result = parseConnectOptions(family, options);

  // Merge in valid SSL options
  for (const name in options) {
    if (options[name] != null && LEGAL_SSL_SOCKET_OPTIONS.indexOf(name) !== -1) {
      result[name] = options[name];
    }
  }

  // Override checkServerIdentity behavior
  if (options.checkServerIdentity === false) {
    // Skip the identity check by returning undefined as per node documents
    // https://nodejs.org/api/tls.html#tls_tls_connect_options_callback
    result.checkServerIdentity = function() {
      return undefined;
    };
  } else if (typeof options.checkServerIdentity === 'function') {
    result.checkServerIdentity = options.checkServerIdentity;
  }

  // Set default sni servername to be the same as host
  if (result.servername == null) {
    result.servername = result.host;
  }

  return result;
}
/**
 * Creates a raw TCP (or TLS) socket to the server, applies keepAlive,
 * timeout and noDelay settings, and hands the connected socket to the
 * callback. On failure the callback receives the error plus the name of
 * the socket event that triggered it.
 *
 * @param {number} family the IP family to use (4 or 6)
 * @param {Object} options connection options
 * @param {Function} callback invoked with (err, socket) or
 *   (err, eventName) in the error case
 */
function makeConnection(family, options, callback) {
  const useSsl = typeof options.ssl === 'boolean' ? options.ssl : false;
  const keepAlive = typeof options.keepAlive === 'boolean' ? options.keepAlive : true;
  let keepAliveInitialDelay =
    typeof options.keepAliveInitialDelay === 'number' ? options.keepAliveInitialDelay : 300000;
  const noDelay = typeof options.noDelay === 'boolean' ? options.noDelay : true;
  const connectionTimeout =
    typeof options.connectionTimeout === 'number' ? options.connectionTimeout : 30000;
  const socketTimeout = typeof options.socketTimeout === 'number' ? options.socketTimeout : 360000;
  const rejectUnauthorized =
    typeof options.rejectUnauthorized === 'boolean' ? options.rejectUnauthorized : true;

  // never let the keepAlive probe delay exceed the socket timeout
  if (keepAliveInitialDelay > socketTimeout) {
    keepAliveInitialDelay = Math.round(socketTimeout / 2);
  }

  let socket;
  try {
    if (useSsl) {
      socket = tls.connect(parseSslOptions(family, options));
    } else {
      socket = net.createConnection(parseConnectOptions(family, options));
    }
  } catch (err) {
    return callback(err);
  }

  // the shorter connectionTimeout applies until the socket is connected;
  // connectHandler below widens it to socketTimeout
  socket.setKeepAlive(keepAlive, keepAliveInitialDelay);
  socket.setTimeout(connectionTimeout);
  socket.setNoDelay(noDelay);

  const errorEvents = ['error', 'close', 'timeout', 'parseError', 'connect'];
  // returns a one-shot handler that removes all connection-phase listeners
  // and reports the failure together with the event name that triggered it
  function errorHandler(eventName) {
    return err => {
      if (err == null || err === false) err = true;
      errorEvents.forEach(event => socket.removeAllListeners(event));
      socket.removeListener('connect', connectHandler);
      callback(err, eventName);
    };
  }

  function connectHandler() {
    errorEvents.forEach(event => socket.removeAllListeners(event));
    if (socket.authorizationError && rejectUnauthorized) {
      return callback(socket.authorizationError);
    }

    socket.setTimeout(socketTimeout);
    callback(null, socket);
  }

  socket.once('error', errorHandler('error'));
  socket.once('close', errorHandler('close'));
  socket.once('timeout', errorHandler('timeout'));
  socket.once('parseError', errorHandler('parseError'));
  socket.once('connect', connectHandler);
}
// Connection events that signal failure of the in-flight command in `runCommand`.
const CONNECTION_ERROR_EVENTS = ['error', 'close', 'timeout', 'parseError'];
/**
 * Sends a single OP_QUERY command over an established Connection and waits
 * for the matching reply. Listeners are fully torn down on both the
 * success and the failure path, and the socket timeout is reset afterward.
 *
 * @param {Connection} conn the connection to write to
 * @param {string} ns the namespace to run the command against (e.g. 'admin.$cmd')
 * @param {Object} command the command document to send
 * @param {Object} options may carry a `socketTimeout` for this command
 * @param {Function} callback invoked with (err, resultDocument)
 */
function runCommand(conn, ns, command, options, callback) {
  const socketTimeout = typeof options.socketTimeout === 'number' ? options.socketTimeout : 360000;
  const bson = conn.options.bson;
  const query = new Query(bson, ns, command, {
    numberToSkip: 0,
    numberToReturn: 1
  });

  function errorHandler(err) {
    conn.resetSocketTimeout();
    CONNECTION_ERROR_EVENTS.forEach(eventName => conn.removeListener(eventName, errorHandler));
    conn.removeListener('message', messageHandler);
    callback(err, null);
  }

  function messageHandler(msg) {
    // ignore replies belonging to other in-flight requests
    if (msg.responseTo !== query.requestId) {
      return;
    }

    conn.resetSocketTimeout();
    CONNECTION_ERROR_EVENTS.forEach(eventName => conn.removeListener(eventName, errorHandler));
    conn.removeListener('message', messageHandler);

    msg.parse({ promoteValues: true });
    callback(null, msg.documents[0]);
  }

  conn.setSocketTimeout(socketTimeout);
  CONNECTION_ERROR_EVENTS.forEach(eventName => conn.once(eventName, errorHandler));
  conn.on('message', messageHandler);
  conn.write(query.toBin());
}
module.exports = connect;
| connection/connect.js | 'use strict';
const net = require('net');
const tls = require('tls');
const Connection = require('./connection');
const Query = require('./commands').Query;
const createClientInfo = require('../topologies/shared').createClientInfo;
const MongoError = require('../error').MongoError;
function connect(options, callback) {
if (options.family !== void 0) {
makeConnection(options.family, options, (err, socket) => {
if (err) {
callback(err, socket); // in the error case, `socket` is the originating error event name
return;
}
performInitialHandshake(new Connection(socket, options), options, callback);
});
return;
}
return makeConnection(6, options, (err, ipv6Socket) => {
if (err) {
makeConnection(4, options, (err, ipv4Socket) => {
if (err) {
callback(err, ipv4Socket); // in the error case, `ipv4Socket` is the originating error event name
return;
}
performInitialHandshake(new Connection(ipv4Socket, options), options, callback);
});
return;
}
performInitialHandshake(new Connection(ipv6Socket, options), options, callback);
});
}
function isSupportedServer(ismaster) {
return ismaster && typeof ismaster.maxWireVersion === 'number' && ismaster.maxWireVersion >= 2;
}
function getSaslSupportedMechs(options) {
if (!(options && options.credentials)) {
return {};
}
const credentials = options.credentials;
const authMechanism = credentials.mechanism;
const authSource = credentials.source || options.dbName || 'admin';
const user = credentials.username || options.user;
if (typeof authMechanism === 'string' && authMechanism.toUpperCase() !== 'DEFAULT') {
return {};
}
if (!user) {
return {};
}
return { saslSupportedMechs: `${authSource}.${user}` };
}
/**
 * Runs the `ismaster` handshake on a freshly established connection:
 * negotiates wire compression, validates that the server speaks a supported
 * wire protocol version, and records handshake metadata on the connection.
 *
 * @param {Connection} conn the newly established connection
 * @param {Object} options connection options (compression, host, port, auth)
 * @param {Function} callback invoked with (err, conn)
 */
function performInitialHandshake(conn, options, callback) {
  let compressors = [];
  if (options.compression && options.compression.compressors) {
    compressors = options.compression.compressors;
  }

  const handshakeDoc = Object.assign(
    {
      ismaster: true,
      client: createClientInfo(options),
      compression: compressors
    },
    getSaslSupportedMechs(options)
  );

  const start = new Date().getTime();
  runCommand(conn, 'admin.$cmd', handshakeDoc, options, (err, ismaster) => {
    if (err) {
      callback(err, null);
      return;
    }

    if (ismaster.ok === 0) {
      callback(new MongoError(ismaster), null);
      return;
    }

    if (!isSupportedServer(ismaster)) {
      const latestSupportedVersion = '2.6';
      const latestSupportedMaxWireVersion = 2;
      // BUGFIX: added the missing space between "MongoDB" and the version
      // number (previously rendered as "(MongoDB2.6)").
      const message =
        'Server at ' +
        options.host +
        ':' +
        options.port +
        ' reports wire version ' +
        (ismaster.maxWireVersion || 0) +
        ', but this version of Node.js Driver requires at least ' +
        latestSupportedMaxWireVersion +
        ' (MongoDB ' +
        latestSupportedVersion +
        ').';

      callback(new MongoError(message), null);
      return;
    }

    // Resolve compression: the first compressor we offered that the server
    // also supports wins.
    if (ismaster.compression) {
      const agreedCompressors = compressors.filter(
        compressor => ismaster.compression.indexOf(compressor) !== -1
      );

      if (agreedCompressors.length) {
        conn.agreedCompressor = agreedCompressors[0];
      }

      if (options.compression && options.compression.zlibCompressionLevel) {
        conn.zlibCompressionLevel = options.compression.zlibCompressionLevel;
      }
    }

    // NOTE: This is metadata attached to the connection while porting away from
    //       handshake being done in the `Server` class. Likely, it should be
    //       relocated, or at very least restructured.
    conn.ismaster = ismaster;
    conn.lastIsMasterMS = new Date().getTime() - start;
    callback(null, conn);
  });
}
// Whitelist of tls.connect options that may be copied verbatim from the
// user-supplied connection options (see parseSslOptions).
const LEGAL_SSL_SOCKET_OPTIONS = [
  'pfx',
  'key',
  'passphrase',
  'cert',
  'ca',
  'ciphers',
  'NPNProtocols',
  'ALPNProtocols',
  'servername',
  'ecdhCurve',
  'secureProtocol',
  'secureContext',
  'session',
  'minDHSize',
  'crl',
  'rejectUnauthorized'
];
// Resolves the options object handed to net.createConnection. A host value
// containing a slash is treated as a unix domain socket path rather than a
// hostname, in which case only { path } is returned.
function parseConnectOptions(family, options) {
  const hostname = typeof options.host === 'string' ? options.host : 'localhost';

  if (hostname.indexOf('/') !== -1) {
    // unix domain socket
    return { path: hostname };
  }

  const portNumber = typeof options.port === 'number' ? options.port : 27017;
  return {
    family: family,
    host: hostname,
    port: portNumber,
    rejectUnauthorized: false
  };
}
// Builds the option object passed to tls.connect: the plain connect options
// plus any whitelisted SSL options copied from the user's options, with
// checkServerIdentity and SNI servername defaults resolved.
function parseSslOptions(family, options) {
  const result = parseConnectOptions(family, options);

  // Merge in valid SSL options
  for (const name in options) {
    if (options[name] != null && LEGAL_SSL_SOCKET_OPTIONS.indexOf(name) !== -1) {
      result[name] = options[name];
    }
  }

  // Override checkServerIdentity behavior
  if (options.checkServerIdentity === false) {
    // Skip the identity check by returning undefined as per node documents
    // https://nodejs.org/api/tls.html#tls_tls_connect_options_callback
    result.checkServerIdentity = function() {
      return undefined;
    };
  } else if (typeof options.checkServerIdentity === 'function') {
    result.checkServerIdentity = options.checkServerIdentity;
  }

  // Set default sni servername to be the same as host
  if (result.servername == null) {
    result.servername = result.host;
  }

  return result;
}
// Establishes a raw TCP (or TLS) socket to the server described by `options`
// for the given IP family and hands it to `callback` once connected. All
// socket option defaults (keepAlive, noDelay, timeouts) are resolved here.
function makeConnection(family, options, callback) {
  const useSsl = typeof options.ssl === 'boolean' ? options.ssl : false;
  const keepAlive = typeof options.keepAlive === 'boolean' ? options.keepAlive : true;
  let keepAliveInitialDelay =
    typeof options.keepAliveInitialDelay === 'number' ? options.keepAliveInitialDelay : 300000;
  const noDelay = typeof options.noDelay === 'boolean' ? options.noDelay : true;
  const connectionTimeout =
    typeof options.connectionTimeout === 'number' ? options.connectionTimeout : 30000;
  const socketTimeout = typeof options.socketTimeout === 'number' ? options.socketTimeout : 360000;
  const rejectUnauthorized =
    typeof options.rejectUnauthorized === 'boolean' ? options.rejectUnauthorized : true;

  // The keep-alive probe must fire before the socket would time out.
  if (keepAliveInitialDelay > socketTimeout) {
    keepAliveInitialDelay = Math.round(socketTimeout / 2);
  }

  let socket;
  try {
    if (useSsl) {
      socket = tls.connect(parseSslOptions(family, options));
    } else {
      socket = net.createConnection(parseConnectOptions(family, options));
    }
  } catch (err) {
    return callback(err);
  }

  socket.setKeepAlive(keepAlive, keepAliveInitialDelay);
  socket.setTimeout(connectionTimeout);
  socket.setNoDelay(noDelay);

  const errorEvents = ['error', 'close', 'timeout', 'parseError', 'connect'];
  // Produces a one-shot handler for a failure event; detaches every other
  // listener first so `callback` fires at most once.
  function errorHandler(eventName) {
    return err => {
      // Some events (e.g. 'close') emit no error object; normalize to truthy
      // so the caller can distinguish "failed" from "no error". In that case
      // the second callback argument carries the originating event name.
      if (err == null || err === false) err = true;
      errorEvents.forEach(event => socket.removeAllListeners(event));
      socket.removeListener('connect', connectHandler);
      callback(err, eventName);
    };
  }

  function connectHandler() {
    errorEvents.forEach(event => socket.removeAllListeners(event));
    // For TLS sockets, honor certificate validation failures when requested.
    if (socket.authorizationError && rejectUnauthorized) {
      return callback(socket.authorizationError);
    }

    // Switch from the (shorter) connect timeout to the operational timeout.
    socket.setTimeout(socketTimeout);
    callback(null, socket);
  }

  socket.once('error', errorHandler('error'));
  socket.once('close', errorHandler('close'));
  socket.once('timeout', errorHandler('timeout'));
  socket.once('parseError', errorHandler('parseError'));
  socket.once('connect', connectHandler);
}
// Events that indicate the connection failed while a command was in flight.
const CONNECTION_ERROR_EVENTS = ['error', 'close', 'timeout', 'parseError'];

// Sends a single command on a raw Connection and waits for its reply,
// tearing down all listeners before invoking `callback` exactly once.
function runCommand(conn, ns, command, options, callback) {
  const socketTimeout = typeof options.socketTimeout === 'number' ? options.socketTimeout : 360000;
  const bson = conn.options.bson;
  const query = new Query(bson, ns, command, {
    numberToSkip: 0,
    numberToReturn: 1
  });

  function errorHandler(err) {
    conn.resetSocketTimeout();
    CONNECTION_ERROR_EVENTS.forEach(eventName => conn.removeListener(eventName, errorHandler));
    conn.removeListener('message', messageHandler);
    callback(err, null);
  }

  function messageHandler(msg) {
    // Ignore replies to other requests multiplexed on this connection.
    if (msg.responseTo !== query.requestId) {
      return;
    }

    conn.resetSocketTimeout();
    CONNECTION_ERROR_EVENTS.forEach(eventName => conn.removeListener(eventName, errorHandler));
    conn.removeListener('message', messageHandler);

    msg.parse({ promoteValues: true });
    callback(null, msg.documents[0]);
  }

  conn.setSocketTimeout(socketTimeout);
  CONNECTION_ERROR_EVENTS.forEach(eventName => conn.once(eventName, errorHandler));
  conn.on('message', messageHandler);
  conn.write(query.toBin());
}

module.exports = connect;
| refactor(connect): leave note regarding `user` and `db` checks
| connection/connect.js | refactor(connect): leave note regarding `user` and `db` checks | <ide><path>onnection/connect.js
<ide> }
<ide>
<ide> const credentials = options.credentials;
<add>
<add> // TODO: revisit whether or not items like `options.user` and `options.dbName` should be checked here
<ide> const authMechanism = credentials.mechanism;
<ide> const authSource = credentials.source || options.dbName || 'admin';
<ide> const user = credentials.username || options.user; |
|
Java | apache-2.0 | 86dd0e49192939cdfc6775e54632b071a465bf38 | 0 | babble/babble,babble/babble,babble/babble,babble/babble,babble/babble,babble/babble | // Security.java
/**
* Copyright (C) 2008 10gen Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ed.security;
import java.util.*;
import java.io.*;
import ed.appserver.*;
import ed.js.engine.*;
public class Security {

    // Global kill switch: when the "NO-SECURITY" config flag is set, every
    // security check in this class passes unconditionally.
    public final static boolean OFF = ed.util.Config.get().getBoolean( "NO-SECURITY" );

    // Class-name fragment derived from the module base path; used to build
    // the trusted package prefixes in SECURE below.
    public final static String _baseClass = Convert.cleanName( Module.getBase());

    // Site names whose generated code is granted trusted ("core") status.
    public final static Set<String> allowedSites;

    static {
        Set<String> s = new HashSet<String>();
        s.add( "admin" );
        s.add( "www" );
        s.add( "grid" );
        allowedSites = Collections.synchronizedSet( s );
    }

    /** @return true if the given site name is one of the trusted sites. */
    public final static boolean isAllowedSite( String siteName ){
        return allowedSites.contains( siteName );
    }

    // Class-name prefixes considered trusted core JS code. Matching is done
    // with String.startsWith, so each entry must be an unambiguous prefix.
    final static String SECURE[] = new String[]{
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "corejs_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "core_modules_admin_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_admin_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_www_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_grid_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_modules_" ,
        Convert.DEFAULT_PACKAGE + ".lastline" ,
        Convert.DEFAULT_PACKAGE + ".src_main_ed_" ,
        Convert.DEFAULT_PACKAGE + "._home_yellow_code_for_hudson" ,
        Convert.DEFAULT_PACKAGE + "." + new File( "src/test/ed" ).getAbsolutePath().replace( '/' , '_' )
    };

    /**
     * Decides whether the currently executing code is trusted.
     *
     * @return true if security is disabled, or the closest generated-JS
     *         frame on the current stack matches one of the SECURE prefixes.
     */
    public static boolean isCoreJS(){
        if ( OFF )
            return true;

        String topjs = getTopJS();
        if ( topjs == null ) {
            return false;
        }

        for ( int i=0; i<SECURE.length; i++ )
            if ( topjs.startsWith( SECURE[i] ) )
                return true;

        return false;
    }

    /**
     * @return the class name of the topmost stack frame that belongs to
     *         generated JS code (the Convert default package), or null if
     *         no such frame is on the current stack.
     */
    public static String getTopJS(){
        StackTraceElement[] st = Thread.currentThread().getStackTrace();

        for ( int i=0; i<st.length; i++ ){
            StackTraceElement e = st[i];
            if ( e.getClassName().startsWith( Convert.DEFAULT_PACKAGE + "." ) )
                return e.getClassName();
        }

        return null;
    }

    /**
     * Finds the topmost "user" stack frame: the first frame from generated JS
     * code, or failing that, the first frame that is not part of the ed.* or
     * java.* runtime.
     *
     * @return the topmost user frame, or null if none exists.
     */
    public static StackTraceElement getTopUserStackElement(){
        StackTraceElement[] st = Thread.currentThread().getStackTrace();

        for ( int i=0; i<st.length; i++ ){
            StackTraceElement e = st[i];

            final String name = e.getClassName();

            if ( name.startsWith( Convert.DEFAULT_PACKAGE + "." ) )
                return e;

            if ( name.startsWith( "ed." ) || name.startsWith( "java." ) )
                continue;

            return e;
        }

        return null;

    }
}
| src/main/ed/security/Security.java | // Security.java
/**
* Copyright (C) 2008 10gen Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ed.security;
import java.util.*;
import java.io.*;
import ed.appserver.*;
import ed.js.engine.*;
public class Security {

    // Global kill switch: when the "NO-SECURITY" config flag is set, every
    // security check in this class passes unconditionally.
    public final static boolean OFF = ed.util.Config.get().getBoolean( "NO-SECURITY" );

    // Class-name fragment derived from the module base path; used to build
    // the trusted package prefixes in SECURE below.
    public final static String _baseClass = Convert.cleanName( Module.getBase());

    // Site names whose generated code is granted trusted ("core") status.
    public final static Set<String> allowedSites;

    static {
        Set<String> s = new HashSet<String>();
        s.add( "admin" );
        s.add( "www" );
        s.add( "grid" );
        allowedSites = Collections.synchronizedSet( s );
    }

    /** @return true if the given site name is one of the trusted sites. */
    public final static boolean isAllowedSite( String siteName ){
        return allowedSites.contains( siteName );
    }

    // Class-name prefixes considered trusted core JS code. Matching is done
    // with String.startsWith, so each entry must be an unambiguous prefix.
    final static String SECURE[] = new String[]{
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "corejs_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "core_modules_admin_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_admin_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_www_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_grid_" ,
        Convert.DEFAULT_PACKAGE + "." + _baseClass + "sites_modules_" ,
        Convert.DEFAULT_PACKAGE + ".lastline" ,
        Convert.DEFAULT_PACKAGE + ".src_main_ed_" ,
        Convert.DEFAULT_PACKAGE + "._home_yellow_code_for_hudson" ,
        Convert.DEFAULT_PACKAGE + "." + new File( "src/test/ed" ).getAbsolutePath().replace( '/' , '_' )
    };

    /**
     * Decides whether the currently executing code is trusted.
     *
     * @return true if security is disabled, or the closest generated-JS
     *         frame on the current stack matches one of the SECURE prefixes.
     */
    public static boolean isCoreJS(){
        if ( OFF )
            return true;

        String topjs = getTopJS();
        if ( topjs == null ) {
            return false;
        }

        for ( int i=0; i<SECURE.length; i++ )
            if ( topjs.startsWith( SECURE[i] ) )
                return true;

        return false;
    }

    /**
     * @return the class name of the topmost stack frame that belongs to
     *         generated JS code (the Convert default package), or null if
     *         no such frame is on the current stack.
     */
    public static String getTopJS(){
        StackTraceElement[] st = Thread.currentThread().getStackTrace();

        for ( int i=0; i<st.length; i++ ){
            StackTraceElement e = st[i];
            if ( e.getClassName().startsWith( Convert.DEFAULT_PACKAGE + "." ) )
                return e.getClassName();
        }

        return null;
    }

    /**
     * Finds the topmost "user" stack frame: the first frame from generated JS
     * code, or failing that, the first frame that is not part of the ed.* or
     * java.* runtime. Added so both revisions of this class expose the same
     * accessor surface.
     *
     * @return the topmost user frame, or null if none exists.
     */
    public static StackTraceElement getTopUserStackElement(){
        StackTraceElement[] st = Thread.currentThread().getStackTrace();

        for ( int i=0; i<st.length; i++ ){
            StackTraceElement e = st[i];

            final String name = e.getClassName();

            if ( name.startsWith( Convert.DEFAULT_PACKAGE + "." ) )
                return e;

            if ( name.startsWith( "ed." ) || name.startsWith( "java." ) )
                continue;

            return e;
        }

        return null;
    }
}
| getTopUserStackElemenet
| src/main/ed/security/Security.java | getTopUserStackElemenet | <ide><path>rc/main/ed/security/Security.java
<ide>
<ide> return null;
<ide> }
<add>
<add> public static StackTraceElement getTopUserStackElement(){
<add> StackTraceElement[] st = Thread.currentThread().getStackTrace();
<add>
<add> for ( int i=0; i<st.length; i++ ){
<add> StackTraceElement e = st[i];
<add>
<add> final String name = e.getClassName();
<add>
<add> if ( name.startsWith( Convert.DEFAULT_PACKAGE + "." ) )
<add> return e;
<add>
<add> if ( name.startsWith( "ed." ) || name.startsWith( "java." ) )
<add> continue;
<add>
<add> return e;
<add> }
<add>
<add> return null;
<add>
<add> }
<ide> } |
|
Java | bsd-2-clause | d2dd020ac0a44a35b8b9229c657b77538c1ed4ac | 0 | chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio | /*
* Copyright (c) 2003-2008 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jmex.model.collada;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jme.animation.Bone;
import com.jme.animation.BoneAnimation;
import com.jme.animation.BoneTransform;
import com.jme.animation.SkinNode;
import com.jme.animation.TextureKeyframeController;
import com.jme.bounding.BoundingBox;
import com.jme.image.Image;
import com.jme.image.Texture;
import com.jme.image.Texture.WrapAxis;
import com.jme.light.DirectionalLight;
import com.jme.light.Light;
import com.jme.light.LightNode;
import com.jme.light.PointLight;
import com.jme.light.SpotLight;
import com.jme.math.FastMath;
import com.jme.math.Matrix3f;
import com.jme.math.Matrix4f;
import com.jme.math.Quaternion;
import com.jme.math.Vector2f;
import com.jme.math.Vector3f;
import com.jme.renderer.Camera;
import com.jme.renderer.ColorRGBA;
import com.jme.renderer.Renderer;
import com.jme.scene.CameraNode;
import com.jme.scene.Controller;
import com.jme.scene.Geometry;
import com.jme.scene.Node;
import com.jme.scene.SharedMesh;
import com.jme.scene.SharedNode;
import com.jme.scene.Spatial;
import com.jme.scene.TexCoords;
import com.jme.scene.TriMesh;
import com.jme.scene.state.BlendState;
import com.jme.scene.state.ClipState;
import com.jme.scene.state.ColorMaskState;
import com.jme.scene.state.CullState;
import com.jme.scene.state.FogState;
import com.jme.scene.state.MaterialState;
import com.jme.scene.state.RenderState;
import com.jme.scene.state.ShadeState;
import com.jme.scene.state.StencilState;
import com.jme.scene.state.TextureState;
import com.jme.scene.state.ZBufferState;
import com.jme.system.DisplaySystem;
import com.jme.util.TextureManager;
import com.jme.util.export.binary.BinaryExporter;
import com.jme.util.export.binary.BinaryImporter;
import com.jme.util.geom.BufferUtils;
import com.jme.util.geom.GeometryTool;
import com.jme.util.geom.VertMap;
import com.jme.util.resource.ResourceLocatorTool;
import com.jmex.model.collada.schema.COLLADAType;
import com.jmex.model.collada.schema.IDREF_arrayType;
import com.jmex.model.collada.schema.InstanceWithExtra;
import com.jmex.model.collada.schema.Name_arrayType;
import com.jmex.model.collada.schema.TargetableFloat3;
import com.jmex.model.collada.schema.accessorType;
import com.jmex.model.collada.schema.animationType;
import com.jmex.model.collada.schema.assetType;
import com.jmex.model.collada.schema.bind_materialType;
import com.jmex.model.collada.schema.cameraType;
import com.jmex.model.collada.schema.collada_schema_1_4_1Doc;
import com.jmex.model.collada.schema.colorType;
import com.jmex.model.collada.schema.common_newparam_type;
import com.jmex.model.collada.schema.controllerType;
import com.jmex.model.collada.schema.effectType;
import com.jmex.model.collada.schema.float4x4;
import com.jmex.model.collada.schema.float_arrayType;
import com.jmex.model.collada.schema.fx_sampler2D_common;
import com.jmex.model.collada.schema.fx_surface_common;
import com.jmex.model.collada.schema.geometryType;
import com.jmex.model.collada.schema.imageType;
import com.jmex.model.collada.schema.instance_controllerType;
import com.jmex.model.collada.schema.instance_geometryType;
import com.jmex.model.collada.schema.instance_materialType;
import com.jmex.model.collada.schema.instance_physics_modelType;
import com.jmex.model.collada.schema.lambertType;
import com.jmex.model.collada.schema.library_animationsType;
import com.jmex.model.collada.schema.library_camerasType;
import com.jmex.model.collada.schema.library_controllersType;
import com.jmex.model.collada.schema.library_effectsType;
import com.jmex.model.collada.schema.library_geometriesType;
import com.jmex.model.collada.schema.library_imagesType;
import com.jmex.model.collada.schema.library_lightsType;
import com.jmex.model.collada.schema.library_materialsType;
import com.jmex.model.collada.schema.library_nodesType;
import com.jmex.model.collada.schema.library_physics_modelsType;
import com.jmex.model.collada.schema.library_physics_scenesType;
import com.jmex.model.collada.schema.library_visual_scenesType;
import com.jmex.model.collada.schema.lightType;
import com.jmex.model.collada.schema.materialType;
import com.jmex.model.collada.schema.meshType;
import com.jmex.model.collada.schema.nodeType2;
import com.jmex.model.collada.schema.opticsType;
import com.jmex.model.collada.schema.orthographicType;
import com.jmex.model.collada.schema.paramType3;
import com.jmex.model.collada.schema.passType3;
import com.jmex.model.collada.schema.perspectiveType;
import com.jmex.model.collada.schema.phongType;
import com.jmex.model.collada.schema.physics_modelType;
import com.jmex.model.collada.schema.physics_sceneType;
import com.jmex.model.collada.schema.polygonsType;
import com.jmex.model.collada.schema.rigid_bodyType;
import com.jmex.model.collada.schema.sceneType;
import com.jmex.model.collada.schema.shapeType2;
import com.jmex.model.collada.schema.skinType;
import com.jmex.model.collada.schema.sourceType;
import com.jmex.model.collada.schema.techniqueType2;
import com.jmex.model.collada.schema.techniqueType4;
import com.jmex.model.collada.schema.technique_commonType;
import com.jmex.model.collada.schema.technique_commonType2;
import com.jmex.model.collada.schema.technique_commonType4;
import com.jmex.model.collada.schema.textureType;
import com.jmex.model.collada.schema.trianglesType;
import com.jmex.model.collada.schema.vertex_weightsType;
import com.jmex.model.collada.schema.visual_sceneType;
/**
* <code>ColladaNode</code> provides a mechanism to parse and load a COLLADA
* (COLLAborative Design Activity) model. Making use of a DOM parse, the XML
* formatted COLLADA file is parsed into Java Type classes and then processed by
* jME. This processing is currently aimed at the 1.4.1 release of the COLLADA
* Specification, and will, in most likelyhood, require updating with a new
* release of COLLADA.
*
* @author Mark Powell, Rikard Herlitz, and others
*/
public class ColladaImporter {
private static final Logger logger = Logger.getLogger(ColladaImporter.class
.getName());
// asset information
private String modelAuthor;
private String tool;
private String revision;
private String unitName;
private float unitMeter;
private String upAxis;
private static ColladaImporter instance;
private String name;
private String[] boneIds;
private static boolean squelch;
// If true, models loaded by ColladaImporter will automatically have
// geometry optimization applied. default: true.
public static boolean OPTIMIZE_GEOMETRY = true;
public static OptimizeCallback optimizeCallBack = null;
private Map<String, Object> resourceLibrary;
private ArrayList<String> controllerNames;
private ArrayList<String> uvControllerNames;
private ArrayList<String> skinNodeNames;
private ArrayList<String> cameraNodeNames;
private ArrayList<String> lightNodeNames;
private ArrayList<String> geometryNames;
private ArrayList<String> skeletonNames;
private Map<String, Object> userInformation;
private Map<TriMesh, String> subMaterialLibrary;
private Node model;
/**
* Unique Serial ID for ColladaNode
*/
private static final long serialVersionUID = -4024091270314000507L;
/**
* Default constructor instantiates a ColladaImporter object. A basic Node
* structure is built and no data is loaded until the <code>load</code>
* method is called.
*
* @param name
* the name of the node.
*/
private ColladaImporter(String name) {
this.name = name;
}
public static boolean hasUserInformation(String key) {
if (instance.userInformation == null) {
return false;
} else {
return instance.userInformation.containsKey(key);
}
}
public static void addUserInformation(String key, Object value) {
if (instance.userInformation == null) {
instance.userInformation = new HashMap<String, Object>();
}
instance.userInformation.put(key, value);
}
public static Object getUserInformation(String key) {
if (instance.userInformation == null) {
return null;
} else {
return instance.userInformation.get(key);
}
}
/**
* load takes the model path as a string object and uses the
* COLLADASchemaDoc object to load it. This is then stored as a heirarchy of
* data objects. This heirarchy is passed to the processCollada method to
* build the jME data structures necessary to view the model.
*
* @param source
* the source to import.
* @param textureDirectory
* the location of the textures.
* @param name
* the name of the node.
*/
    public static void load(InputStream source, String name) {
        // NOTE(review): if a previous importer instance already exists, the
        // supplied name is ignored and the new source is loaded into it —
        // confirm this reuse is intentional before relying on the name.
        if (instance == null) {
            instance = new ColladaImporter(name);
        }
        instance.load(source);
    }

    /**
     * load is called by the static load method, creating an instance of the
     * model to be returned. Parses the COLLADA XML document from the stream
     * and builds the jME scene graph via processCollada. On failure the
     * problem is logged and the method returns with a partially initialized
     * (empty) model.
     *
     * @param source
     *            the source to import.
     */
    private void load(InputStream source) {
        model = new Node(name);
        resourceLibrary = new HashMap<String, Object>();
        subMaterialLibrary = new HashMap<TriMesh, String>();
        collada_schema_1_4_1Doc doc = new collada_schema_1_4_1Doc();
        try {
            COLLADAType root = new COLLADAType(doc.load(source));
            logger.info("Version: " + root.getversion().getValue());
            processCollada(root);
        } catch (Exception ex) {
            logger.log(Level.WARNING, "Unable to load Collada file. ", ex);
            return;
        }
    }
/**
* returns the names of the controllers that affect this imported model.
*
* @return the list of string values for each controller name.
*/
    public static ArrayList<String> getControllerNames() {
        if (instance == null) {
            return null;
        }
        return instance.controllerNames;
    }

    /**
     * Returns the names of the UV animation controllers referenced by the
     * imported model, or null if no model has been loaded.
     *
     * @return the list of UV controller names, possibly null.
     */
    public static ArrayList<String> getUVControllerNames() {
        if (instance == null) {
            return null;
        }
        return instance.uvControllerNames;
    }

    // Registers a UV controller name; requires load() to have been called.
    public static void addUVControllerName(String name) {
        if (instance.uvControllerNames == null) {
            instance.uvControllerNames = new ArrayList<String>();
        }
        instance.uvControllerNames.add(name);
    }
/**
* returns the names of the skin nodes that are associated with this
* imported model.
*
* @return the names of the skin nodes associated with this model.
*/
    public static ArrayList<String> getSkinNodeNames() {
        if (instance == null) {
            return null;
        }
        return instance.skinNodeNames;
    }

    /**
     * Returns the camera node names associated with this model.
     *
     * @return the list of camera names that are referenced in this file.
     */
    public static ArrayList<String> getCameraNodeNames() {
        if (instance == null) {
            return null;
        }
        return instance.cameraNodeNames;
    }

    // Returns the light node names referenced by the file, or null pre-load.
    public static ArrayList<String> getLightNodeNames() {
        if (instance == null) {
            return null;
        }
        return instance.lightNodeNames;
    }

    // Returns the skeleton names referenced by the file, or null pre-load.
    public static ArrayList<String> getSkeletonNames() {
        if (instance == null) {
            return null;
        }
        return instance.skeletonNames;
    }

    // Returns the geometry names referenced by the file, or null pre-load.
    public static ArrayList<String> getGeometryNames() {
        if (instance == null) {
            return null;
        }
        return instance.geometryNames;
    }

    // Returns the root node of the last loaded model, or null pre-load.
    public static Node getModel() {
        if (instance == null) {
            return null;
        }
        return instance.model;
    }

    // Looks up a SkinNode in the resource library by its COLLADA id.
    public static SkinNode getSkinNode(String id) {
        if (instance == null) {
            return null;
        }
        return (SkinNode) instance.resourceLibrary.get(id);
    }

    // Looks up a CameraNode in the resource library by its COLLADA id.
    public static CameraNode getCameraNode(String id) {
        if (instance == null) {
            return null;
        }
        return (CameraNode) instance.resourceLibrary.get(id);
    }

    // Looks up a LightNode in the resource library by its COLLADA id.
    public static LightNode getLightNode(String id) {
        if (instance == null) {
            return null;
        }
        return (LightNode) instance.resourceLibrary.get(id);
    }
public static Object get(Object id) {
return instance.resourceLibrary.get(id);
}
/**
* places an object into the resource library with a given key. If there is
* an object referenced by this key and it is not the same object that is to
* be added to the library, a warning is issued. If this object already
* exists in the library we do not readd it.
*
* @param key
* the key to obtain the object from the library.
* @param value
* the object to store in the library.
*/
    public static void put(String key, Object value) {
        Object data = instance.resourceLibrary.get(key);
        // Re-adding the identical object is a no-op; only a *different* value
        // under an existing key triggers the warning below.
        if (data != value) {
            if (data != null) {
                if (!squelch) {
                    logger
                            .warning("Key: "
                                    + key
                                    + " already in use. Overriding previous data. This is probably not"
                                    + " desired.");
                }
            }
            instance.resourceLibrary.put(key, value);
        }
    }
    // Looks up a BoneAnimation in the resource library by its COLLADA id.
    public static BoneAnimation getAnimationController(String id) {
        if (instance == null) {
            return null;
        }
        return (BoneAnimation) instance.resourceLibrary.get(id);
    }

    // Looks up a TextureKeyframeController by its COLLADA id.
    public static TextureKeyframeController getUVAnimationController(String id) {
        if (instance == null) {
            return null;
        }
        return (TextureKeyframeController) instance.resourceLibrary.get(id);
    }

    // Looks up a skeleton root Bone by its COLLADA id.
    public static Bone getSkeleton(String id) {
        if (instance == null) {
            return null;
        }
        return (Bone) instance.resourceLibrary.get(id);
    }

    // Looks up a Geometry by its COLLADA id.
    public static Geometry getGeometry(String id) {
        if (instance == null) {
            return null;
        }
        return (Geometry) instance.resourceLibrary.get(id);
    }

    // Releases the singleton importer (and all loaded resources with it).
    public static void cleanUp() {
        if (instance != null) {
            instance.shutdown();
        }
    }

    // Clears the singleton; subsequent static accessors will return null.
    public void shutdown() {
        instance = null;
    }
/**
* Author of the last loaded collada model.
*
* @return the modelAuthor the author of the last loaded model.
*/
    public String getModelAuthor() {
        return modelAuthor;
    }

    /**
     * Revision number of the last loaded collada model.
     *
     * @return the revision number of the last loaded collada model.
     */
    public String getRevision() {
        return revision;
    }

    /**
     * The tool used to build the last collada model.
     *
     * @return the authoring tool name.
     */
    public String getTool() {
        return tool;
    }

    /**
     * The unit scale of the last collada model.
     *
     * @return the number of meters per unit.
     */
    public float getUnitMeter() {
        return unitMeter;
    }

    /**
     * The unit name of the last collada model.
     *
     * @return the unit name (e.g. "meter").
     */
    public String getUnitName() {
        return unitName;
    }
/**
* getAssetInformation returns a string of the collected asset information
* of this COLLADA model. The format is such: <br>
* AUTHOR REVISION<br>
* TOOL<br>
* UNITNAME UNITMETER<br>
* UPAXIS<br>
*
* @return the string representation of the asset information of this file.
*/
public String getAssetInformation() {
return modelAuthor + " " + revision + "\n" + tool + "\n" + unitName
+ " " + unitMeter + "\n" + upAxis;
}
/**
* processCollada takes a COLLADAType object that contains the heirarchical
* information obtained from the XML structure of a COLLADA model. This root
* object is processed and sets the data structures for jME to render the
* model to *this* object.
*
* @param root
* the COLLADAType data structure that contains the COLLADA model
* information.
*/
public void processCollada(COLLADAType root) {
// build the asset information about this model. This can be used
// for debugging information. Only a single asset tag is allowed per
// model.
if (root.hasasset()) {
try {
processAssetInformation(root.getasset());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing asset information", e);
}
}
}
// user defined libraries may exist (for example, uv animations)
if (root.hasextra()) {
try {
ExtraPluginManager.processExtra(root, root.getextra());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing extra information", e);
}
}
}
// builds the animation keyframes and places the controllers into a
// node.
if (root.haslibrary_animations()) {
try {
processAnimationLibrary(root.getlibrary_animations());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing animation information", e);
}
}
}
if (root.haslibrary_animation_clips()) {
if (!squelch) {
logger.warning("Animation Clips not currently supported");
}
}
if (root.haslibrary_cameras()) {
try {
processCameraLibrary(root.getlibrary_cameras());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing camera information", e);
}
}
}
if (root.haslibrary_force_fields()) {
if (!squelch) {
logger.warning("Forcefields not currently supported");
}
}
if (root.haslibrary_lights()) {
try {
processLightLibrary(root.getlibrary_lights());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing light information", e);
}
}
}
// build a map of images that the materials can use in the future.
if (root.haslibrary_images()) {
try {
processImageLibrary(root.getlibrary_images());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing image library information", e);
}
}
}
// build all the material states that can be used later
if (root.haslibrary_materials()) {
try {
processMaterialLibrary(root.getlibrary_materials());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing material library information", e);
}
}
}
// process the library of effects, filling in the appropriate
// states.
if (root.haslibrary_effects()) {
try {
processEffects(root.getlibrary_effects());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing effects library information", e);
}
}
}
// process the geometry information, creating the appropriate Geometry
// object from jME (TriMesh, lines or point).
if (root.haslibrary_geometries()) {
try {
processGeometry(root.getlibrary_geometries());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing geometry library information", e);
}
}
}
// controllers will define the action of another object. For example,
// there may be a controller with a skin tag, defining how a mesh
// is skinning a skeleton.
if (root.haslibrary_controllers()) {
try {
processControllerLibrary(root.getlibrary_controllers());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing controller library information",
e);
}
}
}
if (root.haslibrary_nodes()) {
try {
processNodes(root.getlibrary_nodes());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing nodes library information", e);
}
}
}
// process the visual scene. This scene will define how the geometries
// are structured in the world.
if (root.haslibrary_visual_scenes()) {
try {
processVisualSceneLibrary(root.getlibrary_visual_scenes());
} catch (Exception e) {
if (!squelch) {
logger
.log(
Level.WARNING,
"Error processing visual scene library information",
e);
}
}
}
if (root.haslibrary_physics_scenes()) {
try {
library_physics_scenesType library = root
.getlibrary_physics_scenes();
for (int i = 0; i < library.getphysics_sceneCount(); i++) {
physics_sceneType scene = library.getphysics_sceneAt(i);
put(scene.getid().toString(), scene);
}
} catch (Exception e) {
if (!squelch) {
logger
.log(
Level.WARNING,
"Error processing physics scene library information",
e);
}
}
}
if (root.haslibrary_physics_models()) {
try {
library_physics_modelsType library = root
.getlibrary_physics_models();
for (int i = 0; i < library.getphysics_modelCount(); i++) {
physics_modelType model = library.getphysics_modelAt(i);
put(model.getid().toString(), model);
}
} catch (Exception e) {
if (!squelch) {
logger
.log(
Level.WARNING,
"Error processing physics model library information",
e);
}
}
}
// the scene tag actually takes instances of the visual scene defined
// above
// and attaches them to the model that is returned.
if (root.hasscene()) {
try {
processScene(root.getscene());
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING,
"Error processing scene information", e);
}
}
}
try {
optimizeGeometry();
} catch (Exception e) {
if (!squelch) {
logger.log(Level.WARNING, "Error optimizing geometry", e);
}
}
}
    /**
     * optimizeGeometry walks every Spatial stored in the resource library and,
     * when OPTIMIZE_GEOMETRY is enabled, merges duplicate vertices via
     * GeometryTool.minimizeVerts. Meshes that are skins (their parent is a
     * SkinNode) additionally have their bone-influence tables remapped to the
     * new vertex indices, and the skin is reverted to its bind pose.
     * The optimizeCallBack, if set, is notified of each remapping so external
     * data keyed by vertex index can be updated too.
     */
    private void optimizeGeometry() {
        for (String key : resourceLibrary.keySet()) {
            Object val = resourceLibrary.get(key);
            if (val instanceof Spatial) {
                Spatial spatial = (Spatial) val;
                // criteria for merging two vertices: identical color, normal
                // and texture coordinates
                int options = GeometryTool.MV_SAME_COLORS
                        | GeometryTool.MV_SAME_NORMALS
                        | GeometryTool.MV_SAME_TEXS;
                if (spatial.getParent() instanceof SkinNode) {
                    SkinNode pNode = ((SkinNode) spatial.getParent());
                    pNode.assignSkeletonBoneInfluences();
                    if (spatial instanceof Node) {
                        // a Node of skins: optimize and remap each child mesh
                        Node skins = (Node) spatial;
                        for (int i = 0; i < skins.getQuantity(); i++) {
                            TriMesh mesh = (TriMesh) skins.getChild(i);
                            if (OPTIMIZE_GEOMETRY) {
                                VertMap map = GeometryTool.minimizeVerts(mesh,
                                        options);
                                if (optimizeCallBack != null) {
                                    optimizeCallBack.remapInfluences(mesh, map);
                                }
                                int geomIndex = pNode.getSkins().getChildIndex(
                                        mesh);
                                pNode.remapInfluences(map, geomIndex);
                            }
                        }
                    } else if (spatial instanceof TriMesh) {
                        // a single skinned mesh
                        TriMesh mesh = (TriMesh) spatial;
                        if (OPTIMIZE_GEOMETRY) {
                            VertMap map = GeometryTool.minimizeVerts(mesh,
                                    options);
                            if (optimizeCallBack != null) {
                                optimizeCallBack.remapInfluences(mesh, map);
                            }
                            int geomIndex = pNode.getSkins()
                                    .getChildIndex(mesh);
                            pNode.remapInfluences(map, geomIndex);
                        }
                    }
                    if (OPTIMIZE_GEOMETRY) {
                        // influence offsets are stale after remapping
                        pNode.regenInfluenceOffsets();
                    }
                    pNode.revertToBind();
                } else if (spatial instanceof TriMesh) {
                    // non-skinned mesh: optimize only, no influence remapping
                    TriMesh mesh = (TriMesh) spatial;
                    if (OPTIMIZE_GEOMETRY) {
                        VertMap map = GeometryTool.minimizeVerts(mesh, options);
                        if (optimizeCallBack != null) {
                            optimizeCallBack.remapInfluences(mesh, map);
                        }
                    }
                }
            }
        }
    }
/**
* processLightLibrary
*
* @param libraryLights
* @throws Exception
*/
private void processLightLibrary(library_lightsType libraryLights)
throws Exception {
if (libraryLights.haslight()) {
for (int i = 0; i < libraryLights.getlightCount(); i++) {
processLight(libraryLights.getlightAt(i));
}
}
}
    /**
     * Builds a jME Light (directional, point or spot) from a COLLADA light's
     * technique_common element, wraps it in a LightNode named after the
     * light's id, records the node name in lightNodeNames, and stores the node
     * in the resource library.
     *
     * @param light
     *            the COLLADA light element to convert.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private void processLight(lightType light) throws Exception {
        technique_commonType4 common = light.gettechnique_common();
        Light l = null;
        if (common.hasdirectional()) {
            l = new DirectionalLight();
            l.setDiffuse(getLightColor(common.getdirectional().getcolor()));
        } else if (common.haspoint()) {
            // point lights carry constant/linear/quadratic attenuation
            l = new PointLight();
            l.setDiffuse(getLightColor(common.getpoint().getcolor()));
            l.setAttenuate(true);
            l.setConstant(Float.parseFloat(common.getpoint()
                    .getconstant_attenuation().getValue().toString()));
            l.setLinear(Float.parseFloat(common.getpoint()
                    .getlinear_attenuation().getValue().toString()));
            l.setQuadratic(Float.parseFloat(common.getpoint()
                    .getquadratic_attenuation().getValue().toString()));
        } else if (common.hasspot()) {
            // spot lights add falloff angle and exponent on top of the
            // point-light attenuation values
            l = new SpotLight();
            l.setDiffuse(getLightColor(common.getspot().getcolor()));
            l.setAttenuate(true);
            l.setConstant(Float.parseFloat(common.getspot()
                    .getconstant_attenuation().getValue().toString()));
            l.setLinear(Float.parseFloat(common.getspot()
                    .getlinear_attenuation().getValue().toString()));
            l.setQuadratic(Float.parseFloat(common.getspot()
                    .getquadratic_attenuation().getValue().toString()));
            ((SpotLight) l).setAngle(Float.parseFloat(common.getspot()
                    .getfalloff_angle().getValue().toString()));
            ((SpotLight) l).setExponent(Float.parseFloat(common.getspot()
                    .getfalloff_exponent().getValue().toString()));
        }
        // l stays null for unsupported light kinds; nothing is stored then
        if (l != null) {
            l.getSpecular().set(0, 0, 0, 1);
            if (common.hasambient()) {
                l.setAmbient(getLightColor(common.getambient().getcolor()));
            } else {
                l.getAmbient().set(0, 0, 0, 1);
            }
            l.setEnabled(true);
            LightNode lightNode = new LightNode(light.getid().toString());
            lightNode.setLight(l);
            if (lightNodeNames == null) {
                lightNodeNames = new ArrayList<String>();
            }
            lightNodeNames.add(lightNode.getName());
            put(lightNode.getName(), lightNode);
        }
    }
/**
* getLightColor
*
* @param color
* @return c
*/
private ColorRGBA getLightColor(TargetableFloat3 color) {
StringTokenizer st = new StringTokenizer(color.getValue().toString());
return new ColorRGBA(Float.parseFloat(st.nextToken()), Float
.parseFloat(st.nextToken()), Float.parseFloat(st.nextToken()),
1);
}
/**
* processScene finalizes the model node to be returned as the COLLADA
* model. This looks up visual scene instances that were placed in the
* resource library previously.
*
* @param scene
* the scene to process
* @throws Exception
* thrown if there is an error processing the xml.
*/
public void processScene(sceneType scene) throws Exception {
if (scene.hasinstance_visual_scene()) {
for (int i = 0; i < scene.getinstance_visual_sceneCount(); i++) {
String key = scene.getinstance_visual_sceneAt(i).geturl()
.toString().substring(1);
Node n = (Node) resourceLibrary.get(key);
if (n != null) {
model.attachChild(n);
}
}
}
if (scene.hasinstance_physics_scene()) {
for (int i = 0; i < scene.getinstance_physics_sceneCount(); i++) {
String key = scene.getinstance_physics_sceneAt(i).geturl()
.toString().substring(1);
physics_sceneType physScene = (physics_sceneType) resourceLibrary
.get(key);
if (physScene != null) {
processPhysicsScene(physScene);
}
}
}
}
    /**
     * Processes a physics scene: each referenced physics model is resolved
     * from the resource library and processed, and for every rigid body
     * instance the collision mesh Spatial is attached to its target Node as
     * "COLLISION" user data.
     *
     * @param physScene
     *            the physics scene element to process.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private void processPhysicsScene(physics_sceneType physScene)
            throws Exception {
        if (physScene.hasinstance_physics_model()) {
            for (int i = 0; i < physScene.getinstance_physics_modelCount(); i++) {
                instance_physics_modelType instPhysModel = physScene
                        .getinstance_physics_modelAt(i);
                // the url is a fragment reference; drop the leading '#'
                String key = instPhysModel.geturl().toString().substring(1);
                physics_modelType physModel = (physics_modelType) resourceLibrary
                        .get(key);
                if (physModel != null) {
                    processPhysicsModel(physModel);
                }
                if (instPhysModel.hasinstance_rigid_body()) {
                    // get the Spatial that is the collision mesh
                    String rigidBodyKey = instPhysModel
                            .getinstance_rigid_body().getbody().toString();
                    Spatial collisionMesh = (Spatial) resourceLibrary
                            .get(rigidBodyKey);
                    if (collisionMesh != null) {
                        // get the target
                        String targetKey = instPhysModel
                                .getinstance_rigid_body().gettarget()
                                .toString().substring(1);
                        Node n = (Node) resourceLibrary.get(targetKey);
                        if (n != null) {
                            n.setUserData("COLLISION", collisionMesh);
                        }
                    }
                }
            }
        }
    }
    /**
     * Processes a physics model by storing, for each rigid body, the geometry
     * Spatial referenced by its shape(s) under the rigid body's sid.
     *
     * @param physModel
     *            the physics model element to process.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private void processPhysicsModel(physics_modelType physModel)
            throws Exception {
        // we only care about the shape (which for now will only reference a
        // geometry), so simply store this geometry with the name of the rigid
        // body as the key. Initially, this only supports a single shape per
        // physics model. Will be enhanced first available chance.
        if (physModel.hasrigid_body()) {
            for (int i = 0; i < physModel.getrigid_bodyCount(); i++) {
                rigid_bodyType rigidBody = physModel.getrigid_bodyAt(i);
                String id = rigidBody.getsid().toString();
                if (rigidBody.hastechnique_common()) {
                    if (rigidBody.gettechnique_common().hasshape()) {
                        for (int j = 0; j < rigidBody.gettechnique_common()
                                .getshapeCount(); j++) {
                            shapeType2 shape = rigidBody.gettechnique_common()
                                    .getshapeAt(j);
                            if (shape.hasinstance_geometry()) {
                                // the url is a fragment reference; drop '#'
                                String key = shape.getinstance_geometry()
                                        .geturl().toString().substring(1);
                                Spatial s = (Spatial) resourceLibrary.get(key);
                                if (s != null) {
                                    // NOTE: each shape overwrites the previous
                                    // entry for this rigid body's id
                                    put(id, s);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
/**
* processSource builds resource objects TIME, TRANSFORM and Name array for
* the interpolation type.
*
* @param source
* the source to process
* @throws Exception
* exception thrown if there is a problem with
*/
private void processSource(sourceType source) throws Exception {
if (source.hasfloat_array()) {
if (source.hastechnique_common()) {
float[] floatArray = processFloatArray(source.getfloat_array());
paramType3 p = source.gettechnique_common().getaccessor()
.getparam();
if ("TIME".equals(p.getname().toString())) {
put(source.getid().toString(), floatArray);
} else if ("float4x4".equals(p.gettype().toString())) {
Matrix4f[] transforms = new Matrix4f[floatArray.length / 16];
for (int i = 0; i < transforms.length; i++) {
transforms[i] = new Matrix4f();
float[] data = new float[16];
for (int x = 0; x < 16; x++) {
data[x] = floatArray[(16 * i) + x];
}
transforms[i].set(data, true); // collada matrices are
// in row order.
}
put(source.getid().toString(), transforms);
} else if ("ROTX.ANGLE".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] xRot = new float[floatArray.length];
System.arraycopy(floatArray, 0, xRot, 0, xRot.length);
put(source.getid().toString(), xRot);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("ROTY.ANGLE".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] yRot = new float[floatArray.length];
System.arraycopy(floatArray, 0, yRot, 0, yRot.length);
put(source.getid().toString(), yRot);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("ROTZ.ANGLE".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] zRot = new float[floatArray.length];
System.arraycopy(floatArray, 0, zRot, 0, zRot.length);
put(source.getid().toString(), zRot);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("TRANS.X".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] xTrans = new float[floatArray.length];
System.arraycopy(floatArray, 0, xTrans, 0,
xTrans.length);
put(source.getid().toString(), xTrans);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("TRANS.Y".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] yTrans = new float[floatArray.length];
System.arraycopy(floatArray, 0, yTrans, 0,
yTrans.length);
put(source.getid().toString(), yTrans);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("TRANS.Z".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] zTrans = new float[floatArray.length];
System.arraycopy(floatArray, 0, zTrans, 0,
zTrans.length);
put(source.getid().toString(), zTrans);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else {
if (!squelch) {
logger.warning(p.getname() + " not yet supported "
+ "for animation source.");
}
}
}
} else if (source.hasName_array()) {
int[] interpolation = processInterpolationArray(source
.getName_array());
put(source.getid().toString(), interpolation);
}
}
/**
* processInterpolationArray builds a int array that corresponds to the
* interpolation types defined in BoneAnimationController.
*
* @param array
* the array to process.
* @return the int array.
* @throws Exception
* thrown if there is a problem processing this xml document.
*/
private int[] processInterpolationArray(Name_arrayType array)
throws Exception {
StringTokenizer st = new StringTokenizer(array.getValue().toString());
int[] out = new int[array.getcount().intValue()];
String token = null;
for (int i = 0; i < out.length; i++) {
token = st.nextToken();
if ("LINEAR".equals(token)) {
out[i] = BoneAnimation.LINEAR;
} else if ("BEZIER".equals(token)) {
out[i] = BoneAnimation.BEZIER;
}
}
return out;
}
/**
* processes a float array object. The floats are represented as a String
* with the values delimited by a space.
*
* @param array
* the array to parse.
* @return the float array to return.
* @throws Exception
* thrown if there is a problem processing the XML.
*/
private float[] processFloatArray(float_arrayType array) throws Exception {
StringTokenizer st = new StringTokenizer(array.getValue().toString());
float[] out = new float[array.getcount().intValue()];
for (int i = 0; i < out.length; i++) {
out[i] = Float.parseFloat(st.nextToken());
}
return out;
}
    /**
     * processAssetInformation will store the information about the collada file
     * for future reference. This will include the author, the tool used, the
     * revision, the unit information, and the defined up axis. Each value is
     * copied into the corresponding instance field only when present in the
     * asset element.
     *
     * @param asset
     *            the assetType for the root of the model.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private void processAssetInformation(assetType asset) throws Exception {
        if (asset.hascontributor()) {
            if (asset.getcontributor().hasauthor()) {
                modelAuthor = asset.getcontributor().getauthor().toString();
            }
            if (asset.getcontributor().hasauthoring_tool()) {
                tool = asset.getcontributor().getauthoring_tool().toString();
            }
        }
        if (asset.hasrevision()) {
            revision = asset.getrevision().toString();
        }
        if (asset.hasunit()) {
            // unit name plus its size in meters (e.g. "centimeter" / 0.01)
            unitName = asset.getunit().getname().toString();
            unitMeter = asset.getunit().getmeter().floatValue();
        }
        if (asset.hasup_axis()) {
            // one of X_UP / Y_UP / Z_UP; consulted later (e.g. processCamera)
            upAxis = asset.getup_axis().getValue();
        }
    }
/**
* processAnimationLibrary will store the individual
* BoneAnimationControllers in the resource library for future use.
* Animations at this level can be considered top level animations that
* should be called from this level. These animations may contain children
* animations the top level animation is responsible for calling.
*
* @param animLib
* the library of animations to parse.
*/
private void processAnimationLibrary(library_animationsType animLib)
throws Exception {
if (animLib.hasanimation()) {
if (controllerNames == null) {
controllerNames = new ArrayList<String>();
}
for (int i = 0; i < animLib.getanimationCount(); i++) {
BoneAnimation bac = processAnimation(animLib.getanimationAt(i));
bac.setInterpolate(false);
bac.optimize(true);
put(bac.getName(), bac);
controllerNames.add(bac.getName());
if (animLib.getanimationAt(i).hasextra()) {
for (int j = 0; j < animLib.getanimationAt(i)
.getextraCount(); j++) {
logger.info("Processing extra in animation library.");
ExtraPluginManager.processExtra(bac, animLib
.getanimationAt(i).getextraAt(j));
}
}
}
}
}
    /**
     * The animation element categorizes an animation hierarchy with each
     * controller defining the animation's keyframe and sampler functions. These
     * interact on single bones, where a collection of controllers will build up
     * a complete animation. Sampler inputs are dispatched by semantic: INPUT
     * supplies the key times, OUTPUT the transforms (either full matrices or
     * single-axis float channels), and INTERPOLATION the per-frame
     * interpolation types. Child animations are processed recursively.
     *
     * @param animation
     *            the animation to parse.
     * @return the BoneAnimation built from this element (and its children).
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private BoneAnimation processAnimation(animationType animation)
            throws Exception {
        BoneAnimation out = new BoneAnimation(animation.getid().toString());
        BoneTransform bt = new BoneTransform();
        out.setInterpolate(true);
        // sources must be processed first so sampler inputs can resolve them
        // from the resource library below
        if (animation.hassource()) {
            for (int i = 0; i < animation.getsourceCount(); i++) {
                processSource(animation.getsourceAt(i));
            }
        }
        // single-axis channels collected from OUTPUT inputs; combined into
        // matrices at the end if no full matrix transforms were provided
        float[] rotx = null;
        float[] roty = null;
        float[] rotz = null;
        float[] transx = null;
        float[] transy = null;
        float[] transz = null;
        boolean transformsSet = false;
        if (animation.hassampler()) {
            for (int j = 0; j < animation.getsamplerCount(); j++) {
                for (int i = 0; i < animation.getsamplerAt(j).getinputCount(); i++) {
                    if ("INPUT".equals(animation.getsamplerAt(j).getinputAt(i)
                            .getsemantic().toString())) {
                        // INPUT: key frame times
                        String key = animation.getsamplerAt(j).getinputAt(i)
                                .getsource().toString().substring(1);
                        float[] times = (float[]) resourceLibrary.get(key);
                        if (times == null) {
                            logger.warning("Animation source invalid: " + key);
                            continue;
                        }
                        out.setTimes(times);
                        out.setStartFrame(0);
                        out.setEndFrame(times.length - 1);
                    } else if ("OUTPUT".equals(animation.getsamplerAt(j)
                            .getinputAt(i).getsemantic().toString())) {
                        // OUTPUT: either full Matrix4f[] transforms or a
                        // single-axis float channel
                        String key = animation.getsamplerAt(j).getinputAt(i)
                                .getsource().toString().substring(1);
                        Object object = resourceLibrary.get(key);
                        if (object == null) {
                            logger.warning("Animation source invalid: " + key);
                            continue;
                        }
                        if (object instanceof Matrix4f[]) {
                            Matrix4f[] transforms = (Matrix4f[]) object;
                            bt.setTransforms(transforms);
                            transformsSet = true;
                        } else if (object instanceof float[]) {
                            // Another bit of a hack that should be improved:
                            // to put the float arrays into the BoneTransform,
                            // we need to know what angle it is changing,
                            // I see no way to determine other than looking
                            // at the source name.
                            if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Rotate-X-")) {
                                rotx = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Rotate-Y-")) {
                                roty = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Rotate-Z-")) {
                                rotz = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Translate-X-")) {
                                transx = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Translate-Y-")) {
                                transy = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Translate-Z-")) {
                                transz = (float[]) object;
                            } else {
                                if (!squelch) {
                                    logger
                                            .warning("Not sure what this sampler is.");
                                }
                            }
                        }
                    } else if ("INTERPOLATION".equals(animation.getsamplerAt(j)
                            .getinputAt(i).getsemantic().toString())) {
                        // INTERPOLATION: per-frame interpolation types
                        String key = animation.getsamplerAt(j).getinputAt(i)
                                .getsource().toString().substring(1);
                        int[] interpolation = (int[]) resourceLibrary.get(key);
                        if (interpolation == null) {
                            logger.warning("Animation source invalid: " + key);
                            continue;
                        }
                        out.setInterpolationTypes(interpolation);
                    }
                }
            }
            if (!transformsSet) {
                // compose the separate single-axis channels into matrices
                Matrix4f[] transforms = generateTransforms(rotx, roty, rotz,
                        transx, transy, transz);
                if (transforms != null) {
                    bt.setTransforms(transforms);
                }
            }
        }
        if (animation.haschannel()) {
            // the channel target ("boneId/transform") names the bone this
            // BoneTransform drives
            String target = animation.getchannel().gettarget().toString();
            if (target.contains("/")) {
                String key = target.substring(0, animation.getchannel()
                        .gettarget().toString().indexOf('/'));
                bt.setBoneId(key);
                Bone b = (Bone) resourceLibrary.get(key);
                if (b != null) {
                    bt.setBone(b);
                }
                out.addBoneTransforms(bt);
            }
        }
        // if the animation has children attach them
        if (animation.hasanimation()) {
            for (int i = 0; i < animation.getanimationCount(); i++) {
                out.addBoneAnimation(processAnimation(animation
                        .getanimationAt(i)));
            }
        }
        return out;
    }
private Matrix4f[] generateTransforms(float[] rotx, float[] roty,
float[] rotz, float[] transx, float[] transy, float[] transz) {
Quaternion rot = new Quaternion();
int index = 0;
if (rotx != null) {
index = rotx.length;
} else if (transx != null) {
index = transx.length;
}
Matrix4f[] transforms = new Matrix4f[index];
float[] angles = new float[3];
for (int i = 0; i < transforms.length; i++) {
angles[0] = angles[1] = angles[2] = 0;
if (rotx != null) {
angles[0] = rotx[i];
}
if (roty != null) {
angles[1] = roty[i];
}
if (rotz != null) {
angles[2] = rotz[i];
}
rot.fromAngles(angles);
transforms[i] = rot.toRotationMatrix(new Matrix4f());
if (transx != null) {
transforms[i].m03 = transx[i];
}
if (transy != null) {
transforms[i].m13 = transy[i];
}
if (transz != null) {
transforms[i].m23 = transz[i];
}
}
return transforms;
}
    /**
     * processCameraLibrary iterates the cameras defined in the library. The
     * per-camera processing is currently disabled (the processCamera call
     * below is commented out) — processCamera creates a jME Camera via the
     * renderer, which per the FIXME in that method must only be done from a
     * GL thread.
     *
     * @param libraryCam
     *            the library of cameras to walk.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private void processCameraLibrary(library_camerasType libraryCam)
            throws Exception {
        if (libraryCam.hascamera()) {
            for (int i = 0; i < libraryCam.getcameraCount(); i++) {
                // processCamera(libraryCam.getcameraAt(i));
            }
        }
    }
    /**
     * Builds a jME Camera from a COLLADA camera element: either an
     * orthographic frustum (from xmag/ymag, deriving the missing magnitude
     * from the aspect ratio when only one is given) or a perspective frustum
     * (from xfov/yfov, derived the same way). The camera is wrapped in a
     * CameraNode named after the camera's id, given an up-axis-dependent
     * rotation, recorded in cameraNodeNames, and stored in the resource
     * library.
     *
     * @param camera
     *            the COLLADA camera element to convert.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private void processCamera(cameraType camera) throws Exception {
        opticsType optics = camera.getoptics();
        technique_commonType2 common = optics.gettechnique_common();
        Renderer r = DisplaySystem.getDisplaySystem().getRenderer();
        int width = r.getWidth();
        int height = r.getHeight();
        // FIXME: THIS LINE IS SUPPOSED TO ONLY BE DONE IN A GL THREAD.
        Camera c = r.createCamera(width, height);
        // renderer defaults, overridden below when the file specifies values
        float near = c.getFrustumNear();
        float far = c.getFrustumFar();
        float aspect = (float) width / (float) height;
        if (common.hasorthographic()) {
            orthographicType ortho = common.getorthographic();
            float xmag = 1.0f;
            float ymag = 1.0f;
            if (ortho.hasznear()) {
                near = Float.parseFloat(ortho.getznear().getValue().toString());
            }
            if (ortho.haszfar()) {
                far = Float.parseFloat(ortho.getzfar().getValue().toString());
            }
            if (ortho.hasxmag() && ortho.hasymag()) {
                xmag = Float.parseFloat(ortho.getxmag().getValue().toString());
                ymag = Float.parseFloat(ortho.getymag().getValue().toString());
            } else {
                // only one magnitude given: derive the other from the aspect
                // ratio (file-specified or window-derived)
                if (ortho.hasaspect_ratio()) {
                    aspect = Float.parseFloat(ortho.getaspect_ratio()
                            .getValue().toString());
                }
                if (ortho.hasxmag()) {
                    assert (!ortho.hasymag());
                    xmag = Float.parseFloat(ortho.getxmag().getValue()
                            .toString());
                    ymag = xmag / aspect;
                } else {
                    assert (ortho.hasymag());
                    ymag = Float.parseFloat(ortho.getymag().getValue()
                            .toString());
                    xmag = ymag * aspect;
                }
            }
            c.setParallelProjection(true);
            c.setFrustum(near, far, -xmag, xmag, -ymag, ymag);
        } else {
            assert (common.hasperspective());
            perspectiveType persp = common.getperspective();
            float xfov = 1.0f;
            float yfov = 1.0f;
            if (persp.hasznear()) {
                near = Float.parseFloat(persp.getznear().getValue().toString());
            }
            if (persp.haszfar()) {
                far = Float.parseFloat(persp.getzfar().getValue().toString());
            }
            if (persp.hasxfov() && persp.hasyfov()) {
                xfov = Float.parseFloat(persp.getxfov().getValue().toString());
                yfov = Float.parseFloat(persp.getyfov().getValue().toString());
            } else {
                // only one field of view given: derive the other from the
                // aspect ratio
                if (persp.hasaspect_ratio()) {
                    aspect = Float.parseFloat(persp.getaspect_ratio()
                            .getValue().toString());
                }
                if (persp.hasxfov()) {
                    assert (!persp.hasyfov());
                    xfov = Float.parseFloat(persp.getxfov().getValue()
                            .toString());
                    yfov = xfov / aspect;
                } else {
                    assert (persp.hasyfov());
                    yfov = Float.parseFloat(persp.getyfov().getValue()
                            .toString());
                    xfov = yfov * aspect;
                }
            }
            c.setParallelProjection(false);
            c.setFrustumPerspective(yfov, aspect, near, far);
        }
        if (cameraNodeNames == null) {
            cameraNodeNames = new ArrayList<String>();
        }
        CameraNode nodeCamera = new CameraNode(camera.getid().toString(), c);
        // cameras are odd in that their rotation is typically exported
        // backwards from the direction that they're looking in the scene
        if ("X_UP".equals(upAxis))
            nodeCamera.setLocalRotation(new Quaternion(1, 0, 0, 0));
        else if ("Y_UP".equals(upAxis))
            nodeCamera.setLocalRotation(new Quaternion(0, 1, 0, 0));
        else if ("Z_UP".equals(upAxis))
            nodeCamera.setLocalRotation(new Quaternion(0, 0, 1, 0));
        cameraNodeNames.add(nodeCamera.getName());
        put(nodeCamera.getName(), nodeCamera);
    }
/**
* processImageLibrary will build a collection of image filenames. The image
* tag contains the full directory path of the image from the artists
* working directory. Therefore, the directory will be stripped off leaving
* only the filename. This filename will be associated with a id key that
* can be obtained by the material that wishes to make use of it.
*
* @param libraryImg
* the library of images (name/image pair).
*/
private void processImageLibrary(library_imagesType libraryImg)
throws Exception {
if (libraryImg.hasimage()) {
for (int i = 0; i < libraryImg.getimageCount(); i++) {
processImage(libraryImg.getimageAt(i));
}
}
}
/**
* processImage takes an image type and places the necessary information in
* the resource library.
*
* @param image
* the image to process.
* @throws Exception
* thrown if there is a problem with the imagetype.
*/
private void processImage(imageType image) throws Exception {
if (image.hasdata()) {
if (!squelch) {
logger.warning("Raw data images not supported.");
}
}
if (image.hasinit_from()) {
put(image.getid().toString(), image.getinit_from().toString());
}
}
/**
* processMaterialLibrary will build a collection (Map) of MaterialStates,
* with the defined material id as the key in the Map. This map and
* corresponding key will then be used to apply materials to the appropriate
* node. The library only defines the id of the material and the url of the
* instance effect that defines its qualities, it won't be until the
* library_effects tag is processed that the material state information is
* filled in.
*
* @param libraryMat
* the material library type.
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processMaterialLibrary(library_materialsType libraryMat)
throws Exception {
if (libraryMat.hasmaterial()) {
for (int i = 0; i < libraryMat.getmaterialCount(); i++) {
processMaterial(libraryMat.getmaterialAt(i));
}
}
}
/**
* process Material which typically contains an id and a reference URL to an
* effect.
*
* @param mat
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processMaterial(materialType mat) throws Exception {
ColladaMaterial material = new ColladaMaterial();
String url = null;
if (mat.hasinstance_effect()) {
url = mat.getinstance_effect().geturl().toString();
if (url.startsWith("#")) {
url = url.substring(1);
}
put(url, material);
put(mat.getid().toString(), url);
}
if (mat.hasextra()) {
ExtraPluginManager.processExtra(material, mat.getextra());
}
}
/**
* processEffects will build effects as defined by the techinque. The
* appropriate render state will be obtained from the materialMap hashmap
* based on the the name of the effect. Currently, the id of the effect is
* ignored as it is directly tied to the material id. However, in the future
* this may require support.
*
* @param libraryEffects
* the library of effects to build.
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processEffects(library_effectsType libraryEffects)
throws Exception {
if (libraryEffects.haseffect()) {
for (int i = 0; i < libraryEffects.geteffectCount(); i++) {
String key = libraryEffects.geteffectAt(i).getid().toString();
ColladaMaterial mat = (ColladaMaterial) resourceLibrary
.get(key);
if (mat != null) {
fillMaterial(libraryEffects.geteffectAt(i), mat);
}
}
}
}
/**
* fillMaterial will use the provided effectType to generate the material
* setting for the collada model. The effect can handle both programmable
* pipelines and fixed pipelines. This is defined by what sort of profile it
* is using (profile_COMMON, profile_GLSL, profile_CG). Currently,
* profile_CG is ignored. There may be multiple profiles, describing a path
* of fallbacks. Currently, only one profile will be supported at a time.<br>
* <br>
* There is a possibility that each profile may have multiple techniques,
* defining different materials for different situations, i.e. LOD. This
* version of the loader will assume a single technique.
*
* @param effect
* the collada effect to process.
* @param mat
* the ColladaMaterial that will hold the RenderStates needed to
* express this material.
* @throws Exception
* thrown if there is a problem processing the file.
*/
private void fillMaterial(effectType effect, ColladaMaterial mat)
throws Exception {
// process the fixed pipeline information
if (effect.hasprofile_COMMON()) {
for (int i = 0; i < effect.getprofile_COMMON().getnewparamCount(); i++) {
processNewParam(effect.getprofile_COMMON().getnewparamAt(i),
mat);
}
for (int i = 0; i < effect.getprofile_COMMON().gettechniqueCount(); i++) {
processTechniqueCOMMON(effect.getprofile_COMMON()
.gettechniqueAt(i), mat);
}
if (effect.getprofile_COMMON().hasextra()) {
for (int i = 0; i < effect.getprofile_COMMON().getextraCount(); i++) {
ExtraPluginManager.processExtra(mat, effect
.getprofile_COMMON().getextraAt(i));
}
}
}
// process the programmable pipeline
// profile_GLSL defines all of OpenGL states as well as GLSL shaders.
if (effect.hasprofile_GLSL()) {
for (int i = 0; i < effect.getprofile_GLSL().gettechniqueCount(); i++) {
processTechniqueGLSL(
effect.getprofile_GLSL().gettechniqueAt(i), mat);
}
}
}
/**
* processNewParam sets specific properties of a material (surface
* properties, sampler properties, etc).
*
* @param param
* the xml element of the new parameter.
* @param mat
* the material to store the parameters in.
* @throws Exception
* thrown if there is a problem reading the xml.
*/
private void processNewParam(common_newparam_type param, ColladaMaterial mat)
throws Exception {
if (param.hassampler2D()) {
processSampler2D(param.getsid().toString(), param.getsampler2D(),
mat);
}
if (param.hassurface()) {
processSurface(param.getsid().toString(), param.getsurface());
}
}
/**
* processes images information, defining the min and mag filter for
* mipmapping.
*
* @param id
* the id on the sampler
* @param sampler
* the sampler xml element.
* @param mat
* the material to store the values in.
* @throws Exception
* thrown if there is a problem reading the file.
*/
private void processSampler2D(String id, fx_sampler2D_common sampler,
ColladaMaterial mat) throws Exception {
if (sampler.hasmagfilter()) {
mat.magFilter = sampler.getmagfilter().getValue();
}
if (sampler.hasminfilter()) {
mat.minFilter = sampler.getminfilter().getValue();
}
mat.wrapS = "WRAP";
mat.wrapT = "WRAP";
put(id, sampler.getsource().getValue());
}
    /**
     * Stores a surface parameter's image reference (its init_from value) in
     * the resource library under the given id so samplers can resolve it.
     *
     * @param id
     *            the sid of the surface parameter, used as the lookup key.
     * @param surface
     *            the surface xml element.
     * @throws Exception
     *             thrown if there is a problem reading the xml.
     */
    private void processSurface(String id, fx_surface_common surface)
            throws Exception {
        put(id, surface.getinit_from().getValue().toString());
    }
/**
* processes rendering information defined to be GLSL standard, which
* includes all OpenGL state information and GLSL shader information.
*
* @param technique
* @param mat
* @throws Exception
*/
private void processTechniqueGLSL(techniqueType4 technique,
ColladaMaterial mat) throws Exception {
if (technique.haspass()) {
for (int i = 0; i < technique.getpassCount(); i++) {
processPassGLSL(technique.getpassAt(i), mat);
}
}
}
/**
 * Translates the OpenGL render states of a GLSL profile pass into jME
 * RenderStates on the given Collada material.
 * <p>
 * Only a single pass is supported: when multiple passes are defined under
 * a profile_GLSL their states are combined into a single pass, and a
 * render state defined in a later pass overrides the same state from an
 * earlier pass.
 *
 * @param pass the GLSL pass whose render states are read.
 * @param mat the material that receives the generated render states.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processPassGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    applyClipStatesGLSL(pass, mat);
    applyColorMaskGLSL(pass, mat);
    applyDepthStatesGLSL(pass, mat);
    applyColorMaterialGLSL(pass, mat);
    applyFogStatesGLSL(pass, mat);
    applyBlendStatesGLSL(pass, mat);
    applyCullStatesGLSL(pass, mat);
    applyShadeModelGLSL(pass, mat);
    applyMaterialColorsGLSL(pass, mat);
    applyStencilStatesGLSL(pass, mat);
}

/** Handles clip_plane and clip_plane_enable. */
private void applyClipStatesGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hasclip_plane()) {
        ClipState cs = ensureClipState(mat);
        if (pass.getclip_plane().hasindex()
                && pass.getclip_plane().hasvalue2()) {
            int index = pass.getclip_plane().getindex().intValue();
            float[] clip = parseFloats(
                    pass.getclip_plane().getvalue2().toString(), 4);
            cs.setClipPlaneEquation(index, clip[0], clip[1], clip[2],
                    clip[3]);
        }
    }
    if (pass.hasclip_plane_enable()) {
        ClipState cs = ensureClipState(mat);
        if (pass.getclip_plane_enable().hasindex()
                && pass.getclip_plane_enable().hasvalue2()) {
            // BUGFIX: the index was previously read from clip_plane instead
            // of clip_plane_enable (copy-paste), which threw an NPE when no
            // clip_plane element was present.
            int index = pass.getclip_plane_enable().getindex().intValue();
            cs.setEnableClipPlane(index, pass.getclip_plane_enable()
                    .getvalue2().booleanValue());
        }
    }
}

/** Handles color_mask (per-channel framebuffer write mask). */
private void applyColorMaskGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (!pass.hascolor_mask()) {
        return;
    }
    ColorMaskState cms = ensureColorMaskState(mat);
    if (pass.getcolor_mask().hasvalue2()) {
        StringTokenizer st = new StringTokenizer(pass.getcolor_mask()
                .getvalue2().toString());
        cms.setRed(Boolean.parseBoolean(st.nextToken()));
        cms.setGreen(Boolean.parseBoolean(st.nextToken()));
        cms.setBlue(Boolean.parseBoolean(st.nextToken()));
        cms.setAlpha(Boolean.parseBoolean(st.nextToken()));
    }
}

/** Handles depth_func, depth_mask and depth_test_enable. */
private void applyDepthStatesGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hasdepth_func()) {
        ZBufferState zbs = ensureZBufferState(mat);
        if (pass.getdepth_func().hasvalue2()) {
            ZBufferState.TestFunction func = depthTestFunction(pass
                    .getdepth_func().getvalue2().toString());
            if (func != null) {
                zbs.setFunction(func);
            }
        }
    }
    if (pass.hasdepth_mask()) {
        ZBufferState zbs = ensureZBufferState(mat);
        if (pass.getdepth_mask().hasvalue2()) {
            zbs.setWritable(pass.getdepth_mask().getvalue2()
                    .booleanValue());
        }
    }
    if (pass.hasdepth_test_enable()) {
        ZBufferState zbs = ensureZBufferState(mat);
        if (pass.getdepth_test_enable().hasvalue2()) {
            zbs.setEnabled(pass.getdepth_test_enable().getvalue2()
                    .booleanValue());
        }
    }
}

/** Handles color_material (tracked face and tracking mode). */
private void applyColorMaterialGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (!pass.hascolor_material()) {
        return;
    }
    MaterialState ms = ensureMaterialState(mat);
    if (pass.getcolor_material().hasface()) {
        String face = pass.getcolor_material().getface().getvalue2()
                .toString();
        if ("FRONT".equals(face)) {
            ms.setMaterialFace(MaterialState.MaterialFace.Front);
        } else if ("BACK".equals(face)) {
            ms.setMaterialFace(MaterialState.MaterialFace.Back);
        } else if ("FRONT_AND_BACK".equals(face)) {
            ms.setMaterialFace(MaterialState.MaterialFace.FrontAndBack);
        }
    }
    if (pass.getcolor_material().hasmode()) {
        String mode = pass.getcolor_material().getmode().getvalue2()
                .toString();
        if ("AMBIENT".equals(mode)) {
            ms.setColorMaterial(MaterialState.ColorMaterial.Ambient);
        } else if ("EMISSION".equals(mode)) {
            ms.setColorMaterial(MaterialState.ColorMaterial.Emissive);
        } else if ("DIFFUSE".equals(mode)) {
            ms.setColorMaterial(MaterialState.ColorMaterial.Diffuse);
        } else if ("SPECULAR".equals(mode)) {
            ms.setColorMaterial(MaterialState.ColorMaterial.Specular);
        } else if ("AMBIENT_AND_DIFFUSE".equals(mode)) {
            ms.setColorMaterial(MaterialState.ColorMaterial.AmbientAndDiffuse);
        }
    }
}

/** Handles fog_color, fog_density, fog_enable, fog_end, fog_mode and fog_start. */
private void applyFogStatesGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hasfog_color()) {
        FogState fs = ensureFogState(mat);
        if (pass.getfog_color().hasvalue2()) {
            float[] c = parseFloats(pass.getfog_color().getvalue2()
                    .toString(), 4);
            fs.setColor(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasfog_density()) {
        FogState fs = ensureFogState(mat);
        if (pass.getfog_density().hasvalue2()) {
            fs.setDensity(pass.getfog_density().getvalue2().floatValue());
        }
    }
    if (pass.hasfog_enable()) {
        FogState fs = ensureFogState(mat);
        if (pass.getfog_enable().hasvalue2()) {
            fs.setEnabled(pass.getfog_enable().getvalue2().booleanValue());
        }
    }
    if (pass.hasfog_end()) {
        FogState fs = ensureFogState(mat);
        if (pass.getfog_end().hasvalue2()) {
            fs.setEnd(pass.getfog_end().getvalue2().floatValue());
        }
    }
    if (pass.hasfog_mode()) {
        FogState fs = ensureFogState(mat);
        if (pass.getfog_mode().hasvalue2()) {
            String mode = pass.getfog_mode().getvalue2().toString();
            if ("LINEAR".equals(mode)) {
                fs.setDensityFunction(FogState.DensityFunction.Linear);
            } else if ("EXP".equals(mode)) {
                fs.setDensityFunction(FogState.DensityFunction.Exponential);
            } else if ("EXP2".equals(mode)) {
                fs.setDensityFunction(FogState.DensityFunction.ExponentialSquared);
            }
        }
    }
    if (pass.hasfog_start()) {
        FogState fs = ensureFogState(mat);
        if (pass.getfog_start().hasvalue2()) {
            fs.setStart(pass.getfog_start().getvalue2().floatValue());
        }
    }
}

/** Handles alpha_test_enable, alpha_func, blend_enable and blend_func. */
private void applyBlendStatesGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hasalpha_test_enable()) {
        BlendState as = ensureBlendState(mat);
        as.setTestEnabled(pass.getalpha_test_enable().getvalue2()
                .booleanValue());
    }
    if (pass.hasalpha_func()) {
        BlendState as = ensureBlendState(mat);
        if (pass.getalpha_func().hasfunc()) {
            BlendState.TestFunction func = alphaTestFunction(pass
                    .getalpha_func().getfunc().getvalue2().toString());
            if (func != null) {
                as.setTestFunction(func);
            }
        }
        if (pass.getalpha_func().hasvalue2()) {
            as.setReference(pass.getalpha_func().getvalue2().getvalue2()
                    .floatValue());
        }
    }
    if (pass.hasblend_enable()) {
        BlendState as = ensureBlendState(mat);
        as.setBlendEnabled(pass.getblend_enable().getvalue2()
                .booleanValue());
    }
    if (pass.hasblend_func()) {
        BlendState as = ensureBlendState(mat);
        if (pass.getblend_func().hasdest()) {
            BlendState.DestinationFunction dest = blendDestinationFunction(
                    pass.getblend_func().getdest().getvalue2().toString());
            if (dest != null) {
                as.setDestinationFunction(dest);
            }
        }
        if (pass.getblend_func().hassrc()) {
            BlendState.SourceFunction src = blendSourceFunction(pass
                    .getblend_func().getsrc().getvalue2().toString());
            if (src != null) {
                as.setSourceFunction(src);
            }
        }
    }
}

/** Handles cull_face_enable and cull_face. */
private void applyCullStatesGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hascull_face_enable()) {
        CullState cs = ensureCullState(mat);
        cs.setEnabled(pass.getcull_face_enable().getvalue2()
                .booleanValue());
    }
    if (pass.hascull_face()) {
        CullState cs = ensureCullState(mat);
        if (pass.getcull_face().hasvalue2()) {
            String face = pass.getcull_face().getvalue2().toString();
            if ("FRONT".equals(face)) {
                cs.setCullFace(CullState.Face.Front);
            } else if ("BACK".equals(face)) {
                cs.setCullFace(CullState.Face.Back);
            } else if ("FRONT_AND_BACK".equals(face)) {
                cs.setCullFace(CullState.Face.FrontAndBack);
            }
        }
    }
}

/** Handles shade_model (FLAT or SMOOTH). */
private void applyShadeModelGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (!pass.hasshade_model()) {
        return;
    }
    ShadeState ss = ensureShadeState(mat);
    if (pass.getshade_model().hasvalue2()) {
        String shade = pass.getshade_model().getvalue2().toString();
        if ("FLAT".equals(shade)) {
            ss.setShadeMode(ShadeState.ShadeMode.Flat);
        } else if ("SMOOTH".equals(shade)) {
            ss.setShadeMode(ShadeState.ShadeMode.Smooth);
        }
    }
}

/** Handles material_ambient/diffuse/emission/shininess/specular. */
private void applyMaterialColorsGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hasmaterial_ambient()) {
        MaterialState ms = ensureMaterialState(mat);
        if (pass.getmaterial_ambient().hasvalue2()) {
            float[] c = parseFloats(pass.getmaterial_ambient().getvalue2()
                    .toString(), 4);
            ms.setAmbient(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasmaterial_diffuse()) {
        MaterialState ms = ensureMaterialState(mat);
        if (pass.getmaterial_diffuse().hasvalue2()) {
            float[] c = parseFloats(pass.getmaterial_diffuse().getvalue2()
                    .toString(), 4);
            ms.setDiffuse(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasmaterial_emission()) {
        MaterialState ms = ensureMaterialState(mat);
        if (pass.getmaterial_emission().hasvalue2()) {
            float[] c = parseFloats(pass.getmaterial_emission().getvalue2()
                    .toString(), 4);
            ms.setEmissive(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasmaterial_shininess()) {
        MaterialState ms = ensureMaterialState(mat);
        if (pass.getmaterial_shininess().hasvalue2()) {
            ms.setShininess(pass.getmaterial_shininess().getvalue2()
                    .floatValue());
        }
    }
    if (pass.hasmaterial_specular()) {
        MaterialState ms = ensureMaterialState(mat);
        if (pass.getmaterial_specular().hasvalue2()) {
            float[] c = parseFloats(pass.getmaterial_specular().getvalue2()
                    .toString(), 4);
            ms.setSpecular(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
}

/** Handles stencil_func, stencil_op and stencil_test_enable. */
private void applyStencilStatesGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hasstencil_func()) {
        // BUGFIX: freshly created StencilStates were never attached to the
        // material (see the old FIXME); ensureStencilState now does so.
        StencilState ss = ensureStencilState(mat);
        if (pass.getstencil_func().hasfunc()) {
            // NOTE(review): unlike alpha_func, the original read
            // getfunc().toString() rather than getfunc().getvalue2()
            // .toString(); preserved as-is — verify against the generated
            // schema classes.
            StencilState.StencilFunction func = stencilFunction(pass
                    .getstencil_func().getfunc().toString());
            if (func != null) {
                ss.setStencilFunction(func);
            }
        }
        if (pass.getstencil_func().hasref()) {
            ss.setStencilReference(pass.getstencil_func().getref()
                    .getvalue2().intValue());
        }
        if (pass.getstencil_func().hasmask()) {
            // BUGFIX: the mask value previously overwrote the stencil
            // reference via a second setStencilReference call.
            ss.setStencilFuncMask(pass.getstencil_func().getmask()
                    .getvalue2().intValue());
        }
    }
    if (pass.hasstencil_op()) {
        StencilState ss = ensureStencilState(mat);
        if (pass.getstencil_op().hasfail()) {
            ss.setStencilOpFail(evaluateStencilOp(pass.getstencil_op()
                    .getfail().toString()));
        }
        if (pass.getstencil_op().haszfail()) {
            ss.setStencilOpZFail(evaluateStencilOp(pass.getstencil_op()
                    .getzfail().toString()));
        }
        if (pass.getstencil_op().haszpass()) {
            ss.setStencilOpZPass(evaluateStencilOp(pass.getstencil_op()
                    .getzpass().toString()));
        }
    }
    if (pass.hasstencil_test_enable()) {
        StencilState ss = ensureStencilState(mat);
        ss.setEnabled(pass.getstencil_test_enable().getvalue2()
                .booleanValue());
    }
}

/** Fetches the material's ClipState, creating and attaching one if absent. */
private ClipState ensureClipState(ColladaMaterial mat) {
    ClipState cs = (ClipState) mat.getState(RenderState.RS_CLIP);
    if (cs == null) {
        cs = DisplaySystem.getDisplaySystem().getRenderer()
                .createClipState();
        mat.setState(cs);
    }
    return cs;
}

/** Fetches the material's ColorMaskState, creating and attaching one if absent. */
private ColorMaskState ensureColorMaskState(ColladaMaterial mat) {
    ColorMaskState cms = (ColorMaskState) mat
            .getState(RenderState.RS_COLORMASK_STATE);
    if (cms == null) {
        cms = DisplaySystem.getDisplaySystem().getRenderer()
                .createColorMaskState();
        mat.setState(cms);
    }
    return cms;
}

/** Fetches the material's ZBufferState, creating and attaching one if absent. */
private ZBufferState ensureZBufferState(ColladaMaterial mat) {
    ZBufferState zbs = (ZBufferState) mat.getState(RenderState.RS_ZBUFFER);
    if (zbs == null) {
        zbs = DisplaySystem.getDisplaySystem().getRenderer()
                .createZBufferState();
        mat.setState(zbs);
    }
    return zbs;
}

/** Fetches the material's MaterialState, creating and attaching one if absent. */
private MaterialState ensureMaterialState(ColladaMaterial mat) {
    MaterialState ms = (MaterialState) mat
            .getState(RenderState.RS_MATERIAL);
    if (ms == null) {
        ms = DisplaySystem.getDisplaySystem().getRenderer()
                .createMaterialState();
        mat.setState(ms);
    }
    return ms;
}

/** Fetches the material's FogState, creating and attaching one if absent. */
private FogState ensureFogState(ColladaMaterial mat) {
    FogState fs = (FogState) mat.getState(RenderState.RS_FOG);
    if (fs == null) {
        fs = DisplaySystem.getDisplaySystem().getRenderer()
                .createFogState();
        mat.setState(fs);
    }
    return fs;
}

/** Fetches the material's BlendState, creating and attaching one if absent. */
private BlendState ensureBlendState(ColladaMaterial mat) {
    BlendState as = (BlendState) mat.getState(RenderState.RS_BLEND);
    if (as == null) {
        as = DisplaySystem.getDisplaySystem().getRenderer()
                .createBlendState();
        mat.setState(as);
    }
    return as;
}

/** Fetches the material's CullState, creating and attaching one if absent. */
private CullState ensureCullState(ColladaMaterial mat) {
    CullState cs = (CullState) mat.getState(RenderState.RS_CULL);
    if (cs == null) {
        cs = DisplaySystem.getDisplaySystem().getRenderer()
                .createCullState();
        mat.setState(cs);
    }
    return cs;
}

/** Fetches the material's ShadeState, creating and attaching one if absent. */
private ShadeState ensureShadeState(ColladaMaterial mat) {
    ShadeState ss = (ShadeState) mat.getState(RenderState.RS_SHADE);
    if (ss == null) {
        ss = DisplaySystem.getDisplaySystem().getRenderer()
                .createShadeState();
        mat.setState(ss);
    }
    return ss;
}

/** Fetches the material's StencilState, creating and attaching one if absent. */
private StencilState ensureStencilState(ColladaMaterial mat) {
    StencilState ss = (StencilState) mat
            .getState(RenderState.RS_STENCIL);
    if (ss == null) {
        ss = DisplaySystem.getDisplaySystem().getRenderer()
                .createStencilState();
        mat.setState(ss);
    }
    return ss;
}

/**
 * Parses {@code count} whitespace-separated floats from the given text.
 *
 * @param text whitespace-separated float values.
 * @param count the number of floats to read.
 * @return the parsed values.
 */
private static float[] parseFloats(String text, int count) {
    StringTokenizer st = new StringTokenizer(text);
    float[] values = new float[count];
    for (int i = 0; i < count; i++) {
        values[i] = Float.parseFloat(st.nextToken());
    }
    return values;
}

/** Maps a COLLADA depth-test token to jME; null for unknown tokens. */
private static ZBufferState.TestFunction depthTestFunction(String name) {
    if ("NEVER".equals(name)) {
        return ZBufferState.TestFunction.Never;
    } else if ("LESS".equals(name)) {
        return ZBufferState.TestFunction.LessThan;
    } else if ("LEQUAL".equals(name)) {
        return ZBufferState.TestFunction.LessThanOrEqualTo;
    } else if ("EQUAL".equals(name)) {
        return ZBufferState.TestFunction.EqualTo;
    } else if ("GREATER".equals(name)) {
        return ZBufferState.TestFunction.GreaterThan;
    } else if ("NOTEQUAL".equals(name)) {
        return ZBufferState.TestFunction.NotEqualTo;
    } else if ("GEQUAL".equals(name)) {
        return ZBufferState.TestFunction.GreaterThanOrEqualTo;
    } else if ("ALWAYS".equals(name)) {
        return ZBufferState.TestFunction.Always;
    }
    return null;
}

/** Maps a COLLADA alpha-test token to jME; null for unknown tokens. */
private static BlendState.TestFunction alphaTestFunction(String name) {
    if ("NEVER".equals(name)) {
        return BlendState.TestFunction.Never;
    } else if ("LESS".equals(name)) {
        return BlendState.TestFunction.LessThan;
    } else if ("LEQUAL".equals(name)) {
        return BlendState.TestFunction.LessThanOrEqualTo;
    } else if ("EQUAL".equals(name)) {
        return BlendState.TestFunction.EqualTo;
    } else if ("GREATER".equals(name)) {
        return BlendState.TestFunction.GreaterThan;
    } else if ("NOTEQUAL".equals(name)) {
        return BlendState.TestFunction.NotEqualTo;
    } else if ("GEQUAL".equals(name)) {
        return BlendState.TestFunction.GreaterThanOrEqualTo;
    } else if ("ALWAYS".equals(name)) {
        return BlendState.TestFunction.Always;
    }
    return null;
}

/** Maps a COLLADA blend destination token to jME; null for unknown tokens. */
private static BlendState.DestinationFunction blendDestinationFunction(
        String name) {
    if ("ZERO".equals(name)) {
        return BlendState.DestinationFunction.Zero;
    } else if ("ONE".equals(name)) {
        return BlendState.DestinationFunction.One;
    } else if ("SRC_COLOR".equals(name)) {
        return BlendState.DestinationFunction.SourceColor;
    } else if ("ONE_MINUS_SRC_COLOR".equals(name)) {
        return BlendState.DestinationFunction.OneMinusSourceColor;
    } else if ("SRC_ALPHA".equals(name)) {
        return BlendState.DestinationFunction.SourceAlpha;
    } else if ("ONE_MINUS_SRC_ALPHA".equals(name)) {
        return BlendState.DestinationFunction.OneMinusSourceAlpha;
    } else if ("DST_ALPHA".equals(name)) {
        return BlendState.DestinationFunction.DestinationAlpha;
    } else if ("ONE_MINUS_DST_ALPHA".equals(name)) {
        return BlendState.DestinationFunction.OneMinusDestinationAlpha;
    } else if ("CONSTANT_COLOR".equals(name)) {
        return BlendState.DestinationFunction.ConstantColor;
    } else if ("ONE_MINUS_CONSTANT_COLOR".equals(name)) {
        return BlendState.DestinationFunction.OneMinusConstantColor;
    } else if ("CONSTANT_ALPHA".equals(name)) {
        return BlendState.DestinationFunction.ConstantAlpha;
    } else if ("ONE_MINUS_CONSTANT_ALPHA".equals(name)) {
        return BlendState.DestinationFunction.OneMinusConstantAlpha;
    }
    return null;
}

/** Maps a COLLADA blend source token to jME; null for unknown tokens. */
private static BlendState.SourceFunction blendSourceFunction(String name) {
    if ("ZERO".equals(name)) {
        return BlendState.SourceFunction.Zero;
    } else if ("ONE".equals(name)) {
        return BlendState.SourceFunction.One;
    } else if ("DEST_COLOR".equals(name)) {
        return BlendState.SourceFunction.DestinationColor;
    } else if ("ONE_MINUS_DEST_COLOR".equals(name)) {
        return BlendState.SourceFunction.OneMinusDestinationColor;
    } else if ("SRC_ALPHA".equals(name)) {
        return BlendState.SourceFunction.SourceAlpha;
    } else if ("ONE_MINUS_SRC_ALPHA".equals(name)) {
        // BUGFIX: previously mapped to OneMinusDestinationAlpha.
        return BlendState.SourceFunction.OneMinusSourceAlpha;
    } else if ("DST_ALPHA".equals(name)) {
        return BlendState.SourceFunction.DestinationAlpha;
    } else if ("ONE_MINUS_DST_ALPHA".equals(name)) {
        return BlendState.SourceFunction.OneMinusDestinationAlpha;
    } else if ("CONSTANT_COLOR".equals(name)) {
        return BlendState.SourceFunction.ConstantColor;
    } else if ("ONE_MINUS_CONSTANT_COLOR".equals(name)) {
        return BlendState.SourceFunction.OneMinusConstantColor;
    } else if ("CONSTANT_ALPHA".equals(name)) {
        return BlendState.SourceFunction.ConstantAlpha;
    } else if ("ONE_MINUS_CONSTANT_ALPHA".equals(name)) {
        return BlendState.SourceFunction.OneMinusConstantAlpha;
    } else if ("SRC_ALPHA_SATURATE".equals(name)) {
        return BlendState.SourceFunction.SourceAlphaSaturate;
    }
    return null;
}

/** Maps a COLLADA stencil-function token to jME; null for unknown tokens. */
private static StencilState.StencilFunction stencilFunction(String name) {
    if ("NEVER".equals(name)) {
        return StencilState.StencilFunction.Never;
    } else if ("LESS".equals(name)) {
        return StencilState.StencilFunction.LessThan;
    } else if ("LEQUAL".equals(name)) {
        return StencilState.StencilFunction.LessThanOrEqualTo;
    } else if ("EQUAL".equals(name)) {
        return StencilState.StencilFunction.EqualTo;
    } else if ("GREATER".equals(name)) {
        return StencilState.StencilFunction.GreaterThan;
    } else if ("NOTEQUAL".equals(name)) {
        return StencilState.StencilFunction.NotEqualTo;
    } else if ("GEQUAL".equals(name)) {
        return StencilState.StencilFunction.GreaterThanOrEqualTo;
    } else if ("ALWAYS".equals(name)) {
        return StencilState.StencilFunction.Always;
    }
    return null;
}
/**
 * Maps a COLLADA stencil-operation token to the corresponding jME
 * StencilState operation.
 *
 * @param value the token, e.g. "KEEP" or "INCR_WRAP".
 * @return the matching operation; Keep when the token is unknown.
 */
public StencilState.StencilOperation evaluateStencilOp(String value) {
    if ("KEEP".equals(value)) {
        return StencilState.StencilOperation.Keep;
    } else if ("ZERO".equals(value)) {
        return StencilState.StencilOperation.Zero;
    } else if ("REPLACE".equals(value)) {
        return StencilState.StencilOperation.Replace;
    } else if ("INCR".equals(value)) {
        return StencilState.StencilOperation.Increment;
    } else if ("DECR".equals(value)) {
        return StencilState.StencilOperation.Decrement;
    } else if ("INVERT".equals(value)) {
        return StencilState.StencilOperation.Invert;
    } else if ("INCR_WRAP".equals(value)) {
        return StencilState.StencilOperation.IncrementWrap;
    } else if ("DECR_WRAP".equals(value) || "DECT_WRAP".equals(value)) {
        // BUGFIX: the COLLADA token is DECR_WRAP; the misspelled
        // DECT_WRAP is still accepted for backward compatibility.
        return StencilState.StencilOperation.DecrementWrap;
    } else {
        return StencilState.StencilOperation.Keep;
    }
}
/**
 * processTechniqueCOMMON processes a technique of techniqueType2, as
 * returned from a profile_COMMON object. Such a technique contains
 * images, lambert shading, phong shading and blinn shading.
 *
 * @param technique the fixed pipeline technique.
 * @param mat the material to store the technique in.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processTechniqueCOMMON(techniqueType2 technique,
        ColladaMaterial mat) throws Exception {
    if (technique.haslambert()) {
        processLambert(technique.getlambert(), mat);
    }
    // Blinn shading and phong shading are virtually the same, and OpenGL
    // only has a single "smooth" attribute for this.
    if (technique.hasphong()) {
        processPhong(technique.getphong(), mat);
    }
    if (technique.hasextra()) {
        int extraCount = technique.getextraCount();
        for (int extraIndex = 0; extraIndex < extraCount; extraIndex++) {
            ExtraPluginManager.processExtra(mat,
                    technique.getextraAt(extraIndex));
        }
    }
}
/**
 * Builds a MaterialState (and any diffuse TextureStates) from a phong
 * shading definition and stores them on the Collada material.
 *
 * @param pt the phong shading description to read.
 * @param mat the material the generated render states are stored in.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processPhong(phongType pt, ColladaMaterial mat)
        throws Exception {
    MaterialState materialState = DisplaySystem.getDisplaySystem()
            .getRenderer().createMaterialState();
    // ambient color
    if (pt.hasambient()) {
        materialState.setAmbient(getColor(pt.getambient().getcolor()));
    }
    // diffuse color and/or diffuse textures
    if (pt.hasdiffuse()) {
        if (pt.getdiffuse().hascolor()) {
            materialState.setDiffuse(getColor(pt.getdiffuse().getcolor()));
        }
        if (pt.getdiffuse().hastexture()) {
            // each diffuse texture is bound to its texcoord "unit" via a
            // TextureState on the material
            int textureCount = pt.getdiffuse().gettextureCount();
            for (int t = 0; t < textureCount; t++) {
                mat.setState(processTexture(
                        pt.getdiffuse().gettextureAt(t), mat));
            }
        }
    }
    // emission color
    if (pt.hasemission()) {
        materialState.setEmissive(getColor(pt.getemission().getcolor()));
    }
    // specular color
    if (pt.hasspecular()) {
        materialState.setSpecular(getColor(pt.getspecular().getcolor()));
    }
    // shininess
    if (pt.hasshininess()) {
        materialState.setShininess(pt.getshininess().getfloat2()
                .getValue().floatValue());
    }
    // Transparent and reflective attributes are intentionally ignored.
    mat.setState(materialState);
}
/**
 * Builds the render states for lambert shading: a flat ShadeState plus a
 * MaterialState (and any diffuse TextureStates) with the defined colors.
 *
 * @param lt the lambert shading description to read.
 * @param mat the material the generated render states are stored in.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processLambert(lambertType lt, ColladaMaterial mat)
        throws Exception {
    // lambert shading uses a FLAT shade state
    ShadeState shadeState = DisplaySystem.getDisplaySystem().getRenderer()
            .createShadeState();
    shadeState.setShadeMode(ShadeState.ShadeMode.Flat);
    mat.setState(shadeState);
    MaterialState materialState = DisplaySystem.getDisplaySystem()
            .getRenderer().createMaterialState();
    // ambient color
    if (lt.hasambient()) {
        materialState.setAmbient(getColor(lt.getambient().getcolor()));
    }
    // diffuse color and/or diffuse textures
    if (lt.hasdiffuse()) {
        if (lt.getdiffuse().hascolor()) {
            materialState.setDiffuse(getColor(lt.getdiffuse().getcolor()));
        }
        if (lt.getdiffuse().hastexture()) {
            // each diffuse texture is bound to its texcoord "unit" via a
            // TextureState on the material
            int textureCount = lt.getdiffuse().gettextureCount();
            for (int t = 0; t < textureCount; t++) {
                mat.setState(processTexture(
                        lt.getdiffuse().gettextureAt(t), mat));
            }
        }
    }
    // emission color
    if (lt.hasemission()) {
        materialState.setEmissive(getColor(lt.getemission().getcolor()));
    }
    mat.setState(materialState);
    // Reflective and transparent attributes are intentionally ignored.
}
/**
 * processTexture generates a texture state that contains the image and
 * texture coordinate unit information. This texture state is returned to
 * be placed in the Collada material.
 *
 * @param texture the texture type to process.
 * @param mat the material the texture belongs to.
 * @return the generated TextureState that handles this texture tag.
 * @throws Exception thrown if there is a problem processing the xml.
 */
public TextureState processTexture(textureType texture,
        ColladaMaterial mat) throws Exception {
    String textureKey = texture.gettexture().toString();
    String texcoord = texture.gettexcoord().toString();
    // texcoord names are usually of the form ...CHANNELn; strip everything
    // up to and including "CHANNEL" to get the unit number
    int channelStart = texcoord.indexOf("CHANNEL");
    if (channelStart >= 0) {
        texcoord = texcoord.substring(channelStart + "CHANNEL".length());
    }
    int unit = 0;
    try {
        unit = Integer.parseInt(texcoord) - 1;
    } catch (NumberFormatException ignored) {
        // non-numeric texcoord names fall back to unit 0
    }
    return processTexture(textureKey, mat, unit);
}
/**
 * Resolves the texture image for the given sampler key through the
 * resource library (key -> surface -> image -> filename) and loads it
 * into texture unit {@code index} of the material's TextureState.
 *
 * @param key the sampler key to resolve.
 * @param mat the material whose TextureState is populated.
 * @param index the texture unit the image is placed in.
 * @return the TextureState holding the texture, or null when a link of
 *         the resource chain is missing.
 * @throws Exception thrown if there is a problem processing the xml.
 */
public TextureState processTexture(String key, ColladaMaterial mat,
        int index) throws Exception {
    TextureState textureState = (TextureState) mat
            .getState(RenderState.RS_TEXTURE);
    if (textureState == null) {
        textureState = DisplaySystem.getDisplaySystem().getRenderer()
                .createTextureState();
    }
    // follow the indirection chain: sampler key -> surface -> image
    Object surfaceName = resourceLibrary.get(key);
    if (surfaceName == null) {
        return null;
    }
    Object imageName = resourceLibrary.get((String) surfaceName);
    if (imageName == null) {
        return null;
    }
    String filename = (String) resourceLibrary.get((String) imageName);
    loadTexture(textureState, filename, mat, index);
    return textureState;
}
/**
 * Loads the texture file into unit {@code index} of the given
 * TextureState, applying the material's min/mag filters and wrap modes.
 * A warning is logged (unless squelched) when the texture cannot be
 * located.
 *
 * @param ts the TextureState receiving the texture.
 * @param filename the resource name resolved through ResourceLocatorTool.
 * @param mat the source of filter and wrap settings.
 * @param index the texture unit to place the texture in.
 */
private void loadTexture(TextureState ts, String filename,
        ColladaMaterial mat, int index) {
    URL textureURL = ResourceLocatorTool.locateResource(
            ResourceLocatorTool.TYPE_TEXTURE, filename);
    if (textureURL == null) {
        if (!squelch) {
            logger.warning("Invalid or missing texture: \"" + filename
                    + "\"");
        }
        return;
    }
    Texture texture = TextureManager.loadTexture(textureURL,
            mat.getMinFilterConstant(), mat.getMagFilterConstant(),
            Image.Format.GuessNoCompression, 0, true);
    texture.setWrap(WrapAxis.S, mat.getWrapSConstant());
    texture.setWrap(WrapAxis.T, mat.getWrapTConstant());
    ts.setTexture(texture, index);
}
/**
 * Process Geometry will build a number of Geometry objects, registering
 * each under its geometry id in the resource library.
 *
 * @param geometryLibrary the geometries to process individually.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processGeometry(library_geometriesType geometryLibrary)
        throws Exception {
    // handle each geometry one at a time
    int geometryCount = geometryLibrary.getgeometryCount();
    for (int g = 0; g < geometryCount; g++) {
        geometryType geom = geometryLibrary.getgeometryAt(g);
        String geomId = geom.getid().toString();
        if (geom.hasmesh()) {
            for (int m = 0; m < geom.getmeshCount(); m++) {
                Spatial spatial = processMesh(geom.getmeshAt(m), geom);
                put(geomId, spatial);
                if (geometryNames == null) {
                    geometryNames = new ArrayList<String>();
                }
                geometryNames.add(geomId);
            }
        }
        // splines are not currently supported
        if (geom.hasspline() && !squelch) {
            logger.warning("splines not yet supported.");
        }
    }
}
/**
 * processControllerLibrary builds a controller for each controller tag in
 * the file.
 *
 * @param controllerLibrary the controller library object to parse.
 * @throws Exception thrown if there is a problem with the loader.
 */
private void processControllerLibrary(
        library_controllersType controllerLibrary) throws Exception {
    if (!controllerLibrary.hascontroller()) {
        return;
    }
    int controllerCount = controllerLibrary.getcontrollerCount();
    for (int c = 0; c < controllerCount; c++) {
        processController(controllerLibrary.getcontrollerAt(c));
    }
}
/**
 * Controllers define how one object interacts with another. Typically,
 * this is skinning and morph targets; only skinning is supported.
 *
 * @param controller the controller to process.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processController(controllerType controller) throws Exception {
    // skin and morph are mutually exclusive
    if (controller.hasskin()) {
        // there can only be one skin per controller
        processSkin(controller.getid().toString(), controller.getskin());
    } else if (controller.hasmorph()) {
        // morph targets are not currently supported
    }
}
/**
 * processSkin builds a SkinNode that binds a previously loaded mesh to the
 * skeletal system defined by the skin.
 *
 * @param id the controller id the resulting skin node is registered under.
 * @param skin the skin to process.
 * @throws Exception thrown if there is a problem parsing the skin.
 */
private void processSkin(String id, skinType skin) throws Exception {
    SkinNode skinNode = new SkinNode(id + "_node");
    if (skinNodeNames == null) {
        skinNodeNames = new ArrayList<String>();
    }
    skinNodeNames.add(id);
    put(id, skinNode);
    // the bind shape matrix defines the overall orientation of the mesh
    // before any skinning occurs
    if (skin.hasbind_shape_matrix()) {
        String key = skin.getsource().toString();
        if (key.startsWith("#")) {
            key = key.substring(1);
        }
        Spatial mesh = (Spatial) resourceLibrary.get(key);
        if (mesh == null) {
            if (!squelch) {
                logger.warning(key
                        + " mesh does NOT exist in COLLADA file.");
            }
            return;
        }
        Node skins = null;
        if (mesh instanceof TriMesh) {
            // wrap a plain mesh in a Node so it can act as the skins root
            skins = new Node(mesh.getName());
            skins.attachChild(mesh);
            resourceLibrary.put(key, skins);
        } else if (mesh instanceof Node) {
            skins = (Node) mesh;
        } else {
            if (!squelch) {
                logger.warning(key + " mesh is of unsupported skin type: "
                        + mesh);
            }
            return;
        }
        processBindShapeMatrix(skinNode, skin.getbind_shape_matrix());
        skinNode.setSkins(skins);
    }
    // Sources define the joints, the binding table and the weights; the
    // Collada exporter names them skin-joint-*, skin-binding-table-*, etc.
    // They are identified later by the semantics of the <joints> inputs.
    if (skin.hassource2()) {
        for (int i = 0; i < skin.getsource2Count(); i++) {
            processControllerSource(skin.getsource2At(i));
        }
    }
    // the vertex weights will be assigned to the appropriate bones
    if (skin.hasvertex_weights()) {
        processVertexWeights(skin.getvertex_weights(), skinNode);
    }
    if (skin.hasjoints()) {
        String[] jointIds = null;
        Matrix4f[] bindMatrices = null;
        // pair each joint with its inverse bind matrix
        if (skin.getjoints().hasinput()) {
            for (int i = 0; i < skin.getjoints().getinputCount(); i++) {
                String semantic = skin.getjoints().getinputAt(i)
                        .getsemantic().toString();
                String source = skin.getjoints().getinputAt(i).getsource()
                        .toString().substring(1);
                if ("JOINT".equals(semantic)) {
                    jointIds = (String[]) resourceLibrary.get(source);
                } else if ("INV_BIND_MATRIX".equals(semantic)) {
                    bindMatrices = (Matrix4f[]) resourceLibrary.get(source);
                }
            }
        }
        // BUGFIX: previously an NPE was thrown when the INV_BIND_MATRIX
        // input, a bind matrix entry, or a referenced bone was missing.
        if (jointIds != null && bindMatrices != null) {
            int max = Math.min(jointIds.length, bindMatrices.length);
            for (int i = 0; i < max; i++) {
                Bone b = (Bone) resourceLibrary.get(jointIds[i]);
                if (b != null && bindMatrices[i] != null) {
                    b.setBindMatrix(bindMatrices[i].invert());
                } else if (!squelch) {
                    logger.warning("Missing bone or bind matrix for joint: "
                            + jointIds[i]);
                }
            }
        }
    }
}
/**
 * processVertexWeights assigns, for every vertex, the influence of each
 * bone that affects it. The &lt;vcount&gt; element lists the number of
 * bones per vertex and &lt;v&gt; lists (joint index, weight index) pairs.
 * The bones were prebuilt in the processControllerSource method.
 *
 * @param weights the per-vertex joint/weight assignments.
 * @param skinNode the skin node receiving the bone influences.
 * @throws Exception thrown if there is a problem processing the xml.
 */
@SuppressWarnings("unchecked")
private void processVertexWeights(vertex_weightsType weights,
        SkinNode skinNode) throws Exception {
    int[] boneCount = new int[weights.getcount().intValue()];
    StringTokenizer st = new StringTokenizer(weights.getvcount().getValue());
    for (int i = 0; i < boneCount.length; i++) {
        boneCount[i] = Integer.parseInt(st.nextToken());
    }
    st = new StringTokenizer(weights.getv().getValue());
    String[] boneIdArray = null;
    float[] weightArray = null;
    for (int i = 0; i < weights.getinputCount(); i++) {
        String semantic = weights.getinputAt(i).getsemantic().toString();
        if ("JOINT".equals(semantic)) {
            String key = weights.getinputAt(i).getsource().toString()
                    .substring(1);
            boneIdArray = (String[]) resourceLibrary.get(key);
        } else if ("WEIGHT".equals(semantic)) {
            String key = weights.getinputAt(i).getsource().toString()
                    .substring(1);
            weightArray = (float[]) resourceLibrary.get(key);
        }
    }
    if (boneIdArray == null || weightArray == null) {
        if (!squelch) {
            logger.warning("Missing resource values for either bone "
                    + "weights or bone vertex ids.");
        }
        return;
    }
    Map<Integer, ArrayList<MeshVertPair>> vertMap = (Map) resourceLibrary
            .get(skinNode.getSkins().getName() + "VertMap");
    if (vertMap == null) {
        if (!squelch) {
            logger.warning("Missing vertex map for skin: "
                    + skinNode.getSkins().getName());
        }
        return;
    }
    int count = 0;
    while (st.hasMoreTokens()) {
        // each vertex has boneCount[count] (joint index, weight index) pairs
        for (int i = 0; i < boneCount[count]; i++) {
            int idIndex = Integer.parseInt(st.nextToken());
            int weightIndex = Integer.parseInt(st.nextToken());
            float weight = weightArray[weightIndex];
            ArrayList<MeshVertPair> target = vertMap.get(count);
            if (target != null) {
                for (int j = 0, max = target.size(); j < max; j++) {
                    MeshVertPair bvp = target.get(j);
                    // BUGFIX: previously indexed the stale class-level
                    // boneIds field (set as a side effect of
                    // processControllerSource) instead of this skin's own
                    // validated joint array.
                    skinNode.addBoneInfluence(bvp.mesh, bvp.index,
                            boneIdArray[idIndex], weight);
                }
            }
        }
        count++;
    }
}
/**
* processControllerSource will process the source types that define how a
* controller is built. This includes support for skin joints, bindings and
* weights.
*
* @param source
* the source to process.
* @throws Exception
* thrown if there is a problem processing the XML.
*/
private void processControllerSource(sourceType source) throws Exception {
// check for the joint id list
String key = source.gettechnique_common().getaccessor().getparam()
.gettype().getValue();
if (key.equalsIgnoreCase("IDREF")) {
if (source.hasIDREF_array()) {
IDREF_arrayType idrefs = source.getIDREF_array();
Bone[] bones = new Bone[idrefs.getcount().intValue()];
boneIds = new String[bones.length];
StringTokenizer st = new StringTokenizer(idrefs.getValue()
.toString());
for (int i = 0; i < bones.length; i++) {
// this skin has a number of bones assigned to it.
// Create a Bone for each entry.
bones[i] = new Bone(st.nextToken());
boneIds[i] = bones[i].getName();
put(boneIds[i], bones[i]);
}
put(source.getid().toString(), boneIds);
}
} else if (key.equalsIgnoreCase("Name")) {
if (source.hasName_array()) {
Name_arrayType names = source.getName_array();
Bone[] bones = new Bone[names.getcount().intValue()];
boneIds = new String[bones.length];
StringTokenizer st = new StringTokenizer(names.getValue()
.toString());
for (int i = 0; i < bones.length; i++) {
// this skin has a number of bones assigned to it.
// Create a Bone for each entry.
bones[i] = new Bone(st.nextToken());
boneIds[i] = bones[i].getName();
put(boneIds[i], bones[i]);
put(source.getid().toString(), boneIds);
}
}
} else if (key.equalsIgnoreCase("float4x4")) {
StringTokenizer st = new StringTokenizer(source.getfloat_array()
.getValue().toString());
int numOfTransforms = st.countTokens() / 16;
// this creates a 4x4 matrix
Matrix4f[] tm = new Matrix4f[numOfTransforms];
for (int i = 0; i < tm.length; i++) {
tm[i] = new Matrix4f();
float[] data = new float[16];
for (int x = 0; x < 16; x++) {
data[x] = Float.parseFloat(st.nextToken());
}
tm[i].set(data, true); // collada matrices are in row order.
}
put(source.getid().toString(), tm);
} else if (key.equalsIgnoreCase("float")) {
float_arrayType floats = source.getfloat_array();
float[] weights = new float[floats.getcount().intValue()];
StringTokenizer st = new StringTokenizer(floats.getValue()
.toString());
for (int i = 0; i < weights.length; i++) {
weights[i] = Float.parseFloat(st.nextToken());
}
put(source.getid().toString(), weights);
}
}
/**
* processBindShapeMatrix sets the initial transform of the skinned mesh.
* The 4x4 matrix is converted to a 3x3 matrix and a vector, then passed to
* the skinned mesh for use.
*
* @param skin
* the skin to apply the bind to.
* @param matrix
* the matrix to parse.
*/
private void processBindShapeMatrix(SkinNode skin, float4x4 matrix) {
Matrix4f mat = new Matrix4f();
StringTokenizer st = new StringTokenizer(matrix.getValue());
float[] data = new float[16];
for (int x = 0; x < 16; x++) {
data[x] = Float.parseFloat(st.nextToken());
}
mat.set(data, true); // collada matrices are in row order.
skin.setBindMatrix(mat);
}
/**
* processBindMaterial
*
* @param material
* @param spatial
* @throws Exception
* the matrix to parse.
*/
private void processBindMaterial(bind_materialType material,
Spatial geomBindTo) throws Exception {
technique_commonType common = material.gettechnique_common();
for (int i = 0; i < common.getinstance_materialCount(); i++) {
processInstanceMaterial(common.getinstance_materialAt(i),
geomBindTo);
}
}
/**
* processMesh will create either lines or a TriMesh. This means that the
* only supported child elements are: triangles and lines or linestrips.
* Polygons, trifans and tristrips are ignored.
*
* @param mesh
* the mesh to parse.
* @param geom
* the geometryType of the Geometry to build.
* @return the created Geometry built from the mesh data.
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private Spatial processMesh(meshType mesh, geometryType geom)
throws Exception {
// we need to build all the source data objects.
for (int i = 0; i < mesh.getsourceCount(); i++) {
sourceType source = mesh.getsourceAt(i);
if (source.hasfloat_array()) {
float_arrayType floatArray = source.getfloat_array();
StringTokenizer st = new StringTokenizer(floatArray.getValue()
.toString());
// build an array of data to use for the final vector list.
float[] floats = new float[floatArray.getcount().intValue()];
for (int j = 0; j < floats.length; j++) {
floats[j] = Float.parseFloat(st.nextToken());
}
// technique_common should have the accessor type
if (source.hastechnique_common()) {
accessorType accessor = source.gettechnique_common()
.getaccessor();
// create an array of Vector3fs, using zero for the last
// element
// if the stride is 2 (the UV map case)
Vector3f[] vecs = new Vector3f[accessor.getcount()
.intValue()];
int stride = accessor.getstride().intValue();
if (2 == stride) {
for (int k = 0; k < vecs.length; k++) {
vecs[k] = new Vector3f(floats[(k * stride)],
floats[(k * stride) + 1], 0.0f);
}
} else {
assert (3 == stride);
for (int k = 0; k < vecs.length; k++) {
vecs[k] = new Vector3f(floats[(k * stride)],
floats[(k * stride) + 1],
floats[(k * stride) + 2]);
}
}
put(source.getid().toString(), vecs);
}
}
}
// next we have to define what source defines the vertices positional
// information
if (mesh.hasvertices()) {
if (mesh.getvertices().hasinput()) {
put(mesh.getvertices().getid().toString(), mesh.getvertices()
.getinput().getsource().toString());
}
}
// determine what type of geometry this is, and use the
// lists to build the object.
if (mesh.hastriangles()) {
return processTriMesh(mesh, geom);
} else if (mesh.haspolygons()) {
return processPolygonMesh(mesh, geom);
} else if (mesh.haslines()) {
return processLines(mesh, geom);
} else {
return null;
}
}
    /**
     * processTriMesh will process the triangles tag from the mesh section of
     * the COLLADA file. A jME TriMesh is returned that defines the vertices,
     * indices, normals, texture coordinates and colors.
     *
     * @param mesh
     *            the meshType to process for the trimesh.
     * @param geom
     *            the geometryType of the TriMesh to build.
     * @return the jME tri mesh representing the COLLADA mesh.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private Spatial processTriMesh(meshType mesh, geometryType geom)
            throws Exception {
        // Maps a COLLADA vertex index to every (mesh, buffer-position) pair
        // that uses it; consumed later when bone influences are assigned.
        HashMap<Integer, ArrayList<MeshVertPair>> vertMap = new HashMap<Integer, ArrayList<MeshVertPair>>();
        put(geom.getid().toString() + "VertMap", vertMap);
        Node parentNode = new Node(geom.getid().toString());
        // One jME TriMesh is built per <triangles> element in the mesh.
        for (int triangleIndex = 0; triangleIndex < mesh.gettrianglesCount(); triangleIndex++) {
            trianglesType tri = mesh.gettrianglesAt(triangleIndex);
            TriMesh triMesh = new TriMesh(geom.getid().toString());
            if (tri.hasmaterial()) {
                // first set the appropriate materials to this mesh.
                String matKey = (String) resourceLibrary.get(tri.getmaterial()
                        .toString());
                triMesh.setName(triMesh.getName()+"-"+tri.getmaterial().toString());
                ColladaMaterial cm = (ColladaMaterial) resourceLibrary
                        .get(matKey);
                if (cm != null) {
                    for (int i = 0; i < RenderState.RS_MAX_STATE; i++) {
                        if (cm.getState(i) != null) {
                            if (cm.getState(i).getType() == RenderState.RS_BLEND) {
                                triMesh
                                        .setRenderQueueMode(Renderer.QUEUE_TRANSPARENT);
                            }
                            // clone the state as different mesh's may have
                            // different
                            // attributes
                            // NOTE(review): the clone is done via a binary
                            // export/import round-trip of the render state.
                            try {
                                ByteArrayOutputStream out = new ByteArrayOutputStream();
                                BinaryExporter.getInstance().save(
                                        cm.getState(i), out);
                                ByteArrayInputStream in = new ByteArrayInputStream(
                                        out.toByteArray());
                                RenderState rs = (RenderState) BinaryImporter
                                        .getInstance().load(in);
                                triMesh.setRenderState(rs);
                            } catch (IOException e) {
                                logger
                                        .throwing(
                                                this.getClass().toString(),
                                                "processTriMesh(meshType mesh, geometryType geom)",
                                                e);
                            }
                        }
                    }
                    ArrayList<Controller> cList = cm.getControllerList();
                    if (cList != null) {
                        for (int c = 0; c < cList.size(); c++) {
                            if (cList.get(c) instanceof TextureKeyframeController) {
                                TextureState ts = (TextureState) triMesh
                                        .getRenderState(RenderState.RS_TEXTURE);
                                if (ts != null) {
                                    // allow wrapping, as animated textures will
                                    // almost always need it.
                                    ts.getTexture().setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
                                    ts.getTexture().setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
                                    ((TextureKeyframeController) cList.get(c))
                                            .setTexture(ts.getTexture());
                                }
                            }
                        }
                    }
                    if (mesh.hasextra()) {
                        for (int i = 0; i < mesh.getextraCount(); i++) {
                            try {
                                ExtraPluginManager.processExtra(triMesh, mesh
                                        .getextraAt(i));
                            } catch (Exception e) {
                                // Extra data is best-effort; failures are
                                // logged (unless squelched) and processing
                                // continues.
                                if (!squelch) {
                                    logger
                                            .log(
                                                    Level.INFO,
                                                    "Error processing extra information for mesh",
                                                    e);
                                }
                            }
                        }
                    }
                }
                subMaterialLibrary.put(triMesh, tri.getmaterial().toString());
            }
            // build the index buffer, this is going to be easy as it's only
            // 0...N where N is the number of vertices in the model.
            IntBuffer indexBuffer = BufferUtils.createIntBuffer(tri.getcount()
                    .intValue() * 3);
            for (int i = 0; i < indexBuffer.capacity(); i++) {
                indexBuffer.put(i);
            }
            triMesh.setIndexBuffer(indexBuffer);
            // find the maximum offset to understand the stride
            int maxOffset = -1;
            for (int i = 0; i < tri.getinputCount(); i++) {
                int temp = tri.getinputAt(i).getoffset().intValue();
                if (maxOffset < temp) {
                    maxOffset = temp;
                }
            }
            // next build the other buffers, based on the input semantic
            for (int i = 0; i < tri.getinputCount(); i++) {
                if ("VERTEX".equals(tri.getinputAt(i).getsemantic().toString())) {
                    // build the vertex buffer
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    // Follow string indirection in the resource library until
                    // the actual Vector3f[] data is found.
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int vertCount = tri.getcount().intValue() * 3;
                    FloatBuffer vertBuffer = BufferUtils
                            .createVector3Buffer(vertCount);
                    triMesh.setVertexCount(vertCount);
                    for (int j = 0; j < vertCount; j++) {
                        // need to store the index in p to what j is for later
                        // processing the index to the vert for bones
                        int vertKey = Integer.parseInt(st.nextToken());
                        ArrayList<MeshVertPair> storage = vertMap.get(Integer
                                .valueOf(vertKey));
                        if (storage == null) {
                            storage = new ArrayList<MeshVertPair>();
                            storage.add(new MeshVertPair(triangleIndex, j));
                            vertMap.put(Integer.valueOf(vertKey), storage);
                        } else {
                            storage.add(new MeshVertPair(triangleIndex, j));
                        }
                        BufferUtils.setInBuffer(v[vertKey], vertBuffer, j);
                        // Skip the interleaved indices for the other inputs.
                        for (int k = 0; k < maxOffset; k++) {
                            st.nextToken();
                        }
                    }
                    triMesh.setVertexBuffer(vertBuffer);
                } else if ("NORMAL".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the normal buffer
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int normCount = tri.getcount().intValue() * 3;
                    FloatBuffer normBuffer = BufferUtils
                            .createVector3Buffer(normCount);
                    // Advance to this input's position within the interleaved
                    // index stream.
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    for (int j = 0; j < normCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        if (index < v.length)
                            BufferUtils.setInBuffer(v[index], normBuffer, j);
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    triMesh.setNormalBuffer(normBuffer);
                } else if ("TANGENT".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the tangent buffer
                    // NOTE(review): tangents are packed into the COLOR buffer
                    // here, remapped from [-1,1] to [0,1] — presumably consumed
                    // by a shader; confirm before changing.
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int normCount = tri.getcount().intValue() * 3;
                    FloatBuffer colorBuffer = BufferUtils
                            .createColorBuffer(normCount);
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    for (int j = 0; j < normCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        if (index < v.length) {
                            colorBuffer.put((-v[index].x) / 2.0f + 0.5f);
                            colorBuffer.put((-v[index].y) / 2.0f + 0.5f);
                            colorBuffer.put((-v[index].z) / 2.0f + 0.5f);
                            colorBuffer.put(0.0f);
                        }
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    triMesh.setColorBuffer(colorBuffer);
                } else if ("TEXCOORD".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the texture buffer
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int texCount = tri.getcount().intValue() * 3;
                    FloatBuffer texBuffer = BufferUtils
                            .createVector2Buffer(texCount);
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    int set = tri.getinputAt(i).getset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    // Keep a max to set the wrap mode (if it's 1, clamp, if
                    // it's > 1 || < 0 wrap it)
                    float maxX = -10;
                    float maxY = -10;
                    float minX = 10;
                    float minY = 10;
                    Vector2f tempTexCoord = new Vector2f();
                    for (int j = 0; j < texCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        Vector3f value = v[index];
                        if (value.x > maxX) {
                            maxX = value.x;
                        }
                        if (value.x < minX) {
                            minX = value.x;
                        }
                        if (value.y > maxY) {
                            maxY = value.y;
                        }
                        if (value.y < minY) {
                            minY = value.y;
                        }
                        tempTexCoord.set(value.x, value.y);
                        BufferUtils.setInBuffer(tempTexCoord, texBuffer, j);
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    // COLLADA texcoord sets are 1-based; jME units are 0-based.
                    int unit;
                    if (set == 0) {
                        unit = 0;
                    } else {
                        unit = set - 1;
                    }
                    triMesh.setTextureCoords(new TexCoords(texBuffer,2), unit);
                    // Set the wrap mode, check if the mesh has a texture
                    // first, if not check the geometry.
                    // Then, based on the texture coordinates, we may need to
                    // change it from the default.
                    //XXX: not a good way of doing it
                    // TextureState ts = (TextureState) triMesh
                    // .getRenderState(RenderState.RS_TEXTURE);
                    // if (ts == null) {
                    // ts = (TextureState) triMesh
                    // .getRenderState(RenderState.RS_TEXTURE);
                    // }
                    // if (ts != null) {
                    // Texture t = ts.getTexture(unit);
                    // if (t != null) {
                    // if (maxX > 1 || minX < 0) {
                    // t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
                    // } else {
                    // t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Clamp);
                    // }
                    //
                    // if (maxY > 1 || minY < 0) {
                    // t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
                    // } else {
                    // t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Clamp);
                    // }
                    // }
                    // }
                } else if ("COLOR".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the texture buffer
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int colorCount = tri.getcount().intValue() * 3;
                    FloatBuffer colorBuffer = BufferUtils
                            .createColorBuffer(colorCount);
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    ColorRGBA tempColor = new ColorRGBA();
                    for (int j = 0; j < colorCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        Vector3f value = v[index];
                        tempColor.set(value.x, value.y, value.z, 1);
                        BufferUtils.setInBuffer(tempColor, colorBuffer, j);
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    triMesh.setColorBuffer(colorBuffer);
                }
            }
            triMesh.setModelBound(new BoundingBox());
            triMesh.updateModelBound();
            //XXX: not parenting under a node when only one mesh needs to be fixed!! /rherlitz
            // if (mesh.gettrianglesCount() == 1) {
            // return triMesh;
            // }
            parentNode.attachChild(triMesh);
        }
        return parentNode;
    }
/**
* TODO: this implementation is a quick hack to import triangles supplied in
* polygon form... processPolygonMesh will process the polygons tag from the
* mesh section of the COLLADA file. A jME TriMesh is returned that defines
* the vertices, indices, normals, texture coordinates and colors.
*
* @param mesh
* the meshType to process for the trimesh.
* @param geom
* the geometryType of the TriMesh to build.
* @return the jME tri mesh representing the COLLADA mesh.
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private Spatial processPolygonMesh(meshType mesh, geometryType geom)
throws Exception {
HashMap<Integer, ArrayList<MeshVertPair>> vertMap = new HashMap<Integer, ArrayList<MeshVertPair>>();
put(geom.getid().toString() + "VertMap", vertMap);
Node parentNode = new Node(geom.getid().toString());
for (int triangleIndex = 0; triangleIndex < mesh.getpolygonsCount(); triangleIndex++) {
polygonsType poly = mesh.getpolygonsAt(triangleIndex);
TriMesh triMesh = new TriMesh(geom.getid().toString());
if (poly.hasmaterial()) {
// first set the appropriate materials to this mesh.
String matKey = (String) resourceLibrary.get(poly.getmaterial()
.toString());
ColladaMaterial cm = (ColladaMaterial) resourceLibrary
.get(matKey);
if (cm != null) {
for (int i = 0; i < RenderState.RS_MAX_STATE; i++) {
if (cm.getState(i) != null) {
if (cm.getState(i).getType() == RenderState.RS_BLEND) {
triMesh
.setRenderQueueMode(Renderer.QUEUE_TRANSPARENT);
}
// clone the state as different mesh's may have
// different
// attributes
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
BinaryExporter.getInstance().save(
cm.getState(i), out);
ByteArrayInputStream in = new ByteArrayInputStream(
out.toByteArray());
RenderState rs = (RenderState) BinaryImporter
.getInstance().load(in);
triMesh.setRenderState(rs);
} catch (IOException e) {
logger
.throwing(
this.getClass().toString(),
"processTriMesh(meshType mesh, geometryType geom)",
e);
}
}
}
ArrayList<Controller> cList = cm.getControllerList();
if (cList != null) {
for (int c = 0; c < cList.size(); c++) {
if (cList.get(c) instanceof TextureKeyframeController) {
TextureState ts = (TextureState) triMesh
.getRenderState(RenderState.RS_TEXTURE);
if (ts != null) {
// allow wrapping, as animated textures will
// almost always need it.
ts.getTexture().setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
ts.getTexture().setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
((TextureKeyframeController) cList.get(c))
.setTexture(ts.getTexture());
}
}
}
}
if (mesh.hasextra()) {
for (int i = 0; i < mesh.getextraCount(); i++) {
try {
ExtraPluginManager.processExtra(triMesh, mesh
.getextraAt(i));
} catch (Exception e) {
if (!squelch) {
logger
.log(
Level.INFO,
"Error processing extra information for mesh",
e);
}
}
}
}
}
subMaterialLibrary.put(triMesh, poly.getmaterial().toString());
}
// build the index buffer, this is going to be easy as it's only
// 0...N where N is the number of vertices in the model.
IntBuffer indexBuffer = BufferUtils.createIntBuffer(poly.getcount()
.intValue() * 3);
for (int i = 0; i < indexBuffer.capacity(); i++) {
indexBuffer.put(i);
}
triMesh.setIndexBuffer(indexBuffer);
// find the maximum offset to understand the stride
int maxOffset = -1;
for (int i = 0; i < poly.getinputCount(); i++) {
int temp = poly.getinputAt(i).getoffset().intValue();
if (maxOffset < temp) {
maxOffset = temp;
}
}
int stride = maxOffset + 1;
// next build the other buffers, based on the input semantic
for (int i = 0; i < poly.getinputCount(); i++) {
if ("VERTEX"
.equals(poly.getinputAt(i).getsemantic().toString())) {
// build the vertex buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = null;
int vertCount = poly.getcount().intValue() * stride;
FloatBuffer vertBuffer = BufferUtils
.createVector3Buffer(vertCount);
triMesh.setVertexCount(vertCount);
for (int j = 0; j < vertCount; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
// need to store the index in p to what j is for later
// processing the index to the vert for bones
int vertKey = Integer.parseInt(st.nextToken());
ArrayList<MeshVertPair> storage = vertMap.get(Integer
.valueOf(vertKey));
if (storage == null) {
storage = new ArrayList<MeshVertPair>();
storage.add(new MeshVertPair(triangleIndex, j));
vertMap.put(Integer.valueOf(vertKey), storage);
} else {
storage.add(new MeshVertPair(triangleIndex, j));
}
BufferUtils.setInBuffer(v[vertKey], vertBuffer, j);
for (int k = 0; k < maxOffset; k++) {
st.nextToken();
}
}
triMesh.setVertexBuffer(vertBuffer);
} else if ("NORMAL".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the normal buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = null;
int normCount = poly.getcount().intValue() * stride;
FloatBuffer normBuffer = BufferUtils
.createVector3Buffer(normCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
st.nextToken();
}
for (int j = 0; j < normCount; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
int index = Integer.parseInt(st.nextToken());
if (index < v.length)
BufferUtils.setInBuffer(v[index], normBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setNormalBuffer(normBuffer);
} else if ("TANGENT".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the tangent buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int normCount = poly.getcount().intValue() * 3;
FloatBuffer normBuffer = BufferUtils
.createVector3Buffer(normCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
st.nextToken();
}
for (int j = 0; j < normCount; j++) {
int index = Integer.parseInt(st.nextToken());
if (index < v.length)
BufferUtils.setInBuffer(v[index], normBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setTangentBuffer(normBuffer);
logger.info("setting tangent buffer: " + normBuffer);
} else if ("BINORMAL".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the tangent buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int normCount = poly.getcount().intValue() * 3;
FloatBuffer normBuffer = BufferUtils
.createVector3Buffer(normCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
st.nextToken();
}
for (int j = 0; j < normCount; j++) {
int index = Integer.parseInt(st.nextToken());
if (index < v.length)
BufferUtils.setInBuffer(v[index], normBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setBinormalBuffer(normBuffer);
} else if ("TEXCOORD".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the texture buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int texCount = poly.getcount().intValue() * stride;
FloatBuffer texBuffer = BufferUtils
.createVector2Buffer(texCount);
int offset = poly.getinputAt(i).getoffset().intValue();
int set = poly.getinputAt(i).getset().intValue();
for (int j = 0; j < offset; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
st.nextToken();
}
// Keep a max to set the wrap mode (if it's 1, clamp, if
// it's > 1 wrap it)
float maxX = -1, maxY = -1;
float minX = 1, minY = 1;
Vector2f tempTexCoord = new Vector2f();
for (int j = 0; j < texCount; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
int index = Integer.parseInt(st.nextToken());
Vector3f value = v[index];
if (value.x > maxX) {
maxX = value.x;
}
if (value.x < minX) {
minX = value.x;
}
if (value.y > maxY) {
maxY = value.y;
}
if (value.y < minY) {
minY = value.y;
}
tempTexCoord.set(value.x, value.y);
BufferUtils.setInBuffer(tempTexCoord, texBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
int unit;
if (set == 0) {
unit = 0;
} else {
unit = set - 1;
}
triMesh.setTextureCoords(new TexCoords(texBuffer,2), unit);
// Set the wrap mode, check if the mesh has a texture
// first, if not
// check the geometry.
// Then, based on the texture coordinates, we may need to
// change it from the
// default.
//XXX: not a good way of doing it
// TextureState ts = (TextureState) triMesh
// .getRenderState(RenderState.RS_TEXTURE);
// if (ts == null) {
// ts = (TextureState) triMesh
// .getRenderState(RenderState.RS_TEXTURE);
// }
// if (ts != null) {
// Texture t = ts.getTexture(unit);
// if (t != null) {
// if (maxX > 1 || minX < 0) {
// t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
// } else {
// t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Clamp);
// }
//
// if (maxY > 1 || minY < 0) {
// t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
// } else {
// t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Clamp);
// }
// }
// }
} else if ("COLOR".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the texture buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int colorCount = poly.getcount().intValue() * 3;
FloatBuffer colorBuffer = BufferUtils
.createColorBuffer(colorCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
st.nextToken();
}
ColorRGBA tempColor = new ColorRGBA();
for (int j = 0; j < colorCount; j++) {
int index = Integer.parseInt(st.nextToken());
Vector3f value = v[index];
tempColor.set(value.x, value.y, value.z, 1);
BufferUtils.setInBuffer(tempColor, colorBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setColorBuffer(colorBuffer);
}
}
triMesh.setModelBound(new BoundingBox());
triMesh.updateModelBound();
if (mesh.gettrianglesCount() == 1) {
return triMesh;
}
parentNode.attachChild(triMesh);
}
return parentNode;
}
/**
* processLines will process the lines tag from the mesh section of the
* COLLADA file. A jME Line is returned that defines the vertices, normals,
* texture coordinates and colors.
*
* @param mesh
* the meshType to process for the lines.
* @param geom
* the geomType for the lines
* @return the jME tri mesh representing the COLLADA mesh.
*/
private Spatial processLines(meshType mesh, geometryType geom) {
if (!squelch) {
logger.warning("Line are not supported.");
}
return null;
}
/**
* the nodes library is a collection of nodes that can be instanced later by
* the visual scene.
*
* @param type
* the nodes library to process.
* @throws Exception
* thrown if there is a problem with the processing.
*/
private void processNodes(library_nodesType type)
throws Exception {
Node tempParent = new Node("temp_parent");
for (int i = 0; i < type.getnodeCount(); i++) {
processNode(type.getnodeAt(i), tempParent);
}
// should all be in the resource library now.
}
/**
* The library of visual scenes defines how the loaded geometry is stored in
* the scene graph, including scaling, translation, rotation, etc.
*
* @param libScene
* the library of scenes
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processVisualSceneLibrary(library_visual_scenesType libScene)
throws Exception {
for (int i = 0; i < libScene.getvisual_sceneCount(); i++) {
Node scene = new Node(libScene.getvisual_sceneAt(i).getid()
.toString());
put(scene.getName(), scene);
processVisualScene(libScene.getvisual_sceneAt(i), scene);
}
}
/**
* the visual scene will contain any number of nodes that define references
* to geometry. These are then placed into the scene as needed.
*
* @param scene
* the scene to process.
* @param node
* the jME node to attach this scene to.
* @throws Exception
* thrown if there is a problem with the processing.
*/
private void processVisualScene(visual_sceneType scene, Node node)
throws Exception {
for (int i = 0; i < scene.getnodeCount(); i++) {
processNode(scene.getnodeAt(i), node);
}
for (int i = 0; i < node.getQuantity(); i++) {
Spatial s = node.getChild(i);
if (s instanceof Bone) {
s.updateGeometricState(0, true);
s.removeFromParent();
node.attachChild(s);
}
}
}
/**
 * Processes a single COLLADA <node> element: creates the corresponding jME
 * Node (or reuses/creates a Bone for JOINT nodes), resolves any instanced
 * cameras, controllers, geometry, nodes and lights, applies the local
 * transform (translate / rotate / matrix / scale), and finally recurses
 * into child <node> elements.
 *
 * @param xmlNode
 *            the COLLADA node element to process.
 * @param parent
 *            the jME node the resulting child is attached to.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processNode(nodeType2 xmlNode, Node parent) throws Exception {
    // Name preference order: id, then sid, then name.
    String childName = null;
    if (xmlNode.hasid())
        childName = xmlNode.getid().toString();
    else if (xmlNode.hassid())
        childName = xmlNode.getsid().toString();
    else if (xmlNode.hasname())
        childName = xmlNode.getname().toString();
    Node child = null;
    // JOINT nodes become Bones; the Bone usually already exists in the
    // resource library (created while processing the skin controller).
    if (xmlNode.hastype() && "JOINT".equals(xmlNode.gettype().toString())
            && (xmlNode.hassid() || xmlNode.hasid())) {
        String key = (xmlNode.hassid() ? xmlNode.getsid() : xmlNode.getid())
                .toString();
        child = (Bone) resourceLibrary.get(key);
        if (child == null) {
            // Bone referenced by no vertices: create it anyway so the
            // hierarchy stays intact, but warn the user.
            child = new Bone(key);
            put(key, child);
            if (!squelch) {
                logger.warning("Bone " + key
                        + " is not attached to any vertices.");
            }
        }
        // A joint whose parent is not a Bone is a skeleton root.
        if (!(parent instanceof Bone)) {
            if (skeletonNames == null) {
                skeletonNames = new ArrayList<String>();
            }
            skeletonNames.add(key);
        }
    }
    // Extra plugins may supply a replacement Node for this element.
    if (xmlNode.hasextra()) {
        for (int i = 0; i < xmlNode.getextraCount(); i++) {
            try {
                Object o = ExtraPluginManager.processExtra(childName,
                        xmlNode.getextraAt(i));
                if (o instanceof Node) {
                    child = (Node) o;
                }
            } catch (Exception e) {
                if (!squelch) {
                    logger.log(Level.WARNING,
                            "Error processing extra information", e);
                }
            }
        }
    }
    if (child == null) {
        child = new Node(childName);
    }
    parent.attachChild(child);
    put(childName, child);
    if (xmlNode.hasinstance_camera()) {
        for (int i = 0; i < xmlNode.getinstance_cameraCount(); i++) {
            processInstanceCamera(xmlNode.getinstance_cameraAt(i), child);
        }
    }
    // this node has a skeleton and skin
    if (xmlNode.hasinstance_controller()) {
        for (int i = 0; i < xmlNode.getinstance_controllerCount(); i++) {
            processInstanceController(xmlNode.getinstance_controllerAt(i),
                    child);
        }
    }
    if (xmlNode.hasinstance_geometry()) {
        for (int i = 0; i < xmlNode.getinstance_geometryCount(); i++) {
            processInstanceGeom(xmlNode.getinstance_geometryAt(i), child);
        }
    }
    if (xmlNode.hasinstance_node()) {
        for (int i = 0; i < xmlNode.getinstance_nodeCount(); i++) {
            processInstanceNode(xmlNode.getinstance_nodeAt(i), child);
        }
    }
    if (xmlNode.hasinstance_light()) {
        for (int i = 0; i < xmlNode.getinstance_lightCount(); i++) {
            processInstanceLight(xmlNode.getinstance_lightAt(i), child);
        }
    }
    // parse translation
    if (xmlNode.hastranslate()) {
        Vector3f translate = new Vector3f();
        StringTokenizer st = new StringTokenizer(xmlNode.gettranslate()
                .getValue().toString());
        translate.x = Float.parseFloat(st.nextToken());
        translate.y = Float.parseFloat(st.nextToken());
        translate.z = Float.parseFloat(st.nextToken());
        child.setLocalTranslation(translate);
    }
    // Accumulate successive axis/angle <rotate> elements into a single
    // quaternion (applied in document order).
    if (xmlNode.hasrotate()) {
        Quaternion rotation = null;
        for (int i = 0; i < xmlNode.getrotateCount(); i++) {
            Quaternion temp = new Quaternion();
            Vector3f axis = new Vector3f();
            StringTokenizer st = new StringTokenizer(xmlNode.getrotateAt(i)
                    .getValue().toString());
            axis.x = Float.parseFloat(st.nextToken());
            axis.y = Float.parseFloat(st.nextToken());
            axis.z = Float.parseFloat(st.nextToken());
            axis.normalizeLocal();
            // COLLADA angles are in degrees; jME expects radians.
            float angle = Float.parseFloat(st.nextToken());
            angle *= FastMath.DEG_TO_RAD;
            temp.fromAngleNormalAxis(angle, axis);
            if (rotation == null) {
                rotation = new Quaternion();
                rotation.set(temp);
            } else {
                rotation.multLocal(temp);
            }
        }
        child.setLocalRotation(rotation);
    }
    // A full 4x4 <matrix> is decomposed into translation, scale and a
    // pure rotation (scale divided back out of the upper-left 3x3).
    if (xmlNode.hasmatrix()) {
        Matrix4f tm = new Matrix4f();
        StringTokenizer st = new StringTokenizer(xmlNode.getmatrix()
                .getValue().toString());
        float[] data = new float[16];
        for (int x = 0; x < 16; x++) {
            data[x] = Float.parseFloat(st.nextToken());
        }
        tm.set(data, true); // collada matrices are in row order.
        child.setLocalTranslation(tm.toTranslationVector());
        // find scale
        Vector3f vCol1 = new Vector3f(tm.m00, tm.m10, tm.m20);
        Vector3f vCol2 = new Vector3f(tm.m01, tm.m11, tm.m21);
        Vector3f vCol3 = new Vector3f(tm.m02, tm.m12, tm.m22);
        float scaleX = vCol1.length();
        float scaleY = vCol2.length();
        float scaleZ = vCol3.length();
        child.setLocalScale(new Vector3f(scaleX, scaleY, scaleZ));
        Matrix3f rm = new Matrix3f();
        rm.m00 = tm.m00 / scaleX;
        rm.m10 = tm.m10 / scaleX;
        rm.m20 = tm.m20 / scaleX;
        rm.m01 = tm.m01 / scaleY;
        rm.m11 = tm.m11 / scaleY;
        rm.m21 = tm.m21 / scaleY;
        rm.m02 = tm.m02 / scaleZ;
        rm.m12 = tm.m12 / scaleZ;
        rm.m22 = tm.m22 / scaleZ;
        Quaternion q = new Quaternion().fromRotationMatrix(rm);
        //Quaternion q = tm.toRotationQuat();
        //float scale = FastMath.sqrt(q.norm());
        //System.out.println(scale);
        //q.normalize();
        child.setLocalRotation(q);
    }
    if (xmlNode.hasscale()) {
        Vector3f scale = new Vector3f();
        StringTokenizer st = new StringTokenizer(xmlNode.getscale()
                .getValue().toString());
        scale.x = Float.parseFloat(st.nextToken());
        scale.y = Float.parseFloat(st.nextToken());
        scale.z = Float.parseFloat(st.nextToken());
        child.setLocalScale(scale);
    }
    // parse subnodes
    if (xmlNode.hasnode()) {
        for (int i = 0; i < xmlNode.getnodeCount(); i++) {
            processNode(xmlNode.getnodeAt(i), child);
        }
    }
}
/**
 * Resolves an <instance_camera> reference and, if the referenced
 * CameraNode exists in the resource library, attaches it to the given
 * node. Unresolvable references are silently ignored.
 *
 * @param camera
 *            the instance element carrying the camera URL.
 * @param node
 *            the node to attach the camera to.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processInstanceCamera(InstanceWithExtra camera, Node node)
        throws Exception {
    String url = camera.geturl().toString();
    // Local references are prefixed with '#'; strip it for the lookup.
    String lookup = url.startsWith("#") ? url.substring(1) : url;
    CameraNode cameraNode = (CameraNode) resourceLibrary.get(lookup);
    if (cameraNode != null) {
        node.attachChild(cameraNode);
    }
}
/**
 * Resolves an <instance_light> reference and, if the referenced LightNode
 * exists in the resource library, attaches it to the given node.
 * Unresolvable references are silently ignored.
 *
 * @param light
 *            the instance element carrying the light URL.
 * @param node
 *            the node to attach the light to.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processInstanceLight(InstanceWithExtra light, Node node)
        throws Exception {
    String url = light.geturl().toString();
    // Local references are prefixed with '#'; strip it for the lookup.
    String lookup = url.startsWith("#") ? url.substring(1) : url;
    LightNode lightNode = (LightNode) resourceLibrary.get(lookup);
    if (lightNode != null) {
        node.attachChild(lightNode);
    }
}
/**
 * Resolves an <instance_controller> reference: attaches the referenced
 * SkinNode to the given node, wires up its skeleton (when declared) and
 * processes any bound materials.
 *
 * Fix: the original dereferenced the looked-up SkinNode in the skeleton
 * and bind_material branches even when the library lookup returned null,
 * causing a NullPointerException for dangling references; we now warn and
 * return early instead.
 *
 * @param controller
 *            the instance element referencing a skin controller.
 * @param node
 *            the node to attach the skin to.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processInstanceController(instance_controllerType controller,
        Node node) throws Exception {
    String key = controller.geturl().toString();
    if (key.startsWith("#")) {
        key = key.substring(1);
    }
    SkinNode sNode = (SkinNode) resourceLibrary.get(key);
    if (sNode == null) {
        // Dangling reference: nothing to attach, and the skeleton /
        // material handling below would NPE — bail out after warning.
        if (!squelch) {
            logger.warning("Instance " + key + " does not exist.");
        }
        return;
    }
    node.attachChild(sNode);
    if (controller.hasskeleton()) {
        // jME skins support a single skeleton; extra ones are ignored.
        if (controller.getskeletonCount() > 1 && !squelch) {
            logger.warning("Controller has more than one skeleton.");
        }
        String url = controller.getskeleton().getValue();
        if (url.startsWith("#")) {
            url = url.substring(1);
        }
        Bone b = (Bone) resourceLibrary.get(url);
        if (b != null) {
            sNode.setSkeleton(b);
        }
    }
    if (controller.hasbind_material()) {
        processBindMaterial(controller.getbind_material(), sNode.getSkins());
    }
}
/**
 * Resolves an <instance_node> reference and attaches the referenced
 * spatial to the parent. A referenced Node is wrapped in a SharedNode so
 * the same subtree can be instanced multiple times without duplication.
 *
 * @param instance
 *            the instance element carrying the node URL.
 * @param parent
 *            the node to attach the instanced spatial to.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processInstanceNode(InstanceWithExtra instance, Node parent)
        throws Exception {
    String reference = instance.geturl().toString();
    // Local references are prefixed with '#'; strip it for the lookup.
    if (reference.startsWith("#")) {
        reference = reference.substring(1);
    }
    Spatial resolved = (Spatial) resourceLibrary.get(reference);
    if (resolved == null) {
        return;
    }
    if (resolved instanceof Node) {
        resolved = new SharedNode(reference, (Node) resolved);
    }
    parent.attachChild(resolved);
}
/**
 * Resolves an <instance_geometry> reference and attaches the referenced
 * geometry to the node. TriMeshes and Nodes are wrapped in their shared
 * variants so one mesh can be instanced repeatedly, and any bound
 * materials are applied to the new instance.
 *
 * @param geometry
 *            the instance element carrying the geometry URL.
 * @param node
 *            the node to attach the instanced geometry to.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processInstanceGeom(instance_geometryType geometry, Node node)
        throws Exception {
    String reference = geometry.geturl().toString();
    // Local references are prefixed with '#'; strip it for the lookup.
    if (reference.startsWith("#")) {
        reference = reference.substring(1);
    }
    Spatial resolved = (Spatial) resourceLibrary.get(reference);
    if (resolved == null) {
        return;
    }
    if (resolved instanceof TriMesh) {
        resolved = new SharedMesh(reference, (TriMesh) resolved);
    } else if (resolved instanceof Node) {
        resolved = new SharedNode(reference, (Node) resolved);
    }
    node.attachChild(resolved);
    if (geometry.hasbind_material()) {
        processBindMaterial(geometry.getbind_material(), resolved);
    }
}
/**
 * processInstanceMaterial binds a previously parsed ColladaMaterial to the
 * geometry it targets. When the bind target is a Node, the specific
 * sub-mesh whose exported material symbol matches this instance is
 * located first. Each render state is cloned (via a binary export/import
 * round trip) before being applied, so meshes sharing an effect can still
 * diverge in their state attributes later.
 *
 * @param material
 *            the instance_material element to resolve.
 * @param geomBindTo
 *            the spatial (or parent node of the spatial) the material
 *            symbol is bound to.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processInstanceMaterial(instance_materialType material,
        Spatial geomBindTo) throws Exception {
    String key = material.gettarget().toString();
    if (key.startsWith("#")) {
        key = key.substring(1);
    }
    // Double lookup: the material id presumably resolves to an effect id,
    // which in turn resolves to the ColladaMaterial — verify against the
    // material/effect library processing if lookups come back null.
    ColladaMaterial cm = (ColladaMaterial) resourceLibrary
            .get(resourceLibrary.get(key));
    Spatial target = geomBindTo;
    String symbol = material.getsymbol().toString();
    if (target instanceof Node) {
        // Find the sub-mesh registered under this material symbol.
        Node targetNode = (Node) target;
        for (int i = 0; i < targetNode.getQuantity(); ++i) {
            Spatial child = targetNode.getChild(i);
            if (child instanceof TriMesh
                    && symbol.equals(subMaterialLibrary.get(child))) {
                target = child;
                break;
            } else if (child instanceof SharedMesh
                    && symbol.equals(subMaterialLibrary
                            .get(((SharedMesh) child).getTarget()))) {
                target = child;
                break;
            }
        }
    }
    if (cm != null) {
        for (int i = 0; i < RenderState.RS_MAX_STATE; ++i) {
            if (cm.getState(i) != null) {
                if (cm.getState(i).getType() == RenderState.RS_BLEND) {
                    // Blended materials must go through the transparent
                    // queue to be depth-sorted correctly.
                    target.setRenderQueueMode(Renderer.QUEUE_TRANSPARENT);
                }
                // clone the state as different mesh's may have
                // different
                // attributes
                try {
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    BinaryExporter.getInstance().save(cm.getState(i), out);
                    ByteArrayInputStream in = new ByteArrayInputStream(out
                            .toByteArray());
                    RenderState rs = (RenderState) BinaryImporter
                            .getInstance().load(in);
                    target.setRenderState(rs);
                } catch (IOException e) {
                    logger.log(Level.WARNING, "Error cloning state", e);
                }
            }
        }
    }
}
/**
 * getColor uses a string tokenizer to parse the value of a colorType into
 * a ColorRGBA type used internally by jME.
 *
 * Robustness fix: some exporters emit only three components (RGB); the
 * alpha component now defaults to 1 (fully opaque) instead of the parse
 * failing with NoSuchElementException.
 *
 * @param color
 *            the colorType to parse (RGB or RGBA format).
 * @return the ColorRGBA object to be used by jME.
 */
private ColorRGBA getColor(colorType color) {
    ColorRGBA out = new ColorRGBA();
    StringTokenizer st = new StringTokenizer(color.getValue().toString());
    out.r = Float.parseFloat(st.nextToken());
    out.g = Float.parseFloat(st.nextToken());
    out.b = Float.parseFloat(st.nextToken());
    // Alpha is optional in some exported files; default to opaque.
    out.a = st.hasMoreTokens() ? Float.parseFloat(st.nextToken()) : 1.0f;
    return out;
}
/**
 * MeshVertPair simply contain a mesh index and a vertex index. This defines
 * where a specific vertex may be found.
 */
private class MeshVertPair {
    // Index of the mesh within the model.
    public int mesh;
    // Index of the vertex within that mesh.
    public int index;
    /**
     * MeshVertPair
     *
     * @param mesh
     *            the index of the mesh containing the vertex.
     * @param index
     *            the index of the vertex within the mesh.
     */
    public MeshVertPair(int mesh, int index) {
        this.mesh = mesh;
        this.index = index;
    }
}
/**
 * squelchErrors sets if the ColladaImporter should spit out errors or not
 *
 * @param b
 *            true to suppress import warnings, false to log them.
 */
public static void squelchErrors(boolean b) {
    squelch = b;
}
/**
 * Returns the current singleton importer instance, or null if no model
 * has been loaded (or cleanUp has been called).
 */
public static ColladaImporter getInstance() {
    return instance;
}
}
| src/com/jmex/model/collada/ColladaImporter.java | /*
* Copyright (c) 2003-2008 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jmex.model.collada;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jme.animation.Bone;
import com.jme.animation.BoneAnimation;
import com.jme.animation.BoneTransform;
import com.jme.animation.SkinNode;
import com.jme.animation.TextureKeyframeController;
import com.jme.bounding.BoundingBox;
import com.jme.image.Image;
import com.jme.image.Texture;
import com.jme.image.Texture.WrapAxis;
import com.jme.light.DirectionalLight;
import com.jme.light.Light;
import com.jme.light.LightNode;
import com.jme.light.PointLight;
import com.jme.light.SpotLight;
import com.jme.math.FastMath;
import com.jme.math.Matrix3f;
import com.jme.math.Matrix4f;
import com.jme.math.Quaternion;
import com.jme.math.Vector2f;
import com.jme.math.Vector3f;
import com.jme.renderer.Camera;
import com.jme.renderer.ColorRGBA;
import com.jme.renderer.Renderer;
import com.jme.scene.CameraNode;
import com.jme.scene.Controller;
import com.jme.scene.Geometry;
import com.jme.scene.Node;
import com.jme.scene.SharedMesh;
import com.jme.scene.SharedNode;
import com.jme.scene.Spatial;
import com.jme.scene.TexCoords;
import com.jme.scene.TriMesh;
import com.jme.scene.state.BlendState;
import com.jme.scene.state.ClipState;
import com.jme.scene.state.ColorMaskState;
import com.jme.scene.state.CullState;
import com.jme.scene.state.FogState;
import com.jme.scene.state.MaterialState;
import com.jme.scene.state.RenderState;
import com.jme.scene.state.ShadeState;
import com.jme.scene.state.StencilState;
import com.jme.scene.state.TextureState;
import com.jme.scene.state.ZBufferState;
import com.jme.system.DisplaySystem;
import com.jme.util.TextureManager;
import com.jme.util.export.binary.BinaryExporter;
import com.jme.util.export.binary.BinaryImporter;
import com.jme.util.geom.BufferUtils;
import com.jme.util.geom.GeometryTool;
import com.jme.util.geom.VertMap;
import com.jme.util.resource.ResourceLocatorTool;
import com.jmex.model.collada.schema.COLLADAType;
import com.jmex.model.collada.schema.IDREF_arrayType;
import com.jmex.model.collada.schema.InstanceWithExtra;
import com.jmex.model.collada.schema.Name_arrayType;
import com.jmex.model.collada.schema.TargetableFloat3;
import com.jmex.model.collada.schema.accessorType;
import com.jmex.model.collada.schema.animationType;
import com.jmex.model.collada.schema.assetType;
import com.jmex.model.collada.schema.bind_materialType;
import com.jmex.model.collada.schema.cameraType;
import com.jmex.model.collada.schema.collada_schema_1_4_1Doc;
import com.jmex.model.collada.schema.colorType;
import com.jmex.model.collada.schema.common_newparam_type;
import com.jmex.model.collada.schema.controllerType;
import com.jmex.model.collada.schema.effectType;
import com.jmex.model.collada.schema.float4x4;
import com.jmex.model.collada.schema.float_arrayType;
import com.jmex.model.collada.schema.fx_sampler2D_common;
import com.jmex.model.collada.schema.fx_surface_common;
import com.jmex.model.collada.schema.geometryType;
import com.jmex.model.collada.schema.imageType;
import com.jmex.model.collada.schema.instance_controllerType;
import com.jmex.model.collada.schema.instance_geometryType;
import com.jmex.model.collada.schema.instance_materialType;
import com.jmex.model.collada.schema.instance_physics_modelType;
import com.jmex.model.collada.schema.lambertType;
import com.jmex.model.collada.schema.library_animationsType;
import com.jmex.model.collada.schema.library_camerasType;
import com.jmex.model.collada.schema.library_controllersType;
import com.jmex.model.collada.schema.library_effectsType;
import com.jmex.model.collada.schema.library_geometriesType;
import com.jmex.model.collada.schema.library_imagesType;
import com.jmex.model.collada.schema.library_lightsType;
import com.jmex.model.collada.schema.library_materialsType;
import com.jmex.model.collada.schema.library_nodesType;
import com.jmex.model.collada.schema.library_physics_modelsType;
import com.jmex.model.collada.schema.library_physics_scenesType;
import com.jmex.model.collada.schema.library_visual_scenesType;
import com.jmex.model.collada.schema.lightType;
import com.jmex.model.collada.schema.materialType;
import com.jmex.model.collada.schema.meshType;
import com.jmex.model.collada.schema.nodeType2;
import com.jmex.model.collada.schema.opticsType;
import com.jmex.model.collada.schema.orthographicType;
import com.jmex.model.collada.schema.paramType3;
import com.jmex.model.collada.schema.passType3;
import com.jmex.model.collada.schema.perspectiveType;
import com.jmex.model.collada.schema.phongType;
import com.jmex.model.collada.schema.physics_modelType;
import com.jmex.model.collada.schema.physics_sceneType;
import com.jmex.model.collada.schema.polygonsType;
import com.jmex.model.collada.schema.rigid_bodyType;
import com.jmex.model.collada.schema.sceneType;
import com.jmex.model.collada.schema.shapeType2;
import com.jmex.model.collada.schema.skinType;
import com.jmex.model.collada.schema.sourceType;
import com.jmex.model.collada.schema.techniqueType2;
import com.jmex.model.collada.schema.techniqueType4;
import com.jmex.model.collada.schema.technique_commonType;
import com.jmex.model.collada.schema.technique_commonType2;
import com.jmex.model.collada.schema.technique_commonType4;
import com.jmex.model.collada.schema.textureType;
import com.jmex.model.collada.schema.trianglesType;
import com.jmex.model.collada.schema.vertex_weightsType;
import com.jmex.model.collada.schema.visual_sceneType;
/**
* <code>ColladaNode</code> provides a mechanism to parse and load a COLLADA
* (COLLAborative Design Activity) model. Making use of a DOM parse, the XML
* formatted COLLADA file is parsed into Java Type classes and then processed by
* jME. This processing is currently aimed at the 1.4.1 release of the COLLADA
* Specification, and will, in most likelihood, require updating with a new
* release of COLLADA.
*
* @author Mark Powell, Rikard Herlitz, and others
*/
public class ColladaImporter {
private static final Logger logger = Logger.getLogger(ColladaImporter.class
        .getName());
// asset information parsed from the last loaded model's <asset> tag
private String modelAuthor;
private String tool;
private String revision;
private String unitName;
private float unitMeter;
private String upAxis;
// singleton instance used by the static accessors; created by load()
private static ColladaImporter instance;
// name given to the root model node
private String name;
private String[] boneIds;
// when true, warnings produced during import are suppressed
private static boolean squelch;
// If true, models loaded by ColladaImporter will automatically have
// geometry optimization applied. default: true.
public static boolean OPTIMIZE_GEOMETRY = true;
public static OptimizeCallback optimizeCallBack = null;
// everything parsed from the file, keyed by COLLADA id/sid
private Map<String, Object> resourceLibrary;
// names of imported artifacts, exposed through the static getters
private ArrayList<String> controllerNames;
private ArrayList<String> uvControllerNames;
private ArrayList<String> skinNodeNames;
private ArrayList<String> cameraNodeNames;
private ArrayList<String> lightNodeNames;
private ArrayList<String> geometryNames;
private ArrayList<String> skeletonNames;
// arbitrary user data registered via addUserInformation
private Map<String, Object> userInformation;
// maps each sub-mesh to the material symbol it was exported with
private Map<TriMesh, String> subMaterialLibrary;
// root node of the imported model
private Node model;
/**
 * Unique Serial ID for ColladaNode
 */
private static final long serialVersionUID = -4024091270314000507L;
/**
 * Private constructor; instances are created only through the static
 * <code>load</code> method. Only the node name is recorded here — no data
 * is parsed and no Node structure is built until <code>load</code> runs.
 *
 * @param name
 *            the name of the node.
 */
private ColladaImporter(String name) {
    this.name = name;
}
/**
 * Reports whether user information was stored under the given key by a
 * previous call to addUserInformation.
 *
 * @param key
 *            the key to look up.
 * @return true when the key is present in the user information map.
 */
public static boolean hasUserInformation(String key) {
    return instance.userInformation != null
            && instance.userInformation.containsKey(key);
}
/**
 * Stores a piece of user information under the given key, lazily creating
 * the backing map on first use.
 *
 * @param key
 *            the key to store the value under.
 * @param value
 *            the value to store.
 */
public static void addUserInformation(String key, Object value) {
    Map<String, Object> info = instance.userInformation;
    if (info == null) {
        info = new HashMap<String, Object>();
        instance.userInformation = info;
    }
    info.put(key, value);
}
/**
 * Retrieves a piece of user information stored under the given key.
 *
 * @param key
 *            the key to look up.
 * @return the stored value, or null when absent or no map exists yet.
 */
public static Object getUserInformation(String key) {
    Map<String, Object> info = instance.userInformation;
    return info == null ? null : info.get(key);
}
/**
 * load takes the model source as an InputStream and uses the
 * COLLADASchemaDoc object to load it. This is then stored as a hierarchy
 * of data objects. This hierarchy is passed to the processCollada method
 * to build the jME data structures necessary to view the model. The
 * singleton importer instance is created on first use and reused
 * afterwards.
 *
 * @param source
 *            the source to import.
 * @param name
 *            the name of the node.
 */
public static void load(InputStream source, String name) {
    if (instance == null) {
        instance = new ColladaImporter(name);
    }
    instance.load(source);
}
/**
 * load is called by the static load method, creating an instance of the
 * model to be returned. Parsing failures are logged and leave the model
 * node empty rather than propagating an exception.
 *
 * @param source
 *            the source to import.
 */
private void load(InputStream source) {
    model = new Node(name);
    resourceLibrary = new HashMap<String, Object>();
    subMaterialLibrary = new HashMap<TriMesh, String>();
    collada_schema_1_4_1Doc doc = new collada_schema_1_4_1Doc();
    try {
        COLLADAType root = new COLLADAType(doc.load(source));
        logger.info("Version: " + root.getversion().getValue());
        processCollada(root);
    } catch (Exception ex) {
        logger.log(Level.WARNING, "Unable to load Collada file. ", ex);
        return;
    }
}
/**
 * Returns the names of the controllers that affect this imported model,
 * or null if no model has been loaded.
 *
 * @return the list of string values for each controller name.
 */
public static ArrayList<String> getControllerNames() {
    return instance == null ? null : instance.controllerNames;
}
/**
 * Returns the names of the UV animation controllers parsed from the last
 * imported model, or null if no model has been loaded.
 *
 * @return the list of UV controller names, or null.
 */
public static ArrayList<String> getUVControllerNames() {
    return instance == null ? null : instance.uvControllerNames;
}
/**
 * Records the name of a UV animation controller parsed from the file,
 * creating the name list on first use.
 *
 * @param name
 *            the controller name to record.
 */
public static void addUVControllerName(String name) {
    ArrayList<String> names = instance.uvControllerNames;
    if (names == null) {
        names = new ArrayList<String>();
        instance.uvControllerNames = names;
    }
    names.add(name);
}
/**
 * returns the names of the skin nodes that are associated with this
 * imported model, or null if no model has been loaded.
 *
 * @return the names of the skin nodes associated with this model.
 */
public static ArrayList<String> getSkinNodeNames() {
    return instance == null ? null : instance.skinNodeNames;
}
/**
 * Returns the camera node names associated with this model, or null if no
 * model has been loaded.
 *
 * @return the list of camera names that are referenced in this file.
 */
public static ArrayList<String> getCameraNodeNames() {
    return instance == null ? null : instance.cameraNodeNames;
}
/**
 * Returns the light node names associated with this model, or null if no
 * model has been loaded.
 *
 * @return the list of light node names, or null.
 */
public static ArrayList<String> getLightNodeNames() {
    return instance == null ? null : instance.lightNodeNames;
}
/**
 * Returns the skeleton (root bone) names associated with this model, or
 * null if no model has been loaded.
 *
 * @return the list of skeleton names, or null.
 */
public static ArrayList<String> getSkeletonNames() {
    return instance == null ? null : instance.skeletonNames;
}
/**
 * Returns the geometry names associated with this model, or null if no
 * model has been loaded.
 *
 * @return the list of geometry names, or null.
 */
public static ArrayList<String> getGeometryNames() {
    return instance == null ? null : instance.geometryNames;
}
/**
 * Returns the root node of the last imported model, or null if no model
 * has been loaded.
 *
 * @return the imported model's root node, or null.
 */
public static Node getModel() {
    return instance == null ? null : instance.model;
}
/**
 * Looks up a SkinNode by id in the resource library of the current
 * importer instance.
 *
 * @param id
 *            the key the skin node was stored under.
 * @return the SkinNode, or null when absent or no instance exists.
 */
public static SkinNode getSkinNode(String id) {
    return instance == null ? null
            : (SkinNode) instance.resourceLibrary.get(id);
}
/**
 * Looks up a CameraNode by id in the resource library of the current
 * importer instance.
 *
 * @param id
 *            the key the camera node was stored under.
 * @return the CameraNode, or null when absent or no instance exists.
 */
public static CameraNode getCameraNode(String id) {
    return instance == null ? null
            : (CameraNode) instance.resourceLibrary.get(id);
}
/**
 * Looks up a LightNode by id in the resource library of the current
 * importer instance.
 *
 * @param id
 *            the key the light node was stored under.
 * @return the LightNode, or null when absent or no instance exists.
 */
public static LightNode getLightNode(String id) {
    return instance == null ? null
            : (LightNode) instance.resourceLibrary.get(id);
}
/**
 * Looks up an object in the resource library of the current importer
 * instance.
 *
 * Fix: returns null when no importer instance exists — consistent with
 * every other static accessor in this class — instead of throwing a
 * NullPointerException.
 *
 * @param id
 *            the key the object was stored under.
 * @return the stored object, or null when absent or no instance exists.
 */
public static Object get(Object id) {
    if (instance == null) {
        return null;
    }
    return instance.resourceLibrary.get(id);
}
/**
 * Places an object into the resource library under the given key. Storing
 * the identical object again is a no-op. If a different object already
 * occupies the key, a warning is issued (unless squelched) and the
 * previous entry is overwritten.
 *
 * @param key
 *            the key to obtain the object from the library.
 * @param value
 *            the object to store in the library.
 */
public static void put(String key, Object value) {
    Object previous = instance.resourceLibrary.get(key);
    if (previous == value) {
        return;
    }
    if (previous != null && !squelch) {
        logger
                .warning("Key: "
                        + key
                        + " already in use. Overriding previous data. This is probably not"
                        + " desired.");
    }
    instance.resourceLibrary.put(key, value);
}
/**
 * Looks up a BoneAnimation by id in the resource library of the current
 * importer instance.
 *
 * @param id
 *            the key the animation was stored under.
 * @return the BoneAnimation, or null when absent or no instance exists.
 */
public static BoneAnimation getAnimationController(String id) {
    return instance == null ? null
            : (BoneAnimation) instance.resourceLibrary.get(id);
}
/**
 * Looks up a TextureKeyframeController by id in the resource library of
 * the current importer instance.
 *
 * @param id
 *            the key the controller was stored under.
 * @return the controller, or null when absent or no instance exists.
 */
public static TextureKeyframeController getUVAnimationController(String id) {
    return instance == null ? null
            : (TextureKeyframeController) instance.resourceLibrary.get(id);
}
/**
 * Looks up a skeleton root Bone by id in the resource library of the
 * current importer instance.
 *
 * @param id
 *            the key the bone was stored under.
 * @return the Bone, or null when absent or no instance exists.
 */
public static Bone getSkeleton(String id) {
    return instance == null ? null
            : (Bone) instance.resourceLibrary.get(id);
}
/**
 * Looks up a Geometry by id in the resource library of the current
 * importer instance.
 *
 * @param id
 *            the key the geometry was stored under.
 * @return the Geometry, or null when absent or no instance exists.
 */
public static Geometry getGeometry(String id) {
    return instance == null ? null
            : (Geometry) instance.resourceLibrary.get(id);
}
/**
 * Releases the current importer instance, if any, so that a new model can
 * be loaded from scratch.
 */
public static void cleanUp() {
    ColladaImporter current = instance;
    if (current != null) {
        current.shutdown();
    }
}
/**
 * Drops the singleton importer instance, releasing everything it holds.
 */
public void shutdown() {
    instance = null;
}
/**
 * Author of the last loaded collada model, as given in its asset tag.
 *
 * @return the modelAuthor the author of the last loaded model.
 */
public String getModelAuthor() {
    return modelAuthor;
}
/**
 * Revision number of the last loaded collada model, as given in its asset
 * tag.
 *
 * @return the revision revision number of the last loaded collada model.
 */
public String getRevision() {
    return revision;
}
/**
 * the tool used to build the last collada model, as given in its asset
 * tag.
 *
 * @return the tool
 */
public String getTool() {
    return tool;
}
/**
 * the unit scale (meters per unit) of the last collada model, as given in
 * its asset tag.
 *
 * @return the unitMeter
 */
public float getUnitMeter() {
    return unitMeter;
}
/**
 * the unit name of the last collada model, as given in its asset tag.
 *
 * @return the unitName
 */
public String getUnitName() {
    return unitName;
}
/**
 * getAssetInformation returns a string of the collected asset information
 * of this COLLADA model. The format is such: <br>
 * AUTHOR REVISION<br>
 * TOOL<br>
 * UNITNAME UNITMETER<br>
 * UPAXIS<br>
 *
 * @return the string representation of the asset information of this file.
 */
public String getAssetInformation() {
    StringBuilder info = new StringBuilder();
    info.append(modelAuthor).append(' ').append(revision).append('\n');
    info.append(tool).append('\n');
    info.append(unitName).append(' ').append(unitMeter).append('\n');
    info.append(upAxis);
    return info.toString();
}
/**
 * processCollada takes a COLLADAType object that contains the hierarchical
 * information obtained from the XML structure of a COLLADA model. This
 * root object is processed and sets the data structures for jME to render
 * the model to *this* object. Every library is optional, and a failure in
 * any one library is logged (unless squelched) without aborting the
 * processing of the remaining libraries.
 *
 * @param root
 *            the COLLADAType data structure that contains the COLLADA
 *            model information.
 */
public void processCollada(COLLADAType root) {
    // build the asset information about this model. This can be used
    // for debugging information. Only a single asset tag is allowed per
    // model.
    if (root.hasasset()) {
        try {
            processAssetInformation(root.getasset());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing asset information", e);
            }
        }
    }
    // user defined libraries may exist (for example, uv animations)
    if (root.hasextra()) {
        try {
            ExtraPluginManager.processExtra(root, root.getextra());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing extra information", e);
            }
        }
    }
    // builds the animation keyframes and places the controllers into a
    // node.
    if (root.haslibrary_animations()) {
        try {
            processAnimationLibrary(root.getlibrary_animations());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing animation information", e);
            }
        }
    }
    if (root.haslibrary_animation_clips()) {
        if (!squelch) {
            logger.warning("Animation Clips not currently supported");
        }
    }
    if (root.haslibrary_cameras()) {
        try {
            processCameraLibrary(root.getlibrary_cameras());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing camera information", e);
            }
        }
    }
    if (root.haslibrary_force_fields()) {
        if (!squelch) {
            logger.warning("Forcefields not currently supported");
        }
    }
    if (root.haslibrary_lights()) {
        try {
            processLightLibrary(root.getlibrary_lights());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing light information", e);
            }
        }
    }
    // build a map of images that the materials can use in the future.
    if (root.haslibrary_images()) {
        try {
            processImageLibrary(root.getlibrary_images());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing image library information", e);
            }
        }
    }
    // build all the material states that can be used later
    if (root.haslibrary_materials()) {
        try {
            processMaterialLibrary(root.getlibrary_materials());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing material library information", e);
            }
        }
    }
    // process the library of effects, filling in the appropriate
    // states.
    if (root.haslibrary_effects()) {
        try {
            processEffects(root.getlibrary_effects());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing effects library information", e);
            }
        }
    }
    // process the geometry information, creating the appropriate Geometry
    // object from jME (TriMesh, lines or point).
    if (root.haslibrary_geometries()) {
        try {
            processGeometry(root.getlibrary_geometries());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing geometry library information", e);
            }
        }
    }
    // controllers will define the action of another object. For example,
    // there may be a controller with a skin tag, defining how a mesh
    // is skinning a skeleton.
    if (root.haslibrary_controllers()) {
        try {
            processControllerLibrary(root.getlibrary_controllers());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing controller library information",
                        e);
            }
        }
    }
    if (root.haslibrary_nodes()) {
        try {
            processNodes(root.getlibrary_nodes());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing nodes library information", e);
            }
        }
    }
    // process the visual scene. This scene will define how the geometries
    // are structured in the world.
    if (root.haslibrary_visual_scenes()) {
        try {
            processVisualSceneLibrary(root.getlibrary_visual_scenes());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing visual scene library information",
                        e);
            }
        }
    }
    // physics scenes/models are not converted to jME objects; they are
    // stored raw in the resource library, keyed by their ids.
    if (root.haslibrary_physics_scenes()) {
        try {
            library_physics_scenesType library = root
                    .getlibrary_physics_scenes();
            for (int i = 0; i < library.getphysics_sceneCount(); i++) {
                physics_sceneType scene = library.getphysics_sceneAt(i);
                put(scene.getid().toString(), scene);
            }
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing physics scene library information",
                        e);
            }
        }
    }
    if (root.haslibrary_physics_models()) {
        try {
            library_physics_modelsType library = root
                    .getlibrary_physics_models();
            for (int i = 0; i < library.getphysics_modelCount(); i++) {
                physics_modelType model = library.getphysics_modelAt(i);
                put(model.getid().toString(), model);
            }
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing physics model library information",
                        e);
            }
        }
    }
    // the scene tag actually takes instances of the visual scene defined
    // above
    // and attaches them to the model that is returned.
    if (root.hasscene()) {
        try {
            processScene(root.getscene());
        } catch (Exception e) {
            if (!squelch) {
                logger.log(Level.WARNING,
                        "Error processing scene information", e);
            }
        }
    }
    try {
        optimizeGeometry();
    } catch (Exception e) {
        if (!squelch) {
            logger.log(Level.WARNING, "Error optimizing geometry", e);
        }
    }
}
/**
 * optimizeGeometry runs GeometryTool vertex minimization over every
 * Spatial stored in the resource library (when OPTIMIZE_GEOMETRY is set)
 * and remaps skin bone influences so they still match the reduced vertex
 * buffers. The optional optimizeCallBack is notified of each remapping.
 */
private void optimizeGeometry() {
    for (String key : resourceLibrary.keySet()) {
        Object val = resourceLibrary.get(key);
        if (val instanceof Spatial) {
            Spatial spatial = (Spatial) val;
            int options = GeometryTool.MV_SAME_COLORS
                    | GeometryTool.MV_SAME_NORMALS
                    | GeometryTool.MV_SAME_TEXS;
            if (spatial.getParent() instanceof SkinNode) {
                // Skinned meshes: influences must be remapped after the
                // vertex buffers shrink, then offsets regenerated.
                SkinNode pNode = ((SkinNode) spatial.getParent());
                pNode.assignSkeletonBoneInfluences();
                if (spatial instanceof Node) {
                    Node skins = (Node) spatial;
                    for (int i = 0; i < skins.getQuantity(); i++) {
                        TriMesh mesh = (TriMesh) skins.getChild(i);
                        if (OPTIMIZE_GEOMETRY) {
                            VertMap map = GeometryTool.minimizeVerts(mesh,
                                    options);
                            if (optimizeCallBack != null) {
                                optimizeCallBack.remapInfluences(mesh, map);
                            }
                            int geomIndex = pNode.getSkins().getChildIndex(
                                    mesh);
                            pNode.remapInfluences(map, geomIndex);
                        }
                    }
                } else if (spatial instanceof TriMesh) {
                    TriMesh mesh = (TriMesh) spatial;
                    if (OPTIMIZE_GEOMETRY) {
                        VertMap map = GeometryTool.minimizeVerts(mesh,
                                options);
                        if (optimizeCallBack != null) {
                            optimizeCallBack.remapInfluences(mesh, map);
                        }
                        int geomIndex = pNode.getSkins()
                                .getChildIndex(mesh);
                        pNode.remapInfluences(map, geomIndex);
                    }
                }
                if (OPTIMIZE_GEOMETRY) {
                    pNode.regenInfluenceOffsets();
                }
                // Return the skin to its bind pose after optimization.
                pNode.revertToBind();
            } else if (spatial instanceof TriMesh) {
                // Plain (non-skinned) meshes only need vertex merging.
                TriMesh mesh = (TriMesh) spatial;
                if (OPTIMIZE_GEOMETRY) {
                    VertMap map = GeometryTool.minimizeVerts(mesh, options);
                    if (optimizeCallBack != null) {
                        optimizeCallBack.remapInfluences(mesh, map);
                    }
                }
            }
        }
    }
}
/**
 * Processes every <light> entry in the light library, converting each one
 * into a LightNode registered in the resource library.
 *
 * @param libraryLights
 *            the parsed light library element.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processLightLibrary(library_lightsType libraryLights)
        throws Exception {
    if (!libraryLights.haslight()) {
        return;
    }
    int count = libraryLights.getlightCount();
    for (int index = 0; index < count; index++) {
        processLight(libraryLights.getlightAt(index));
    }
}
/**
 * Converts a COLLADA <light> element (directional, point or spot variants
 * of the common technique) into a jME Light wrapped in a LightNode, which
 * is registered in the resource library and the light node name list.
 *
 * @param light
 *            the parsed light element.
 * @throws Exception
 *             thrown if there is a problem with the processing.
 */
private void processLight(lightType light) throws Exception {
    technique_commonType4 common = light.gettechnique_common();
    Light l = null;
    if (common.hasdirectional()) {
        l = new DirectionalLight();
        l.setDiffuse(getLightColor(common.getdirectional().getcolor()));
    } else if (common.haspoint()) {
        l = new PointLight();
        l.setDiffuse(getLightColor(common.getpoint().getcolor()));
        l.setAttenuate(true);
        l.setConstant(Float.parseFloat(common.getpoint()
                .getconstant_attenuation().getValue().toString()));
        l.setLinear(Float.parseFloat(common.getpoint()
                .getlinear_attenuation().getValue().toString()));
        l.setQuadratic(Float.parseFloat(common.getpoint()
                .getquadratic_attenuation().getValue().toString()));
    } else if (common.hasspot()) {
        l = new SpotLight();
        l.setDiffuse(getLightColor(common.getspot().getcolor()));
        l.setAttenuate(true);
        l.setConstant(Float.parseFloat(common.getspot()
                .getconstant_attenuation().getValue().toString()));
        l.setLinear(Float.parseFloat(common.getspot()
                .getlinear_attenuation().getValue().toString()));
        l.setQuadratic(Float.parseFloat(common.getspot()
                .getquadratic_attenuation().getValue().toString()));
        ((SpotLight) l).setAngle(Float.parseFloat(common.getspot()
                .getfalloff_angle().getValue().toString()));
        ((SpotLight) l).setExponent(Float.parseFloat(common.getspot()
                .getfalloff_exponent().getValue().toString()));
    }
    if (l != null) {
        // No specular contribution; ambient falls back to black when the
        // file does not specify one.
        l.getSpecular().set(0, 0, 0, 1);
        if (common.hasambient()) {
            l.setAmbient(getLightColor(common.getambient().getcolor()));
        } else {
            l.getAmbient().set(0, 0, 0, 1);
        }
        l.setEnabled(true);
        LightNode lightNode = new LightNode(light.getid().toString());
        lightNode.setLight(l);
        if (lightNodeNames == null) {
            lightNodeNames = new ArrayList<String>();
        }
        lightNodeNames.add(lightNode.getName());
        put(lightNode.getName(), lightNode);
    }
}
/**
* getLightColor
*
* @param color
* @return c
*/
private ColorRGBA getLightColor(TargetableFloat3 color) {
StringTokenizer st = new StringTokenizer(color.getValue().toString());
return new ColorRGBA(Float.parseFloat(st.nextToken()), Float
.parseFloat(st.nextToken()), Float.parseFloat(st.nextToken()),
1);
}
/**
* processScene finalizes the model node to be returned as the COLLADA
* model. This looks up visual scene instances that were placed in the
* resource library previously.
*
* @param scene
* the scene to process
* @throws Exception
* thrown if there is an error processing the xml.
*/
public void processScene(sceneType scene) throws Exception {
if (scene.hasinstance_visual_scene()) {
for (int i = 0; i < scene.getinstance_visual_sceneCount(); i++) {
String key = scene.getinstance_visual_sceneAt(i).geturl()
.toString().substring(1);
Node n = (Node) resourceLibrary.get(key);
if (n != null) {
model.attachChild(n);
}
}
}
if (scene.hasinstance_physics_scene()) {
for (int i = 0; i < scene.getinstance_physics_sceneCount(); i++) {
String key = scene.getinstance_physics_sceneAt(i).geturl()
.toString().substring(1);
physics_sceneType physScene = (physics_sceneType) resourceLibrary
.get(key);
if (physScene != null) {
processPhysicsScene(physScene);
}
}
}
}
private void processPhysicsScene(physics_sceneType physScene)
throws Exception {
if (physScene.hasinstance_physics_model()) {
for (int i = 0; i < physScene.getinstance_physics_modelCount(); i++) {
instance_physics_modelType instPhysModel = physScene
.getinstance_physics_modelAt(i);
String key = instPhysModel.geturl().toString().substring(1);
physics_modelType physModel = (physics_modelType) resourceLibrary
.get(key);
if (physModel != null) {
processPhysicsModel(physModel);
}
if (instPhysModel.hasinstance_rigid_body()) {
// get the Spatial that is the collision mesh
String rigidBodyKey = instPhysModel
.getinstance_rigid_body().getbody().toString();
Spatial collisionMesh = (Spatial) resourceLibrary
.get(rigidBodyKey);
if (collisionMesh != null) {
// get the target
String targetKey = instPhysModel
.getinstance_rigid_body().gettarget()
.toString().substring(1);
Node n = (Node) resourceLibrary.get(targetKey);
if (n != null) {
n.setUserData("COLLISION", collisionMesh);
}
}
}
}
}
}
private void processPhysicsModel(physics_modelType physModel)
throws Exception {
// we only care about the shape (which for now will only reference a
// geometry), so simply store this geometry with the name of the rigid
// body as the key. Initially, this only supports a single shape per
// physics model. Will be enhanced first available chance.
if (physModel.hasrigid_body()) {
for (int i = 0; i < physModel.getrigid_bodyCount(); i++) {
rigid_bodyType rigidBody = physModel.getrigid_bodyAt(i);
String id = rigidBody.getsid().toString();
if (rigidBody.hastechnique_common()) {
if (rigidBody.gettechnique_common().hasshape()) {
for (int j = 0; j < rigidBody.gettechnique_common()
.getshapeCount(); j++) {
shapeType2 shape = rigidBody.gettechnique_common()
.getshapeAt(j);
if (shape.hasinstance_geometry()) {
String key = shape.getinstance_geometry()
.geturl().toString().substring(1);
Spatial s = (Spatial) resourceLibrary.get(key);
if (s != null) {
put(id, s);
}
}
}
}
}
}
}
}
/**
* processSource builds resource objects TIME, TRANSFORM and Name array for
* the interpolation type.
*
* @param source
* the source to process
* @throws Exception
* exception thrown if there is a problem with
*/
private void processSource(sourceType source) throws Exception {
if (source.hasfloat_array()) {
if (source.hastechnique_common()) {
float[] floatArray = processFloatArray(source.getfloat_array());
paramType3 p = source.gettechnique_common().getaccessor()
.getparam();
if ("TIME".equals(p.getname().toString())) {
put(source.getid().toString(), floatArray);
} else if ("float4x4".equals(p.gettype().toString())) {
Matrix4f[] transforms = new Matrix4f[floatArray.length / 16];
for (int i = 0; i < transforms.length; i++) {
transforms[i] = new Matrix4f();
float[] data = new float[16];
for (int x = 0; x < 16; x++) {
data[x] = floatArray[(16 * i) + x];
}
transforms[i].set(data, true); // collada matrices are
// in row order.
}
put(source.getid().toString(), transforms);
} else if ("ROTX.ANGLE".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] xRot = new float[floatArray.length];
System.arraycopy(floatArray, 0, xRot, 0, xRot.length);
put(source.getid().toString(), xRot);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("ROTY.ANGLE".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] yRot = new float[floatArray.length];
System.arraycopy(floatArray, 0, yRot, 0, yRot.length);
put(source.getid().toString(), yRot);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("ROTZ.ANGLE".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] zRot = new float[floatArray.length];
System.arraycopy(floatArray, 0, zRot, 0, zRot.length);
put(source.getid().toString(), zRot);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("TRANS.X".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] xTrans = new float[floatArray.length];
System.arraycopy(floatArray, 0, xTrans, 0,
xTrans.length);
put(source.getid().toString(), xTrans);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("TRANS.Y".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] yTrans = new float[floatArray.length];
System.arraycopy(floatArray, 0, yTrans, 0,
yTrans.length);
put(source.getid().toString(), yTrans);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else if ("TRANS.Z".equals(p.getname().toString())) {
if ("float".equals(p.gettype().toString())) {
float[] zTrans = new float[floatArray.length];
System.arraycopy(floatArray, 0, zTrans, 0,
zTrans.length);
put(source.getid().toString(), zTrans);
} else {
if (!squelch) {
logger.warning(p.gettype() + " not yet supported "
+ "for animation transforms.");
}
}
} else {
if (!squelch) {
logger.warning(p.getname() + " not yet supported "
+ "for animation source.");
}
}
}
} else if (source.hasName_array()) {
int[] interpolation = processInterpolationArray(source
.getName_array());
put(source.getid().toString(), interpolation);
}
}
/**
* processInterpolationArray builds a int array that corresponds to the
* interpolation types defined in BoneAnimationController.
*
* @param array
* the array to process.
* @return the int array.
* @throws Exception
* thrown if there is a problem processing this xml document.
*/
private int[] processInterpolationArray(Name_arrayType array)
throws Exception {
StringTokenizer st = new StringTokenizer(array.getValue().toString());
int[] out = new int[array.getcount().intValue()];
String token = null;
for (int i = 0; i < out.length; i++) {
token = st.nextToken();
if ("LINEAR".equals(token)) {
out[i] = BoneAnimation.LINEAR;
} else if ("BEZIER".equals(token)) {
out[i] = BoneAnimation.BEZIER;
}
}
return out;
}
/**
* processes a float array object. The floats are represented as a String
* with the values delimited by a space.
*
* @param array
* the array to parse.
* @return the float array to return.
* @throws Exception
* thrown if there is a problem processing the XML.
*/
private float[] processFloatArray(float_arrayType array) throws Exception {
StringTokenizer st = new StringTokenizer(array.getValue().toString());
float[] out = new float[array.getcount().intValue()];
for (int i = 0; i < out.length; i++) {
out[i] = Float.parseFloat(st.nextToken());
}
return out;
}
/**
* processAssetInformation will store the information about the collada file
* for future reference. This will include the author, the tool used, the
* revision, the unit information, and the defined up axis.
*
* @param asset
* the assetType for the root of the model.
*/
private void processAssetInformation(assetType asset) throws Exception {
if (asset.hascontributor()) {
if (asset.getcontributor().hasauthor()) {
modelAuthor = asset.getcontributor().getauthor().toString();
}
if (asset.getcontributor().hasauthoring_tool()) {
tool = asset.getcontributor().getauthoring_tool().toString();
}
}
if (asset.hasrevision()) {
revision = asset.getrevision().toString();
}
if (asset.hasunit()) {
unitName = asset.getunit().getname().toString();
unitMeter = asset.getunit().getmeter().floatValue();
}
if (asset.hasup_axis()) {
upAxis = asset.getup_axis().getValue();
}
}
/**
* processAnimationLibrary will store the individual
* BoneAnimationControllers in the resource library for future use.
* Animations at this level can be considered top level animations that
* should be called from this level. These animations may contain children
* animations the top level animation is responsible for calling.
*
* @param animLib
* the library of animations to parse.
*/
private void processAnimationLibrary(library_animationsType animLib)
throws Exception {
if (animLib.hasanimation()) {
if (controllerNames == null) {
controllerNames = new ArrayList<String>();
}
for (int i = 0; i < animLib.getanimationCount(); i++) {
BoneAnimation bac = processAnimation(animLib.getanimationAt(i));
bac.setInterpolate(false);
bac.optimize(true);
put(bac.getName(), bac);
controllerNames.add(bac.getName());
if (animLib.getanimationAt(i).hasextra()) {
for (int j = 0; j < animLib.getanimationAt(i)
.getextraCount(); j++) {
logger.info("Processing extra in animation library.");
ExtraPluginManager.processExtra(bac, animLib
.getanimationAt(i).getextraAt(j));
}
}
}
}
}
    /**
     * Processes one animation element into a BoneAnimation. An animation
     * categorizes a hierarchy in which each controller defines the
     * animation's keyframe and sampler functions; these interact on single
     * bones, where a collection of controllers builds up a complete
     * animation. Sources are registered first, then samplers are read:
     * INPUT supplies keyframe times, OUTPUT supplies either full Matrix4f
     * transforms or single-axis float channels (identified by source name),
     * and INTERPOLATION supplies per-keyframe interpolation types. Child
     * animations are processed recursively and attached to the result.
     *
     * @param animation
     *            the animation to parse.
     * @return the BoneAnimation built from this element.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private BoneAnimation processAnimation(animationType animation)
            throws Exception {
        BoneAnimation out = new BoneAnimation(animation.getid().toString());
        BoneTransform bt = new BoneTransform();
        out.setInterpolate(true);
        // Register all sources (times, transforms, channels) in the
        // resource library before reading the samplers that reference them.
        if (animation.hassource()) {
            for (int i = 0; i < animation.getsourceCount(); i++) {
                processSource(animation.getsourceAt(i));
            }
        }
        // Per-axis channels collected from float[] OUTPUT sources; combined
        // into matrices below only when no full transform source was seen.
        float[] rotx = null;
        float[] roty = null;
        float[] rotz = null;
        float[] transx = null;
        float[] transy = null;
        float[] transz = null;
        boolean transformsSet = false;
        if (animation.hassampler()) {
            for (int j = 0; j < animation.getsamplerCount(); j++) {
                for (int i = 0; i < animation.getsamplerAt(j).getinputCount(); i++) {
                    if ("INPUT".equals(animation.getsamplerAt(j).getinputAt(i)
                            .getsemantic().toString())) {
                        // INPUT: keyframe times; also fixes start/end frame.
                        String key = animation.getsamplerAt(j).getinputAt(i)
                                .getsource().toString().substring(1);
                        float[] times = (float[]) resourceLibrary.get(key);
                        if (times == null) {
                            logger.warning("Animation source invalid: " + key);
                            continue;
                        }
                        out.setTimes(times);
                        out.setStartFrame(0);
                        out.setEndFrame(times.length - 1);
                    } else if ("OUTPUT".equals(animation.getsamplerAt(j)
                            .getinputAt(i).getsemantic().toString())) {
                        String key = animation.getsamplerAt(j).getinputAt(i)
                                .getsource().toString().substring(1);
                        Object object = resourceLibrary.get(key);
                        if (object == null) {
                            logger.warning("Animation source invalid: " + key);
                            continue;
                        }
                        if (object instanceof Matrix4f[]) {
                            Matrix4f[] transforms = (Matrix4f[]) object;
                            bt.setTransforms(transforms);
                            transformsSet = true;
                        } else if (object instanceof float[]) {
                            // Another bit of a hack that should be improved:
                            // to put the float arrays into the BoneTransform,
                            // we need to know what angle it is changing,
                            // I see know way to determine other than looking
                            // at the source name.
                            if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Rotate-X-")) {
                                rotx = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Rotate-Y-")) {
                                roty = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Rotate-Z-")) {
                                rotz = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Translate-X-")) {
                                transx = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Translate-Y-")) {
                                transy = (float[]) object;
                            } else if (animation.getsamplerAt(j).getinputAt(i)
                                    .getsource().toString().contains(
                                            "Translate-Z-")) {
                                transz = (float[]) object;
                            } else {
                                if (!squelch) {
                                    logger
                                            .warning("Not sure what this sampler is.");
                                }
                            }
                        }
                    } else if ("INTERPOLATION".equals(animation.getsamplerAt(j)
                            .getinputAt(i).getsemantic().toString())) {
                        // INTERPOLATION: per-keyframe interpolation type ids
                        // produced by processInterpolationArray.
                        String key = animation.getsamplerAt(j).getinputAt(i)
                                .getsource().toString().substring(1);
                        int[] interpolation = (int[]) resourceLibrary.get(key);
                        if (interpolation == null) {
                            logger.warning("Animation source invalid: " + key);
                            continue;
                        }
                        out.setInterpolationTypes(interpolation);
                    }
                }
            }
            // No Matrix4f[] OUTPUT was seen: build transforms from whatever
            // per-axis channels were collected.
            if (!transformsSet) {
                Matrix4f[] transforms = generateTransforms(rotx, roty, rotz,
                        transx, transy, transz);
                if (transforms != null) {
                    bt.setTransforms(transforms);
                }
            }
        }
        // The channel target ("boneId/transform") names the bone driven by
        // this animation's BoneTransform.
        if (animation.haschannel()) {
            String target = animation.getchannel().gettarget().toString();
            if (target.contains("/")) {
                String key = target.substring(0, animation.getchannel()
                        .gettarget().toString().indexOf('/'));
                bt.setBoneId(key);
                Bone b = (Bone) resourceLibrary.get(key);
                if (b != null) {
                    bt.setBone(b);
                }
                out.addBoneTransforms(bt);
            }
        }
        // if the animation has children attach them
        if (animation.hasanimation()) {
            for (int i = 0; i < animation.getanimationCount(); i++) {
                out.addBoneAnimation(processAnimation(animation
                        .getanimationAt(i)));
            }
        }
        return out;
    }
private Matrix4f[] generateTransforms(float[] rotx, float[] roty,
float[] rotz, float[] transx, float[] transy, float[] transz) {
Quaternion rot = new Quaternion();
int index = 0;
if (rotx != null) {
index = rotx.length;
} else if (transx != null) {
index = transx.length;
}
Matrix4f[] transforms = new Matrix4f[index];
float[] angles = new float[3];
for (int i = 0; i < transforms.length; i++) {
angles[0] = angles[1] = angles[2] = 0;
if (rotx != null) {
angles[0] = rotx[i];
}
if (roty != null) {
angles[1] = roty[i];
}
if (rotz != null) {
angles[2] = rotz[i];
}
rot.fromAngles(angles);
transforms[i] = rot.toRotationMatrix(new Matrix4f());
if (transx != null) {
transforms[i].m03 = transx[i];
}
if (transy != null) {
transforms[i].m13 = transy[i];
}
if (transz != null) {
transforms[i].m23 = transz[i];
}
}
return transforms;
}
    /**
     * Iterates the camera library. Camera creation is currently disabled:
     * processCamera() calls Renderer.createCamera(), which (per the FIXME in
     * that method) is only supposed to run on the GL thread, so the
     * per-camera call below is commented out.
     *
     * @param libraryCam the library of cameras to (eventually) process.
     * @throws Exception thrown if there is a problem processing the xml.
     */
    private void processCameraLibrary(library_camerasType libraryCam)
            throws Exception {
        if (libraryCam.hascamera()) {
            for (int i = 0; i < libraryCam.getcameraCount(); i++) {
                // processCamera(libraryCam.getcameraAt(i));
            }
        }
    }
    /**
     * Converts a COLLADA camera element into a jME Camera wrapped in a
     * CameraNode and registers it in the resource library. Both orthographic
     * and perspective optics are supported; when only one of xmag/ymag (or
     * xfov/yfov) is given, the other is derived from the aspect ratio.
     * Frustum near/far default to the renderer-created camera's values
     * unless the element overrides them.
     *
     * NOTE(review): currently unreachable — the call site in
     * processCameraLibrary is commented out because of the GL-thread FIXME
     * below.
     *
     * @param camera the COLLADA camera element to convert.
     * @throws Exception thrown if there is a problem processing the xml.
     */
    private void processCamera(cameraType camera) throws Exception {
        opticsType optics = camera.getoptics();
        technique_commonType2 common = optics.gettechnique_common();
        Renderer r = DisplaySystem.getDisplaySystem().getRenderer();
        int width = r.getWidth();
        int height = r.getHeight();
        // FIXME: THIS LINE IS SUPPOSED TO ONLY BE DONE IN A GL THREAD.
        Camera c = r.createCamera(width, height);
        float near = c.getFrustumNear();
        float far = c.getFrustumFar();
        float aspect = (float) width / (float) height;
        if (common.hasorthographic()) {
            orthographicType ortho = common.getorthographic();
            float xmag = 1.0f;
            float ymag = 1.0f;
            if (ortho.hasznear()) {
                near = Float.parseFloat(ortho.getznear().getValue().toString());
            }
            if (ortho.haszfar()) {
                far = Float.parseFloat(ortho.getzfar().getValue().toString());
            }
            if (ortho.hasxmag() && ortho.hasymag()) {
                xmag = Float.parseFloat(ortho.getxmag().getValue().toString());
                ymag = Float.parseFloat(ortho.getymag().getValue().toString());
            } else {
                // Only one magnitude given: derive the other from aspect.
                if (ortho.hasaspect_ratio()) {
                    aspect = Float.parseFloat(ortho.getaspect_ratio()
                            .getValue().toString());
                }
                if (ortho.hasxmag()) {
                    assert (!ortho.hasymag());
                    xmag = Float.parseFloat(ortho.getxmag().getValue()
                            .toString());
                    ymag = xmag / aspect;
                } else {
                    assert (ortho.hasymag());
                    ymag = Float.parseFloat(ortho.getymag().getValue()
                            .toString());
                    xmag = ymag * aspect;
                }
            }
            c.setParallelProjection(true);
            c.setFrustum(near, far, -xmag, xmag, -ymag, ymag);
        } else {
            assert (common.hasperspective());
            perspectiveType persp = common.getperspective();
            float xfov = 1.0f;
            float yfov = 1.0f;
            if (persp.hasznear()) {
                near = Float.parseFloat(persp.getznear().getValue().toString());
            }
            if (persp.haszfar()) {
                far = Float.parseFloat(persp.getzfar().getValue().toString());
            }
            if (persp.hasxfov() && persp.hasyfov()) {
                xfov = Float.parseFloat(persp.getxfov().getValue().toString());
                yfov = Float.parseFloat(persp.getyfov().getValue().toString());
            } else {
                // Only one field of view given: derive the other from aspect.
                if (persp.hasaspect_ratio()) {
                    aspect = Float.parseFloat(persp.getaspect_ratio()
                            .getValue().toString());
                }
                if (persp.hasxfov()) {
                    assert (!persp.hasyfov());
                    xfov = Float.parseFloat(persp.getxfov().getValue()
                            .toString());
                    yfov = xfov / aspect;
                } else {
                    assert (persp.hasyfov());
                    yfov = Float.parseFloat(persp.getyfov().getValue()
                            .toString());
                    xfov = yfov * aspect;
                }
            }
            c.setParallelProjection(false);
            c.setFrustumPerspective(yfov, aspect, near, far);
        }
        if (cameraNodeNames == null) {
            cameraNodeNames = new ArrayList<String>();
        }
        CameraNode nodeCamera = new CameraNode(camera.getid().toString(), c);
        // cameras are odd in that their rotation is typically exported
        // backwards from the direction that they're looking in the scene
        if ("X_UP".equals(upAxis))
            nodeCamera.setLocalRotation(new Quaternion(1, 0, 0, 0));
        else if ("Y_UP".equals(upAxis))
            nodeCamera.setLocalRotation(new Quaternion(0, 1, 0, 0));
        else if ("Z_UP".equals(upAxis))
            nodeCamera.setLocalRotation(new Quaternion(0, 0, 1, 0));
        cameraNodeNames.add(nodeCamera.getName());
        put(nodeCamera.getName(), nodeCamera);
    }
/**
* processImageLibrary will build a collection of image filenames. The image
* tag contains the full directory path of the image from the artists
* working directory. Therefore, the directory will be stripped off leaving
* only the filename. This filename will be associated with a id key that
* can be obtained by the material that wishes to make use of it.
*
* @param libraryImg
* the library of images (name/image pair).
*/
private void processImageLibrary(library_imagesType libraryImg)
throws Exception {
if (libraryImg.hasimage()) {
for (int i = 0; i < libraryImg.getimageCount(); i++) {
processImage(libraryImg.getimageAt(i));
}
}
}
/**
* processImage takes an image type and places the necessary information in
* the resource library.
*
* @param image
* the image to process.
* @throws Exception
* thrown if there is a problem with the imagetype.
*/
private void processImage(imageType image) throws Exception {
if (image.hasdata()) {
if (!squelch) {
logger.warning("Raw data images not supported.");
}
}
if (image.hasinit_from()) {
put(image.getid().toString(), image.getinit_from().toString());
}
}
/**
* processMaterialLibrary will build a collection (Map) of MaterialStates,
* with the defined material id as the key in the Map. This map and
* corresponding key will then be used to apply materials to the appropriate
* node. The library only defines the id of the material and the url of the
* instance effect that defines its qualities, it won't be until the
* library_effects tag is processed that the material state information is
* filled in.
*
* @param libraryMat
* the material library type.
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processMaterialLibrary(library_materialsType libraryMat)
throws Exception {
if (libraryMat.hasmaterial()) {
for (int i = 0; i < libraryMat.getmaterialCount(); i++) {
processMaterial(libraryMat.getmaterialAt(i));
}
}
}
/**
* process Material which typically contains an id and a reference URL to an
* effect.
*
* @param mat
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processMaterial(materialType mat) throws Exception {
ColladaMaterial material = new ColladaMaterial();
String url = null;
if (mat.hasinstance_effect()) {
url = mat.getinstance_effect().geturl().toString();
if (url.startsWith("#")) {
url = url.substring(1);
}
put(url, material);
put(mat.getid().toString(), url);
}
if (mat.hasextra()) {
ExtraPluginManager.processExtra(material, mat.getextra());
}
}
/**
* processEffects will build effects as defined by the techinque. The
* appropriate render state will be obtained from the materialMap hashmap
* based on the the name of the effect. Currently, the id of the effect is
* ignored as it is directly tied to the material id. However, in the future
* this may require support.
*
* @param libraryEffects
* the library of effects to build.
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processEffects(library_effectsType libraryEffects)
throws Exception {
if (libraryEffects.haseffect()) {
for (int i = 0; i < libraryEffects.geteffectCount(); i++) {
String key = libraryEffects.geteffectAt(i).getid().toString();
ColladaMaterial mat = (ColladaMaterial) resourceLibrary
.get(key);
if (mat != null) {
fillMaterial(libraryEffects.geteffectAt(i), mat);
}
}
}
}
/**
* fillMaterial will use the provided effectType to generate the material
* setting for the collada model. The effect can handle both programmable
* pipelines and fixed pipelines. This is defined by what sort of profile it
* is using (profile_COMMON, profile_GLSL, profile_CG). Currently,
* profile_CG is ignored. There may be multiple profiles, describing a path
* of fallbacks. Currently, only one profile will be supported at a time.<br>
* <br>
* There is a possibility that each profile may have multiple techniques,
* defining different materials for different situations, i.e. LOD. This
* version of the loader will assume a single technique.
*
* @param effect
* the collada effect to process.
* @param mat
* the ColladaMaterial that will hold the RenderStates needed to
* express this material.
* @throws Exception
* thrown if there is a problem processing the file.
*/
private void fillMaterial(effectType effect, ColladaMaterial mat)
throws Exception {
// process the fixed pipeline information
if (effect.hasprofile_COMMON()) {
for (int i = 0; i < effect.getprofile_COMMON().getnewparamCount(); i++) {
processNewParam(effect.getprofile_COMMON().getnewparamAt(i),
mat);
}
for (int i = 0; i < effect.getprofile_COMMON().gettechniqueCount(); i++) {
processTechniqueCOMMON(effect.getprofile_COMMON()
.gettechniqueAt(i), mat);
}
if (effect.getprofile_COMMON().hasextra()) {
for (int i = 0; i < effect.getprofile_COMMON().getextraCount(); i++) {
ExtraPluginManager.processExtra(mat, effect
.getprofile_COMMON().getextraAt(i));
}
}
}
// process the programmable pipeline
// profile_GLSL defines all of OpenGL states as well as GLSL shaders.
if (effect.hasprofile_GLSL()) {
for (int i = 0; i < effect.getprofile_GLSL().gettechniqueCount(); i++) {
processTechniqueGLSL(
effect.getprofile_GLSL().gettechniqueAt(i), mat);
}
}
}
/**
* processNewParam sets specific properties of a material (surface
* properties, sampler properties, etc).
*
* @param param
* the xml element of the new parameter.
* @param mat
* the material to store the parameters in.
* @throws Exception
* thrown if there is a problem reading the xml.
*/
private void processNewParam(common_newparam_type param, ColladaMaterial mat)
throws Exception {
if (param.hassampler2D()) {
processSampler2D(param.getsid().toString(), param.getsampler2D(),
mat);
}
if (param.hassurface()) {
processSurface(param.getsid().toString(), param.getsurface());
}
}
/**
* processes images information, defining the min and mag filter for
* mipmapping.
*
* @param id
* the id on the sampler
* @param sampler
* the sampler xml element.
* @param mat
* the material to store the values in.
* @throws Exception
* thrown if there is a problem reading the file.
*/
private void processSampler2D(String id, fx_sampler2D_common sampler,
ColladaMaterial mat) throws Exception {
if (sampler.hasmagfilter()) {
mat.magFilter = sampler.getmagfilter().getValue();
}
if (sampler.hasminfilter()) {
mat.minFilter = sampler.getminfilter().getValue();
}
mat.wrapS = "WRAP";
mat.wrapT = "WRAP";
put(id, sampler.getsource().getValue());
}
private void processSurface(String id, fx_surface_common surface)
throws Exception {
put(id, surface.getinit_from().getValue().toString());
}
/**
* processes rendering information defined to be GLSL standard, which
* includes all OpenGL state information and GLSL shader information.
*
* @param technique
* @param mat
* @throws Exception
*/
private void processTechniqueGLSL(techniqueType4 technique,
ColladaMaterial mat) throws Exception {
if (technique.haspass()) {
for (int i = 0; i < technique.getpassCount(); i++) {
processPassGLSL(technique.getpassAt(i), mat);
}
}
}
/**
 * Translates the OpenGL render states of a single GLSL pass into jME render
 * states on the supplied material.
 * <p>
 * Only a single pass is supported: if multiple passes are defined under a
 * profile_GLSL their states are combined into one pass, and a state defined
 * in a later pass overrides the value from an earlier one.
 * <p>
 * Fixes over the previous version: the clip_plane_enable index is now read
 * from clip_plane_enable (not clip_plane); the blend source token
 * ONE_MINUS_SRC_ALPHA maps to OneMinusSourceAlpha; newly created stencil
 * states are attached to the material instead of being discarded; and the
 * stencil function mask no longer overwrites the stencil reference.
 *
 * @param pass the pass element whose render states are read.
 * @param mat the material that collects the generated render states.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processPassGLSL(passType3 pass, ColladaMaterial mat)
        throws Exception {
    if (pass.hasclip_plane()) {
        ClipState cs = acquireClipState(mat);
        if (pass.getclip_plane().hasindex()
                && pass.getclip_plane().hasvalue2()) {
            int index = pass.getclip_plane().getindex().intValue();
            float[] plane = parseFourFloats(pass.getclip_plane()
                    .getvalue2().toString());
            cs.setClipPlaneEquation(index, plane[0], plane[1], plane[2],
                    plane[3]);
        }
    }
    if (pass.hasclip_plane_enable()) {
        ClipState cs = acquireClipState(mat);
        if (pass.getclip_plane_enable().hasindex()
                && pass.getclip_plane_enable().hasvalue2()) {
            // FIX: previously the index was read from clip_plane rather
            // than clip_plane_enable, failing when only the enable
            // element is present.
            int index = pass.getclip_plane_enable().getindex().intValue();
            cs.setEnableClipPlane(index, pass.getclip_plane_enable()
                    .getvalue2().booleanValue());
        }
    }
    if (pass.hascolor_mask()) {
        ColorMaskState cms = acquireColorMaskState(mat);
        if (pass.getcolor_mask().hasvalue2()) {
            // Four whitespace-separated booleans: red green blue alpha.
            StringTokenizer st = new StringTokenizer(pass.getcolor_mask()
                    .getvalue2().toString());
            cms.setRed(Boolean.parseBoolean(st.nextToken()));
            cms.setGreen(Boolean.parseBoolean(st.nextToken()));
            cms.setBlue(Boolean.parseBoolean(st.nextToken()));
            cms.setAlpha(Boolean.parseBoolean(st.nextToken()));
        }
    }
    if (pass.hasdepth_func()) {
        ZBufferState zbs = acquireZBufferState(mat);
        if (pass.getdepth_func().hasvalue2()) {
            ZBufferState.TestFunction depthFunc = toZBufferFunction(pass
                    .getdepth_func().getvalue2().toString());
            if (depthFunc != null) {
                zbs.setFunction(depthFunc);
            }
        }
    }
    if (pass.hasdepth_mask()) {
        ZBufferState zbs = acquireZBufferState(mat);
        if (pass.getdepth_mask().hasvalue2()) {
            zbs.setWritable(pass.getdepth_mask().getvalue2()
                    .booleanValue());
        }
    }
    if (pass.hasdepth_test_enable()) {
        ZBufferState zbs = acquireZBufferState(mat);
        if (pass.getdepth_test_enable().hasvalue2()) {
            zbs.setEnabled(pass.getdepth_test_enable().getvalue2()
                    .booleanValue());
        }
    }
    if (pass.hascolor_material()) {
        MaterialState ms = acquireMaterialState(mat);
        if (pass.getcolor_material().hasface()) {
            String face = pass.getcolor_material().getface().getvalue2()
                    .toString();
            if ("FRONT".equals(face)) {
                ms.setMaterialFace(MaterialState.MaterialFace.Front);
            } else if ("BACK".equals(face)) {
                ms.setMaterialFace(MaterialState.MaterialFace.Back);
            } else if ("FRONT_AND_BACK".equals(face)) {
                ms.setMaterialFace(MaterialState.MaterialFace.FrontAndBack);
            }
        }
        if (pass.getcolor_material().hasmode()) {
            String mode = pass.getcolor_material().getmode().getvalue2()
                    .toString();
            if ("AMBIENT".equals(mode)) {
                ms.setColorMaterial(MaterialState.ColorMaterial.Ambient);
            } else if ("EMISSION".equals(mode)) {
                ms.setColorMaterial(MaterialState.ColorMaterial.Emissive);
            } else if ("DIFFUSE".equals(mode)) {
                ms.setColorMaterial(MaterialState.ColorMaterial.Diffuse);
            } else if ("SPECULAR".equals(mode)) {
                ms.setColorMaterial(MaterialState.ColorMaterial.Specular);
            } else if ("AMBIENT_AND_DIFFUSE".equals(mode)) {
                ms.setColorMaterial(MaterialState.ColorMaterial.AmbientAndDiffuse);
            }
        }
    }
    if (pass.hasfog_color()) {
        FogState fs = acquireFogState(mat);
        if (pass.getfog_color().hasvalue2()) {
            float[] c = parseFourFloats(pass.getfog_color().getvalue2()
                    .toString());
            fs.setColor(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasfog_density()) {
        FogState fs = acquireFogState(mat);
        if (pass.getfog_density().hasvalue2()) {
            fs.setDensity(pass.getfog_density().getvalue2().floatValue());
        }
    }
    if (pass.hasfog_enable()) {
        FogState fs = acquireFogState(mat);
        if (pass.getfog_enable().hasvalue2()) {
            fs.setEnabled(pass.getfog_enable().getvalue2().booleanValue());
        }
    }
    if (pass.hasfog_end()) {
        FogState fs = acquireFogState(mat);
        if (pass.getfog_end().hasvalue2()) {
            fs.setEnd(pass.getfog_end().getvalue2().floatValue());
        }
    }
    if (pass.hasfog_mode()) {
        FogState fs = acquireFogState(mat);
        if (pass.getfog_mode().hasvalue2()) {
            String mode = pass.getfog_mode().getvalue2().toString();
            if ("LINEAR".equals(mode)) {
                fs.setDensityFunction(FogState.DensityFunction.Linear);
            } else if ("EXP".equals(mode)) {
                fs.setDensityFunction(FogState.DensityFunction.Exponential);
            } else if ("EXP2".equals(mode)) {
                fs.setDensityFunction(FogState.DensityFunction.ExponentialSquared);
            }
        }
    }
    if (pass.hasfog_start()) {
        FogState fs = acquireFogState(mat);
        if (pass.getfog_start().hasvalue2()) {
            fs.setStart(pass.getfog_start().getvalue2().floatValue());
        }
    }
    if (pass.hasalpha_test_enable()) {
        acquireBlendState(mat).setTestEnabled(pass.getalpha_test_enable()
                .getvalue2().booleanValue());
    }
    if (pass.hasalpha_func()) {
        BlendState as = acquireBlendState(mat);
        if (pass.getalpha_func().hasfunc()) {
            BlendState.TestFunction testFunc = toBlendTestFunction(pass
                    .getalpha_func().getfunc().getvalue2().toString());
            if (testFunc != null) {
                as.setTestFunction(testFunc);
            }
        }
        if (pass.getalpha_func().hasvalue2()) {
            as.setReference(pass.getalpha_func().getvalue2().getvalue2()
                    .floatValue());
        }
    }
    if (pass.hasblend_enable()) {
        acquireBlendState(mat).setBlendEnabled(pass.getblend_enable()
                .getvalue2().booleanValue());
    }
    if (pass.hasblend_func()) {
        BlendState as = acquireBlendState(mat);
        if (pass.getblend_func().hasdest()) {
            BlendState.DestinationFunction dest = toBlendDestinationFunction(
                    pass.getblend_func().getdest().getvalue2().toString());
            if (dest != null) {
                as.setDestinationFunction(dest);
            }
        }
        if (pass.getblend_func().hassrc()) {
            BlendState.SourceFunction src = toBlendSourceFunction(pass
                    .getblend_func().getsrc().getvalue2().toString());
            if (src != null) {
                as.setSourceFunction(src);
            }
        }
    }
    if (pass.hascull_face_enable()) {
        acquireCullState(mat).setEnabled(pass.getcull_face_enable()
                .getvalue2().booleanValue());
    }
    if (pass.hascull_face()) {
        CullState cs = acquireCullState(mat);
        if (pass.getcull_face().hasvalue2()) {
            String face = pass.getcull_face().getvalue2().toString();
            if ("FRONT".equals(face)) {
                cs.setCullFace(CullState.Face.Front);
            } else if ("BACK".equals(face)) {
                cs.setCullFace(CullState.Face.Back);
            } else if ("FRONT_AND_BACK".equals(face)) {
                cs.setCullFace(CullState.Face.FrontAndBack);
            }
        }
    }
    // Define the ShadeState (FLAT or SMOOTH).
    if (pass.hasshade_model()) {
        ShadeState ss = acquireShadeState(mat);
        if (pass.getshade_model().hasvalue2()) {
            String shade = pass.getshade_model().getvalue2().toString();
            if ("FLAT".equals(shade)) {
                ss.setShadeMode(ShadeState.ShadeMode.Flat);
            } else if ("SMOOTH".equals(shade)) {
                ss.setShadeMode(ShadeState.ShadeMode.Smooth);
            }
        }
    }
    if (pass.hasmaterial_ambient()) {
        MaterialState ms = acquireMaterialState(mat);
        if (pass.getmaterial_ambient().hasvalue2()) {
            float[] c = parseFourFloats(pass.getmaterial_ambient()
                    .getvalue2().toString());
            ms.setAmbient(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasmaterial_diffuse()) {
        MaterialState ms = acquireMaterialState(mat);
        if (pass.getmaterial_diffuse().hasvalue2()) {
            float[] c = parseFourFloats(pass.getmaterial_diffuse()
                    .getvalue2().toString());
            ms.setDiffuse(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasmaterial_emission()) {
        MaterialState ms = acquireMaterialState(mat);
        if (pass.getmaterial_emission().hasvalue2()) {
            float[] c = parseFourFloats(pass.getmaterial_emission()
                    .getvalue2().toString());
            ms.setEmissive(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasmaterial_shininess()) {
        MaterialState ms = acquireMaterialState(mat);
        if (pass.getmaterial_shininess().hasvalue2()) {
            ms.setShininess(pass.getmaterial_shininess().getvalue2()
                    .floatValue());
        }
    }
    if (pass.hasmaterial_specular()) {
        MaterialState ms = acquireMaterialState(mat);
        if (pass.getmaterial_specular().hasvalue2()) {
            float[] c = parseFourFloats(pass.getmaterial_specular()
                    .getvalue2().toString());
            ms.setSpecular(new ColorRGBA(c[0], c[1], c[2], c[3]));
        }
    }
    if (pass.hasstencil_func()) {
        // FIX: the acquire helper attaches a newly created stencil state
        // to the material; previously the created state was discarded.
        StencilState ss = acquireStencilState(mat);
        if (pass.getstencil_func().hasfunc()) {
            StencilState.StencilFunction func = toStencilFunction(pass
                    .getstencil_func().getfunc().toString());
            if (func != null) {
                ss.setStencilFunction(func);
            }
        }
        if (pass.getstencil_func().hasref()) {
            ss.setStencilReference(pass.getstencil_func().getref()
                    .getvalue2().intValue());
        }
        if (pass.getstencil_func().hasmask()) {
            // FIX: the mask previously overwrote the stencil reference.
            // NOTE(review): setStencilFuncMask is believed to be the
            // matching jME accessor — confirm against the engine API.
            ss.setStencilFuncMask(pass.getstencil_func().getmask()
                    .getvalue2().intValue());
        }
    }
    if (pass.hasstencil_op()) {
        StencilState ss = acquireStencilState(mat);
        if (pass.getstencil_op().hasfail()) {
            ss.setStencilOpFail(evaluateStencilOp(pass.getstencil_op()
                    .getfail().toString()));
        }
        if (pass.getstencil_op().haszfail()) {
            ss.setStencilOpZFail(evaluateStencilOp(pass.getstencil_op()
                    .getzfail().toString()));
        }
        if (pass.getstencil_op().haszpass()) {
            ss.setStencilOpZPass(evaluateStencilOp(pass.getstencil_op()
                    .getzpass().toString()));
        }
    }
    if (pass.hasstencil_test_enable()) {
        acquireStencilState(mat).setEnabled(pass.getstencil_test_enable()
                .getvalue2().booleanValue());
    }
}

/** Returns the material's clip state, creating and attaching one if absent. */
private ClipState acquireClipState(ColladaMaterial mat) {
    ClipState cs = (ClipState) mat.getState(RenderState.RS_CLIP);
    if (cs == null) {
        cs = DisplaySystem.getDisplaySystem().getRenderer()
                .createClipState();
        mat.setState(cs);
    }
    return cs;
}

/** Returns the material's color mask state, creating and attaching one if absent. */
private ColorMaskState acquireColorMaskState(ColladaMaterial mat) {
    ColorMaskState cms = (ColorMaskState) mat
            .getState(RenderState.RS_COLORMASK_STATE);
    if (cms == null) {
        cms = DisplaySystem.getDisplaySystem().getRenderer()
                .createColorMaskState();
        mat.setState(cms);
    }
    return cms;
}

/** Returns the material's z-buffer state, creating and attaching one if absent. */
private ZBufferState acquireZBufferState(ColladaMaterial mat) {
    ZBufferState zbs = (ZBufferState) mat.getState(RenderState.RS_ZBUFFER);
    if (zbs == null) {
        zbs = DisplaySystem.getDisplaySystem().getRenderer()
                .createZBufferState();
        mat.setState(zbs);
    }
    return zbs;
}

/** Returns the material's material state, creating and attaching one if absent. */
private MaterialState acquireMaterialState(ColladaMaterial mat) {
    MaterialState ms = (MaterialState) mat
            .getState(RenderState.RS_MATERIAL);
    if (ms == null) {
        ms = DisplaySystem.getDisplaySystem().getRenderer()
                .createMaterialState();
        mat.setState(ms);
    }
    return ms;
}

/** Returns the material's fog state, creating and attaching one if absent. */
private FogState acquireFogState(ColladaMaterial mat) {
    FogState fs = (FogState) mat.getState(RenderState.RS_FOG);
    if (fs == null) {
        fs = DisplaySystem.getDisplaySystem().getRenderer()
                .createFogState();
        mat.setState(fs);
    }
    return fs;
}

/** Returns the material's blend state, creating and attaching one if absent. */
private BlendState acquireBlendState(ColladaMaterial mat) {
    BlendState as = (BlendState) mat.getState(RenderState.RS_BLEND);
    if (as == null) {
        as = DisplaySystem.getDisplaySystem().getRenderer()
                .createBlendState();
        mat.setState(as);
    }
    return as;
}

/** Returns the material's cull state, creating and attaching one if absent. */
private CullState acquireCullState(ColladaMaterial mat) {
    CullState cs = (CullState) mat.getState(RenderState.RS_CULL);
    if (cs == null) {
        cs = DisplaySystem.getDisplaySystem().getRenderer()
                .createCullState();
        mat.setState(cs);
    }
    return cs;
}

/** Returns the material's shade state, creating and attaching one if absent. */
private ShadeState acquireShadeState(ColladaMaterial mat) {
    ShadeState ss = (ShadeState) mat.getState(RenderState.RS_SHADE);
    if (ss == null) {
        ss = DisplaySystem.getDisplaySystem().getRenderer()
                .createShadeState();
        mat.setState(ss);
    }
    return ss;
}

/** Returns the material's stencil state, creating and attaching one if absent. */
private StencilState acquireStencilState(ColladaMaterial mat) {
    StencilState ss = (StencilState) mat.getState(RenderState.RS_STENCIL);
    if (ss == null) {
        ss = DisplaySystem.getDisplaySystem().getRenderer()
                .createStencilState();
        mat.setState(ss);
    }
    return ss;
}

/** Parses four whitespace-separated float values from the given text. */
private static float[] parseFourFloats(String text) {
    StringTokenizer st = new StringTokenizer(text);
    float[] values = new float[4];
    for (int i = 0; i < values.length; i++) {
        values[i] = Float.parseFloat(st.nextToken());
    }
    return values;
}

/**
 * Maps a COLLADA depth function token to a ZBufferState test function, or
 * null for an unknown token (leaving the current value untouched).
 */
private static ZBufferState.TestFunction toZBufferFunction(String token) {
    if ("NEVER".equals(token)) {
        return ZBufferState.TestFunction.Never;
    } else if ("LESS".equals(token)) {
        return ZBufferState.TestFunction.LessThan;
    } else if ("LEQUAL".equals(token)) {
        return ZBufferState.TestFunction.LessThanOrEqualTo;
    } else if ("EQUAL".equals(token)) {
        return ZBufferState.TestFunction.EqualTo;
    } else if ("GREATER".equals(token)) {
        return ZBufferState.TestFunction.GreaterThan;
    } else if ("NOTEQUAL".equals(token)) {
        return ZBufferState.TestFunction.NotEqualTo;
    } else if ("GEQUAL".equals(token)) {
        return ZBufferState.TestFunction.GreaterThanOrEqualTo;
    } else if ("ALWAYS".equals(token)) {
        return ZBufferState.TestFunction.Always;
    }
    return null;
}

/**
 * Maps a COLLADA alpha function token to a BlendState test function, or
 * null for an unknown token (leaving the current value untouched).
 */
private static BlendState.TestFunction toBlendTestFunction(String token) {
    if ("NEVER".equals(token)) {
        return BlendState.TestFunction.Never;
    } else if ("LESS".equals(token)) {
        return BlendState.TestFunction.LessThan;
    } else if ("LEQUAL".equals(token)) {
        return BlendState.TestFunction.LessThanOrEqualTo;
    } else if ("EQUAL".equals(token)) {
        return BlendState.TestFunction.EqualTo;
    } else if ("GREATER".equals(token)) {
        return BlendState.TestFunction.GreaterThan;
    } else if ("NOTEQUAL".equals(token)) {
        return BlendState.TestFunction.NotEqualTo;
    } else if ("GEQUAL".equals(token)) {
        return BlendState.TestFunction.GreaterThanOrEqualTo;
    } else if ("ALWAYS".equals(token)) {
        return BlendState.TestFunction.Always;
    }
    return null;
}

/**
 * Maps a COLLADA stencil function token to a StencilState function, or
 * null for an unknown token (leaving the current value untouched).
 */
private static StencilState.StencilFunction toStencilFunction(String token) {
    if ("NEVER".equals(token)) {
        return StencilState.StencilFunction.Never;
    } else if ("LESS".equals(token)) {
        return StencilState.StencilFunction.LessThan;
    } else if ("LEQUAL".equals(token)) {
        return StencilState.StencilFunction.LessThanOrEqualTo;
    } else if ("EQUAL".equals(token)) {
        return StencilState.StencilFunction.EqualTo;
    } else if ("GREATER".equals(token)) {
        return StencilState.StencilFunction.GreaterThan;
    } else if ("NOTEQUAL".equals(token)) {
        return StencilState.StencilFunction.NotEqualTo;
    } else if ("GEQUAL".equals(token)) {
        return StencilState.StencilFunction.GreaterThanOrEqualTo;
    } else if ("ALWAYS".equals(token)) {
        return StencilState.StencilFunction.Always;
    }
    return null;
}

/**
 * Maps a COLLADA blend destination token to a BlendState destination
 * function, or null for an unknown token.
 */
private static BlendState.DestinationFunction toBlendDestinationFunction(
        String token) {
    if ("ZERO".equals(token)) {
        return BlendState.DestinationFunction.Zero;
    } else if ("ONE".equals(token)) {
        return BlendState.DestinationFunction.One;
    } else if ("SRC_COLOR".equals(token)) {
        return BlendState.DestinationFunction.SourceColor;
    } else if ("ONE_MINUS_SRC_COLOR".equals(token)) {
        return BlendState.DestinationFunction.OneMinusSourceColor;
    } else if ("SRC_ALPHA".equals(token)) {
        return BlendState.DestinationFunction.SourceAlpha;
    } else if ("ONE_MINUS_SRC_ALPHA".equals(token)) {
        return BlendState.DestinationFunction.OneMinusSourceAlpha;
    } else if ("DST_ALPHA".equals(token)) {
        return BlendState.DestinationFunction.DestinationAlpha;
    } else if ("ONE_MINUS_DST_ALPHA".equals(token)) {
        return BlendState.DestinationFunction.OneMinusDestinationAlpha;
    } else if ("CONSTANT_COLOR".equals(token)) {
        return BlendState.DestinationFunction.ConstantColor;
    } else if ("ONE_MINUS_CONSTANT_COLOR".equals(token)) {
        return BlendState.DestinationFunction.OneMinusConstantColor;
    } else if ("CONSTANT_ALPHA".equals(token)) {
        return BlendState.DestinationFunction.ConstantAlpha;
    } else if ("ONE_MINUS_CONSTANT_ALPHA".equals(token)) {
        return BlendState.DestinationFunction.OneMinusConstantAlpha;
    }
    return null;
}

/**
 * Maps a COLLADA blend source token to a BlendState source function, or
 * null for an unknown token.
 */
private static BlendState.SourceFunction toBlendSourceFunction(String token) {
    if ("ZERO".equals(token)) {
        return BlendState.SourceFunction.Zero;
    } else if ("ONE".equals(token)) {
        return BlendState.SourceFunction.One;
    } else if ("DEST_COLOR".equals(token)) {
        return BlendState.SourceFunction.DestinationColor;
    } else if ("ONE_MINUS_DEST_COLOR".equals(token)) {
        return BlendState.SourceFunction.OneMinusDestinationColor;
    } else if ("SRC_ALPHA".equals(token)) {
        return BlendState.SourceFunction.SourceAlpha;
    } else if ("ONE_MINUS_SRC_ALPHA".equals(token)) {
        // FIX: previously mapped to OneMinusDestinationAlpha.
        return BlendState.SourceFunction.OneMinusSourceAlpha;
    } else if ("DST_ALPHA".equals(token)) {
        return BlendState.SourceFunction.DestinationAlpha;
    } else if ("ONE_MINUS_DST_ALPHA".equals(token)) {
        return BlendState.SourceFunction.OneMinusDestinationAlpha;
    } else if ("CONSTANT_COLOR".equals(token)) {
        return BlendState.SourceFunction.ConstantColor;
    } else if ("ONE_MINUS_CONSTANT_COLOR".equals(token)) {
        return BlendState.SourceFunction.OneMinusConstantColor;
    } else if ("CONSTANT_ALPHA".equals(token)) {
        return BlendState.SourceFunction.ConstantAlpha;
    } else if ("ONE_MINUS_CONSTANT_ALPHA".equals(token)) {
        return BlendState.SourceFunction.OneMinusConstantAlpha;
    } else if ("SRC_ALPHA_SATURATE".equals(token)) {
        return BlendState.SourceFunction.SourceAlphaSaturate;
    }
    return null;
}
/**
 * Maps a COLLADA stencil operation token to the corresponding jME stencil
 * operation. Unknown tokens fall back to Keep.
 *
 * @param value the COLLADA token, e.g. "KEEP" or "INCR_WRAP".
 * @return the matching StencilOperation; Keep when the token is unrecognized.
 */
public StencilState.StencilOperation evaluateStencilOp(String value) {
    if ("KEEP".equals(value)) {
        return StencilState.StencilOperation.Keep;
    } else if ("ZERO".equals(value)) {
        return StencilState.StencilOperation.Zero;
    } else if ("REPLACE".equals(value)) {
        return StencilState.StencilOperation.Replace;
    } else if ("INCR".equals(value)) {
        return StencilState.StencilOperation.Increment;
    } else if ("DECR".equals(value)) {
        return StencilState.StencilOperation.Decrement;
    } else if ("INVERT".equals(value)) {
        return StencilState.StencilOperation.Invert;
    } else if ("INCR_WRAP".equals(value)) {
        return StencilState.StencilOperation.IncrementWrap;
    } else if ("DECR_WRAP".equals(value) || "DECT_WRAP".equals(value)) {
        // FIX: the COLLADA token is DECR_WRAP; the misspelled DECT_WRAP
        // is kept for backward compatibility with the old behavior.
        return StencilState.StencilOperation.DecrementWrap;
    } else {
        return StencilState.StencilOperation.Keep;
    }
}
/**
 * processTechniqueCOMMON processes a technique of techniqueType2, the type
 * returned from a profile_COMMON object. Such a technique may contain
 * lambert shading, phong shading and extra plug-in data.
 *
 * @param technique the fixed pipeline technique.
 * @param mat the material to store the technique in.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processTechniqueCOMMON(techniqueType2 technique,
        ColladaMaterial mat) throws Exception {
    if (technique.haslambert()) {
        processLambert(technique.getlambert(), mat);
    }
    // Blinn shading and phong shading are virtually the same, and OpenGL
    // only has a single "smooth" attribute for this.
    if (technique.hasphong()) {
        processPhong(technique.getphong(), mat);
    }
    if (technique.hasextra()) {
        int extraCount = technique.getextraCount();
        for (int extraIndex = 0; extraIndex < extraCount; extraIndex++) {
            ExtraPluginManager.processExtra(mat,
                    technique.getextraAt(extraIndex));
        }
    }
}
/**
 * Translates a phong shading definition into a jME material state (and
 * texture states for any diffuse textures) on the supplied material.
 *
 * @param pt the phong element to translate.
 * @param mat the material receiving the generated states.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processPhong(phongType pt, ColladaMaterial mat)
        throws Exception {
    // Collect the color values of the material.
    MaterialState materialState = DisplaySystem.getDisplaySystem()
            .getRenderer().createMaterialState();
    if (pt.hasambient()) {
        materialState.setAmbient(getColor(pt.getambient().getcolor()));
    }
    if (pt.hasdiffuse()) {
        if (pt.getdiffuse().hascolor()) {
            materialState.setDiffuse(getColor(pt.getdiffuse().getcolor()));
        }
        if (pt.getdiffuse().hastexture()) {
            // Each diffuse texture becomes a texture state; the texcoord
            // channel decides which texture unit it lands in.
            int textureCount = pt.getdiffuse().gettextureCount();
            for (int t = 0; t < textureCount; t++) {
                mat.setState(processTexture(
                        pt.getdiffuse().gettextureAt(t), mat));
            }
        }
    }
    if (pt.hasemission()) {
        materialState.setEmissive(getColor(pt.getemission().getcolor()));
    }
    if (pt.hasspecular()) {
        materialState.setSpecular(getColor(pt.getspecular().getcolor()));
    }
    if (pt.hasshininess()) {
        materialState.setShininess(pt.getshininess().getfloat2()
                .getValue().floatValue());
    }
    // Transparency attributes are currently ignored.
    mat.setState(materialState);
}
/**
 * Translates a lambert shading definition into a flat shade state plus a
 * material state with the defined colors on the supplied material.
 *
 * @param lt the lambert element to translate.
 * @param mat the material receiving the generated states.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processLambert(lambertType lt, ColladaMaterial mat)
        throws Exception {
    // Lambert shading: flat shade state with the defined colors.
    ShadeState shadeState = DisplaySystem.getDisplaySystem().getRenderer()
            .createShadeState();
    shadeState.setShadeMode(ShadeState.ShadeMode.Flat);
    mat.setState(shadeState);
    // Collect the color values of the material.
    MaterialState materialState = DisplaySystem.getDisplaySystem()
            .getRenderer().createMaterialState();
    if (lt.hasambient()) {
        materialState.setAmbient(getColor(lt.getambient().getcolor()));
    }
    if (lt.hasdiffuse()) {
        if (lt.getdiffuse().hascolor()) {
            materialState.setDiffuse(getColor(lt.getdiffuse().getcolor()));
        }
        if (lt.getdiffuse().hastexture()) {
            // Each diffuse texture becomes a texture state; the texcoord
            // channel decides which texture unit it lands in.
            int textureCount = lt.getdiffuse().gettextureCount();
            for (int t = 0; t < textureCount; t++) {
                mat.setState(processTexture(
                        lt.getdiffuse().gettextureAt(t), mat));
            }
        }
    }
    if (lt.hasemission()) {
        materialState.setEmissive(getColor(lt.getemission().getcolor()));
    }
    mat.setState(materialState);
    // Reflective and transparency attributes are currently ignored.
}
/**
 * processTexture generates a texture state that contains the image and
 * texture coordinate unit information. This texture state is returned to
 * be placed in the Collada material.
 *
 * @param texture the texture type to process.
 * @param mat the material the texture state belongs to.
 * @return the generated TextureState that handles this texture tag.
 * @throws Exception thrown if there is a problem processing the xml.
 */
public TextureState processTexture(textureType texture,
        ColladaMaterial mat) throws Exception {
    String textureKey = texture.gettexture().toString();
    String texcoord = texture.gettexcoord().toString();
    // Texcoord names look like "...CHANNELn"; n selects the texture unit.
    int marker = texcoord.indexOf("CHANNEL");
    if (marker >= 0) {
        texcoord = texcoord.substring(marker + "CHANNEL".length());
    }
    int unit = 0;
    try {
        unit = Integer.parseInt(texcoord) - 1;
    } catch (NumberFormatException ignored) {
        // Not a numeric channel suffix; default to the first unit.
    }
    return processTexture(textureKey, mat, unit);
}
/**
 * Resolves a texture sampler key through the resource library
 * (sampler id, then surface id, then image id, then file name) and loads
 * the resulting file into the material's texture state at the given unit.
 *
 * @param key the sampler key to resolve.
 * @param mat the material whose texture state receives the texture.
 * @param index the texture unit to load into.
 * @return the texture state holding the loaded texture, or null when the
 *         key cannot be fully resolved through the library.
 * @throws Exception thrown if there is a problem processing the xml.
 */
public TextureState processTexture(String key, ColladaMaterial mat,
        int index) throws Exception {
    TextureState textureState = (TextureState) mat
            .getState(RenderState.RS_TEXTURE);
    if (textureState == null) {
        textureState = DisplaySystem.getDisplaySystem().getRenderer()
                .createTextureState();
    }
    String surfaceName = (String) resourceLibrary.get(key);
    if (surfaceName == null) {
        return null;
    }
    String imageName = (String) resourceLibrary.get(surfaceName);
    if (imageName == null) {
        return null;
    }
    String filename = (String) resourceLibrary.get(imageName);
    loadTexture(textureState, filename, mat, index);
    return textureState;
}
/**
 * Locates the named texture through the resource locator and installs it
 * into the given texture state at the requested unit, applying the
 * material's min/mag filter and wrap settings. Logs a warning (unless
 * squelched) when the texture cannot be found.
 *
 * @param ts the texture state receiving the texture.
 * @param filename the texture file to locate and load.
 * @param mat the material supplying filter and wrap constants.
 * @param index the texture unit to install into.
 */
private void loadTexture(TextureState ts, String filename,
        ColladaMaterial mat, int index) {
    URL textureURL = ResourceLocatorTool.locateResource(
            ResourceLocatorTool.TYPE_TEXTURE, filename);
    if (textureURL == null) {
        if (!squelch) {
            logger.warning("Invalid or missing texture: \"" + filename
                    + "\"");
        }
        return;
    }
    Texture texture = TextureManager.loadTexture(textureURL,
            mat.getMinFilterConstant(), mat.getMagFilterConstant(),
            Image.Format.GuessNoCompression, 0, true);
    texture.setWrap(WrapAxis.S, mat.getWrapSConstant());
    texture.setWrap(WrapAxis.T, mat.getWrapTConstant());
    ts.setTexture(texture, index);
}
/**
 * Process Geometry will build a number of Geometry objects from the given
 * library, storing each under its geometry id in the resource library and
 * recording the id in the geometryNames list.
 *
 * @param geometryLibrary the geometries to process individually.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processGeometry(library_geometriesType geometryLibrary)
        throws Exception {
    int geometryCount = geometryLibrary.getgeometryCount();
    for (int g = 0; g < geometryCount; g++) {
        geometryType geom = geometryLibrary.getgeometryAt(g);
        if (geom.hasmesh()) {
            String geometryId = geom.getid().toString();
            int meshCount = geom.getmeshCount();
            for (int m = 0; m < meshCount; m++) {
                Spatial spatial = processMesh(geom.getmeshAt(m), geom);
                put(geometryId, spatial);
                if (geometryNames == null) {
                    geometryNames = new ArrayList<String>();
                }
                geometryNames.add(geometryId);
            }
        }
        // Splines are not currently supported.
        if (geom.hasspline() && !squelch) {
            logger.warning("splines not yet supported.");
        }
    }
}
/**
 * processControllerLibrary builds a controller for each controller tag in
 * the file.
 *
 * @param controllerLibrary the controller library object to parse.
 * @throws Exception thrown if there is a problem with the loader.
 */
private void processControllerLibrary(
        library_controllersType controllerLibrary) throws Exception {
    if (!controllerLibrary.hascontroller()) {
        return;
    }
    int controllerCount = controllerLibrary.getcontrollerCount();
    for (int c = 0; c < controllerCount; c++) {
        processController(controllerLibrary.getcontrollerAt(c));
    }
}
/**
 * Controllers define how one object deforms another; in COLLADA these are
 * skins and morph targets, which are mutually exclusive per controller.
 *
 * @param controller the controller to process.
 * @throws Exception thrown if there is a problem processing the xml.
 */
private void processController(controllerType controller) throws Exception {
    if (controller.hasskin()) {
        // There can only be one skin per controller.
        processSkin(controller.getid().toString(), controller.getskin());
    } else if (controller.hasmorph()) {
        // Morph targets are not currently supported.
    }
}
/**
 * processSkin builds a SkinNode that ties the vertex information of a mesh
 * to the skeletal system that deforms it.
 *
 * @param id the id of the skin controller, used for naming and as the
 *        resource library key of the created SkinNode.
 * @param skin the skin to process.
 * @throws Exception thrown if there is a problem parsing the skin.
 */
private void processSkin(String id, skinType skin) throws Exception {
    SkinNode skinNode = new SkinNode(id + "_node");
    if (skinNodeNames == null) {
        skinNodeNames = new ArrayList<String>();
    }
    skinNodeNames.add(id);
    put(id, skinNode);
    // The bind shape matrix defines the overall orientation of the mesh
    // before any skinning occurs.
    if (skin.hasbind_shape_matrix()) {
        String key = skin.getsource().toString();
        if (key.startsWith("#")) {
            key = key.substring(1);
        }
        Spatial mesh = (Spatial) resourceLibrary.get(key);
        if (mesh == null) {
            if (!squelch) {
                logger.warning(key
                        + " mesh does NOT exist in COLLADA file.");
            }
            return;
        }
        Node skins = null;
        if (mesh instanceof TriMesh) {
            // Wrap a bare TriMesh in a Node so the SkinNode has a
            // container for its skins.
            skins = new Node(mesh.getName());
            skins.attachChild(mesh);
            resourceLibrary.put(key, skins);
        } else if (mesh instanceof Node) {
            skins = (Node) mesh;
        } else {
            if (!squelch) {
                logger.warning(key + " mesh is of unsupported skin type: "
                        + mesh);
            }
            return;
        }
        processBindShapeMatrix(skinNode, skin.getbind_shape_matrix());
        skinNode.setSkins(skins);
    }
    // Sources define the joints, the binding table and the weights. The
    // exporter names them skin-joint-*, skin-binding-table-*, etc.; the
    // content determines what each one is.
    if (skin.hassource2()) {
        for (int i = 0; i < skin.getsource2Count(); i++) {
            processControllerSource(skin.getsource2At(i));
        }
    }
    // The vertex weights will be assigned to the appropriate bones.
    if (skin.hasvertex_weights()) {
        processVertexWeights(skin.getvertex_weights(), skinNode);
    }
    if (skin.hasjoints()) {
        String[] jointIds = null;
        Matrix4f[] bindMatrices = null;
        // Pair each joint with its inverse bind matrix.
        if (skin.getjoints().hasinput()) {
            for (int i = 0; i < skin.getjoints().getinputCount(); i++) {
                if ("JOINT".equals(skin.getjoints().getinputAt(i)
                        .getsemantic().toString())) {
                    jointIds = (String[]) resourceLibrary.get(skin
                            .getjoints().getinputAt(i).getsource()
                            .toString().substring(1));
                } else if ("INV_BIND_MATRIX".equals(skin.getjoints()
                        .getinputAt(i).getsemantic().toString())) {
                    bindMatrices = (Matrix4f[]) resourceLibrary.get(skin
                            .getjoints().getinputAt(i).getsource()
                            .toString().substring(1));
                }
            }
        }
        // FIX: guard against a missing INV_BIND_MATRIX input, a length
        // mismatch, or an unresolved bone; each previously caused a
        // NullPointerException / ArrayIndexOutOfBoundsException.
        if (jointIds != null && bindMatrices != null) {
            int pairCount = Math.min(jointIds.length, bindMatrices.length);
            for (int i = 0; i < pairCount; i++) {
                Bone bone = (Bone) resourceLibrary.get(jointIds[i]);
                if (bone != null) {
                    bone.setBindMatrix(bindMatrices[i].invert());
                }
            }
        }
    }
}
/**
 * processVertexWeights defines a list of vertices and weights for the
 * bones that influence them. Each vertex's influences are pairs of
 * (joint index, weight index) in the &lt;v&gt; element; the bones were
 * prebuilt in processControllerSource.
 *
 * @param weights the vertex weight element to parse.
 * @param skinNode the skin node receiving the bone influences.
 * @throws Exception thrown if there is a problem processing the xml.
 */
@SuppressWarnings("unchecked")
private void processVertexWeights(vertex_weightsType weights,
        SkinNode skinNode) throws Exception {
    // Number of bone influences per vertex.
    int[] boneCount = new int[weights.getcount().intValue()];
    StringTokenizer st = new StringTokenizer(weights.getvcount().getValue());
    for (int i = 0; i < boneCount.length; i++) {
        boneCount[i] = Integer.parseInt(st.nextToken());
    }
    st = new StringTokenizer(weights.getv().getValue());
    String[] boneIdArray = null;
    float[] weightArray = null;
    for (int i = 0; i < weights.getinputCount(); i++) {
        if ("JOINT".equals(weights.getinputAt(i).getsemantic().toString())) {
            String key = weights.getinputAt(i).getsource().toString();
            boneIdArray = (String[]) resourceLibrary.get(key.substring(1));
        } else if ("WEIGHT".equals(weights.getinputAt(i).getsemantic()
                .toString())) {
            String key = weights.getinputAt(i).getsource().toString();
            weightArray = (float[]) resourceLibrary.get(key.substring(1));
        }
    }
    if (boneIdArray == null || weightArray == null) {
        if (!squelch) {
            logger.warning("Missing resource values for either bone "
                    + "weights or bone vertex ids.");
        }
        return;
    }
    Map<Integer, ArrayList<MeshVertPair>> vertMap = (Map) resourceLibrary
            .get(skinNode.getSkins().getName() + "VertMap");
    if (vertMap == null) {
        // FIX: a missing vertex map previously caused a
        // NullPointerException below.
        if (!squelch) {
            logger.warning("Missing vertex map for skin "
                    + skinNode.getSkins().getName());
        }
        return;
    }
    int vertex = 0;
    while (st.hasMoreTokens()) {
        for (int i = 0; i < boneCount[vertex]; i++) {
            int jointIndex = Integer.parseInt(st.nextToken());
            int weightIndex = Integer.parseInt(st.nextToken());
            float weight = weightArray[weightIndex];
            ArrayList<MeshVertPair> target = vertMap.get(vertex);
            if (target != null) {
                for (int j = 0, max = target.size(); j < max; j++) {
                    MeshVertPair bvp = target.get(j);
                    // FIX: use the locally resolved, null-checked
                    // boneIdArray rather than the importer-level boneIds
                    // field, which may belong to a different controller.
                    skinNode.addBoneInfluence(bvp.mesh, bvp.index,
                            boneIdArray[jointIndex], weight);
                }
            }
        }
        vertex++;
    }
}
/**
* processControllerSource will process the source types that define how a
* controller is built. This includes support for skin joints, bindings and
* weights.
*
* @param source
* the source to process.
* @throws Exception
* thrown if there is a problem processing the XML.
*/
private void processControllerSource(sourceType source) throws Exception {
// check for the joint id list
String key = source.gettechnique_common().getaccessor().getparam()
.gettype().getValue();
if (key.equalsIgnoreCase("IDREF")) {
if (source.hasIDREF_array()) {
IDREF_arrayType idrefs = source.getIDREF_array();
Bone[] bones = new Bone[idrefs.getcount().intValue()];
boneIds = new String[bones.length];
StringTokenizer st = new StringTokenizer(idrefs.getValue()
.toString());
for (int i = 0; i < bones.length; i++) {
// this skin has a number of bones assigned to it.
// Create a Bone for each entry.
bones[i] = new Bone(st.nextToken());
boneIds[i] = bones[i].getName();
put(boneIds[i], bones[i]);
}
put(source.getid().toString(), boneIds);
}
} else if (key.equalsIgnoreCase("Name")) {
if (source.hasName_array()) {
Name_arrayType names = source.getName_array();
Bone[] bones = new Bone[names.getcount().intValue()];
boneIds = new String[bones.length];
StringTokenizer st = new StringTokenizer(names.getValue()
.toString());
for (int i = 0; i < bones.length; i++) {
// this skin has a number of bones assigned to it.
// Create a Bone for each entry.
bones[i] = new Bone(st.nextToken());
boneIds[i] = bones[i].getName();
put(boneIds[i], bones[i]);
put(source.getid().toString(), boneIds);
}
}
} else if (key.equalsIgnoreCase("float4x4")) {
StringTokenizer st = new StringTokenizer(source.getfloat_array()
.getValue().toString());
int numOfTransforms = st.countTokens() / 16;
// this creates a 4x4 matrix
Matrix4f[] tm = new Matrix4f[numOfTransforms];
for (int i = 0; i < tm.length; i++) {
tm[i] = new Matrix4f();
float[] data = new float[16];
for (int x = 0; x < 16; x++) {
data[x] = Float.parseFloat(st.nextToken());
}
tm[i].set(data, true); // collada matrices are in row order.
}
put(source.getid().toString(), tm);
} else if (key.equalsIgnoreCase("float")) {
float_arrayType floats = source.getfloat_array();
float[] weights = new float[floats.getcount().intValue()];
StringTokenizer st = new StringTokenizer(floats.getValue()
.toString());
for (int i = 0; i < weights.length; i++) {
weights[i] = Float.parseFloat(st.nextToken());
}
put(source.getid().toString(), weights);
}
}
/**
* processBindShapeMatrix sets the initial transform of the skinned mesh.
* The 4x4 matrix is converted to a 3x3 matrix and a vector, then passed to
* the skinned mesh for use.
*
* @param skin
* the skin to apply the bind to.
* @param matrix
* the matrix to parse.
*/
private void processBindShapeMatrix(SkinNode skin, float4x4 matrix) {
Matrix4f mat = new Matrix4f();
StringTokenizer st = new StringTokenizer(matrix.getValue());
float[] data = new float[16];
for (int x = 0; x < 16; x++) {
data[x] = Float.parseFloat(st.nextToken());
}
mat.set(data, true); // collada matrices are in row order.
skin.setBindMatrix(mat);
}
    /**
     * processBindMaterial binds materials to the geometry they apply to: every
     * instance_material entry of the bind_material's technique_common section
     * is processed against the supplied spatial.
     *
     * @param material
     *            the bind_material element to process.
     * @param geomBindTo
     *            the spatial the materials are bound to.
     * @throws Exception
     *             thrown if there is a problem processing the XML.
     */
    private void processBindMaterial(bind_materialType material,
            Spatial geomBindTo) throws Exception {
        technique_commonType common = material.gettechnique_common();
        for (int i = 0; i < common.getinstance_materialCount(); i++) {
            processInstanceMaterial(common.getinstance_materialAt(i),
                    geomBindTo);
        }
    }
    /**
     * processMesh will create either lines or a TriMesh. This means that the
     * only supported child elements are: triangles and lines or linestrips.
     * Polygons, trifans and tristrips are ignored.
     *
     * @param mesh
     *            the mesh to parse.
     * @param geom
     *            the geometryType of the Geometry to build.
     * @return the created Geometry built from the mesh data.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private Spatial processMesh(meshType mesh, geometryType geom)
            throws Exception {
        // we need to build all the source data objects.
        for (int i = 0; i < mesh.getsourceCount(); i++) {
            sourceType source = mesh.getsourceAt(i);
            if (source.hasfloat_array()) {
                float_arrayType floatArray = source.getfloat_array();
                StringTokenizer st = new StringTokenizer(floatArray.getValue()
                        .toString());
                // build an array of data to use for the final vector list.
                float[] floats = new float[floatArray.getcount().intValue()];
                for (int j = 0; j < floats.length; j++) {
                    floats[j] = Float.parseFloat(st.nextToken());
                }
                // technique_common should have the accessor type
                if (source.hastechnique_common()) {
                    accessorType accessor = source.gettechnique_common()
                            .getaccessor();
                    // create an array of Vector3fs, using zero for the last
                    // element
                    // if the stride is 2 (the UV map case)
                    Vector3f[] vecs = new Vector3f[accessor.getcount()
                            .intValue()];
                    int stride = accessor.getstride().intValue();
                    if (2 == stride) {
                        for (int k = 0; k < vecs.length; k++) {
                            vecs[k] = new Vector3f(floats[(k * stride)],
                                    floats[(k * stride) + 1], 0.0f);
                        }
                    } else {
                        // only strides of 2 and 3 are expected here
                        assert (3 == stride);
                        for (int k = 0; k < vecs.length; k++) {
                            vecs[k] = new Vector3f(floats[(k * stride)],
                                    floats[(k * stride) + 1],
                                    floats[(k * stride) + 2]);
                        }
                    }
                    // register the vectors under the source's id so the
                    // triangle/polygon processors can look them up later.
                    put(source.getid().toString(), vecs);
                }
            }
        }
        // next we have to define what source defines the vertices positional
        // information
        if (mesh.hasvertices()) {
            if (mesh.getvertices().hasinput()) {
                // store an id -> source-id string indirection; consumers
                // follow the string chain until they reach the vector array.
                put(mesh.getvertices().getid().toString(), mesh.getvertices()
                        .getinput().getsource().toString());
            }
        }
        // determine what type of geometry this is, and use the
        // lists to build the object.
        if (mesh.hastriangles()) {
            return processTriMesh(mesh, geom);
        } else if (mesh.haspolygons()) {
            return processPolygonMesh(mesh, geom);
        } else if (mesh.haslines()) {
            return processLines(mesh, geom);
        } else {
            // unsupported primitive type (trifans, tristrips, ...)
            return null;
        }
    }
    /**
     * processTriMesh will process the triangles tag from the mesh section of
     * the COLLADA file. A jME TriMesh is returned that defines the vertices,
     * indices, normals, texture coordinates and colors.
     *
     * @param mesh
     *            the meshType to process for the trimesh.
     * @param geom
     *            the geometryType of the TriMesh to build.
     * @return the jME tri mesh representing the COLLADA mesh.
     * @throws Exception
     *             thrown if there is a problem processing the xml.
     */
    private Spatial processTriMesh(meshType mesh, geometryType geom)
            throws Exception {
        // maps a COLLADA vertex index to the (mesh, buffer position) pairs it
        // was expanded to; consumed later when bone influences are assigned.
        HashMap<Integer, ArrayList<MeshVertPair>> vertMap = new HashMap<Integer, ArrayList<MeshVertPair>>();
        put(geom.getid().toString() + "VertMap", vertMap);
        Node parentNode = new Node(geom.getid().toString());
        for (int triangleIndex = 0; triangleIndex < mesh.gettrianglesCount(); triangleIndex++) {
            trianglesType tri = mesh.gettrianglesAt(triangleIndex);
            TriMesh triMesh = new TriMesh(geom.getid().toString());
            if (tri.hasmaterial()) {
                // first set the appropriate materials to this mesh.
                String matKey = (String) resourceLibrary.get(tri.getmaterial()
                        .toString());
                triMesh.setName(triMesh.getName()+"-"+tri.getmaterial().toString());
                ColladaMaterial cm = (ColladaMaterial) resourceLibrary
                        .get(matKey);
                if (cm != null) {
                    for (int i = 0; i < RenderState.RS_MAX_STATE; i++) {
                        if (cm.getState(i) != null) {
                            // transparent materials go to the transparent queue
                            if (cm.getState(i).getType() == RenderState.RS_BLEND) {
                                triMesh
                                        .setRenderQueueMode(Renderer.QUEUE_TRANSPARENT);
                            }
                            // clone the state as different mesh's may have
                            // different
                            // attributes
                            // (cloning is done via a binary export/import
                            // round trip)
                            try {
                                ByteArrayOutputStream out = new ByteArrayOutputStream();
                                BinaryExporter.getInstance().save(
                                        cm.getState(i), out);
                                ByteArrayInputStream in = new ByteArrayInputStream(
                                        out.toByteArray());
                                RenderState rs = (RenderState) BinaryImporter
                                        .getInstance().load(in);
                                triMesh.setRenderState(rs);
                            } catch (IOException e) {
                                logger
                                        .throwing(
                                                this.getClass().toString(),
                                                "processTriMesh(meshType mesh, geometryType geom)",
                                                e);
                            }
                        }
                    }
                    ArrayList<Controller> cList = cm.getControllerList();
                    if (cList != null) {
                        for (int c = 0; c < cList.size(); c++) {
                            if (cList.get(c) instanceof TextureKeyframeController) {
                                TextureState ts = (TextureState) triMesh
                                        .getRenderState(RenderState.RS_TEXTURE);
                                if (ts != null) {
                                    // allow wrapping, as animated textures will
                                    // almost always need it.
                                    ts.getTexture().setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
                                    ts.getTexture().setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
                                    ((TextureKeyframeController) cList.get(c))
                                            .setTexture(ts.getTexture());
                                }
                            }
                        }
                    }
                    if (mesh.hasextra()) {
                        for (int i = 0; i < mesh.getextraCount(); i++) {
                            try {
                                ExtraPluginManager.processExtra(triMesh, mesh
                                        .getextraAt(i));
                            } catch (Exception e) {
                                // extras are best-effort; log and continue
                                if (!squelch) {
                                    logger
                                            .log(
                                                    Level.INFO,
                                                    "Error processing extra information for mesh",
                                                    e);
                                }
                            }
                        }
                    }
                }
                subMaterialLibrary.put(triMesh, tri.getmaterial().toString());
            }
            // build the index buffer, this is going to be easy as it's only
            // 0...N where N is the number of vertices in the model.
            IntBuffer indexBuffer = BufferUtils.createIntBuffer(tri.getcount()
                    .intValue() * 3);
            for (int i = 0; i < indexBuffer.capacity(); i++) {
                indexBuffer.put(i);
            }
            triMesh.setIndexBuffer(indexBuffer);
            // find the maximum offset to understand the stride
            int maxOffset = -1;
            for (int i = 0; i < tri.getinputCount(); i++) {
                int temp = tri.getinputAt(i).getoffset().intValue();
                if (maxOffset < temp) {
                    maxOffset = temp;
                }
            }
            // next build the other buffers, based on the input semantic
            for (int i = 0; i < tri.getinputCount(); i++) {
                if ("VERTEX".equals(tri.getinputAt(i).getsemantic().toString())) {
                    // build the vertex buffer
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    // follow string indirections (vertices id -> source id)
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int vertCount = tri.getcount().intValue() * 3;
                    FloatBuffer vertBuffer = BufferUtils
                            .createVector3Buffer(vertCount);
                    triMesh.setVertexCount(vertCount);
                    for (int j = 0; j < vertCount; j++) {
                        // need to store the index in p to what j is for later
                        // processing the index to the vert for bones
                        int vertKey = Integer.parseInt(st.nextToken());
                        ArrayList<MeshVertPair> storage = vertMap.get(Integer
                                .valueOf(vertKey));
                        if (storage == null) {
                            storage = new ArrayList<MeshVertPair>();
                            storage.add(new MeshVertPair(triangleIndex, j));
                            vertMap.put(Integer.valueOf(vertKey), storage);
                        } else {
                            storage.add(new MeshVertPair(triangleIndex, j));
                        }
                        BufferUtils.setInBuffer(v[vertKey], vertBuffer, j);
                        // skip the tokens of the other inputs in this tuple
                        for (int k = 0; k < maxOffset; k++) {
                            st.nextToken();
                        }
                    }
                    triMesh.setVertexBuffer(vertBuffer);
                } else if ("NORMAL".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the normal buffer
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int normCount = tri.getcount().intValue() * 3;
                    FloatBuffer normBuffer = BufferUtils
                            .createVector3Buffer(normCount);
                    // advance to this input's position within the first tuple
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    for (int j = 0; j < normCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        if (index < v.length)
                            BufferUtils.setInBuffer(v[index], normBuffer, j);
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    triMesh.setNormalBuffer(normBuffer);
                } else if ("TANGENT".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the tangent buffer
                    // NOTE(review): tangents are packed into the COLOR buffer
                    // here, remapped from [-1,1] into [0,1] per component —
                    // presumably decoded by a shader downstream; confirm.
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int normCount = tri.getcount().intValue() * 3;
                    FloatBuffer colorBuffer = BufferUtils
                            .createColorBuffer(normCount);
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    for (int j = 0; j < normCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        if (index < v.length) {
                            colorBuffer.put((-v[index].x) / 2.0f + 0.5f);
                            colorBuffer.put((-v[index].y) / 2.0f + 0.5f);
                            colorBuffer.put((-v[index].z) / 2.0f + 0.5f);
                            colorBuffer.put(0.0f);
                        }
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    triMesh.setColorBuffer(colorBuffer);
                } else if ("TEXCOORD".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the texture buffer
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    if (data == null) {
                        logger.warning("Invalid source: " + key);
                        continue;
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int texCount = tri.getcount().intValue() * 3;
                    FloatBuffer texBuffer = BufferUtils
                            .createVector2Buffer(texCount);
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    int set = tri.getinputAt(i).getset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    // Keep a max to set the wrap mode (if it's 1, clamp, if
                    // it's > 1 || < 0 wrap it)
                    float maxX = -10;
                    float maxY = -10;
                    float minX = 10;
                    float minY = 10;
                    Vector2f tempTexCoord = new Vector2f();
                    for (int j = 0; j < texCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        Vector3f value = v[index];
                        if (value.x > maxX) {
                            maxX = value.x;
                        }
                        if (value.x < minX) {
                            minX = value.x;
                        }
                        if (value.y > maxY) {
                            maxY = value.y;
                        }
                        if (value.y < minY) {
                            minY = value.y;
                        }
                        tempTexCoord.set(value.x, value.y);
                        BufferUtils.setInBuffer(tempTexCoord, texBuffer, j);
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    // COLLADA texcoord sets are 1-based; map to 0-based units
                    int unit;
                    if (set == 0) {
                        unit = 0;
                    } else {
                        unit = set - 1;
                    }
                    triMesh.setTextureCoords(new TexCoords(texBuffer,2), unit);
                    // Set the wrap mode, check if the mesh has a texture
                    // first, if not check the geometry.
                    // Then, based on the texture coordinates, we may need to
                    // change it from the default.
                    //XXX: not a good way of doing it
                    // TextureState ts = (TextureState) triMesh
                    // .getRenderState(RenderState.RS_TEXTURE);
                    // if (ts == null) {
                    // ts = (TextureState) triMesh
                    // .getRenderState(RenderState.RS_TEXTURE);
                    // }
                    // if (ts != null) {
                    // Texture t = ts.getTexture(unit);
                    // if (t != null) {
                    // if (maxX > 1 || minX < 0) {
                    // t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
                    // } else {
                    // t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Clamp);
                    // }
                    //
                    // if (maxY > 1 || minY < 0) {
                    // t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
                    // } else {
                    // t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Clamp);
                    // }
                    // }
                    // }
                } else if ("COLOR".equals(tri.getinputAt(i).getsemantic()
                        .toString())) {
                    // build the color buffer (alpha fixed at 1)
                    String key = tri.getinputAt(i).getsource().getValue();
                    if (key.startsWith("#")) {
                        key = key.substring(1);
                    }
                    Object data = resourceLibrary.get(key);
                    while (data instanceof String) {
                        key = (String) data;
                        if (key.startsWith("#")) {
                            key = key.substring(1);
                        }
                        data = resourceLibrary.get(key);
                    }
                    Vector3f[] v = (Vector3f[]) data;
                    StringTokenizer st = new StringTokenizer(tri.getp()
                            .getValue());
                    int colorCount = tri.getcount().intValue() * 3;
                    FloatBuffer colorBuffer = BufferUtils
                            .createColorBuffer(colorCount);
                    int offset = tri.getinputAt(i).getoffset().intValue();
                    for (int j = 0; j < offset; j++) {
                        st.nextToken();
                    }
                    ColorRGBA tempColor = new ColorRGBA();
                    for (int j = 0; j < colorCount; j++) {
                        int index = Integer.parseInt(st.nextToken());
                        Vector3f value = v[index];
                        tempColor.set(value.x, value.y, value.z, 1);
                        BufferUtils.setInBuffer(tempColor, colorBuffer, j);
                        for (int k = 0; k < maxOffset; k++) {
                            if (st.hasMoreTokens()) {
                                st.nextToken();
                            }
                        }
                    }
                    triMesh.setColorBuffer(colorBuffer);
                }
            }
            triMesh.setModelBound(new BoundingBox());
            triMesh.updateModelBound();
            //XXX: not parenting under a node when only one mesh needs to be fixed!! /rherlitz
            // if (mesh.gettrianglesCount() == 1) {
            // return triMesh;
            // }
            parentNode.attachChild(triMesh);
        }
        return parentNode;
    }
/**
* TODO: this implementation is a quick hack to import triangles supplied in
* polygon form... processPolygonMesh will process the polygons tag from the
* mesh section of the COLLADA file. A jME TriMesh is returned that defines
* the vertices, indices, normals, texture coordinates and colors.
*
* @param mesh
* the meshType to process for the trimesh.
* @param geom
* the geometryType of the TriMesh to build.
* @return the jME tri mesh representing the COLLADA mesh.
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private Spatial processPolygonMesh(meshType mesh, geometryType geom)
throws Exception {
HashMap<Integer, ArrayList<MeshVertPair>> vertMap = new HashMap<Integer, ArrayList<MeshVertPair>>();
put(geom.getid().toString() + "VertMap", vertMap);
Node parentNode = new Node(geom.getid().toString());
for (int triangleIndex = 0; triangleIndex < mesh.getpolygonsCount(); triangleIndex++) {
polygonsType poly = mesh.getpolygonsAt(triangleIndex);
TriMesh triMesh = new TriMesh(geom.getid().toString());
if (poly.hasmaterial()) {
// first set the appropriate materials to this mesh.
String matKey = (String) resourceLibrary.get(poly.getmaterial()
.toString());
ColladaMaterial cm = (ColladaMaterial) resourceLibrary
.get(matKey);
if (cm != null) {
for (int i = 0; i < RenderState.RS_MAX_STATE; i++) {
if (cm.getState(i) != null) {
if (cm.getState(i).getType() == RenderState.RS_BLEND) {
triMesh
.setRenderQueueMode(Renderer.QUEUE_TRANSPARENT);
}
// clone the state as different mesh's may have
// different
// attributes
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
BinaryExporter.getInstance().save(
cm.getState(i), out);
ByteArrayInputStream in = new ByteArrayInputStream(
out.toByteArray());
RenderState rs = (RenderState) BinaryImporter
.getInstance().load(in);
triMesh.setRenderState(rs);
} catch (IOException e) {
logger
.throwing(
this.getClass().toString(),
"processTriMesh(meshType mesh, geometryType geom)",
e);
}
}
}
ArrayList<Controller> cList = cm.getControllerList();
if (cList != null) {
for (int c = 0; c < cList.size(); c++) {
if (cList.get(c) instanceof TextureKeyframeController) {
TextureState ts = (TextureState) triMesh
.getRenderState(RenderState.RS_TEXTURE);
if (ts != null) {
// allow wrapping, as animated textures will
// almost always need it.
ts.getTexture().setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
ts.getTexture().setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
((TextureKeyframeController) cList.get(c))
.setTexture(ts.getTexture());
}
}
}
}
if (mesh.hasextra()) {
for (int i = 0; i < mesh.getextraCount(); i++) {
try {
ExtraPluginManager.processExtra(triMesh, mesh
.getextraAt(i));
} catch (Exception e) {
if (!squelch) {
logger
.log(
Level.INFO,
"Error processing extra information for mesh",
e);
}
}
}
}
}
subMaterialLibrary.put(triMesh, poly.getmaterial().toString());
}
// build the index buffer, this is going to be easy as it's only
// 0...N where N is the number of vertices in the model.
IntBuffer indexBuffer = BufferUtils.createIntBuffer(poly.getcount()
.intValue() * 3);
for (int i = 0; i < indexBuffer.capacity(); i++) {
indexBuffer.put(i);
}
triMesh.setIndexBuffer(indexBuffer);
// find the maximum offset to understand the stride
int maxOffset = -1;
for (int i = 0; i < poly.getinputCount(); i++) {
int temp = poly.getinputAt(i).getoffset().intValue();
if (maxOffset < temp) {
maxOffset = temp;
}
}
int stride = maxOffset + 1;
// next build the other buffers, based on the input semantic
for (int i = 0; i < poly.getinputCount(); i++) {
if ("VERTEX"
.equals(poly.getinputAt(i).getsemantic().toString())) {
// build the vertex buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = null;
int vertCount = poly.getcount().intValue() * stride;
FloatBuffer vertBuffer = BufferUtils
.createVector3Buffer(vertCount);
triMesh.setVertexCount(vertCount);
for (int j = 0; j < vertCount; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
// need to store the index in p to what j is for later
// processing the index to the vert for bones
int vertKey = Integer.parseInt(st.nextToken());
ArrayList<MeshVertPair> storage = vertMap.get(Integer
.valueOf(vertKey));
if (storage == null) {
storage = new ArrayList<MeshVertPair>();
storage.add(new MeshVertPair(triangleIndex, j));
vertMap.put(Integer.valueOf(vertKey), storage);
} else {
storage.add(new MeshVertPair(triangleIndex, j));
}
BufferUtils.setInBuffer(v[vertKey], vertBuffer, j);
for (int k = 0; k < maxOffset; k++) {
st.nextToken();
}
}
triMesh.setVertexBuffer(vertBuffer);
} else if ("NORMAL".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the normal buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = null;
int normCount = poly.getcount().intValue() * stride;
FloatBuffer normBuffer = BufferUtils
.createVector3Buffer(normCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
st.nextToken();
}
for (int j = 0; j < normCount; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
int index = Integer.parseInt(st.nextToken());
if (index < v.length)
BufferUtils.setInBuffer(v[index], normBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setNormalBuffer(normBuffer);
} else if ("TANGENT".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the tangent buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int normCount = poly.getcount().intValue() * 3;
FloatBuffer normBuffer = BufferUtils
.createVector3Buffer(normCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
st.nextToken();
}
for (int j = 0; j < normCount; j++) {
int index = Integer.parseInt(st.nextToken());
if (index < v.length)
BufferUtils.setInBuffer(v[index], normBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setTangentBuffer(normBuffer);
logger.info("setting tangent buffer: " + normBuffer);
} else if ("BINORMAL".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the tangent buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int normCount = poly.getcount().intValue() * 3;
FloatBuffer normBuffer = BufferUtils
.createVector3Buffer(normCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
st.nextToken();
}
for (int j = 0; j < normCount; j++) {
int index = Integer.parseInt(st.nextToken());
if (index < v.length)
BufferUtils.setInBuffer(v[index], normBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setBinormalBuffer(normBuffer);
} else if ("TEXCOORD".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the texture buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
if (data == null) {
logger.warning("Invalid source: " + key);
continue;
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int texCount = poly.getcount().intValue() * stride;
FloatBuffer texBuffer = BufferUtils
.createVector2Buffer(texCount);
int offset = poly.getinputAt(i).getoffset().intValue();
int set = poly.getinputAt(i).getset().intValue();
for (int j = 0; j < offset; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
st.nextToken();
}
// Keep a max to set the wrap mode (if it's 1, clamp, if
// it's > 1 wrap it)
float maxX = -1, maxY = -1;
float minX = 1, minY = 1;
Vector2f tempTexCoord = new Vector2f();
for (int j = 0; j < texCount; j++) {
if (j % stride == 0) {
st = new StringTokenizer(poly.getpAt(j / stride)
.getValue());
}
int index = Integer.parseInt(st.nextToken());
Vector3f value = v[index];
if (value.x > maxX) {
maxX = value.x;
}
if (value.x < minX) {
minX = value.x;
}
if (value.y > maxY) {
maxY = value.y;
}
if (value.y < minY) {
minY = value.y;
}
tempTexCoord.set(value.x, value.y);
BufferUtils.setInBuffer(tempTexCoord, texBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
int unit;
if (set == 0) {
unit = 0;
} else {
unit = set - 1;
}
triMesh.setTextureCoords(new TexCoords(texBuffer,2), unit);
// Set the wrap mode, check if the mesh has a texture
// first, if not
// check the geometry.
// Then, based on the texture coordinates, we may need to
// change it from the
// default.
//XXX: not a good way of doing it
// TextureState ts = (TextureState) triMesh
// .getRenderState(RenderState.RS_TEXTURE);
// if (ts == null) {
// ts = (TextureState) triMesh
// .getRenderState(RenderState.RS_TEXTURE);
// }
// if (ts != null) {
// Texture t = ts.getTexture(unit);
// if (t != null) {
// if (maxX > 1 || minX < 0) {
// t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Repeat);
// } else {
// t.setWrap(Texture.WrapAxis.S, Texture.WrapMode.Clamp);
// }
//
// if (maxY > 1 || minY < 0) {
// t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Repeat);
// } else {
// t.setWrap(Texture.WrapAxis.T, Texture.WrapMode.Clamp);
// }
// }
// }
} else if ("COLOR".equals(poly.getinputAt(i).getsemantic()
.toString())) {
// build the texture buffer
String key = poly.getinputAt(i).getsource().getValue();
if (key.startsWith("#")) {
key = key.substring(1);
}
Object data = resourceLibrary.get(key);
while (data instanceof String) {
key = (String) data;
if (key.startsWith("#")) {
key = key.substring(1);
}
data = resourceLibrary.get(key);
}
Vector3f[] v = (Vector3f[]) data;
StringTokenizer st = new StringTokenizer(poly.getp()
.getValue());
int colorCount = poly.getcount().intValue() * 3;
FloatBuffer colorBuffer = BufferUtils
.createColorBuffer(colorCount);
int offset = poly.getinputAt(i).getoffset().intValue();
for (int j = 0; j < offset; j++) {
st.nextToken();
}
ColorRGBA tempColor = new ColorRGBA();
for (int j = 0; j < colorCount; j++) {
int index = Integer.parseInt(st.nextToken());
Vector3f value = v[index];
tempColor.set(value.x, value.y, value.z, 1);
BufferUtils.setInBuffer(tempColor, colorBuffer, j);
for (int k = 0; k < maxOffset; k++) {
if (st.hasMoreTokens()) {
st.nextToken();
}
}
}
triMesh.setColorBuffer(colorBuffer);
}
}
triMesh.setModelBound(new BoundingBox());
triMesh.updateModelBound();
if (mesh.gettrianglesCount() == 1) {
return triMesh;
}
parentNode.attachChild(triMesh);
}
return parentNode;
}
/**
* processLines will process the lines tag from the mesh section of the
* COLLADA file. A jME Line is returned that defines the vertices, normals,
* texture coordinates and colors.
*
* @param mesh
* the meshType to process for the lines.
* @param geom
* the geomType for the lines
* @return the jME tri mesh representing the COLLADA mesh.
*/
private Spatial processLines(meshType mesh, geometryType geom) {
if (!squelch) {
logger.warning("Line are not supported.");
}
return null;
}
/**
* the nodes library is a collection of nodes that can be instanced later by
* the visual scene.
*
* @param type
* the nodes library to process.
* @throws Exception
* thrown if there is a problem with the processing.
*/
private void processNodes(library_nodesType type)
throws Exception {
Node tempParent = new Node("temp_parent");
for (int i = 0; i < type.getnodeCount(); i++) {
processNode(type.getnodeAt(i), tempParent);
}
// should all be in the resource library now.
}
/**
* The library of visual scenes defines how the loaded geometry is stored in
* the scene graph, including scaling, translation, rotation, etc.
*
* @param libScene
* the library of scenes
* @throws Exception
* thrown if there is a problem processing the xml.
*/
private void processVisualSceneLibrary(library_visual_scenesType libScene)
throws Exception {
for (int i = 0; i < libScene.getvisual_sceneCount(); i++) {
Node scene = new Node(libScene.getvisual_sceneAt(i).getid()
.toString());
put(scene.getName(), scene);
processVisualScene(libScene.getvisual_sceneAt(i), scene);
}
}
    /**
     * the visual scene will contain any number of nodes that define references
     * to geometry. These are then placed into the scene as needed. After all
     * nodes are processed, root-level bones are updated and re-attached.
     *
     * @param scene
     *            the scene to process.
     * @param node
     *            the jME node to attach this scene to.
     * @throws Exception
     *             thrown if there is a problem with the processing.
     */
    private void processVisualScene(visual_sceneType scene, Node node)
            throws Exception {
        for (int i = 0; i < scene.getnodeCount(); i++) {
            processNode(scene.getnodeAt(i), node);
        }
        // NOTE(review): removeFromParent followed by attachChild moves the
        // bone to the end of the child list while iterating by index, so
        // later children shift position and a bone may be visited twice or a
        // sibling skipped — confirm this reordering is intentional.
        for (int i = 0; i < node.getQuantity(); i++) {
            Spatial s = node.getChild(i);
            if (s instanceof Bone) {
                s.updateGeometricState(0, true);
                s.removeFromParent();
                node.attachChild(s);
            }
        }
    }
    /**
     * Processes a COLLADA &lt;node&gt; element: creates (or reuses) a jME
     * Node for it, attaches it to the given parent, applies its local
     * transforms and recurses into any child &lt;node&gt; elements.
     *
     * @param xmlNode
     *            the &lt;node&gt; element to process.
     * @param parent
     *            the jME node that receives the resulting child.
     * @throws Exception
     *             thrown if there is a problem with the processing.
     */
    private void processNode(nodeType2 xmlNode, Node parent) throws Exception {
        // Prefer id, then sid, then name when choosing the child's name.
        String childName = null;
        if (xmlNode.hasid())
            childName = xmlNode.getid().toString();
        else if (xmlNode.hassid())
            childName = xmlNode.getsid().toString();
        else if (xmlNode.hasname())
            childName = xmlNode.getname().toString();
        Node child = null;
        // JOINT nodes become Bones; look the bone up in the resource library
        // first so a joint already referenced by a skin reuses the same Bone.
        if (xmlNode.hastype() && "JOINT".equals(xmlNode.gettype().toString())
                && (xmlNode.hassid() || xmlNode.hasid())) {
            String key = (xmlNode.hassid() ? xmlNode.getsid() : xmlNode.getid())
                    .toString();
            child = (Bone) resourceLibrary.get(key);
            if (child == null) {
                // No skin referenced this joint; create the Bone anyway so
                // the hierarchy stays intact.
                child = new Bone(key);
                put(key, child);
                if (!squelch) {
                    logger.warning("Bone " + key
                            + " is not attached to any vertices.");
                }
            }
            // A joint whose parent is not itself a Bone is a skeleton root;
            // remember its name for later skeleton resolution.
            if (!(parent instanceof Bone)) {
                if (skeletonNames == null) {
                    skeletonNames = new ArrayList<String>();
                }
                skeletonNames.add(key);
            }
        }
        // An <extra> plugin may supply a replacement Node for this element.
        if (xmlNode.hasextra()) {
            for (int i = 0; i < xmlNode.getextraCount(); i++) {
                try {
                    Object o = ExtraPluginManager.processExtra(childName,
                            xmlNode.getextraAt(i));
                    if (o instanceof Node) {
                        child = (Node) o;
                    }
                } catch (Exception e) {
                    if (!squelch) {
                        logger.log(Level.WARNING,
                                "Error processing extra information", e);
                    }
                }
            }
        }
        // Fall back to a plain Node when neither the joint nor an extra
        // plugin produced one.
        if (child == null) {
            child = new Node(childName);
        }
        parent.attachChild(child);
        put(childName, child);
        // Attach any instanced cameras declared on this node.
        if (xmlNode.hasinstance_camera()) {
            for (int i = 0; i < xmlNode.getinstance_cameraCount(); i++) {
                processInstanceCamera(xmlNode.getinstance_cameraAt(i), child);
            }
        }
        // this node has a skeleton and skin
        if (xmlNode.hasinstance_controller()) {
            for (int i = 0; i < xmlNode.getinstance_controllerCount(); i++) {
                processInstanceController(xmlNode.getinstance_controllerAt(i),
                        child);
            }
        }
        if (xmlNode.hasinstance_geometry()) {
            for (int i = 0; i < xmlNode.getinstance_geometryCount(); i++) {
                processInstanceGeom(xmlNode.getinstance_geometryAt(i), child);
            }
        }
        if (xmlNode.hasinstance_node()) {
            for (int i = 0; i < xmlNode.getinstance_nodeCount(); i++) {
                processInstanceNode(xmlNode.getinstance_nodeAt(i), child);
            }
        }
        if (xmlNode.hasinstance_light()) {
            for (int i = 0; i < xmlNode.getinstance_lightCount(); i++) {
                processInstanceLight(xmlNode.getinstance_lightAt(i), child);
            }
        }
        // parse translation (three whitespace-separated floats: x y z)
        if (xmlNode.hastranslate()) {
            Vector3f translate = new Vector3f();
            StringTokenizer st = new StringTokenizer(xmlNode.gettranslate()
                    .getValue().toString());
            translate.x = Float.parseFloat(st.nextToken());
            translate.y = Float.parseFloat(st.nextToken());
            translate.z = Float.parseFloat(st.nextToken());
            child.setLocalTranslation(translate);
        }
        // Accumulate every <rotate> element (axis-angle, angle in degrees)
        // into a single quaternion, applied in document order.
        if (xmlNode.hasrotate()) {
            Quaternion rotation = null;
            for (int i = 0; i < xmlNode.getrotateCount(); i++) {
                Quaternion temp = new Quaternion();
                Vector3f axis = new Vector3f();
                StringTokenizer st = new StringTokenizer(xmlNode.getrotateAt(i)
                        .getValue().toString());
                axis.x = Float.parseFloat(st.nextToken());
                axis.y = Float.parseFloat(st.nextToken());
                axis.z = Float.parseFloat(st.nextToken());
                axis.normalizeLocal();
                float angle = Float.parseFloat(st.nextToken());
                angle *= FastMath.DEG_TO_RAD;
                temp.fromAngleNormalAxis(angle, axis);
                if (rotation == null) {
                    rotation = new Quaternion();
                    rotation.set(temp);
                } else {
                    rotation.multLocal(temp);
                }
            }
            child.setLocalRotation(rotation);
        }
        // A <matrix> supplies the full 4x4 local transform; decompose it
        // into translation, scale and rotation for the jME spatial.
        if (xmlNode.hasmatrix()) {
            Matrix4f tm = new Matrix4f();
            StringTokenizer st = new StringTokenizer(xmlNode.getmatrix()
                    .getValue().toString());
            float[] data = new float[16];
            for (int x = 0; x < 16; x++) {
                data[x] = Float.parseFloat(st.nextToken());
            }
            tm.set(data, true); // collada matrices are in row order.
            child.setLocalTranslation(tm.toTranslationVector());
            // find scale: the length of each column of the upper-left 3x3.
            Vector3f vCol1 = new Vector3f(tm.m00, tm.m10, tm.m20);
            Vector3f vCol2 = new Vector3f(tm.m01, tm.m11, tm.m21);
            Vector3f vCol3 = new Vector3f(tm.m02, tm.m12, tm.m22);
            float scaleX = vCol1.length();
            float scaleY = vCol2.length();
            float scaleZ = vCol3.length();
            child.setLocalScale(new Vector3f(scaleX, scaleY, scaleZ));
            // Divide the scale back out to leave a pure rotation matrix.
            Matrix3f rm = new Matrix3f();
            rm.m00 = tm.m00 / scaleX;
            rm.m10 = tm.m10 / scaleX;
            rm.m20 = tm.m20 / scaleX;
            rm.m01 = tm.m01 / scaleY;
            rm.m11 = tm.m11 / scaleY;
            rm.m21 = tm.m21 / scaleY;
            rm.m02 = tm.m02 / scaleZ;
            rm.m12 = tm.m12 / scaleZ;
            rm.m22 = tm.m22 / scaleZ;
            Quaternion q = new Quaternion().fromRotationMatrix(rm);
            //Quaternion q = tm.toRotationQuat();
            //float scale = FastMath.sqrt(q.norm());
            //System.out.println(scale);
            //q.normalize();
            child.setLocalRotation(q);
        }
        // parse scale (three whitespace-separated floats: x y z)
        if (xmlNode.hasscale()) {
            Vector3f scale = new Vector3f();
            StringTokenizer st = new StringTokenizer(xmlNode.getscale()
                    .getValue().toString());
            scale.x = Float.parseFloat(st.nextToken());
            scale.y = Float.parseFloat(st.nextToken());
            scale.z = Float.parseFloat(st.nextToken());
            child.setLocalScale(scale);
        }
        // parse subnodes recursively
        if (xmlNode.hasnode()) {
            for (int i = 0; i < xmlNode.getnodeCount(); i++) {
                processNode(xmlNode.getnodeAt(i), child);
            }
        }
    }
/**
* processInstanceCamera
*
* @param camera
* @param node
* @throws Exception
*/
private void processInstanceCamera(InstanceWithExtra camera, Node node)
throws Exception {
String key = camera.geturl().toString();
if (key.startsWith("#")) {
key = key.substring(1);
}
CameraNode cn = (CameraNode) resourceLibrary.get(key);
if (cn != null) {
node.attachChild(cn);
}
}
/**
* processInstanceLight
*
* @param light
* @param node
* @throws Exception
*/
private void processInstanceLight(InstanceWithExtra light, Node node)
throws Exception {
String key = light.geturl().toString();
if (key.startsWith("#")) {
key = key.substring(1);
}
LightNode ln = (LightNode) resourceLibrary.get(key);
if (ln != null) {
node.attachChild(ln);
}
}
/**
* processInstanceController
*
* @param controller
* @param node
* @throws Exception
*/
private void processInstanceController(instance_controllerType controller,
Node node) throws Exception {
String key = controller.geturl().toString();
if (key.startsWith("#")) {
key = key.substring(1);
}
SkinNode sNode = (SkinNode) resourceLibrary.get(key);
if (sNode != null) {
node.attachChild(sNode);
} else {
if (!squelch) {
logger.warning("Instance "
+ controller.geturl().toString().substring(1)
+ " does not exist.");
}
}
if (controller.hasskeleton()) {
if (controller.getskeletonCount() > 1) {
if (!squelch) {
logger.warning("Controller has more than one skeleton.");
}
}
String url = controller.getskeleton().getValue();
if (url.startsWith("#")) {
url = url.substring(1);
}
Bone b = (Bone) resourceLibrary.get(url);
if (b != null) {
sNode.setSkeleton(b);
}
}
if (controller.hasbind_material()) {
processBindMaterial(controller.getbind_material(), sNode.getSkins());
}
}
/**
* processInstanceNode
*
* @param instance
* @param parent
* @throws Exception
*/
private void processInstanceNode(InstanceWithExtra instance, Node parent)
throws Exception {
String key = instance.geturl().toString();
if (key.startsWith("#")) {
key = key.substring(1);
}
Spatial spatial = (Spatial) resourceLibrary.get(key);
if (spatial != null) {
if (spatial instanceof Node) {
spatial = new SharedNode(key, (Node) spatial);
}
parent.attachChild(spatial);
}
}
/**
* processInstanceGeom
*
* @param geometry
* @param node
* @throws Exception
*/
private void processInstanceGeom(instance_geometryType geometry, Node node)
throws Exception {
String key = geometry.geturl().toString();
if (key.startsWith("#")) {
key = key.substring(1);
}
Spatial spatial = (Spatial) resourceLibrary.get(key);
if (spatial != null) {
if (spatial instanceof TriMesh) {
spatial = new SharedMesh(key, (TriMesh) spatial);
} else if (spatial instanceof Node) {
spatial = new SharedNode(key, (Node) spatial);
}
node.attachChild(spatial);
if (geometry.hasbind_material()) {
processBindMaterial(geometry.getbind_material(), spatial);
}
}
}
    /**
     * Resolves an instance_material binding: locates the ColladaMaterial
     * referenced by the binding's target and copies its render states onto
     * the geometry bound to the material's symbol.
     *
     * @param material
     *            the instance_material binding to apply.
     * @param geomBindTo
     *            the spatial (or parent node of candidate spatials) the
     *            material is bound to.
     * @throws Exception
     *             thrown if there is a problem with the processing.
     */
    private void processInstanceMaterial(instance_materialType material,
            Spatial geomBindTo) throws Exception {
        String key = material.gettarget().toString();
        if (key.startsWith("#")) {
            key = key.substring(1);
        }
        // Double lookup: presumably the material id resolves to an effect id,
        // which in turn resolves to the ColladaMaterial -- TODO confirm.
        ColladaMaterial cm = (ColladaMaterial) resourceLibrary
                .get(resourceLibrary.get(key));
        Spatial target = geomBindTo;
        String symbol = material.getsymbol().toString();
        // When bound to a node, narrow the target down to the child mesh
        // whose sub-material symbol matches this binding.
        if (target instanceof Node) {
            Node targetNode = (Node) target;
            for (int i = 0; i < targetNode.getQuantity(); ++i) {
                Spatial child = targetNode.getChild(i);
                if (child instanceof TriMesh
                        && symbol.equals(subMaterialLibrary.get(child))) {
                    target = child;
                    break;
                } else if (child instanceof SharedMesh
                        && symbol.equals(subMaterialLibrary
                                .get(((SharedMesh) child).getTarget()))) {
                    target = child;
                    break;
                }
            }
        }
        if (cm != null) {
            for (int i = 0; i < RenderState.RS_MAX_STATE; ++i) {
                if (cm.getState(i) != null) {
                    // Blend states push the target into the transparent queue.
                    if (cm.getState(i).getType() == RenderState.RS_BLEND) {
                        target.setRenderQueueMode(Renderer.QUEUE_TRANSPARENT);
                    }
                    // clone the state (via a binary export/import round trip)
                    // as different mesh's may have different attributes
                    try {
                        ByteArrayOutputStream out = new ByteArrayOutputStream();
                        BinaryExporter.getInstance().save(cm.getState(i), out);
                        ByteArrayInputStream in = new ByteArrayInputStream(out
                                .toByteArray());
                        RenderState rs = (RenderState) BinaryImporter
                                .getInstance().load(in);
                        target.setRenderState(rs);
                    } catch (IOException e) {
                        logger.log(Level.WARNING, "Error cloning state", e);
                    }
                }
            }
        }
    }
/**
* getColor uses a string tokenizer to parse the value of a colorType into a
* ColorRGBA type used internally by jME.
*
* @param color
* the colorType to parse (RGBA format).
* @return the ColorRGBA object to be used by jME.
*/
private ColorRGBA getColor(colorType color) {
ColorRGBA out = new ColorRGBA();
StringTokenizer st = new StringTokenizer(color.getValue().toString());
out.r = Float.parseFloat(st.nextToken());
out.g = Float.parseFloat(st.nextToken());
out.b = Float.parseFloat(st.nextToken());
out.a = Float.parseFloat(st.nextToken());
return out;
}
    /**
     * MeshVertPair simply contain a mesh index and a vertex index. This defines
     * where a specific vertex may be found.
     */
    private class MeshVertPair {
        // index of the mesh the vertex lives in -- presumably into the
        // importer's mesh list; confirm against callers
        public int mesh;
        // index of the vertex within that mesh
        public int index;
        /**
         * Creates a pair locating a single vertex.
         *
         * @param mesh
         *            the mesh index.
         * @param index
         *            the vertex index within that mesh.
         */
        public MeshVertPair(int mesh, int index) {
            this.mesh = mesh;
            this.index = index;
        }
    }
/**
* squelchErrors sets if the ColladaImporter should spit out errors or not
*
* @param b
*/
public static void squelchErrors(boolean b) {
squelch = true;
}
    /**
     * Returns the shared importer instance.
     *
     * @return the current ColladaImporter; may be null -- the instance field
     *         is managed elsewhere in this class (not visible here).
     */
    public static ColladaImporter getInstance() {
        return instance;
    }
}
 | ColladaImporter.squelchErrors(): don't always set squelch to true
git-svn-id: 5afc437a751a4ff2ced778146f5faadda0b504ab@4048 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
| src/com/jmex/model/collada/ColladaImporter.java | ColladaImporter.squelchErrors() don't set squelch always true | <ide><path>rc/com/jmex/model/collada/ColladaImporter.java
<ide> * @param b
<ide> */
<ide> public static void squelchErrors(boolean b) {
<del> squelch = true;
<add> squelch = b;
<ide> }
<ide>
<ide> public static ColladaImporter getInstance() { |
|
Java | bsd-2-clause | 7761d08c77d1869f8bc920f1c0fde8ae24c9b52a | 0 | iron-io/iron_mq_java,iron-io/iron_mq_java | package io.iron.ironmq;
import java.io.IOException;
import java.io.Reader;
import java.io.Serializable;
import com.google.gson.Gson;
/**
* The Queue class represents a specific IronMQ queue bound to a client.
*/
public class Queue {
    // Gson is thread-safe and stateless here; share one instance instead of
    // constructing a new mapper on every call.
    private static final Gson gson = new Gson();

    final private Client client;
    final private String name;

    /**
     * Binds this queue object to a named IronMQ queue on the given client.
     *
     * @param client The client used for all HTTP calls.
     * @param name The queue's name.
     */
    public Queue(Client client, String name) {
        this.client = client;
        this.name = name;
    }

    /**
     * Retrieves a Message from the queue. If there are no items on the queue, an
     * EmptyQueueException is thrown.
     *
     * @throws EmptyQueueException If the queue is empty.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Message get() throws IOException {
        Messages msgs = get(1);
        try {
            return msgs.getMessage(0);
        } catch (IndexOutOfBoundsException e) {
            throw new EmptyQueueException();
        }
    }

    /**
     * Retrieves Messages from the queue using the default timeout of 120
     * seconds.
     *
     * @param numberOfMessages The number of messages to receive. Must be within 1..100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages) throws IOException {
        return get(numberOfMessages, 120);
    }

    /**
     * Retrieves Messages from the queue.
     *
     * @param numberOfMessages The number of messages to receive. Must be within 1..100.
     * @param timeout timeout in seconds.
     * @throws IllegalArgumentException If numberOfMessages is outside 1..100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages, int timeout) throws IOException {
        if (numberOfMessages < 1 || numberOfMessages > 100) {
            throw new IllegalArgumentException("numberOfMessages has to be within 1..100");
        }
        Reader reader = client.get("queues/" + name + "/messages?n="
                + numberOfMessages + "&timeout=" + timeout);
        try {
            return gson.fromJson(reader, Messages.class);
        } finally {
            // Close in finally so a JSON parse failure cannot leak the stream.
            reader.close();
        }
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param id The ID of the message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(String id) throws IOException {
        client.delete("queues/" + name + "/messages/" + id);
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param msg The message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(Message msg) throws IOException {
        deleteMessage(msg.getId());
    }

    /**
     * Pushes a message onto the queue with no timeout, delay, or expiration
     * override.
     *
     * @param msg The body of the message to push.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg) throws IOException {
        return push(msg, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout) throws IOException {
        return push(msg, timeout, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay) throws IOException {
        return push(msg, timeout, delay, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @param expiresIn The message's expiration offset in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay, long expiresIn) throws IOException {
        Message message = new Message();
        message.setBody(msg);
        message.setTimeout(timeout);
        message.setDelay(delay);
        message.setExpiresIn(expiresIn);

        String body = gson.toJson(new Messages(message));
        Reader reader = client.post("queues/" + name + "/messages", body);
        try {
            Ids ids = gson.fromJson(reader, Ids.class);
            return ids.getId(0);
        } finally {
            reader.close();
        }
    }

    /**
     * Clears the queue of all messages.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void clear() throws IOException {
        client.post("queues/" + name + "/clear", "").close();
    }

    /** JSON shape of the queue-info endpoint response. */
    static class Info implements Serializable {
        int count;
        int size;
    }

    /**
     * Returns the number of messages currently on the queue, as reported by
     * the queue-info endpoint's {@code size} field.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public int getSize() throws IOException {
        Reader reader = client.get("queues/" + name);
        try {
            Info info = gson.fromJson(reader, Info.class);
            return info.size;
        } finally {
            reader.close();
        }
    }
}
| src/main/java/io/iron/ironmq/Queue.java | package io.iron.ironmq;
import java.io.IOException;
import java.io.Reader;
import java.io.Serializable;
import com.google.gson.Gson;
/**
* The Queue class represents a specific IronMQ queue bound to a client.
*/
public class Queue {
    // Gson is thread-safe and stateless here; share one instance instead of
    // constructing a new mapper on every call.
    private static final Gson gson = new Gson();

    final private Client client;
    final private String name;

    /**
     * Binds this queue object to a named IronMQ queue on the given client.
     *
     * @param client The client used for all HTTP calls.
     * @param name The queue's name.
     */
    public Queue(Client client, String name) {
        this.client = client;
        this.name = name;
    }

    /**
     * Retrieves a Message from the queue. If there are no items on the queue, an
     * EmptyQueueException is thrown.
     *
     * @throws EmptyQueueException If the queue is empty.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Message get() throws IOException {
        Messages msgs = get(1);
        try {
            return msgs.getMessage(0);
        } catch (IndexOutOfBoundsException e) {
            throw new EmptyQueueException();
        }
    }

    /**
     * Retrieves Messages from the queue using the default timeout of 120
     * seconds.
     *
     * @param numberOfMessages The number of messages to receive. Must be within 1..100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages) throws IOException {
        return get(numberOfMessages, 120);
    }

    /**
     * Retrieves Messages from the queue.
     *
     * @param numberOfMessages The number of messages to receive. Must be within 1..100.
     * @param timeout timeout in seconds.
     * @throws IllegalArgumentException If numberOfMessages is outside 1..100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages, int timeout) throws IOException {
        // Bug fix: the lower bound was previously "< 0", which accepted 0
        // despite the 1..100 contract stated in the error message.
        if (numberOfMessages < 1 || numberOfMessages > 100) {
            throw new IllegalArgumentException("numberOfMessages has to be within 1..100");
        }
        Reader reader = client.get("queues/" + name + "/messages?n="
                + numberOfMessages + "&timeout=" + timeout);
        try {
            return gson.fromJson(reader, Messages.class);
        } finally {
            // Close in finally so a JSON parse failure cannot leak the stream.
            reader.close();
        }
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param id The ID of the message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(String id) throws IOException {
        client.delete("queues/" + name + "/messages/" + id);
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param msg The message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(Message msg) throws IOException {
        deleteMessage(msg.getId());
    }

    /**
     * Pushes a message onto the queue with no timeout, delay, or expiration
     * override.
     *
     * @param msg The body of the message to push.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg) throws IOException {
        return push(msg, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout) throws IOException {
        return push(msg, timeout, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay) throws IOException {
        return push(msg, timeout, delay, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @param expiresIn The message's expiration offset in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay, long expiresIn) throws IOException {
        Message message = new Message();
        message.setBody(msg);
        message.setTimeout(timeout);
        message.setDelay(delay);
        message.setExpiresIn(expiresIn);

        String body = gson.toJson(new Messages(message));
        Reader reader = client.post("queues/" + name + "/messages", body);
        try {
            Ids ids = gson.fromJson(reader, Ids.class);
            return ids.getId(0);
        } finally {
            reader.close();
        }
    }

    /**
     * Clears the queue of all messages.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void clear() throws IOException {
        client.post("queues/" + name + "/clear", "").close();
    }

    /** JSON shape of the queue-info endpoint response. */
    static class Info implements Serializable {
        int count;
        int size;
    }

    /**
     * Returns the number of messages currently on the queue, as reported by
     * the queue-info endpoint's {@code size} field.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public int getSize() throws IOException {
        Reader reader = client.get("queues/" + name);
        try {
            Info info = gson.fromJson(reader, Info.class);
            return info.size;
        } finally {
            reader.close();
        }
    }
}
| fix: changed minimum number of messages from 0 to 1 | src/main/java/io/iron/ironmq/Queue.java | fix: changed minimum number of messages from 0 to 1 | <ide><path>rc/main/java/io/iron/ironmq/Queue.java
<ide> * @throws IOException If there is an error accessing the IronMQ server.
<ide> */
<ide> public Messages get(int numberOfMessages, int timeout) throws IOException {
<del> if (numberOfMessages < 0 || numberOfMessages > 100) {
<add> if (numberOfMessages < 1 || numberOfMessages > 100) {
<ide> throw new IllegalArgumentException("numberOfMessages has to be within 1..100");
<ide> }
<ide> Reader reader = client.get("queues/" + name + "/messages?n="+numberOfMessages+"&timeout=" + timeout); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.