target
stringlengths
20
113k
src_fm
stringlengths
11
86.3k
src_fm_fc
stringlengths
21
86.4k
src_fm_fc_co
stringlengths
30
86.4k
src_fm_fc_ms
stringlengths
42
86.8k
src_fm_fc_ms_ff
stringlengths
43
86.8k
@Test public void get_primary_keys() throws TException { PrimaryKeysRequest request = new PrimaryKeysRequest(DB_P, "table"); PrimaryKeysRequest inboundRequest = new PrimaryKeysRequest(); PrimaryKeysResponse response = new PrimaryKeysResponse(); PrimaryKeysResponse expected = new PrimaryKeysResponse(); when(primaryMapping.transformInboundPrimaryKeysRequest(request)).thenReturn(inboundRequest); when(primaryMapping.transformOutboundPrimaryKeysResponse(response)).thenReturn(expected); when(primaryClient.get_primary_keys(inboundRequest)).thenReturn(response); PrimaryKeysResponse result = handler.get_primary_keys(request); assertThat(result, is(expected)); }
@Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(request.getDb_name()); return mapping .transformOutboundPrimaryKeysResponse( mapping.getClient().get_primary_keys(mapping.transformInboundPrimaryKeysRequest(request))); }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(request.getDb_name()); return mapping .transformOutboundPrimaryKeysResponse( mapping.getClient().get_primary_keys(mapping.transformInboundPrimaryKeysRequest(request))); } }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(request.getDb_name()); return mapping .transformOutboundPrimaryKeysResponse( mapping.getClient().get_primary_keys(mapping.transformInboundPrimaryKeysRequest(request))); } FederatedHMSHandler( MappingEventListener databaseMappingService, NotifyingFederationService notifyingFederationService); }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(request.getDb_name()); return mapping .transformOutboundPrimaryKeysResponse( mapping.getClient().get_primary_keys(mapping.transformInboundPrimaryKeysRequest(request))); } FederatedHMSHandler( MappingEventListener databaseMappingService, NotifyingFederationService notifyingFederationService); @Override void close(); @Override void shutdown(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String getMetaConf(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void setMetaConf(String key, String value); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_database(Database database); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Database get_database(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_database(String name, boolean deleteData, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_databases(String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_databases(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_database(String dbname, Database db); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Type get_type(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
boolean create_type(Type type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_type(String type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, Type> get_type_all(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table(Table tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_environment_context(Table tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table(String dbname, String name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table_with_environment_context( String dbname, String name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_tables(String db_name, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_tables(String db_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Table get_table(String dbname, String tbl_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Table> get_table_objects_by_name(String dbname, List<String> tbl_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> 
get_table_names_by_filter(String dbname, String filter, short max_tables); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table(String dbname, String tbl_name, Table new_tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_environment_context( String dbname, String tbl_name, Table new_tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition(Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition_with_environment_context(Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions(List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions_pspec(List<PartitionSpec> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AddPartitionsResult add_partitions_req(AddPartitionsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name_with_environment_context( String 
db_name, String tbl_name, String part_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name_with_environment_context( String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) DropPartitionsResult drop_partitions_req(DropPartitionsRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition exchange_partition( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_with_auth( String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = 
Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_with_auth( String db_name, String tbl_name, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_partitions_pspec(String db_name, String tbl_name, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps_with_auth( String db_name, String tbl_name, List<String> part_vals, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_filter(String db_name, String tbl_name, String filter, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_part_specs_by_filter(String db_name, String tbl_name, String filter, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionsByExprResult 
get_partitions_by_expr(PartitionsByExprRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_names(String db_name, String tbl_name, List<String> names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition(String db_name, String tbl_name, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions(String db_name, String tbl_name, List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition_with_environment_context( String db_name, String tbl_name, Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void rename_partition(String db_name, String tbl_name, List<String> part_vals, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_config_value(String name, String defaultValue); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> partition_name_to_vals(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, String> partition_name_to_spec(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void markPartitionForEvent( String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean isPartitionMarkedForEvent( String 
db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index add_index(Index new_index, Table index_table); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_index(String dbname, String base_tbl_name, String idx_name, Index new_idx); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_index_by_name(String db_name, String tbl_name, String index_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index get_index_by_name(String db_name, String tbl_name, String index_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Index> get_indexes(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_index_names(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_table_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_partition_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_partition_column_statistics( String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) TableStatsResult get_table_statistics_req(TableStatsRequest request); @Override 
@Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AggrStats get_aggr_stats_for(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean set_aggr_stats_for(SetPartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_function(Function func); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_function(String dbName, String funcName, Function newFunc); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_functions(String dbName, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Function get_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean create_role(Role role); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_role(String role_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_role_names(); @Override @Loggable(value = 
Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean grant_role( String role_name, String principal_name, PrincipalType principal_type, String grantor, PrincipalType grantorType, boolean grant_option); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_role(String role_name, String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Role> list_roles(String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokeRoleResponse grant_revoke_role(GrantRevokeRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetRoleGrantsForPrincipalResponse get_role_grants_for_principal(GetRoleGrantsForPrincipalRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<HiveObjectPrivilege> list_privileges( String principal_name, PrincipalType principal_type, HiveObjectRef hiveObject); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean grant_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request); 
@Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> set_ugi(String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_delegation_token(String token_owner, String renewer_kerberos_principal_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) long renew_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void cancel_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsResponse get_open_txns(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsInfoResponse get_open_txns_info(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) OpenTxnsResponse open_txns(OpenTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txn(AbortTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void commit_txn(CommitTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse lock(LockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse check_lock(CheckLockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void unlock(UnlockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowLocksResponse show_locks(ShowLocksRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void heartbeat(HeartbeatRequest ids); @Override @Loggable(value = Loggable.DEBUG, 
skipResult = true, name = INVOCATION_LOG_NAME) HeartbeatTxnRangeResponse heartbeat_txn_range(HeartbeatTxnRangeRequest txns); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void compact(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowCompactResponse show_compact(ShowCompactRequest rqst); @Override String getCpuProfile(int arg0); @Override String getVersion(); @Override fb_status getStatus(); @Override Configuration getConf(); @Override void setConf(Configuration conf); @Override void init(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txns(AbortTxnsRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_dynamic_partitions(AddDynamicPartitions rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_foreign_key(AddForeignKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_master_key(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_primary_key(AddPrimaryKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean add_token(String token_identifier, String delegation_token); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions_with_environment_context( String db_name, String tbl_name, List<Partition> new_parts, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_cascade(String dbname, String tbl_name, Table new_tbl, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CacheFileMetadataResult 
cache_file_metadata(CacheFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_constraints(Table tbl, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_constraint(DropConstraintRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Partition> exchange_partitions( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) FireEventResponse fire_listener_event(FireEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void flushCache(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetAllFunctionsResponse get_all_functions(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_token_identifiers(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CurrentNotificationEventId get_current_notificationEventId(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataByExprResult 
get_file_metadata_by_expr(GetFileMetadataByExprRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ForeignKeysResponse get_foreign_keys(ForeignKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_master_keys(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) NotificationEventResponse get_next_notification(NotificationEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int get_num_partitions_by_filter(String db_name, String tbl_name, String filter); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<TableMeta> get_table_meta(String db_patterns, String tbl_patterns, List<String> tbl_types); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_master_key(int key_seq); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void update_master_key(int seq_number, String key); @Override @Loggable(value = 
Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_tables_by_type(String db_name, String pattern, String tableType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTableResult get_table_req(GetTableRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTablesResult get_table_objects_by_name_req(GetTablesRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CompactionResponse compact2(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionValuesResponse get_partition_values(PartitionValuesRequest req); }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(request.getDb_name()); return mapping .transformOutboundPrimaryKeysResponse( mapping.getClient().get_primary_keys(mapping.transformInboundPrimaryKeysRequest(request))); } FederatedHMSHandler( MappingEventListener databaseMappingService, NotifyingFederationService notifyingFederationService); @Override void close(); @Override void shutdown(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String getMetaConf(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void setMetaConf(String key, String value); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_database(Database database); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Database get_database(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_database(String name, boolean deleteData, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_databases(String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_databases(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_database(String dbname, Database db); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Type get_type(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
boolean create_type(Type type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_type(String type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, Type> get_type_all(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table(Table tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_environment_context(Table tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table(String dbname, String name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table_with_environment_context( String dbname, String name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_tables(String db_name, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_tables(String db_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Table get_table(String dbname, String tbl_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Table> get_table_objects_by_name(String dbname, List<String> tbl_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> 
get_table_names_by_filter(String dbname, String filter, short max_tables); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table(String dbname, String tbl_name, Table new_tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_environment_context( String dbname, String tbl_name, Table new_tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition(Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition_with_environment_context(Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions(List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions_pspec(List<PartitionSpec> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AddPartitionsResult add_partitions_req(AddPartitionsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name_with_environment_context( String 
db_name, String tbl_name, String part_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name_with_environment_context( String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) DropPartitionsResult drop_partitions_req(DropPartitionsRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition exchange_partition( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_with_auth( String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = 
Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_with_auth( String db_name, String tbl_name, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_partitions_pspec(String db_name, String tbl_name, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps_with_auth( String db_name, String tbl_name, List<String> part_vals, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_filter(String db_name, String tbl_name, String filter, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_part_specs_by_filter(String db_name, String tbl_name, String filter, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionsByExprResult 
get_partitions_by_expr(PartitionsByExprRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_names(String db_name, String tbl_name, List<String> names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition(String db_name, String tbl_name, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions(String db_name, String tbl_name, List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition_with_environment_context( String db_name, String tbl_name, Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void rename_partition(String db_name, String tbl_name, List<String> part_vals, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_config_value(String name, String defaultValue); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> partition_name_to_vals(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, String> partition_name_to_spec(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void markPartitionForEvent( String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean isPartitionMarkedForEvent( String 
db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index add_index(Index new_index, Table index_table); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_index(String dbname, String base_tbl_name, String idx_name, Index new_idx); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_index_by_name(String db_name, String tbl_name, String index_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index get_index_by_name(String db_name, String tbl_name, String index_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Index> get_indexes(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_index_names(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_table_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_partition_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_partition_column_statistics( String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) TableStatsResult get_table_statistics_req(TableStatsRequest request); @Override 
@Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AggrStats get_aggr_stats_for(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean set_aggr_stats_for(SetPartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_function(Function func); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_function(String dbName, String funcName, Function newFunc); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_functions(String dbName, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Function get_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean create_role(Role role); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_role(String role_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_role_names(); @Override @Loggable(value = 
Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean grant_role( String role_name, String principal_name, PrincipalType principal_type, String grantor, PrincipalType grantorType, boolean grant_option); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_role(String role_name, String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Role> list_roles(String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokeRoleResponse grant_revoke_role(GrantRevokeRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetRoleGrantsForPrincipalResponse get_role_grants_for_principal(GetRoleGrantsForPrincipalRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<HiveObjectPrivilege> list_privileges( String principal_name, PrincipalType principal_type, HiveObjectRef hiveObject); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean grant_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request); 
@Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> set_ugi(String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_delegation_token(String token_owner, String renewer_kerberos_principal_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) long renew_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void cancel_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsResponse get_open_txns(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsInfoResponse get_open_txns_info(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) OpenTxnsResponse open_txns(OpenTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txn(AbortTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void commit_txn(CommitTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse lock(LockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse check_lock(CheckLockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void unlock(UnlockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowLocksResponse show_locks(ShowLocksRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void heartbeat(HeartbeatRequest ids); @Override @Loggable(value = Loggable.DEBUG, 
skipResult = true, name = INVOCATION_LOG_NAME) HeartbeatTxnRangeResponse heartbeat_txn_range(HeartbeatTxnRangeRequest txns); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void compact(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowCompactResponse show_compact(ShowCompactRequest rqst); @Override String getCpuProfile(int arg0); @Override String getVersion(); @Override fb_status getStatus(); @Override Configuration getConf(); @Override void setConf(Configuration conf); @Override void init(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txns(AbortTxnsRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_dynamic_partitions(AddDynamicPartitions rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_foreign_key(AddForeignKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_master_key(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_primary_key(AddPrimaryKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean add_token(String token_identifier, String delegation_token); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions_with_environment_context( String db_name, String tbl_name, List<Partition> new_parts, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_cascade(String dbname, String tbl_name, Table new_tbl, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CacheFileMetadataResult 
cache_file_metadata(CacheFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_constraints(Table tbl, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_constraint(DropConstraintRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Partition> exchange_partitions( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) FireEventResponse fire_listener_event(FireEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void flushCache(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetAllFunctionsResponse get_all_functions(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_token_identifiers(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CurrentNotificationEventId get_current_notificationEventId(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataByExprResult 
get_file_metadata_by_expr(GetFileMetadataByExprRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ForeignKeysResponse get_foreign_keys(ForeignKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_master_keys(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) NotificationEventResponse get_next_notification(NotificationEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int get_num_partitions_by_filter(String db_name, String tbl_name, String filter); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<TableMeta> get_table_meta(String db_patterns, String tbl_patterns, List<String> tbl_types); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_master_key(int key_seq); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void update_master_key(int seq_number, String key); @Override @Loggable(value = 
Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_tables_by_type(String db_name, String pattern, String tableType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTableResult get_table_req(GetTableRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTablesResult get_table_objects_by_name_req(GetTablesRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CompactionResponse compact2(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionValuesResponse get_partition_values(PartitionValuesRequest req); }
@Test public void get_partition_values() throws TException { PartitionValuesRequest request = new PartitionValuesRequest(DB_P, "table", Collections.singletonList(new FieldSchema())); List<PartitionValuesRow> partitionValues = Collections.singletonList(new PartitionValuesRow()); PartitionValuesResponse response = new PartitionValuesResponse(partitionValues); when(primaryClient.get_partition_values(request)).thenReturn(response); when(primaryMapping.transformInboundPartitionValuesRequest(request)).thenReturn(request); PartitionValuesResponse result = handler.get_partition_values(request); assertThat(result.getPartitionValuesSize(), is(1)); assertThat(result.getPartitionValues(), is(sameInstance(partitionValues))); }
@Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PartitionValuesResponse get_partition_values(PartitionValuesRequest req) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(req.getDbName()); return mapping .getClient() .get_partition_values(mapping.transformInboundPartitionValuesRequest(req)); }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PartitionValuesResponse get_partition_values(PartitionValuesRequest req) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(req.getDbName()); return mapping .getClient() .get_partition_values(mapping.transformInboundPartitionValuesRequest(req)); } }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PartitionValuesResponse get_partition_values(PartitionValuesRequest req) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(req.getDbName()); return mapping .getClient() .get_partition_values(mapping.transformInboundPartitionValuesRequest(req)); } FederatedHMSHandler( MappingEventListener databaseMappingService, NotifyingFederationService notifyingFederationService); }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PartitionValuesResponse get_partition_values(PartitionValuesRequest req) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(req.getDbName()); return mapping .getClient() .get_partition_values(mapping.transformInboundPartitionValuesRequest(req)); } FederatedHMSHandler( MappingEventListener databaseMappingService, NotifyingFederationService notifyingFederationService); @Override void close(); @Override void shutdown(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String getMetaConf(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void setMetaConf(String key, String value); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_database(Database database); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Database get_database(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_database(String name, boolean deleteData, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_databases(String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_databases(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_database(String dbname, Database db); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Type get_type(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean create_type(Type type); @Override 
@Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_type(String type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, Type> get_type_all(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table(Table tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_environment_context(Table tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table(String dbname, String name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table_with_environment_context( String dbname, String name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_tables(String db_name, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_tables(String db_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Table get_table(String dbname, String tbl_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Table> get_table_objects_by_name(String dbname, List<String> tbl_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_table_names_by_filter(String dbname, String filter, 
short max_tables); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table(String dbname, String tbl_name, Table new_tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_environment_context( String dbname, String tbl_name, Table new_tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition(Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition_with_environment_context(Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions(List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions_pspec(List<PartitionSpec> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AddPartitionsResult add_partitions_req(AddPartitionsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name_with_environment_context( String db_name, String tbl_name, String part_name, EnvironmentContext 
environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name_with_environment_context( String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) DropPartitionsResult drop_partitions_req(DropPartitionsRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition exchange_partition( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_with_auth( String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, 
prepend=true) List<Partition> get_partitions(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_with_auth( String db_name, String tbl_name, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_partitions_pspec(String db_name, String tbl_name, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps_with_auth( String db_name, String tbl_name, List<String> part_vals, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_filter(String db_name, String tbl_name, String filter, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_part_specs_by_filter(String db_name, String tbl_name, String filter, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionsByExprResult get_partitions_by_expr(PartitionsByExprRequest req); @Override 
@Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_names(String db_name, String tbl_name, List<String> names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition(String db_name, String tbl_name, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions(String db_name, String tbl_name, List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition_with_environment_context( String db_name, String tbl_name, Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void rename_partition(String db_name, String tbl_name, List<String> part_vals, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_config_value(String name, String defaultValue); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> partition_name_to_vals(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, String> partition_name_to_spec(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void markPartitionForEvent( String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean isPartitionMarkedForEvent( String db_name, String tbl_name, Map<String, String> part_vals, 
PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index add_index(Index new_index, Table index_table); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_index(String dbname, String base_tbl_name, String idx_name, Index new_idx); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_index_by_name(String db_name, String tbl_name, String index_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index get_index_by_name(String db_name, String tbl_name, String index_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Index> get_indexes(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_index_names(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_table_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_partition_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_partition_column_statistics( String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) TableStatsResult get_table_statistics_req(TableStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, 
name = INVOCATION_LOG_NAME) PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AggrStats get_aggr_stats_for(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean set_aggr_stats_for(SetPartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_function(Function func); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_function(String dbName, String funcName, Function newFunc); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_functions(String dbName, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Function get_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean create_role(Role role); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_role(String role_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_role_names(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
boolean grant_role( String role_name, String principal_name, PrincipalType principal_type, String grantor, PrincipalType grantorType, boolean grant_option); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_role(String role_name, String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Role> list_roles(String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokeRoleResponse grant_revoke_role(GrantRevokeRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetRoleGrantsForPrincipalResponse get_role_grants_for_principal(GetRoleGrantsForPrincipalRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<HiveObjectPrivilege> list_privileges( String principal_name, PrincipalType principal_type, HiveObjectRef hiveObject); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean grant_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, 
name = INVOCATION_LOG_NAME) List<String> set_ugi(String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_delegation_token(String token_owner, String renewer_kerberos_principal_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) long renew_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void cancel_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsResponse get_open_txns(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsInfoResponse get_open_txns_info(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) OpenTxnsResponse open_txns(OpenTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txn(AbortTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void commit_txn(CommitTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse lock(LockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse check_lock(CheckLockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void unlock(UnlockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowLocksResponse show_locks(ShowLocksRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void heartbeat(HeartbeatRequest ids); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
HeartbeatTxnRangeResponse heartbeat_txn_range(HeartbeatTxnRangeRequest txns); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void compact(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowCompactResponse show_compact(ShowCompactRequest rqst); @Override String getCpuProfile(int arg0); @Override String getVersion(); @Override fb_status getStatus(); @Override Configuration getConf(); @Override void setConf(Configuration conf); @Override void init(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txns(AbortTxnsRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_dynamic_partitions(AddDynamicPartitions rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_foreign_key(AddForeignKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_master_key(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_primary_key(AddPrimaryKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean add_token(String token_identifier, String delegation_token); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions_with_environment_context( String db_name, String tbl_name, List<Partition> new_parts, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_cascade(String dbname, String tbl_name, Table new_tbl, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CacheFileMetadataResult cache_file_metadata(CacheFileMetadataRequest req); 
@Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_constraints(Table tbl, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_constraint(DropConstraintRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Partition> exchange_partitions( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) FireEventResponse fire_listener_event(FireEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void flushCache(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetAllFunctionsResponse get_all_functions(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_token_identifiers(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CurrentNotificationEventId get_current_notificationEventId(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest 
req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ForeignKeysResponse get_foreign_keys(ForeignKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_master_keys(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) NotificationEventResponse get_next_notification(NotificationEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int get_num_partitions_by_filter(String db_name, String tbl_name, String filter); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<TableMeta> get_table_meta(String db_patterns, String tbl_patterns, List<String> tbl_types); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_master_key(int key_seq); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void update_master_key(int seq_number, String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
List<String> get_tables_by_type(String db_name, String pattern, String tableType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTableResult get_table_req(GetTableRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTablesResult get_table_objects_by_name_req(GetTablesRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CompactionResponse compact2(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionValuesResponse get_partition_values(PartitionValuesRequest req); }
FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler { @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) public PartitionValuesResponse get_partition_values(PartitionValuesRequest req) throws MetaException, NoSuchObjectException, TException { DatabaseMapping mapping = databaseMappingService.databaseMapping(req.getDbName()); return mapping .getClient() .get_partition_values(mapping.transformInboundPartitionValuesRequest(req)); } FederatedHMSHandler( MappingEventListener databaseMappingService, NotifyingFederationService notifyingFederationService); @Override void close(); @Override void shutdown(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String getMetaConf(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void setMetaConf(String key, String value); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_database(Database database); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Database get_database(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_database(String name, boolean deleteData, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_databases(String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_databases(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_database(String dbname, Database db); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Type get_type(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean create_type(Type type); @Override 
@Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_type(String type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, Type> get_type_all(String name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema(String db_name, String table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table(Table tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_environment_context(Table tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table(String dbname, String name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_table_with_environment_context( String dbname, String name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_tables(String db_name, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_tables(String db_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Table get_table(String dbname, String tbl_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Table> get_table_objects_by_name(String dbname, List<String> tbl_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_table_names_by_filter(String dbname, String filter, 
short max_tables); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table(String dbname, String tbl_name, Table new_tbl); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_environment_context( String dbname, String tbl_name, Table new_tbl, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition(Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition add_partition_with_environment_context(Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions(List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_partitions_pspec(List<PartitionSpec> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AddPartitionsResult add_partitions_req(AddPartitionsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition append_partition_by_name_with_environment_context( String db_name, String tbl_name, String part_name, EnvironmentContext 
environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_with_environment_context( String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_partition_by_name_with_environment_context( String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) DropPartitionsResult drop_partitions_req(DropPartitionsRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition(String db_name, String tbl_name, List<String> part_vals); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition exchange_partition( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_with_auth( String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Partition get_partition_by_name(String db_name, String tbl_name, String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, 
prepend=true) List<Partition> get_partitions(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_with_auth( String db_name, String tbl_name, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_partitions_pspec(String db_name, String tbl_name, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names(String db_name, String tbl_name, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_ps_with_auth( String db_name, String tbl_name, List<String> part_vals, short max_parts, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_partition_names_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_filter(String db_name, String tbl_name, String filter, short max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<PartitionSpec> get_part_specs_by_filter(String db_name, String tbl_name, String filter, int max_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionsByExprResult get_partitions_by_expr(PartitionsByExprRequest req); @Override 
@Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME, prepend=true) List<Partition> get_partitions_by_names(String db_name, String tbl_name, List<String> names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition(String db_name, String tbl_name, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions(String db_name, String tbl_name, List<Partition> new_parts); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partition_with_environment_context( String db_name, String tbl_name, Partition new_part, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void rename_partition(String db_name, String tbl_name, List<String> part_vals, Partition new_part); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_config_value(String name, String defaultValue); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> partition_name_to_vals(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Map<String, String> partition_name_to_spec(String part_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void markPartitionForEvent( String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean isPartitionMarkedForEvent( String db_name, String tbl_name, Map<String, String> part_vals, 
PartitionEventType eventType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index add_index(Index new_index, Table index_table); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_index(String dbname, String base_tbl_name, String idx_name, Index new_idx); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_index_by_name(String db_name, String tbl_name, String index_name, boolean deleteData); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Index get_index_by_name(String db_name, String tbl_name, String index_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Index> get_indexes(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_index_names(String db_name, String tbl_name, short max_indexes); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_table_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean update_partition_column_statistics(ColumnStatistics stats_obj); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ColumnStatistics get_partition_column_statistics( String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) TableStatsResult get_table_statistics_req(TableStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, 
name = INVOCATION_LOG_NAME) PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) AggrStats get_aggr_stats_for(PartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean set_aggr_stats_for(SetPartitionsStatsRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean delete_table_column_statistics(String db_name, String tbl_name, String col_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_function(Function func); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_function(String dbName, String funcName, Function newFunc); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_functions(String dbName, String pattern); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) Function get_function(String dbName, String funcName); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean create_role(Role role); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean drop_role(String role_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_role_names(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
boolean grant_role( String role_name, String principal_name, PrincipalType principal_type, String grantor, PrincipalType grantorType, boolean grant_option); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_role(String role_name, String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Role> list_roles(String principal_name, PrincipalType principal_type); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokeRoleResponse grant_revoke_role(GrantRevokeRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetRoleGrantsForPrincipalResponse get_role_grants_for_principal(GetRoleGrantsForPrincipalRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<HiveObjectPrivilege> list_privileges( String principal_name, PrincipalType principal_type, HiveObjectRef hiveObject); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean grant_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean revoke_privileges(PrivilegeBag privileges); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, 
name = INVOCATION_LOG_NAME) List<String> set_ugi(String user_name, List<String> group_names); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_delegation_token(String token_owner, String renewer_kerberos_principal_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) long renew_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void cancel_delegation_token(String token_str_form); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsResponse get_open_txns(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetOpenTxnsInfoResponse get_open_txns_info(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) OpenTxnsResponse open_txns(OpenTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txn(AbortTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void commit_txn(CommitTxnRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse lock(LockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) LockResponse check_lock(CheckLockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void unlock(UnlockRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowLocksResponse show_locks(ShowLocksRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void heartbeat(HeartbeatRequest ids); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
HeartbeatTxnRangeResponse heartbeat_txn_range(HeartbeatTxnRangeRequest txns); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void compact(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ShowCompactResponse show_compact(ShowCompactRequest rqst); @Override String getCpuProfile(int arg0); @Override String getVersion(); @Override fb_status getStatus(); @Override Configuration getConf(); @Override void setConf(Configuration conf); @Override void init(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void abort_txns(AbortTxnsRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_dynamic_partitions(AddDynamicPartitions rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_foreign_key(AddForeignKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int add_master_key(String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void add_primary_key(AddPrimaryKeyRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean add_token(String token_identifier, String delegation_token); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_partitions_with_environment_context( String db_name, String tbl_name, List<Partition> new_parts, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void alter_table_with_cascade(String dbname, String tbl_name, Table new_tbl, boolean cascade); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CacheFileMetadataResult cache_file_metadata(CacheFileMetadataRequest req); 
@Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void create_table_with_constraints(Table tbl, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void drop_constraint(DropConstraintRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<Partition> exchange_partitions( Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) FireEventResponse fire_listener_event(FireEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void flushCache(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetAllFunctionsResponse get_all_functions(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_all_token_identifiers(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CurrentNotificationEventId get_current_notificationEventId(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_fields_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest 
req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) ForeignKeysResponse get_foreign_keys(ForeignKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<String> get_master_keys(); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) NotificationEventResponse get_next_notification(NotificationEventRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) int get_num_partitions_by_filter(String db_name, String tbl_name, String filter); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<FieldSchema> get_schema_with_environment_context( String db_name, String table_name, EnvironmentContext environment_context); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) List<TableMeta> get_table_meta(String db_patterns, String tbl_patterns, List<String> tbl_types); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) String get_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_master_key(int key_seq); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) boolean remove_token(String token_identifier); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) void update_master_key(int seq_number, String key); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) 
List<String> get_tables_by_type(String db_name, String pattern, String tableType); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTableResult get_table_req(GetTableRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) GetTablesResult get_table_objects_by_name_req(GetTablesRequest req); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) CompactionResponse compact2(CompactionRequest rqst); @Override @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME) PartitionValuesResponse get_partition_values(PartitionValuesRequest req); }
@Test public void correctType() throws Exception { TProcessor processor = factory.getProcessor(transport); assertThat(TSetIpAddressProcessor.class.isAssignableFrom(processor.getClass()), is(true)); }
@Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } @Autowired TSetIpAddressProcessorFactory( HiveConf hiveConf, FederatedHMSHandlerFactory federatedHMSHandlerFactory, TTransportMonitor transportMonitor); }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } @Autowired TSetIpAddressProcessorFactory( HiveConf hiveConf, FederatedHMSHandlerFactory federatedHMSHandlerFactory, TTransportMonitor transportMonitor); @Override TProcessor getProcessor(TTransport transport); }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } @Autowired TSetIpAddressProcessorFactory( HiveConf hiveConf, FederatedHMSHandlerFactory federatedHMSHandlerFactory, TTransportMonitor transportMonitor); @Override TProcessor getProcessor(TTransport transport); }
@Test public void connectionIsMonitored() throws Exception { factory.getProcessor(transport); ArgumentCaptor<TTransport> transportCaptor = ArgumentCaptor.forClass(TTransport.class); ArgumentCaptor<Closeable> handlerCaptor = ArgumentCaptor.forClass(Closeable.class); verify(transportMonitor).monitor(transportCaptor.capture(), handlerCaptor.capture()); assertThat(transportCaptor.getValue(), is(transport)); assertThat(handlerCaptor.getValue(), is(instanceOf(FederatedHMSHandler.class))); }
@Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } @Autowired TSetIpAddressProcessorFactory( HiveConf hiveConf, FederatedHMSHandlerFactory federatedHMSHandlerFactory, TTransportMonitor transportMonitor); }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } @Autowired TSetIpAddressProcessorFactory( HiveConf hiveConf, FederatedHMSHandlerFactory federatedHMSHandlerFactory, TTransportMonitor transportMonitor); @Override TProcessor getProcessor(TTransport transport); }
TSetIpAddressProcessorFactory extends TProcessorFactory { @Override public TProcessor getProcessor(TTransport transport) { try { if (transport instanceof TSocket) { Socket socket = ((TSocket) transport).getSocket(); log.debug("Received a connection from ip: {}", socket.getInetAddress().getHostAddress()); } CloseableIHMSHandler baseHandler = federatedHMSHandlerFactory.create(); IHMSHandler handler = newRetryingHMSHandler(ExceptionWrappingHMSHandler.newProxyInstance(baseHandler), hiveConf, false); transportMonitor.monitor(transport, baseHandler); return new TSetIpAddressProcessor<>(handler); } catch (MetaException | ReflectiveOperationException | RuntimeException e) { throw new RuntimeException("Error creating TProcessor", e); } } @Autowired TSetIpAddressProcessorFactory( HiveConf hiveConf, FederatedHMSHandlerFactory federatedHMSHandlerFactory, TTransportMonitor transportMonitor); @Override TProcessor getProcessor(TTransport transport); }
@Test public void get_databaseNoExceptions() throws Exception { IHMSHandler handler = ExceptionWrappingHMSHandler.newProxyInstance(baseHandler); handler.get_database("bdp"); verify(baseHandler).get_database("bdp"); }
public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
@Test public void get_databaseWaggleDanceServerException() throws Exception { expectedException.expect(MetaException.class); IHMSHandler handler = ExceptionWrappingHMSHandler.newProxyInstance(baseHandler); when(baseHandler.get_database("bdp")).thenThrow(new WaggleDanceServerException("waggle waggle!")); handler.get_database("bdp"); }
public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
@Test public void get_databasNotAllowedException() throws Exception { expectedException.expect(MetaException.class); IHMSHandler handler = ExceptionWrappingHMSHandler.newProxyInstance(baseHandler); when(baseHandler.get_database("bdp")).thenThrow(new NotAllowedException("waggle waggle!")); handler.get_database("bdp"); }
public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
@Test public void get_databaseRunTimeExceptionIsNotWrapped() throws Exception { expectedException.expect(RuntimeException.class); expectedException.expectMessage("generic non waggle dance exception"); IHMSHandler handler = ExceptionWrappingHMSHandler.newProxyInstance(baseHandler); when(baseHandler.get_database("bdp")).thenThrow(new RuntimeException("generic non waggle dance exception")); handler.get_database("bdp"); }
public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
@Test public void negativePort() { graphiteConfiguration.setPort(-1); Set<ConstraintViolation<GraphiteConfiguration>> violations = validator.validate(graphiteConfiguration); assertThat(violations.size(), is(1)); }
public void setPort(int port) { this.port = port; }
GraphiteConfiguration { public void setPort(int port) { this.port = port; } }
GraphiteConfiguration { public void setPort(int port) { this.port = port; } }
GraphiteConfiguration { public void setPort(int port) { this.port = port; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
GraphiteConfiguration { public void setPort(int port) { this.port = port; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
@Test public void get_databaseCheckedExceptionIsNotWrapped() throws Exception { expectedException.expect(NoSuchObjectException.class); expectedException.expectMessage("Does not exist!"); IHMSHandler handler = ExceptionWrappingHMSHandler.newProxyInstance(baseHandler); when(baseHandler.get_database("bdp")).thenThrow(new NoSuchObjectException("Does not exist!")); handler.get_database("bdp"); }
public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
ExceptionWrappingHMSHandler implements InvocationHandler { public static IHMSHandler newProxyInstance(IHMSHandler baseHandler) { return (IHMSHandler) Proxy.newProxyInstance(ExceptionWrappingHMSHandler.class.getClassLoader(), new Class[] { IHMSHandler.class }, new ExceptionWrappingHMSHandler(baseHandler)); } ExceptionWrappingHMSHandler(IHMSHandler baseHandler); static IHMSHandler newProxyInstance(IHMSHandler baseHandler); @Override Object invoke(Object proxy, Method method, Object[] args); }
@Test public void typical() throws Exception { when(waggleDanceConfiguration.getDatabaseResolution()).thenReturn(DatabaseResolution.MANUAL); CloseableIHMSHandler handler = factory.create(); assertThat(handler, is(instanceOf(FederatedHMSHandler.class))); }
public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); CloseableIHMSHandler create(); }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); CloseableIHMSHandler create(); }
@Test public void prefixedDatabase() throws Exception { when(waggleDanceConfiguration.getDatabaseResolution()).thenReturn(DatabaseResolution.PREFIXED); factory = new FederatedHMSHandlerFactory(hiveConf, notifyingFederationService, metaStoreMappingFactory, waggleDanceConfiguration, queryMapping); CloseableIHMSHandler handler = factory.create(); assertThat(handler, is(instanceOf(FederatedHMSHandler.class))); }
public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); CloseableIHMSHandler create(); }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); CloseableIHMSHandler create(); }
@Test(expected = WaggleDanceException.class) public void noMode() { factory = new FederatedHMSHandlerFactory(hiveConf, notifyingFederationService, metaStoreMappingFactory, waggleDanceConfiguration, queryMapping); factory.create(); }
public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); CloseableIHMSHandler create(); }
FederatedHMSHandlerFactory { public CloseableIHMSHandler create() { MappingEventListener service = createDatabaseMappingService(); MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service); CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService); HiveConf conf = new HiveConf(hiveConf); baseHandler.setConf(conf); return baseHandler; } @Autowired FederatedHMSHandlerFactory( HiveConf hiveConf, NotifyingFederationService notifyingFederationService, MetaStoreMappingFactory metaStoreMappingFactory, WaggleDanceConfiguration waggleDanceConfiguration, QueryMapping queryMapping); CloseableIHMSHandler create(); }
@Test public void zeroPollInterval() { graphiteConfiguration.setPollInterval(0); Set<ConstraintViolation<GraphiteConfiguration>> violations = validator.validate(graphiteConfiguration); assertThat(violations.size(), is(1)); }
public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
@Test public void negativePollInterval() { graphiteConfiguration.setPollInterval(-1); Set<ConstraintViolation<GraphiteConfiguration>> violations = validator.validate(graphiteConfiguration); assertThat(violations.size(), is(1)); }
public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
GraphiteConfiguration { public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
@Test public void nullPollIntervalTimeUnit() { graphiteConfiguration.setPollIntervalTimeUnit(null); Set<ConstraintViolation<GraphiteConfiguration>> violations = validator.validate(graphiteConfiguration); assertThat(violations.size(), is(1)); }
public void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit) { this.pollIntervalTimeUnit = pollIntervalTimeUnit; }
GraphiteConfiguration { public void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit) { this.pollIntervalTimeUnit = pollIntervalTimeUnit; } }
GraphiteConfiguration { public void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit) { this.pollIntervalTimeUnit = pollIntervalTimeUnit; } }
GraphiteConfiguration { public void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit) { this.pollIntervalTimeUnit = pollIntervalTimeUnit; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
GraphiteConfiguration { public void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit) { this.pollIntervalTimeUnit = pollIntervalTimeUnit; } @PostConstruct void init(); boolean isEnabled(); int getPort(); void setPort(int port); String getHost(); void setHost(String host); String getPrefix(); void setPrefix(String prefix); long getPollInterval(); void setPollInterval(long pollInterval); TimeUnit getPollIntervalTimeUnit(); void setPollIntervalTimeUnit(TimeUnit pollIntervalTimeUnit); }
@Test public void nullRemoteMetaStoreUris() { metaStore.setRemoteMetaStoreUris(null); Set<ConstraintViolation<T>> violations = validator.validate(metaStore); assertThat(violations.size(), is(1)); }
public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
@Test public void nullPort() { waggleDanceConfiguration.setPort(null); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(1)); }
public void setPort(Integer port) { this.port = port; }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void zeroPort() { waggleDanceConfiguration.setPort(0); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(1)); }
public void setPort(Integer port) { this.port = port; }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void negativePort() { waggleDanceConfiguration.setPort(-1); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(1)); }
public void setPort(Integer port) { this.port = port; }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setPort(Integer port) { this.port = port; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void zeroDisconnectConnectionDelay() { waggleDanceConfiguration.setDisconnectConnectionDelay(0); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(1)); }
public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void negativeDisconnectConnectionDelay() { waggleDanceConfiguration.setDisconnectConnectionDelay(-1); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(1)); }
public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setDisconnectConnectionDelay(int disconnectConnectionDelay) { this.disconnectConnectionDelay = disconnectConnectionDelay; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void nullDisconnectTimeUnit() { waggleDanceConfiguration.setDisconnectTimeUnit(null); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(1)); }
public void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit) { this.disconnectTimeUnit = disconnectTimeUnit; }
WaggleDanceConfiguration { public void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit) { this.disconnectTimeUnit = disconnectTimeUnit; } }
WaggleDanceConfiguration { public void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit) { this.disconnectTimeUnit = disconnectTimeUnit; } }
WaggleDanceConfiguration { public void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit) { this.disconnectTimeUnit = disconnectTimeUnit; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit) { this.disconnectTimeUnit = disconnectTimeUnit; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void nullConfigurationProperties() { waggleDanceConfiguration.setConfigurationProperties(null); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(0)); }
public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void emptyConfigurationProperties() { waggleDanceConfiguration.setConfigurationProperties(ImmutableMap.of()); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(0)); }
public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setConfigurationProperties(Map<String, String> configurationProperties) { this.configurationProperties = configurationProperties; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void nullDatabaseResolution() { waggleDanceConfiguration.setDatabaseResolution(null); Set<ConstraintViolation<WaggleDanceConfiguration>> violations = validator.validate(waggleDanceConfiguration); assertThat(violations.size(), is(1)); }
public void setDatabaseResolution(DatabaseResolution databaseResolution) { this.databaseResolution = databaseResolution; }
WaggleDanceConfiguration { public void setDatabaseResolution(DatabaseResolution databaseResolution) { this.databaseResolution = databaseResolution; } }
WaggleDanceConfiguration { public void setDatabaseResolution(DatabaseResolution databaseResolution) { this.databaseResolution = databaseResolution; } }
WaggleDanceConfiguration { public void setDatabaseResolution(DatabaseResolution databaseResolution) { this.databaseResolution = databaseResolution; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public void setDatabaseResolution(DatabaseResolution databaseResolution) { this.databaseResolution = databaseResolution; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void setterGetterStatusPollingDelayDefault() { assertThat(waggleDanceConfiguration.getStatusPollingDelay(), is(5)); }
public int getStatusPollingDelay() { return statusPollingDelay; }
WaggleDanceConfiguration { public int getStatusPollingDelay() { return statusPollingDelay; } }
WaggleDanceConfiguration { public int getStatusPollingDelay() { return statusPollingDelay; } }
WaggleDanceConfiguration { public int getStatusPollingDelay() { return statusPollingDelay; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public int getStatusPollingDelay() { return statusPollingDelay; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void emptyRemoteMetaStoreUris() { metaStore.setRemoteMetaStoreUris(" "); Set<ConstraintViolation<T>> violations = validator.validate(metaStore); assertThat(violations.size(), is(1)); }
public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
AbstractMetaStore { public void setRemoteMetaStoreUris(String remoteMetaStoreUris) { this.remoteMetaStoreUris = remoteMetaStoreUris; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
@Test public void setterGetterStatusPollingDelayTimeUnitDefault() { assertThat(waggleDanceConfiguration.getStatusPollingDelayTimeUnit(), is(TimeUnit.MINUTES)); }
public TimeUnit getStatusPollingDelayTimeUnit() { return statusPollingDelayTimeUnit; }
WaggleDanceConfiguration { public TimeUnit getStatusPollingDelayTimeUnit() { return statusPollingDelayTimeUnit; } }
WaggleDanceConfiguration { public TimeUnit getStatusPollingDelayTimeUnit() { return statusPollingDelayTimeUnit; } }
WaggleDanceConfiguration { public TimeUnit getStatusPollingDelayTimeUnit() { return statusPollingDelayTimeUnit; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
WaggleDanceConfiguration { public TimeUnit getStatusPollingDelayTimeUnit() { return statusPollingDelayTimeUnit; } Integer getPort(); void setPort(Integer port); boolean isVerbose(); void setVerbose(boolean verbose); int getDisconnectConnectionDelay(); void setDisconnectConnectionDelay(int disconnectConnectionDelay); TimeUnit getDisconnectTimeUnit(); void setDisconnectTimeUnit(TimeUnit disconnectTimeUnit); Map<String, String> getConfigurationProperties(); void setConfigurationProperties(Map<String, String> configurationProperties); void setDatabaseResolution(DatabaseResolution databaseResolution); DatabaseResolution getDatabaseResolution(); int getThriftServerStopTimeoutValInSeconds(); void setThriftServerStopTimeoutValInSeconds(int thriftServerStopTimeoutValInSeconds); int getThriftServerRequestTimeout(); void setThriftServerRequestTimeout(int thriftServerRequestTimeout); TimeUnit getThriftServerRequestTimeoutUnit(); void setThriftServerRequestTimeoutUnit(TimeUnit thriftServerRequestTimeoutUnit); int getStatusPollingDelay(); void setStatusPollingDelay(int statusPollingDelay); TimeUnit getStatusPollingDelayTimeUnit(); void setStatusPollingDelayTimeUnit(TimeUnit statusPollingDelayTimeUnit); }
@Test public void defaultFactory() { ArgumentCaptor<HiveConf> hiveConfCaptor = ArgumentCaptor.forClass(HiveConf.class); factory.newInstance(newFederatedInstance("fed1", THRIFT_URI)); verify(defaultMetaStoreClientFactory).newInstance(hiveConfCaptor.capture(), eq( "waggledance-fed1"), eq(3), eq(2000)); verifyZeroInteractions(tunnelingMetaStoreClientFactory); HiveConf hiveConf = hiveConfCaptor.getValue(); assertThat(hiveConf.getVar(ConfVars.METASTOREURIS), is(THRIFT_URI)); }
public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } CloseableThriftHiveMetastoreIfaceClientFactory( TunnelingMetaStoreClientFactory tunnelingMetaStoreClientFactory, DefaultMetaStoreClientFactory defaultMetaStoreClientFactory); }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } CloseableThriftHiveMetastoreIfaceClientFactory( TunnelingMetaStoreClientFactory tunnelingMetaStoreClientFactory, DefaultMetaStoreClientFactory defaultMetaStoreClientFactory); CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore); }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } CloseableThriftHiveMetastoreIfaceClientFactory( TunnelingMetaStoreClientFactory tunnelingMetaStoreClientFactory, DefaultMetaStoreClientFactory defaultMetaStoreClientFactory); CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore); }
@Test public void tunnelingFactory() { MetastoreTunnel metastoreTunnel = new MetastoreTunnel(); metastoreTunnel.setLocalhost("local-machine"); metastoreTunnel.setPort(2222); metastoreTunnel.setRoute("a -> b -> c"); metastoreTunnel.setKnownHosts("knownHosts"); metastoreTunnel.setPrivateKeys("privateKeys"); metastoreTunnel.setTimeout(123); AbstractMetaStore federatedMetaStore = newFederatedInstance("fed1", THRIFT_URI); federatedMetaStore.setMetastoreTunnel(metastoreTunnel); factory.newInstance(federatedMetaStore); verify(tunnelingMetaStoreClientFactory).newInstance(THRIFT_URI, metastoreTunnel, "fed1", 3, 2000); verifyZeroInteractions(defaultMetaStoreClientFactory); }
public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } CloseableThriftHiveMetastoreIfaceClientFactory( TunnelingMetaStoreClientFactory tunnelingMetaStoreClientFactory, DefaultMetaStoreClientFactory defaultMetaStoreClientFactory); }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } CloseableThriftHiveMetastoreIfaceClientFactory( TunnelingMetaStoreClientFactory tunnelingMetaStoreClientFactory, DefaultMetaStoreClientFactory defaultMetaStoreClientFactory); CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore); }
CloseableThriftHiveMetastoreIfaceClientFactory { public CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore) { String uris = MetaStoreUriNormaliser.normaliseMetaStoreUris(metaStore.getRemoteMetaStoreUris()); String name = metaStore.getName().toLowerCase(Locale.ROOT); int connectionTimeout = Math.max(1, defaultConnectionTimeout + (int) metaStore.getLatency()); if (metaStore.getConnectionType() == TUNNELED) { return tunnelingMetaStoreClientFactory .newInstance(uris, metaStore.getMetastoreTunnel(), name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uris); HiveConfFactory confFactory = new HiveConfFactory(Collections.emptyList(), properties); return defaultMetaStoreClientFactory .newInstance(confFactory.newInstance(), "waggledance-" + name, DEFAULT_CLIENT_FACTORY_RECONNECTION_RETRY, connectionTimeout); } CloseableThriftHiveMetastoreIfaceClientFactory( TunnelingMetaStoreClientFactory tunnelingMetaStoreClientFactory, DefaultMetaStoreClientFactory defaultMetaStoreClientFactory); CloseableThriftHiveMetastoreIface newInstance(AbstractMetaStore metaStore); }
@Test public void get_table_req() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); GetTableRequest tableRequest = new GetTableRequest(DB_NAME, TABLE_NAME); when(delegate.get_table_req(tableRequest)).thenThrow(new TApplicationException("Error")); when(delegate.get_table(DB_NAME, TABLE_NAME)).thenReturn(table); GetTableResult tableResult = thriftHiveMetastoreIface.get_table_req(tableRequest); assertThat(tableResult, is(new GetTableResult(table))); }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void get_table_objects_by_name_req() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); GetTablesRequest tablesRequest = new GetTablesRequest(DB_NAME); tablesRequest.addToTblNames(TABLE_NAME); when(delegate.get_table_objects_by_name_req(tablesRequest)).thenThrow(new TApplicationException("Error")); when(delegate.get_table_objects_by_name(DB_NAME, Lists.newArrayList(TABLE_NAME))) .thenReturn(Lists.newArrayList(table)); GetTablesResult tablesResult = thriftHiveMetastoreIface.get_table_objects_by_name_req(tablesRequest); assertThat(tablesResult, is(new GetTablesResult(Lists.newArrayList(table)))); }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void normalGetTableCallWorks() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); when(delegate.get_table(DB_NAME, TABLE_NAME)).thenReturn(table); Table tableResult = thriftHiveMetastoreIface.get_table(DB_NAME, TABLE_NAME); assertThat(tableResult, is(table)); }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void underlyingExceptionIsThrownWhenCompatibilityFails() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); TApplicationException cause = new TApplicationException("CAUSE"); when(delegate.get_all_databases()).thenThrow(cause); try { thriftHiveMetastoreIface.get_all_databases(); fail("exception should have been thrown"); } catch (TApplicationException e) { assertThat(e, is(cause)); } }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void compatibilityExceptionIsThrownWhenCompatibilityFailsOnTException() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); GetTableRequest tableRequest = new GetTableRequest(DB_NAME, TABLE_NAME); when(delegate.get_table_req(tableRequest)) .thenThrow(new TApplicationException("ApplicationException, should not be thrown")); NoSuchObjectException cause = new NoSuchObjectException("Should be thrown, this is called from compatiblity layer"); when(delegate.get_table(DB_NAME, TABLE_NAME)).thenThrow(cause); try { thriftHiveMetastoreIface.get_table_req(tableRequest); fail("exception should have been thrown"); } catch (NoSuchObjectException e) { assertThat(e, is(cause)); } }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void underlyingyExceptionIsThrownWhenCompatibilityFailsOnTApplication() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); GetTableRequest tableRequest = new GetTableRequest(DB_NAME, TABLE_NAME); TApplicationException cause = new TApplicationException("Should be thrown"); when(delegate.get_table_req(tableRequest)).thenThrow(cause); when(delegate.get_table(DB_NAME, TABLE_NAME)) .thenThrow(new TApplicationException("should not be thrown, this is called from compatiblity layer")); try { thriftHiveMetastoreIface.get_table_req(tableRequest); fail("exception should have been thrown"); } catch (TApplicationException e) { assertThat(e, is(cause)); } }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void nonTApplicationExceptionsAreThrown() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); GetTableRequest tableRequest = new GetTableRequest(DB_NAME, TABLE_NAME); NoSuchObjectException cause = new NoSuchObjectException("Normal Error nothing to do with compatibility"); when(delegate.get_table_req(tableRequest)).thenThrow(cause); try { thriftHiveMetastoreIface.get_table_req(tableRequest); fail("exception should have been thrown"); } catch (TException e) { assertThat(e, is(cause)); verify(delegate, never()).get_table(DB_NAME, TABLE_NAME); } }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void validMetastoreTunnel() { metaStore.setMetastoreTunnel(newMetastoreTunnel()); Set<ConstraintViolation<T>> violations = validator.validate(metaStore); assertThat(violations.size(), is(0)); }
public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; }
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } }
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); }
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
@Test public void get_primary_keys() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); PrimaryKeysRequest primaryKeysRequest = new PrimaryKeysRequest(DB_NAME, TABLE_NAME); when(delegate.get_primary_keys(primaryKeysRequest)).thenThrow(new TApplicationException("Error")); PrimaryKeysResponse primaryKeysResponse = thriftHiveMetastoreIface.get_primary_keys(primaryKeysRequest); assertThat(primaryKeysResponse, is(new PrimaryKeysResponse(Collections.emptyList()))); verify(delegate).get_table(DB_NAME, TABLE_NAME); }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void get_foreign_keys() throws Exception { CloseableThriftHiveMetastoreIface thriftHiveMetastoreIface = factory.newInstance(delegate); ForeignKeysRequest foreignKeysRequest = new ForeignKeysRequest(null, null, DB_NAME, TABLE_NAME); when(delegate.get_foreign_keys(foreignKeysRequest)).thenThrow(new TApplicationException("Error")); ForeignKeysResponse foreignKeysResponse = thriftHiveMetastoreIface.get_foreign_keys(foreignKeysRequest); assertThat(foreignKeysResponse, is(new ForeignKeysResponse(Collections.emptyList()))); verify(delegate).get_table(DB_NAME, TABLE_NAME); }
public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
HiveCompatibleThriftHiveMetastoreIfaceFactory { public CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate) { HiveThriftMetaStoreIfaceCompatibility compatibility = new HiveThriftMetaStoreIfaceCompatibility1xx(delegate); return newInstance(delegate, compatibility); } CloseableThriftHiveMetastoreIface newInstance(ThriftHiveMetastore.Client delegate); }
@Test public void newInstance() { tunnelingMetaStoreClientFactory.newInstance(METASTORE_URI, metastoreTunnel, NAME, RECONNECTION_RETRIES, CONNECTION_TIMEOUT); verify(tunnelableFactory) .wrap(tunnelableSupplierCaptor.capture(), eq(tunnelingMetaStoreClientFactory.METHOD_CHECKER), eq(metastoreTunnel.getLocalhost()), anyInt(), eq(METASTORE_HOST), eq(METASTORE_PORT)); TunnelableSupplier<CloseableThriftHiveMetastoreIface> tunnelable = tunnelableSupplierCaptor.getValue(); assertThat(tunnelable, is(hiveMetaStoreClientSupplier)); }
public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout); }
@Test public void newInstanceMultipleUris() { String metastoreUris = METASTORE_URI + ",thrift: tunnelingMetaStoreClientFactory.newInstance(metastoreUris, metastoreTunnel, NAME, RECONNECTION_RETRIES, CONNECTION_TIMEOUT); verify(tunnelableFactory) .wrap(tunnelableSupplierCaptor.capture(), eq(tunnelingMetaStoreClientFactory.METHOD_CHECKER), eq(metastoreTunnel.getLocalhost()), anyInt(), eq(METASTORE_HOST), eq(METASTORE_PORT)); TunnelableSupplier<CloseableThriftHiveMetastoreIface> tunnelable = tunnelableSupplierCaptor.getValue(); assertThat(tunnelable, is(hiveMetaStoreClientSupplier)); }
public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout); }
@Test public void localHiveConfigUsesCorrectParameters() { tunnelingMetaStoreClientFactory.newInstance(METASTORE_URI, metastoreTunnel, NAME, RECONNECTION_RETRIES, CONNECTION_TIMEOUT); ArgumentCaptor<String> localHostCaptor = ArgumentCaptor.forClass(String.class); ArgumentCaptor<HiveConf> hiveConfCaptor = ArgumentCaptor.forClass(HiveConf.class); verify(localHiveConfFactory).newInstance(localHostCaptor.capture(), anyInt(), hiveConfCaptor.capture()); assertThat(localHostCaptor.getValue(), is(TUNNEL_LOCALHOST)); HiveConf hiveConf = hiveConfCaptor.getValue(); assertThat(hiveConf.get(ConfVars.METASTOREURIS.varname), is(METASTORE_URI)); }
public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout); }
TunnelingMetaStoreClientFactory { public CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout) { String uri = uris; String[] urisSplit = uri.split(","); if (urisSplit.length > 1) { uri = urisSplit[0]; LOG.debug("Can't support multiple uris '{}' for tunneling endpoint, using first '{}'", uris, uri); } String localHost = metastoreTunnel.getLocalhost(); int localPort = getLocalPort(); Map<String, String> properties = new HashMap<>(); properties.put(ConfVars.METASTOREURIS.varname, uri); HiveConfFactory confFactory = new HiveConfFactory(Collections.<String>emptyList(), properties); HiveConf localHiveConf = localHiveConfFactory.newInstance(localHost, localPort, confFactory.newInstance()); TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = tunnelableFactorySupplier .get(metastoreTunnel); LOG .info("Metastore URI {} is being proxied through {}", uri, localHiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); HiveMetaStoreClientSupplier supplier = hiveMetaStoreClientSupplierFactory .newInstance(localHiveConf, name, reconnectionRetries, connectionTimeout); URI metaStoreUri = URI.create(uri); String remoteHost = metaStoreUri.getHost(); int remotePort = metaStoreUri.getPort(); return (CloseableThriftHiveMetastoreIface) tunnelableFactory .wrap(supplier, METHOD_CHECKER, localHost, localPort, remoteHost, remotePort); } TunnelingMetaStoreClientFactory(); @VisibleForTesting TunnelingMetaStoreClientFactory( TunnelableFactorySupplier tunnelableFactorySupplier, LocalHiveConfFactory localHiveConfFactory, HiveMetaStoreClientSupplierFactory hiveMetaStoreClientSupplierFactory); CloseableThriftHiveMetastoreIface newInstance( String uris, MetastoreTunnel metastoreTunnel, String name, int reconnectionRetries, int connectionTimeout); }
@Test public void get() { TunnelableFactory<CloseableThriftHiveMetastoreIface> tunnelableFactory = supplier.get(metastoreTunnel); assertNotNull(tunnelableFactory); }
public TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel) { return new TunnelableFactory<>(buildSshSettings(metastoreTunnel)); }
TunnelableFactorySupplier { public TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel) { return new TunnelableFactory<>(buildSshSettings(metastoreTunnel)); } }
TunnelableFactorySupplier { public TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel) { return new TunnelableFactory<>(buildSshSettings(metastoreTunnel)); } }
TunnelableFactorySupplier { public TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel) { return new TunnelableFactory<>(buildSshSettings(metastoreTunnel)); } TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel); }
TunnelableFactorySupplier { public TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel) { return new TunnelableFactory<>(buildSshSettings(metastoreTunnel)); } TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel); }
@Test public void buildSshSettings() { SshSettings sshSettings = supplier.buildSshSettings(metastoreTunnel); assertThat(sshSettings.getRoute(), is(TUNNEL_ROUTE)); assertThat(sshSettings.getPrivateKeys(), is(Lists.newArrayList(TUNNEL_PRIVATE_KEY))); assertThat(sshSettings.getKnownHosts(), is(TUNNEL_KNOWN_HOSTS)); assertThat(sshSettings.isStrictHostKeyChecking(), is(true)); }
@VisibleForTesting SshSettings buildSshSettings(MetastoreTunnel metastoreTunnel) { return SshSettings .builder() .withSshPort(metastoreTunnel.getPort()) .withSessionTimeout(metastoreTunnel.getTimeout()) .withRoute(metastoreTunnel.getRoute()) .withKnownHosts(metastoreTunnel.getKnownHosts()) .withLocalhost(metastoreTunnel.getLocalhost()) .withPrivateKeys(metastoreTunnel.getPrivateKeys()) .withStrictHostKeyChecking(metastoreTunnel.isStrictHostKeyCheckingEnabled()) .build(); }
TunnelableFactorySupplier { @VisibleForTesting SshSettings buildSshSettings(MetastoreTunnel metastoreTunnel) { return SshSettings .builder() .withSshPort(metastoreTunnel.getPort()) .withSessionTimeout(metastoreTunnel.getTimeout()) .withRoute(metastoreTunnel.getRoute()) .withKnownHosts(metastoreTunnel.getKnownHosts()) .withLocalhost(metastoreTunnel.getLocalhost()) .withPrivateKeys(metastoreTunnel.getPrivateKeys()) .withStrictHostKeyChecking(metastoreTunnel.isStrictHostKeyCheckingEnabled()) .build(); } }
TunnelableFactorySupplier { @VisibleForTesting SshSettings buildSshSettings(MetastoreTunnel metastoreTunnel) { return SshSettings .builder() .withSshPort(metastoreTunnel.getPort()) .withSessionTimeout(metastoreTunnel.getTimeout()) .withRoute(metastoreTunnel.getRoute()) .withKnownHosts(metastoreTunnel.getKnownHosts()) .withLocalhost(metastoreTunnel.getLocalhost()) .withPrivateKeys(metastoreTunnel.getPrivateKeys()) .withStrictHostKeyChecking(metastoreTunnel.isStrictHostKeyCheckingEnabled()) .build(); } }
TunnelableFactorySupplier { @VisibleForTesting SshSettings buildSshSettings(MetastoreTunnel metastoreTunnel) { return SshSettings .builder() .withSshPort(metastoreTunnel.getPort()) .withSessionTimeout(metastoreTunnel.getTimeout()) .withRoute(metastoreTunnel.getRoute()) .withKnownHosts(metastoreTunnel.getKnownHosts()) .withLocalhost(metastoreTunnel.getLocalhost()) .withPrivateKeys(metastoreTunnel.getPrivateKeys()) .withStrictHostKeyChecking(metastoreTunnel.isStrictHostKeyCheckingEnabled()) .build(); } TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel); }
TunnelableFactorySupplier { @VisibleForTesting SshSettings buildSshSettings(MetastoreTunnel metastoreTunnel) { return SshSettings .builder() .withSshPort(metastoreTunnel.getPort()) .withSessionTimeout(metastoreTunnel.getTimeout()) .withRoute(metastoreTunnel.getRoute()) .withKnownHosts(metastoreTunnel.getKnownHosts()) .withLocalhost(metastoreTunnel.getLocalhost()) .withPrivateKeys(metastoreTunnel.getPrivateKeys()) .withStrictHostKeyChecking(metastoreTunnel.isStrictHostKeyCheckingEnabled()) .build(); } TunnelableFactory<CloseableThriftHiveMetastoreIface> get(MetastoreTunnel metastoreTunnel); }
@Test public void getMetaStoreClientFactoryInstance() { String name = "test"; int reconnectionRetries = 10; int connectionTimeout = 10; HiveMetaStoreClientSupplier supplier = new HiveMetaStoreClientSupplier(factory, hiveConf, name, reconnectionRetries, connectionTimeout); supplier.get(); verify(factory).newInstance(hiveConf, name, reconnectionRetries, connectionTimeout); }
@Override public CloseableThriftHiveMetastoreIface get() { return factory.newInstance(hiveConf, name, reconnectionRetries, connectionTimeout); }
HiveMetaStoreClientSupplier implements TunnelableSupplier<CloseableThriftHiveMetastoreIface> { @Override public CloseableThriftHiveMetastoreIface get() { return factory.newInstance(hiveConf, name, reconnectionRetries, connectionTimeout); } }
HiveMetaStoreClientSupplier implements TunnelableSupplier<CloseableThriftHiveMetastoreIface> { @Override public CloseableThriftHiveMetastoreIface get() { return factory.newInstance(hiveConf, name, reconnectionRetries, connectionTimeout); } HiveMetaStoreClientSupplier(MetaStoreClientFactory factory, HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
HiveMetaStoreClientSupplier implements TunnelableSupplier<CloseableThriftHiveMetastoreIface> { @Override public CloseableThriftHiveMetastoreIface get() { return factory.newInstance(hiveConf, name, reconnectionRetries, connectionTimeout); } HiveMetaStoreClientSupplier(MetaStoreClientFactory factory, HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); @Override CloseableThriftHiveMetastoreIface get(); }
HiveMetaStoreClientSupplier implements TunnelableSupplier<CloseableThriftHiveMetastoreIface> { @Override public CloseableThriftHiveMetastoreIface get() { return factory.newInstance(hiveConf, name, reconnectionRetries, connectionTimeout); } HiveMetaStoreClientSupplier(MetaStoreClientFactory factory, HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); @Override CloseableThriftHiveMetastoreIface get(); }
@Test public void getCorrectHiveConf() { String localHost = "localHost"; int localPort = 10; String expectedUri = "thrift: HiveConf hiveConf = new HiveConf(); HiveConf conf = new LocalHiveConfFactory().newInstance(localHost, localPort, hiveConf); assertThat(conf.getVar(HiveConf.ConfVars.METASTOREURIS), is(expectedUri)); assertThat(conf, not(sameInstance(hiveConf))); }
HiveConf newInstance(String localHost, int localPort, HiveConf hiveConf) { HiveConf localHiveConf = new HiveConf(hiveConf); String proxyMetaStoreUris = "thrift: localHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, proxyMetaStoreUris); return localHiveConf; }
// NOTE(review): the four snippets below are duplicated extractions of the same
// LocalHiveConfFactory class. Each had its thrift URI literal truncated at "thrift:" (the "//"
// was mistaken for a comment); the literal is reconstructed as "thrift://" + host + ":" + port,
// the standard Hive metastore URI format. Otherwise kept as in the original snippets.
LocalHiveConfFactory {
  /** Copies the given conf and points METASTOREURIS at thrift://localHost:localPort. */
  HiveConf newInstance(String localHost, int localPort, HiveConf hiveConf) {
    HiveConf localHiveConf = new HiveConf(hiveConf);
    String proxyMetaStoreUris = "thrift://" + localHost + ":" + localPort; // reconstructed literal
    localHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, proxyMetaStoreUris);
    return localHiveConf;
  }
}
LocalHiveConfFactory {
  /** Copies the given conf and points METASTOREURIS at thrift://localHost:localPort. */
  HiveConf newInstance(String localHost, int localPort, HiveConf hiveConf) {
    HiveConf localHiveConf = new HiveConf(hiveConf);
    String proxyMetaStoreUris = "thrift://" + localHost + ":" + localPort; // reconstructed literal
    localHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, proxyMetaStoreUris);
    return localHiveConf;
  }
}
LocalHiveConfFactory {
  /** Copies the given conf and points METASTOREURIS at thrift://localHost:localPort. */
  HiveConf newInstance(String localHost, int localPort, HiveConf hiveConf) {
    HiveConf localHiveConf = new HiveConf(hiveConf);
    String proxyMetaStoreUris = "thrift://" + localHost + ":" + localPort; // reconstructed literal
    localHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, proxyMetaStoreUris);
    return localHiveConf;
  }
}
LocalHiveConfFactory {
  /** Copies the given conf and points METASTOREURIS at thrift://localHost:localPort. */
  HiveConf newInstance(String localHost, int localPort, HiveConf hiveConf) {
    HiveConf localHiveConf = new HiveConf(hiveConf);
    String proxyMetaStoreUris = "thrift://" + localHost + ":" + localPort; // reconstructed literal
    localHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, proxyMetaStoreUris);
    return localHiveConf;
  }
}
// When the underlying client manager reports open, the wrapper reports open without reconnecting.
@Test
public void isOpen() {
  when(base.isOpen()).thenReturn(true);
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, base);
  assertThat(metastore.isOpen(), is(true));
  verify(base, never()).reconnect();
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// A tunnel with an invalid (negative) port must produce exactly one constraint violation.
@Test
public void invalidMetastoreTunnel() {
  MetastoreTunnel tunnel = newMetastoreTunnel();
  tunnel.setPort(-1);
  metaStore.setMetastoreTunnel(tunnel);
  Set<ConstraintViolation<T>> constraintViolations = validator.validate(metaStore);
  assertThat(constraintViolations.size(), is(1));
}
/** Assigns the tunnel configuration used when connecting to this metastore. */
public void setMetastoreTunnel(MetastoreTunnel tunnel) {
  this.metastoreTunnel = tunnel;
}
// NOTE(review): duplicated extraction snippets of the AbstractMetaStore configuration bean.
// Only setMetastoreTunnel has a body here (plain field assignment); the long lists of bodiless
// constructor/accessor signatures in the last two snippets are extraction artifacts, not
// compilable Java. All four lines kept byte-identical.
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } }
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); }
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
AbstractMetaStore { public void setMetastoreTunnel(MetastoreTunnel metastoreTunnel) { this.metastoreTunnel = metastoreTunnel; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
// A closed connection triggers one reconnect attempt; isOpen() then reports the recovered state.
@Test
public void isOpenWithReconnection() {
  when(base.isOpen()).thenReturn(false).thenReturn(true);
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, base);
  boolean open = metastore.isOpen();
  verify(base).reconnect();
  assertThat(open, is(true));
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// A RuntimeException from the underlying manager is swallowed and reported as "not open".
@Test
public void isOpenThrowsException() {
  when(base.isOpen()).thenThrow(new RuntimeException());
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, base);
  assertThat(metastore.isOpen(), is(false));
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// Closing a wrapper built over a null manager must be a no-op rather than an NPE.
@Test
public void closeNullBase() throws Exception {
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, null);
  metastore.close();
  verify(base, never()).close();
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// A plain method call is forwarded to the manager's delegate client and its result returned.
@Test
public void defaultMethodCall() throws Exception {
  when(base.getClient()).thenReturn(client);
  when(client.getName()).thenReturn("ourName");
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, base);
  assertThat(metastore.getName(), is("ourName"));
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// A TTransportException on the first attempt triggers a reconnect; the retried call succeeds.
@Test
public void defaultMethodCallThrowsTransportExceptionRetries() throws TException {
  when(base.getClient()).thenReturn(client);
  when(client.getName()).thenThrow(new TTransportException()).thenReturn("ourName");
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, base);
  String name = metastore.getName();
  verify(base).reconnect();
  assertThat(name, is("ourName"));
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// shutdown() is not retried: a transport failure surfaces as MetastoreUnavailableException.
@Test(expected = MetastoreUnavailableException.class)
public void shutdownThrowsTransportExceptionNoRetry() throws TException {
  when(base.getClient()).thenReturn(client);
  doThrow(new TTransportException()).when(client).shutdown();
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, base);
  metastore.shutdown();
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// With zero retries configured, a transport failure immediately becomes MetastoreUnavailableException.
@Test(expected = MetastoreUnavailableException.class)
public void defaultMethodCallThrowsTransportExceptionNoRetriesLeft() throws TException {
  when(base.getClient()).thenReturn(client);
  when(client.getName()).thenThrow(new TTransportException());
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", 0, base);
  metastore.getName();
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// A non-transport TException is not retried or wrapped: it propagates to the caller as-is.
@Test(expected = TException.class)
public void defaultMethodCallThrowsRealException() throws TException {
  when(base.getClient()).thenReturn(client);
  when(client.getName()).thenThrow(new TException());
  CloseableThriftHiveMetastoreIface metastore = factory.newInstance("name", RECONNECTION_RETRIES, base);
  metastore.getName();
}
// NOTE(review): duplicated extraction snippets of DefaultMetaStoreClientFactory.newInstance.
// The HiveConf-based overload wraps the conf in a new ThriftMetastoreClientManager (with the
// given connection timeout) and delegates to a name/retries overload not visible here.
// Bodiless member signatures are extraction artifacts, not compilable Java; kept byte-identical.
@Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
DefaultMetaStoreClientFactory implements MetaStoreClientFactory { @Override public CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout) { return newInstance(name, reconnectionRetries, new ThriftMetastoreClientManager(hiveConf, new HiveCompatibleThriftHiveMetastoreIfaceFactory(), connectionTimeout)); } @Override CloseableThriftHiveMetastoreIface newInstance( HiveConf hiveConf, String name, int reconnectionRetries, int connectionTimeout); }
// A very small connection timeout (1) makes open() exhaust all attempts and throw RuntimeException.
@Test(expected = RuntimeException.class)
public void openSlowConnection() {
  client = new ThriftMetastoreClientManager(hiveConf, hiveCompatibleThriftHiveMetastoreIfaceFactory, 1);
  client.open();
}
/**
 * Opens a Thrift connection to one of the configured metastore URIs.
 *
 * <p>Tries every URI in {@code metastoreUris} per attempt, for up to {@code retries} attempts,
 * sleeping {@code retryDelaySeconds} between attempts. Supports SASL (delegation-token DIGEST or
 * Kerberos), framed transport, and compact protocol, all driven by the Hive configuration.
 * Sets {@code isConnected}, {@code transport} and {@code client} on success.
 *
 * <p>No-op if already connected.
 *
 * @throws RuntimeException if no URI could be reached after all attempts; the message includes
 *     the most recent transport failure.
 */
void open() {
  if (isConnected) {
    return;
  }
  TException te = null; // most recent transport-open failure, reported if all attempts fail
  boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
  boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT);
  boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL);
  int clientSocketTimeout = (int) conf.getTimeVar(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
  for (int attempt = 0; !isConnected && (attempt < retries); ++attempt) {
    for (URI store : metastoreUris) {
      LOG.info("Trying to connect to metastore with URI " + store);
      try {
        transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout, connectionTimeout);
        if (useSasl) {
          try {
            HadoopThriftAuthBridge.Client authBridge = ShimLoader.getHadoopThriftAuthBridge().createClient();
            String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE);
            String tokenStrForm = Utils.getTokenStrForm(tokenSig);
            if (tokenStrForm != null) {
              // A delegation token is available: authenticate via DIGEST.
              transport = authBridge
                  .createClientTransport(null, store.getHost(), "DIGEST", tokenStrForm, transport,
                      MetaStoreUtils.getMetaStoreSaslProperties(conf));
            } else {
              // Otherwise fall back to Kerberos with the configured metastore principal.
              String principalConfig = conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL);
              transport = authBridge
                  .createClientTransport(principalConfig, store.getHost(), "KERBEROS", null, transport,
                      MetaStoreUtils.getMetaStoreSaslProperties(conf));
            }
          } catch (IOException ioe) {
            LOG.error("Couldn't create client transport", ioe);
            // Re-thrown as MetaException so the outer per-URI catch logs it and moves on.
            throw new MetaException(ioe.toString());
          }
        } else if (useFramedTransport) {
          transport = new TFramedTransport(transport);
        }
        TProtocol protocol;
        if (useCompactProtocol) {
          protocol = new TCompactProtocol(transport);
        } else {
          protocol = new TBinaryProtocol(transport);
        }
        client = hiveCompatibleThriftHiveMetastoreIfaceFactory.newInstance(new ThriftHiveMetastore.Client(protocol));
        try {
          transport.open();
          LOG
              .info("Opened a connection to metastore '" + store + "', total current connections to all metastores: "
                  + CONN_COUNT.incrementAndGet());
          isConnected = true;
        } catch (TException e) {
          te = e;
          if (LOG.isDebugEnabled()) {
            LOG.warn("Failed to connect to the MetaStore Server...", e);
          } else {
            LOG.warn("Failed to connect to the MetaStore Server...");
          }
        }
      } catch (MetaException e) {
        LOG.error("Unable to connect to metastore with URI " + store + " in attempt " + attempt, e);
      }
      if (isConnected) {
        break;
      }
    }
    if (!isConnected && (retryDelaySeconds > 0) && ((attempt + 1) < retries)) {
      try {
        LOG.info("Waiting " + retryDelaySeconds + " seconds before next connection attempt.");
        Thread.sleep(retryDelaySeconds * 1000);
      } catch (InterruptedException ignored) {
        // FIX: the original swallowed the interrupt. Restore the interrupt status so callers
        // (and subsequent blocking calls) can observe the cancellation request.
        Thread.currentThread().interrupt();
      }
    }
  }
  if (!isConnected) {
    throw new RuntimeException("Could not connect to meta store using any of the URIs provided. Most recent failure: "
        + StringUtils.stringifyException(te));
  }
  LOG.info("Connected to metastore.");
}
// NOTE(review): the following dataset-style rows repeat ThriftMetastoreClientManager.open()
// inside the class outline. In every pair the "Opened a connection to metastore ... total
// current connections" log string literal is split across the line break by extraction and
// would NOT compile as-is — verify against the original source. Later rows append the
// constructor and close() signatures with their bodies stripped. Also note open() swallows
// InterruptedException during the retry sleep without restoring interrupt status.
ThriftMetastoreClientManager implements Closeable { void open() { if (isConnected) { return; } TException te = null; boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL); boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT); boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL); int clientSocketTimeout = (int) conf.getTimeVar(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS); for (int attempt = 0; !isConnected && (attempt < retries); ++attempt) { for (URI store : metastoreUris) { LOG.info("Trying to connect to metastore with URI " + store); try { transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout, connectionTimeout); if (useSasl) { try { HadoopThriftAuthBridge.Client authBridge = ShimLoader.getHadoopThriftAuthBridge().createClient(); String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE); String tokenStrForm = Utils.getTokenStrForm(tokenSig); if (tokenStrForm != null) { transport = authBridge .createClientTransport(null, store.getHost(), "DIGEST", tokenStrForm, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } else { String principalConfig = conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL); transport = authBridge .createClientTransport(principalConfig, store.getHost(), "KERBEROS", null, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } } catch (IOException ioe) { LOG.error("Couldn't create client transport", ioe); throw new MetaException(ioe.toString()); } } else if (useFramedTransport) { transport = new TFramedTransport(transport); } TProtocol protocol; if (useCompactProtocol) { protocol = new TCompactProtocol(transport); } else { protocol = new TBinaryProtocol(transport); } client = hiveCompatibleThriftHiveMetastoreIfaceFactory.newInstance(new ThriftHiveMetastore.Client(protocol)); try { transport.open(); LOG .info("Opened a connection to metastore '" + store + "', total current 
connections to all metastores: " + CONN_COUNT.incrementAndGet()); isConnected = true; } catch (TException e) { te = e; if (LOG.isDebugEnabled()) { LOG.warn("Failed to connect to the MetaStore Server...", e); } else { LOG.warn("Failed to connect to the MetaStore Server..."); } } } catch (MetaException e) { LOG.error("Unable to connect to metastore with URI " + store + " in attempt " + attempt, e); } if (isConnected) { break; } } if (!isConnected && (retryDelaySeconds > 0) && ((attempt + 1) < retries)) { try { LOG.info("Waiting " + retryDelaySeconds + " seconds before next connection attempt."); Thread.sleep(retryDelaySeconds * 1000); } catch (InterruptedException ignore) {} } } if (!isConnected) { throw new RuntimeException("Could not connect to meta store using any of the URIs provided. Most recent failure: " + StringUtils.stringifyException(te)); } LOG.info("Connected to metastore."); } }
// NOTE(review): same row with the constructor signature appended (body stripped).
ThriftMetastoreClientManager implements Closeable { void open() { if (isConnected) { return; } TException te = null; boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL); boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT); boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL); int clientSocketTimeout = (int) conf.getTimeVar(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS); for (int attempt = 0; !isConnected && (attempt < retries); ++attempt) { for (URI store : metastoreUris) { LOG.info("Trying to connect to metastore with URI " + store); try { transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout, connectionTimeout); if (useSasl) { try { HadoopThriftAuthBridge.Client authBridge = ShimLoader.getHadoopThriftAuthBridge().createClient(); String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE); String tokenStrForm = Utils.getTokenStrForm(tokenSig); if (tokenStrForm != null) { transport = authBridge .createClientTransport(null, store.getHost(), "DIGEST", tokenStrForm, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } else { String principalConfig = conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL); transport = authBridge .createClientTransport(principalConfig, store.getHost(), "KERBEROS", null, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } } catch (IOException ioe) { LOG.error("Couldn't create client transport", ioe); throw new MetaException(ioe.toString()); } } else if (useFramedTransport) { transport = new TFramedTransport(transport); } TProtocol protocol; if (useCompactProtocol) { protocol = new TCompactProtocol(transport); } else { protocol = new TBinaryProtocol(transport); } client = hiveCompatibleThriftHiveMetastoreIfaceFactory.newInstance(new ThriftHiveMetastore.Client(protocol)); try { transport.open(); LOG .info("Opened a connection to metastore '" + store + "', total current 
connections to all metastores: " + CONN_COUNT.incrementAndGet()); isConnected = true; } catch (TException e) { te = e; if (LOG.isDebugEnabled()) { LOG.warn("Failed to connect to the MetaStore Server...", e); } else { LOG.warn("Failed to connect to the MetaStore Server..."); } } } catch (MetaException e) { LOG.error("Unable to connect to metastore with URI " + store + " in attempt " + attempt, e); } if (isConnected) { break; } } if (!isConnected && (retryDelaySeconds > 0) && ((attempt + 1) < retries)) { try { LOG.info("Waiting " + retryDelaySeconds + " seconds before next connection attempt."); Thread.sleep(retryDelaySeconds * 1000); } catch (InterruptedException ignore) {} } } if (!isConnected) { throw new RuntimeException("Could not connect to meta store using any of the URIs provided. Most recent failure: " + StringUtils.stringifyException(te)); } LOG.info("Connected to metastore."); } ThriftMetastoreClientManager( HiveConf conf, HiveCompatibleThriftHiveMetastoreIfaceFactory hiveCompatibleThriftHiveMetastoreIfaceFactory, int connectionTimeout); }
// NOTE(review): same row with constructor and close() signatures appended (bodies stripped).
ThriftMetastoreClientManager implements Closeable { void open() { if (isConnected) { return; } TException te = null; boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL); boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT); boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL); int clientSocketTimeout = (int) conf.getTimeVar(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS); for (int attempt = 0; !isConnected && (attempt < retries); ++attempt) { for (URI store : metastoreUris) { LOG.info("Trying to connect to metastore with URI " + store); try { transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout, connectionTimeout); if (useSasl) { try { HadoopThriftAuthBridge.Client authBridge = ShimLoader.getHadoopThriftAuthBridge().createClient(); String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE); String tokenStrForm = Utils.getTokenStrForm(tokenSig); if (tokenStrForm != null) { transport = authBridge .createClientTransport(null, store.getHost(), "DIGEST", tokenStrForm, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } else { String principalConfig = conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL); transport = authBridge .createClientTransport(principalConfig, store.getHost(), "KERBEROS", null, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } } catch (IOException ioe) { LOG.error("Couldn't create client transport", ioe); throw new MetaException(ioe.toString()); } } else if (useFramedTransport) { transport = new TFramedTransport(transport); } TProtocol protocol; if (useCompactProtocol) { protocol = new TCompactProtocol(transport); } else { protocol = new TBinaryProtocol(transport); } client = hiveCompatibleThriftHiveMetastoreIfaceFactory.newInstance(new ThriftHiveMetastore.Client(protocol)); try { transport.open(); LOG .info("Opened a connection to metastore '" + store + "', total current 
connections to all metastores: " + CONN_COUNT.incrementAndGet()); isConnected = true; } catch (TException e) { te = e; if (LOG.isDebugEnabled()) { LOG.warn("Failed to connect to the MetaStore Server...", e); } else { LOG.warn("Failed to connect to the MetaStore Server..."); } } } catch (MetaException e) { LOG.error("Unable to connect to metastore with URI " + store + " in attempt " + attempt, e); } if (isConnected) { break; } } if (!isConnected && (retryDelaySeconds > 0) && ((attempt + 1) < retries)) { try { LOG.info("Waiting " + retryDelaySeconds + " seconds before next connection attempt."); Thread.sleep(retryDelaySeconds * 1000); } catch (InterruptedException ignore) {} } } if (!isConnected) { throw new RuntimeException("Could not connect to meta store using any of the URIs provided. Most recent failure: " + StringUtils.stringifyException(te)); } LOG.info("Connected to metastore."); } ThriftMetastoreClientManager( HiveConf conf, HiveCompatibleThriftHiveMetastoreIfaceFactory hiveCompatibleThriftHiveMetastoreIfaceFactory, int connectionTimeout); @Override void close(); }
// NOTE(review): exact duplicate of the row above.
ThriftMetastoreClientManager implements Closeable { void open() { if (isConnected) { return; } TException te = null; boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL); boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT); boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL); int clientSocketTimeout = (int) conf.getTimeVar(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS); for (int attempt = 0; !isConnected && (attempt < retries); ++attempt) { for (URI store : metastoreUris) { LOG.info("Trying to connect to metastore with URI " + store); try { transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout, connectionTimeout); if (useSasl) { try { HadoopThriftAuthBridge.Client authBridge = ShimLoader.getHadoopThriftAuthBridge().createClient(); String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE); String tokenStrForm = Utils.getTokenStrForm(tokenSig); if (tokenStrForm != null) { transport = authBridge .createClientTransport(null, store.getHost(), "DIGEST", tokenStrForm, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } else { String principalConfig = conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL); transport = authBridge .createClientTransport(principalConfig, store.getHost(), "KERBEROS", null, transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } } catch (IOException ioe) { LOG.error("Couldn't create client transport", ioe); throw new MetaException(ioe.toString()); } } else if (useFramedTransport) { transport = new TFramedTransport(transport); } TProtocol protocol; if (useCompactProtocol) { protocol = new TCompactProtocol(transport); } else { protocol = new TBinaryProtocol(transport); } client = hiveCompatibleThriftHiveMetastoreIfaceFactory.newInstance(new ThriftHiveMetastore.Client(protocol)); try { transport.open(); LOG .info("Opened a connection to metastore '" + store + "', total current 
connections to all metastores: " + CONN_COUNT.incrementAndGet()); isConnected = true; } catch (TException e) { te = e; if (LOG.isDebugEnabled()) { LOG.warn("Failed to connect to the MetaStore Server...", e); } else { LOG.warn("Failed to connect to the MetaStore Server..."); } } } catch (MetaException e) { LOG.error("Unable to connect to metastore with URI " + store + " in attempt " + attempt, e); } if (isConnected) { break; } } if (!isConnected && (retryDelaySeconds > 0) && ((attempt + 1) < retries)) { try { LOG.info("Waiting " + retryDelaySeconds + " seconds before next connection attempt."); Thread.sleep(retryDelaySeconds * 1000); } catch (InterruptedException ignore) {} } } if (!isConnected) { throw new RuntimeException("Could not connect to meta store using any of the URIs provided. Most recent failure: " + StringUtils.stringifyException(te)); } LOG.info("Connected to metastore."); } ThriftMetastoreClientManager( HiveConf conf, HiveCompatibleThriftHiveMetastoreIfaceFactory hiveCompatibleThriftHiveMetastoreIfaceFactory, int connectionTimeout); @Override void close(); }
// Verifies PREFIXED database resolution: the factory must consult the naming strategy and the
// access-control handler factory, and the resulting mapping exposes databases under the
// "fed1_" prefix while keeping the metastore mapping name "fed1".
@Test public void typicalPrefixed() { when(waggleDanceConfiguration.getDatabaseResolution()).thenReturn(DatabaseResolution.PREFIXED); AbstractMetaStore federatedMetaStore = newFederatedInstance("fed1", thrift.getThriftConnectionUri()); MetaStoreMapping mapping = factory.newInstance(federatedMetaStore); assertThat(mapping, is(notNullValue())); verify(prefixNamingStrategy).apply(federatedMetaStore); verify(accessControlHandlerFactory).newInstance(federatedMetaStore); assertThat(mapping.getDatabasePrefix(), is("fed1_")); assertThat(mapping.getMetastoreMappingName(), is("fed1")); }
/**
 * Builds a {@link MetaStoreMapping} for the supplied metastore descriptor.
 * The base mapping wires together the prefix name, a Thrift client, and the access-control
 * handler; under PREFIXED resolution it is additionally wrapped in a {@code PrefixMapping}
 * before the database-name mapping is applied.
 */
@SuppressWarnings("resource")
@Override
public MetaStoreMapping newInstance(AbstractMetaStore metaStore) {
  LOG.info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(),
      metaStore.getRemoteMetaStoreUris());
  MetaStoreMapping baseMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(),
      createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(),
      metaStore.getLatency());
  // Only PREFIXED resolution requires the extra prefix-rewriting layer.
  boolean prefixed = waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED;
  MetaStoreMapping resolvedMapping = prefixed ? new PrefixMapping(baseMapping) : baseMapping;
  return new DatabaseNameMapping(resolvedMapping, metaStore.getDatabaseNameBiMapping());
}
// NOTE(review): dataset-style rows — each line repeats MetaStoreMappingFactoryImpl.newInstance
// inside a class outline with progressively more member signatures appended; the `class`
// keyword and member bodies appear stripped by extraction. TODO confirm against the original.
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } }
// NOTE(review): variant with the @Autowired constructor signature appended (body stripped).
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); }
// NOTE(review): variant with newInstance and prefixNameFor signatures also appended.
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
// NOTE(review): exact duplicate of the row above.
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
// A null metastore name must be rejected by bean validation with exactly one constraint violation.
@Test public void nullName() { metaStore.setName(null); Set<ConstraintViolation<T>> violations = validator.validate(metaStore); assertThat(violations.size(), is(1)); }
// Simple mutator for the metastore's logical name (null is rejected later by bean validation).
public void setName(String name) { this.name = name; }
// NOTE(review): dataset-style rows — AbstractMetaStore outline around setName with
// progressively more member signatures appended; `class`/`abstract` keywords and member
// bodies appear stripped by extraction. TODO confirm against the original file.
AbstractMetaStore { public void setName(String name) { this.name = name; } }
AbstractMetaStore { public void setName(String name) { this.name = name; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); }
// NOTE(review): full member listing; the next row is an exact duplicate.
AbstractMetaStore { public void setName(String name) { this.name = name; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
AbstractMetaStore { public void setName(String name) { this.name = name; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
// Verifies the default (non-PREFIXED) resolution path: naming strategy and access-control
// factory are still consulted, and the mapping reports prefix "fed1_" and name "fed1".
// NOTE(review): the "fed1_" prefix presumably comes from the mocked prefixNamingStrategy —
// confirm in the test fixture setup.
@Test public void typicalNonPrefixed() { AbstractMetaStore federatedMetaStore = newFederatedInstance("fed1", thrift.getThriftConnectionUri()); MetaStoreMapping mapping = factory.newInstance(federatedMetaStore); assertThat(mapping, is(notNullValue())); verify(prefixNamingStrategy).apply(federatedMetaStore); verify(accessControlHandlerFactory).newInstance(federatedMetaStore); assertThat(mapping.getDatabasePrefix(), is("fed1_")); assertThat(mapping.getMetastoreMappingName(), is("fed1")); }
/**
 * Creates the {@link MetaStoreMapping} for a metastore descriptor: base mapping plus a
 * database-name mapping, with an extra prefix-rewriting wrapper under PREFIXED resolution.
 */
@SuppressWarnings("resource")
@Override
public MetaStoreMapping newInstance(AbstractMetaStore metaStore) {
  LOG.info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(),
      metaStore.getRemoteMetaStoreUris());
  MetaStoreMapping mapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(),
      createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(),
      metaStore.getLatency());
  // Guard clause: anything other than PREFIXED resolution skips the prefix wrapper.
  if (waggleDanceConfiguration.getDatabaseResolution() != DatabaseResolution.PREFIXED) {
    return new DatabaseNameMapping(mapping, metaStore.getDatabaseNameBiMapping());
  }
  return new DatabaseNameMapping(new PrefixMapping(mapping), metaStore.getDatabaseNameBiMapping());
}
// NOTE(review): dataset-style rows — repeats of the MetaStoreMappingFactoryImpl outline above
// (class keyword and member bodies stripped by extraction); the last two rows are duplicates.
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
// Verifies transparent reconnection: after mapping.close(), the next client call must reopen
// the connection and return the same database list as before.
@Test public void reconnection() throws Exception { MetaStoreMapping mapping = factory.newInstance(newFederatedInstance("fed1", thrift.getThriftConnectionUri())); assertThat(mapping.getClient().get_all_databases(), is(Arrays.asList("default", "test_db"))); mapping.close(); assertThat(mapping.getClient().get_all_databases(), is(Arrays.asList("default", "test_db"))); }
@SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
@Test public void connectionLost() throws Exception { MetaStoreMapping mapping = factory.newInstance(newFederatedInstance("fed1", thrift.getThriftConnectionUri())); assertThat(mapping.getClient().get_all_databases(), is(Arrays.asList("default", "test_db"))); thrift.client().reconnect(); assertThat(mapping.getClient().get_all_databases(), is(Arrays.asList("default", "test_db"))); }
@SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
@Test public void unreachableMetastoreClient() { CloseableThriftHiveMetastoreIfaceClientFactory closeableThriftHiveMetastoreIfaceClientFactory = Mockito .mock(CloseableThriftHiveMetastoreIfaceClientFactory.class); MetaStoreMappingFactoryImpl factory = new MetaStoreMappingFactoryImpl(waggleDanceConfiguration, prefixNamingStrategy, closeableThriftHiveMetastoreIfaceClientFactory, accessControlHandlerFactory); AbstractMetaStore federatedMetaStore = newFederatedInstance("fed1", thrift.getThriftConnectionUri()); when(closeableThriftHiveMetastoreIfaceClientFactory.newInstance(federatedMetaStore)) .thenThrow(new RuntimeException("Cannot create client")); MetaStoreMapping mapping = factory.newInstance(federatedMetaStore); assertThat(mapping, is(notNullValue())); assertThat(mapping.isAvailable(), is(false)); try { mapping.getClient().getStatusDetails(); } catch (TException e) { assertThat("Metastore 'fed1' unavailable", is(e.getMessage())); } }
@SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
MetaStoreMappingFactoryImpl implements MetaStoreMappingFactory { @SuppressWarnings("resource") @Override public MetaStoreMapping newInstance(AbstractMetaStore metaStore) { LOG .info("Mapping databases with name '{}' to metastore: {}", metaStore.getName(), metaStore.getRemoteMetaStoreUris()); MetaStoreMapping metaStoreMapping = new MetaStoreMappingImpl(prefixNameFor(metaStore), metaStore.getName(), createClient(metaStore), accessControlHandlerFactory.newInstance(metaStore), metaStore.getConnectionType(), metaStore.getLatency()); if (waggleDanceConfiguration.getDatabaseResolution() == DatabaseResolution.PREFIXED) { return new DatabaseNameMapping(new PrefixMapping(metaStoreMapping), metaStore.getDatabaseNameBiMapping()); } else { return new DatabaseNameMapping(metaStoreMapping, metaStore.getDatabaseNameBiMapping()); } } @Autowired MetaStoreMappingFactoryImpl( WaggleDanceConfiguration waggleDanceConfiguration, PrefixNamingStrategy prefixNamingStrategy, CloseableThriftHiveMetastoreIfaceClientFactory metaStoreClientFactory, AccessControlHandlerFactory accessControlHandlerFactory); @SuppressWarnings("resource") @Override MetaStoreMapping newInstance(AbstractMetaStore metaStore); @Override String prefixNameFor(AbstractMetaStore federatedMetaStore); }
@Test public void checkWritePermissions() throws Exception { decorator.checkWritePermissions("db"); verify(metaStoreMapping).checkWritePermissions("db"); }
@Override public MetaStoreMapping checkWritePermissions(String databaseName) { return metaStoreMapping.checkWritePermissions(databaseName); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public MetaStoreMapping checkWritePermissions(String databaseName) { return metaStoreMapping.checkWritePermissions(databaseName); } }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public MetaStoreMapping checkWritePermissions(String databaseName) { return metaStoreMapping.checkWritePermissions(databaseName); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public MetaStoreMapping checkWritePermissions(String databaseName) { return metaStoreMapping.checkWritePermissions(databaseName); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public MetaStoreMapping checkWritePermissions(String databaseName) { return metaStoreMapping.checkWritePermissions(databaseName); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
@Test public void close() throws Exception { decorator.close(); verify(metaStoreMapping).close(); }
@Override public void close() throws IOException { metaStoreMapping.close(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void close() throws IOException { metaStoreMapping.close(); } }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void close() throws IOException { metaStoreMapping.close(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void close() throws IOException { metaStoreMapping.close(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void close() throws IOException { metaStoreMapping.close(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
@Test public void createDatabase() throws Exception { Database database = new Database(); decorator.createDatabase(database); verify(metaStoreMapping).createDatabase(database); }
@Override public void createDatabase(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { metaStoreMapping.createDatabase(database); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void createDatabase(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { metaStoreMapping.createDatabase(database); } }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void createDatabase(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { metaStoreMapping.createDatabase(database); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void createDatabase(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { metaStoreMapping.createDatabase(database); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public void createDatabase(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { metaStoreMapping.createDatabase(database); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
@Test public void getClient() throws Exception { when(metaStoreMapping.getClient()).thenReturn(client); Iface result = decorator.getClient(); assertThat(result, is(client)); }
@Override public Iface getClient() { return metaStoreMapping.getClient(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public Iface getClient() { return metaStoreMapping.getClient(); } }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public Iface getClient() { return metaStoreMapping.getClient(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public Iface getClient() { return metaStoreMapping.getClient(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public Iface getClient() { return metaStoreMapping.getClient(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
@Test public void getDatabasePrefix() throws Exception { when(metaStoreMapping.getDatabasePrefix()).thenReturn("pre"); String result = decorator.getDatabasePrefix(); assertThat(result, is("pre")); }
@Override public String getDatabasePrefix() { return metaStoreMapping.getDatabasePrefix(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public String getDatabasePrefix() { return metaStoreMapping.getDatabasePrefix(); } }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public String getDatabasePrefix() { return metaStoreMapping.getDatabasePrefix(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public String getDatabasePrefix() { return metaStoreMapping.getDatabasePrefix(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public String getDatabasePrefix() { return metaStoreMapping.getDatabasePrefix(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
@Test public void getLatency() throws Exception { when(metaStoreMapping.getLatency()).thenReturn(1L); long result = decorator.getLatency(); assertThat(result, is(1L)); }
@Override public long getLatency() { return metaStoreMapping.getLatency(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public long getLatency() { return metaStoreMapping.getLatency(); } }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public long getLatency() { return metaStoreMapping.getLatency(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public long getLatency() { return metaStoreMapping.getLatency(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
MetaStoreMappingDecorator implements MetaStoreMapping { @Override public long getLatency() { return metaStoreMapping.getLatency(); } MetaStoreMappingDecorator(MetaStoreMapping metaStoreMapping); @Override String transformOutboundDatabaseName(String databaseName); @Override List<String> transformOutboundDatabaseNameMultiple(String databaseName); @Override Database transformOutboundDatabase(Database database); @Override String transformInboundDatabaseName(String databaseName); @Override void close(); @Override Iface getClient(); @Override String getDatabasePrefix(); @Override String getMetastoreMappingName(); @Override boolean isAvailable(); @Override MetaStoreMapping checkWritePermissions(String databaseName); @Override void createDatabase(Database database); @Override long getLatency(); }
@Test public void emptyName() { metaStore.setName(" "); Set<ConstraintViolation<T>> violations = validator.validate(metaStore); assertThat(violations.size(), is(1)); }
public void setName(String name) { this.name = name; }
AbstractMetaStore { public void setName(String name) { this.name = name; } }
AbstractMetaStore { public void setName(String name) { this.name = name; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); }
AbstractMetaStore { public void setName(String name) { this.name = name; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
AbstractMetaStore { public void setName(String name) { this.name = name; } AbstractMetaStore(); AbstractMetaStore(String name, String remoteMetaStoreUris, AccessControlType accessControlType); AbstractMetaStore( String name, String remoteMetaStoreUris, AccessControlType accessControlType, List<String> writableDatabaseWhitelist); static FederatedMetaStore newFederatedInstance(String name, String remoteMetaStoreUris); static PrimaryMetaStore newPrimaryInstance( String name, String remoteMetaStoreUris, AccessControlType accessControlType); static PrimaryMetaStore newPrimaryInstance(String name, String remoteMetaStoreUris); String getDatabasePrefix(); void setDatabasePrefix(String databasePrefix); String getName(); void setName(String name); String getRemoteMetaStoreUris(); void setRemoteMetaStoreUris(String remoteMetaStoreUris); MetastoreTunnel getMetastoreTunnel(); void setMetastoreTunnel(MetastoreTunnel metastoreTunnel); ConnectionType getConnectionType(); abstract FederationType getFederationType(); AccessControlType getAccessControlType(); void setAccessControlType(AccessControlType accessControlType); List<String> getWritableDatabaseWhiteList(); void setWritableDatabaseWhiteList(List<String> writableDatabaseWhitelist); long getLatency(); void setLatency(long latency); List<String> getMappedDatabases(); void setMappedDatabases(List<String> mappedDatabases); Map<String, String> getDatabaseNameMapping(); void setDatabaseNameMapping(Map<String, String> databaseNameMapping); @Transient HashBiMap<String, String> getDatabaseNameBiMapping(); @Transient MetaStoreStatus getStatus(); @Transient void setStatus(MetaStoreStatus status); @Override int hashCode(); @Override boolean equals(Object obj); @Override String toString(); }
@Test(expected = IllegalStateException.class) public void test_invalid_xpath_compilation_throws_exception() throws Exception { new SimpleXPathBasedCheck() { XPathExpression failing = getXPathExpression("boolean(a"); @Override public void scanFile(XmlFile file) { } }; }
public XPathExpression getXPathExpression(String expression) { try { return xpath.compile(expression); } catch (XPathExpressionException e) { throw new IllegalStateException(String.format("[%s] Fail to compile XPath expression '%s'.", ruleKey(), expression), e); } }
SimpleXPathBasedCheck extends SonarXmlCheck { public XPathExpression getXPathExpression(String expression) { try { return xpath.compile(expression); } catch (XPathExpressionException e) { throw new IllegalStateException(String.format("[%s] Fail to compile XPath expression '%s'.", ruleKey(), expression), e); } } }
SimpleXPathBasedCheck extends SonarXmlCheck { public XPathExpression getXPathExpression(String expression) { try { return xpath.compile(expression); } catch (XPathExpressionException e) { throw new IllegalStateException(String.format("[%s] Fail to compile XPath expression '%s'.", ruleKey(), expression), e); } } }
SimpleXPathBasedCheck extends SonarXmlCheck { public XPathExpression getXPathExpression(String expression) { try { return xpath.compile(expression); } catch (XPathExpressionException e) { throw new IllegalStateException(String.format("[%s] Fail to compile XPath expression '%s'.", ruleKey(), expression), e); } } XPathExpression getXPathExpression(String expression); @CheckForNull NodeList evaluate(XPathExpression expression, Node node); List<Node> evaluateAsList(XPathExpression expression, Node node); }
SimpleXPathBasedCheck extends SonarXmlCheck { public XPathExpression getXPathExpression(String expression) { try { return xpath.compile(expression); } catch (XPathExpressionException e) { throw new IllegalStateException(String.format("[%s] Fail to compile XPath expression '%s'.", ruleKey(), expression), e); } } XPathExpression getXPathExpression(String expression); @CheckForNull NodeList evaluate(XPathExpression expression, Node node); List<Node> evaluateAsList(XPathExpression expression, Node node); }
@Test public void fails_when_activating_rules_more_than_once() { NewBuiltInQualityProfile newProfile = testContext.createBuiltInQualityProfile(PROFILE_NAME, LANGUAGE); BuiltInQualityProfileJsonLoader.load(newProfile, REPOSITORY_KEY, PROFILE_PATH); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("The rule 'repo-key:S100' is already activated"); BuiltInQualityProfileJsonLoader.load(newProfile, REPOSITORY_KEY, PROFILE_PATH); }
public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
@Test public void fails_when_no_profile_found() { thrown.expect(IllegalStateException.class); thrown.expectMessage("Can't read resource: /wrong/path/Sonar_way_profile.json"); NewBuiltInQualityProfile newProfile = testContext.createBuiltInQualityProfile(PROFILE_NAME, LANGUAGE); BuiltInQualityProfileJsonLoader.load(newProfile, REPOSITORY_KEY, "/wrong/path/Sonar_way_profile.json"); }
public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
@Test public void fails_when_no_rule_keys_in_profile() { thrown.expect(IllegalStateException.class); thrown.expectMessage("missing 'ruleKeys'"); NewBuiltInQualityProfile newProfile = testContext.createBuiltInQualityProfile(PROFILE_NAME, LANGUAGE); BuiltInQualityProfileJsonLoader.load(newProfile, REPOSITORY_KEY, "org/sonarsource/analyzer/commons/Sonar_way_profile_no_rule_keys.json"); }
public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
@Test public void should_activate_hotspots() { NewBuiltInQualityProfile newProfile = testContext.createBuiltInQualityProfile(PROFILE_NAME, LANGUAGE); String profilePath = "org/sonarsource/analyzer/commons/Sonar_way_profile_with_hotspots.json"; BuiltInQualityProfileJsonLoader.load(newProfile, REPOSITORY_KEY, profilePath); newProfile.done(); BuiltInQualityProfile profile = testContext.profile(LANGUAGE, PROFILE_NAME); List<BuiltInActiveRule> activeRules = profile.rules(); assertThat(activeRules).extracting("ruleKey").containsExactlyInAnyOrder("S100", "S110", "S2092"); }
public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
BuiltInQualityProfileJsonLoader { public static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath) { Set<String> activeKeys = loadActiveKeysFromJsonProfile(jsonProfilePath); for (String activeKey : activeKeys) { profile.activateRule(repositoryKey, activeKey); } } private BuiltInQualityProfileJsonLoader(); static void load(NewBuiltInQualityProfile profile, String repositoryKey, String jsonProfilePath); static Set<String> loadActiveKeysFromJsonProfile(String profilePath); }
@Test public void load_rule_S100() throws Exception { @Rule(key = "S100") class TestRule { } ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); RulesDefinition.Rule rule = repository.rule("S100"); assertThat(rule).isNotNull(); assertThat(rule.name()).isEqualTo("Function names should comply with a naming convention"); assertThat(rule.htmlDescription()).isEqualTo("<p>description S100</p>"); assertThat(rule.severity()).isEqualTo("MINOR"); assertThat(rule.type()).isEqualTo(RuleType.CODE_SMELL); assertThat(rule.status()).isEqualTo(RuleStatus.READY); assertThat(rule.tags()).containsExactly("convention"); DebtRemediationFunction remediation = rule.debtRemediationFunction(); assertThat(remediation).isNotNull(); assertThat(remediation.type()).isEqualTo(DebtRemediationFunction.Type.CONSTANT_ISSUE); assertThat(remediation.baseEffort()).isEqualTo("5min"); assertThat(rule.deprecatedRuleKeys()).isEmpty(); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void load_rule_S110() throws Exception { @Rule(key = "S110") class TestRule { } ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); RulesDefinition.Rule rule = repository.rule("S110"); assertThat(rule).isNotNull(); DebtRemediationFunction remediation = rule.debtRemediationFunction(); assertThat(remediation).isNotNull(); assertThat(remediation.type()).isEqualTo(DebtRemediationFunction.Type.LINEAR_OFFSET); assertThat(remediation.baseEffort()).isEqualTo("4h"); assertThat(remediation.gapMultiplier()).isEqualTo("30min"); assertThat(rule.gapDescription()).isEqualTo("Number of parents above the defined threshold"); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void load_rules_key_based() throws Exception { ruleMetadataLoader.addRulesByRuleKey(newRepository, Arrays.asList("S110", "S100")); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); RulesDefinition.Rule ruleS110 = repository.rule("S110"); assertThat(ruleS110).isNotNull(); assertThat(ruleS110.name()).isEqualTo("Inheritance tree of classes should not be too deep"); assertThat(ruleS110.htmlDescription()).isEqualTo("<p>description S110</p>"); RulesDefinition.Rule ruleS100 = repository.rule("S100"); assertThat(ruleS100).isNotNull(); assertThat(ruleS100.name()).isEqualTo("Function names should comply with a naming convention"); assertThat(ruleS100.htmlDescription()).isEqualTo("<p>description S100</p>"); }
public void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys) { for (String ruleKey : ruleKeys) { addRuleByRuleKey(repository, ruleKey); } }
RuleMetadataLoader { public void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys) { for (String ruleKey : ruleKeys) { addRuleByRuleKey(repository, ruleKey); } } }
RuleMetadataLoader { public void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys) { for (String ruleKey : ruleKeys) { addRuleByRuleKey(repository, ruleKey); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys) { for (String ruleKey : ruleKeys) { addRuleByRuleKey(repository, ruleKey); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys) { for (String ruleKey : ruleKeys) { addRuleByRuleKey(repository, ruleKey); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void load_rule_S123() throws Exception { @Rule(key = "S123") class TestRule { } ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); RulesDefinition.Rule rule = repository.rule("S123"); assertThat(rule).isNotNull(); DebtRemediationFunction remediation = rule.debtRemediationFunction(); assertThat(remediation).isNotNull(); assertThat(remediation.type()).isEqualTo(DebtRemediationFunction.Type.LINEAR); assertThat(remediation.gapMultiplier()).isEqualTo("10min"); assertThat(rule.gapDescription()).isNull(); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void load_rule_with_deprecated_key() throws Exception { @Rule(key = "S123") @DeprecatedRuleKey(repositoryKey = "oldRepo", ruleKey = "oldKey") class TestRule { } ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); assertThat(repository.rule("S123").deprecatedRuleKeys()).containsExactlyInAnyOrder(RuleKey.of("oldRepo", "oldKey")); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void load_rule_with_many_deprecated_keys() throws Exception { @Rule(key = "S123") @DeprecatedRuleKey(repositoryKey = "oldRepo1", ruleKey = "oldKey1") @DeprecatedRuleKey(repositoryKey = "oldRepo2", ruleKey = "oldKey2") class TestRule { } ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); assertThat(repository.rule("S123").deprecatedRuleKeys()).containsExactlyInAnyOrder(RuleKey.of("oldRepo1", "oldKey1"), RuleKey.of("oldRepo2", "oldKey2")); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void test_createXMLInputFactory() { assertThat(SafetyFactory.createXMLInputFactory()).isNotNull(); }
@Deprecated public static XMLInputFactory createXMLInputFactory() { return SafeStaxParserFactory.createXMLInputFactory(); }
SafetyFactory { @Deprecated public static XMLInputFactory createXMLInputFactory() { return SafeStaxParserFactory.createXMLInputFactory(); } }
SafetyFactory { @Deprecated public static XMLInputFactory createXMLInputFactory() { return SafeStaxParserFactory.createXMLInputFactory(); } private SafetyFactory(); }
SafetyFactory { @Deprecated public static XMLInputFactory createXMLInputFactory() { return SafeStaxParserFactory.createXMLInputFactory(); } private SafetyFactory(); @Deprecated static XMLInputFactory createXMLInputFactory(); @Deprecated static DocumentBuilder createDocumentBuilder(boolean namespaceAware); }
SafetyFactory { @Deprecated public static XMLInputFactory createXMLInputFactory() { return SafeStaxParserFactory.createXMLInputFactory(); } private SafetyFactory(); @Deprecated static XMLInputFactory createXMLInputFactory(); @Deprecated static DocumentBuilder createDocumentBuilder(boolean namespaceAware); }
@Test public void load_rule_with_deprecated_key_without_repo() throws Exception { @Rule(key = "S123") @DeprecatedRuleKey(ruleKey = "oldKey") class TestRule { } ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); assertThat(repository.rule("S123").deprecatedRuleKeys()).containsOnly(RuleKey.of(RULE_REPOSITORY_KEY, "oldKey")); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void load_rule_list() throws Exception { @Rule(key = "S100") class RuleA { } @Rule(key = "S110") class RuleB { } ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Arrays.asList(RuleA.class, RuleB.class)); newRepository.done(); RulesDefinition.Repository repository = context.repository(RULE_REPOSITORY_KEY); assertThat(repository.rule("S100")).isNotNull(); assertThat(repository.rule("S110")).isNotNull(); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void no_profile() throws Exception { @Rule(key = "S100") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER); ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Rule rule = context.repository(RULE_REPOSITORY_KEY).rule("S100"); assertThat(rule.activatedByDefault()).isFalse(); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void rule_not_in_default_profile() throws Exception { @Rule(key = "S123") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER, DEFAULT_PROFILE_PATH); ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Rule rule = context.repository(RULE_REPOSITORY_KEY).rule("S123"); assertThat(rule.activatedByDefault()).isFalse(); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void rule_in_default_profile() throws Exception { @Rule(key = "S100") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER, DEFAULT_PROFILE_PATH); ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Rule rule = context.repository(RULE_REPOSITORY_KEY).rule("S100"); assertThat(rule.activatedByDefault()).isTrue(); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void getStringArray() throws Exception { Map<String, Object> map = Collections.singletonMap("key", Arrays.asList("x", "y")); assertThat(RuleMetadataLoader.getStringArray(map, "key")).containsExactly("x", "y"); }
static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test(expected = IllegalStateException.class) public void getStringArray_with_invalid_type() throws Exception { Map<String, Object> map = Collections.singletonMap("key", "x"); RuleMetadataLoader.getStringArray(map, "key"); }
static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test(expected = IllegalStateException.class) public void getStringArray_without_property() throws Exception { RuleMetadataLoader.getStringArray(Collections.emptyMap(), "key"); }
static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { static String[] getStringArray(Map<String, Object> map, String propertyName) { Object propertyValue = map.get(propertyName); if (!(propertyValue instanceof List)) { throw new IllegalStateException(String.format(INVALID_PROPERTY_MESSAGE, propertyName)); } return ((List<String>) propertyValue).toArray(new String[0]); } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void test_security_hotspot() { @Rule(key = "S2092") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER, DEFAULT_PROFILE_PATH); ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Rule rule = context.repository(RULE_REPOSITORY_KEY).rule("S2092"); assertThat(rule.type()).isEqualTo(RuleType.SECURITY_HOTSPOT); assertThat(rule.securityStandards()).containsExactlyInAnyOrder("cwe:311", "cwe:315", "cwe:614", "owaspTop10:a2", "owaspTop10:a3"); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void test_security_standards() { @Rule(key = "S112") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER, DEFAULT_PROFILE_PATH); ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); newRepository.done(); RulesDefinition.Rule rule = context.repository(RULE_REPOSITORY_KEY).rule("S112"); assertThat(rule.type()).isEqualTo(RuleType.CODE_SMELL); assertThat(rule.securityStandards()).containsExactlyInAnyOrder("cwe:397"); }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void test_createDocumentBuilder() { assertThat(SafetyFactory.createDocumentBuilder(true)).isNotNull(); assertThat(SafetyFactory.createDocumentBuilder(false)).isNotNull(); }
@Deprecated public static DocumentBuilder createDocumentBuilder(boolean namespaceAware) { return SafeDomParserFactory.createDocumentBuilder(namespaceAware); }
SafetyFactory { @Deprecated public static DocumentBuilder createDocumentBuilder(boolean namespaceAware) { return SafeDomParserFactory.createDocumentBuilder(namespaceAware); } }
SafetyFactory { @Deprecated public static DocumentBuilder createDocumentBuilder(boolean namespaceAware) { return SafeDomParserFactory.createDocumentBuilder(namespaceAware); } private SafetyFactory(); }
SafetyFactory { @Deprecated public static DocumentBuilder createDocumentBuilder(boolean namespaceAware) { return SafeDomParserFactory.createDocumentBuilder(namespaceAware); } private SafetyFactory(); @Deprecated static XMLInputFactory createXMLInputFactory(); @Deprecated static DocumentBuilder createDocumentBuilder(boolean namespaceAware); }
SafetyFactory { @Deprecated public static DocumentBuilder createDocumentBuilder(boolean namespaceAware) { return SafeDomParserFactory.createDocumentBuilder(namespaceAware); } private SafetyFactory(); @Deprecated static XMLInputFactory createXMLInputFactory(); @Deprecated static DocumentBuilder createDocumentBuilder(boolean namespaceAware); }
@Test public void test_invalid_json_string() { @Rule(key = "rule_missing_title") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER); try { ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); fail("Should have failed"); } catch (Exception e) { assertThat(e).isInstanceOf(IllegalStateException.class); assertThat(e).hasMessage("Invalid property: title"); } }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void test_invalid_json_string_array() { @Rule(key = "rule_wrong_tag") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER); try { ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); fail("Should have failed"); } catch (Exception e) { assertThat(e).isInstanceOf(IllegalStateException.class); assertThat(e).hasMessage("Invalid property: tags"); } }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void test_invalid_json_int_array() { @Rule(key = "rule_wrong_cwe") class TestRule { } ruleMetadataLoader = new RuleMetadataLoader(RESOURCE_FOLDER, "org/sonarsource/analyzer/commons/profile_wrong_cwe.json"); try { ruleMetadataLoader.addRulesByAnnotatedClass(newRepository, Collections.singletonList(TestRule.class)); fail("Should have failed"); } catch (Exception e) { assertThat(e).isInstanceOf(IllegalStateException.class); assertThat(e).hasMessage("Invalid property: CWE"); } }
public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
RuleMetadataLoader { public void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses) { for (Class<?> ruleClass : ruleClasses) { addRuleByAnnotatedClass(repository, ruleClass); } } RuleMetadataLoader(String resourceFolder); RuleMetadataLoader(String resourceFolder, String defaultProfilePath); private RuleMetadataLoader(String resourceFolder, Set<String> activatedByDefault); void addRulesByAnnotatedClass(NewRepository repository, List<Class<?>> ruleClasses); void addRulesByRuleKey(NewRepository repository, List<String> ruleKeys); }
@Test public void read_resource() throws Exception { assertThat(Resources.toString("org/sonarsource/analyzer/commons/ResourcesTest.txt", UTF_8)).isEqualTo("hello\n"); }
/**
 * Reads a classpath resource fully and decodes its bytes with the given charset.
 *
 * @param path    classpath-relative location of the resource
 * @param charset charset used to decode the raw bytes
 * @return the full content of the resource as a string
 * @throws IOException if the resource is absent from the classpath or cannot be read
 */
static String toString(String path, Charset charset) throws IOException {
  try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) {
    if (input == null) {
      throw new IOException("Resource not found in the classpath: " + path);
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buffer = new byte[BUFFER_SIZE];
    int read;
    // Drain the stream chunk by chunk until end-of-stream.
    while ((read = input.read(buffer)) != -1) {
      out.write(buffer, 0, read);
    }
    return new String(out.toByteArray(), charset);
  }
}
Resources {

  /**
   * Reads a classpath resource fully and decodes its bytes with the given charset.
   *
   * @param path    classpath-relative location of the resource
   * @param charset charset used to decode the raw bytes
   * @return the full content of the resource as a string
   * @throws IOException if the resource is absent from the classpath or cannot be read
   */
  static String toString(String path, Charset charset) throws IOException {
    try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) {
      if (input == null) {
        throw new IOException("Resource not found in the classpath: " + path);
      }
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      byte[] buffer = new byte[BUFFER_SIZE];
      int read;
      while ((read = input.read(buffer)) != -1) {
        out.write(buffer, 0, read);
      }
      return new String(out.toByteArray(), charset);
    }
  }
}
Resources { static String toString(String path, Charset charset) throws IOException { try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) { if (input == null) { throw new IOException("Resource not found in the classpath: " + path); } ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; for (int read = input.read(buffer); read != -1; read = input.read(buffer)) { out.write(buffer, 0, read); } return new String(out.toByteArray(), charset); } } private Resources(); }
Resources { static String toString(String path, Charset charset) throws IOException { try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) { if (input == null) { throw new IOException("Resource not found in the classpath: " + path); } ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; for (int read = input.read(buffer); read != -1; read = input.read(buffer)) { out.write(buffer, 0, read); } return new String(out.toByteArray(), charset); } } private Resources(); }
Resources { static String toString(String path, Charset charset) throws IOException { try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) { if (input == null) { throw new IOException("Resource not found in the classpath: " + path); } ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; for (int read = input.read(buffer); read != -1; read = input.read(buffer)) { out.write(buffer, 0, read); } return new String(out.toByteArray(), charset); } } private Resources(); }
// A path that matches no classpath resource must surface as IOException, never as a null/empty result.
@Test(expected = IOException.class) public void read_invalid_resource() throws Exception { Resources.toString("invalid/path.txt", UTF_8); }
/**
 * Reads a classpath resource fully and decodes its bytes with the given charset.
 *
 * @param path    classpath-relative location of the resource
 * @param charset charset used to decode the raw bytes
 * @return the full content of the resource as a string
 * @throws IOException if the resource is absent from the classpath or cannot be read
 */
static String toString(String path, Charset charset) throws IOException {
  try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) {
    if (input == null) {
      throw new IOException("Resource not found in the classpath: " + path);
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buffer = new byte[BUFFER_SIZE];
    int read;
    // Drain the stream chunk by chunk until end-of-stream.
    while ((read = input.read(buffer)) != -1) {
      out.write(buffer, 0, read);
    }
    return new String(out.toByteArray(), charset);
  }
}
Resources {

  /**
   * Reads a classpath resource fully and decodes its bytes with the given charset.
   *
   * @param path    classpath-relative location of the resource
   * @param charset charset used to decode the raw bytes
   * @return the full content of the resource as a string
   * @throws IOException if the resource is absent from the classpath or cannot be read
   */
  static String toString(String path, Charset charset) throws IOException {
    try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) {
      if (input == null) {
        throw new IOException("Resource not found in the classpath: " + path);
      }
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      byte[] buffer = new byte[BUFFER_SIZE];
      int read;
      while ((read = input.read(buffer)) != -1) {
        out.write(buffer, 0, read);
      }
      return new String(out.toByteArray(), charset);
    }
  }
}
Resources { static String toString(String path, Charset charset) throws IOException { try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) { if (input == null) { throw new IOException("Resource not found in the classpath: " + path); } ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; for (int read = input.read(buffer); read != -1; read = input.read(buffer)) { out.write(buffer, 0, read); } return new String(out.toByteArray(), charset); } } private Resources(); }
Resources { static String toString(String path, Charset charset) throws IOException { try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) { if (input == null) { throw new IOException("Resource not found in the classpath: " + path); } ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; for (int read = input.read(buffer); read != -1; read = input.read(buffer)) { out.write(buffer, 0, read); } return new String(out.toByteArray(), charset); } } private Resources(); }
Resources { static String toString(String path, Charset charset) throws IOException { try (InputStream input = Resources.class.getClassLoader().getResourceAsStream(path)) { if (input == null) { throw new IOException("Resource not found in the classpath: " + path); } ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buffer = new byte[BUFFER_SIZE]; for (int read = input.read(buffer); read != -1; read = input.read(buffer)) { out.write(buffer, 0, read); } return new String(out.toByteArray(), charset); } } private Resources(); }