package com.github.freeacs.web.app.page.file;
import com.github.freeacs.dbi.File;
import com.github.freeacs.dbi.FileType;
import com.github.freeacs.web.app.input.DropDownSingleSelect;
import java.util.List;
public class SoftwarePage extends FilePage {
@Override
public DropDownSingleSelect<FileType> getTypeSelect(FileType filetype) {
return getTypeSelect(null, FileType.SOFTWARE);
}
@Override
public List<File> getFiles() {
return getFiles(FileType.SOFTWARE);
}
}
|
package com.tarena.poll.dao.impl;
import java.util.List;
import org.hibernate.Session;
import org.hibernate.Transaction;
import com.tarena.poll.dao.DaoException;
import com.tarena.poll.dao.IClassDao;
import com.tarena.poll.entity.TClass;
import com.tarena.poll.entity.TClassType;
import com.tarena.poll.util.HBDaoFactory;
/**
* Originally written for demonstration purposes.
* @author datong
*/
public class ClassDaoImpl2 implements IClassDao{
@Override
public void saveClass(TClass clazz) throws DaoException {
Session s =HBDaoFactory.getSession();
Transaction ts =s.getTransaction();
try {
ts.begin();
s.save(clazz);
ts.commit();
} catch (Exception e) {
ts.rollback();
e.printStackTrace();
}finally{
s.close();
}
}
@Override
public void updateClass(TClass clazz) throws DaoException {
Session s =HBDaoFactory.getSession();
Transaction ts =s.getTransaction();
try {
ts.begin();
s.update(clazz);
ts.commit();
} catch (Exception e) {
ts.rollback();
e.printStackTrace();
}finally{
s.close();
}
}
@Override
public void deleteClass(int id) throws DaoException {
TClass c = findById(id); // reuse this DAO instance rather than constructing a new one
Session s =HBDaoFactory.getSession();
Transaction ts =s.getTransaction();
try {
ts.begin();
s.delete(c);
ts.commit();
} catch (Exception e) {
ts.rollback();
e.printStackTrace();
}finally{
s.close();
}
}
@Override
public TClass findById(int id) throws DaoException {
Session s =HBDaoFactory.getSession();
String hql="from TClass where id=:id";
TClass c=(TClass) s.createQuery(hql).setParameter("id", id).uniqueResult();
s.close();
return c;
}
@Override
public List findByClassName(String className) throws DaoException {
Session s =HBDaoFactory.getSession();
String hql="from TClass where className=:className";
List<TClass> classes=s.createQuery(hql).setParameter("className", className).list();
s.close();
return classes;
}
@Override
public List findAll() throws DaoException {
Session s =HBDaoFactory.getSession();
String hql="from TClass";
List<TClass> classes=s.createQuery(hql).list();
s.close();
return classes;
}
@Override
public List findByType(TClassType type) throws DaoException {
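// Not yet implemented: lookup by class type is unsupported, so callers currently get null.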
return null;
}
@Override
public List<TClass> findByStatus(int status) throws DaoException {
Session s =HBDaoFactory.getSession();
String hql="from TClass where status=:status";
List<TClass> classes=s.createQuery(hql).setParameter("status", status).list();
s.close();
return classes;
}
}
|
import { ReviewType } from './Post';
export enum ReferStoreType {
ReviewItemRefer = 'ReviewItemRefer',
SharerRefer = 'SharerRefer',
}
export enum ReferPriority {
High = 'HIGH',
Low = 'LOW',
}
type BaseRefer = {
priority: ReferPriority;
itemId: number;
targetId: number;
expireExpectAt: string;
};
export type ReviewItemRefer = BaseRefer & {
reviewType: ReviewType;
};
export type SharerRefer = BaseRefer;
export type ReferStore = {
ReviewItemRefer: ReviewItemRefer[];
SharerRefer: SharerRefer[];
};
|
// List will list all the requests received
func (o *OptimisationRequest) List(ctx context.Context, log *log.Logger, w http.ResponseWriter, r *http.Request, params map[string]string) error {
dbConn := o.MasterDB.Copy()
defer dbConn.Close()
requests, err := optimisationRequest.List(ctx, dbConn)
if err = translate(err); err != nil {
return errors.Wrap(err, "listing optimisation requests")
}
web.Respond(ctx, log, w, requests, http.StatusOK)
return nil
}
|
// bannerxu/boot-learning: spring-boot-mybatis-plus/src/main/java/top/banner/service/impl/UserServiceImpl.java
package top.banner.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.springframework.stereotype.Service;
import top.banner.entity.User;
import top.banner.mapper.UserMapper;
import top.banner.service.UserService;
/**
* @author XGL
*/
@Service
public class UserServiceImpl extends ServiceImpl<UserMapper, User> implements UserService {
}
|
/// Create a new [GdbTriager] using the built-in GDBTriage script
pub fn new(gdb_path: String) -> GdbTriager {
let mut triage_script =
GdbTriageScript::Internal(tempfile::Builder::new().suffix(".py").tempfile().unwrap());
if let GdbTriageScript::Internal(ref mut tf) = triage_script {
tf.write_all(INTERNAL_TRIAGE_SCRIPT).unwrap();
} else {
panic!("Unsupported script path");
}
GdbTriager {
triage_script,
gdb_path,
}
}
|
def imul_group_word(self, v1, g):
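# In-place multiplication of v1 by the group word g: repeatedly scan a maximal
# prefix of g's atom encoding into the word buffer (nn -> nnw), apply it to v1,
# then apply a single leftover atom directly before rescanning the remainder.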
assert isinstance(g, AbstractGroupWord) and g.group.is_mmgroup
a = g.mmdata
nn = np.zeros(5, dtype = np.uint32)
nnw = np.zeros(5, dtype = np.uint32)
buf = np.zeros(v1.data[255].ops.MMV_INTS, dtype = np.uint64)
while len(a):
mm_group_n_clear(nn)
i = mm_group_n_mul_word_scan(nn, a, len(a))
length = mm_group_n_to_word(nn, nnw)
self._imul_word(v1, nnw[:length], buf)
a = a[i:]
if len(a):
self._imul_word(v1, a[:1], buf)
a = a[1:]
del buf
v1.expanded = False
return v1
|
/**
* A MetricReporter that does nothing.
* <p>
* Can be used as an alternative to turning off metrics reporting.
* </p>
*/
public class NoopReporter implements MetricReporter {
@Override
public void report(ReportMetrics reportMetrics) {
// do nothing
}
@Override
public void cleanup() {
// do nothing
}
}
|
def check_convert_string(obj, name=None,
no_leading_trailing_whitespace=True,
no_whitespace=False,
no_newline=True,
as_tag=False,
min_len=1,
max_len=0):
if not name:
name = 'Argument'
obj = ensure_unicode(obj, name=name)
if no_whitespace:
if _PATTERN_WHITESPACE.match(obj):
raise ValueError('%s cannot contain whitespace' % name)
elif no_leading_trailing_whitespace and _PATTERN_LEAD_TRAIL_WHITESPACE.match(obj):
raise ValueError('%s contains leading/trailing whitespace' % name)
if (min_len and len(obj) < min_len) or (max_len and len(obj) > max_len):
raise ValueError('%s too short/long (%d/%d)' % (name, min_len, max_len))
if as_tag:
if not _PATTERN_TAG.match(obj):
raise ValueError('%s can only contain alphanumeric (unicode) characters, numbers and the underscore'
% name)
elif no_newline and '\n' in obj:
raise ValueError('%s cannot contain line breaks' % name)
return obj
|
package com.bugsnag.android;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import java.util.Locale;
import java.util.Collections;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
enum DeliveryStyle {
SAME_THREAD,
ASYNC,
ASYNC_WITH_CACHE
}
/**
* A Bugsnag Client instance allows you to use Bugsnag in your Android app.
* Typically you'd instead use the static access provided in the Bugsnag class.
* <p/>
* Example usage:
* <p/>
* Client client = new Client(this, "your-api-key");
* client.notify(new RuntimeException("something broke!"));
*
* @see Bugsnag
*/
public class Client extends Observable implements Observer {
private static final boolean BLOCKING = true;
private static final String SHARED_PREF_KEY = "com.bugsnag.android";
private static final String USER_ID_KEY = "user.id";
private static final String USER_NAME_KEY = "user.name";
private static final String USER_EMAIL_KEY = "user.email";
protected final Configuration config;
private final Context appContext;
protected final AppData appData;
protected final DeviceData deviceData;
final Breadcrumbs breadcrumbs;
protected final User user = new User();
protected final ErrorStore errorStore;
/**
* Initialize a Bugsnag client
*
* @param androidContext an Android context, usually <code>this</code>
*/
public Client(@NonNull Context androidContext) {
this(androidContext, null, true);
}
/**
* Initialize a Bugsnag client
*
* @param androidContext an Android context, usually <code>this</code>
* @param apiKey your Bugsnag API key from your Bugsnag dashboard
*/
public Client(@NonNull Context androidContext, @Nullable String apiKey) {
this(androidContext, apiKey, true);
}
/**
* Initialize a Bugsnag client
*
* @param androidContext an Android context, usually <code>this</code>
* @param apiKey your Bugsnag API key from your Bugsnag dashboard
* @param enableExceptionHandler should we automatically handle uncaught exceptions?
*/
public Client(@NonNull Context androidContext, @Nullable String apiKey, boolean enableExceptionHandler) {
this(androidContext, createNewConfiguration(androidContext, apiKey, enableExceptionHandler));
}
/**
* Initialize a Bugsnag client
*
* @param androidContext an Android context, usually <code>this</code>
* @param configuration a configuration for the Client
*/
public Client(@NonNull Context androidContext, @NonNull Configuration configuration) {
appContext = androidContext.getApplicationContext();
config = configuration;
String buildUUID = null;
try {
ApplicationInfo ai = appContext.getPackageManager().getApplicationInfo(appContext.getPackageName(), PackageManager.GET_META_DATA);
buildUUID = ai.metaData.getString("com.bugsnag.android.BUILD_UUID");
} catch (Exception ignore) {
}
if (buildUUID != null) {
config.setBuildUUID(buildUUID);
}
// Set up and collect constant app and device diagnostics
appData = new AppData(appContext, config);
deviceData = new DeviceData(appContext);
AppState.init();
// Set up breadcrumbs
breadcrumbs = new Breadcrumbs();
// Set sensible defaults
setProjectPackages(appContext.getPackageName());
if (config.getPersistUserBetweenSessions()) {
// Check to see if a user was stored in the SharedPreferences
SharedPreferences sharedPref = appContext.getSharedPreferences(SHARED_PREF_KEY, Context.MODE_PRIVATE);
user.setId(sharedPref.getString(USER_ID_KEY, deviceData.getUserId()));
user.setName(sharedPref.getString(USER_NAME_KEY, null));
user.setEmail(sharedPref.getString(USER_EMAIL_KEY, null));
} else {
user.setId(deviceData.getUserId());
}
// Create the error store that is used in the exception handler
errorStore = new ErrorStore(config, appContext);
// Install a default exception handler with this client
if (config.getEnableExceptionHandler()) {
enableExceptionHandler();
}
config.addObserver(this);
// Flush any on-disk errors
errorStore.flush();
}
public void notifyBugsnagObservers(NotifyType type) {
setChanged();
super.notifyObservers(type.getValue());
}
@Override
public void update(Observable o, Object arg) {
if (arg instanceof Integer) {
NotifyType type = NotifyType.fromInt((Integer) arg);
if (type != null) {
notifyBugsnagObservers(type);
}
}
}
/**
* Creates a new configuration object based on the provided parameters.
* The API key will be read from the manifest file if it is not provided.
*
* @param androidContext The context of the application
* @param apiKey The API key to use
* @param enableExceptionHandler should we automatically handle uncaught exceptions?
* @return The created config
*/
private static Configuration createNewConfiguration(@NonNull Context androidContext, String apiKey, boolean enableExceptionHandler) {
Context appContext = androidContext.getApplicationContext();
// Attempt to load API key from AndroidManifest.xml if not passed in
if (TextUtils.isEmpty(apiKey)) {
try {
ApplicationInfo ai = appContext.getPackageManager().getApplicationInfo(appContext.getPackageName(), PackageManager.GET_META_DATA);
apiKey = ai.metaData.getString("com.bugsnag.android.API_KEY");
} catch (Exception ignore) {
}
}
if (apiKey == null) {
throw new NullPointerException("You must provide a Bugsnag API key");
}
// Build a configuration object
Configuration newConfig = new Configuration(apiKey);
newConfig.setEnableExceptionHandler(enableExceptionHandler);
return newConfig;
}
/**
* Set the application version sent to Bugsnag. By default we'll pull this
* from your AndroidManifest.xml
*
* @param appVersion the app version to send
*/
public void setAppVersion(String appVersion) {
config.setAppVersion(appVersion);
}
/**
* Gets the context to be sent to Bugsnag.
*
* @return Context
*/
public String getContext() {
return config.getContext();
}
/**
* Set the context sent to Bugsnag. By default we'll attempt to detect the
* name of the top-most activity at the time of a report, and use this
* as the context, but sometimes this is not possible.
*
* @param context set what was happening at the time of a crash
*/
public void setContext(String context) {
config.setContext(context);
}
/**
* Set the endpoint to send data to. By default we'll send reports to
* the standard https://notify.bugsnag.com endpoint, but you can override
* this if you are using Bugsnag Enterprise to point to your own Bugsnag
* endpoint.
*
* @param endpoint the custom endpoint to send report to
*/
public void setEndpoint(String endpoint) {
config.setEndpoint(endpoint);
}
/**
* Set the buildUUID to your own value. This is used to identify proguard
* mapping files in the case that you publish multiple different apps with
* the same appId and versionCode. The default value is read from the
* com.bugsnag.android.BUILD_UUID meta-data field in your app manifest.
*
* @param buildUUID the buildUUID.
*/
public void setBuildUUID(final String buildUUID) {
config.setBuildUUID(buildUUID);
}
/**
* Set which keys should be filtered when sending metaData to Bugsnag.
* Use this when you want to ensure sensitive information, such as passwords
* or credit card information is stripped from metaData you send to Bugsnag.
* Any keys in metaData which contain these strings will be marked as
* [FILTERED] when sent to Bugsnag.
* <p/>
* For example:
* <p/>
* client.setFilters("password", "<PASSWORD>");
*
* @param filters a list of keys to filter from metaData
*/
public void setFilters(String... filters) {
config.setFilters(filters);
}
/**
* Set which exception classes should be ignored (not sent) by Bugsnag.
* <p/>
* For example:
* <p/>
* client.setIgnoreClasses("java.lang.RuntimeException");
*
* @param ignoreClasses a list of exception classes to ignore
*/
public void setIgnoreClasses(String... ignoreClasses) {
config.setIgnoreClasses(ignoreClasses);
}
/**
* Set for which releaseStages errors should be sent to Bugsnag.
* Use this to stop errors from development builds being sent.
* <p/>
* For example:
* <p/>
* client.setNotifyReleaseStages("production");
*
* @param notifyReleaseStages a list of releaseStages to notify for
* @see #setReleaseStage
*/
public void setNotifyReleaseStages(String... notifyReleaseStages) {
config.setNotifyReleaseStages(notifyReleaseStages);
}
/**
* Set which packages should be considered part of your application.
* Bugsnag uses this to help with error grouping, and stacktrace display.
* <p/>
* For example:
* <p/>
* client.setProjectPackages("com.example.myapp");
* <p/>
* By default, we'll mark the current package name as part of your app.
*
* @param projectPackages a list of package names
*/
public void setProjectPackages(String... projectPackages) {
config.setProjectPackages(projectPackages);
}
/**
* Set the current "release stage" of your application.
* By default, we'll set this to "development" for debug builds and
* "production" for non-debug builds.
*
* @param releaseStage the release stage of the app
* @see #setNotifyReleaseStages
*/
public void setReleaseStage(String releaseStage) {
config.setReleaseStage(releaseStage);
}
/**
* Set whether to send thread-state with report.
* By default, this will be true.
*
* @param sendThreads should we send thread-state with report?
*/
public void setSendThreads(boolean sendThreads) {
config.setSendThreads(sendThreads);
}
/**
* Set details of the user currently using your application.
* You can search for this information in your Bugsnag dashboard.
* <p/>
* For example:
* <p/>
* client.setUser("12345", "<EMAIL>", "<NAME>");
*
* @param id a unique identifier of the current user (defaults to a unique id)
* @param email the email address of the current user
* @param name the name of the current user
*/
public void setUser(String id, String email, String name) {
setUserId(id);
setUserEmail(email);
setUserName(name);
}
/**
* Removes the current user data and sets it back to defaults
*/
public void clearUser() {
user.setId(deviceData.getUserId());
user.setEmail(null);
user.setName(null);
SharedPreferences sharedPref = appContext.getSharedPreferences(SHARED_PREF_KEY, Context.MODE_PRIVATE);
sharedPref.edit()
.remove(USER_ID_KEY)
.remove(USER_EMAIL_KEY)
.remove(USER_NAME_KEY)
.commit();
notifyBugsnagObservers(NotifyType.USER);
}
/**
* Set a unique identifier for the user currently using your application.
* By default, this will be an automatically generated unique id
* You can search for this information in your Bugsnag dashboard.
*
* @param id a unique identifier of the current user
*/
public void setUserId(String id) {
setUserId(id, true);
}
/**
* Sets the user ID with the option to not notify any NDK components of the change
*
* @param id a unique identifier of the current user
* @param notify whether or not to notify NDK components
*/
void setUserId(String id, boolean notify) {
user.setId(id);
if (config.getPersistUserBetweenSessions()) {
storeInSharedPrefs(USER_ID_KEY, id);
}
if (notify) {
notifyBugsnagObservers(NotifyType.USER);
}
}
/**
* Set the email address of the current user.
* You can search for this information in your Bugsnag dashboard.
*
* @param email the email address of the current user
*/
public void setUserEmail(String email) {
setUserEmail(email, true);
}
/**
* Sets the user email with the option to not notify any NDK components of the change
*
* @param email the email address of the current user
* @param notify whether or not to notify NDK components
*/
void setUserEmail(String email, boolean notify) {
user.setEmail(email);
if (config.getPersistUserBetweenSessions()) {
storeInSharedPrefs(USER_EMAIL_KEY, email);
}
if (notify) {
notifyBugsnagObservers(NotifyType.USER);
}
}
/**
* Set the name of the current user.
* You can search for this information in your Bugsnag dashboard.
*
* @param name the name of the current user
*/
public void setUserName(String name) {
setUserName(name, true);
}
/**
* Sets the user name with the option to not notify any NDK components of the change
*
* @param name the name of the current user
* @param notify whether or not to notify NDK components
*/
void setUserName(String name, boolean notify) {
user.setName(name);
if (config.getPersistUserBetweenSessions()) {
storeInSharedPrefs(USER_NAME_KEY, name);
}
if (notify) {
notifyBugsnagObservers(NotifyType.USER);
}
}
/**
* Add a "before notify" callback, to execute code before every
* report to Bugsnag.
* <p/>
* You can use this to add or modify information attached to an error
* before it is sent to your dashboard. You can also return
* <code>false</code> from any callback to halt execution.
* <p/>
* For example:
* <p/>
* client.beforeNotify(new BeforeNotify() {
* public boolean run(Error error) {
* error.setSeverity(Severity.INFO);
* return true;
* }
* })
*
* @param beforeNotify a callback to run before sending errors to Bugsnag
* @see BeforeNotify
*/
public void beforeNotify(BeforeNotify beforeNotify) {
config.beforeNotify(beforeNotify);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
*/
public void notify(Throwable exception) {
Error error = new Error(config, exception);
notify(error, !BLOCKING);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
*/
public void notifyBlocking(Throwable exception) {
Error error = new Error(config, exception);
notify(error, BLOCKING);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param callback callback invoked on the generated error report for
* additional modification
*/
public void notify(Throwable exception, Callback callback) {
Error error = new Error(config, exception);
notify(error, DeliveryStyle.ASYNC, callback);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param callback callback invoked on the generated error report for
* additional modification
*/
public void notifyBlocking(Throwable exception, Callback callback) {
Error error = new Error(config, exception);
notify(error, DeliveryStyle.SAME_THREAD, callback);
}
/**
* Notify Bugsnag of an error
*
* @param name the error name or class
* @param message the error message
* @param stacktrace the stackframes associated with the error
* @param callback callback invoked on the generated error report for
* additional modification
*/
public void notify(String name, String message, StackTraceElement[] stacktrace, Callback callback) {
Error error = new Error(config, name, message, stacktrace);
notify(error, DeliveryStyle.ASYNC, callback);
}
/**
* Notify Bugsnag of an error
*
* @param name the error name or class
* @param message the error message
* @param stacktrace the stackframes associated with the error
* @param callback callback invoked on the generated error report for
* additional modification
*/
public void notifyBlocking(String name, String message, StackTraceElement[] stacktrace, Callback callback) {
Error error = new Error(config, name, message, stacktrace);
notify(error, DeliveryStyle.SAME_THREAD, callback);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
*/
public void notify(Throwable exception, Severity severity) {
Error error = new Error(config, exception);
error.setSeverity(severity);
notify(error, !BLOCKING);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
*/
public void notifyBlocking(Throwable exception, Severity severity) {
Error error = new Error(config, exception);
error.setSeverity(severity);
notify(error, BLOCKING);
}
/**
* Add diagnostic information to every error report.
* Diagnostic information is collected in "tabs" on your dashboard.
* <p/>
* For example:
* <p/>
* client.addToTab("account", "name", "<NAME>.");
* client.addToTab("account", "payingCustomer", true);
*
* @param tab the dashboard tab to add diagnostic data to
* @param key the name of the diagnostic information
* @param value the contents of the diagnostic information
*/
public void addToTab(String tab, String key, Object value) {
config.getMetaData().addToTab(tab, key, value);
}
/**
* Remove a tab of app-wide diagnostic information
*
* @param tabName the dashboard tab to remove diagnostic data from
*/
public void clearTab(String tabName) {
config.getMetaData().clearTab(tabName);
}
/**
* Get the global diagnostic information currently stored in MetaData.
*
* @see MetaData
*/
public MetaData getMetaData() {
return config.getMetaData();
}
/**
* Set the global diagnostic information to be sent with every error.
*
* @see MetaData
*/
public void setMetaData(MetaData metaData) {
config.setMetaData(metaData);
}
/**
* Leave a "breadcrumb" log message, representing an action that occurred
* in your app, to aid with debugging.
*
* @param breadcrumb the log message to leave (max 140 chars)
*/
public void leaveBreadcrumb(String breadcrumb) {
breadcrumbs.add(breadcrumb);
notifyBugsnagObservers(NotifyType.BREADCRUMB);
}
public void leaveBreadcrumb(String name, BreadcrumbType type, Map<String, String> metadata) {
leaveBreadcrumb(name, type, metadata, true);
}
void leaveBreadcrumb(String name,
BreadcrumbType type,
Map<String, String> metadata,
boolean notify) {
breadcrumbs.add(name, type, metadata);
if (notify) {
notifyBugsnagObservers(NotifyType.BREADCRUMB);
}
}
/**
* Set the maximum number of breadcrumbs to keep and send to Bugsnag.
* By default, we'll keep and send the 20 most recent breadcrumb log
* messages.
*
* @param numBreadcrumbs number of breadcrumb log messages to send
*/
public void setMaxBreadcrumbs(int numBreadcrumbs) {
breadcrumbs.setSize(numBreadcrumbs);
}
/**
* Clear any breadcrumbs that have been left so far.
*/
public void clearBreadcrumbs() {
breadcrumbs.clear();
notifyBugsnagObservers(NotifyType.BREADCRUMB);
}
/**
* Enable automatic reporting of unhandled exceptions.
* By default, this is automatically enabled in the constructor.
*/
public void enableExceptionHandler() {
ExceptionHandler.enable(this);
}
/**
* Disable automatic reporting of unhandled exceptions.
*/
public void disableExceptionHandler() {
ExceptionHandler.disable(this);
}
private void notify(Error error, boolean blocking) {
DeliveryStyle style = blocking ? DeliveryStyle.SAME_THREAD : DeliveryStyle.ASYNC;
notify(error, style, null);
}
private void notify(Error error, DeliveryStyle style, Callback callback) {
// Don't notify if this error class should be ignored
if (error.shouldIgnoreClass()) {
return;
}
// Don't notify unless releaseStage is in notifyReleaseStages
if (!config.shouldNotifyForReleaseStage(appData.getReleaseStage())) {
return;
}
// Capture the state of the app and device and attach diagnostics to the error
error.setAppData(appData);
error.setDeviceData(deviceData);
error.setAppState(new AppState(appContext));
error.setDeviceState(new DeviceState(appContext));
// Attach breadcrumbs to the error
error.setBreadcrumbs(breadcrumbs);
// Attach user info to the error
error.setUser(user);
// Run beforeNotify tasks, don't notify if any return true
if (!runBeforeNotifyTasks(error)) {
Logger.info("Skipping notification - beforeNotify task returned false");
return;
}
// Build the report
Report report = new Report(config.getApiKey(), error);
if (callback != null) {
callback.beforeNotify(report);
}
switch (style) {
case SAME_THREAD:
deliver(report, error);
break;
case ASYNC:
final Report finalReport = report;
final Error finalError = error;
// Attempt to send the report in the background
Async.run(new Runnable() {
@Override
public void run() {
deliver(finalReport, finalError);
}
});
break;
case ASYNC_WITH_CACHE:
errorStore.write(error);
errorStore.flush();
}
// Add a breadcrumb for this error occurring
breadcrumbs.add(error.getExceptionName(), BreadcrumbType.ERROR, Collections.singletonMap("message", error.getExceptionMessage()));
}
void deliver(Report report, Error error) {
try {
HttpClient.post(config.getEndpoint(), report);
Logger.info("Sent 1 new error to Bugsnag");
} catch (HttpClient.NetworkException e) {
Logger.info("Could not send error(s) to Bugsnag, saving to disk to send later");
// Save error to disk for later sending
errorStore.write(error);
} catch (HttpClient.BadResponseException e) {
Logger.info("Bad response when sending data to Bugsnag");
} catch (Exception e) {
Logger.warn("Problem sending error to Bugsnag", e);
}
}
void cacheAndNotify(Throwable exception, Severity severity) {
Error error = new Error(config, exception);
error.setSeverity(severity);
notify(error, DeliveryStyle.ASYNC_WITH_CACHE, null);
}
private boolean runBeforeNotifyTasks(Error error) {
for (BeforeNotify beforeNotify : config.getBeforeNotifyTasks()) {
try {
if (!beforeNotify.run(error)) {
return false;
}
} catch (Throwable ex) {
Logger.warn("BeforeNotify threw an Exception", ex);
}
}
// By default, allow the error to be sent if there were no objections
return true;
}
/**
* Stores the given key value pair into shared preferences
* @param key The key to store
* @param value The value to store
* @return Whether the value was stored successfully or not
*/
private boolean storeInSharedPrefs(String key, String value) {
SharedPreferences sharedPref = appContext.getSharedPreferences(SHARED_PREF_KEY, Context.MODE_PRIVATE);
return sharedPref.edit().putString(key, value).commit();
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notify(Throwable,Callback)}
* to send and modify error reports
*/
@Deprecated
public void notify(Throwable exception, MetaData metaData) {
Error error = new Error(config, exception);
error.setMetaData(metaData);
notify(error, !BLOCKING);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notify(Throwable,Callback)}
* to send and modify error reports
*/
@Deprecated
public void notifyBlocking(Throwable exception, MetaData metaData) {
Error error = new Error(config, exception);
error.setMetaData(metaData);
notify(error, BLOCKING);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notify(Throwable,Callback)} to send and
* modify error reports
*/
@Deprecated
public void notify(Throwable exception, Severity severity, MetaData metaData) {
Error error = new Error(config, exception);
error.setSeverity(severity);
error.setMetaData(metaData);
notify(error, !BLOCKING);
}
/**
* Notify Bugsnag of a handled exception
*
* @param exception the exception to send to Bugsnag
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notifyBlocking(Throwable,Callback)} to send
* and modify error reports
*/
@Deprecated
public void notifyBlocking(Throwable exception, Severity severity, MetaData metaData) {
Error error = new Error(config, exception);
error.setSeverity(severity);
error.setMetaData(metaData);
notify(error, BLOCKING);
}
/**
* Notify Bugsnag of an error
*
* @param name the error name or class
* @param message the error message
* @param stacktrace the stackframes associated with the error
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notify(String,String,StackTraceElement[],Callback)}
* to send and modify error reports
*/
@Deprecated
public void notify(String name, String message, StackTraceElement[] stacktrace, Severity severity, MetaData metaData) {
Error error = new Error(config, name, message, stacktrace);
error.setSeverity(severity);
error.setMetaData(metaData);
notify(error, !BLOCKING);
}
/**
* Notify Bugsnag of an error
*
* @param name the error name or class
* @param message the error message
* @param stacktrace the stackframes associated with the error
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notifyBlocking(String,String,StackTraceElement[],Callback)}
* to send and modify error reports
*/
@Deprecated
public void notifyBlocking(String name, String message, StackTraceElement[] stacktrace, Severity severity, MetaData metaData) {
Error error = new Error(config, name, message, stacktrace);
error.setSeverity(severity);
error.setMetaData(metaData);
notify(error, BLOCKING);
}
/**
* Notify Bugsnag of an error
*
* @param name the error name or class
* @param message the error message
* @param context the error context
* @param stacktrace the stackframes associated with the error
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notify(String,String,StackTraceElement[],Callback)}
* to send and modify error reports
*/
@Deprecated
public void notify(String name, String message, String context, StackTraceElement[] stacktrace, Severity severity, MetaData metaData) {
Error error = new Error(config, name, message, stacktrace);
error.setSeverity(severity);
error.setMetaData(metaData);
error.setContext(context);
notify(error, !BLOCKING);
}
/**
* Notify Bugsnag of an error
*
* @param name the error name or class
* @param message the error message
* @param context the error context
* @param stacktrace the stackframes associated with the error
* @param severity the severity of the error, one of Severity.ERROR,
* Severity.WARNING or Severity.INFO
* @param metaData additional information to send with the exception
*
* @deprecated Use {@link #notifyBlocking(String,String,StackTraceElement[],Callback)}
* to send and modify error reports
*/
@Deprecated
public void notifyBlocking(String name, String message, String context, StackTraceElement[] stacktrace, Severity severity, MetaData metaData) {
Error error = new Error(config, name, message, stacktrace);
error.setSeverity(severity);
error.setMetaData(metaData);
error.setContext(context);
notify(error, BLOCKING);
}
}
|
// sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/results/ColumnMetaData.java
/*
* Copyright <2019> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazon.opendistroforelasticsearch.jdbc.internal.results;
import com.amazon.opendistroforelasticsearch.jdbc.types.ElasticsearchType;
import com.amazon.opendistroforelasticsearch.jdbc.protocol.ColumnDescriptor;
public class ColumnMetaData {
private String name;
private String label;
private String tableSchemaName;
private int precision = -1;
private int scale = -1;
private String tableName;
private String catalogName;
private String esTypeName;
private ElasticsearchType esType;
public ColumnMetaData(ColumnDescriptor descriptor) {
this.name = descriptor.getName();
// if a label isn't specified, the name is the label
this.label = descriptor.getLabel() == null ? this.name : descriptor.getLabel();
this.esTypeName = descriptor.getType();
this.esType = ElasticsearchType.fromTypeName(esTypeName);
// use canned values until server can return this
this.precision = this.esType.getPrecision();
this.scale = 0;
// JDBC has these, but our protocol does not yet convey these
this.tableName = "";
this.catalogName = "";
this.tableSchemaName = "";
}
public String getName() {
return name;
}
public String getLabel() {
return label;
}
public String getTableSchemaName() {
return tableSchemaName;
}
public int getPrecision() {
return precision;
}
public int getScale() {
return scale;
}
public String getTableName() {
return tableName;
}
public String getCatalogName() {
return catalogName;
}
public ElasticsearchType getEsType() {
return esType;
}
public String getEsTypeName() {
return esTypeName;
}
}
|
#ifdef _WIN32
#include <windows.h>  // console-mode APIs used below (GetStdHandle, GetConsoleMode, SetConsoleMode)
#endif
// Platform-specific initialization
// On Windows, enable colors in terminal. On other systems, do nothing.
// Return value: 0 on success, negative number on failure.
int initialize()
{
#ifdef _WIN32
HANDLE h_stdout = GetStdHandle(STD_OUTPUT_HANDLE);
if (h_stdout == INVALID_HANDLE_VALUE)
return -1;
DWORD mode;
if (! GetConsoleMode(h_stdout, &mode))
return -2;
mode |= ENABLE_VIRTUAL_TERMINAL_PROCESSING;
if (! SetConsoleMode(h_stdout, mode))
return -3;
return 0;
#else
return 0;
#endif
}
|
/**
* The method moves figure from start position to the destination position.
* @param figure to be moved.
* @param destinationX destination coordinate X.
* @param destinationY destination coordinate Y.
*/
public void moveFigure(Figure figure, int destinationX, int destinationY) {
Figure currentFigure = this.board[figure.getX()][figure.getY()].takeFigure();
if (currentFigure != null) {
this.board[destinationX][destinationY].setFigure(currentFigure);
}
}
|
// SPDX-License-Identifier: BSD-3-Clause
//
// Copyright(c) 2021 Intel Corporation. All rights reserved.
#ifndef ACA_MODULE_H_
#define ACA_MODULE_H_
#include "loadable_processing_module.h"
#include "build/module_design_config.h"
#include "aca_config.h"
#ifdef NOTIFICATION_SUPPORT
#include <notification_message.h>
#endif
/*!
* \brief The AcaModule class is an implementation example
* of ProcessingModuleInterface which simply amplifies the input stream
* by a constant gain value.
*
* The AcaModule is a single input-single output module.
* It can accept any input frame size as long as it is compatible with the sample word length.
*/
class AcaModule : public intel_adsp::ProcessingModule<DESIGN_CONFIG>
{
public:
/*! \brief Set of error codes value specific to this module
*/
enum InternalError
{
PROCESS_SUCCEED = 0,
#ifdef NOTIFICATION_SUPPORT
PROCESS_NOTIFICATION_ERROR = 1,
#endif
};
/*! Defines alias for the base class */
typedef intel_adsp::ProcessingModule<DESIGN_CONFIG> ProcessingModule;
/*! \brief Initializes a new instance of AcaModule
*
* \param [in] num_channels number of channels.
* \param [in] bits_per_sample bits per input and output audio sample.
* \param [in] sampling_frequency sampling frequency.
* \param [in] ibs input buffer size.
* \param [in] system_agent system_agent to check in the instance which is initializing.
*/
AcaModule(
uint32_t num_channels,
size_t bits_per_sample,
uint32_t sampling_frequency,
uint32_t ibs,
intel_adsp::SystemAgentInterface &system_agent):
ProcessingModule(system_agent),
ms_per_frame_(ibs / ((bits_per_sample / 8) * num_channels * ( sampling_frequency / 1000 )))
{
processing_mode_ = intel_adsp::ProcessingMode::NORMAL;
ACA_Environment_notification_counter_ = 0;
}
virtual uint32_t Process(intel_adsp::InputStreamBuffer *input_stream_buffers,
intel_adsp::OutputStreamBuffer *output_stream_buffers) /*override*/;
virtual ErrorCode::Type SetConfiguration(
uint32_t config_id,
intel_adsp::ConfigurationFragmentPosition fragment_position,
uint32_t data_offset_size,
const uint8_t *fragment_buffer,
size_t fragment_size,
uint8_t *response,
size_t &response_size
); /*override*/
virtual ErrorCode::Type GetConfiguration(
uint32_t config_id,
intel_adsp::ConfigurationFragmentPosition fragment_position,
uint32_t &data_offset_size,
uint8_t *fragment_buffer,
size_t &fragment_size
); /*override*/
virtual void SetProcessingMode(intel_adsp::ProcessingMode mode); /*override*/
virtual intel_adsp::ProcessingMode GetProcessingMode(); /*override*/
virtual void Reset(); /*override*/
private:
InternalError CheckEnvironment(uint8_t* input_buffer, size_t data_size);
InternalError CheckResult(uint8_t* input_buffer, size_t data_size);
#ifdef NOTIFICATION_SUPPORT
// function used to send notification
InternalError Send_ACA_Environment_Notification();
InternalError Send_ACA_Sound_Notification();
#endif //#ifdef NOTIFICATION_SUPPORT
const uint16_t ms_per_frame_;
uint16_t ACA_Environment_notification_counter_;
// current processing mode
intel_adsp::ProcessingMode processing_mode_;
// reserves space for module instances' bss
AcaBss bss_;
#ifdef NOTIFICATION_SUPPORT
// Notification object used to send message to host
// NOTE: Template<> is expected to contain max size of the Aca notification messages (if several)
intel_adsp::ModuleNotificationMessage<sizeof(ACA_SoundNotificationParams)> notification_event_message_;
intel_adsp::ModuleNotificationMessage<sizeof(ACA_EnvironmentNotificationParams)> notification_environment_message_;
#endif //#ifdef NOTIFICATION_SUPPORT
}; // class AcaModule
class AcaModuleFactory
: public intel_adsp::ProcessingModuleFactory<AcaModuleFactory,
AcaModule>
{
public:
AcaModuleFactory(
intel_adsp::SystemAgentInterface &system_agent)
: intel_adsp::ProcessingModuleFactory<AcaModuleFactory,
AcaModule>(
system_agent)
{}
ErrorCode::Type Create(
intel_adsp::SystemAgentInterface &system_agent,
intel_adsp::ModulePlaceholder *module_placeholder,
intel_adsp::ModuleInitialSettings initial_settings
);
}; // class AcaModuleFactory
#endif // ACA_MODULE_H_
|
import Dexie from 'dexie';
import faker from 'faker/locale/en';
import { encrypted } from '../../src/encrypted';
import { Encryption } from '../../src/encryption.class';
export interface Friend {
id?: string;
testProp?: string;
hasAge?: boolean;
firstName: string;
lastName: string;
shoeSize: number;
}
class TestDatabase extends Dexie {
public friends: Dexie.Table<Friend, string>;
constructor(name: string, secret?: string) {
super(name);
encrypted(this, { secretKey: secret });
this.version(1).stores({
friends: '++#id, firstName, $lastName, $shoeSize, age',
buddies: '++id, buddyName, buddyAge',
dudes: '++id, $dudeName, $dudeAge',
empty: ''
});
}
}
class TestDatabaseAddons extends Dexie {
public friends: Dexie.Table<Friend, string>;
constructor(name: string, secret: string) {
super(name, {
addons: [encrypted.setOptions({ secretKey: secret })]
});
this.version(1).stores({
friends: '++#id, firstName, $lastName, $shoeSize, age',
});
}
}
class TestDatabaseAddonsNoSecret extends Dexie {
public friends: Dexie.Table<Friend, string>;
constructor(name: string) {
super(name, {
addons: [encrypted]
});
this.version(1).stores({
friends: '++#id, firstName, $lastName, $shoeSize, age',
});
}
}
class TestDatabaseNoEncryptedKeys extends Dexie {
public friends: Dexie.Table<Friend, string>;
constructor(name: string) {
super(name);
encrypted(this);
this.version(1).stores({
friends: '++#id, firstName, lastName, shoeSize, age',
});
}
}
class TestDatabaseNoHashPrimary extends Dexie {
public friends: Dexie.Table<Friend, string>;
constructor(name: string) {
super(name);
encrypted(this);
this.version(1).stores({
friends: '++id, firstName, lastName, shoeSize, age',
});
}
}
class TestDatabaseNoIndexes extends Dexie {
public friends: Dexie.Table<Friend, string>;
constructor(name: string) {
super(name);
encrypted(this);
this.version(1).stores({
friends: '',
});
}
}
function testDatabaseJs(): TestDatabase {
const db = new Dexie('TestDatabaseJs', {
addons: [encrypted]
});
db.version(1).stores({
friends: '++#id, firstName, $lastName, $shoeSize, age',
buddies: '++id, buddyName, buddyAge',
dudes: '++id, $dudeName, $dudeAge',
empty: ''
});
return db as TestDatabase;
}
function testDatabaseJsWithSecret(): TestDatabase {
const secret = Encryption.createRandomEncryptionKey();
const db = new Dexie('TestDatabaseJs', {
addons: [encrypted.setOptions({ secretKey: secret })]
});
db.version(1).stores({
friends: '++#id, firstName, $lastName, $shoeSize, age',
buddies: '++id, buddyName, buddyAge',
dudes: '++id, $dudeName, $dudeAge',
empty: ''
});
return db as TestDatabase;
}
export const databasesPositive = [
{
desc: 'TestDatabase',
db: () => new TestDatabase('TestDatabase')
},
{
desc: 'TestDatabaseNoEncryptedKeys',
db: () => new TestDatabaseNoEncryptedKeys('TestDatabaseNoEncryptedKeys')
},
{
desc: 'TestDatabaseAddons',
db: () => new TestDatabaseAddons('TestDatabaseAddons', Encryption.createRandomEncryptionKey())
},
{
desc: 'TestDatabaseAddons',
db: () => new TestDatabaseAddonsNoSecret('TestDatabaseAddons')
},
{
desc: 'testDatabaseJs',
db: () => testDatabaseJs()
},
{
desc: 'testDatabaseJsWithSecret',
db: () => testDatabaseJsWithSecret()
}
];
export const databasesNegative = [
{
desc: 'TestDatabaseNoHashPrimary',
db: () => new TestDatabaseNoHashPrimary('TestDatabaseNoHashPrimary')
},
{
desc: 'TestDatabaseNoIndexesNoHash',
db: () => new TestDatabaseNoIndexes('TestDatabaseNoIndexesNoHash')
}
];
export class TestDatabaseNotImmutable extends Dexie {
public friends: Dexie.Table<Friend, string>;
constructor(name: string) {
super(name);
encrypted(this, { immutable: false });
this.version(1).stores({
friends: '++#id, firstName, $lastName, $shoeSize, age',
});
}
}
export const mockFriends = (count: number = 5): Friend[] => {
const friend = () => ({
firstName: faker.name.firstName(),
lastName: faker.name.lastName(),
age: faker.random.number({ min: 1, max: 80 }),
shoeSize: faker.random.number({ min: 5, max: 12 })
});
return new Array(count).fill(null).map(() => friend());
};
|
Changes in Vegetation Cover of Yukon River Drainages in Interior Alaska: Estimated from MODIS Greenness Trends, 2000 to 2018
Abstract
Trends and transitions in the growing season MODerate resolution Imaging Spectroradiometer (MODIS) Normalized Difference Vegetation Index (NDVI) time series at 250-m resolution were analyzed for the period from 2000 to 2018 to understand recent patterns of vegetation change in ecosystems of the Yukon River basin in interior Alaska. Statistical analysis of changes in the NDVI time series was conducted using the “Breaks for Additive Seasonal and Trend” method (BFAST). This structural change analysis indicated that NDVI breakpoints and negative 18-yr trends in vegetation greenness over the years since 2000 could be explained in large part by the impacts of severe wildfires, commonly affecting shrubland and forested ecosystems at relatively low elevations (< 300 m). At least one NDVI breakpoint was detected at 29% of the MODIS pixels within the Yukon River basin study area. The warmest and wettest years in the study time period were found to be associated with a sizeable fraction (30%) of NDVI breakpoints. Among pixels with no NDVI breakpoints detected, both forest and shrubland trends were strongly skewed toward positive trend values. Results from gradual NDVI trend analysis supported the hypothesis that air temperature warming has enhanced the rates of (unburned) vegetation growth in shrubland and woodlands across interior Alaska over the past two decades.
|
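To make the kind of analysis described in the abstract concrete, here is a minimal, hedged sketch of per-pixel breakpoint and trend estimation on an annual NDVI series. It is not BFAST (which models seasonality and can detect multiple breaks); it fits a single candidate breakpoint by least squares, and all data, names, and parameters below are hypothetical.
import numpy as np

def fit_trend(years, ndvi):
    """Ordinary least-squares linear trend; returns (slope per year, intercept)."""
    slope, intercept = np.polyfit(years, ndvi, 1)
    return slope, intercept

def best_single_breakpoint(years, ndvi, min_seg=4):
    """Pick the split index that minimises the combined residual sum of squares
    of two piecewise-linear fits. Illustrative only: BFAST additionally models
    seasonality and can detect multiple breaks per pixel."""
    best_k, best_rss = None, np.inf
    for k in range(min_seg, len(years) - min_seg):
        rss = 0.0
        for seg in (slice(None, k), slice(k, None)):
            slope, intercept = fit_trend(years[seg], ndvi[seg])
            rss += np.sum((ndvi[seg] - (slope * years[seg] + intercept)) ** 2)
        if rss < best_rss:
            best_k, best_rss = k, rss
    return best_k

# Hypothetical growing-season NDVI means for one 250-m pixel, 2000-2018,
# with a simulated abrupt post-fire drop in 2010.
years = np.arange(2000, 2019, dtype=float)
rng = np.random.default_rng(0)
ndvi = 0.62 + 0.002 * (years - 2000) + rng.normal(0.0, 0.01, years.size)
ndvi[years >= 2010] -= 0.15

k = best_single_breakpoint(years, ndvi)
print("breakpoint year:", int(years[k]))
print("pre-break trend (NDVI/yr): %.4f" % fit_trend(years[:k], ndvi[:k])[0])
print("post-break trend (NDVI/yr): %.4f" % fit_trend(years[k:], ndvi[k:])[0])
|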
"""
Scheduled Sampling
Anneal input of decoder
variational latent embedding
conv embed
"""
import numpy as np
from functools import partial
import tensorflow as tf
import tensorflow.contrib.rnn as rnn
import tensorflow.contrib.distributions as tfd
def gausspdf(x, mean, sigma):
return tf.exp(-(x - mean)**2 /
(2 * sigma**2)) / (tf.sqrt(2.0 * np.pi) * sigma)
class RegressionHelper(tf.contrib.seq2seq.Helper):
"""Helper interface. Helper instances are used by SamplingDecoder."""
def __init__(self, batch_size, max_sequence_size, n_features):
self._batch_size = batch_size
self._max_sequence_size = max_sequence_size
self._n_features = n_features
self._batch_size_tensor = tf.convert_to_tensor(
batch_size, dtype=tf.int32, name="batch_size")
@property
def batch_size(self):
"""Returns a scalar int32 tensor."""
return self._batch_size_tensor
@property
def sample_ids_dtype(self):
return tf.float32
@property
def sample_ids_shape(self):
return self._n_features
def initialize(self, name=None):
finished = tf.tile([False], [self._batch_size])
start_inputs = tf.fill([self._batch_size, self._n_features], 0.0)
return (finished, start_inputs)
def sample(self, time, outputs, state, name=None):
"""Returns `sample_ids`."""
del time, state
return outputs
def next_inputs(self, time, outputs, state, sample_ids, name=None):
"""Returns `(finished, next_inputs, next_state)`."""
del sample_ids
finished = tf.cond(
tf.less(time, self._max_sequence_size), lambda: False, lambda: True)
del time
all_finished = tf.reduce_all(finished)
next_inputs = tf.cond(
all_finished,
# If we're finished, the next_inputs value doesn't matter
lambda: tf.zeros_like(outputs),
lambda: outputs)
return (finished, next_inputs, state)
class MDNRegressionHelper(tf.contrib.seq2seq.Helper):
"""Helper interface. Helper instances are used by SamplingDecoder."""
def __init__(self, batch_size, max_sequence_size, n_features, n_gaussians):
self._batch_size = batch_size
self._max_sequence_size = max_sequence_size
self._n_features = n_features
self._n_gaussians = n_gaussians
self._batch_size_tensor = tf.convert_to_tensor(
batch_size, dtype=tf.int32, name="batch_size")
@property
def batch_size(self):
"""Returns a scalar int32 tensor."""
return self._batch_size_tensor
@property
def sample_ids_dtype(self):
return tf.float32
@property
def sample_ids_shape(self):
return self._n_features
def initialize(self, name=None):
finished = tf.tile([False], [self._batch_size])
start_inputs = tf.fill([self._batch_size, self._n_features], 0.0)
return (finished, start_inputs)
def sample(self, time, outputs, state, name=None):
"""Returns `sample_ids`."""
del time, state
# return outputs
with tf.variable_scope('mdn'):
means = tf.reshape(
tf.slice(
outputs, [0, 0],
[self._batch_size, self._n_features * self._n_gaussians]),
[self._batch_size, self._n_features, self._n_gaussians],
name='means')
sigmas = tf.nn.softplus(
tf.reshape(
tf.slice(
outputs, [0, self._n_features * self._n_gaussians], [
self._batch_size,
self._n_features * self._n_gaussians
],
name='sigmas_pre_norm'),
[self._batch_size, self._n_features, self._n_gaussians]),
name='sigmas')
weights = tf.nn.softmax(
tf.reshape(
tf.slice(
outputs, [0, 2 * self._n_features * self._n_gaussians],
[self._batch_size, self._n_gaussians],
name='weights_pre_norm'),
[self._batch_size, self._n_gaussians]),
name='weights')
components = []
for gauss_i in range(self._n_gaussians):
mean_i = means[:, :, gauss_i]
sigma_i = sigmas[:, :, gauss_i]
components.append(
tfd.MultivariateNormalDiag(
loc=mean_i, scale_diag=sigma_i))
gauss = tfd.Mixture(
cat=tfd.Categorical(probs=weights), components=components)
sample = gauss.sample()
return sample
def next_inputs(self, time, outputs, state, sample_ids, name=None):
"""Returns `(finished, next_inputs, next_state)`."""
finished = tf.cond(
tf.less(time, self._max_sequence_size), lambda: False, lambda: True)
del time
del outputs
all_finished = tf.reduce_all(finished)
next_inputs = tf.cond(
all_finished,
# If we're finished, the next_inputs value doesn't matter
lambda: tf.zeros_like(sample_ids),
lambda: sample_ids)
del sample_ids
return (finished, next_inputs, state)
def _create_embedding(x, embed_size, embed_matrix=None):
batch_size, sequence_length, n_input = x.shape.as_list()
# Creating an embedding matrix if one isn't given
if embed_matrix is None:
embed_matrix = tf.get_variable(
name='embed_matrix',
shape=[n_input, embed_size],
dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer())
embed = tf.reshape(
tf.matmul(
tf.reshape(x, [batch_size * sequence_length, n_input]),
embed_matrix), [batch_size, sequence_length, embed_size])
return embed, embed_matrix
def _create_rnn_cell(n_neurons, n_layers, keep_prob):
cell_fw = rnn.LayerNormBasicLSTMCell(
num_units=n_neurons, dropout_keep_prob=keep_prob)
# Build deeper recurrent net if using more than 1 layer
if n_layers > 1:
cells = [cell_fw]
for layer_i in range(1, n_layers):
with tf.variable_scope('{}'.format(layer_i)):
cell_fw = rnn.LayerNormBasicLSTMCell(
num_units=n_neurons, dropout_keep_prob=keep_prob)
cells.append(cell_fw)
cell_fw = rnn.MultiRNNCell(cells)
return cell_fw
def _create_encoder(source, lengths, batch_size, n_enc_neurons, n_layers,
keep_prob):
# Create the RNN Cells for encoder
with tf.variable_scope('forward'):
cell_fw = _create_rnn_cell(n_enc_neurons, n_layers, keep_prob)
# Create the internal multi-layer cell for the backward RNN.
with tf.variable_scope('backward'):
cell_bw = _create_rnn_cell(n_enc_neurons, n_layers, keep_prob)
# Now hookup the cells to the input
# [batch_size, max_time, embed_size]
(outputs_fw, output_bw), (final_state_fw, final_state_bw) = \
tf.nn.bidirectional_dynamic_rnn(
cell_fw=cell_fw,
cell_bw=cell_bw,
inputs=source,
sequence_length=lengths,
time_major=False,
dtype=tf.float32)
return outputs_fw, final_state_fw
def _create_decoder(n_dec_neurons,
n_layers,
keep_prob,
batch_size,
encoder_outputs,
encoder_state,
encoder_lengths,
decoding_inputs,
decoding_lengths,
n_features,
scope,
max_sequence_size,
n_gaussians,
use_attention=False,
use_mdn=False):
from tensorflow.python.layers.core import Dense
if use_mdn:
n_outputs = n_features * n_gaussians + n_features * n_gaussians + n_gaussians
else:
n_outputs = n_features
output_layer = Dense(n_outputs, name='output_projection')
with tf.variable_scope('forward'):
cells = _create_rnn_cell(n_dec_neurons, n_layers, keep_prob)
if use_attention:
attn_mech = tf.contrib.seq2seq.LuongAttention(
cells.output_size, encoder_outputs, encoder_lengths, scale=False)
cells = tf.contrib.seq2seq.AttentionWrapper(
cell=cells,
attention_mechanism=attn_mech,
attention_layer_size=cells.output_size,
alignment_history=False)
initial_state = cells.zero_state(
dtype=tf.float32, batch_size=batch_size)
initial_state = initial_state.clone(cell_state=encoder_state)
else:
initial_state = encoder_state
helper = tf.contrib.seq2seq.TrainingHelper(
inputs=decoding_inputs,
sequence_length=decoding_lengths,
time_major=False)
decoder = tf.contrib.seq2seq.BasicDecoder(
cell=cells,
helper=helper,
initial_state=initial_state,
output_layer=output_layer)
outputs, _, _ = tf.contrib.seq2seq.dynamic_decode(
decoder,
output_time_major=False,
impute_finished=True,
maximum_iterations=max_sequence_size)
if use_mdn:
helper = MDNRegressionHelper(
batch_size=batch_size,
max_sequence_size=max_sequence_size,
n_features=n_features,
n_gaussians=n_gaussians)
else:
helper = RegressionHelper(
batch_size=batch_size,
max_sequence_size=max_sequence_size,
n_features=n_features)
scope.reuse_variables()
infer_decoder = tf.contrib.seq2seq.BasicDecoder(
cell=cells,
helper=helper,
initial_state=initial_state,
output_layer=output_layer)
infer_outputs, _, _ = tf.contrib.seq2seq.dynamic_decode(
infer_decoder,
output_time_major=False,
impute_finished=True,
maximum_iterations=max_sequence_size)
# infer_logits = tf.identity(infer_outputs.sample_id, name='infer_logits')
return outputs, infer_outputs
def create_model(batch_size=50,
sequence_length=120,
n_features=72,
n_neurons=512,
input_embed_size=None,
target_embed_size=None,
n_layers=2,
n_gaussians=5,
use_mdn=False,
use_attention=False):
# [batch_size, max_time, n_features]
source = tf.placeholder(
tf.float32,
shape=(batch_size, sequence_length, n_features),
name='source')
target = tf.placeholder(
tf.float32,
shape=(batch_size, sequence_length, n_features),
name='target')
lengths = tf.multiply(
tf.ones((batch_size,), tf.int32),
sequence_length,
name='source_lengths')
# Dropout
keep_prob = tf.placeholder(tf.float32, name='keep_prob')
with tf.variable_scope('target/slicing'):
source_last = tf.slice(source, [0, sequence_length - 1, 0], [batch_size, 1, n_features])
decoder_input = tf.slice(target, [0, 0, 0],
[batch_size, sequence_length - 1, n_features])
decoder_input = tf.concat([source_last, decoder_input], axis=1)
decoder_output = tf.slice(target, [0, 0, 0],
[batch_size, sequence_length, n_features])
if input_embed_size:
with tf.variable_scope('source/embedding'):
source_embed, source_embed_matrix = _create_embedding(
x=source, embed_size=input_embed_size)
else:
source_embed = source
# Build the encoder
with tf.variable_scope('encoder'):
encoder_outputs, encoder_state = _create_encoder(
source=source_embed,
lengths=lengths,
batch_size=batch_size,
n_enc_neurons=n_neurons,
n_layers=n_layers,
keep_prob=keep_prob)
# TODO: Add (vq?) variational loss
# Build the decoder
with tf.variable_scope('decoder') as scope:
outputs, infer_outputs = _create_decoder(
n_dec_neurons=n_neurons,
n_layers=n_layers,
keep_prob=keep_prob,
batch_size=batch_size,
encoder_outputs=encoder_outputs,
encoder_state=encoder_state,
encoder_lengths=lengths,
decoding_inputs=decoder_input,
decoding_lengths=lengths,
n_features=n_features,
scope=scope,
max_sequence_size=sequence_length,
n_gaussians=n_gaussians,
use_mdn=use_mdn)
if use_mdn:
max_sequence_size = sequence_length
with tf.variable_scope('mdn'):
means = tf.reshape(
tf.slice(
outputs[0], [0, 0, 0],
[batch_size, max_sequence_size, n_features * n_gaussians]),
[batch_size, max_sequence_size, n_features, n_gaussians])
sigmas = tf.nn.softplus(
tf.reshape(
tf.slice(outputs[0], [0, 0, n_features * n_gaussians], [
batch_size, max_sequence_size, n_features * n_gaussians
]),
[batch_size, max_sequence_size, n_features, n_gaussians]))
weights = tf.nn.softmax(
tf.reshape(
tf.slice(outputs[0], [
0, 0,
n_features * n_gaussians + n_features * n_gaussians
], [batch_size, max_sequence_size, n_gaussians]),
[batch_size, max_sequence_size, n_gaussians]))
components = []
for gauss_i in range(n_gaussians):
mean_i = means[:, :, :, gauss_i]
sigma_i = sigmas[:, :, :, gauss_i]
components.append(
tfd.MultivariateNormalDiag(
loc=mean_i, scale_diag=sigma_i))
gauss = tfd.Mixture(
cat=tfd.Categorical(probs=weights), components=components)
sample = gauss.sample()
with tf.variable_scope('loss'):
negloglike = -gauss.log_prob(decoder_output)
weighted_reconstruction = tf.reduce_mean(
tf.expand_dims(weights, 2) * means, 3)
mdn_loss = tf.reduce_mean(negloglike)
mse_loss = tf.losses.mean_squared_error(weighted_reconstruction,
decoder_output)
loss = mdn_loss
else:
with tf.variable_scope('loss'):
mdn_loss = tf.reduce_mean(tf.reduce_sum([[0.0]], 1))
mse_loss = tf.losses.mean_squared_error(outputs[0], decoder_output)
loss = mse_loss
# Without an MDN head there is no mixture to sample from; fall back to the raw
# decoder outputs so the returned dict below has consistent keys.
sample = outputs[0]
weighted_reconstruction = outputs[0]
return {
'source': source,
'target': target,
'keep_prob': keep_prob,
'encoding': encoder_state,
'decoding': infer_outputs,
'sample': sample,
'weighted': weighted_reconstruction,
'loss': loss,
'mdn_loss': mdn_loss,
'mse_loss': mse_loss
}
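A minimal, hypothetical sketch of how a graph built by create_model might be driven in a TF 1.x session loop; the optimizer choice, hyperparameters, and the stand-in data below are illustrative assumptions, not part of the code above.
# Illustrative training-loop sketch (TF 1.x graph mode). Everything not defined
# in the code above (optimizer, n_iterations, stand-in data) is an assumption.
import numpy as np
import tensorflow as tf

n_iterations = 100                                            # assumed hyperparameter
sequences = np.random.randn(500, 120, 72).astype(np.float32)  # stand-in data

net = create_model(batch_size=50, sequence_length=120, n_features=72, use_mdn=True)
train_op = tf.train.AdamOptimizer(1e-4).minimize(net['loss'])

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(n_iterations):
        batch = sequences[np.random.choice(len(sequences), 50, replace=False)]
        _, batch_loss = sess.run(
            [train_op, net['loss']],
            feed_dict={net['source']: batch,
                       net['target']: batch,
                       net['keep_prob']: 0.8})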
|
<filename>regrun/vbb-rest-python/vbb/modules/locations.py
from basemodule import BaseModule
class Locations(BaseModule):
"""Module to invoke locations API of VBB REST"""
def __init__(self, requester=None):
super(Locations, self).__init__(requester)
def get_locations(self, query=None, results=10, stations=True, poi=True, addresses=True):
data, headers = self.r.request('locations', payload=locals())
return data
|
/// Generate multiplication array
///
/// Generate a string representing a 5d multiplication array. This array uses the distributivity
/// of multiplication `(a + b) * (c + d) == a*c + a*d + b*c + b*d` to reduce table size.
///
/// The input is split into bit chunks, e.g. for a GF_1024 number we take the lower 8 bits and the
/// remaining 2, and calculate the multiplications for each chunk separately. Then we can cheaply
/// add them together to get the result without requiring a full 1024 * 1024 table.
fn generate_mul_table_string(modulus: G2Poly) -> String {
assert!(modulus.is_irreducible());
let field_size = 1 << modulus.degree().expect("Irreducible polynomial has positive degree");
let nparts = ceil_log256(field_size as usize);
let mut mul_table = Vec::with_capacity(nparts as usize);
for left in 0..nparts {
let mut left_parts = Vec::with_capacity(nparts as usize);
for right in 0..nparts {
let mut right_parts = Vec::with_capacity(256);
for i in 0..256 {
let i = i << 8 * left;
let mut row = Vec::with_capacity(256);
for j in 0..256 {
let j = j << 8 * right;
let v = if i < field_size && j < field_size {
let v = G2Poly(i as u64) * G2Poly(j as u64) % modulus;
v
} else {
G2Poly(0)
};
row.push(format!("{}", v.0));
}
right_parts.push(format!("[{}]", row.join(",")));
}
left_parts.push(format!("[{}]", right_parts.join(",")));
}
mul_table.push(format!("[{}]", left_parts.join(",")));
}
format!("[{}]", mul_table.join(","))
} |
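To make the lookup scheme in the doc comment above concrete, here is a small Python sketch (illustrative only). It assumes the nested table has the layout produced by generate_mul_table_string, i.e. table[left][right][i][j] holds ((i << 8*left) * (j << 8*right)) reduced by the modulus; combining partial products in GF(2^n) is a bitwise XOR.
# Hypothetical use of a table with the layout generated above, for GF(2^10).
def gf1024_mul(a, b, table):
    a_chunks = [a & 0xFF, (a >> 8) & 0x03]   # lower 8 bits, remaining 2 bits
    b_chunks = [b & 0xFF, (b >> 8) & 0x03]
    result = 0
    for left, ac in enumerate(a_chunks):
        for right, bc in enumerate(b_chunks):
            # "Adding" the partial products in GF(2^n) is just XOR.
            result ^= table[left][right][ac][bc]
    return result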
/**
* Removes the specified step from the tutorial.
*
* @param index The position of the step to remove.
* @return the step that was removed.
*/
public Step removeStep(int index) {
Step step = m_steps.remove(index);
step.setIndex(null);
for (int i = index; i < m_steps.size(); i++) {
m_steps.get(i).setIndex((long)i);
}
return step;
} |
Samsung has dropped Samsung Wallet into the Play Store, an app previously only available through Samsung Apps on devices such as the Galaxy S4, Note 2, and Galaxy S3. We first heard about the offering back at this year's Mobile World Congress, and its functionality hasn't changed from what was promised. This remains Samsung's answer to Apple's Passbook, just easier to find than before.
The app functions like a digital wallet, holding coupons, tickets, and gift cards from select partners. This won't replace physical wallets though, as it's not integrated with an actual payment system, nor is there NFC integration in any way. Still, Samsung Wallet does support importing from other applications, and it's packed with quite a few features.
Key Features:
Optimized for HD and Full-HD Samsung devices
Store and manage tickets, coupons, mobile boarding passes and membership cards in one convenient place
Offers 5 default categories for easy management of Tickets. You can also add categories for more personalization.
Customizable time and location-based reminders to show the Tickets at the right time and place.
Synchronize all your Tickets to multiple devices using Samsung account
Receive and store Co-marketing offers
Create your own tickets manually by using ‘Create my ticket’ feature
Discover grocery coupons and digitally save them to the loyalty cards of participating grocery stores
Samsung Wallet is available for free, but don't be surprised if you're greeted with "This app is incompatible with all of your devices." The name kind of gives it away.
|
// Values returns all known values for PhoneNumberFilterName. Note that this can be
// expanded in the future, and so it is only as up to date as the client. The
// ordering of this slice is not guaranteed to be stable across updates.
func (PhoneNumberFilterName) Values() []PhoneNumberFilterName {
return []PhoneNumberFilterName{
"status",
"iso-country-code",
"message-type",
"number-capability",
"number-type",
"two-way-enabled",
"self-managed-opt-outs-enabled",
"opt-out-list-name",
"deletion-protection-enabled",
}
} |
<gh_stars>100-1000
// SPDX-FileCopyrightText: 2014-2021 SAP SE
//
// SPDX-License-Identifier: Apache-2.0
package protocol
import (
"fmt"
"github.com/SAP/go-hdb/driver/internal/protocol/encoding"
)
type optBooleanType bool
type optTinyintType int8
type optIntType int32
type optBigintType int64
type optDoubleType float64
type optStringType []byte
type optBinaryStringType []byte
func (t optBooleanType) String() string { return fmt.Sprintf("%t", bool(t)) }
func (t optTinyintType) String() string { return fmt.Sprintf("%d", int8(t)) }
func (t optIntType) String() string { return fmt.Sprintf("%d", int(t)) }
func (t optBigintType) String() string { return fmt.Sprintf("%d", int64(t)) }
func (t optDoubleType) String() string { return fmt.Sprintf("%g", float64(t)) }
func (t optStringType) String() string { return string(t) }
func (t optBinaryStringType) String() string { return fmt.Sprintf("%v", []byte(t)) }
type multiLineOptions []plainOptions
func (o multiLineOptions) size() int {
size := 0
for _, m := range o {
size += m.size()
}
return size
}
func (o *multiLineOptions) reset(size int) {
if o == nil || size > cap(*o) {
*o = make(multiLineOptions, size)
} else {
*o = (*o)[:size]
}
}
func (o *multiLineOptions) decode(dec *encoding.Decoder, lineCnt int) {
o.reset(lineCnt)
for i := 0; i < lineCnt; i++ {
m := plainOptions{}
(*o)[i] = m
cnt := dec.Int16()
m.decode(dec, int(cnt))
}
}
func (o multiLineOptions) encode(enc *encoding.Encoder) {
for _, m := range o {
enc.Int16(int16(len(m)))
m.encode(enc)
}
}
type plainOptions map[int8]interface{}
func (o plainOptions) asString(k int8) string {
v, ok := o[k]
if !ok {
return ""
}
switch v := v.(type) {
case optStringType:
return string(v)
default:
return ""
}
}
func (o plainOptions) asInt(k int8) int {
v, ok := o[k]
if !ok {
return 0
}
switch v := v.(type) {
case optIntType:
return int(v)
default:
return 0
}
}
func (o plainOptions) asBool(k int8) bool {
v, ok := o[k]
if !ok {
return false
}
switch v := v.(type) {
case optBooleanType:
return bool(v)
default:
return false
}
}
func (o plainOptions) size() int {
size := 2 * len(o) //option + type
for _, v := range o {
switch v := v.(type) {
default:
plog.Fatalf("type %T not implemented", v)
case optBooleanType:
size++
case optTinyintType:
size++
case optIntType:
size += 4
case optBigintType:
size += 8
case optDoubleType:
size += 8
case optStringType:
size += (2 + len(v)) //length int16 + string length
case optBinaryStringType:
size += (2 + len(v)) //length int16 + string length
}
}
return size
}
func (o plainOptions) decode(dec *encoding.Decoder, cnt int) {
for i := 0; i < cnt; i++ {
k := dec.Int8()
tc := dec.Byte()
switch typeCode(tc) {
default:
plog.Fatalf("type code %s not implemented", typeCode(tc))
case tcBoolean:
o[k] = optBooleanType(dec.Bool())
case tcTinyint:
o[k] = optTinyintType(dec.Int8())
case tcInteger:
o[k] = optIntType(dec.Int32())
case tcBigint:
o[k] = optBigintType(dec.Int64())
case tcDouble:
o[k] = optDoubleType(dec.Float64())
case tcString:
size := dec.Int16()
v := make([]byte, size)
dec.Bytes(v)
o[k] = optStringType(v)
case tcBstring:
size := dec.Int16()
v := make([]byte, size)
dec.Bytes(v)
o[k] = optBinaryStringType(v)
}
}
}
func (o plainOptions) encode(enc *encoding.Encoder) {
for k, v := range o {
enc.Int8(int8(k))
switch v := v.(type) {
default:
plog.Fatalf("type %T not implemented", v)
case optBooleanType:
enc.Int8(int8(tcBoolean))
enc.Bool(bool(v))
case optTinyintType:
enc.Int8(int8(tcTinyint))
enc.Int8(int8(v))
case optIntType:
enc.Int8(int8(tcInteger))
enc.Int32(int32(v))
case optBigintType:
enc.Int8(int8(tcBigint))
enc.Int64(int64(v))
case optDoubleType:
enc.Int8(int8(tcDouble))
enc.Float64(float64(v))
case optStringType:
enc.Int8(int8(tcString))
enc.Int16(int16(len(v)))
enc.Bytes(v)
case optBinaryStringType:
enc.Int8(int8(tcBstring))
enc.Int16(int16(len(v)))
enc.Bytes(v)
}
}
}
|
    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        # Parse the body once and guard against responses without a pagination block.
        pagination = response.json().get("pagination") or {}
        if pagination.get("current"):
            return {"page": pagination["current"] + 1}
        return None |
/**
* Removes the client method handler represented by this subscription.
*/
public void unsubscribe() {
List<InvocationHandler> handler = this.handlers.get(target);
if (handler != null) {
handler.remove(this.handler);
}
} |
Atomic and electronic structure of the Pb/Si(111)-√7x√3 surface: DFT calculations
The atomic and electronic structure of the Pb-adsorbed Si(111)-(√7×√3) surface has been studied by density-functional theory (DFT) calculations. We examined two structural models: one is the 1.0 ML trimer model proposed by an STM experiment, and the other is the 1.2 ML triangular model, characterized by a Y-shape arrangement of four Pb atoms, proposed by an X-ray experiment. We find that while the 1.0 ML trimer model is locally unstable, the 1.2 ML triangular model is stable, which is consistent with a previous DFT study. The triangular model was found to reproduce the experimental Y-shape STM images. This structure shows a metallic band structure, whose surface-state bands have free-electron-like parabolic dispersions. These features are in agreement with the band structure reported by a recent ARPES experiment. Our calculations, however, do not reproduce some of the experimental bands. The origin of this discrepancy will be discussed in connection with the triple-domain LEED pattern reported in the ARPES experiment. |
<reponame>niorad/Skewbacca
import { app, remote, BrowserWindow, dialog, ipcMain } from "electron";
import * as path from "path";
import { Coordinates, Config, State } from "../types/types";
import { initialize } from "./setup";
import { ImageConverter } from "./ImageConverter";
import { FileManager } from "./FileManager";
const state: State = {
activeSourceFile: "",
activePreviewFile: ""
};
const config: Config = {
previewSizePercent: 20,
filePath: path.join((app || remote.app).getPath("userData"), "generated"),
previewFilePrefix: "tmp_sbprev_"
};
const imageConverter = new ImageConverter();
const fileManager = new FileManager();
initialize(config);
export const onOpenFileRequested = (): void => {
const currentBrowserWindow = BrowserWindow.getFocusedWindow()!;
fileManager.getFileFromUser(currentBrowserWindow).then(file => {
state.activeSourceFile = file;
openFile(file);
});
};
const openFile = (file: string): void => {
const previewFileName = config.previewFilePrefix + path.basename(file);
state.activeSourceFile = file;
state.activePreviewFile = path.join(config.filePath, previewFileName);
imageConverter
.resizeImage(file, config.previewSizePercent, state.activePreviewFile)
.then(() => {
BrowserWindow.getFocusedWindow()!.webContents.send(
"file-opened",
file,
null
);
})
.catch(err => {
console.log("Error on resizing Image: ", err);
});
};
ipcMain.on("open-file", (_data, path: string) => {
openFile(path);
});
const convertFull = (coords: Coordinates, nw: number, nh: number): void => {
const file = fileManager.getSavingDestinationFromUser(
BrowserWindow.getFocusedWindow()!
);
console.log("CONVERT FULL", file);
imageConverter
.unskewImage(state.activeSourceFile, coords, nw, nh, file)
.then(() => {
console.log("Conversion Done!");
})
.catch(err => {
console.log("Cenvert Full Error:", err);
});
};
ipcMain.on(
"convert-full",
(_data, coords: Coordinates, nw: number, nh: number) => {
convertFull(coords, nw, nh);
}
);
const convertPreview = (
targetFileName: string,
coords: Coordinates,
nw: number,
nh: number
): void => {
const fullTargetFilePath = path.join(config.filePath, targetFileName);
imageConverter
.unskewImage(state.activePreviewFile, coords, nw, nh, fullTargetFilePath)
.then(() => {
BrowserWindow.getFocusedWindow()!.webContents.send(
"file-saved",
fullTargetFilePath
);
})
.catch(err => {
console.log("Convert Preview Error:", err);
});
};
ipcMain.on(
"convert-preview",
(_data, path: string, coords: Coordinates, nw: number, nh: number) => {
convertPreview(path, coords, nw, nh);
}
);
|
/*
KeccakTools
The Keccak sponge function, designed by Guido Bertoni, Joan Daemen,
Michaël Peeters and Gilles Van Assche. For more information, feedback or
questions, please refer to our website: http://keccak.noekeon.org/
Implementation by the designers,
hereby denoted as "the implementer".
To the extent possible under law, the implementer has waived all copyright
and related or neighboring rights to the source code in this file.
http://creativecommons.org/publicdomain/zero/1.0/
*/
#include <sstream>
#include "Keccak-fParityBounds.h"
#include "Keccak-fPositions.h"
#include "progress.h"
#include "translationsymmetry.h"
using namespace std;
unsigned int getBoundOfTotalWeightGivenTotalHammingWeight(const KeccakFPropagation& DCorLC, unsigned int totalHW)
{
return DCorLC.getLowerBoundOnReverseWeightGivenHammingWeight(totalHW);
// It has been explicitly checked that putting all the Hamming weight
// before λ always gives the minimum lower bound.
// The check was done both for DC and LC.
}
unsigned int getLowerBoundTotalActiveRowsFromACandUOC(const KeccakFPropagation& DCorLC,
const vector<ColumnPosition>& xzAC, const vector<ColumnPosition>& xzUOC)
{
unsigned int activeRows = 0;
vector<vector<bool> > rowTakenLeft(5, vector<bool>(DCorLC.laneSize, false));
vector<vector<bool> > rowTakenRight(5, vector<bool>(DCorLC.laneSize, false));
for(unsigned int i=0; i<xzAC.size(); i++) {
unsigned int x = xzAC[i].x;
unsigned int z = xzAC[i].z;
for(unsigned int y=0; y<5; y++) {
BitPosition left(x, y, z);
DCorLC.reverseRhoPiBeforeTheta(left);
BitPosition right(x, y, z);
DCorLC.directRhoPiAfterTheta(right);
if ((!rowTakenLeft[left.y][left.z]) && (!rowTakenRight[right.y][right.z])) {
activeRows++;
rowTakenLeft[left.y][left.z] = true;
rowTakenRight[right.y][right.z] = true;
}
}
}
for(unsigned int i=0; i<xzUOC.size(); i++) {
unsigned int x = xzUOC[i].x;
unsigned int z = xzUOC[i].z;
bool takenLeft = false;
bool takenRight = false;
for(unsigned int y=0; y<5; y++) {
BitPosition left(x, y, z);
DCorLC.reverseRhoPiBeforeTheta(left);
BitPosition right(x, y, z);
DCorLC.directRhoPiAfterTheta(right);
takenLeft |= rowTakenLeft[left.y][left.z];
takenRight |= rowTakenRight[right.y][right.z];
rowTakenLeft[left.y][left.z] = true;
rowTakenRight[right.y][right.z] = true;
}
if (!takenLeft)
activeRows++;
if (!takenRight)
activeRows++;
for(unsigned int y=0; y<5; y++) {
BitPosition left(x, y, z);
DCorLC.reverseRhoPiBeforeTheta(left);
BitPosition right(x, y, z);
DCorLC.directRhoPiAfterTheta(right);
if (takenLeft)
rowTakenLeft[left.y][left.z] = true;
if (takenRight)
rowTakenRight[right.y][right.z] = true;
}
}
return activeRows;
}
unsigned int getLowerBoundTotalActiveRows(const KeccakFPropagation& DCorLC,
const vector<RowValue>& C, const vector<RowValue>& D)
{
vector<ColumnPosition> xzAC, xzUOC;
for(unsigned int x=0; x<5; x++)
for(unsigned int z=0; z<DCorLC.laneSize; z++) {
bool odd = (getBit(C, x, z) != 0);
bool affected = (getBit(D, x, z) != 0);
if (affected)
xzAC.push_back(ColumnPosition(x, z));
else {
if (odd)
xzUOC.push_back(ColumnPosition(x, z));
}
}
return getLowerBoundTotalActiveRowsFromACandUOC(DCorLC, xzAC, xzUOC);
}
string Run::display() const
{
stringstream str;
str << "[" << dec << tStart;
if (length == 1)
str << "]";
else
str << "-" << (tStart+length-1) << "]";
return str.str();
}
string ParityAsRuns::display() const
{
string result;
for(unsigned int i=0; i<runs.size(); i++) {
result += runs[i].display();
if (i < (runs.size()-1))
result += " ";
}
return result;
}
void ParityAsRuns::toParityAndParityEffect(const KeccakFPropagation& DCorLC, vector<RowValue>& C, vector<RowValue>& D) const
{
C.assign(DCorLC.laneSize, 0);
D.assign(DCorLC.laneSize, 0);
for(unsigned int i=0; i<runs.size(); i++) {
unsigned int x, z;
DCorLC.getXandZfromT(DCorLC.translateAlongXinT(runs[i].tStart), x, z);
setBitToOne(D, x, z);
DCorLC.getXandZfromT(DCorLC.translateAlongXinT(runs[i].tStart + runs[i].length), x, z);
setBitToOne(D, x, z);
for(unsigned int t=runs[i].tStart; t<runs[i].tStart+runs[i].length; t++) {
unsigned int x, z;
DCorLC.getXandZfromT(t, x, z);
setBitToOne(C, x, z);
}
}
}
unsigned int ParityAsRuns::getLowerBoundTotalHammingWeight(const KeccakFPropagation& DCorLC) const
{
vector<bool> affected(DCorLC.laneSize*5, false);
for(unsigned int i=0; i<runs.size(); i++) {
affected[DCorLC.translateAlongXinT(runs[i].tStart)] = true;
affected[DCorLC.translateAlongXinT(runs[i].tStart + runs[i].length)] = true;
}
unsigned int total = 5*2*runs.size();
for(unsigned int i=0; i<runs.size(); i++) {
for(unsigned int t=runs[i].tStart; t<runs[i].tStart+runs[i].length; t++) {
if (!affected[t % (DCorLC.laneSize*5)])
total += 2;
}
}
return total;
}
unsigned int ParityAsRuns::getLowerBoundTotalActiveRowsUsingOnlyAC(const KeccakFPropagation& DCorLC) const
{
vector<ColumnPosition> xzAC, xzUOC;
for(unsigned int i=0; i<runs.size(); i++) {
unsigned int x, z;
DCorLC.getXandZfromT(DCorLC.translateAlongXinT(runs[i].tStart), x, z);
xzAC.push_back(ColumnPosition(x, z));
DCorLC.getXandZfromT(DCorLC.translateAlongXinT(runs[i].tStart + runs[i].length), x, z);
xzAC.push_back(ColumnPosition(x, z));
}
return getLowerBoundTotalActiveRowsFromACandUOC(DCorLC, xzAC, xzUOC);
}
unsigned int ParityAsRuns::getLowerBoundTotalActiveRows(const KeccakFPropagation& DCorLC) const
{
vector<ColumnPosition> xzAC, xzUOC;
vector<bool> affected(DCorLC.laneSize*5, false);
for(unsigned int i=0; i<runs.size(); i++) {
unsigned int x, z;
DCorLC.getXandZfromT(DCorLC.translateAlongXinT(runs[i].tStart), x, z);
xzAC.push_back(ColumnPosition(x, z));
DCorLC.getXandZfromT(DCorLC.translateAlongXinT(runs[i].tStart + runs[i].length), x, z);
xzAC.push_back(ColumnPosition(x, z));
affected[DCorLC.translateAlongXinT(runs[i].tStart)] = true;
affected[DCorLC.translateAlongXinT(runs[i].tStart + runs[i].length)] = true;
}
for(unsigned int i=0; i<runs.size(); i++) {
for(unsigned int t=runs[i].tStart; t<runs[i].tStart+runs[i].length; t++) {
if (!affected[t % (DCorLC.laneSize*5)]) {
unsigned int x, z;
DCorLC.getXandZfromT(t, x, z);
xzUOC.push_back(ColumnPosition(x, z));
}
}
}
return getLowerBoundTotalActiveRowsFromACandUOC(DCorLC, xzAC, xzUOC);
}
void lookForRunsBelowTargetWeight(const KeccakFPropagation& DCorLC, ostream& out,
unsigned int targetWeight, ParityAsRuns& parity, ProgressMeter& progress, bool verbose)
{
unsigned int lowerBound;
unsigned int weightBoundBasedOnTotalHammingWeight = getBoundOfTotalWeightGivenTotalHammingWeight(DCorLC,
parity.getLowerBoundTotalHammingWeight(DCorLC));
if (weightBoundBasedOnTotalHammingWeight <= targetWeight) {
unsigned int minActiveRows = parity.getLowerBoundTotalActiveRowsUsingOnlyAC(DCorLC);
lowerBound = max(minActiveRows*2, weightBoundBasedOnTotalHammingWeight);
}
else
lowerBound = weightBoundBasedOnTotalHammingWeight;
if (lowerBound <= targetWeight) {
unsigned int thisOneLowerBound = parity.getLowerBoundTotalActiveRows(DCorLC)*2;
if (thisOneLowerBound <= targetWeight) {
vector<RowValue> C, D;
parity.toParityAndParityEffect(DCorLC, C, D);
unsigned int thisOneLowerBoundAgain = getLowerBoundTotalActiveRows(DCorLC, C, D)*2;
if (thisOneLowerBoundAgain <= targetWeight) {
if (verbose) {
displayParity(cout, C, D);
cout << "Lower bound = " << dec << max(thisOneLowerBound, thisOneLowerBoundAgain) << endl;
cout << endl;
}
vector<RowValue> Cmin;
getSymmetricMinimum(C, Cmin);
writeParity(out, Cmin);
}
}
progress.stack("Adding runs to "+parity.display());
for(unsigned int tStart=parity.runs.back().tStart+parity.runs.back().length+1;
tStart<DCorLC.laneSize*5; tStart++) {
unsigned int maxLength = DCorLC.laneSize*5-1-tStart+parity.runs[0].tStart;
for(unsigned int length=1; length<=maxLength; length++) {
Run run;
run.tStart = tStart;
run.length = length;
parity.runs.push_back(run);
lookForRunsBelowTargetWeight(DCorLC, out, targetWeight, parity, progress, verbose);
parity.runs.pop_back();
++progress;
}
}
progress.unstack();
}
}
void lookForRunsBelowTargetWeight(const KeccakFPropagation& DCorLC, ostream& out, unsigned int targetWeight, bool verbose)
{
ProgressMeter progress;
progress.stack("Initial run starting point", 5);
for(unsigned int tStart=0; tStart<5; tStart++) {
progress.stack("Initial run length", DCorLC.laneSize*5-1);
for(unsigned int length=1; length<=DCorLC.laneSize*5-1; length++) {
ParityAsRuns parity;
Run run;
run.tStart = tStart;
run.length = length;
parity.runs.push_back(run);
lookForRunsBelowTargetWeight(DCorLC, out, targetWeight, parity, progress, verbose);
++progress;
}
progress.unstack();
++progress;
}
progress.unstack();
}
|
#include <iostream>
#include <cstdlib>
#include <fstream>
using namespace std;
int reverseNumber(int number)
{
int rev = 0;
while (number > 0)
{
        // multiply rev by 10 and then append the last digit
rev = rev * 10 + number % 10;
// update the number
number /= 10;
}
return rev;
}
bool isPalindrome(int number)
{
return reverseNumber(number) == number;
}
int main()
{
int num;
cout << "ENTER AN INTEGER NUMBER TO CHECK IF IS IS A PALIDROME INTEGER\n";
cout << "INTEGER: \t";
cin >> num;
    if (isPalindrome(num))
{
cout << num << " is a palindrome number!!" << endl;
}
else
{
cout << num << " is not a palindrome number!!" << endl;
}
return 0;
}
|
/* This basically bins and then flushes any outstanding full-screen
* clears.
*
* TODO: fast path for fullscreen clears and no triangles.
*/
static boolean
execute_clears( struct lp_setup_context *setup )
{
LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
return begin_binning( setup );
} |
/**
* Created with IntelliJ IDEA.
* User: neil
* Date: 18/07/2013
* Time: 16:04
* To change this template use File | Settings | File Templates.
*/
public class StringKeyHashMapTest {
@Test
    public void shouldDoSimpleStuff() throws Exception {
StringKeyHashMap map = new StringKeyHashMap();
map.put("hello", "helloValue");
assertTrue(map.containsKey("hello"));
assertEquals("helloValue", map.get("hello"));
map.put("hello2", "helloValue2");
map.put("hello2", "helloValue2");
assertTrue(map.containsKey("hello2"));
assertEquals("helloValue2", map.get("hello2"));
}
} |
/**
* Adds a command to this provider.
*
* @param command the command to add
*/
public void add(Command command) {
for (var alias : command.getAliases()) {
commandMap.put(alias, command);
}
} |
<reponame>Doomsdayrs/android_packages_apps_GmsCore
/*
* SPDX-FileCopyrightText: 2020, microG Project Team
* SPDX-License-Identifier: Apache-2.0
*/
package com.google.firebase.auth.api.internal;
import com.google.firebase.auth.ActionCodeSettings;
import org.microg.safeparcel.AutoSafeParcelable;
public class SendGetOobConfirmationCodeEmailAidlRequest extends AutoSafeParcelable {
@Field(1)
public String email;
@Field(2)
public ActionCodeSettings settings;
@Field(3)
public String tenantId;
public static final Creator<SendGetOobConfirmationCodeEmailAidlRequest> CREATOR = new AutoCreator<>(SendGetOobConfirmationCodeEmailAidlRequest.class);
}
|
/** Arranges the first three vertices in order of increasing x using bubble sort.*/
static private void xSort(ScreenVertex[] v) {
if (v[1].x < v[0].x) swap(v, 0, 1);
if (v[2].x < v[1].x) swap(v, 1, 2);
if (v[1].x < v[0].x) swap(v, 0, 1);
} |
/**
* read data from serial port
*
* @param serialPort : The SerialPort object to which the connection is currently established
* @return Retrieved data
*/
public static byte[] readFromPort(SerialPort serialPort) {
InputStream in = null;
byte[] bytes = {};
try {
in = serialPort.getInputStream();
byte[] readBuffer = new byte[1];
int bytesNum = in.read(readBuffer);
while (bytesNum > 0) {
bytes = ArrayUtils.concat(bytes, readBuffer);
bytesNum = in.read(readBuffer);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (in != null) {
in.close();
in = null;
}
} catch (IOException e) {
e.printStackTrace();
}
}
return bytes;
} |
Why the Atlanta Falcons Will Not Make the Playoffs by Sam Kweon
Here at FanSided and Blogging Dirty, we hope to keep you entertained with off-topic material! And what's more fun than creating an Atlanta Falcons logo pancake and enjoying the best breakfast you've ever had in your life? A very talented person at FanSided made tutorials.
Below is a quick link to the step-by-step directions for creating the perfect Falcons pancake.
Now, here is a link to the other pancakes from the NFC South. No way those New Orleans Saints, Carolina Panthers, or the lowly Tampa Bay Buccaneers beat us in this contest with their lame logos, but go check theirs out and compare.
And when you have checked out the logos, go vote on the best pancake logo in the NFL!
In other news, it was a tough loss for the Atlanta Falcons on Sunday. But since they are only a game away from the top of the NFC South, there is still plenty of room for optimism. The less encouraging fact is that they face one of the best teams in the league, the Arizona Cardinals, next Sunday. It won't be easy, but let's go Atlanta Falcons, get the win! |
<filename>src/plugin/spacy.rs
// # type Corpus implements Node {
// # id: ID!,
// # type: KSpacePointer!,
// # }
// # type Document implements Node {
// # id: ID!,
// # # Encourage reuse of corpus
// # corpuses: [Corpus]
// # }
// # type Paragraph implements Node {
// # id: ID!,
// # //
// # documents: [Document]
// # }
// # type Sentence implements Node {
// # id: ID!,
// # }
// # type NounPhrase implements Node {
// # id: ID!,
// # }
// # type VerbPhrase implements Node {
// # id: ID!,
// # }
// # type Verb implements Node {
// # id: ID!,
// # }
// # type Determiner implements Node {
// # id: ID!,
// # }
// # type Noun implements Node {
// # id: ID!,
// # }
|
// Set quit variable when SIGINT is received so we can do proper cleanup
void quitSignal(int unused) {
(void)unused;
quit = 1;
printf("\nCaught kill signal, quitting...\n");
} |
/*
NASA/TRMM, Code 910.1.
This is the TRMM Office Radar Software Library.
Copyright (C) 1996, 1997
<NAME>
Space Applications Corporation
Vienna, Virginia
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this library; if not, write to the Free
Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* Radar routines coded in this file:
*
* RSL_radar_verbose_on();
* RSL_radar_verbose_off();
* Radar *RSL_new_radar(int nvolumes);
* void RSL_free_radar(Radar *r);
* Radar *RSL_clear_radar(Radar *r);
* Volume *RSL_get_volume(Radar *r, int type_wanted);
* Radar *RSL_wsr88d_to_radar(char *infile, unsigned int data_mask);
*
* Internal routines:
* print_vect(float v[], int istart, int istop);
* void radar_load_date_time(Radar *radar);
* int wsr88d_load_sweep_into_volume(Wsr88d_sweep ws,
* Volume *v, int nsweep, unsigned int vmask);
*
* Radar routines not coded in this file:
*
* Radar *RSL_read_radar(char *infile);
* int RSL_write_radar(Radar *radar, char *outfile);
* Radar *RSL_clear_radar(Radar *r);
* void RSL_radar_to_uf(Radar *r, char *outfile);
* Radar *RSL_uf_to_radar(char *infile);
*
* See the file radar.ez and version.notes for more detailed documentation.
*
* All routines herein coded, unless otherwise stated, by:
* <NAME>
* Space Applications Corporation
*
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "rsl.h"
void RSL_print_version()
{
printf("RSL version %s.\n", RSL_VERSION_STR);
}
/* Debug printing global variable: radar_verbose_flag */
int radar_verbose_flag = 0;
void RSL_radar_verbose_on()
{
radar_verbose_flag = 1;
}
void RSL_radar_verbose_off()
{
radar_verbose_flag = 0;
}
void print_vect(float v[], int istart, int istop)
{
int i;
for (i=istart; i<=istop; i++)
fprintf(stderr,"v[%d] = %f\n", i, v[i]);
}
/**********************************************************************/
/* */
/* RSL_get_nyquist_from_radar */
/* */
/**********************************************************************/
float RSL_get_nyquist_from_radar(Radar *radar)
{
/* Find a velocity volume.
* Find first sweep in that volume.
* Find first ray in that sweep.
* Return the nyquist velocity.
*
* This code required for loading nyquist value in non-velocity
* volumes; UF output is affected by this in a good way.
*/
Volume *vol;
Ray *ray;
if (radar == NULL) return 0.0;
if (radar->h.nvolumes <= VR_INDEX) return 0.0;
vol = radar->v[VR_INDEX];
ray = RSL_get_first_ray_of_volume(vol);
if (ray == NULL) return 0.0;
return ray->h.nyq_vel;
}
/**********************************************************************/
/* */
/* done 3/30 new_radar() */
/* done 3/30 free_radar() */
/* done 4/21 clear_radar() */
/* */
/**********************************************************************/
Radar *RSL_new_radar(int nvolumes)
{
Radar *r;
r = (Radar *) calloc(1, sizeof(Radar));
r->v = (Volume **) calloc(nvolumes, sizeof(Volume *));
r->h.nvolumes = nvolumes;
return r;
}
void RSL_free_radar(Radar *r)
{
int i;
/* Chase down all the pointers and free everything in sight. */
if (r) {
for (i=0; i<r->h.nvolumes; i++)
RSL_free_volume(r->v[i]);
if (r->v) free(r->v);
free(r);
}
}
Radar *RSL_clear_radar(Radar *r)
{
int i;
if (r == NULL) return r;
for (i=0; i<r->h.nvolumes; i++)
RSL_clear_volume(r->v[i]);
return r;
}
/**********************************************************************/
/* */
/* done 8/26 radar_load_date_time */
/* */
/**********************************************************************/
void radar_load_date_time(Radar *radar)
{
/* Search for the first existing ray of the first sweep of the first
* volume; steal that information.
*/
int i;
Ray *first_ray;
radar->h.month = 0;
radar->h.day = 0;
radar->h.year = 0;
radar->h.hour = 0;
radar->h.minute= 0;
radar->h.sec= 0.0;
first_ray = NULL;
for (i=0; i<MAX_RADAR_VOLUMES; i++) {
if (radar->v[i] != NULL) {
first_ray = RSL_get_first_ray_of_volume(radar->v[i]);
if (first_ray) {
radar->h.month = first_ray->h.month;
radar->h.day = first_ray->h.day;
radar->h.year = first_ray->h.year;
radar->h.hour = first_ray->h.hour;
radar->h.minute= first_ray->h.minute;
radar->h.sec = first_ray->h.sec;
return;
}
}
}
}
/**********************************************************************/
/* */
/* done 3/30 Volume *RSL_get_volume */
/* */
/**********************************************************************/
Volume *RSL_get_volume(Radar *r, int type_wanted)
{
return r->v[type_wanted];
}
|
/**
* This Xml Interface allows parsing the annotation class instance from a respective XML node. Note that
* this node must be the root node for the relevant annotation class, and as such is found within the
* XML document of an annotatable text document.
*
* @see TextModelDataXmlInterface
* @see AnnotationClass
*/
public class AnnotationClassXmlInterface implements XmlNodeParserInterface<AnnotationClass> {
@Override
public AnnotationClass parseXml(Node node) {
XmlNodeWrapper wrappedNode = new XmlNodeWrapper(node);
NamedNodeMap attributes = node.getAttributes();
List<Integer> rgb = Arrays.stream(attributes.getNamedItem("color")
.getTextContent().replace(" ", "").split(","))
.map(Integer::parseInt).collect(Collectors.toList());
Color color = new Color(Display.getCurrent(), rgb.get(0), rgb.get(1), rgb.get(2));
String description = wrappedNode.findChild(KEY_PROFILE_ANNOTATIONCLASS_DESCRIPTION_ELEMENT)
.map(Node::getTextContent).orElse(null);
AnnotationClass annotationClass = new AnnotationClass(
attributes.getNamedItem(KEY_PROFILE_ANNOTATIONCLASS_ATTR_ID).getTextContent(),
attributes.getNamedItem(KEY_PROFILE_ANNOTATIONCLASS_ATTR_NAME).getTextContent(),
color,
description
);
wrappedNode.forEach(n -> {
if (n.getNodeName().equals(KEY_PROFILE_ANNOTATIONCLASS_METADATA_ELEMENT)) {
annotationClass.metaData.put(n.getAttributes()
.getNamedItem(KEY_PROFILE_ANNOTATIONCLASS_METADATA_ATTR_NAME)
.getTextContent(), n.getTextContent());
}
});
return annotationClass;
}
} |
<reponame>MancerBr/tranningApp<gh_stars>0
import { createFeatureSelector, createSelector } from '@ngrx/store';
import { authFeatureKey, AuthState } from './auth.reducers';
export const selectAuth = createFeatureSelector<AuthState>(authFeatureKey);
export const getAuthLoading = createSelector(
selectAuth,
(state: AuthState) => state && state.loading,
);
export const getAuthSuccess = createSelector(
selectAuth,
(state: AuthState) => state && state.success,
);
export const getAuthError = createSelector(
selectAuth,
(state: AuthState) => state && state.error,
);
export const getAccessToken = createSelector(
selectAuth,
(state: AuthState) => state && state.access_token,
);
|
def xor_string(string: str) -> str:
result = ""
for c in string:
result += chr(ord(c) ^ len(string))
return result |
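A quick usage note on xor_string: the key is simply len(string), the output has the same length as the input, and XOR is its own inverse, so applying the function twice returns the original string. The check below is a sketch, not part of the original snippet.
# Round trip: XOR with the same key (the unchanged length) restores the input.
original = "hello"
obfuscated = xor_string(original)
assert xor_string(obfuscated) == original
# Note: this is lightweight obfuscation, not encryption; the key can be read
# off the ciphertext length alone.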
/***
* Removes the given edge from the graph.
*
* @param from vertex ID for the source vertex.
* @param to vertex ID for the destination vertex.
*/
public void removeEdge(int from, int to) {
if (adjList == null) {
return;
}
HashMap<Integer, E> adjListV = adjList.get(from);
if (adjListV == null) {
return;
}
E removed = adjListV.remove(to);
if (removed == null) {
return;
}
edgeSize--;
if (incidentVertices == null) {
return;
}
Set<Integer> incidentNodeTo = incidentVertices.get(to);
if (incidentNodeTo == null) {
return;
}
incidentNodeTo.remove(from);
} |
//----------------------------------------------------------------------------
//
// Project : Call To Power 2
// File type : C++ header file
// Description : the Goal motherclass
// Id : $Id$
//
//----------------------------------------------------------------------------
//
// Disclaimer
//
// THIS FILE IS NOT GENERATED OR SUPPORTED BY ACTIVISION.
//
// This material has been developed at apolyton.net by the Apolyton CtP2
// Source Code Project. Contact the authors at <EMAIL>.
//
//----------------------------------------------------------------------------
//
// Compiler flags
//
// - None
//
//----------------------------------------------------------------------------
//
// Modifications from the original Activision code:
//
// - Resolved ambiguous sqrt calls.
// - Added the Target Position (X,Y) to The ArmyText (the text that is
// shown on the screen in Debug mode, with the command 'armytext') - calvitix
// update : it now show if the Army is grouping or going to board...
// - Added an attribute for the goal subtask (used by armytext display)
// - Changed the task forces for goals (based on the objective's threat)
// - Added a Debug Log (activated with k_dbg_scheduler_all) to see the goal priority computing
// (raw priority, plus value of each modifier)
// - Added an Ungroup condition that can be associated to goals (as it exists RallyFirst) - Calvitix
// - Added conditions for Trespassing units (to favor their Retreat) - Calvitix
// - Added conditions for wounded units : IsWoundedbonus (see goals.txt) - Calvitix
// - Added conditions for territory owner and IsVisible (see goals.txt) - Calvitix
// - Correct the determination of Empire Center and foreign Empire center - Calvitix
// - Invalid goals if in territory with nontrespassing treaty - Calvitix
// - Consider a goal with max armies engaged as satisfied - the limitation of
// army size : we cannot form a group with more armies than the max (can
// disturb the goals with RallyFirst() - Calvitix
// - Changes the const attribute for Compute_Matching_Value (Raw_Priority will
// be changed on wounded case) - Calvitix
// - Linux support modifications + cleanup.
// - Allow incursion permission to stealth units
// - Rollback the determination of check_dest (problem with special units that couldn't find
// their path to goal anymore)
// - Added other conditions to determine the RETREAT goals (and set max of 2 units per city
// for that goals, hardcoded (to not depend on threat)
// - Forbid to army with settlers to perform ATTACK or SEIGE goals
// - Allow Units that are grouping to both move (if they are far enough)
// - Corrected condition to give unowned bonus in Compute_Raw_Priority, now it is
// given if the according territory has owner -1 instead of 0
// - Feb. 21st 2005 <NAME>
// - Removed unnecessary reinitialization in Compute_Needed_Troop_Flow for trade
// routes and tileimps. - Feb. 21st 2005 <NAME>
// - Started to reimplement the refuel order. - May 7th 2005 <NAME>
// - Removed .NET warnings - May 7th 2005 <NAME>
// - Initialized local variables. (Sep 9th 2005 <NAME>)
// - Added checks and Asserts to try to prevent crashes.
// - Improved AI debug goal report. (Jan 2nd 2007 <NAME>)
// - Matching value computation now uses the distance measurement from Agent. (25-Jan-2008 <NAME>)
// - Added goody hut goal bonuses, so that there is a bonus if a goody hut is
// in vision range of the agent, whether Barbarians could pop up from
// the goody hut, and whether the goody-hut-opening army can defend against such Barbarians. (25-Jan-2008 <NAME>)
// - Fixed Goal subtask handling. (26-Jan-2008 <NAME>)
// - Improved transporter cargo loading. (30-Jan-2008 <NAME>)
// - The AI does not use settlers for attack anymore. (3-Feb-2008 <NAME>)
// - The AI now uses the closest transport to the unit transported, even if
// there is only one army to be transported. (3-Feb-2008 Martin Gühmann)
// - Use more than one transporter if the goal needs more than one. (8-Feb-2008 <NAME>hmann)
// - Standardized army strength computation. (30-Apr-2008 <NAME>hmann)
// - AI force matches are now based on attack, defense, ranged, land bombard,
// sea bombard, and air bombard. (30-Apr-2008 <NAME>)
// - USE_LOGGING now works in a final version. (30-Jun-2008 <NAME>)
// - Redesigned AI, so that the matching algorithm is now a greedy algorithm. (13-Aug-2008 <NAME>)
// - Now the goals are used for the matching process, the goal match value
// is the average match value of the matches needed for the goal.
// - Strength calcualtion for offensive goals is now based on the average of
// grid values and target position vales. For Barbarian targets it is
// only the position strength. (13-Aug-2008 <NAME>hmann)
// - Slavers now go to cities with enough population. (13-Aug-2008 Martin Gühmann)
// - Rally is now complete if there is only one stack left with less than
// twelve units. (13-Aug-2008 <NAME>)
// - Added new rally algorithm. (13-Aug-2008 <NAME>)
// - Slavers don't go to cities with city walls. (06-Sep-2008 <NAME>)
// - Fixed unit garrison assignment. (23-Jan-2009 <NAME>)
// - Merged in CTPGoal, removed virtual functions, for design and speed
// improvement. (28-Jan-2009 <NAME>)
// - Changed occurances of UnitRecord::GetMaxHP to
// UnitData::CalculateTotalHP. (Aug 3rd 2009 Maq)
//
//----------------------------------------------------------------------------
#include "c3.h"
#include "Goal.h"
using namespace std;
const Utility Goal::BAD_UTILITY = -99999999;
const Utility Goal::MAX_UTILITY = 99999999;
#include "ConstRecord.h"
#include "GoalRecord.h"
#include "OrderRecord.h"
#include "StrategyRecord.h"
#include "WonderRecord.h"
#include "advanceutil.h"
#include "terrainutil.h"
#include "wonderutil.h"
#include "squad_Strength.h"
#include "agent.h"
#include "ArmyPool.h"
#include "ArmyData.h"
#include "UnitData.h"
#include "UnitPool.h"
#include "CityInfluenceIterator.h"
#include "ctpai.h"
#include "Diplomat.h"
#include "Barbarians.h"
#include "mapanalysis.h"
#include "settlemap.h"
#include "AgreementMatrix.h"
#include "CityAstar.h"
#include "debugassert.h"
#include "gstypes.h"
#include "gfx_options.h"
#include "World.h"
#include "ctpaidebug.h"
extern CityAstar g_city_astar;
Goal::Goal()
:
m_current_needed_strength (),
m_current_attacking_strength (),
m_matches (),
m_agents (),
m_playerId (PLAYER_UNASSIGNED),
m_raw_priority (BAD_UTILITY),
m_combinedUtility (0),
m_target_pos (),
m_target_city (),
m_target_army (),
m_sub_task (SUB_TASK_GOAL),
m_goal_type (GOAL_TYPE_NULL),
m_needs_sorting (false)
{
}
Goal::Goal(const Goal &goal)
:
m_current_needed_strength (goal.m_current_needed_strength),
m_current_attacking_strength (0), // Nothing since the agent list is not copied
m_matches (), // Contains refernces that are invalid after copy
m_agents (), // Agents are just pointers, which are changed on copy
m_playerId (goal.m_playerId),
m_raw_priority (goal.m_raw_priority),
m_combinedUtility (goal.m_combinedUtility),
m_target_pos (goal.m_target_pos),
m_target_city (goal.m_target_city),
m_target_army (goal.m_target_army),
m_sub_task (goal.m_sub_task),
m_goal_type (goal.m_goal_type),
m_needs_sorting (goal.m_needs_sorting)
{
}
Goal::~Goal()
{
// Nothing to delete, references only
}
Goal& Goal::operator= (const Goal &goal)
{
m_goal_type = goal.m_goal_type;
m_playerId = goal.m_playerId;
m_raw_priority = goal.m_raw_priority;
m_current_needed_strength = goal.m_current_needed_strength;
m_current_attacking_strength = goal.m_current_attacking_strength;
m_matches = goal.m_matches;
m_agents = goal.m_agents;
m_combinedUtility = goal.m_combinedUtility;
m_needs_sorting = goal.m_needs_sorting;
m_target_pos = goal.m_target_pos;
m_target_city = goal.m_target_city;
m_target_army = goal.m_target_army;
m_sub_task = goal.m_sub_task;
Assert(false); // Hopefully not used
return *this;
}
bool Goal::operator == (const Goal & rval) const
{
return (m_playerId == rval.m_playerId) &&
(m_goal_type == rval.m_goal_type) &&
(m_target_army == rval.m_target_army) &&
(m_target_city == rval.m_target_city) &&
(m_target_pos == rval.m_target_pos);
}
bool Goal::Is_Satisfied() const
{
if(m_agents.size() == 0)
return false;
// Limitation of army size: Cannot form a group with more
// armies than the max (without that limitation, it can
// disturb the goals with RallyFirst() - Calvitix
// if (m_current_attacking_strength.Get_Agent_Count() == k_MAX_ARMY_SIZE)
// return true;
if(g_theGoalDB->Get(m_goal_type)->GetNeverSatisfied())
{
// return false; // Another problem to be fixed but this has to wait, and not here
}
// if(m_current_needed_strength > m_current_attacking_strength)
return m_current_attacking_strength.HasEnough(m_current_needed_strength);
}
void Goal::Commit_Agent(const Agent_ptr & agent)
{
#ifdef _DEBUG_SCHEDULER
for
(
Agent_List::iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Assert((*agent_iter) != agent);
}
#endif
MapPoint dest_pos = Get_Target_Pos(); // Get cheap target position first, no need for pillage checking, yet.
MapPoint curr_pos = agent->Get_Pos();
if(agent->IsNeededForGarrison() && dest_pos != curr_pos)
{
return;
}
Squad_Strength strength = agent->Compute_Squad_Strength();
strength += m_current_attacking_strength;
double oldMissingStrength = m_current_needed_strength.GetTotalMissing(m_current_attacking_strength);
double newMissingStrength = m_current_needed_strength.GetTotalMissing(strength);
if(
oldMissingStrength > newMissingStrength
||
(
Needs_Transporter() // Add function
&& strength.Get_Transport() > m_current_attacking_strength.Get_Transport()
)
){
m_current_attacking_strength.Add_Agent_Strength(agent);
m_agents.push_back(agent);
agent->Set_Goal(this);
Assert(m_current_attacking_strength.Get_Agent_Count() >= m_agents.size());
}
}
void Goal::Rollback_Agent(Agent_ptr agent_ptr)
{
if(m_agents.size() == 0)
return;
Agent_List::iterator next_agent_iter;
for
(
next_agent_iter = m_agents.begin();
next_agent_iter != m_agents.end();
++next_agent_iter
)
{
if(agent_ptr == *next_agent_iter)
{
break;
}
}
Assert(next_agent_iter != m_agents.end());
if(next_agent_iter == m_agents.end())
{
return;
}
Rollback_Agent(next_agent_iter);
}
void Goal::Rollback_Agent(Agent_List::iterator & agent_iter)
{
Agent_ptr agent_ptr = *agent_iter;
Assert(agent_ptr);
if(!agent_ptr->Get_Is_Dead()
&& g_player[m_playerId]
&& g_player[m_playerId]->IsRobot()
){
agent_ptr->ClearOrders();
}
m_current_attacking_strength.Remove_Agent_Strength(agent_ptr);
agent_iter = m_agents.erase(agent_iter);
Assert(m_current_attacking_strength.Get_Agent_Count() >= m_agents.size());
agent_ptr->Set_Goal(NULL);
}
bool Goal::Can_Be_Executed() const
{
bool can_be_executed = false;
Agent_List::const_iterator agent_iter;
for
(
agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
can_be_executed |= (*agent_iter)->Get_Can_Be_Executed();
}
return can_be_executed;
}
void Goal::Set_Can_Be_Executed(const bool & can_be_executed)
{
Agent_List::iterator agent_iter;
for
(
agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
(*agent_iter)->Set_Can_Be_Executed(can_be_executed);
}
}
void Goal::Set_Needs_Transporter(const bool needs_transporter)
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
match_iter->Set_Needs_Transporter(needs_transporter);
}
}
void Goal::Set_Needs_Transporter(Agent_ptr agent_ptr)
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(agent_ptr == match_iter->Get_Agent())
match_iter->Set_Needs_Transporter(true);
}
}
void Goal::Set_Type(const GOAL_TYPE & type)
{
m_goal_type = type;
}
void Goal::Set_Raw_Priority(const Utility & priority)
{
m_raw_priority = priority;
}
bool Goal::Get_Is_Appropriate() const
{
return m_matches.size() > 0;
}
bool Goal::Satisfied_By(const Squad_Strength & army_strength) const
{
Squad_Strength needed_strength = m_current_needed_strength;
needed_strength -= m_current_attacking_strength;
if ( army_strength.Get_Transport() > 0)
{
if ( needed_strength.Get_Transport() > 0)
return true;
}
    // Check if the army has too many units to fit in one tile - Calvitix
if (m_current_attacking_strength.Get_Agent_Count() +
army_strength.Get_Agent_Count() > k_MAX_ARMY_SIZE)
return false;
if ((needed_strength.Get_Agent_Count() > 0) &&
(army_strength.Get_Agent_Count() > 0))
return true;
if ((needed_strength.Get_Attack() > 0) &&
(army_strength.Get_Attack() > 0))
return true;
if ((needed_strength.Get_Defense() > 0) &&
(army_strength.Get_Defense() > 0) )
return true;
if ((needed_strength.Get_Defenders() > 0) &&
(army_strength.Get_Defenders() > 0))
return true;
if ((needed_strength.Get_Ranged() > 0) &&
(army_strength.Get_Ranged() > 0))
return true;
if ((needed_strength.Get_Ranged_Units() > 0) &&
(army_strength.Get_Ranged_Units() > 0))
return true;
return false;
}
bool Goal::Needs_Transporter() const
{
Assert(m_current_attacking_strength.Get_Transport() >= 0);
return m_current_needed_strength.Get_Transport() - m_current_attacking_strength.Get_Transport() > 0;
}
const Squad_Strength Goal::Get_Strength_Needed() const // Rename to missing strength
{
Squad_Strength needed_strength = m_current_needed_strength;
needed_strength -= m_current_attacking_strength;
return needed_strength;
}
Utility Goal::Compute_Matching_Value(Plan_List & matches, const bool update)
{
AI_DPRINTF
(
k_DBG_SCHEDULER_ALL,
m_playerId,
m_goal_type,
-1,
(
"\tCompute Matching Value for goal: %s, raw_match: %i (%s)\n",
g_theGoalDB->Get(m_goal_type)->GetNameText(),
m_raw_priority,
(g_theWorld->HasCity(Get_Target_Pos()) ? g_theWorld->GetCity(Get_Target_Pos()).GetName() : "field")
)
);
if(this->Get_Invalid())
{
m_combinedUtility = Goal::BAD_UTILITY;
return m_combinedUtility;
}
sint32 count = 0;
for
(
Plan_List::iterator match_iter = matches.begin();
match_iter != matches.end();
++match_iter
)
{
// Maybe this also needs to be handled.
if(!match_iter->Plan_Is_Needed_And_Valid())
{
Assert(false);
continue;
}
Utility matchUtility = match_iter->Compute_Matching_Value(this);
if(update)
{
if(matchUtility > Goal::BAD_UTILITY)
{
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1,
("\t\t[%3d] match = %d %s\n", count, match_iter->Get_Matching_Value(), g_theGoalDB->Get(m_goal_type)->GetNameText()));
}
}
++count;
}
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1,
("\t\tThere were %3d matches\n", count));
Sort_Matches(matches);
return Recompute_Matching_Value(matches, update);
}
Utility Goal::Recompute_Matching_Value(Plan_List & matches, const bool update, const bool show_strength)
{
if(!update)
{
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1,
("\tCompute Matching Value for goal: %x, %s, raw_match: %i\n", this, g_theGoalDB->Get(m_goal_type)->GetNameText(), m_raw_priority));
}
const GoalRecord * goal_record = g_theGoalDB->Get(m_goal_type);
Utility combinedUtility = 0;
sint32 count = 0;
Squad_Strength projected_strength;
for
(
Plan_List::iterator
match_iter = matches.begin();
match_iter != matches.end();
++match_iter
)
{
if(!match_iter->All_Unused_Or_Used_By_This(this))
continue;
if(match_iter->Get_Needs_Cargo())
continue;
if(!projected_strength.HasEnough(m_current_needed_strength)
// || goal_record->GetNeverSatisfied() // Should be considered in Commit_Agents
){
Utility matchUtility = match_iter->Get_Matching_Value();
if(matchUtility > Goal::BAD_UTILITY)
{
projected_strength += match_iter->Get_Agent()->Get_Squad_Strength();
combinedUtility += matchUtility;
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1,
("\t\t[%3d] match = %d %s\n", count, matchUtility, g_theGoalDB->Get(m_goal_type)->GetNameText()));
++count;
}
else
{
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1,
("\t\t[%3d]First match with bad utility for goal %s, stop matching, last match in list: %i\n", count, g_theGoalDB->Get(m_goal_type)->GetNameText(), matches.rbegin()->Get_Matching_Value()));
if(count == 0)
{
Log_Debug_Info(k_DBG_SCHEDULER_ALL);
}
break;
}
}
else
{
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1,
("\t\t[%3d] Enough ressources found for goal %s\n", count, g_theGoalDB->Get(m_goal_type)->GetNameText()));
break;
}
}
#if defined(_DEBUG) || defined(USE_LOGGING)
if(CtpAiDebug::DebugLogCheck(m_playerId, -1, -1))
{
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1, ("\n"));
projected_strength .Log_Debug_Info(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, "The Projected Strength: ");
m_current_needed_strength .Log_Debug_Info(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, "The Needed Strength: ");
Squad_Strength strength;
strength.Set_Pos_Strength(Get_Target_Pos());
strength .Log_Debug_Info(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, "The Target Pos Strength: ");
Squad_Strength grid_strength;
grid_strength.Set_Enemy_Grid_Strength(Get_Target_Pos(), m_playerId);
grid_strength .Log_Debug_Info(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, "The Target Grid Strength:");
}
#endif
if
(
(
!projected_strength.HasEnough(m_current_needed_strength)
&& !goal_record->GetExecuteIncrementally()
)
|| count == 0
)
{
combinedUtility = Goal::BAD_UTILITY;
}
else
{
combinedUtility /= count;
}
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1,("\tMatch value combined utility: %i, raw priority: %i\n", combinedUtility, m_raw_priority));
if(update)
{
m_combinedUtility = combinedUtility;
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1, ("\t\n"));
return Get_Matching_Value();
}
else
{
AI_DPRINTF(k_DBG_SCHEDULER_ALL, m_playerId, m_goal_type, -1, ("\t\n"));
return ( !g_theGoalDB->Get(m_goal_type)->GetIsGlobalGoal()
|| combinedUtility == Goal::BAD_UTILITY) ?
combinedUtility : m_raw_priority;
}
}
Utility Goal::Get_Matching_Value() const
{
return ( !g_theGoalDB->Get(m_goal_type)->GetIsGlobalGoal()
|| m_combinedUtility == Goal::BAD_UTILITY) ?
m_combinedUtility : m_raw_priority;
}
void Goal::Set_Matching_Value(Utility combinedUtility)
{
m_combinedUtility = combinedUtility;
}
bool Goal::Add_Match(const Agent_ptr & agent, const bool update_match_value, const bool needsCargo)
{
#if defined(_DEBUG)
for
(
Plan_List::iterator plan_test_iter = m_matches.begin();
plan_test_iter != m_matches.end();
++plan_test_iter
)
{
Assert(plan_test_iter->Get_Agent() != agent);
}
#endif
if(!agent->Get_Is_Dead())
{
Plan the_match(agent, needsCargo);
if(update_match_value)
{
the_match.Compute_Matching_Value(this);
}
m_matches.push_back(the_match);
m_needs_sorting = true;
return true;
}
else
{
return false;
}
}
bool Goal::CanGoalBeReevaluated() const
{
GoalRecord const * goalRecord = g_theGoalDB->Get(m_goal_type);
Assert(goalRecord);
return goalRecord && !goalRecord->GetNoRollback();
}
bool Goal::Commited_Agents_Need_Orders() const
{
for
(
Plan_List::const_iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(match_iter->Get_Agent()->Has_Goal(this))
{
if(match_iter->Get_Agent()->Get_Army()->NumOrders() <= 0)
return true;
}
}
return false;
}
void Goal::Rollback_All_Agents()
{
if(m_agents.size() == 0)
return;
Agent_List::iterator agent_iter;
for
(
agent_iter = m_agents.begin();
agent_iter != m_agents.end();
Rollback_Agent(agent_iter)
)
{
}
Assert(m_current_attacking_strength.NothingNeeded());
}
void Goal::Commit_Agents()
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(match_iter->Get_Matching_Value() <= Goal::BAD_UTILITY)
{
break;
}
else if(Is_Satisfied() || Get_Totally_Complete())
{
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1,
("\t\tNO AGENTS COMMITTED: (goal: %x agent: %x, id: 0%x)\n", this, match_iter->Get_Agent(), match_iter->Get_Agent()->Get_Army().m_id));
break;
}
else
{
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1,
("\t\tAGENTS CAN BE COMMITTED: (goal: %x agent: %x, id: 0%x)\n", this, match_iter->Get_Agent(), match_iter->Get_Agent()->Get_Army().m_id));
if(!match_iter->Get_Needs_Cargo())
{
match_iter->Commit_Agent_Common(this);
}
}
}
if(Get_Agent_Count() > 0)
{
Log_Debug_Info(
(Is_Satisfied() || Is_Execute_Incrementally()) ?
k_DBG_SCHEDULER :
k_DBG_SCHEDULER_DETAIL
);
}
}
void Goal::Commit_Transport_Agents()
{
for
(
Plan_List::iterator match_iter = m_matches.begin(); // Maybe resort the matches, by agents that need a transporter and their distance
match_iter != m_matches.end();
++match_iter
)
{
if(match_iter->Get_Matching_Value() <= Goal::BAD_UTILITY)
{
break;
}
else if(!Needs_Transporter() || Get_Totally_Complete())
{
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1,
("\t\tNO TRANSPORT AGENTS COMMITTED: (goal: %x agent: %x, id: 0%x)\n", this, match_iter->Get_Agent(), match_iter->Get_Agent()->Get_Army().m_id));
break;
}
else if(match_iter->Get_Cannot_Be_Used())
{
break;
}
else if(match_iter->Get_Needs_Cargo())
{
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1,
("\t\tTRANSPORT AGENTS COMMITTED: (goal: %x agent: %x, id: 0%x)\n", this, match_iter->Get_Agent(), match_iter->Get_Agent()->Get_Army().m_id));
if(match_iter->Get_Agent()->Get_Army()->CanTransport())
{
match_iter->Commit_Agent_Common(this);
}
}
}
}
void Goal::Remove_Matches()
{
Rollback_All_Agents();
m_matches.clear();
}
void Goal::Remove_Match(const Agent_ptr & agent)
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(agent == match_iter->Get_Agent())
{
if(agent->Has_Goal(this))
{
Rollback_Agent(agent);
}
m_matches.erase(match_iter);
return;
}
}
}
void Goal::Rollback_Emptied_Transporters()
{
if(m_agents.size() == 0)
return;
Agent_List::iterator agent_iter;
for
(
agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = *agent_iter;
const MapPoint pos = agent_ptr->Get_Target_Pos();
MapPoint goalPos(-1,-1);
if(Get_Target_Army().m_id == 0 || Get_Target_Army().IsValid())
{
goalPos = Get_Target_Pos();
}
if(pos == goalPos)
{
if(!Pretest_Bid(agent_ptr, goalPos))
{
AI_DPRINTF(k_DBG_SCHEDULER, agent_ptr->Get_Army()->GetOwner(), m_goal_type, -1,
("\t\tTransporter not needed anymore, removing from goal\n"));
Rollback_Agent(agent_iter); // increases the agent iterator
--agent_iter;
}
}
}
}
bool Goal::Has_Agent_And_Set_Needs_Cargo(Agent* agent)
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(agent == match_iter->Get_Agent())
{
match_iter->Set_Needs_Cargo(true);
return true;
}
}
return false;
}
bool Goal::Needs_Cargo(Agent* agent)
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(agent == match_iter->Get_Agent())
{
return match_iter->Get_Needs_Cargo();
}
}
return false;
}
bool Goal::Cannot_Be_Used(Agent* agent)
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(agent == match_iter->Get_Agent())
{
return match_iter->Get_Cannot_Be_Used();
}
}
return false;
}
void Goal::Set_Cannot_Be_Used(Agent* agent, bool cannotBeUsed)
{
for
(
Plan_List::iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(agent == match_iter->Get_Agent())
{
match_iter->Set_Cannot_Be_Used(cannotBeUsed);
}
}
}
void Goal::Recompute_Current_Attacking_Strength()
{
m_current_attacking_strength = Squad_Strength(0);
for
(
Agent_List::iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
m_current_attacking_strength += (*agent_iter)->Get_Squad_Strength();
}
}
Squad_Strength Goal::Compute_Current_Strength()
{
Squad_Strength strength;
for
(
Agent_List::iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
strength += (*agent_iter)->Get_Squad_Strength();
}
return strength;
}
void Goal::Sort_Matches(Plan_List & matches)
{
matches.sort(greater< Plan >());
m_needs_sorting = false;
}
void Goal::Sort_Matches_If_Necessary()
{
if(m_needs_sorting)
{
Sort_Matches(m_matches);
}
}
void Goal::Set_Target_Pos(const MapPoint & pos)
{
m_target_pos = pos;
}
void Goal::Set_Target_Army(const Army & army)
{
m_target_army = army;
}
void Goal::Set_Target_City(const Unit & city)
{
m_target_city = city;
}
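// Returns the target position for a specific army. For trade-route and
// tile-improvement goals this picks the reachable route/improvement cell
// closest to the army (skipping cities and the city centre itself); for
// petrol-station goals it returns the nearest refuelling position. All other
// goal types fall back to the plain Get_Target_Pos().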
const MapPoint Goal::Get_Target_Pos(const Army & army) const
{
const GoalRecord *rec = g_theGoalDB->Get(m_goal_type);
if (rec->GetTargetTypeTradeRoute())
{
const sint32 max_squared_dist = 0x7fffffff;
sint32 best_squared_dist = 0x7fffffff;
MapPoint best_target_pos;
Assert( m_target_city.m_id != 0);
const TradeDynamicArray* trade_routes =
m_target_city.GetCityData()->GetTradeSourceList();
for(sint32 i = 0; i < trade_routes->Num(); i++)
{
const DynamicArray<MapPoint>* path = (*trade_routes)[i].GetPath();
for (sint32 j = 1; j < path->Num()-1; j++)
{
Cell *cell = g_theWorld->GetCell(path->Get(j));
if (cell->HasCity())
continue;
if (cell->CanEnter(army->GetMovementType()))
{
sint32 tmp_squared_dist =
MapPoint::GetSquaredDistance(path->Get(j), army->RetPos());
if (tmp_squared_dist < best_squared_dist)
{
best_squared_dist = tmp_squared_dist;
best_target_pos = path->Get(j);
}
}
}
if (best_squared_dist < max_squared_dist)
return best_target_pos;
else
{
bool NO_TRADE_ROUTE_TARGET_POS_FOUND = true;
Assert(NO_TRADE_ROUTE_TARGET_POS_FOUND);
}
}
}
else if (rec->GetTargetTypeImprovement())
{
Assert( m_target_city.m_id != 0);
if (m_target_city->GetCityData()->WasTerrainImprovementBuilt())
{
const sint32 max_squared_dist = 0x7fffffff;
sint32 best_squared_dist = 0x7fffffff;
MapPoint best_target_pos;
CityInfluenceIterator it(m_target_city.RetPos(), m_target_city.GetCityData()->GetSizeIndex());
for(it.Start(); !it.End(); it.Next())
{
Cell *cell = g_theWorld->GetCell(it.Pos());
if (m_target_city.RetPos() == it.Pos())
continue;
if (!(cell->GetCityOwner() == m_target_city))
continue;
if (cell->GetNumDBImprovements() <= 0)
continue;
if (cell->CanEnter(army->GetMovementType()))
{
sint32 tmp_squared_dist = MapPoint::GetSquaredDistance(it.Pos(), army->RetPos());
if (tmp_squared_dist < best_squared_dist)
{
best_squared_dist = tmp_squared_dist;
best_target_pos = it.Pos();
}
}
}
if (best_squared_dist < max_squared_dist)
return best_target_pos;
else
{
bool NO_TILE_IMPROVEMENT_TARGET_POS_FOUND = true;
Assert(NO_TILE_IMPROVEMENT_TARGET_POS_FOUND);
}
}
}
else if (rec->GetTargetTypePetrolStation())
{
sint32 distance_to_refuel;
MapPoint refuel_pos;
CtpAi::GetNearestRefuel(army, army->RetPos(), refuel_pos, distance_to_refuel);
return refuel_pos;
}
return Get_Target_Pos();
}
const MapPoint & Goal::Get_Target_Pos() const
{
static MapPoint pos; // ugly life-time extension
if (m_target_army != ID())
{
if (m_target_army.IsValid())
{
m_target_army->GetPos(pos);
}
else
{
pos.x = -1;
pos.y = -1;
}
}
else if (m_target_city != ID())
{
if (m_target_city.IsValid())
{
m_target_city.GetPos(pos);
}
else
{
pos.x = -1;
pos.y = -1;
}
}
else
{
return m_target_pos;
}
Assert(pos.IsValid());
return pos;
}
const Army & Goal::Get_Target_Army() const
{
return m_target_army;
}
void Goal::Set_Sub_Task(const SUB_TASK_TYPE & sub_task)
{
m_sub_task = sub_task;
}
const SUB_TASK_TYPE & Goal::Get_Sub_Task() const
{
return m_sub_task;
}
const Unit & Goal::Get_Target_City() const
{
return m_target_city;
}
sint32 Goal::Get_Target_Value() const
{
sint32 value = 0;
const GoalRecord * rec = g_theGoalDB->Get(m_goal_type);
Assert(rec);
if(rec->GetTargetTypeAttackUnit()
|| rec->GetTargetTypeSpecialUnit()
){
const Army & army = Get_Target_Army();
sint8 tmpCount;
float tmp;
float tmpValue;
army->ComputeStrength(tmp,tmp,tmp,tmpCount,tmpCount,tmp,tmp,tmp,tmpValue, false);
value = static_cast<sint32>(tmpValue);
}
else if(rec->GetTargetTypeCity())
{
if(m_target_city.IsValid())
{
value = m_target_city->GetCityData()->GetValue();
}
}
else if( rec->GetTargetTypeTradeRoute() )
{
const Unit & city = Get_Target_City();
value = city->GetCityData()->GetGoldFromTradeRoutes();
}
return value;
}
PLAYER_INDEX Goal::Get_Target_Owner() const
{
PLAYER_INDEX target_owner = PLAYER_UNASSIGNED;
GoalRecord const * goal_record = g_theGoalDB->Get(m_goal_type);
Assert(goal_record);
if(goal_record->GetTargetTypeAttackUnit()
|| goal_record->GetTargetTypeSpecialUnit()
)
{
target_owner = m_target_army.GetOwner();
}
else if(goal_record->GetTargetTypePetrolStation()){
if(m_target_city != ID() || g_theWorld->IsAirfield(Get_Target_Pos())){
target_owner = g_theWorld->GetOwner(Get_Target_Pos());
}
else if(m_target_army != ID()){
target_owner = m_target_army.GetOwner();
}
}
else{
MapPoint pos(Get_Target_Pos());
if(pos.x >= 0){
target_owner = g_theWorld->GetOwner(Get_Target_Pos());
}
}
return target_owner;
}
bool Goal::Is_Execute_Incrementally() const
{
return g_theGoalDB->Get(m_goal_type)->GetExecuteIncrementally();
}
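// Estimates the strength this goal needs, based on the map analysis of the
// target position (enemy threat, attack, defense, ranged, bombard and value).
// The weighting depends on the goal's target type (choke point, improvement,
// endgame, own city, enemy city/unit, border, settle target, ...), and the
// result is scaled by the force-matching factors of the current strategy.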
void Goal::Compute_Needed_Troop_Flow()
{
MapAnalysis & mapAnalysis = MapAnalysis::GetMapAnalysis();
const MapPoint pos = Get_Target_Pos();
const float threat = static_cast<float>(mapAnalysis.GetThreat (m_playerId, pos));
const float attack = static_cast<float>(mapAnalysis.GetEnemyAttack (m_playerId, pos));
const float defense = static_cast<float>(mapAnalysis.GetEnemyDefense (m_playerId, pos));
const float ranged = static_cast<float>(mapAnalysis.GetEnemyRanged (m_playerId, pos));
const float bombardLand = static_cast<float>(mapAnalysis.GetEnemyBombardLand(m_playerId, pos));
const float bombardSea = static_cast<float>(mapAnalysis.GetEnemyBombardSea (m_playerId, pos));
const float bombardAir = static_cast<float>(mapAnalysis.GetEnemyBombardAir (m_playerId, pos));
const float value = static_cast<float>(mapAnalysis.GetEnemyValue (m_playerId, pos));
m_current_needed_strength = Squad_Strength(1);
// Why only one unit? Why then zero units? - <NAME>
// By bringing a real army to pirate or pillage, it can be ready for siege or attack;
// a single unit is quite defenseless - Calvitix
// A real army may not be ready for a siege, but a few turns later some more units would be
// there, and in the meantime the army would have been dissolved
const GoalRecord *goal_record = g_theGoalDB->Get(m_goal_type);
if(goal_record->GetTargetTypeChokePoint())
{
// Need also attack and ranged strength - Calvitix
m_current_needed_strength.Set_Attack (attack * 0.5f);
m_current_needed_strength.Set_Defense (defense * 0.5f);
m_current_needed_strength.Set_Ranged (ranged * 0.5f);
m_current_needed_strength.Set_Bombard_Land(bombardLand * 0.5f);
m_current_needed_strength.Set_Bombard_Sea (bombardSea * 0.5f);
m_current_needed_strength.Set_Bombard_Air (bombardAir * 0.5f);
m_current_needed_strength.Set_Value (value * 0.5f);
}
else if(goal_record->GetTargetTypeImprovement()
|| goal_record->GetTargetTypeTradeRoute()
){
m_current_needed_strength.Set_Attack(attack * 0.5f);
m_current_needed_strength.Set_Ranged(defense * 0.5f);
m_current_needed_strength.Set_Value(value);
}
else if(goal_record->GetTargetTypeEndgame())
{
if(goal_record->GetTargetOwnerSelf())
m_current_needed_strength.Set_Defense(threat);
else
m_current_needed_strength.Set_Attack(threat);
//to be sure that the global force of the army will be enough
// (not only a wounded unit for example)
m_current_needed_strength.Set_Value(value);
}
else if(goal_record->GetTargetTypeCity()
&& goal_record->GetTargetOwnerSelf()
){
// Tweak to obtain the RETREAT goal definition: TO DO - 'cleaner' method - Calvitix
// Set to 2 so that units with no goals will retreat to the nearest city
// (I prefer that method to GARRISON troops, which are not able to leave the city).
// Cities will be better defended if there are enough units; otherwise units will be
// assigned to relevant goals
if(g_theGoalDB->Get(m_goal_type)->GetTargetTypeCity()
&& g_theGoalDB->Get(m_goal_type)->GetTargetOwnerSelf()
&& goal_record->GetTreaspassingArmyBonus() > 0
){
m_current_needed_strength.Set_Agent_Count(2);
}
else
{
const StrategyRecord & strategy =
Diplomat::GetDiplomat(m_playerId).GetCurrentStrategy();
sint32 offensive_garrison;
sint32 defensive_garrison;
sint32 ranged_garrison;
strategy.GetOffensiveGarrisonCount(offensive_garrison);
strategy.GetDefensiveGarrisonCount(defensive_garrison);
strategy.GetRangedGarrisonCount(ranged_garrison);
// why only defensive units ?
// added ranged units - Calvitix
m_current_needed_strength.Set_Defense(threat * 2 / 3);
m_current_needed_strength.Set_Ranged(threat / 3);
m_current_needed_strength.Set_Value(value);
// Not used for the moment (only attack or defense strength is considered,
// see the army_strength > operator) - Calvitix
m_current_needed_strength.Set_Defenders(static_cast<sint8>(defensive_garrison + offensive_garrison));
m_current_needed_strength.Set_Ranged_Units(static_cast<sint8>(ranged_garrison));
}
}
else if
(
( goal_record->GetTargetTypeAttackUnit()
|| goal_record->GetTargetTypeCity()
)
&& !goal_record->GetTargetOwnerSelf()
&& !goal_record->GetTargetTypeSpecialUnit()
)
{
// A real Attack force, depending on threat
m_current_needed_strength.Set_Attack (attack);
m_current_needed_strength.Set_Defense (defense);
// m_current_needed_strength.Set_Attack (defense); // Reverse attack and defense
// m_current_needed_strength.Set_Defense (attack);
m_current_needed_strength.Set_Ranged (ranged);
m_current_needed_strength.Set_Bombard_Land(bombardLand);
m_current_needed_strength.Set_Bombard_Sea (bombardSea);
m_current_needed_strength.Set_Bombard_Air (bombardAir);
m_current_needed_strength.Set_Value (value);
Squad_Strength strength;
strength.Set_Pos_Strength(pos);
//
if
(
Get_Target_Owner() != 0
|| strength.Get_Agent_Count() > 0
)
{
// Set this to zero, since the number of units we need does not depend on the number of units at the target.
strength.Set_Agent_Count(0);
strength.Set_Defenders(0);
strength.Set_Ranged_Units(0);
m_current_needed_strength += strength;
m_current_needed_strength.Set_Force_Matching(0.5,0.5,0.5,0.5,0.5);
}
}
else if(goal_record->GetTargetTypeBorder())
{
// assuming threat is the global strength to use (to be coherent with other changes) - Calvitix
m_current_needed_strength.Set_Attack (attack * 0.5f);
m_current_needed_strength.Set_Defense (defense * 0.5f);
m_current_needed_strength.Set_Ranged (ranged * 0.5f);
m_current_needed_strength.Set_Bombard_Land(bombardLand * 0.5f);
m_current_needed_strength.Set_Bombard_Sea (bombardSea * 0.5f);
m_current_needed_strength.Set_Bombard_Air (bombardAir * 0.5f);
m_current_needed_strength.Set_Value (value * 0.5f); // Actually this stuff should go to the force matches
}
else if ( goal_record->GetTargetTypeSettleLand()
|| goal_record->GetTargetTypeSettleSea()
|| goal_record->GetTargetTypePetrolStation()
)
{
// No strength is needed
}
else
{
m_current_needed_strength.Set_Pos_Strength(pos);
}
const StrategyRecord & strategy =
Diplomat::GetDiplomat(m_playerId).GetCurrentStrategy();
const StrategyRecord::ForceMatch *force_match = NULL;
switch (goal_record->GetForceMatch())
{
case k_Goal_ForceMatch_Offensive_Bit:
force_match = strategy.GetOffensivePtr();
break;
case k_Goal_ForceMatch_Defensive_Bit:
force_match = strategy.GetDefensivePtr();
break;
case k_Goal_ForceMatch_StealthAttack_Bit:
force_match = strategy.GetStealthAttackPtr();
break;
case k_Goal_ForceMatch_Bombard_Bit:
force_match = strategy.GetBombardPtr();
break;
case k_Goal_ForceMatch_Special_Bit:
force_match = strategy.GetSpecialPtr();
break;
case k_Goal_ForceMatch_Harass_Bit:
force_match = strategy.GetHarassPtr();
break;
default:
Assert(false);
}
Assert(force_match);
m_current_needed_strength.Set_Force_Matching(
static_cast<float>(force_match->GetAttackMatch()),
static_cast<float>(force_match->GetDefenseMatch()),
static_cast<float>(force_match->GetRangedMatch()),
static_cast<float>(force_match->GetBombardMatch()),
static_cast<float>(force_match->GetValueMatch()));
// Set_Pos_Strength also retrieves the transport capacity at pos
// which of course need not be matched
m_current_needed_strength.Set_Transport(0);
}
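// Computes how well an agent matches this goal. Dead agents, garrison units
// away from the target, agents failing the bid pretest, and various
// diplomacy/terrain restrictions yield BAD_UTILITY. Otherwise the value is
// roughly raw_priority + distance/eta time term + goal-specific bonuses
// (wounded, obsolete, trespassing, vision range, ...) plus a small
// tie-breaker derived from the army's unit statistics.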
Utility Goal::Compute_Agent_Matching_Value(const Agent_ptr agent_ptr) const
{
#if defined(_DEBUG)
Player *player_ptr = g_player[ m_playerId ];
Assert(player_ptr && agent_ptr);
#endif
if(agent_ptr->Get_Is_Dead())
{
return Goal::BAD_UTILITY;
}
MapPoint dest_pos = Get_Target_Pos(); // Get cheap target position first, no need for pillage checking, yet.
MapPoint curr_pos = agent_ptr->Get_Pos();
if(agent_ptr->IsNeededForGarrison() && dest_pos != curr_pos)
{
return Goal::BAD_UTILITY;
}
// This is expensive because of the pillage check; that is why the cheap target position was fetched first.
dest_pos = Get_Target_Pos(agent_ptr->Get_Army());
// Pretest_Bid is also expensive, so the garrison units were filtered out first (above).
if(!Pretest_Bid(agent_ptr, dest_pos))
{
return Goal::BAD_UTILITY;
}
PLAYER_INDEX target_owner = Get_Target_Owner();
const Diplomat & diplomat = Diplomat::GetDiplomat(m_playerId);
if( target_owner > 0 &&
(!diplomat.IncursionPermission(target_owner)))
{
bool isspecial, cancapture, haszoc, canbombard;
bool isstealth;
sint32 maxattack, maxdefense;
if(!agent_ptr->Get_Army()->HasCargo())
{
agent_ptr->Get_Army()->CharacterizeArmy( isspecial,
isstealth,
maxattack,
maxdefense,
cancapture,
haszoc,
canbombard);
}
else
{
agent_ptr->Get_Army()->CharacterizeCargo( isspecial,
isstealth,
maxattack,
maxdefense,
cancapture,
haszoc,
canbombard);
}
if (!isspecial || maxattack > 0 || haszoc)
{
return Goal::BAD_UTILITY;
}
}
#if defined(_DEBUG) || defined(USE_LOGGING)
// Maybe this is of some use later
bool is_transporter = false;
#endif
sint32 transports, max,empty;
if(Needs_Transporter()
&& agent_ptr->Get_Army()->GetCargo(transports, max, empty)
&& empty > 0
&& m_agents.size() > 0
){
Utility transport_utility = 0;
Utility utility = 0;
sint32 count = 0;
for
(
Plan_List::const_iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
Agent_ptr agent_trans_ptr = match_iter->Get_Agent();
if
(
!agent_trans_ptr->Get_Is_Dead()
&& match_iter->Get_Needs_Transporter()
&& agent_trans_ptr->EstimateTransportUtility(agent_ptr, utility)
)
{
transport_utility += utility;
++count;
}
}
if(count > 0)
{
Utility match = (transport_utility / count);
return match + Get_Raw_Priority();
}
#if defined(_DEBUG) || defined(USE_LOGGING)
// Maybe this is of some use later
// is_transporter = true;
#endif
}
if(agent_ptr->Get_Army()->HasCargo() && !CanReachTargetContinent(agent_ptr))
{
return Goal::BAD_UTILITY;
}
Utility bonus = 0;
// If the unit is wounded and this is a retreat or defense goal, add a bonus
// to the goal priority + matching
MapPoint armyPos = agent_ptr->Get_Pos();
PLAYER_INDEX PosOwner = g_theWorld->GetOwner(armyPos);
if(g_theGoalDB->Get(m_goal_type)->GetTargetTypeCity()
&& g_theGoalDB->Get(m_goal_type)->GetTargetOwnerSelf()
){
// For Defend or Retreat goals
if
(
agent_ptr->Get_Army()->HasCargo()
&& agent_ptr->Get_Army()->IsCargoWounded()
&& !agent_ptr->Get_Army()->IsCargoObsolete()
|| !agent_ptr->Get_Army()->HasCargo()
&& agent_ptr->Get_Army()->IsWounded()
&& !agent_ptr->Get_Army()->IsObsolete()
)
{
bonus+= g_theGoalDB->Get(m_goal_type)->GetWoundedArmyBonus();
}
if(PosOwner != m_playerId
&&!diplomat.IncursionPermission(PosOwner)
){
bonus+= g_theGoalDB->Get(m_goal_type)->GetTreaspassingArmyBonus();
}
}
else if((g_theGoalDB->Get(m_goal_type)->GetTargetOwnerColdEnemy()
|| g_theGoalDB->Get(m_goal_type)->GetTargetOwnerHotEnemy())
&& (g_theGoalDB->Get(m_goal_type)->GetTargetTypeAttackUnit()
|| g_theGoalDB->Get(m_goal_type)->GetTargetTypeCity())
) //For Attack goals (unit or city)
{
if(agent_ptr->Get_Army()->CanSettle())
{
if
(
g_theGoalDB->Get(m_goal_type)->GetTargetTypeCity()
&& m_target_city.IsValid()
&& g_theWorld->GetCell(m_target_city->GetPos())->GetNumUnits() == 0
)
{
// Opportunity action: march into the city even if the army contains a settler
}
else
{
// If there is a settler in the army...
return Goal::BAD_UTILITY;
}
}
if(m_target_army.IsValid() && !agent_ptr->Get_Army()->CanFight(*m_target_army.AccessData()))
{
return Goal::BAD_UTILITY;
}
if
(
m_target_city.IsValid()
&& !m_target_city->HasAdjacentFreeLand()
&& g_theWorld->GetCell(m_target_city->GetPos())->GetNumUnits() != 0
&& !agent_ptr->Get_Army()->CanBeachAssault()
)
{
return Goal::BAD_UTILITY;
}
if
(
agent_ptr->Get_Army()->HasCargo()
&& agent_ptr->Get_Army()->IsCargoWounded()
&& !agent_ptr->Get_Army()->IsCargoObsolete()
|| !agent_ptr->Get_Army()->HasCargo()
&& agent_ptr->Get_Army()->IsWounded()
&& !agent_ptr->Get_Army()->IsObsolete()
)
{
bonus+= g_theGoalDB->Get(m_goal_type)->GetWoundedArmyBonus();
}
if(PosOwner != m_playerId && !diplomat.IncursionPermission(PosOwner))
{
bonus += g_theGoalDB->Get(m_goal_type)->GetTreaspassingArmyBonus();
}
}
else if((g_theGoalDB->Get(m_goal_type)->GetTargetOwnerColdEnemy()
|| g_theGoalDB->Get(m_goal_type)->GetTargetOwnerHotEnemy())
&& (g_theGoalDB->Get(m_goal_type)->GetTargetTypeTradeRoute())
) // For trade routes
{
if(agent_ptr->Get_Army()->CanSettle()) // CargoCanSettle
{
// If there is a settler in the army...
return Goal::BAD_UTILITY;
}
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers)
double report_wounded = bonus;
#endif //_DEBUG
if(agent_ptr->Get_Army()->HasCargo() ? agent_ptr->Get_Army()->IsCargoObsolete() : agent_ptr->Get_Army()->IsObsolete())
bonus += g_theGoalDB->Get(m_goal_type)->GetObsoleteArmyBonus();
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers)
double report_obsolete = bonus - report_wounded;
#endif //_DEBUG
MapPoint point(-1, -1);
if(Needs_Transporter())
{
sint32 transports;
sint32 max;
sint32 empty;
if(
agent_ptr->Get_Army()->GetCargo(transports, max, empty)
&& empty > 0
)
{
point = GetClosestCargoPos(agent_ptr); // If it is a transporter
}
else
{
point = GetClosestTransporterPos(agent_ptr); // If it needs to be transported
}
}
else
{
point = dest_pos;
}
point = (!point.IsValid()) ? dest_pos : point;
sint32 squared_distance = 0;
double eta = agent_ptr->GetRoundsPrecise(point, squared_distance);
double cell_dist = sqrt(static_cast<double>(squared_distance));
Utility raw_priority = Get_Raw_Priority();
const StrategyRecord & strategy =
Diplomat::GetDiplomat(m_playerId).GetCurrentStrategy();
sint32 distance_modifier = 1;
strategy.GetDistanceModifierFactor(distance_modifier);
Utility time_term = static_cast<Utility>( (eta * distance_modifier) + cell_dist);
if(g_theGoalDB->Get(m_goal_type)->GetTreaspassingArmyBonus() > 0)
{
PLAYER_INDEX pos_owner = g_theWorld->GetCell(agent_ptr->Get_Pos())->GetOwner();
bool incursion_permission = Diplomat::GetDiplomat(m_playerId).IncursionPermission(pos_owner);
if (pos_owner >= 0 && !incursion_permission)
{
bonus += g_theGoalDB->Get(m_goal_type)->GetTreaspassingArmyBonus();
}
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers)
double report_Treaspassing = bonus - report_obsolete;
double report_InVisionRange = 0.0;
double report_NoBarbsPresent = 0.0;
#endif //_DEBUG
if(agent_ptr->Get_Army()->IsInVisionRangeAndCanEnter(dest_pos))
{
/// @ToDo: Use the actual path cost, to check whether the goody hut is really so close.
bonus += g_theGoalDB->Get(m_goal_type)->GetInVisionRangeBonus();
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers)
report_InVisionRange = bonus - report_Treaspassing;
#endif //_DEBUG
if (!Barbarians::InBarbarianPeriod()
|| wonderutil_GetProtectFromBarbarians(g_player[agent_ptr->Get_Army()->GetOwner()]->m_builtWonders)
){
bonus += g_theGoalDB->Get(m_goal_type)->GetNoBarbarianBonus();
}
else if ((agent_ptr->Get_Squad_Class() & k_Goal_SquadClass_CanAttack_Bit) != 0x0)
{
bonus += g_theGoalDB->Get(m_goal_type)->GetCanAttackBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers)
report_NoBarbsPresent = bonus - report_InVisionRange;
#endif //_DEBUG
}
if(!agent_ptr->Get_Army()->HasCargo())
{
if(!g_theWorld->IsOnSameContinent(dest_pos, agent_ptr->Get_Pos()) // Same continent problem
&& !agent_ptr->Get_Army()->GetMovementTypeAir()
&& g_player[m_playerId]->GetCargoCapacity() <= 0
&& m_current_attacking_strength.Get_Transport() <= 0
){
return Goal::BAD_UTILITY;
}
}
Utility match = bonus + time_term + raw_priority;
const ArmyData* army = agent_ptr->Get_Army().GetData();
Utility tieBreaker = 0;
for(sint32 i = 0; i < army->Num(); ++i) // Cargo (HasCargo) may not be accounted for here; this only needs to make the sort order stable.
{
const UnitRecord* rec = army->Get(i)->GetDBRec();
tieBreaker += static_cast<Utility>(rec->GetAttack());
tieBreaker += static_cast<Utility>(rec->GetDefense());
tieBreaker += rec->GetZBRangeAttack();
tieBreaker += rec->GetFirepower();
tieBreaker += static_cast<Utility>(rec->GetArmor());
tieBreaker += army->Get(i)->CalculateTotalHP();//rec->GetMaxHP();
}
match += tieBreaker;
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
MapPoint target_pos = Get_Target_Pos();
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1,
("\t\t%9x,\t %9x,\t%9x (%3d,%3d),\t%s (%3d,%3d) (%3d,%3d),\t%8d,\t%8d,\t%8f,\t%8f,\t%8d,\t%8f,\t%8f,\t%8f,\t%8d,\t%8f,\t%8f,\t%8d,\t%9x,\t%s \n",
this, // This goal
agent_ptr->Get_Army().m_id, // The army
agent_ptr, // The agent
agent_ptr->Get_Pos().x, // Agent pos.x
agent_ptr->Get_Pos().y, // Agent pos.y
g_theGoalDB->Get(m_goal_type)->GetNameText(), // Goal name
target_pos.x, // Target pos.x
target_pos.y, // Target pos.y
dest_pos.x, // Sub target pos.x
dest_pos.y, // Sub target pos.y
match, // Computed match value
raw_priority, // Raw match value
cell_dist, // Distance to target (square root of the squared distance), not identical with the path distance
eta, // Rounds to target
bonus, // Total bonus
report_wounded, // Whether there are wounded units
report_obsolete, // Whether there are obsolete units
report_Treaspassing, // Whether we are trespassing
time_term, // Time needed to reach the goal if we followed a beeline
report_InVisionRange, // In-vision-range bonus
report_NoBarbsPresent, // Bonus if no barbarians are present
is_transporter, // Whether the agent is a transporter
agent_ptr->Get_Goal(), // The goal that this agent is assigned to
(g_theWorld->HasCity(target_pos) ? g_theWorld->GetCity(target_pos).GetName() : "field")
));
#endif //_DEBUG
return match;
}
Utility Goal::Get_Initial_Priority() const
{
const StrategyRecord & strategy = Diplomat::GetDiplomat(m_playerId).GetCurrentStrategy();
for(sint32 i = 0; i < strategy.GetNumGoalElement(); i++)
{
Assert(strategy.GetGoalElement(i)->GetGoalIndex() >= 0);
if(m_goal_type == strategy.GetGoalElement(i)->GetGoalIndex())
{
return strategy.GetGoalElement(i)->GetPriority();
}
}
return Goal::BAD_UTILITY;
}
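// Computes the raw priority of this goal, independent of any particular
// agent. Starts from the strategy's initial priority for this goal type and
// adds map-based modifiers (threat, enemy/allied value, power, distance to
// the own and to the nearest foreign empire centre, choke point, exploration
// and visibility, territory owner, settle value, city size and connection
// bonuses) plus a threaten bonus. The result is clamped to
// [BAD_UTILITY, MAX_UTILITY).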
Utility Goal::Compute_Raw_Priority()
{
Player * player_ptr = g_player[m_playerId];
Assert(player_ptr);
if (!player_ptr || Get_Totally_Complete())
{
m_raw_priority = Goal::BAD_UTILITY;
return m_raw_priority;
}
const MapPoint & target_pos = Get_Target_Pos();
if(!player_ptr->CanUseSeaTab()
&& (g_theWorld->IsWater(target_pos) || g_theWorld->IsShallowWater(target_pos))
){
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1, ("\t No sea tab\n"));
m_raw_priority = Goal::BAD_UTILITY;
return m_raw_priority;
}
const GoalRecord * goal_rec = g_theGoalDB->Get(m_goal_type);
if
(
goal_rec->GetHasTransportersOrNoCoastalCities()
&&
(
!player_ptr->CanUseSeaTab()
||
(
player_ptr->HasCostalCities()
&& !player_ptr->HasTransporters()
)
)
)
{
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1, ("\t No sea tab\n"));
m_raw_priority = Goal::BAD_UTILITY;
return m_raw_priority;
}
const MapAnalysis & map = MapAnalysis::GetMapAnalysis();
PLAYER_INDEX target_owner = Get_Target_Owner();
// Always compute a foreign center (even if the target is owned by the player),
// otherwise it is computed with coords (0,0)!
MapPoint empire_center = map.GetEmpireCenter(m_playerId);
MapPoint foreign_empire_center = (target_owner > 0 && m_playerId != target_owner) ? map.GetEmpireCenter(target_owner) : map.GetNearestForeigner(m_playerId, target_pos);
double cell_value = Get_Initial_Priority();
if(cell_value == Goal::BAD_UTILITY)
{
m_raw_priority = Goal::BAD_UTILITY;
return m_raw_priority;
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
double report_cell_initvalue = cell_value;
double report_cell_lastvalue = cell_value;
double report_cell_threat = 0.0;
double report_cell_EnemyValue = 0.0;
double report_cell_AlliedValue = 0.0;
double report_cell_HomeDistance = 0.0;
double report_cell_EnemyDistance = 0.0;
double report_cell_Settle = 0.0;
double report_cell_Chokepoint = 0.0;
double report_cell_MaxPower = 0.0;
double report_cell_Unexplored = 0.0;
double report_cell_NotVisible = 0.0;
double report_cell_NoOwnerTerritory = 0.0;
double report_cell_InHomeTerritory = 0.0;
double report_cell_InEnemyTerritory = 0.0;
double report_cell_SlaveryProtection = 0.0;
double report_cell_SmallCitySize = 0.0;
double report_cell_CityConnected = 0.0;
double report_cell_SmallEmpireBonus = 0.0;
double report_cell_WeakestEnemyBonus = 0.0;
#endif //_DEBUG
double maxThreat = static_cast<double>(map.GetMaxThreat(m_playerId));
if ( maxThreat > 0.0 )
{
cell_value +=
( ( static_cast<double>(map.GetThreat(m_playerId, target_pos)) /
maxThreat) *
g_theGoalDB->Get(m_goal_type)->GetThreatBonus() );
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_threat = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
}
double maxEnemyValue = static_cast<double>(map.GetMaxEnemyValue(m_playerId));
if(maxEnemyValue > 0.0)
{
cell_value +=
( ( static_cast<double>(map.GetEnemyValue( m_playerId, target_pos)) /
maxEnemyValue ) *
goal_rec->GetEnemyValueBonus() );
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_EnemyValue = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
}
double maxAlliedValue = static_cast<double>(map.GetMaxAlliedValue(m_playerId));
if ( maxAlliedValue > 0)
{
cell_value +=
( ( static_cast<double>(map.GetAlliedValue(m_playerId, target_pos)) /
maxAlliedValue ) *
goal_rec->GetAlliedValueBonus() );
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_AlliedValue = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
}
double maxPower = static_cast<double>(map.GetMaxPower(m_playerId)); // Get Max Power of all Allies
if (maxPower > 0)
{
cell_value +=
( ( static_cast<double>(map.GetPower( m_playerId, target_pos)) / // Allies' Power at target_pos
maxPower ) *
goal_rec->GetPowerBonus() );
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_MaxPower = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
}
#if 0
if(!goal_rec->GetIsGlobalGoal())
{
cell_value += sqrt(static_cast<double>
(MapPoint::GetSquaredDistance(target_pos, empire_center))) * goal_rec->GetDistanceToHomeBonus();
}
else
{
if(g_theWorld->IsOnSameContinent(target_pos, empire_center))
{
sint32 distance;
float costs = 0.0f;
g_city_astar.IsLandConnected(m_playerId, target_pos, empire_center, costs, distance);
cell_value += static_cast<double>(distance * goal_rec->GetDistanceToHomeBonus());
}
else
{
sint32 distance;
float costs = 0.0f;
g_city_astar.FindCityDist(m_playerId, target_pos, empire_center, costs, distance);
cell_value += static_cast<double>(distance * goal_rec->GetDistanceToHomeBonus());
}
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_HomeDistance = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if (foreign_empire_center.x != 0 && foreign_empire_center.y != 0) // Dangerous if the empire center has coords (0,0)
{
if(!goal_rec->GetIsGlobalGoal())
{
cell_value += sqrt(static_cast<double>
(MapPoint::GetSquaredDistance(target_pos, foreign_empire_center))) * goal_rec->GetDistanceToEnemyBonus();
}
else
{
if(g_theWorld->IsOnSameContinent(target_pos, foreign_empire_center))
{
sint32 distance;
float costs = 0.0f;
g_city_astar.IsLandConnected(m_playerId, target_pos, foreign_empire_center, costs, distance);
cell_value += static_cast<double>(distance * goal_rec->GetDistanceToEnemyBonus());
}
else
{
sint32 distance;
float costs = 0.0f;
g_city_astar.FindCityDist(m_playerId, target_pos, foreign_empire_center, costs, distance);
cell_value += static_cast<double>(distance * goal_rec->GetDistanceToEnemyBonus());
}
}
}
#else
cell_value += sqrt(static_cast<double>
(MapPoint::GetSquaredDistance(target_pos, empire_center))) * goal_rec->GetDistanceToHomeBonus();
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_HomeDistance = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
// if (foreign_empire_center.IsValid()) // Replace when default values of MapPoint are invalid
if (foreign_empire_center.x != 0 && foreign_empire_center.y != 0) // Dangerous if the empire center has coords (0,0)
{
cell_value += sqrt(static_cast<double>
(MapPoint::GetSquaredDistance(target_pos, foreign_empire_center))) * goal_rec->GetDistanceToEnemyBonus();
}
#endif
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_EnemyDistance = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if (g_theWorld->GetCell( target_pos )->GetIsChokePoint())
{
cell_value += goal_rec->GetChokePointBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_Chokepoint = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if(!player_ptr->IsExplored(target_pos))
{
if
(
g_theWorld->IsCity(target_pos)
&& g_theWorld->GetCell(target_pos)->m_cityHasVisibleTileImprovement
)
{
// Nothing to add
}
else
{
cell_value += goal_rec->GetUnexploredBonus();
}
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_Unexplored = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if(!player_ptr->IsVisible(target_pos))
{
cell_value += goal_rec->GetNotVisibleBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_NotVisible = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
PLAYER_INDEX territoryOwner = g_theWorld->GetCell( target_pos )->GetOwner();
if(m_playerId == territoryOwner)
{
cell_value += goal_rec->GetInHomeTerritoryBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_InHomeTerritory = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if(m_playerId != territoryOwner && territoryOwner >= 0) // 0: Barbarian player
{
cell_value += goal_rec->GetInEnemyTerritoryBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_InEnemyTerritory = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if(territoryOwner == PLAYER_UNASSIGNED)
{
cell_value += goal_rec->GetNoOwnerTerritoryBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
report_cell_NoOwnerTerritory = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if ( goal_rec->GetTargetTypeSettleLand() ||
goal_rec->GetTargetTypeSettleSea() )
{
cell_value += SettleMap::s_settleMap.GetValue(target_pos);
#if defined(_DEBUG) || defined(USE_LOGGING)
report_cell_Settle = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
}
if(m_target_city.IsValid())
{
cell_value += goal_rec->GetSlaveryProtectionBonus() * (1.0 - m_target_city.IsProtectedFromSlavery());
#if defined(_DEBUG) || defined(USE_LOGGING)
report_cell_SlaveryProtection = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
sint32 pop;
m_target_city->GetPop(pop);
cell_value += (1.0 - (static_cast<double>(pop) / static_cast<double>(m_target_city.CD()->GetOverallMaxPop()))) * static_cast<double>(goal_rec->GetSmallCitySizeBonus());
if(pop == m_target_city.CD()->GetMaxPop())
{
m_raw_priority = Goal::BAD_UTILITY;
return m_raw_priority;
}
#if defined(_DEBUG) || defined(USE_LOGGING)
report_cell_SmallCitySize = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
}
const sint32 doubleDistanceFactor = 4;
sint32 distance;
bool isLandConnected = goal_rec->HasConnectionBoni() && player_ptr->IsConnected(target_pos, doubleDistanceFactor * g_theConstDB->Get(0)->GetBorderSquaredRadius(), distance);
bool isConnected = goal_rec->HasConnectionBoni() && player_ptr->IsConnected(target_pos, doubleDistanceFactor * g_theConstDB->Get(0)->GetBorderSquaredRadius(), distance, false);
const GoalRecord::ConnectionBoni* cbRec = isLandConnected || isConnected ? goal_rec->GetConnectionBoniPtr() : NULL;
// A little ugly but this way I don't have to mess with the debug reports
if(cbRec != NULL)
{
if(isLandConnected)
{
double value = -static_cast<double>(cbRec->GetLandToCloseCityConnectionBonus() * distance);
value /= sqrt( static_cast<double>(doubleDistanceFactor * doubleDistanceFactor * g_theConstDB->Get(0)->GetBorderSquaredRadius())); // Maybe this factor is not the possible maximum
value += static_cast<double>(cbRec->GetLandToCloseCityConnectionBonus());
if(value > 0)
{
cell_value += value;
}
}
if(isConnected)
{
double value = -static_cast<double>(cbRec->GetCloseCityConnectionBonus() * distance);
value /= sqrt( static_cast<double>(doubleDistanceFactor * doubleDistanceFactor * g_theConstDB->Get(0)->GetBorderSquaredRadius())); // Maybe this factor is not the possible maximum
value += static_cast<double>(cbRec->GetCloseCityConnectionBonus());
if(value > 0)
{
cell_value += value;
}
}
}
#if defined(_DEBUG) || defined(USE_LOGGING)
report_cell_CityConnected = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if
(
cbRec != NULL
&& cbRec->GetSmallTargetEmpireBonus() != 0
&& target_owner != m_playerId
&& target_owner > -1
&& g_player[target_owner]->GetNumCities() < cbRec->GetSmallTargetEmpireSize()
)
{
cell_value += cbRec->GetSmallTargetEmpireBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING)
report_cell_SmallEmpireBonus = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
if
(
target_owner != m_playerId
&& cbRec
&& cbRec->GetWeakestEnemyBonus() != 0
&& target_owner > -1
&& (
g_player[m_playerId]->GetWeakestEnemy() == target_owner
|| target_owner == 0
)
)
{
cell_value += cbRec->GetWeakestEnemyBonus();
}
#if defined(_DEBUG) || defined(USE_LOGGING)
report_cell_WeakestEnemyBonus = cell_value - report_cell_lastvalue;
report_cell_lastvalue = cell_value;
#endif //_DEBUG
sint32 threaten_bonus = GetThreatenBonus();
m_raw_priority = (Utility) cell_value + threaten_bonus;
Assert(m_raw_priority < Goal::MAX_UTILITY);
Assert(m_raw_priority >= Goal::BAD_UTILITY);
if (m_raw_priority > Goal::MAX_UTILITY)
m_raw_priority = Goal::MAX_UTILITY-1;
else if (m_raw_priority < Goal::BAD_UTILITY)
m_raw_priority = Goal::BAD_UTILITY;
#if defined(_DEBUG) || defined(USE_LOGGING) // Add a debug report of goal computing (raw priority and all modifiers) - Calvitix
if(CtpAiDebug::DebugLogCheck(this->Get_Player_Index(), this->Get_Goal_Type(), -1))
{
char buff[1024];
sprintf(buff, "\t %9x,\t%s,\t%i, \t\trc(%3d,%3d),\t%8f,\t%8f,\t%8f,\t%8f,\t%8f,\t%8f,\t%8f, rc(%3d,%3d),\t%8f, rc(%3d,%3d), \t%8f,\t%8f,\t%8f,\t%8f,\t%8f,\t%8f,\t%8f,",
this,
goal_rec->GetNameText(),
m_raw_priority,
target_pos.x,
target_pos.y,
report_cell_initvalue,
report_cell_lastvalue,
report_cell_threat,
report_cell_EnemyValue,
report_cell_AlliedValue,
report_cell_MaxPower,
report_cell_HomeDistance,
empire_center.x,
empire_center.y,
report_cell_EnemyDistance,
foreign_empire_center.x,
foreign_empire_center.y,
report_cell_Settle,
report_cell_Chokepoint,
report_cell_Unexplored,
report_cell_NotVisible,
report_cell_InHomeTerritory,
report_cell_InEnemyTerritory,
report_cell_NoOwnerTerritory
);
AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1, ("%s\t%8f,\t\t%8f,\t%i,\t\t%8f,\t%8f,\t%8f,\t%s\n",
buff,
report_cell_SlaveryProtection,
report_cell_SmallCitySize,
threaten_bonus,
report_cell_CityConnected,
report_cell_SmallEmpireBonus,
report_cell_WeakestEnemyBonus,
(g_theWorld->HasCity(target_pos) ? g_theWorld->GetCity(target_pos).GetName() : "field")));
}
// For some reason the following does not work in VC6:
/* AI_DPRINTF(k_DBG_SCHEDULER_DETAIL, m_playerId, m_goal_type, -1,
("\t %9x,\t%s,\t%i, \t\trc(%3d,%3d),\t%8f,\t%8f,\t%8f,\t%8f,\t%8f,\t%8f,\t%8f, rc(%3d,%3d),\t%8f, rc(%3d,%3d), \t%8f,\t%8f,\t%8f,\t%8f,\t%8f,\t%s \n",
this,
goal_rec->GetNameText(),
m_raw_priority,
target_pos.x,
target_pos.y,
report_cell_initvalue,
report_cell_lastvalue,
report_cell_threat,
report_cell_EnemyValue,
report_cell_AlliedValue,
report_cell_MaxPower,
report_cell_HomeDistance,
empire_center.x,
empire_center.y,
report_cell_EnemyDistance,
foreign_empire_center.x,
foreign_empire_center.y,
report_cell_Settle,
report_cell_Chokepoint,
report_cell_Unexplored,
report_cell_NotVisible,
threaten_bonus,
(g_theWorld->HasCity(target_pos) ? g_theWorld->GetCity(target_pos).GetName() : "field")
));*/
#endif //_DEBUG
return m_raw_priority;
}
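// Executes the goal with the committed agents: transporters with free slots
// are moved to the end of the agent list, troops are rallied or ungrouped
// first if the goal record asks for it, and each agent is then sent towards
// its target position. Returns GOAL_NEEDS_TRANSPORT if an agent cannot reach
// the target and no transporter is available.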
GOAL_RESULT Goal::Execute_Task()
{
if(Get_Totally_Complete())
return GOAL_COMPLETE;
if(!Can_Be_Executed())
return GOAL_ALREADY_MOVED;
Assert(m_agents.begin() != m_agents.end());
const GoalRecord * goal_record = g_theGoalDB->Get(m_goal_type);
Set_Sub_Task(SUB_TASK_GOAL);
bool hastogowithoutgrouping = false;
Agent_List tmpTransporters;
for
(
Agent_List::reverse_iterator agent_iter = m_agents.rbegin();
agent_iter != m_agents.rend();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
sint32 transporters = 0;
sint32 max = 0;
sint32 empty = 0;
if(agent_ptr->Get_Army()->GetCargo(transporters, max, empty) && empty > 0)
{
m_agents.remove(agent_ptr);
tmpTransporters.push_back(agent_ptr);
}
}
m_agents.splice(m_agents.end(), tmpTransporters);
if(goal_record->GetNeverSatisfied())
{
for
(
Agent_List::iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
// Added this condition to avoid that a 12-unit army with a SIEGE goal retreats
// one tile from the city because it has lost one unit and has to group with
// another one. It is better to go on and besiege the city (if there is
// more than 2/3 of the army left, i.e. more than 8 units).
/// @ToDo: Reconsider NeverSatisfied
MapPoint target_pos = Get_Target_Pos(agent_ptr->Get_Army());
MapPoint current_pos = agent_ptr->Get_Pos();
if
(
MapPoint::GetSquaredDistance(target_pos, current_pos) <= 1
&& (
agent_ptr->Get_Army()->Num() > (2*k_MAX_ARMY_SIZE/3)
|| agent_ptr->Get_Army()->Num() > m_current_attacking_strength.Get_Agent_Count()
)
)
{
hastogowithoutgrouping = true;
break;
}
}
}
if(Is_Satisfied() || Is_Execute_Incrementally() || hastogowithoutgrouping)
{
if(goal_record->GetRallyFirst())
{
if(Goal_Too_Expensive())
return GOAL_FAILED;
if(!RallyComplete())
{
Set_Sub_Task(SUB_TASK_RALLY);
if(Ok_To_Rally())
{
if(!RallyTroops())
{
return GOAL_FAILED;
}
else
{
// If hastogowithoutgrouping is true, execute the goal
// even if the rally is not complete
if(!hastogowithoutgrouping)
{
return GOAL_IN_PROGRESS;
}
else
{
Set_Sub_Task(SUB_TASK_GOAL);
}
}
}
}
else
{
Set_Sub_Task(SUB_TASK_GOAL);
}
}
// Added an Ungroup method (sometimes, for example to explore,
// it is more useful to have a lot of small units rather
// than one huge army)
else if(goal_record->GetUnGroupFirst())
{
Set_Sub_Task(SUB_TASK_UNGROUP);
if (!UnGroupTroops())
{
return GOAL_FAILED;
}
if (!UnGroupComplete())
{
return GOAL_IN_PROGRESS;
}
else
{
Set_Sub_Task(SUB_TASK_GOAL);
}
}
/* else
{
Set_Sub_Task(SUB_TASK_GOAL);
}*/
for
(
Agent_List::iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
if(agent_ptr->Get_Is_Dead())
continue;
if(!agent_ptr->Get_Can_Be_Executed())
continue;
MapPoint pos = Get_Target_Pos(agent_ptr->Get_Army());
if(!GotoGoalTaskSolution(agent_ptr, pos))
{
if(Needs_Transporter() && Get_Transporters_Num() < 1)
{
Set_Needs_Transporter(agent_ptr);
return GOAL_NEEDS_TRANSPORT;
}
else
{
return GOAL_FAILED;
}
}
}
}
return GOAL_IN_PROGRESS; // Maybe this is good as it is
}
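// Returns true if the goal should be considered complete (or impossible) and
// no further agents need to be committed: invalid goals, targets protected by
// wonders, unreachable or diplomatically forbidden targets, and targets that
// fail the order record's pretests all count as "totally complete".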
bool Goal::Get_Totally_Complete() const
{
if (Get_Invalid())
return true;
const GoalRecord *goal_record = g_theGoalDB->Get(m_goal_type);
Diplomat & diplomat = Diplomat::GetDiplomat(m_playerId);
PLAYER_INDEX target_owner = Get_Target_Owner();
MapPoint target_pos = Get_Target_Pos();
Player *player_ptr = g_player[ m_playerId ];
Assert(player_ptr != NULL);
// Don't attack as Barbarian a target that is protected by the Great Wall
if
(
m_playerId == PLAYER_INDEX_VANDALS
&& wonderutil_GetProtectFromBarbarians(g_player[target_owner]->m_builtWonders)
)
{
return true;
}
if(m_playerId > 0 && target_owner > 0 && goal_record->HasTargetProtectionWonder())
{
const WonderRecord *wonder_rec = goal_record->GetTargetProtectionWonderPtr();
if ((AgreementMatrix::s_agreements.TurnsAtWar(m_playerId, target_owner) < 0) &&
g_player[target_owner] &&
(g_player[target_owner]->GetBuiltWonders() & ((uint64)1 << (uint64)(wonder_rec->GetIndex()))))
return true;
}
if(!MapAnalysis::GetMapAnalysis().PlayerCanEnter(m_playerId, target_pos))
return true;
bool isspecial;
bool isstealth = false;
sint32 maxattack = 0;
bool iscivilian = false;
if ( goal_record->GetTargetTypeAttackUnit() ||
goal_record->GetTargetTypeSpecialUnit() )
{
if(g_theWorld->GetCity(m_target_army->RetPos()).m_id != 0)
return true;
sint32 maxdefense;
bool cancapture;
bool haszoc;
bool canbombard;
m_target_army->CharacterizeArmy
(
isspecial,
isstealth,
maxattack,
maxdefense,
cancapture,
haszoc,
canbombard
);
iscivilian = m_target_army->IsCivilian();
if(isspecial && !m_target_army->IsVisible(m_playerId))
return true;
}
if( goal_record->GetTargetTypeSpecialUnit()
&& !goal_record->GetTargetTypeAttackUnit()
&& maxattack > 0
){
return true;
}
if(goal_record->GetTargetTypeUnexplored())
{
Unit city = g_theWorld->GetCity(target_pos);
CellUnitList army;
g_theWorld->GetArmy(target_pos,army);
if (army.Num() > 0 || city.m_id != 0x0)
return true;
}
if(m_playerId != 0 && target_owner > 0 && target_owner != m_playerId)
{
bool regard_checked = false;
bool diplomacy_match = true;
if ( player_ptr->HasContactWith(target_owner))
{
if (iscivilian &&
goal_record->GetTargetOwnerHotEnemy() &&
(diplomat.GetPersonality()->GetAlignmentGood() ||
diplomat.GetPersonality()->GetAlignmentNeutral()))
return true;
if (goal_record->GetTargetOwnerNeutral())
{
diplomacy_match =
diplomat.TestEffectiveRegard(target_owner, NEUTRAL_REGARD);
regard_checked = true;
}
if ( (!regard_checked || !diplomacy_match) &&
goal_record->GetTargetOwnerColdEnemy() )
{
diplomacy_match =
diplomat.TestEffectiveRegard(target_owner, COLDWAR_REGARD);
regard_checked = true;
}
if ( (!regard_checked || !diplomacy_match) &&
goal_record->GetTargetOwnerHotEnemy() )
{
diplomacy_match =
diplomat.TestEffectiveRegard(target_owner, HOTWAR_REGARD);
regard_checked = true;
}
if ( (!regard_checked || !diplomacy_match) &&
goal_record->GetTargetOwnerAlly() )
{
diplomacy_match =
diplomat.TestEffectiveRegard(target_owner, ALLIED_REGARD);
regard_checked = true;
}
// If the goal is not executed by stealth units, forbid executing
// it if there is no incursion permission
// (depending on alignment) - Calvitix
if ((!diplomat.IncursionPermission(target_owner) &&
(diplomat.GetPersonality()->GetAlignmentGood() ||
diplomat.GetPersonality()->GetAlignmentNeutral()))
&& !goal_record->GetSquadClassStealth())
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, 0,
("\t GOAL %x (%s) (%3d,%3d): Diplomacy match failed : No permission to enter territory\n", this, g_theGoalDB->Get(m_goal_type)->GetNameText(),target_pos.x,target_pos.y));
return true;
}
if(!diplomacy_match)
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, 0,
("\t GOAL %x (%s) (%3d,%3d): Diplomacy match failed.\n", this, g_theGoalDB->Get(m_goal_type)->GetNameText(),target_pos.x,target_pos.y));
return true;
}
}
else if(!goal_record->GetTargetOwnerNoContact())
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, 0,
("\t GOAL %x (%s) (%3d,%3d): Target owner not contacted.\n", this, g_theGoalDB->Get(m_goal_type)->GetNameText(),target_pos.x,target_pos.y));
return true;
}
}
const OrderRecord * order_record = goal_record->GetExecute();
switch(order_record->GetTargetPretest())
{
case k_Order_TargetPretest_EnemyArmy_Bit:
case k_Order_TargetPretest_EnemySpecialUnit_Bit:
case k_Order_TargetPretest_EnemySettler_Bit:
case k_Order_TargetPretest_EnemyTradeUnit_Bit:
case k_Order_TargetPretest_AttackPosition_Bit:
if ((m_target_army.m_id != 0) && (m_target_army.GetOwner() == m_playerId))
{
Assert(false);
return true;
}
break;
case k_Order_TargetPretest_TradeRoute_Bit:
Assert( m_target_city.m_id != 0);
if ((m_target_city.m_id == 0) ||
(m_target_city.GetCityData()->GetTradeSourceList() == NULL) ||
(m_target_city.GetCityData()->GetTradeSourceList()->Num() <= 0))
return true;
break;
case k_Order_TargetPretest_TerrainImprovement_Bit:
if (goal_record->GetTargetTypeImprovement())
{
Assert( m_target_city.m_id != 0);
if (m_target_city.m_id == 0 ||
!m_target_city->GetCityData()->WasTerrainImprovementBuilt())
return true;
}
else
{
if (g_theWorld->GetCell(target_pos)->GetNumDBImprovements() <= 0)
return true;
}
break;
}
if(g_player[m_playerId]->GetGold() < order_record->GetGold())
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, 0,
("GOAL %x (%s): Not enough gold to perform goal.\n", this, g_theGoalDB->Get(m_goal_type)->GetNameText()));
return true;
}
if(goal_record->GetAvoidWatchfulCity())
{
Assert(m_target_city.m_id != 0);
if(m_target_city->IsWatchful())
return true;
}
if(goal_record->GetTargetTypeChokePoint())
{
CellUnitList army;
g_theWorld->GetArmy(target_pos, army);
if(g_theWorld->GetCell(target_pos)->GetNumUnits() > 0
&& army.GetOwner() != m_playerId)
return false;
if(Is_Satisfied() && ArmiesAtGoal())
return true;
}
if(order_record->GetUnitPretest_CanPlantNuke())
{
if (!g_player[m_playerId]->HasAdvance(advanceutil_GetNukeAdvance()))
return true;
}
if(order_record->GetTargetPretestEnemySpecialUnit())
{
if(g_theWorld->GetOwner(target_pos) != m_playerId)
return true;
}
if(!order_record->GetTargetPretestTerrainImprovement()
&& !order_record->GetTargetPretestTradeRoute()
){
if(!ArmyData::TargetValidForOrder(order_record, target_pos))
return true;
}
if(order_record->GetTargetPretestEnemySpecialUnit()
&& m_target_army.m_id != 0
&& m_target_army->CanBeExpelled()
){
return true;
}
if (order_record->GetUnitPretest_CanReformCity())
{
if (!m_target_city.GetCityData()->IsConverted())
return true;
}
if (order_record->GetUnitPretest_CanSueFranchise())
{
if (!m_target_city.GetCityData()->IsFranchised())
return true;
}
if (order_record->GetUnitPretest_EstablishEmbassy())
{
if (g_player[m_playerId]->HasEmbassyWith(m_target_city->GetOwner()))
return true;
}
if (order_record->GetUnitPretest_CanCreateFranchise())
{
if (m_target_city.GetCityData()->GetFranchiseTurnsRemaining() <= 0)
return true;
}
if (order_record->GetUnitPretest_CanBioTerror())
{
if (m_target_city.GetCityData()->IsBioInfected())
return true;
}
if (order_record->GetUnitPretest_CanNanoInfect())
{
if (m_target_city.GetCityData()->IsNanoInfected())
return true;
}
if (order_record->GetUnitPretest_CanConvertCity())
{
if (m_target_city.GetCityData()->GetConvertedTo() == m_playerId)
return true;
}
if (order_record->GetUnitPretest_CanInterceptTrade())
{
Assert( m_target_city.m_id != 0);
if ((m_target_city.m_id == 0) ||
((m_target_city.GetCityData()->GetTradeSourceList()) &&
(m_target_city.GetCityData()->GetTradeSourceList()->Num() <= 0)))
return true;
}
if (order_record->GetUnitPretest_CanInciteRevolution())
{
sint32 cost;
if (ArmyData::GetInciteRevolutionCost(target_pos, cost) &&
( cost > g_player[m_playerId]->GetGold()))
return true;
}
if (order_record->GetUnitPretest_CanNukeCity())
{
if (diplomat.GetNuclearLaunchTarget() == target_owner)
return true;
}
// Try to steal technology only if the other civ has more advances than the player.
// Otherwise, the spy can do something else
if (order_record->GetUnitPretest_CanStealTechnology())
{
sint32 num = 0;
delete [] g_player[m_playerId]->m_advances->CanAskFor(g_player[target_owner]->m_advances, num);
if(num <= 0)
return true;
}
// Abolitionists have to go to cities with slaves
if(order_record->GetUnitPretest_CanInciteUprising()
|| order_record->GetUnitPretest_CanUndergroundRailway ()
){
if (m_target_city.GetCityData()->SlaveCount() == 0)
return true;
}
if(order_record->GetUnitPretest_CanSlaveRaid())
{
sint32 popCount;
m_target_city.GetCityData()->GetPop(popCount);
// Slavers must go to cities with population to enslave; require an extra point so that the city isn't destroyed on conquest
if(popCount <= 2)
return true;
}
if(order_record->GetUnitPretest_NoFuelThenCrash())
{
if(g_theUnitPool->IsValid(m_target_city)
&& target_owner != m_playerId
){
return true;
}
else if(g_theArmyPool->IsValid(m_target_army)
&& target_owner != m_playerId
){
return true;
}
else if(!g_theWorld->IsAirfield(target_pos)){
return true;
}
}
return false;
}
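// Returns true if the goal's target no longer exists or no longer fits the
// goal type: dead target armies, invalid or wrongly owned target cities,
// removed endgame installations, already explored tiles, unusable settle
// positions, missing goody huts, or petrol stations that cannot refuel.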
bool Goal::Get_Invalid() const
{
if(m_playerId < 0)
{
Assert(false);
return true;
}
const GoalRecord *goal_record = g_theGoalDB->Get(m_goal_type);
if(goal_record->GetTargetTypeAttackUnit()
|| goal_record->GetTargetTypeSpecialUnit()
){
if(!g_theArmyPool->IsValid(m_target_army))
return true;
else if(m_target_army->Num() <= 0)
return true;
else
return false;
}
if(goal_record->GetTargetTypeCity()
|| goal_record->GetTargetTypeTradeRoute()
|| goal_record->GetTargetTypeImprovement()
){
if(!g_theUnitPool->IsValid(m_target_city))
return true;
CityData *city = m_target_city->GetCityData();
if(city == NULL)
{
Assert(false);
return true;
}
if(goal_record->GetTargetOwnerSelf()
&& city->GetOwner() != m_playerId)
return true;
else if(!goal_record->GetTargetOwnerSelf()
&& city->GetOwner() == m_playerId)
return true;
}
if(goal_record->GetTargetTypeEndgame())
{
return(!terrainutil_HasEndgame( Get_Target_Pos() ));
}
if (goal_record->GetTargetTypeUnexplored() )
return(g_player[m_playerId]->IsExplored(Get_Target_Pos()));
if(goal_record->GetTargetTypeSettleLand()
|| goal_record->GetTargetTypeSettleSea()
){
if(!SettleMap::s_settleMap.CanSettlePos(Get_Target_Pos()))
return true;
if(g_theWorld->HasCity(Get_Target_Pos()))
return true;
}
if(goal_record->GetTargetTypeGoodyHut())
return(g_theWorld->GetGoodyHut(Get_Target_Pos()) == NULL);
// Check whether the target can refuel the given army
if(goal_record->GetTargetTypePetrolStation())
{
if(g_theUnitPool->IsValid(m_target_city))
{
CityData *city = m_target_city->GetCityData();
if(city == NULL){
Assert(0);
return true;
}
if(goal_record->GetTargetOwnerSelf()
&& city->GetOwner() != m_playerId)
return true;
else if(!goal_record->GetTargetOwnerSelf()
&& city->GetOwner() == m_playerId)
return true;
}
// else if(g_theArmyPool->IsValid(m_target_army)){ // Aircraft carriers are missing
// return Cannot carry aircrafts;
// }
return !g_theWorld->IsAirfield(Get_Target_Pos());
}
return false;
}
bool Goal::Get_Removal_Time() const
{
if ( Get_Invalid() ||
(g_theGoalDB->Get(m_goal_type)->GetRemoveWhenComplete() &&
Get_Totally_Complete() ) )
return true;
return false;
}
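// Quick feasibility check before an agent may bid on this goal: enough fuel
// to reach the target and a refuelling point, the right bombard capability,
// cargo/transport constraints, land movement where the goal requires it,
// nuke restrictions, and finally whether the army can execute the goal's
// order and enter the target position at all.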
bool Goal::Pretest_Bid(const Agent_ptr agent_ptr, const MapPoint & target_pos) const
{
const Army & army = agent_ptr->Get_Army();
if (army->GetMinFuel() != 0x7fffffff)
{
sint32 num_tiles_to_half;
sint32 num_tiles_to_empty;
army->CalcRemainingFuel(num_tiles_to_half, num_tiles_to_empty);
num_tiles_to_empty = static_cast<sint32>(num_tiles_to_empty / k_MOVE_AIR_COST);
num_tiles_to_half = static_cast<sint32>(num_tiles_to_half / k_MOVE_AIR_COST);
sint32 distance_to_refuel;
sint32 distance_to_target;
MapPoint refuel_pos(-1, -1);
CtpAi::GetNearestRefuel(army, target_pos, refuel_pos, distance_to_refuel);
distance_to_target =
static_cast<sint32>(sqrt(static_cast<double>
(MapPoint::GetSquaredDistance(army->RetPos(), target_pos))
));
if (num_tiles_to_empty < distance_to_target + distance_to_refuel)
return false;
}
const GoalRecord *goal_rec = g_theGoalDB->Get(m_goal_type);
if (goal_rec->GetSquadClassCanBombard())
{
if(g_theWorld->GetCell(target_pos)->IsAnyUnitInCell())
{
CellUnitList* defenders = g_theWorld->GetCell(target_pos)->UnitArmy();
if(!army->CanBombardTargetType(*defenders))
return false;
}
else
{
return false;
}
}
const OrderRecord *order_rec = goal_rec->GetExecute();
sint32 transports;
sint32 max_cargo_slots;
sint32 empty_cargo_slots;
army->GetCargo(transports, max_cargo_slots, empty_cargo_slots);
if (transports > 0 )
{
if ((empty_cargo_slots < max_cargo_slots) || (army->HasCargo()))
{
ORDER_TEST cargo_test = army->CargoTestOrderHere(order_rec, target_pos);
if ( cargo_test == ORDER_TEST_ILLEGAL ||
cargo_test == ORDER_TEST_INVALID_TARGET )
return false;
else
return true;
}
else if (Needs_Transporter())
return true;
}
bool needs_land_unit = goal_rec->GetTargetTypeCity();
needs_land_unit &= goal_rec->GetTargetOwnerSelf();
needs_land_unit |= goal_rec->GetTargetTypeGoodyHut();
uint32 movement_type = army->GetMovementType();
if (army->HasCargo())
movement_type |= army->GetCargoMovementType();
bool land_movement = ((movement_type &
(k_Unit_MovementType_Land_Bit | k_Unit_MovementType_Mountain_Bit)) != 0x0);
land_movement &= ((movement_type & k_Unit_MovementType_Air_Bit) == 0x0);
if (needs_land_unit && !land_movement)
{
return false;
}
if ( army->CanNukeCity() &&
(g_theWorld->GetCell(target_pos)->GetOwner() != m_playerId ||
g_theWorld->GetCity(target_pos).m_id != 0x0) )
return false;
if ( army->TestOrder(order_rec) == ORDER_TEST_ILLEGAL )
return false;
if ( !army->CanEnter(target_pos) )
return false;
return true;
}
void Goal::Log_Debug_Info(const int &log) const
{
#if defined(_DEBUG) || defined(USE_LOGGING)
const char *name = g_theGoalDB->Get(m_goal_type)->GetNameText();
MapPoint pos = Get_Target_Pos();
if(m_raw_priority > BAD_UTILITY)
{
AI_DPRINTF
(
log,
m_playerId,
m_goal_type,
-1,
(
"\tGoal %9x,\t%s,\tRaw priority: %8d,\t(%3d,%3d) (%s)\n",
this,
name,
m_raw_priority,
pos.x,
pos.y,
(g_theWorld->HasCity(pos) ? g_theWorld->GetCity(pos).GetName() : "field")
)
);
}
else
{
MapPoint pos = Get_Target_Pos();
AI_DPRINTF
(
log,
m_playerId,
m_goal_type,
-1,
(
"\tGoal %9x,\t%s,\tBAD_UTILITY,\t(%d,%d) (%s)\n",
this,
name,
pos.x,
pos.y,
(g_theWorld->HasCity(pos) ? g_theWorld->GetCity(pos).GetName() : "field")
)
);
}
Plan_List::const_iterator plan_iter;
sint32 count = 0;
for
(
plan_iter = m_matches.begin();
plan_iter != m_matches.end();
++plan_iter
)
{
Agent_ptr agent = plan_iter->Get_Agent();
Utility value = plan_iter->Get_Matching_Value();
if(value > Goal::BAD_UTILITY)
{
SQUAD_CLASS goal_squad_class = g_theGoalDB->Get(m_goal_type)->GetSquadClass();
AI_DPRINTF(log, m_playerId, m_goal_type, -1,
("\t\t[%3d] match=%d %s (agent: %10x), goal class=%3x, squad class=%3x, test class=%d\t",
count++, value, g_theGoalDB->Get(m_goal_type)->GetNameText(), agent, goal_squad_class, agent->Get_Squad_Class(), ((goal_squad_class & agent->Get_Squad_Class()) == goal_squad_class)));
agent->Log_Debug_Info(k_DBG_SCHEDULER_ALL, this);
}
else
{
AI_DPRINTF
(
log,
m_playerId,
m_goal_type,
-1,
(
"\t\t[%3d] First match with bad utility: In all, there were %d matches with bad utility.\n",
count,
m_matches.size() - count
)
);
break;
}
}
bool log_army = false;
bool log_goal = CtpAiDebug::DebugLogCheck(m_playerId, m_goal_type, -1);
Agent_List::const_iterator agent_iter;
for
(
agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
if(CtpAiDebug::DebugLogCheck(m_playerId, m_goal_type, agent_ptr->Get_Army().m_id))
{
log_army = true;
pos = Get_Target_Pos(agent_ptr->Get_Army());
break;
}
}
if(!log_army || !log_goal)
{
return;
}
if (m_agents.size() > 0)
AI_DPRINTF(log, m_playerId, m_goal_type, -1, ("\t\t\tCommitted Agents (%d):\n", m_agents.size()));
for( agent_iter = m_agents.begin();
agent_iter != m_agents.end();
agent_iter++
){
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
agent_ptr->Log_Debug_Info(log, this);
}
#endif // _DEBUG || USE_LOGGING
}
bool Goal::FollowPathToTask( Agent_ptr first_army,
Agent_ptr second_army,
MapPoint &dest_pos,
Path &found_path)
{
Unit city = g_theWorld->GetCity(first_army->Get_Pos());
if(city.m_id != 0)
{
if(first_army->Get_Pos() != dest_pos)
{
if(first_army->IsNeededForGarrison())
{
Assert(false);
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, first_army->Get_Army().m_id,
("GOAL %x (%d): FollowPathToTask::Can not send army needed for garrison to destination (x=%d,y=%d):\n", this,
m_goal_type, dest_pos.x, dest_pos.y));
first_army->Log_Debug_Info(k_DBG_SCHEDULER, this);
uint8 magnitude = 220;
g_graphicsOptions->AddTextToArmy(first_army->Get_Army(), "GARRISON", magnitude);
return false;
}
else
{
                // Do nothing: city garrisons are calculated before the build phase, which is the only place it matters.
                // Garrison units are only calculated once, so nothing changes between process matches.
}
}
}
const GoalRecord *goal_rec = g_theGoalDB->Get(m_goal_type);
const OrderRecord *order_rec;
ORDER_TEST test = ORDER_TEST_OK;
if (m_sub_task == SUB_TASK_GOAL
|| m_sub_task == SUB_TASK_TRANSPORT_TO_GOAL) // @ToDo clean this
{
order_rec = goal_rec->GetExecute();
if (first_army->Get_Army()->HasCargo() )
{
test = first_army->Get_Army()->CargoTestOrderHere(order_rec, dest_pos );
if
(
first_army->Get_Army()->GetMovementTypeAir()
&& !first_army->Get_Army()->TestOrderAny(order_rec)
&& first_army->Get_Army()->TestCargoOrderAny(order_rec)
)
{
order_rec = CtpAi::GetUnloadOrder();
dest_pos = found_path.SnipEndUntilCargoCanEnter(first_army->Get_Army());
}
else if
(
(
first_army->Get_Pos() == dest_pos
&& city.m_id != 0x0
)
||
(
g_theWorld->HasCity(dest_pos)
&& g_theWorld->GetCity(dest_pos)->GetOwner() == first_army->Get_Army()->GetOwner()
)
)
{
order_rec = CtpAi::GetUnloadOrder();
}
}
else
{
test = first_army->Get_Army()->TestOrderHere(order_rec, dest_pos );
}
}
else
{
order_rec = CtpAi::GetMoveOrder();
}
if(test == ORDER_TEST_OK
|| test == ORDER_TEST_NO_MOVEMENT
){
//I want to see armytext even in optimized test version - Calvitix
Utility val = Compute_Agent_Matching_Value(first_army);
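        // Map the matching value onto a 0-255 magnitude for the on-map army text.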
uint8 magnitude = (uint8) (((5000000 - val)* 255.0) / 5000000);
const char * myText = goal_rec->GetNameText();
MBCHAR * myString = new MBCHAR[strlen(myText) + 80];
MBCHAR * goalString = new MBCHAR[strlen(myText) + 40];
memset(goalString, 0, strlen(myText) + 40);
memset(myString, 0, strlen(myText) + 80);
for (uint8 myComp = 0; myComp < strlen(myText) - 5; myComp++)
{
goalString[myComp] = myText[myComp + 5];
}
MapPoint targetPos = Get_Target_Pos(first_army->Get_Army());
switch (m_sub_task)
{
case SUB_TASK_RALLY:
sprintf(myString, "Group to (%d,%d), %s (%d,%d)", dest_pos.x, dest_pos.y, goalString, targetPos.x, targetPos.y);
break;
case SUB_TASK_TRANSPORT_TO_BOARD:
sprintf(myString, "Boat to (%d,%d), %s (%d,%d)", dest_pos.x, dest_pos.y, goalString, targetPos.x, targetPos.y);
break;
case SUB_TASK_TRANSPORT_TO_GOAL:
sprintf(myString, "Transp. to (%d,%d), %s (%d,%d)", dest_pos.x, dest_pos.y, goalString, targetPos.x, targetPos.y);
break;
case SUB_TASK_CARGO_TO_BOARD:
sprintf(myString, "Cargo. to (%d,%d), %s (%d,%d)", dest_pos.x, dest_pos.y, goalString, targetPos.x, targetPos.y);
break;
case SUB_TASK_AIRLIFT:
sprintf(myString, "Airlift to (%d,%d), %s (%d,%d)", dest_pos.x, dest_pos.y, goalString, targetPos.x, targetPos.y);
break;
case SUB_TASK_GOAL:
default:
sprintf(myString, "%s (%d,%d)", goalString, dest_pos.x, dest_pos.y);
break;
}
g_graphicsOptions->AddTextToArmy(first_army->Get_Army(), myString, magnitude, m_goal_type);
delete[] myString;
delete[] goalString;
if (first_army->Get_Can_Be_Executed())
{
Assert(order_rec);
if (order_rec)
{
if(m_sub_task != SUB_TASK_CARGO_TO_BOARD)
{
first_army->PerformOrderHere(order_rec, (Path *) &found_path);
}
else
{
first_army->PerformOrderHere(order_rec, (Path *) &found_path, GEV_INSERT_Tail);
}
}
}
else
{
// Nothing
}
return true;
}
else
{
const char * myText = goal_rec->GetNameText();
MBCHAR * myString = new MBCHAR[strlen(myText) + 80];
memset(myString, 0, strlen(myText) + 80);
sprintf(myString, "%s failed at (%d, %d), order: %s", goal_rec->GetNameText(), dest_pos.x, dest_pos.y, order_rec->GetNameText());
g_graphicsOptions->AddTextToArmy(first_army->Get_Army(), myString, 0, m_goal_type);
delete[] myString;
        if(test != ORDER_TEST_OK && test != ORDER_TEST_NO_MOVEMENT)
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, first_army->Get_Army().m_id,
("GOAL %x (%d): FollowPathToTask:: failed TestOrderHere( %s, (%d,%d))\n", this, m_goal_type,
order_rec->GetNameText(),dest_pos.x,dest_pos.y));
}
return false;
}
return true;
}
bool Goal::GotoTransportTaskSolution(Agent_ptr the_army, Agent_ptr the_transport, MapPoint & pos)
{
MapPoint dest_pos;
Path found_path;
bool check_dest;
bool found = false;
switch (m_sub_task)
{
case SUB_TASK_AIRLIFT:
{
MapPoint start_pos = the_army->Get_Pos();
            sint16 cargo_cont = g_theWorld->GetContinent(start_pos); // Dangerous: with a transport the target can be closer
Unit nearest_city;
MapPoint nearest_airfield;
double airfield_distance = 0.0;
double city_distance = 0.0;
bool airfield_found = g_player[m_playerId]->
GetNearestAirfield(start_pos, nearest_airfield, cargo_cont);
bool city_found = g_player[m_playerId]->
GetNearestCity(start_pos, nearest_city, city_distance, false);
if (airfield_found)
{
airfield_distance = MapPoint::GetSquaredDistance(start_pos, nearest_airfield);
}
if (airfield_found && city_found && (airfield_distance < city_distance))
dest_pos = nearest_airfield;
else if (city_found)
{
dest_pos = nearest_city.RetPos();
}
else
{
return false;
}
if
(
dest_pos == start_pos
&& dest_pos == the_transport->Get_Army()->RetPos()
)
{
the_army->MoveIntoTransport();
the_transport->Set_Can_Be_Executed(false);
pos = dest_pos;
return true;
}
check_dest = true;
found = Agent::FindPath(the_army->Get_Army(), dest_pos, check_dest, found_path);
if (found && FollowPathToTask(the_army, the_transport, dest_pos, found_path) )
{
// move_intersection = the_transport->Get_Army().GetMovementType();
found = Agent::FindPath(the_transport->Get_Army(), dest_pos, check_dest, found_path);
if (found && FollowPathToTask(the_transport, the_army, dest_pos, found_path) )
{
pos = dest_pos;
return true;
}
}
break;
}
case SUB_TASK_TRANSPORT_TO_BOARD:
{
if(!pos.IsValid())
{
dest_pos = the_army->Get_Pos();
check_dest = false;
}
else
{
dest_pos = pos;
check_dest = true;
}
if(!the_transport->Get_Can_Be_Executed())
{
pos = the_transport->Get_Target_Pos();
Assert(pos.IsValid());
return true;
}
if(dest_pos == the_transport->Get_Pos()
|| the_transport->Get_Army()->CheckValidDestination(dest_pos)
){
if(!pos.IsValid())
{
pos = dest_pos;
}
the_transport->Set_Target_Pos(dest_pos);
the_transport->Set_Can_Be_Executed(false);
return true;
}
uint32 move_intersection =
the_transport->Get_Army().GetMovementType() | the_army->Get_Army().GetMovementType();
found = the_transport->FindPathToBoard(move_intersection, dest_pos, check_dest, found_path);
if (!found)
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, the_transport->Get_Army().m_id,
("GOAL %x (%d):GotoTransportTaskSolution:: No path found from army to destination (x=%d,y=%d) (SUB_TASK_TRANSPORT_TO_BOARD):\n",
this, m_goal_type, dest_pos.x, dest_pos.y));
the_transport->Log_Debug_Info(k_DBG_SCHEDULER, this);
uint8 magnitude = 220;
MBCHAR * myString = new MBCHAR[256];
sprintf(myString, "NO PATH -> BOARD (%d,%d)", dest_pos.x, dest_pos.y);
g_graphicsOptions->AddTextToArmy(the_army->Get_Army(), myString, magnitude, m_goal_type);
delete[] myString;
Set_Cannot_Be_Used(the_transport, true);
}
else
{
MapPoint last = found_path.SnipEndUntilCanEnter(the_transport->Get_Army());
if ( (found_path.GetMovesRemaining() > 0) &&
!FollowPathToTask(the_transport, the_army, last, found_path) )
return false;
if(!pos.IsValid())
{
pos = last;
}
if(found_path.GetMovesRemaining() == 0)
{
the_transport->Set_Can_Be_Executed(false);
the_transport->Set_Target_Pos(pos);
}
return true;
}
break;
}
case SUB_TASK_CARGO_TO_BOARD:
{
dest_pos = pos;
MapPoint start_pos = the_army->Get_Pos();
Assert(pos.IsValid());
if (dest_pos == start_pos)
{
the_army->Set_Can_Be_Executed(true);
the_army->MoveIntoTransport();
return true;
}
bool transport_at_rendezvous;
if(!the_transport->Get_Army()->AtEndOfPath())
{
                // This may be a problem
MapPoint next_pos;
transport_at_rendezvous = the_transport->Get_Army()->GetNextPathPoint(next_pos);
transport_at_rendezvous &= !the_transport->Get_Army()->CanEnter(next_pos);
}
else
{
transport_at_rendezvous = true;
}
if(transport_at_rendezvous)
{
uint32 move_intersection = the_army->Get_Army().GetMovementType();
check_dest = true;
found = the_army->FindPathToBoard(move_intersection, dest_pos, check_dest, found_path);
}
else
{
check_dest = false;
found = Agent::FindPath(the_army->Get_Army(), dest_pos, check_dest, found_path);
if(!found)
{
uint32 move_intersection = the_transport->Get_Army().GetMovementType() |
the_army->Get_Army().GetMovementType();
found = the_army->FindPathToBoard(move_intersection, dest_pos, check_dest, found_path);
}
}
if(!found)
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, the_army->Get_Army().m_id,
("GOAL %x (%d): GotoTransportTaskSolution: No path found from army to destination (x=%d,y=%d) (SUB_TASK_CARGO_TO_BOARD):\n",
this, m_goal_type, dest_pos.x, dest_pos.y));
the_army->Log_Debug_Info(k_DBG_SCHEDULER, this);
uint8 magnitude = 220;
MBCHAR * myString = new MBCHAR[256];
sprintf(myString, "NO PATH -> BOARD (%d,%d)", dest_pos.x, dest_pos.y);
g_graphicsOptions->AddTextToArmy(the_army->Get_Army(), myString, magnitude, m_goal_type);
delete[] myString;
}
if (found && FollowPathToTask(the_army, the_transport, dest_pos, found_path) )
{
return true;
}
/* else
{
Set_Cannot_Be_Used(the_army, true);
}*/
break;
}
default:
Assert(false);
}
return false;
}
bool Goal::GotoGoalTaskSolution(Agent_ptr the_army, MapPoint & goal_pos)
{
if(the_army->Get_Army()->CheckValidDestination(goal_pos)) // If we are already moving along a path
return true;
Path found_path;
sint32 range = 0;
(void) g_theGoalDB->Get(m_goal_type)->GetExecute()->GetRange(range);
bool check_dest;
const GoalRecord * goal_rec = g_theGoalDB->Get(m_goal_type);
if (range > 0 ||
goal_rec->GetExecute()->GetTargetPretestAttackPosition() ||
(goal_rec->GetTargetTypeCity() && goal_rec->GetTargetOwnerSelf()))
check_dest = false;
else
check_dest = true;
bool waiting_for_buddies = !Ok_To_Rally()
&& m_sub_task == SUB_TASK_RALLY
&& g_theWorld->IsOnSameContinent(goal_pos, the_army->Get_Pos())
&& g_theWorld->IsLand(goal_pos)
&& g_theWorld->IsLand(the_army->Get_Pos());
bool found = false;
if(the_army->Get_Army()->HasCargo()
&& the_army->Get_Army()->GetMovementTypeAir()
&& the_army->Get_Army()->CanSpaceLaunch()
){
sint16 target_cont = g_theWorld->GetContinent(goal_pos);
Unit nearest_city;
double city_distance = 0.0;
bool city_found = g_player[m_playerId]->
GetNearestCity(goal_pos, nearest_city, city_distance, false, target_cont);
if (city_found)
{
found = Agent::FindPath(the_army->Get_Army(), nearest_city.RetPos(), true, found_path);
if (found) Set_Sub_Task(SUB_TASK_AIRLIFT);
if (the_army->Get_Pos() == nearest_city.RetPos())
{
the_army->PerformOrderHere(CtpAi::GetUnloadOrder(), (Path *) &found_path);
}
else if(!the_army->Get_Army()->HasLeftMap())
{
the_army->PerformOrderHere(CtpAi::GetSpaceLaunchOrder(), (Path *) &found_path);
}
return true;
}
}
else if ( the_army->Get_Army()->HasCargo()
&& !the_army->Get_Army()->GetMovementTypeAir())
{
        // Check if it is a single squad
        // Return true if we are a transporter and we need transporters
        // SUB_TASK_TRANSPORT_TO_GOAL
uint32 move_intersection =
the_army->Get_Army()->GetMovementType() | the_army->Get_Army()->GetCargoMovementType();
found = the_army->FindPathToBoard(move_intersection, goal_pos, false, found_path);
Assert(found);
if (found)
{
if(Get_Target_Pos(the_army->Get_Army()) == goal_pos)
{
Set_Sub_Task(SUB_TASK_TRANSPORT_TO_GOAL);
}
else
{
Set_Sub_Task(SUB_TASK_RALLY);
}
}
else
{
bool test = the_army->CanReachTargetContinent(goal_pos);
Assert(test);
}
}
else if ( the_army->Get_Army()->CanTransport()
&& !the_army->Get_Army()->GetMovementTypeAir()
&& !the_army->Get_Army()->CanEnter(goal_pos) )
{
// There is still a problem with air transporters
return true;
}
else
{
if (!waiting_for_buddies)
{
found = Agent::FindPath(the_army->Get_Army(), goal_pos, check_dest, found_path);
}
}
switch (m_sub_task) {
case SUB_TASK_GOAL:
if (!found)
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, the_army->Get_Army().m_id,
("GOAL %x (%s): GotoGoalTaskSolution: No path found from army (x=%d,y=%d) to goal (x=%d,y=%d) (SUB_TASK_GOAL):\n",
this, g_theGoalDB->Get(m_goal_type)->GetNameText(), the_army->Get_Pos().x, the_army->Get_Pos().y, goal_pos.x, goal_pos.y));
the_army->Log_Debug_Info(k_DBG_SCHEDULER, this);
uint8 magnitude = 220;
MBCHAR * myString = new MBCHAR[256];
sprintf(myString, "NO PATH to (%d,%d) - %s", goal_pos.x, goal_pos.y, g_theGoalDB->Get(m_goal_type)->GetNameText());
g_graphicsOptions->AddTextToArmy(the_army->Get_Army(), myString, magnitude, m_goal_type);
delete[] myString;
}
break;
case SUB_TASK_RALLY:
if (waiting_for_buddies)
{
Utility val = Compute_Agent_Matching_Value(the_army);
uint8 magnitude = (uint8)(((5000000 - val) * 255.0) / 5000000);
MBCHAR * myString = new MBCHAR[256];
sprintf(myString, "Waiting GROUP to GO (%d,%d)\n", goal_pos.x, goal_pos.y);
g_graphicsOptions->AddTextToArmy(the_army->Get_Army(), myString, magnitude, m_goal_type);
delete[] myString;
return true;
}
if (!found)
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, the_army->Get_Army().m_id,
("GOAL %x (%d):GotoGoalTaskSolution: No path found from army (x=%d,y=%d) to goal (x=%d,y=%d) (SUB_TASK_RALLY):\n",
this, m_goal_type, the_army->Get_Pos().x, the_army->Get_Pos().y, goal_pos.x, goal_pos.y));
the_army->Log_Debug_Info(k_DBG_SCHEDULER, this);
uint8 magnitude = 220;
MBCHAR * myString = new MBCHAR[256];
sprintf(myString, "NO PATH (GROUP)(%d,%d)", goal_pos.x, goal_pos.y);
g_graphicsOptions->AddTextToArmy(the_army->Get_Army(), myString, magnitude, m_goal_type);
delete[] myString;
}
break;
default:
{
if (!found)
{
AI_DPRINTF(k_DBG_SCHEDULER, m_playerId, m_goal_type, the_army->Get_Army().m_id,
("GOAL %x (%d):GotoGoalTaskSolution: No path found from army (x=%d,y=%d) to goal (x=%d,y=%d) (SUB_TASK_TRANSPORT):\n",
this, m_goal_type, the_army->Get_Pos().x, the_army->Get_Pos().y, goal_pos.x, goal_pos.y));
the_army->Log_Debug_Info(k_DBG_SCHEDULER, this);
uint8 magnitude = 220;
MBCHAR * myString = new MBCHAR[256];
sprintf(myString, "NO PATH (TRANSP.)(%d,%d)", goal_pos.x, goal_pos.y);
g_graphicsOptions->AddTextToArmy(the_army->Get_Army(), myString, magnitude, m_goal_type);
delete[] myString;
}
}
}
bool move_success = false;
if ( found )
{
move_success = FollowPathToTask(the_army, NULL, goal_pos, found_path);
}
else
{
move_success = TryTransport(the_army, goal_pos);
}
if (move_success)
{
        Assert(!the_army->Get_Can_Be_Executed());
the_army->Set_Can_Be_Executed(false);
}
return move_success;
}
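// Rallying is only worthwhile when every living land agent is already on the
// target continent; units at sea can always join the rally.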
bool Goal::Ok_To_Rally() const
{
if (m_agents.size() == 1)
return false;
if (!g_theWorld->IsLand(Get_Target_Pos())) // Problematic if we want to attack a stack of Destroyers
return true;
sint32 num_in_water = 0;
sint32 num_at_dest = 0;
sint32 count = 0;
MapPoint targetPos = Get_Target_Pos();
for
(
Agent_List::const_iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
const Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
Assert(agent_ptr);
if(!agent_ptr->Get_Is_Dead())
{
MapPoint const army_pos = agent_ptr->Get_Pos();
if (g_theWorld->IsLand(army_pos))
{
if(g_theWorld->IsOnSameContinent(army_pos, targetPos))
{
++num_at_dest;
}
                // Would have been removed for the commented-out stuff below
else
{
return false;
}
}
else
{
++num_in_water;
}
++count;
}
}
    /* // So far this isn't good: it makes transporters take part in the rally
if(m_current_needed_strength.Get_Transport() > 0)
{
if(!Needs_Transporter())
{
return true;
}
}
*/
return (num_in_water + num_at_dest) == count;
}
bool Goal::RallyComplete() const
{
if (m_agents.size() == 1)
return true;
bool incompleteStackFound = false;
for
(
Agent_List::const_iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
Assert(agent_ptr);
if (agent_ptr->Get_Is_Dead())
continue;
if(!agent_ptr->IsArmyPosFilled())
{
if(incompleteStackFound)
{
return false;
}
incompleteStackFound = true;
}
}
return true;
}
void Goal::GroupTroops()
{
// Maybe sort the agent list by id
// so that we use the lower ids.
for
(
Agent_List::iterator agent1_iter = m_agents.begin();
agent1_iter != m_agents.end();
++agent1_iter
)
{
Agent_ptr agent1_ptr = (Agent_ptr) *agent1_iter;
if
(
agent1_ptr->GetUnitsAtPos() == agent1_ptr->Get_Army()->Num() // Nothing to group here
|| agent1_ptr->Get_Is_Dead()
|| !agent1_ptr->Get_Can_Be_Executed()
)
{
continue;
}
for
(
Agent_List::iterator agent2_iter = m_agents.begin();
agent2_iter != m_agents.end();
++agent2_iter
)
{
Agent_ptr agent2_ptr = (Agent_ptr) *agent2_iter;
if( agent2_ptr->Get_Is_Dead()
|| !agent2_ptr->Get_Can_Be_Executed()
|| agent1_ptr == agent2_ptr
){
continue;
}
if(agent1_ptr->Get_Pos() == agent2_ptr->Get_Pos())
{
ORDER_TEST test1 = agent1_ptr->Get_Army()->CargoTestOrderHere(CtpAi::GetUnloadOrder(), agent1_ptr->Get_Pos());
ORDER_TEST test2 = agent2_ptr->Get_Army()->CargoTestOrderHere(CtpAi::GetUnloadOrder(), agent2_ptr->Get_Pos());
if(agent1_ptr->Get_Army()->HasCargo() && test1 != ORDER_TEST_ILLEGAL)
{
agent1_ptr->UnloadCargo();
}
else if(agent2_ptr->Get_Army()->HasCargo() && test2 != ORDER_TEST_ILLEGAL)
{
agent2_ptr->UnloadCargo();
}
else
{
agent1_ptr->Group_With(agent2_ptr);
}
}
}
}
}
MapPoint Goal::MoveToTarget(Agent_ptr rallyAgent)
{
bool check_dest;
const GoalRecord * goal_rec = g_theGoalDB->Get(m_goal_type);
sint32 range = 0;
(void) goal_rec->GetExecute()->GetRange(range);
if (range > 0 ||
goal_rec->GetExecute()->GetTargetPretestAttackPosition() ||
(goal_rec->GetTargetTypeCity() && goal_rec->GetTargetOwnerSelf()))
check_dest = false;
else
check_dest = true;
Path found_path;
bool found = Agent::FindPath(rallyAgent->Get_Army(), Get_Target_Pos(rallyAgent->Get_Army()), check_dest, found_path);
Assert(found);
if(!found)
{
return rallyAgent->Get_Pos();
}
// MapPoint rallyPos = found_path.SnipEndUntilCannotEnter(rallyAgent->Get_Army());
MapPoint rallyPos = found_path.SnipEndUntilCargoCanEnter(rallyAgent->Get_Army());
FollowPathToTask(rallyAgent, NULL, rallyPos, found_path);
return rallyPos;
}
MapPoint Goal::MoveOutOfCity(Agent_ptr rallyAgent)
{
MapPoint rallyPos = rallyAgent->Get_Pos();
if(g_theWorld->GetCity(rallyPos).IsValid())
{
MapPoint tempPos;
for(sint32 i = 0 ; i < NOWHERE; i++)
{
bool result = rallyPos.GetNeighborPosition(WORLD_DIRECTION(i), tempPos);
if(result)
{
CellUnitList *the_army = NULL;
the_army = g_theWorld->GetArmyPtr(tempPos);
if(!the_army
&& rallyAgent->Get_Army()->CanEnter(tempPos)
){ //search for cell without army
rallyPos = tempPos;
break;
}
}
}
if(!GotoGoalTaskSolution(rallyAgent, rallyPos))
Assert(false);
}
return rallyPos;
}
Agent_ptr Goal::GetRallyAgent() const
{
MapPoint targetPos = Get_Target_Pos();
Agent_ptr rallyAgent = NULL;
sint32 minDistance = 0x7fffffff;
for
(
Agent_List::const_iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
if(agent_ptr->GetUnitsAtPos() >= k_MAX_ARMY_SIZE)
{
continue;
}
sint32 distance = MapPoint::GetSquaredDistance(agent_ptr->Get_Pos(), targetPos);
if(distance < minDistance)
{
minDistance = distance;
rallyAgent = agent_ptr;
}
}
Assert(rallyAgent != NULL);
return rallyAgent;
}
MapPoint Goal::GetFreeNeighborPos(MapPoint pos) const
{
MapPoint nextPos(-1, -1);
MapPoint tempPos;
for(sint32 i = 0 ; i < NOWHERE; i++)
{
bool result = pos.GetNeighborPosition(WORLD_DIRECTION(i), tempPos);
if(result)
{
CellUnitList *the_army = g_theWorld->GetArmyPtr(tempPos);
if(!the_army)
{ //search for cell without army
nextPos = tempPos;
break;
}
}
}
// Could happen in that case we have to find something else
// Assert(nextPos.IsValid());
return nextPos;
}
bool Goal::RallyTroops()
{
Set_Sub_Task(SUB_TASK_RALLY);
    // A new rally algorithm; the old one is actually better in principle,
    // but unfortunately less complete
#if 1
// Group armies first
GroupTroops();
Agent_ptr rallyAgent = GetRallyAgent();
if(rallyAgent == NULL)
{
return true;
}
MapPoint rallyPos = rallyAgent->Get_Army()->HasCargo() ? MoveToTarget(rallyAgent) : MoveOutOfCity(rallyAgent);
rallyAgent->WaitHere(Get_Target_Pos(rallyAgent->Get_Army()));
Squad_Strength strength = m_current_attacking_strength;
sint32 unitsAtRallyPos = (rallyPos == rallyAgent->Get_Pos()) ? rallyAgent->GetUnitsAtPos() : rallyAgent->Get_Army()->Num();
for
(
Agent_List::iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
Assert(agent_ptr);
if( agent_ptr->Get_Is_Dead()
|| !agent_ptr->Get_Can_Be_Executed()
){
continue;
}
if(!agent_ptr->CanMove()
&& !agent_ptr->Get_Army()->HasCargo()
|| agent_ptr == rallyAgent
){
agent_ptr->Set_Can_Be_Executed(false);
continue;
}
if
(
agent_ptr->Get_Army()->Num() >= k_MAX_ARMY_SIZE
&& rallyPos.IsNextTo(agent_ptr->Get_Pos())
)
{
continue;
}
// Send units to rally position
if(!GotoGoalTaskSolution(agent_ptr, rallyPos))
{
// @ToDo: Check for transports, too.
strength -= agent_ptr->Get_Squad_Strength();
if(strength.HasEnough(m_current_needed_strength))
{
continue;
}
else
{
return false;
}
}
// Count units at target position or sent to target position
unitsAtRallyPos += agent_ptr->Get_Army()->Num();
// If target position is full or will be full
if(unitsAtRallyPos > k_MAX_ARMY_SIZE)
{
unitsAtRallyPos -= k_MAX_ARMY_SIZE;
// Change target
if(agent_ptr->Get_Army()->Num() < k_MAX_ARMY_SIZE)
{
rallyPos = GetFreeNeighborPos(rallyPos);
if(!rallyPos.IsValid())
{
Assert(false);
// Find another rally point
}
                // Outgroup the units over the limit
                // Outgrouping is done before the army is sent to its target
agent_ptr->Get_Army()->RemainNumUnits(agent_ptr->Get_Army()->Num() - unitsAtRallyPos);
}
}
}
#else
Agent_List tmp_agents = m_agents;
Agent_List::iterator agent1_iter = tmp_agents.begin();
    /// @ToDo: Check whether this square design is necessary
while (agent1_iter != tmp_agents.end())
{
Agent_ptr agent1_ptr = (Agent_ptr) *agent1_iter;
Assert(agent1_ptr);
if( agent1_ptr->Get_Is_Dead()
|| !agent1_ptr->Get_Can_Be_Executed()
){
++agent1_iter;
continue;
}
bool agent1_is_partial = (agent1_ptr->Get_Army().Num() < k_MAX_ARMY_SIZE );
sint32 min_distance = (g_mp_size.x + g_mp_size.y);
min_distance *= min_distance;
Agent_List::iterator closest_agent_iter = tmp_agents.end();
bool partiality_found = false;
for
(
Agent_List::iterator agent2_iter = tmp_agents.begin();
agent2_iter != tmp_agents.end();
++agent2_iter
)
{
Agent_ptr agent2_ptr = (Agent_ptr) *agent2_iter;
if(agent1_ptr == agent2_ptr)
continue;
if(agent2_ptr->Get_Is_Dead())
continue;
bool agent2_is_partial = (agent2_ptr->Get_Army().Num() < k_MAX_ARMY_SIZE);
if( (partiality_found) &&
(agent2_is_partial != agent1_is_partial) )
continue;
sint32 const distance =
MapPoint::GetSquaredDistance(agent1_ptr->Get_Pos(), agent2_ptr->Get_Pos());
if(distance < min_distance)
{
min_distance = distance;
closest_agent_iter = agent2_iter;
}
if( agent1_is_partial == agent2_is_partial &&
!partiality_found)
{
partiality_found = true;
min_distance = distance;
closest_agent_iter = agent2_iter;
}
}
if(min_distance < 1)
{
agent1_ptr->Group_With((Agent_ptr) *closest_agent_iter);
}
else if( closest_agent_iter != tmp_agents.end() )
{
            Agent_ptr closest_agent_ptr = (Agent_ptr) *closest_agent_iter;
            MapPoint closest_agent_pos;
if(closest_agent_ptr->Get_Can_Be_Executed())
{
closest_agent_pos = closest_agent_ptr->Get_Pos();
}
else
{
closest_agent_pos = closest_agent_ptr->Get_Target_Pos();
}
            // To avoid groups being blocked when a unit is in a city
            // (problem with garrison -> not enough room)
sint32 cells;
if(!g_theWorld->GetCity(closest_agent_pos).IsValid()
|| agent1_ptr->GetRounds(closest_agent_pos, cells) > 2
){
                // Should be superfluous
Set_Sub_Task(SUB_TASK_RALLY);
if (!GotoGoalTaskSolution(agent1_ptr, closest_agent_pos))
return false;
}
else
{
uint8 magnitude = 220;
MBCHAR * myString = new MBCHAR[256];
MapPoint goal_pos;
goal_pos = Get_Target_Pos(agent1_ptr->Get_Army());
sprintf(myString, "Waiting GROUP to GO (%d,%d)", goal_pos.x, goal_pos.y);
g_graphicsOptions->AddTextToArmy(agent1_ptr->Get_Army(), myString, magnitude);
delete[] myString;
}
MapPoint agent1_pos = agent1_ptr->Get_Pos();
if( g_theWorld->GetCity(closest_agent_pos).IsValid() || closest_agent_ptr->GetRounds(agent1_pos, cells) > 2)
{
if (g_theWorld->GetCity(agent1_pos).IsValid() && g_theWorld->GetCity(closest_agent_pos).IsValid()) //two units are in another town
{
MapPoint tempPos;
for(sint32 i = 0 ; i < NOWHERE; i++)
{
if(closest_agent_pos.GetNeighborPosition(WORLD_DIRECTION(i),tempPos))
{
CellUnitList *the_army = g_theWorld->GetArmyPtr(tempPos);
if(!the_army)
{ //search for cell without army
agent1_pos = tempPos;
break;
}
}
}
}
                // Should be superfluous
Set_Sub_Task(SUB_TASK_RALLY);
if(!GotoGoalTaskSolution(closest_agent_ptr, agent1_pos))
return false;
}
else
{
CellUnitList *the_army = g_theWorld->GetArmyPtr(closest_agent_pos);
if(static_cast<uint32>(the_army->Num()) >= m_agents.size() && m_agents.size() > k_MAX_ARMY_SIZE/2)
{
MapPoint tempPos;
for(sint32 i = 0 ; i < NOWHERE; i++)
{
if(closest_agent_pos.GetNeighborPosition(WORLD_DIRECTION(i),tempPos))
{
CellUnitList *the_army = g_theWorld->GetArmyPtr(tempPos);
if(!the_army)
{ //search for cell without army
                                        // Should be superfluous
Set_Sub_Task(SUB_TASK_RALLY);
if(!GotoGoalTaskSolution(closest_agent_ptr, tempPos))
return false;
break;
}
}
}
}
}
/// @ToDo: Check whether this is necessary, and what does it do
tmp_agents.insert(tmp_agents.begin(), *closest_agent_iter);
tmp_agents.erase(closest_agent_iter);
}
else
{
Assert(false);
}
++agent1_iter;
}
#endif
return true;
}
bool Goal::UnGroupTroops()
{
bool breturn = false;
for
(
Agent_List::iterator agent1_iter = m_agents.begin();
agent1_iter != m_agents.end();
++agent1_iter
)
{
Agent_ptr agent1_ptr = (Agent_ptr) *agent1_iter;
Assert(agent1_ptr);
if (!agent1_ptr->Get_Is_Dead())
{
if (agent1_ptr->Get_Army().Num() > 1)
{
agent1_ptr->Ungroup_Order();
}
breturn = true;
}
}
return breturn;
}
bool Goal::UnGroupComplete() const
{
for
(
Agent_List::const_iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
Assert(agent_ptr);
if (!agent_ptr->Get_Is_Dead() &&
(agent_ptr->Get_Army().Num() > 1)
)
{
return false;
}
}
return true;
}
bool Goal::TryTransport(Agent_ptr agent_ptr, const MapPoint & goal_pos)
{
if (g_theGoalDB->Get(m_goal_type)->GetNoTransport())
return false;
return LoadTransporters(agent_ptr);
}
bool Goal::FindTransporters(const Agent_ptr & agent_ptr, std::list< std::pair<Utility, Agent_ptr> > & transporter_list)
{
std::pair<Utility, Agent_ptr> transporter;
double max_utility = Goal::BAD_UTILITY;
for
(
Agent_List::iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr possible_transport = (Agent_ptr) *agent_iter;
sint32 transports = 0;
sint32 max_slots = 0;
sint32 empty_slots = 0;
possible_transport->Get_Army()->GetCargo(transports, max_slots, empty_slots);
if(max_slots <= 0)
continue;
if(empty_slots <= 0)
continue;
if(possible_transport == agent_ptr)
continue;
SQUAD_CLASS goal_squad_class = g_theGoalDB->Get(m_goal_type)->GetSquadClass();
if(max_slots != empty_slots && ((goal_squad_class & possible_transport->Get_Squad_Class()) != goal_squad_class))
continue;
if(!possible_transport->CanReachTargetContinent(Get_Target_Pos()))
{
continue;
}
Utility utility = Goal::BAD_UTILITY;
if(agent_ptr->EstimateTransportUtility(possible_transport, utility))
{
transporter.first = utility;
transporter.second = possible_transport;
transporter_list.push_back(transporter);
}
}
// Probably more stuff needs to be done here
m_current_needed_strength.Set_Transport(static_cast<sint8>(agent_ptr->Get_Army()->Num()));
transporter_list.sort(std::greater<std::pair<Utility,class Agent *> >());
return transporter_list.size() > 0;
}
bool Goal::LoadTransporters(Agent_ptr agent_ptr)
{
std::list< std::pair<Utility, Agent_ptr> > transporter_list;
if(!FindTransporters(agent_ptr, transporter_list))
return false;
bool success = false;
sint32 foundSlots = 0;
MapPoint pos(-1,-1);
/// @ToDo: Add transport escorts
for
(
std::list< std::pair<Utility, Agent_ptr> >::iterator
transporter = transporter_list.begin();
transporter != transporter_list.end();
++transporter
)
{
Agent_ptr transport_ptr = transporter->second;
transport_ptr->Log_Debug_Info(k_DBG_SCHEDULER, this);
if(transport_ptr->Get_Army()->GetMovementTypeAir()
&& transport_ptr->Get_Army()->CanSpaceLaunch()
){
// @ToDo: Check whether this really works
Set_Sub_Task(SUB_TASK_AIRLIFT);
}
else
{
Set_Sub_Task(SUB_TASK_TRANSPORT_TO_BOARD);
}
if(!GotoTransportTaskSolution(agent_ptr, transport_ptr, pos))
continue;
success = true;
Assert(pos.IsValid());
sint32 transporters = 0;
sint32 max = 0;
sint32 empty = 0;
transport_ptr->Get_Army()->GetCargo(transporters, max, empty);
foundSlots += empty;
if(agent_ptr->Get_Army()->Num() <= foundSlots)
break;
}
if (success)
{
Assert(foundSlots != 0);
if(foundSlots == 0)
{
return false;
}
Agent_ptr transport_ptr = transporter_list.begin()->second;
Set_Sub_Task(SUB_TASK_CARGO_TO_BOARD);
success = GotoTransportTaskSolution(agent_ptr, transport_ptr, pos);
g_player[m_playerId]->
AddCargoCapacity(static_cast<sint16>(-1 * agent_ptr->Get_Army().Num()));
}
else
{
Set_Sub_Task(SUB_TASK_GOAL);
}
return success;
}
bool Goal::CanReachTargetContinent(Agent_ptr agent_ptr) const
{
return agent_ptr->CanReachTargetContinent(Get_Target_Pos());
}
bool Goal::ArmiesAtGoal() const
{
MapPoint pos = Get_Target_Pos();
for
(
Agent_List::const_iterator agent_iter = m_agents.begin();
agent_iter != m_agents.end();
++agent_iter
)
{
Agent_ptr agent_ptr = (Agent_ptr) *agent_iter;
if (agent_ptr->Get_Pos() != pos)
return false;
}
return true;
}
sint32 Goal::GetThreatenBonus() const
{
const GoalRecord *goal_record = g_theGoalDB->Get(m_goal_type);
if (goal_record->GetThreatenTypeNone())
return 0;
switch (goal_record->GetThreatenType())
{
case k_Goal_ThreatenType_DestroyCity_Bit:
{
Threat tmp_threat;
const Diplomat & diplomat = Diplomat::GetDiplomat(m_playerId);
if (diplomat.HasThreat(Get_Target_Owner(), THREAT_DESTROY_CITY, tmp_threat))
{
if (tmp_threat.detail.arg.cityId == m_target_city.m_id)
return goal_record->GetThreatenBonus();
}
break;
}
}
return 0;
}
bool Goal::Goal_Too_Expensive() const
{
return (m_current_attacking_strength.Get_Agent_Count() > k_MAX_ARMY_SIZE)
&& (m_current_attacking_strength.Get_Value() >
m_current_needed_strength.Get_Value() * 3
);
}
MapPoint Goal::GetClosestTransporterPos(const Agent_ptr agent_ptr) const
{
sint32 best_squared_dist = 0x7fffffff;
MapPoint best_target_pos = MapPoint(-1, -1);
for
(
Plan_List::const_iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(!match_iter->Get_Agent()->Get_Is_Dead() && match_iter->Get_Needs_Cargo())
{
sint32 tmp_squared_dist = MapPoint::GetSquaredDistance(match_iter->Get_Agent()->Get_Pos(), agent_ptr->Get_Pos());
if (tmp_squared_dist < best_squared_dist)
{
best_squared_dist = tmp_squared_dist;
best_target_pos = match_iter->Get_Agent()->Get_Pos();
}
}
}
return best_target_pos;
}
MapPoint Goal::GetClosestCargoPos(const Agent_ptr agent_ptr) const
{
sint32 best_squared_dist = 0x7fffffff;
MapPoint best_target_pos = MapPoint(-1, -1);
for
(
Plan_List::const_iterator match_iter = m_matches.begin();
match_iter != m_matches.end();
++match_iter
)
{
if(!match_iter->Get_Agent()->Get_Is_Dead() && match_iter->Get_Needs_Transporter())
{
sint32 tmp_squared_dist = MapPoint::GetSquaredDistance(match_iter->Get_Agent()->Get_Pos(), agent_ptr->Get_Pos());
if (tmp_squared_dist < best_squared_dist)
{
best_squared_dist = tmp_squared_dist;
best_target_pos = match_iter->Get_Agent()->Get_Pos();
}
}
}
return best_target_pos;
}
void Goal::ResetNeededTransport()
{
m_current_needed_strength.Set_Transport(0);
}
/**
* The implementation of the rmic for WebLogic
*
* @since Ant 1.4
*/
public class WLRmic extends DefaultRmicAdapter {
public boolean execute() throws BuildException {
getRmic().log("Using WebLogic rmic", Project.MSG_VERBOSE);
Commandline cmd = setupRmicCommand(new String[] {"-noexit"});
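        // The -noexit flag keeps weblogic.rmic from calling System.exit() and
        // terminating the Ant JVM.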
AntClassLoader loader = null;
try {
// Create an instance of the rmic
Class c = null;
if (getRmic().getClasspath() == null) {
c = Class.forName("weblogic.rmic");
} else {
loader
= getRmic().getProject().createClassLoader(getRmic().getClasspath());
c = Class.forName("weblogic.rmic", true, loader);
}
Method doRmic = c.getMethod("main",
new Class [] {String[].class});
doRmic.invoke(null, new Object[] {cmd.getArguments()});
return true;
} catch (ClassNotFoundException ex) {
throw new BuildException("Cannot use WebLogic rmic, as it is not "
+ "available. A common solution is to "
+ "set the environment variable "
+ "CLASSPATH.", getRmic().getLocation());
} catch (Exception ex) {
if (ex instanceof BuildException) {
throw (BuildException) ex;
} else {
throw new BuildException("Error starting WebLogic rmic: ", ex,
getRmic().getLocation());
}
} finally {
if (loader != null) {
loader.cleanup();
}
}
}
/**
* Get the suffix for the rmic stub classes
*/
public String getStubClassSuffix() {
return "_WLStub";
}
/**
* Get the suffix for the rmic skeleton classes
*/
public String getSkelClassSuffix() {
return "_WLSkel";
}
}
from setuptools import setup
with open('./README.md', 'r') as fi:
readme = fi.read()
setup(
name='{{cookiecutter.name}}',
version='{{cookiecutter.version}}',
author='{{cookiecutter.author}}',
author_email='{{cookiecutter.author_email}}',
packages=['{{cookiecutter.name}}'],
description='{{cookiecutter.description}}',
long_description=readme,
long_description_content_type='text/markdown',
)
"""This contains various functions used to compose the Bonaire API SSH commands."""
import re
from typing import Optional
from rassh.datatypes import WellFormedCommand
from rassh.exceptions.exception_with_status import ExceptionWithStatus
def set_group(ssh_command, ap_wiredmac: str, ap_group: str, cmd: WellFormedCommand):
lines = ssh_command.expect_command(ssh_command.ssh_manager.master_controller,
"whitelist-db rap modify mac-address " + ap_wiredmac + " ap-group "
+ ap_group, cmd)
for line in lines:
if line.startswith("Entry Does not Exist"):
_ = ssh_command.expect_command(ssh_command.ssh_manager.master_controller,
"whitelist-db rap add mac-address " + ap_wiredmac
+ " ap-group " + ap_group, cmd)
lines = ssh_command.expect_command(ssh_command.ssh_manager.master_controller,
"ap-regroup wired-mac " + ap_wiredmac + " " + ap_group, cmd)
if lines:
for line in lines:
if line.startswith("AP with MAC"):
# Might say "AP with MAC address PP:QQ:RR:SS:TT:UU not found."
# This means the AP was not found on the controller.
raise ExceptionWithStatus("Error: AP not found on controller when setting group.", 500)
elif line.startswith("NOTE: For cert RAP ap-group specified in RAP whitelist will take precedence"):
# You will see this line even if the group name is completely fictitious, no error is shown.
# This is as close as we ever get to knowing it was a success. Return (without an exception).
return
# TODO Is this correct? Or will RAP show "AP with MAC address ..."
raise ExceptionWithStatus("Error: Unexpected output when setting group.", 500)
# TODO is this actually an error? Can you ever set a group and *not* see "NOTE: ..."?
raise ExceptionWithStatus("Error: No output when setting group.", 500)
def reprovision_remote(ssh_command, remote_ap: int, cmd: WellFormedCommand):
if remote_ap == 1:
_ = ssh_command.expect_command(ssh_command.ssh_manager.master_controller, "remote ap", cmd)
else:
_ = ssh_command.expect_command(ssh_command.ssh_manager.master_controller, "no remote ap", cmd)
def enter_provisioning_mode(ssh_command, cmd: WellFormedCommand):
_ = ssh_command.expect_command(ssh_command.ssh_manager.master_controller, "configure t", cmd)
_ = ssh_command.expect_command(ssh_command.ssh_manager.master_controller, "provision-ap", cmd)
def end_end(ssh_command, cmd: WellFormedCommand):
_ = ssh_command.expect_command(ssh_command.ssh_manager.master_controller, "end", cmd)
_ = ssh_command.expect_command(ssh_command.ssh_manager.master_controller, "end", cmd)
def reprovision_or_enqueue(ssh_command, request: str, ap_wiredmac: str, cmd: WellFormedCommand):
"""If an AP is down when attempting to reprovision, postpone the reprovisioning (and other actions).
Run this command near the start of the process so we can fail (enqueue) early."""
lines = ssh_command.expect_command(ssh_command.ssh_manager.master_controller,
"read-bootinfo wired-mac " + ap_wiredmac, cmd)
for line in lines:
if line.startswith("AP with MAC"):
# Might say "AP with MAC address PP:QQ:RR:SS:TT:UU not found."
# This means the AP was not found on the controller.
raise ExceptionWithStatus("Error: AP was not found on the controller when reprovisioning.", 500)
if line.startswith("AP is down"):
# Enqueue the entire provisioning task until the AP is up.
enqueue_status = ssh_command.ssh_manager.queue.enqueue_request(request)
if enqueue_status:
raise ExceptionWithStatus("AP is down, command has been enqueued.", 202)
else:
                raise ExceptionWithStatus("Error: AP is down and command could not be enqueued because of "
                                          + "a queue error.", 500)
def get_lms_ip_and_ap_status(ssh_command, ap_wiredmac: str, cmd: WellFormedCommand) -> (Optional[str], str):
"""Get the IP of the LMS that knows more about this AP."""
lines = ssh_command.expect_command(ssh_command.ssh_manager.master_controller,
"show ap details wired-mac " + ap_wiredmac, cmd)
lms = None
ap_status = "Down"
for line in lines:
if line.startswith("LMS"):
parts = line.split()
try:
lms = parts[3]
except IndexError:
raise ExceptionWithStatus("Could not parse LMS IP.", 500)
if line.startswith("Status"):
parts = line.split()
try:
ap_status = parts[1]
except IndexError:
raise ExceptionWithStatus("Could not parse AP status.", 500)
# Might say """AP with MAC address PP:QQ:RR:SS:TT:UU not found."""
if line.startswith("AP with MAC "):
raise ExceptionWithStatus("AP not found on master when getting LMS.", 404)
return lms, ap_status
def get_lms_ip_and_connect_lms(ssh_command, ap_wiredmac: str, cmd: WellFormedCommand) -> str:
"""Connect to the IP of the LMS that knows more about this AP. Use this as a prelude to running LMS commands.
Includes some helpful exceptions so we know not to proceed with configuration if LMS is unavailable."""
lms, ap_status = get_lms_ip_and_ap_status(ssh_command, ap_wiredmac, cmd)
if ap_status == "Down":
raise ExceptionWithStatus("AP is down, cannot proceed.", 412)
if lms is None:
raise ExceptionWithStatus("No LMS found for this AP.", 404)
# Dynamically add an LMS if it is not already known (we may not have connected to it before).
if lms not in ssh_command.ssh_manager.switches:
ssh_command.ssh_manager.lms_ssh_connections[lms] = ssh_command.ssh_manager.get_new_expect_connection(lms)
# Get back to the "enable" prompt, in case something went wrong the last time we used this LMS.
_ = ssh_command.expect_command(lms, "end", cmd)
_ = ssh_command.expect_command(lms, "end", cmd)
return lms
def get_ap_name(ssh_command, ap_wiredmac: str, cmd: WellFormedCommand) -> str:
"""Get the name of this AP from its wired MAC."""
lines = ssh_command.expect_command(ssh_command.ssh_manager.master_controller,
"show ap details wired-mac " + ap_wiredmac + " | include Basic", cmd)
ap_name = None
for line in lines:
if line.startswith("AP"):
groups = re.findall(r'AP "(.*)" Basic Information', line)
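            # e.g. 'AP "ap-building-1" Basic Information' -> ['ap-building-1']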
try:
ap_name = groups[0]
break
except IndexError:
raise ExceptionWithStatus("Could not parse AP name.", 500)
if not ap_name:
raise ExceptionWithStatus("AP name not found for this wired MAC.", 404)
return ap_name
def get_group(ssh_command, ap_wiredmac: str, cmd: WellFormedCommand) -> Optional[str]:
lines = ssh_command.expect_command(ssh_command.ssh_manager.master_controller,
"show ap details wired-mac " + ap_wiredmac + " | include Group", cmd)
group = None
for line in lines:
if line.startswith("Group"):
parts = line.split()
try:
group = parts[1]
except IndexError:
raise ExceptionWithStatus("Could not parse group name from controller output.", 500)
if line.startswith("AP with MAC"):
# """AP with MAC address PP:QQ:RR:SS:TT:UU not found."""
raise ExceptionWithStatus("AP not found when getting group.", 404)
return group
def lms_get_gains(ssh_command, lms: str, ap_wiredmac: str, cmd: WellFormedCommand) -> tuple:
lines = ssh_command.expect_command(lms, "show ap provisioning wired-mac " + ap_wiredmac
+ ' | include "gain for 802.11"', cmd)
a_ant_gain = None
g_ant_gain = None
for line in lines:
# Might say """AP is not registered with this switch"""
if line.startswith("AP is not registered with this switch"):
raise ExceptionWithStatus("AP not registered on LMS when getting gains.", 404)
# Might say """AP with MAC address PP:QQ:RR:SS:TT:UU not found."""
if line.startswith("AP with MAC "):
raise ExceptionWithStatus("AP not found on LMS when getting gains.", 404)
if line.startswith("Antenna gain for 802.11a"):
parts = line.split()
try:
if parts[4] == "N/A":
a_ant_gain = None
else:
a_ant_gain = parts[4]
except IndexError:
raise ExceptionWithStatus("Could not parse antenna gain (a) from controller output.", 500)
if line.startswith("Antenna gain for 802.11g"):
parts = line.split()
try:
if parts[4] == "N/A":
g_ant_gain = None
else:
g_ant_gain = parts[4]
except IndexError:
raise ExceptionWithStatus("Could not parse antenna gain (g) from controller output.", 500)
return (a_ant_gain, g_ant_gain)
def lms_get_remote_ap(ssh_command, lms: str, ap_wiredmac: str, cmd: WellFormedCommand) -> Optional[int]:
lines = ssh_command.expect_command(lms, "show ap provisioning wired-mac " + ap_wiredmac
+ ' | include "Remote AP"', cmd)
remote_ap = None
for line in lines:
# Might say """AP is not registered with this switch"""
if line.startswith("AP is not registered with this switch"):
raise ExceptionWithStatus("AP not registered on LMS when getting remote AP.", 404)
# Might say """AP with MAC address PP:QQ:RR:SS:TT:UU not found."""
if line.startswith("AP with MAC "):
raise ExceptionWithStatus("AP not found on LMS when getting remote AP.", 404)
if line.startswith("Remote AP"):
parts = line.split()
try:
if parts[2] == "No":
remote_ap = 0
elif parts[2] == "Yes":
remote_ap = 1
else:
raise ExceptionWithStatus("Could not recognise remote AP from controller output.", 500)
except IndexError:
raise ExceptionWithStatus("Could not parse remote AP from controller output.", 500)
return remote_ap
def get_remote_ap(ssh_command, ap_wiredmac, cmd: WellFormedCommand) -> int:
lms = get_lms_ip_and_connect_lms(ssh_command, ap_wiredmac, cmd)
remote_ap = lms_get_remote_ap(ssh_command, lms, ap_wiredmac, cmd)
return remote_ap
def get_gains(ssh_command, ap_wiredmac, cmd: WellFormedCommand) -> tuple:
lms = get_lms_ip_and_connect_lms(ssh_command, ap_wiredmac, cmd)
gains = lms_get_gains(ssh_command, lms, ap_wiredmac, cmd)
return gains
def get_gains_and_remote_ap(ssh_command, ap_wiredmac, cmd: WellFormedCommand) -> dict:
lms = get_lms_ip_and_connect_lms(ssh_command, ap_wiredmac, cmd)
gains = lms_get_gains(ssh_command, lms, ap_wiredmac, cmd)
remote_ap = lms_get_remote_ap(ssh_command, lms, ap_wiredmac, cmd)
return {"gains": gains, "remote_ap": remote_ap}
"""
Interaction Model
This module basically maps out the response tree for the skill.
"""
from datetime import timedelta, datetime, date
from typing import Union, Optional, Dict, Tuple
from textwrap import dedent
from enum import Enum
import logging
from . import wods
from . import speechlet
from . import env
from .incoming_types import RequestTypes, LambdaEvent, Intent, Slot
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)
DEFAULT_QUERY_INTENT = 'DefaultQuery'
REQUEST_SLOT = 'RequestType'
RELATIVE_SLOT = 'RelativeTo'
def _get_speech_date(d: Union[date, datetime]) -> str:
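    # e.g. date(2018, 3, 9) -> 'Friday March 9, 2018'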
return '{} {}, {}'.format(d.strftime('%A %B'), d.day, d.year)
def _titleify(word:str) -> str:
return word[0].upper() + word[1:]
class RelativeToSlot(Enum):
TODAY = (timedelta(), 'today')
YESTERDAY = (timedelta(days=-1), 'yesterday')
TOMORROW = (timedelta(days=1), 'tomorrow')
def __init__(self, day_offset: timedelta, spoken_name: str):
self.day_offset = day_offset
self.spoken_name = spoken_name
@property
def title_name(self) -> str:
return _titleify(self.spoken_name)
class RequestTypeSlot(Enum):
FULL = ('workout', {
'workout': 'workout',
'wod': 'wod',
'wad': 'wod',
'both': 'workout',
'everything': 'workout',
'full': 'workout'})
STRENGTH = ('strength', {
'strength': 'strength',
'lifting': 'lifting'
})
CONDITIONING = ('conditioning', {
'conditioning': 'conditioning',
'metcon': 'metcon',
'cardio': 'cardio',
'endurance': 'endurance'})
def __init__(self, default_spoken_word: str, synonyms: Dict[str, str]):
self.default_spoken_word = default_spoken_word
self.synonyms = synonyms
TEMPLATE_NO_THING = 'There {iswas} no {thing} {relative_to} {date}.'
TEMPLATE_FOUND = '<p>The {thing} for {relative_to}, {date}</p>{content}'
CARD_TITLE_TEMPLATE = '{thing} for {relative_to}, {date}'
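# e.g. CARD_TITLE_TEMPLATE renders as 'Workout for Today, Friday March 9, 2018'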
def _build_wod_query_response(wod: Optional[wods.WOD],
wod_query_date: datetime,
relative_to: RelativeToSlot,
ebcf_slot_word: Optional[str],
request_type_slot: RequestTypeSlot) -> speechlet.SpeechletResponse:
thing = ebcf_slot_word or request_type_slot.default_spoken_word
speech_date = _get_speech_date(wod_query_date)
card_cls = speechlet.SimpleCard
if wod:
if request_type_slot == RequestTypeSlot.FULL:
ssml_txt = wod.full_ssml()
card_content = wod.pprint()
if wod.image:
card_cls = lambda title, content: speechlet.StandardCard(title, content, wod.image)
elif request_type_slot == RequestTypeSlot.STRENGTH:
ssml_txt = wod.strength_ssml()
card_content = wod.strength_pprint()
elif request_type_slot == RequestTypeSlot.CONDITIONING:
ssml_txt = wod.conditioning_ssml()
card_content = wod.conditioning_pprint()
else:
assert False, 'Unknown EBCF section'
if ssml_txt:
return speechlet.SpeechletResponse(
output_speech=speechlet.SSML(
TEMPLATE_FOUND.format(
thing=thing,
relative_to=relative_to.spoken_name,
date=speech_date,
content=ssml_txt
)
),
card=card_cls(
title=CARD_TITLE_TEMPLATE.format(
thing=_titleify(thing),
relative_to=relative_to.title_name,
date=speech_date
),
content=card_content
),
should_end=True
)
iswas = 'is' if relative_to != RelativeToSlot.YESTERDAY else 'was'
return speechlet.SpeechletResponse(
output_speech=speechlet.PlainText(TEMPLATE_NO_THING.format(
iswas=iswas,
thing=thing,
relative_to=relative_to.spoken_name,
date=speech_date)
),
should_end=True
)
def wod_query(relative_to: RelativeToSlot=RelativeToSlot.TODAY,
ebcf_slot_word: Optional[str]=None,
request_type_slot: RequestTypeSlot=RequestTypeSlot.FULL) -> speechlet.SpeechletResponse:
wod_query_date = env.localnow()
if relative_to != RelativeToSlot.TODAY:
wod_query_date += relative_to.day_offset
wod = wods.get_wod(wod_query_date.date())
return _build_wod_query_response(
wod, wod_query_date, relative_to, ebcf_slot_word, request_type_slot
)
def _get_relative_to_slot(slot: Slot) -> RelativeToSlot:
LOG.debug('RelativeTo: %r', slot)
if slot.has_value and slot.value:
test_val = slot.value.lower()
for rel in RelativeToSlot:
if test_val.startswith(rel.spoken_name):
slot.is_valid = True
slot.value = rel.spoken_name
return rel
return RelativeToSlot.TODAY
def _resolve_request_type_slot(slot: Slot) -> Optional[Tuple[RequestTypeSlot, str]]:
if slot.has_value and slot.value:
test_val = slot.value.lower()
for ebcfsec in RequestTypeSlot:
if test_val in ebcfsec.synonyms:
return ebcfsec, ebcfsec.synonyms[test_val]
for syn in ebcfsec.synonyms:
if test_val.startswith(syn):
return ebcfsec, ebcfsec.synonyms[syn]
def _get_request_type_slot(intent: Intent) -> Tuple[RequestTypeSlot, Optional[str]]:
try:
slot = intent.slots[REQUEST_SLOT]
except KeyError:
raise MissingSlot(REQUEST_SLOT)
LOG.debug('RequestType: %r', slot)
resolved = _resolve_request_type_slot(slot)
if resolved is None and intent.last_intent is not None \
and REQUEST_SLOT in intent.last_intent.slots:
# Maybe we picked up a new value that was some garbage. Try old value.
slot = intent.last_intent.slots[REQUEST_SLOT]
LOG.debug('RequestType from last: %r', slot)
resolved = _resolve_request_type_slot(slot)
if resolved is not None:
return resolved
raise MissingSlot(REQUEST_SLOT)
def _prompt_missing_request_type_slot(intent: Intent) -> speechlet.SpeechletResponse:
return speechlet.SpeechletResponse(
output_speech=speechlet.SSML(
'I didn\'t understand what you wanted. '
'Did you want strength, conditioning, or both?'
),
should_end=False,
attributes={
'intents': {
intent.name: intent.to_dict()
}
},
reprompt=speechlet.SSML(
'Did you want strength, conditioning, or both?'
)
)
class MissingSlot(Exception):
"""raised when we don't know what section the user wanted, either
because Alexa didn't hear it correctly or the user gave us some BS
that we can't process.
"""
def query_intent(intent: Intent) -> speechlet.SpeechletResponse:
"""
Responds to most queries of the skill.
"""
try:
relative_to_slot = intent.slots[RELATIVE_SLOT]
except KeyError:
relative_to = RelativeToSlot.TODAY
else:
relative_to = _get_relative_to_slot(relative_to_slot)
try:
request_type_slot, word_used = _get_request_type_slot(intent)
except MissingSlot:
return _prompt_missing_request_type_slot(intent)
return wod_query(relative_to, word_used, request_type_slot)
HELP_SSML = (
'<speak>'
'<s>Ok, Help.</s>'
# Init options
'<p>First, you can ask me for the workout, strength, or conditioning.</p>'
# Yesterday/Tomorrow
'<p>You can also add words like: "yesterday", or, "tomorrow". '
'<s>For example, ask me for yesterday’s workout or tomorrow’s conditioning.</s></p>'
# Quit
'<p>Finally, you can say: "exit", to quit.</p>'
# Prompt
'<s>What will it be?</s>'
'</speak>')
def help_intent(intent: Intent) -> speechlet.SpeechletResponse:
"""
This is triggered when the user asks for "help".
:param intent:
:param attributes:
:return:
"""
ssml = speechlet.SSML(HELP_SSML)
card = speechlet.SimpleCard(
title='Help',
content=dedent(
'''
Example Phrases:
"workout", "strength", "conditioning", "yesterday's workout", "tomorrow's conditioning".
'''
)
)
return speechlet.SpeechletResponse(
ssml,
card=card,
should_end=False
)
def cancel_intent(intent: Intent) -> speechlet.SpeechletResponse:
return speechlet.SpeechletResponse(
speechlet.PlainText('Goodbye.'),
should_end=True
)
_INTENTS = {
DEFAULT_QUERY_INTENT: query_intent,
'AMAZON.HelpIntent': help_intent,
'AMAZON.CancelIntent': cancel_intent,
'AMAZON.StopIntent': cancel_intent
}
class UnkownIntentException(Exception):
def __init__(self, intent: Intent):
super().__init__()
self.intent = intent
def __str__(self):
return str(self.intent)
def on_intent_request(event: LambdaEvent) -> speechlet.SpeechletResponse:
intent = event.request.intent
intent_func = _INTENTS.get(intent.name, None)
if not intent_func:
LOG.error('UNKNOWN INTENT: %s', intent)
raise UnkownIntentException(intent)
return intent_func(intent)
def on_launch_request(event: LambdaEvent) -> speechlet.SpeechletResponse:
return wod_query()
def on_session_end_request(event: LambdaEvent) -> speechlet.SpeechletResponse:
return speechlet.SpeechletResponse(should_end=True)
class UnsupportedEventType(Exception):
"""raised when an unsupported event type comes in"""
def handle_event(event: LambdaEvent) -> speechlet.SpeechletResponse:
request_type = event.request.type
if request_type == RequestTypes.LaunchRequest:
return on_launch_request(event)
elif request_type == RequestTypes.IntentRequest:
return on_intent_request(event)
elif request_type == RequestTypes.SessionEndedRequest:
return on_session_end_request(event)
raise UnsupportedEventType(event)
/* Line numbers refer to:
https://github.com/codybartfast/sicp/blob/master/chapter5/mc-evaluator-50.scm */
#include "environment.h"
#include <string.h>
#include "custom.h"
#include "error.h"
#include "list.h"
#include "output.h"
#include "primproc.h"
#define AREA "ENVIRONMENT"
static obj the_empty_environment(void);
static obj lvv_env_loop(obj var, obj env);
static obj svv_env_loop(obj var, obj val, obj env);
//ln 231
static obj enclosing_environment(obj env)
{
return cdr(env);
}
// ln 232
static obj first_frame(obj env)
{
return car(env);
}
// ln 233
static obj the_empty_environment(void)
{
return emptylst;
}
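// A frame pairs a list of variables with a parallel list of values,
// e.g. ((x y) . (1 2)) binds x to 1 and y to 2.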
// ln 235
static obj make_frame(obj variables, obj values)
{
return cons(variables, values);
}
// ln 237
static obj frame_variables(obj frame)
{
return car(frame);
}
// ln 238
static obj frame_values(obj frame)
{
return cdr(frame);
}
// ln 239
static obj add_binding_to_frame(obj var, obj val, obj frame)
{
obj r = set_car(frame, cons(var, car(frame)));
if (is_err(r))
return r;
r = set_cdr(frame, cons(val, cdr(frame)));
return r;
}
// ln 243
obj extend_environment(obj vars, obj vals, obj base_env, obj proc_name)
{
int nvars = length_i(vars, 0, false);
int nvals = length_i(vals, 0, false);
if (nvars >= 0 && nvals >= 0) {
return (nvars == nvals) ?
cons(make_frame(vars, vals), base_env) :
error_arity(
AREA,
"Too %s arguments to '%s', var: %s, vals: %s",
nvars < nvals ? "many" : "few",
errstr(proc_name), errstr(vars),
errstr(vals));
} else if (nvals < 0) {
return error_syntax(
AREA,
"Arguments to '%s' are not a proper list, var: %s, vals: %s",
errstr(proc_name), errstr(vars), errstr(vals));
} else {
// "dotted tail"
obj ovars = vars, ovals = vals;
obj rvars, rvals;
for (rvars = emptylst, rvals = emptylst;
is_pair(vars) && is_pair(vals);
vars = cdr(vars), vals = cdr(vals)) {
rvars = cons(car(vars), rvars);
rvals = cons(car(vals), rvals);
}
if (is_pair(vars)) {
return error_arity(
AREA,
"Too few arguments to '%s', var: %s, vals: %s",
errstr(proc_name), errstr(ovars),
errstr(ovals));
}
rvars = cons(vars, rvars); // vars should be a var
rvals = cons(vals, rvals); // vals should be a list
return cons(make_frame(reverse(rvars), reverse(rvals)),
base_env);
}
}
// ln 250
static obj lvv_scan(obj var, obj env, obj vars, obj vals)
{
if (is_null(vars)) {
return lvv_env_loop(var, enclosing_environment(env));
} else if (is_eq(var, car(vars))) {
        /*
        This test is only relevant to exercise 4.20 (but not needed).
        It can be left in for the ec-evaluator, but it creates an error
        with the analyzing evaluator when the *unassigned* object is
        analyzed.
        */
// return is_unassigned_obj(car(vals)) ?
// error_unbound_variable(AREA,
// "%s is *unassigned*",
// errstr(car(vars))) :
// car(vals);
return car(vals);
} else {
return lvv_scan(var, env, cdr(vars), cdr(vals));
}
}
// ln 250
static obj lvv_env_loop(obj var, obj env)
{
if (is_eq(env, the_empty_environment())) {
return error_unbound_variable(AREA, "%s", to_string(var));
}
obj frame = first_frame(env);
return lvv_scan(var, env, frame_variables(frame), frame_values(frame));
}
// ln 250
obj lookup_variable_value(obj var, obj env)
{
return lvv_env_loop(var, env);
}
// ln 265
static obj svv_scan(obj var, obj val, obj env, obj vars, obj vals)
{
if (is_null(vars)) {
return svv_env_loop(var, val, enclosing_environment(env));
} else if (is_eq(var, car(vars))) {
return set_car(vals, val);
} else {
return svv_scan(var, val, env, cdr(vars), cdr(vals));
}
}
// ln 265
static obj svv_env_loop(obj var, obj val, obj env)
{
if (is_eq(env, the_empty_environment())) {
return error_unbound_variable(AREA, "%s", to_string(var));
}
obj frame = first_frame(env);
return svv_scan(var, val, env, frame_variables(frame),
frame_values(frame));
}
// ln 265
obj set_variable_value(obj var, obj val, obj env)
{
return svv_env_loop(var, val, env);
}
// ln 280
static obj dv_scan(obj vars, obj vals, obj var, obj val, obj frame)
{
if (is_null(vars))
return add_binding_to_frame(var, val, frame);
if (is_eq(var, car(vars)))
return set_car(vals, val);
return dv_scan(cdr(vars), cdr(vals), var, val, frame);
}
// ln 280
obj define_variable(obj var, obj val, obj env)
{
obj frame = first_frame(env);
return dv_scan(frame_variables(frame), frame_values(frame), var, val,
frame);
}
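/*
 * Illustrative sketch (not part of the original source): a round trip through
 * the three operations above on an existing environment. The helper
 * example_binding_roundtrip is hypothetical; of_identifier, true_o and
 * false_o are the helpers used elsewhere in this file.
 */
static obj example_binding_roundtrip(obj env)
{
	obj var = of_identifier("answer");
	define_variable(var, true_o, env);            /* add binding in first frame */
	obj before = lookup_variable_value(var, env); /* -> true_o */
	set_variable_value(var, false_o, env);        /* mutate the existing binding */
	obj after = lookup_variable_value(var, env);  /* -> false_o */
	return cons(before, after);                   /* (#t . #f) */
}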
// ln 295
static obj _initial_procedures;
static obj initial_procedures(void)
{
obj initial_primprocs =
listn(10, // must match number of items below
list2(of_identifier("true"), true_o), // 1
list2(of_identifier("false"), false_o), // 2
list2(of_identifier("+"), of_function(add)), // 3
list2(of_identifier("-"), of_function(sub)), // 4
list2(of_identifier("*"), of_function(mul)), // 5
list2(of_identifier("/"), of_function(divd)), // 6
list2(of_identifier("<"), of_function(lt)), // 7
list2(of_identifier("="), of_function(eqn)), // 8
list2(of_identifier(">"), of_function(gt)), // 9
list2(of_identifier("not"), of_function(not ))); // 10
return is_pair(_initial_procedures) ?
_initial_procedures :
(_initial_procedures = reverse(initial_primprocs));
}
// ln 301
static obj initial_procedure_names(void)
{
return map_u(car, initial_procedures());
}
// ln 305
static obj initial_procedure_objects(void)
{
return map_u(cadr, initial_procedures());
}
// ln 309
obj setup_environment(void)
{
obj initial_env = extend_environment(initial_procedure_names(),
initial_procedure_objects(),
the_empty_environment(),
of_string("initial_env"));
return initial_env;
}
// ln 317
static obj _the_global_environment;
obj the_global_environment(void)
{
if (!is_pair(_the_global_environment))
_the_global_environment = setup_environment();
return _the_global_environment;
}
obj tge(void)
{
return the_global_environment();
}
// new
void set_global_environment(obj tge)
{
_the_global_environment = tge;
}
|
/**
* @file mali_kbase_softjobs.c
*
* This file implements the logic behind software only jobs that are
* executed within the driver rather than being handed over to the GPU.
*/
static void kbasep_add_waiting_soft_job(struct kbase_jd_atom *katom)
{
struct kbase_context *kctx = katom->kctx;
unsigned long lflags;
spin_lock_irqsave(&kctx->waiting_soft_jobs_lock, lflags);
list_add_tail(&katom->queue, &kctx->waiting_soft_jobs);
spin_unlock_irqrestore(&kctx->waiting_soft_jobs_lock, lflags);
} |
package inference
import (
"context"
"io"
"github.com/grafana/regexp"
"github.com/sourcegraph/sourcegraph/internal/api"
"github.com/sourcegraph/sourcegraph/internal/authz"
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/gitserver"
"github.com/sourcegraph/sourcegraph/internal/luasandbox"
"github.com/sourcegraph/sourcegraph/internal/vcs/git"
)
type SandboxService interface {
CreateSandbox(ctx context.Context, opts luasandbox.CreateOptions) (*luasandbox.Sandbox, error)
}
type GitService interface {
ListFiles(ctx context.Context, repo api.RepoName, commit string, pattern *regexp.Regexp) ([]string, error)
Archive(ctx context.Context, repo api.RepoName, opts gitserver.ArchiveOptions) (io.ReadCloser, error)
}
type gitService struct {
db database.DB
checker authz.SubRepoPermissionChecker
}
func NewDefaultGitService(checker authz.SubRepoPermissionChecker, db database.DB) GitService {
if checker == nil {
checker = authz.DefaultSubRepoPermsChecker
}
return &gitService{
db: db,
checker: checker,
}
}
func (s *gitService) ListFiles(ctx context.Context, repo api.RepoName, commit string, pattern *regexp.Regexp) ([]string, error) {
return git.ListFiles(ctx, s.db, repo, api.CommitID(commit), pattern, authz.DefaultSubRepoPermsChecker)
}
func (s *gitService) Archive(ctx context.Context, repo api.RepoName, opts gitserver.ArchiveOptions) (io.ReadCloser, error) {
// Note: the sub-repo perms checker is nil here because all paths were already checked via a previous call to s.ListFiles
return git.ArchiveReader(ctx, s.db, nil, repo, opts)
}
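// Illustrative sketch (not part of the original file): wiring up the default
// GitService and listing the Go files at a commit. The repository name and
// commit shown here are hypothetical placeholders.
func exampleListGoFiles(ctx context.Context, db database.DB) ([]string, error) {
	svc := NewDefaultGitService(nil, db) // nil falls back to authz.DefaultSubRepoPermsChecker
	return svc.ListFiles(ctx, api.RepoName("github.com/example/repo"), "deadbeefcafe", regexp.MustCompile(`\.go$`))
}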
|
Holbrook Mohr, Heather Hollingsworth and Mike Kunzelman, The Associated Press
BATON ROUGE, La. -- Gavin Long was a man of mixed messages. He peddled self-published books with abstract themes about self-empowerment and spiritual enlightenment, but also posted rambling Internet videos calling for violent action in response to what he considered oppression.
In the last message sent from his Twitter account early Sunday, he wrote: "Just bc you wake up every morning doesn't mean that you're living. And just bc you shed your physical body doesn't mean that you're dead."
Nine hours later, he ambushed law enforcement officers in Baton Rouge, fatally shooting two police officers and a sheriff's deputy and injuring three others before being shot dead himself. It was his 29th birthday.
The black military veteran, whose last known address was in Kansas City, Missouri, had spent five years in the Marine Corps, serving one tour in Iraq before being honourably discharged and taking a series of college classes. Then, according to his website, he had a spiritual awakening, sold all his possessions and moved to Africa for a time.
By May 2015, back in the U.S., Long sought to legally change his name to Cosmo Ausar Setepenra in a non-binding document filed in Jackson County, Missouri, though he never followed through with an official request, county spokeswoman Brenda Hill said.
In the document, he said he belonged to the Washitaw de Dugdahmoundyah, also known as the Washitaw Nation, a black anti-government group whose members believe they are indigenous to the United States and beyond the federal government's reach, according to the Southern Poverty Law Center.
"Under common law, an adult or emancipated person has the right to change his or her name without legal formality or permission of court to any name he or she lawfully chooses," Jackson wrote in the document. He also said: "I AM restored to my own aboriginal-indigenous appellation ... without colorable law (legal) contract from GAVIN EUGENE LONG to Cosmo Ausar Setepenra in accord with the laws, customs, religious practices, traditions, distinct identities, characteristics and divine principles and language(s) of my Ancestors ..."
In a video posted July 10, Long, as Setepenra, said he was speaking from Dallas after another black man had killed five police officers there. He said he had already decided to travel to the city before the shooting, and guessed that "the spiritual was just telling me it was the right place to come."
Long also discussed protests in Baton Rouge, which broke out after police fatally shot a black man in a confrontation in a convenience store parking lot July 5. He urged viewers to question their "mindsets" and fight back, insisting that protests alone don't work.
"You see, that's what separates me from the 7 billion. And that's why I'm so powerful because I stand on my rights," he said.
He claimed no affiliations. In one video, Long declared: "Oh, and this is very important, I just wanted to let you all know -- because if anything happens with me . I just wanted to let you all know: don't affiliate me with nothing . yeah, I was also a Nation of Islam member, I'm not affiliated with it . They try to put you with ISIS or some other terrorist group . I'm affiliated with the spirit of Justice, nothing else."
In the months leading up to his fatal encounter with police, Long had used videos to promote his three-volume book series "The Cosmo Way," self-published last year. He called himself a "Freedom Strategist, Mental Game Coach, Nutritionist, Author and Spiritual Advisor" who wrote books he described as lessons about nutrition, self-awareness and empowerment.
"My advice is to question everything and everyone," he wrote in "The Laws of the Cosmos," the first volume. "Your parents, what they taught you growing up, your schooling, your society, your history, your beliefs, and everything you've been taught regarding what and who you really are."
In more recent videos, Long portrayed himself as a sort of spiritual leader and revolutionary, a man willing to take action while others focused on protests.
"You've got to fight back. That's the only way a bully knows to quit," he said.
In his rambling videos and written posts, Long discussed topics ranging from what he considered the extermination of Native Americans to the United States' fight for independence. He said that it is celebrated when "Europeans" fight oppression, "but when an African fights back, he's wrong."
Military records show Long was a Marine from 2005 to 2010, attaining the rank of sergeant. He served in Iraq from June 2008 to January 2009, and records show he received several medals during his military career, including one for good conduct. He was listed as a "data network specialist."
After the Marines, Long attended the University of Alabama for just the spring 2012 semester, according to university spokesman Chris Bryant. University police had no interaction with Long during that time, Bryant said. He also was briefly enrolled at Clark Atlanta University during the 2012-13 academic year, the school said.
Missouri court records show that a Gavin Eugene Long filed a petition for divorce in February 2011. The records don't indicate why the couple divorced, but the petition indicates they had no children. Three months after the petition was filed, his ex-wife was granted restoration of her maiden name. Last month, a case against Long by the city of Kansas City over unpaid city earnings taxes was dismissed.
------
AP journalists Hillel Italie, Maria Sudekum, Kimberly Chandler, Gerald Herbert and Janet McConnaughey, and AP researcher Rhonda Shafer contributed to this story. Hollingsworth reported from Kansas City; Mohr from Brandon, Mississippi. |
/**
* spec p. 71:
* For a given value of policy context identifier, this method must always return the same instance of PolicyConfiguration and there must be at
* most one actual instance of a PolicyConfiguration with a given policy context identifier (during a process context).
*
* @throws Exception
*/
public void testSamePolicyConfigurationInstance() throws Exception {
PolicyConfigurationGeneric policyConfiguration1 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, false);
PolicyConfigurationGeneric policyConfiguration2 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, false);
assertSame(policyConfiguration1, policyConfiguration2);
policyConfiguration2 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, false);
assertSame(policyConfiguration1, policyConfiguration2);
policyConfiguration2 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, true);
assertSame(policyConfiguration1, policyConfiguration2);
policyConfiguration2.commit();
policyConfiguration2 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, false);
assertSame(policyConfiguration1, policyConfiguration2);
policyConfiguration2.commit();
policyConfiguration2 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, true);
assertSame(policyConfiguration1, policyConfiguration2);
policyConfiguration2.delete();
policyConfiguration2 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, false);
assertSame(policyConfiguration1, policyConfiguration2);
policyConfiguration2.delete();
policyConfiguration2 = (PolicyConfigurationGeneric) policyConfigurationFactory.getPolicyConfiguration(CONTEXT_ID, true);
assertSame(policyConfiguration1, policyConfiguration2);
} |
import { Prescripcion } from '../modelos/prescripcion';
export const PRESCRIPCIONES: Prescripcion[] = [
{
id: 312,
nombre: 'Clorhidrato De Metformina 100 Mg/Ml, Solución Oral',
nombreComercial: 'Clorhidrato De Metformina 100 Mg/Ml, Solución Oral',
cantidad: '1 unidad',
formaFarmaceutica: 'tabletas',
fechaIndicacion: '13/07/2020',
recetable: true,
activo: true,
indicacion: {
horario: '12:00',
referencia: 'antes del almuerzo',
administracion: 'oral',
vigencia: '6 meses',
dosis: '20 mg',
distribucion: ['dom', 'lun', 'mar', 'mier', 'jue', 'vie', 'sab'],
},
profesional: 'Molini, <NAME>',
organizacion: 'Hospital Provincial Neuquén - Dr. <NAME>',
servicio: 'Servicio de clínica médica',
ambito: 'internación',
nota: 'Indicado para paciente insulino-dependiente',
},
{
id: 311,
nombre: 'Enalapril 5 Mg/Ml, Solución Oral',
nombreComercial: 'Enalapril',
cantidad: '3 Envases',
formaFarmaceutica: 'tabletas',
fechaIndicacion: '21/12/2020',
recetable: true,
activo: true,
indicacion: {
horario: '22:00',
referencia: 'después de la cena',
administracion: 'oral',
vigencia: '3 meses',
dosis: '20 mg',
distribucion: ['dom', 'lun', 'mar', 'mier', 'jue', 'vie', 'sab'],
},
profesional: 'Monteverde, <NAME>',
organizacion: 'Hospital Provincial Neuquén - Dr. <NAME>',
servicio: 'Unidad de terapia intermedia de adultos',
ambito: 'internación',
nota: 'Hipertensión esencial: Presión sistólica: 145 mm/Hg | Presión diastólica: 95 mm de Hg',
},
{
id: 313,
nombre: 'Salbutamol',
nombreComercial: 'Ventolin',
cantidad: '2 unidades',
formaFarmaceutica: 'Inhalador',
fechaIndicacion: '03/10/2020',
recetable: true,
activo: false,
indicacion: {
horario: '',
referencia: 'suministro a demanda',
administracion: 'oral',
vigencia: '60 dias',
dosis: '20 mg',
distribucion: ['dom', 'lun', 'mar', 'mier', 'jue', 'vie', 'sab'],
},
profesional: 'Monteverde, <NAME>',
organizacion: 'Hospital Provincial Neuquén - Dr. <NAME>',
servicio: 'Unidad de terapia intermedia de adultos',
ambito: 'internación',
nota: 'El paciente presenta 3 episodios por día',
},
{
id: 315,
nombre: '<NAME>',
nombreComercial: '<NAME>',
cantidad: '4 unidades',
formaFarmaceutica: 'Inyectable',
fechaIndicacion: '27/11/2020',
recetable: true,
activo: true,
indicacion: {
horario: '',
referencia: 'entre comidas',
administracion: 'intravenosa',
vigencia: 'durante 30 días',
dosis: '300 mg diarios',
distribucion: ['dom', 'lun', 'mar', 'mier', 'jue', 'vie', 'sab'],
},
profesional: 'Rinaldi, <NAME>',
organizacion: 'Hospital de San Martín de los Andes - Dr. <NAME>',
servicio: 'Unidad de terapia intermedia de adultos',
ambito: 'internación',
nota: 'el paciente presenta un cuadro anémico leve',
},
{
id: 319,
nombre: 'Carbamazepina',
nombreComercial: 'Tegretol 200 mg',
cantidad: '3 unidades',
formaFarmaceutica: 'Tabletas',
fechaIndicacion: '11/05/2019',
recetable: true,
activo: false,
indicacion: {
horario: '',
referencia: 'despues de las comidas',
administracion: 'oral',
vigencia: 'durante 60 días',
dosis: '500 mg',
distribucion: ['dom', 'lun', 'mar', 'mier', 'jue', 'vie', 'sab'],
},
profesional: 'Reznik, <NAME>',
organizacion: 'Hospital de Plottier',
servicio: 'Unidad de terapia intermedia de adultos',
ambito: 'internación',
nota: 'El paciente presenta 1 episodios por día',
},
] |
This could ruin a lot of good science fiction movies … and create interesting plots for the next generation of them, not to mention influencing how humans deal with space aliens when they first encounter each other (assuming they haven’t already). A timely article by The Daily Galaxy reviews the study “Alien Minds” by Susan Schneider where the professor and author discusses her theory that our first meeting with an extraterrestrial will be with a billion-year-old robot. Wait, what?
“I do not believe that most advanced alien civilizations will be biological. The most sophisticated civilizations will be postbiological, forms of artificial intelligence or alien superintelligence.”
Susan Schneider is an associate Professor in the Department of Philosophy Cognitive Science Program at the University of Connecticut. “Alien Minds” has been presented at NASA and the 2016 IdeaFestival in Kentucky and was published in The Impact of Discovering Life Beyond Earth. It is her response to the question: “How would intelligent aliens think? Would they have conscious experiences? Would it feel a certain way to be an alien?”
“I actually think the first discovery of life on other planets will probably involve microbial life; I am concentrating on intelligent life in my work on this topic though. I only claim that the most advanced civilizations will likely be post biological.”
Schneider’s theory is based on three components or “observations.” In her “short window observation,” she presents the idea that a civilization or species that can conquer long-distance space travel is already very close to moving from biological to artificially-intelligent beings. An example of this “short window” is the relatively brief 120 years it took humans to go from the first radio signals to cell phones.
Some of those species will be much older than us, which is Schneider’s “the greater age of alien civilizations” observation – one accepted by many. And not just a few generations older but billions of years beyond us, making them far more advanced and intelligent. How much more?
Schneider’s last observation is that any species that can travel to Earth will be intelligent enough to develop robots that they can upload their brains to. The robots would probably be silicon-based for speed of ‘thinking’ and durability, making them nearly immortal.
Should we fear these superintelligent robots from other galaxies? Schneider thinks we may not have a chance to. If they’re so much further advanced than we are, why would they bother dealing with us in the first place? Or, as she so bluntly puts it in an interview with UConn Today:
“Would you really cross the universe to interact with an ant?”
That’s a relief … I guess. However, she leans to the side of caution when the discussion turns to sending signals into space to proactively contact any life forms listening.
Like the philosopher that she is, Susan Schneider leaves herself an out: there may not be any other species – superintelligent or lower – at all:
“The universe could be a lonely place, we still do not know the conditions that created life on this planet.”
Will our first ET encounter be with a billion-year-old superintelligent robot? Just to be on the safe side, repeat after me: |
/**
* Created by Ikasan Development Team on 14/02/2017.
*/
public abstract class SolrDaoBase<T> implements SolrInitialisationService
{
/** Logger for this class */
private static Logger logger = LoggerFactory.getLogger(SolrDaoBase.class);
private static final List<String> ALL = Arrays.asList("*");
public static final String ID = "id";
public static final String ERROR_URI = "errorUri";
public static final String TYPE = "type";
public static final String MODULE_NAME = "moduleName";
public static final String FLOW_NAME = "flowName";
public static final String COMPONENT_NAME = "componentName";
public static final String CREATED_DATE_TIME = "timestamp";
public static final String PAYLOAD_CONTENT = "payload";
public static final String PAYLOAD_CONTENT_RAW = "payloadRaw";
public static final String EVENT = "event";
public static final String RELATED_EVENT = "relatedEventId";
public static final String EXPIRY = "expiry";
public static final String ERROR_DETAIL = "errorDetail";
public static final String ERROR_ACTION = "errorAction";
public static final String ERROR_MESSAGE = "errorMessage";
public static final String EXCEPTION_CLASS = "exceptionClass";
public static final String AND = " AND ";
public static final String OR = " OR ";
public static final String TO = " TO ";
public static final String OPEN_BRACKET = "(";
public static final String CLOSE_BRACKET = ")";
public static final String COLON = ":";
protected SolrClient solrClient = null;
protected int daysToKeep = 7;
protected String solrUsername;
protected String solrPassword;
/**
* Method to initialise all solr cloud DAO objects.
*
* @param solrCloudUrls
* @param daysToKeep
*/
public void initCloud(List<String> solrCloudUrls, int daysToKeep)
{
solrClient = new CloudSolrClient.Builder().withSolrUrl(solrCloudUrls).build();
((CloudSolrClient)solrClient).setDefaultCollection("ikasan");
this.daysToKeep = daysToKeep;
}
@Override
public void initStandalone(String solrCloudUrl, int daysToKeep)
{
solrClient = new HttpSolrClient.Builder().withBaseSolrUrl(solrCloudUrl).build();
this.daysToKeep = daysToKeep;
}
/**
* Set the solr client
*
* @param solrClient
*/
public void setSolrClient(SolrClient solrClient)
{
this.solrClient = solrClient;
}
/**
* Helper method to build the query that is issued to Solr.
*
* @param moduleNames
* @param flowNames
* @param componentNames
* @param fromDate
* @param untilDate
* @param payloadContent
* @param eventId
* @param type
* @return String
*/
protected String buildQuery(Collection<String> moduleNames, Collection<String> flowNames, Collection<String> componentNames, Date fromDate
, Date untilDate, String payloadContent, String eventId, String type, boolean negateQuery) throws IOException {
ArrayList<String> types = new ArrayList<String>();
types.add(type);
return this.buildQuery(moduleNames, flowNames, componentNames, fromDate, untilDate, payloadContent, eventId, types, negateQuery);
}
/**
* Helper method to build the query that is issued to Solr.
*
* @param moduleNames
* @param flowNames
* @param componentNames
* @param fromDate
* @param untilDate
* @param searchTerm
* @param eventId
* @param types
* @return String
*/
protected String buildQuery(Collection<String> moduleNames, Collection<String> flowNames, Collection<String> componentNames, Date fromDate
, Date untilDate, String searchTerm, String eventId, List<String> types, boolean negateQuery) throws IOException
{
// Setup the predicates
StringBuffer moduleNamesBuffer = this.buildStringListQueryPart(moduleNames, MODULE_NAME);
StringBuffer flowNamesBuffer = this.buildStringListQueryPart(flowNames, FLOW_NAME);
StringBuffer componentNamesBuffer = this.buildStringListQueryPart(componentNames, COMPONENT_NAME);
StringBuffer dateBuffer = this.buildDatePredicate(CREATED_DATE_TIME, fromDate, untilDate);
StringBuffer payloadBuffer = this.buildSearchStringPredicate(searchTerm, PAYLOAD_CONTENT, negateQuery);
StringBuffer errorBuffer = this.buildSearchStringPredicate(searchTerm, ERROR_DETAIL, negateQuery);
StringBuffer errorUriBuffer = this.buildSearchStringPredicate(searchTerm, ERROR_URI, negateQuery);
StringBuffer eventBuffer = this.buildSearchStringPredicate(searchTerm, EVENT, negateQuery);
StringBuffer eventIdBuffer = this.buildFieldPredicate(eventId, EVENT);
StringBuffer typeBuffer = this.buildStringListQueryPart(types, TYPE);
String logicalOperator = OR;
if (negateQuery)
{
logicalOperator = AND;
}
// Construct the query
StringBuffer bufferFinalQuery = new StringBuffer();
Boolean hasPrevious = this.addQueryPart(bufferFinalQuery, payloadBuffer, false, AND, true, false);
hasPrevious = this.addQueryPart(bufferFinalQuery, errorBuffer, hasPrevious, logicalOperator, false, false);
hasPrevious = this.addQueryPart(bufferFinalQuery, errorUriBuffer, hasPrevious, logicalOperator, false, false);
hasPrevious = this.addQueryPart(bufferFinalQuery, eventBuffer, hasPrevious, logicalOperator, false, true);
hasPrevious = this.addQueryPart(bufferFinalQuery, moduleNamesBuffer, hasPrevious, AND, false, false);
hasPrevious = this.addQueryPart(bufferFinalQuery, flowNamesBuffer, hasPrevious, AND, false, false);
hasPrevious = this.addQueryPart(bufferFinalQuery, componentNamesBuffer, hasPrevious, AND, false, false);
hasPrevious = this.addQueryPart(bufferFinalQuery, eventIdBuffer, hasPrevious, AND, false, false);
hasPrevious = this.addQueryPart(bufferFinalQuery, typeBuffer, hasPrevious, AND, false, false);
this.addQueryPart(bufferFinalQuery, dateBuffer, hasPrevious, AND, false, false);
return bufferFinalQuery.toString();
}
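    /*
     * Illustrative example (not in the original source): when a search term,
     * one module name, one type and a date range are supplied, the method
     * above produces a query of roughly this shape
     *
     *   ((payload:<term>) OR (errorDetail:<term>) OR (errorUri:<term>) OR (event:<term>))
     *       AND moduleName:("moduleA") AND type:("wiretap") AND timestamp:[<from> TO <until>]
     *
     * with OR switched to AND between the search-term clauses when negateQuery
     * is true. Clauses for arguments that are not supplied are omitted, and the
     * exact search-term text comes from SolrTokenizerQueryBuilder, so it may
     * differ from the placeholders shown here.
     */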
/**
* Helper method to build query parts.
*
* @param values
* @param field
* @return
*/
protected StringBuffer buildStringListQueryPart(Collection<String> values, String field)
{
StringBuffer queryPart = new StringBuffer();
if(values != null && values.size() > 0)
{
queryPart.append(this.buildPredicate(field, values));
}
else if(values != null && values.size() == 0)
{
queryPart.append(this.buildPredicate(field, ALL));
}
return queryPart;
}
/**
* Helper method to add query part to the solr query.
*
* @param bufferFinalQuery
* @param queryPart
* @param hasPrevious
* @return
*/
protected boolean addQueryPart(StringBuffer bufferFinalQuery, StringBuffer queryPart, Boolean hasPrevious
, String logicalOperator, boolean addPreceedingBracket, boolean addFollowingBracket)
{
if(queryPart != null && queryPart.length() > 0)
{
if(hasPrevious)
{
bufferFinalQuery.append(logicalOperator);
}
if(addPreceedingBracket)
{
bufferFinalQuery.append(OPEN_BRACKET);
}
bufferFinalQuery.append(queryPart);
if(addFollowingBracket)
{
bufferFinalQuery.append(CLOSE_BRACKET);
}
hasPrevious = true;
}
return hasPrevious;
}
/**
* Helper method to build a field predicate.
*
* @param field
* @param predicateValues
* @return
*/
protected StringBuffer buildPredicate(String field, Collection<String> predicateValues)
{
String delim = "";
StringBuffer predicate = new StringBuffer();
predicate.append(field + COLON);
predicate.append(OPEN_BRACKET);
for (String predicateValue : predicateValues)
{
if(predicateValue.contains("*"))
{
predicate.append(delim).append(predicateValue).append(" ");
}
else
{
predicate.append(delim).append("\"").append(predicateValue).append("\"").append(" ");
}
delim = OR;
}
predicate.append(CLOSE_BRACKET);
return predicate;
}
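    /*
     * Illustrative example (not in the original source): for field MODULE_NAME
     * and values ["moduleA", "module*"], this yields a clause of the form
     *
     *   moduleName:("moduleA" OR module*)
     *
     * i.e. exact values are quoted, wildcard values are left unquoted, and all
     * values are OR-ed together inside a single bracketed group.
     */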
/**
* Helper method to build date predicate.
*
* @param field
* @param fromDate
* @param untilDate
* @return
*/
protected StringBuffer buildDatePredicate(String field, Date fromDate, Date untilDate)
{
StringBuffer dateBuffer = new StringBuffer();
if(fromDate != null && untilDate != null)
{
dateBuffer.append(field + COLON).append("[").append(fromDate.getTime())
.append(TO).append(untilDate.getTime()).append("]");
}
return dateBuffer;
}
/**
* Helper method to build the search string predicate.
*
* @param searchTerm
* @param field
* @return
* @throws IOException
*/
protected StringBuffer buildSearchStringPredicate(String searchTerm, String field, boolean negateQuery) throws IOException
{
StringBuffer searchTermBuffer = new StringBuffer();
if(searchTerm != null && !searchTerm.trim().isEmpty())
{
searchTermBuffer.append("(").append(SolrTokenizerQueryBuilder.buildQuery(searchTerm, field, negateQuery)).append(")");
}
return searchTermBuffer;
}
/**
* Helper method to build general field predicates.
*
* @param value
* @param field
* @return
*/
protected StringBuffer buildFieldPredicate(String value, String field)
{
StringBuffer predicateBuffer = new StringBuffer();
if(value != null && !value.trim().isEmpty())
{
predicateBuffer.append(field + COLON);
predicateBuffer.append(value).append(" ");
}
return predicateBuffer;
}
/**
* Query solr index by id for a given type
*
* @param id
* @param type
* @return String
*/
protected String buildIdQuery(Long id, String type)
{
StringBuffer idBuffer = new StringBuffer();
StringBuffer typeBuffer = new StringBuffer();
idBuffer.append(ID + COLON + id);
if(type != null && !type.trim().isEmpty())
{
typeBuffer.append(TYPE + COLON);
typeBuffer.append("\"").append(type).append("\" ");
}
StringBuffer bufferFinalQuery = new StringBuffer(idBuffer);
boolean hasPrevious = true;
if(typeBuffer.length() > 0)
{
if(hasPrevious)
{
bufferFinalQuery.append(AND);
}
bufferFinalQuery.append(typeBuffer);
hasPrevious = true;
}
return bufferFinalQuery.toString();
}
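    /*
     * Illustrative example (not in the original source): buildIdQuery(42L, "error")
     * yields a query of the form
     *
     *   id:42 AND type:"error"
     *
     * while a null or blank type drops the AND clause and matches on the id alone.
     */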
/**
* Method to remove expired records from the solr index by type.
*
* @param type
*/
public void removeExpired(String type)
{
long currentTime = System.currentTimeMillis();
StringBuffer query = new StringBuffer();
query.append(TYPE).append(COLON).append(type);
query.append(AND);
query.append(EXPIRY).append(COLON).append("{").append("*").append(TO).append(currentTime).append("}");
this.deleteByQuery(query.toString());
}
/**
* Method to remove records from the solr index by type and id.
*
* @param type
*/
public void removeById(String type, String id)
{
StringBuffer query = new StringBuffer();
query.append(TYPE).append(COLON).append("\"").append(type).append("\"");
query.append(AND);
query.append(ID).append(COLON).append("\"").append(id).append("\"");
this.deleteByQuery(query.toString());
}
/**
* Method to remove expired records from the solr index.
*/
public void removeExpired()
{
long currentTime = System.currentTimeMillis();
StringBuffer query = new StringBuffer();
query.append(EXPIRY).append(COLON).append("{").append("*").append(TO).append(currentTime).append("}");
this.deleteByQuery(query.toString());
}
/**
* Set the entity days to keep.
* @param daysToKeep
*/
public void setDaysToKeep(int daysToKeep)
{
this.daysToKeep = daysToKeep;
}
/**
* Set the solr username
*
* @param solrUsername
*/
public void setSolrUsername(String solrUsername)
{
this.solrUsername = solrUsername;
}
/**
* Set the solr password
*
* @param solrPassword
*/
public void setSolrPassword(String solrPassword)
{
this.solrPassword = solrPassword;
}
/**
* Helper method to delete records based on query.
*
* @param query
*/
protected void deleteByQuery(String query)
{
try
{
UpdateRequest req = new UpdateRequest();
req.setBasicAuthCredentials(this.solrUsername, this.solrPassword);
req.deleteByQuery(query.toString());
if(this.solrClient == null)
{
logger.warn("Solr client has not been initialised. This indicates that the platform has not been configured for solr.");
return;
}
commitSolrRequest(req);
}
catch (Exception e)
{
throw new RuntimeException("An error has occurred deleting using query [" + query + "].: " + e.getMessage(), e);
}
}
public void save(T event)
{
long millisecondsInDay = (this.daysToKeep * TimeUnit.DAYS.toMillis(1));
long expiry = millisecondsInDay + System.currentTimeMillis();
SolrInputDocument document = getSolrInputFields(expiry, event);
try
{
UpdateRequest req = new UpdateRequest();
req.setBasicAuthCredentials(this.solrUsername, this.solrPassword);
req.add(document);
commitSolrRequest(req);
}
catch (Exception e)
{
throw new RuntimeException("An exception has occurred attempting to write an exclusion to Solr", e);
}
}
public void save(List<T> events)
{
long millisecondsInDay = (this.daysToKeep * TimeUnit.DAYS.toMillis(1));
long expiry = millisecondsInDay + System.currentTimeMillis();
try
{
UpdateRequest req = new UpdateRequest();
req.setBasicAuthCredentials(this.solrUsername, this.solrPassword);
for (T event : events)
{
SolrInputDocument document = getSolrInputFields(expiry, event);
req.add(document);
logger.debug("Adding document: " + document);
}
commitSolrRequest(req);
}
catch (Exception e)
{
throw new RuntimeException("An exception has occurred attempting to write an exclusion to Solr", e);
}
}
protected void commitSolrRequest(UpdateRequest req)
throws org.apache.solr.client.solrj.SolrServerException, java.io.IOException
{
UpdateResponse rsp = req.process(this.solrClient, SolrConstants.CORE);
logger.debug("Solr Response: " + rsp.toString());
rsp = req.commit(solrClient, SolrConstants.CORE);
logger.debug("Solr Commit Response: " + rsp.toString());
}
protected abstract SolrInputDocument getSolrInputFields(Long expiry, T event);
} |
/**
* Handles a general Write exception.
*/
private void handleWriteException(Throwable ex) {
if (ex instanceof ObjectClosedException && !this.closed.get()) {
log.warn("{}: Caught ObjectClosedException but not closed; closing now.", this.traceObjectId, ex);
close();
}
} |
// DO NOT MODIFY.
// Automatically generated by Sbml2Cellontro from ./sbml/YeastGly.xml
// Thu Jan 18 19:00:19 EST 2007
package org.primordion.cellontro.app.sbml;
public interface CeYeastGlyPritchardKel {
public static final int XholonClassCE = 0;
public static final int SbmlCompartmentCE = 1;
public static final int compartmentCE = 2;
public static final int SpeciesCE = 3;
public static final int GLCiCE = 4;
public static final int ATPCE = 5;
public static final int G6PCE = 6;
public static final int ADPCE = 7;
public static final int F6PCE = 8;
public static final int F16bPCE = 9;
public static final int AMPCE = 10;
public static final int DHAPCE = 11;
public static final int GAPCE = 12;
public static final int NADCE = 13;
public static final int BPGCE = 14;
public static final int NADHCE = 15;
public static final int P3GCE = 16;
public static final int P2GCE = 17;
public static final int PEPCE = 18;
public static final int PYRCE = 19;
public static final int AcAldCE = 20;
public static final int SuccinateCE = 21;
public static final int TrehaloseCE = 22;
public static final int GlycogenCE = 23;
public static final int GlycerolCE = 24;
public static final int EtOHCE = 25;
public static final int CO2CE = 26;
public static final int F26bPCE = 27;
public static final int GLCoCE = 28;
public static final int ReactionCE = 29;
public static final int HXTCE = 30;
public static final int HKCE = 31;
public static final int PGICE = 32;
public static final int PFKCE = 33;
public static final int ALDCE = 34;
public static final int TPICE = 35;
public static final int GAPDHCE = 36;
public static final int PGKCE = 37;
public static final int PGMCE = 38;
public static final int ENOCE = 39;
public static final int PYKCE = 40;
public static final int PDCCE = 41;
public static final int ADHCE = 42;
public static final int ATPaseCE = 43;
public static final int AKCE = 44;
public static final int G3PDHCE = 45;
public static final int Glycogen_BranchCE = 46;
public static final int Trehalose_BranchCE = 47;
public static final int Succinate_BranchCE = 48;
}
|
// TODO(nhiroki): Remove this once the off-main-thread WebSocket is enabled by
// default (https://crbug.com/825740).
std::unique_ptr<blink::WebSocketHandshakeThrottle>
ChromeContentRendererClient::CreateWebSocketHandshakeThrottle() {
InitSafeBrowsingIfNecessary();
return std::make_unique<safe_browsing::WebSocketSBHandshakeThrottle>(
safe_browsing_.get(), MSG_ROUTING_NONE);
} |
// pkg/utl/models/user_test.go
package models_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/johncoleman83/cerebrum/pkg/utl/models"
)
func TestChangePassword(t *testing.T) {
user := &models.User{
FirstName: "TestGuy",
}
hashedPassword := "<PASSWORD>"
user.ChangePassword(hashedPassword)
if user.LastPasswordChange.IsZero() {
t.Errorf("Last password change was not changed")
}
if user.Password != hashedPassword {
t.Errorf("Password was not changed")
}
}
func TestUpdateLastLogin(t *testing.T) {
user := &models.User{
FirstName: "TestGuy",
}
token := "<PASSWORD>"
user.UpdateLastLogin(token)
if user.LastLogin.IsZero() {
t.Errorf("Last login time was not changed")
}
if user.Token != token {
t.Errorf("Tooken was not changed")
}
}
func TestPaginationLimit(t *testing.T) {
reqNegativeLimit := models.PaginationReq{Limit: -5, Page: 2}
expected := &models.Pagination{Limit: 100, Offset: 200}
assert.Equal(t, expected, reqNegativeLimit.NewPagination(), "negative limit should get set to default")
reqMaxLimit := models.PaginationReq{Limit: 1001, Page: 2}
expected.Limit, expected.Offset = 1000, 2000
assert.Equal(t, expected, reqMaxLimit.NewPagination(), "beyond max limit should get set to default")
reqTooBigLimit := models.PaginationReq{Limit: 9999999, Page: 2}
expected.Limit, expected.Offset = 1000, 2000
assert.Equal(t, expected, reqTooBigLimit.NewPagination(), "way beyond max limit should get set to default")
reqNoChangeAllZeros := models.PaginationReq{Limit: 0, Page: 0}
expected.Limit, expected.Offset = 100, 0
assert.Equal(t, expected, reqNoChangeAllZeros.NewPagination(), "zeros should get set to default")
reqNoChange := models.PaginationReq{Limit: 95, Page: 25}
expected.Limit, expected.Offset = 95, 2375
assert.Equal(t, expected, reqNoChange.NewPagination(), "some random offset and limit within the bounds should stay the same")
}
|
Venus Williams survived a match point against the Czech Republic’s Barbora Zahlavova-Strycova on her way to the quarterfinals of the Qatar Open.
Williams looked set for a routine win early in the third set, pulling away to a 5-3 lead before Zahlavova-Strycova fought back, winning three straight games to go up 6-5, and in that game she brought up a match point with Williams serving at 30-40.
But Williams summoned one last burst of energy, winning three points in a row before sneaking out a tiebreak to finish off Zahlavova-Strycova, 7-5 3-6 7-6 (5).
The Czech player attempted to stare down Venus at the ritual exchange of handshakes at the net and Venus was not impressed.
Venus: Was that look for something?
Barbora: Well done?
Venus: OK, good. |
// Repository: Klarrio/dcos-diagnostics
package api
import (
"strings"
"github.com/dcos/dcos-diagnostics/dcos"
"github.com/mitchellh/mapstructure"
"github.com/sirupsen/logrus"
)
func normalizeProperty(unitProps map[string]interface{}, tools dcos.Tooler) (HealthResponseValues, error) {
var (
description, prettyName string
propsResponse UnitPropertiesResponse
)
if err := mapstructure.Decode(unitProps, &propsResponse); err != nil {
return HealthResponseValues{}, err
}
unitHealth, unitOutput, err := propsResponse.CheckUnitHealth()
if err != nil {
return HealthResponseValues{}, err
}
if unitHealth > 0 {
journalOutput, err := tools.GetJournalOutput(propsResponse.ID)
if err == nil {
unitOutput += "\n"
unitOutput += journalOutput
} else {
logrus.Errorf("Could not read journalctl: %s", err)
}
}
s := strings.Split(propsResponse.Description, ": ")
if len(s) != 2 {
description = strings.Join(s, " ")
prettyName = propsResponse.ID
logrus.Debugf("No name found in description, using ID as name: %s", prettyName)
} else {
prettyName, description = s[0], s[1]
}
return HealthResponseValues{
UnitID: propsResponse.ID,
UnitHealth: unitHealth,
UnitOutput: unitOutput,
UnitTitle: description,
Help: "",
PrettyName: prettyName,
}, nil
}
|
/**
* @brief This method sets the initial deformation gradient matrix
* @param rInitialDeformationGradientMatrix The vector to be set
*/
void InitialState::SetInitialDeformationGradientMatrix(const Matrix& rInitialDeformationGradientMatrix) {
const SizeType dimension = rInitialDeformationGradientMatrix.size1();
KRATOS_ERROR_IF(dimension <= 0) << "The imposed Matrix is null..." << std::endl;
mInitialDeformationGradientMatrix.resize(dimension, dimension, false);
noalias(mInitialDeformationGradientMatrix) = rInitialDeformationGradientMatrix;
} |
A bill that would allow for lawful concealed carry without a permit is heading to Gov. Steve Bullock for signature over the opposition of gun control advocates.
The proposal, drafted last October, gained momentum in recent weeks, passing by a 56-43 margin in the House last month before winning a closer 28-21 roll call Thursday.
Advocates for the measure feel the time was right for an update to the state’s concealed carry laws.
“We should stop kidding ourselves,” said state Sen. Scott Sales, R-Bozeman. “People that want to commit a crime aren’t going to pay attention to the concealed carry portion of the law right now.”
The legislation, HB 298, is a simple two-page act that would allow the carry of concealed firearms in the state without a permit providing the residents can legally possess a firearm. Currently, the state enjoys a rural form of constitutional carry, with permits only required inside city limits. If signed into law, the patchwork system would be homogenized statewide with no permits needed.
Nevertheless, the seemingly simple proposal has drawn fire from gun control groups who criticize the measure.
Backed by a recent poll paid for by former New York Mayor Michael Bloomberg’s Everytown for Gun Safety group that suggests a majority of those asked support a requirement to have a permit for concealed carry, some are calling upon the governor to veto the bill.
“It’s clear from the broad coalition gathered here today and from recent polling showing 83 percent of Montanans support requiring a permit to carry a concealed weapon, that these bills are not what we need to keep our families and communities safe,” said Pamela Owen, with Moms Demand Action, part of the Everytown group, in a statement.
Constitutional carry bills have been a popular item nationwide with state legislatures this session. Texas, New Hampshire, Utah and Kansas have all made progress on measures of their own in recent weeks.
This week West Virginia lawmakers gave their final seal of approval to a measure to bring the practice to that state despite opposition similar to that seen in Montana.
The bill is now headed to the desk of Gov. Steve Bullock (D) who has vetoed gun reform measures on nullification and campus carry since taking office in 2013.
If he elects to not address the legislation within 10 days of official bill transmittal from the legislature, HB 298 will become law without his signature. |
/* tslint:disable:no-default-export no-import-side-effect */
import { Badge, Header } from '@opengov/component-library/capital';
import PropTypes from 'prop-types';
import React from 'react';
// import { Link } from 'gatsby';
import components from '../../data/components.json';
import './base.scss';
import styles from './Layout.scss';
import LayoutBase from './LayoutBase';
// <ul className={styles.tabs} role="tabs">
// <li>
// <Link
// activeClassName={styles.tabsActive}
// to={components[props.component].url}
// >
// Code
// </Link>
// </li>
// <li>
// <Link
// activeClassName={styles.tabsActive}
// to={`${components[props.component].url}/usage`}
// >
// Usage
// </Link>
// </li>
// </ul>
const LayoutComponents = props => (
<LayoutBase activeTab="components">
<div className={styles.pageHeader}>
<div className={styles.tags}>
{components[props.component] ? <Badge variant="strong" text={components[props.component].type} /> : ``}
{components[props.component].tags.map((value, index) => {
return (<Badge key={value} variant="neutral" text={value} />);
})}
</div>
<Header variant="hero">{components[props.component] ? components[props.component].title : props.title}</Header>
<p className={styles.pageDescription}>{components[props.component].description}</p>
</div>
<div className={styles.content}>{props.children}</div>
</LayoutBase>
);
LayoutComponents.propTypes = {
component: PropTypes.string,
componentFolder: PropTypes.string.isRequired,
children: PropTypes.node.isRequired,
description: PropTypes.string,
tags: PropTypes.array,
title: PropTypes.string.isRequired
};
export default LayoutComponents;
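// Illustrative usage sketch (not part of the original file): rendering a docs
// page for a hypothetical "badge" entry. The component key, folder, title and
// children below are assumptions and only work if components.json contains a
// matching entry.
export const ExampleBadgePage = () => (
  <LayoutComponents component="badge" componentFolder="badge" title="Badge">
    <p>Badge documentation content goes here.</p>
  </LayoutComponents>
);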
|
// 2.10.1/errors_test.go
package freetype2
import (
"testing"
)
func TestGetErr(t *testing.T) {
var want error
want = ErrCannotOpenResource
if got := testErrCannotOpenResource(); got != want {
t.Errorf("want err: %v, got %v", want, got)
}
want = ErrUnknownError
if got := testUnmappedErr(); got != want {
t.Errorf("want err: %v, got %v", want, got)
}
}
|
No no no no no no no. TERRIBLE.
Okay.
Kimberly-Clark’s takeover on the paper goods front continues here with Stroller Rentals now “Hosted by Huggies.” The Disney Jr. show is now “Presented by Pull-Ups.” One sponsorship that seems like a no-brainer outside of the “World” is the snacks on airplanes. Why doesn’t some snack company pay to serve new items to a captive, hungry audience?
Lower right.
As with most bathroom refurbishments, easydubz was proud to BREAK the story that these next to City Hall would close through the middle of next month. As budget cuts have closed all other restrooms on property, you will just have to hold it from now on.
Just kidding, there are a couple other options, including in between Casey’s Corner and Crystal Palace, to the right of Guest Relations outside the Park, and you can also head straight back through the Tony’s Town Square lobby though there may be a wait with the number of frozen chicken parmesans they sell.
That concludes the bathroom jokes portion of this post.
Stage work continues at the Castle Forecourt with the new show debuting next month.
The website reasserts that the mosaics seen as you pass through Cinderella Castle are worth taking a moment to appreciate.
The timing on this post is primarily going to be right after Early Morning Magic, the $69/adult, $59/child event that I reviewed here. This is the view that your money buys you at 8:43am.
While this is the scene, and the back of the FastPass+ line for Peter Pan’s Flight, just after 11am.
Peter Pan’s Flight is already posting 20 minutes just five minutes after official open at 9:05am. The actual wait is likely closer to 15.
There were a few questions about when to eat breakfast during Early Morning Magic and what you should do immediately after. It doesn’t make a whole lot of sense in my eyes to eat breakfast during the 8am-9am hour and then rush off to another faraway attraction. From 8:30am – 8:45am you could ride Mine Train or Peter Pan’s Flight three full times. Or you could spend that time eating eggs. Of course, if you’ve gotten your fill of the three rides in 30 minutes, you might elect to eat and head elsewhere. But if you are happy riding Mine Train, Peter Pan’s Flight, and Winnie the Pooh once, it would make more sense to save your $70/person and show up to regular rope drop or at a minimum, do the Be Our Guest Breakfast instead. That would get you one ride on Mine Train before open and then you could hop on Peter Pan’s Flight and Winnie the Pooh with negligible waits immediately after.
It makes the most sense to stick to Fantasyland and potentially Haunted Mansion from 9am-9:45am. I’d look at doing Haunted Mansion and it’s a small world or the Ariel Meet and Greet along with Barnstormer and Dumbo in that slot. Either set of two or three attractions would take 30 to 40 minutes and then you could easily circle back to Village Haus for breakfast closer to 9:50am. Merida would also make sense and Tomorrowland Speedway is probably close enough that it wouldn’t require a great amount of walking.
This is Haunted Mansion at 9:06am when you could walk on.
This is 11:15am with the standby line backed up well past the entrance.
But you could conceivably eat during the 8am-9am hour and head elsewhere. Kenny and I walked right on Big Thunder Mountain around 9:18am.
One ride through took 12 minutes, which is par for the course these days.
Splash would be a walk-on this early as well. It does kind of look like there are a lot of people here, but anybody in this picture would be able to walk the full queue without resistance and wait maybe two minutes at loading for their log.
Feels crowded.
Peter Pan’s Flight up to 40 minutes before 9:30am. FastPass+ ruins lives.
It will be interesting to see if the Early Morning Magic moves over to the other Parks, particularly Epcot for Frozen and Studios for the three or four things to do over there. I’m not sure how much sense it makes elsewhere in Magic Kingdom. One of the big reasons why I think there’s value in the Fantasyland option is just how many times you can move through the rides because their durations are so short. Each only takes about three minutes plus a minute or two to walk the queue. Splash Mountain, even without an initial wait, is going to take at least 20 minutes with a second ride potentially taking around 15 if there’s a way to get back to the loading area without having to walk all the way out and back inside the mountain. Pirates, Jungle Cruise, Haunted Mansion, Space Mountain, and a lot of others are 15-20 minute experiences. Soarin’ would take at least 25 with the pre-show. But we’ll see.
65 minutes for Anna/Elsa at 9:50am. I waited around five first thing.
Mine Train standby backed up outside the entrance and heading towards Storybook Circus. You’d wait about 75 minutes if you got in line now.
30 at Winnie the Pooh, still before 10am here.
Alice meeting with Mad Hatter early.
The website’s various touring plans usually put you here in Storybook Circus around now with Barnstormer still at five minutes and virtually no wait. Disney doesn’t really have any choice other than to run this one at full capacity. There is no easy way to remove and reinsert vehicles.
It’s a lot easier at Dumbo, where Disney is just running one carousel with a 25-minute posted wait and the line backed up to the entrance.
Grounded.
Fortunately, Disney began testing the second carousel as I stood there just before 10am. This family is already using FastPass+
And doubling capacity more or less halves the wait, so I decided to get in line to see how long it would take. Another reason why the line was backed up so far probably has to do with the fact that the play area inside the queue wasn’t even open.
It still ended up taking about 20 minutes, which is too much time to spend this early. I was whining about waiting in the Studios’ rope drop post last week and someone mentioned that they didn’t think waiting 30 minutes for Tower of Terror was very long. And I would agree with that sentiment if it was 1pm during the busiest part of the day. But waiting that long before 10am is significantly more than most of us are accustomed to waiting. Hopefully Chapek, Iger, and company will be able to find their checkbook come May 27th as expected. Waiting 25 minutes for Dumbo when you get in line at 9:30am and not even having the option of letting the kids run around in the play area is kind of a bummer. But maybe it’s great. This website is needlessly negative after all.
It’s strange to see a popular Meet and Greet with a wait that’s less than Dumbo. I posted a screenshot of waits on Twitter today:
This is around 2:30pm on May the 3rd with Dumbo posting the same wait as Anna/Elsa or three times as long as Big Thunder or twice as long as Haunted Mansion.
I hopped on Journey of the Little Mermaid with the 20-minute posted wait at 10:20am.
I ended up being on the ride in 15 minutes, which isn’t too bad.
Me writing this post.
After a couple of lengthy stops on-ride, I was back out front at 10:48am for a total experience time of 28 minutes, which is about eight longer than we’d like.
Always nice to see a familiar face as the crane was back for some turret painting.
Pricing has gotten pretty rough at Be Our Guest lunch as I pointed out when prices rose resort-wide on March 1st. I paid $12.49 for that Roast Beef Sandwich around this time last year. When the menu debuted back in 2012, they served a higher quality steak sandwich that was $8.99 during Passholder previews and then up to $10.99 thereafter. That’s a 54.6% increase in four years or almost double if you want to skew the numbers with the introductory rate.
But people still clamor to eat here. Sometimes I wonder what the maximum price is before this place wouldn’t be booked solid every day. You’d have to expect that $17 is getting close.
Mine Train FP+ is backed up over there somewhere. You might remember that my early morning rides were averaging four minutes including the duration of the ride. I don’t think I ever spent more than a minute in line from the time I entered the building to the time I was on my way.
Standby is still out the door off to the right.
Fantasyland doesn’t look quite as bad as it might have from the back of the Peter Pan’s Flight FP+ queue. Any time you can see concrete is a below average day.
Nick and Judy from Zootopia continue appearing during Move It Shake It.
But they continue being confined to their float, unlike most of the other characters that come down and dance with guests. They actually have several cast members guarding them.
George, a name you might recognize from the forum, and Greg were nice enough to invite me out to lunch at Liberty Tree Tavern.
You may notice something is missing from the sign and the top of the menu below:
The restaurant is no longer sponsored by Craisins. It doesn’t bode well for the cranberry bog at Epcot this fall. The restaurant otherwise closed for several months last year for kitchen modifications that brought Skipper Canteen online last December.
Greg ordered an old standby in the $19 Pilgrims’ Feast – Traditional Roast Turkey served with Herb Bread Stuffing, Mashed Potatoes, and a Garden Vegetable.
With the 20% Tables in Wonderland discount, that puts it just $1.95 more than Be Our Guest’s vastly inferior Turkey Sandwich or the Taco Burger at Pecos Bill. Something to consider perhaps. Anyway, lunch here is reliably fresh and satisfying with several thick cut slices of turkey on top of the rest of Thanksgiving dinner.
I usually recommend lunch over the $33 all-you-care-to-enjoy dinner because you can fill up for about $10 less than the later meal even if you order a fountain beverage and split the Ooey Gooey Toffee Cake, which is out of this world.
George returned to an entree that he had tried the last time we were here together in the The Liberty Boys Sandwich – Slow-roasted Pork, Arugula, and Tomatoes on House-made Bread with Caramelized Shallots, Pickled Jalapeños and a Mushroom-Mayonnaise Spread.
While the description is exactly the same as it was before, the pork is actually much different. If you were to order the sandwich back in 2014, your server would have instinctively warned you that it would arrive with more of a crispy pork belly than you would typically expect from “slow roasted pork.”
Back in 2015, they added “pork belly” to the description as pictured here unattractively. Now it’s a much leaner cut of pork roast.
That concludes today’s lesson on the history of pork at Liberty Tree Tavern.
The pork belly was considerably more decadent, though I think a lot of people will be more accustomed to what they’re serving now. And at $15, it’s less expensive than more than half of Be Our Guest’s entrees. And the fries here are among the best on property, crispy and thick with a fantastic crunch.
I ordered the $14 Vegetarian Proclamation – Roasted Seasonal Vegetables Sandwich, fresh Greens, and Tomatoes topped with a Tangy Vegan Mayonnaise Spread with fresh Fruit or Sweet Potato Fries. I am not sure what other people’s opinions of it are, but it did not work well for me. The vegetables are very similar to the Grilled Vegetable Stack over at Harambe Market, which you go to town on with a knife and fork. The vegetables here in the sandwich slipped right off the thick, dense bread every time I tried to pick it up and smoosh it together to fit in my mouth. I didn’t care much for the texture either, which was my big problem with it. All of the vegetables were soft and wet and it was just not to my sensitive tastes. Those of you that enjoyed it (or not) are welcome to say so in the comments.
Service was friendly and efficient as usual and I don’t have any qualms about recommending lunch here for what might not be much more expensive than quick service. With the sandwiches arriving in two halves, you could probably share an appetizer, entree, and dessert for less than a quick service meal elsewhere.
I originally had a Splash Mountain FastPass+ that was canceled because the ride was down. I returned at 1pm to find the FP+ return line backed up all the way here.
Feels crowded.
So I headed to Pirates of the Caribbean instead with “just” a 40-minute posted wait.
It was also the longest I had ever waited here with FastPass+, getting in line at 1:05pm and not being on the boat until 1:17pm. 12 minutes is not the end of the world, of course, but I think most of us remember when the standby wait was rarely that long.
One of these days I am going to get a usable picture of this. A few more:
The total experience time was 25 minutes, which is five minutes longer than the website’s conservative estimate.
No backup at Jungle Cruise at least.
Despite initially queuing up outside the Monsters Inc. Laugh Floor building, we still managed to get into the next show.
Just under 15 minutes between getting in line and the show starting. It’s worth heading in here I think if you have some extra time. It’s a charming show in a relatively comfortable, air-conditioned theater. And Stitch is nowhere to be found.
We then headed into Buzz with FastPass+ and a 45-minute standby wait at 2:17pm.
And into the gift shop 11 minutes later. Just about the only thing that happened during Buzz’s lengthy refurbishment was Disney added more bars in front of Zurg so people can’t go in between them and get a picture.
You can read more about what George and Greg were able to accomplish by booking additional FastPass+ experiences in this forum post. It looks like they secured a total of ten experiences throughout the afternoon. I think it’s time to rewrite “Just Keep Swimming” to “Just Keep Refreshing.”
As if this wasn’t long and boring enough, I will be back with a general Magic Kingdom update along with a general Hollywood Studios update. I will be at Epcot Friday for breakfast at Garden Grill and if I’m feeling adventurous I will rope drop it and see how long it takes to get through the Joy & Sadness Meet and Greet along with Baymax. |
Model Transformations in YATL. Studies and Experiments
This report describes three examples of model transformations, which have been implemented using YATL and the support provided by the Kent Modelling Framework. Model transformations are supported in KMF by a set of tools such as YATL-Studio, KMF-Studio, OCLCommon, and OCL4KMF. The core of model transformation in KMF is YATL-Studio, a software environment used to create YATL projects and perform model transformations on them. The implementations of the source and target models are generated by KMF-Studio. The OCL 2.0 support is provided by OCLCommon and OCL4KMF, described in more detail in , which implement the OCL 2.0 standard.
ENVIRONMENT
The OMG's MDA is a new approach to developing large software systems. The core technologies of MDA are the Unified Modeling Language (UML), Meta-Object Facility (MOF), XML Metadata Interchange (XMI) and the Common Warehouse Metamodel (CWM). These standards are used to facilitate the design, description, exchange, and storage of models. MDA also introduces other important concepts: the Platform-Independent Model (PIM), the Platform-Specific Model (PSM), the transformation language, and the transformation engine. The relations and interactions between these concepts in KMF are depicted in Figure 1.1.
/************************************************************************
Copyright 2014 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************/
package com.virtualcoinclub.common;
/**
* <h1>OfferUpdate (object for JSON transactions)</h1>
* Object to send information about an offer to the back end
* (e.g. an update to the item_desc)
* <p>
* Sample usage:
* <pre><code>
* OfferUpdate upd = gson.fromJson(reader, OfferUpdate.class);
* int oid = upd.getOID();
* String action = upd.getAction();
* String data = upd.getData();
* allSQL.updateOffer(oid, action, data);
* </code></pre>
* <p>
*
* @author <NAME>
* @version 1.0
* @since 2014-08-23
*/
public class OfferUpdate {
private String sid;
private int mid;
private int oid;
private String action;
private String data;
private String result;
public OfferUpdate(String _sid, int _mid) {
sid = _sid;
mid = _mid;
}
public String getSID() {
return sid;
}
public void setSID(String _sid) {
sid = _sid;
}
public int getMID() {
return mid;
}
public void setMID(int _mid) {
mid = _mid;
}
public void setOID(int _oid) {
oid = _oid;
}
public void setOID(String oidString) {
oid = Integer.parseInt(oidString);
}
public int getOID() {
return oid;
}
public void setData(String _data) {
data = _data;
}
public String getData() {
return data;
}
public void setAction(String _action) {
action = _action;
}
public String getAction() {
return action;
}
public void setResult(String _result) {
result = _result;
}
public String getResult() {
return result;
}
public String toJSON() {
String json = "{";
json = json + "\"sid\" : \"" + sid;
json = json + "\", \"mid\" : \"" + mid;
json = json + "\", \"oid\" : \"" + oid;
json = json + "\", \"action\" : \"" + action;
json = json + "\", \"data\" : \"" + data;
json = json + "\", \"result\" : \"" + result;
json = json + "\"}";
return json;
}
}
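// Minimal usage sketch (not part of the original file): it assumes Gson
// (com.google.gson.Gson) is on the classpath, as the Javadoc example above
// suggests, and all field values below are purely illustrative.
class OfferUpdateDemo {
    public static void main(String[] args) {
        OfferUpdate upd = new OfferUpdate("session-123", 42);
        upd.setOID(7);
        upd.setAction("update_desc");
        upd.setData("Slightly used bicycle");
        String wire = upd.toJSON(); // hand-built JSON string
        // Round-trip through Gson, mirroring the Javadoc example.
        OfferUpdate echo = new com.google.gson.Gson().fromJson(wire, OfferUpdate.class);
        System.out.println(echo.getOID() + " " + echo.getAction());
    }
}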
|
/*
* Schedules the timer for the customer level. Once the timer expires, the game is over.
*/
public void scheduleCustomerLevelTimer(Level level) {
myLevelTimer = new Timer();
myLevelTimer.schedule(new TimerTask() {
public void run() {
Platform.runLater(new Runnable() {
public void run() {
level.setGameOver(true);
endGame(level);
}
});
}
}, LEVEL_DURATION);
} |
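/*
 * Hedged companion sketch (not from the original class): if the level can end
 * before the timer fires, the pending task should be cancelled so endGame() is
 * not invoked twice. myLevelTimer is the field scheduled above; the method name
 * here is hypothetical.
 */
public void cancelCustomerLevelTimer() {
    if (myLevelTimer != null) {
        myLevelTimer.cancel(); // discards the scheduled TimerTask
        myLevelTimer = null;
    }
}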
/**
* Defines Types/Enums.
*/
package Models.Types;
|
/**
* NO CHANGES NEEDED ON THIS CLASS FOR THE liveProject
*
* Request Listener for the initialization and destroying requests. This class
* registers into Tomcat via the web.xml and provides logs to help in debugging
* the webapp.
*
*/
public class RequestListener implements ServletRequestListener {
public void requestInitialized(ServletRequestEvent servletRequestEvent) {
ServletRequest servletRequest = servletRequestEvent.getServletRequest();
AppLogger.log("ServletRequest initialized. Remote IP: "
+ servletRequest.getRemoteAddr());
}
public void requestDestroyed(ServletRequestEvent servletRequestEvent) {
ServletRequest servletRequest = servletRequestEvent.getServletRequest();
AppLogger.log("ServletRequest destroyed. Remote IP: "
+ servletRequest.getRemoteAddr());
}
} |
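<!-- Hedged illustration (not shipped with the class above): a listener like
     RequestListener is registered with Tomcat through WEB-INF/web.xml. The
     package name below is an assumption; adjust it to the actual class. -->
<listener>
    <listener-class>com.example.logging.RequestListener</listener-class>
</listener>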
// [Blend2D]
// 2D Vector Graphics Powered by a JIT Compiler.
//
// [License]
// Zlib - See LICENSE.md file in the package.
#ifndef BLEND2D_PIPEGEN_BLFETCHPATTERNPART_P_H
#define BLEND2D_PIPEGEN_BLFETCHPATTERNPART_P_H
#include "../pipegen/blfetchpart_p.h"
//! \cond INTERNAL
//! \addtogroup blend2d_internal_pipegen
//! \{
namespace BLPipeGen {
// ============================================================================
// [BLPipeGen::FetchPatternPart]
// ============================================================================
//! Base class for all pattern fetch parts.
class FetchPatternPart : public FetchPart {
public:
BL_NONCOPYABLE(FetchPatternPart)
//! Common registers (used by all fetch types).
struct CommonRegs {
//! Pattern width (32-bit).
x86::Gp w;
//! Pattern height (32-bit).
x86::Gp h;
//! Pattern pixels (pointer to the first scanline).
x86::Gp srctop;
//! Pattern stride.
x86::Gp stride;
//! Pattern stride (original value, used by PatternSimple only).
x86::Gp strideOrig;
//! Pointer to the previous scanline and/or pixel (fractional).
x86::Gp srcp0;
//! Pointer to the current scanline and/or pixel (aligned).
x86::Gp srcp1;
};
FetchPatternPart(PipeCompiler* pc, uint32_t fetchType, uint32_t fetchPayload, uint32_t format) noexcept;
//! Tests whether the fetch-type is simple pattern {axis-aligned or axis-unaligned}.
inline bool isSimple() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_SIMPLE_FIRST, BL_PIPE_FETCH_TYPE_PATTERN_SIMPLE_LAST); }
//! Tests whether the fetch-type is an affine pattern style.
inline bool isAffine() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_FIRST, BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_LAST); }
};
// ============================================================================
// [BLPipeGen::FetchSimplePatternPart]
// ============================================================================
//! Simple pattern fetch part.
//!
//! Simple pattern fetch doesn't do scaling or affine transformations, however,
//! can perform fractional pixel translation described as Fx and Fy values.
class FetchSimplePatternPart : public FetchPatternPart {
public:
BL_NONCOPYABLE(FetchSimplePatternPart)
//! Aligned and fractional blits.
struct SimpleRegs : public CommonRegs {
//! X position.
x86::Gp x;
//! Y position (counter, decreases to zero).
x86::Gp y;
//! X repeat/reflect.
x86::Gp rx;
//! Y repeat/reflect.
x86::Gp ry;
//! X padded to [0-W) range.
x86::Gp xPadded;
//! X origin, assigned to `x` at the beginning of each scanline.
x86::Gp xOrigin;
//! X restart (used by scalar implementation, points to either -W or 0).
x86::Gp xRestart;
//! Last loaded pixel (or combined pixel) of the first (srcp0) scanline.
x86::Xmm pixL;
x86::Xmm wb_wb;
x86::Xmm wd_wd;
x86::Xmm wa_wb;
x86::Xmm wc_wd;
// Only used by fetchN.
//! X position vector `[ x, x+1, x+2, x+3]`.
x86::Xmm xVec4;
//! X setup vector `[ 0, 1, 2, 3]`.
x86::Xmm xSet4;
//! X increment vector `[ 4, 4, 4, 4]`.
x86::Xmm xInc4;
//! X normalize vector.
x86::Xmm xNrm4;
//! X maximum vector `[max, max, max, max]`.
x86::Xmm xMax4;
};
uint8_t _extendX;
BLWrap<SimpleRegs> f;
FetchSimplePatternPart(PipeCompiler* pc, uint32_t fetchType, uint32_t fetchPayload, uint32_t format) noexcept;
//! Tests whether the fetch-type is axis-aligned blit (no extend modes, no overflows)
inline bool isBlitA() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AA_BLIT); }
//! Tests whether the fetch-type is axis-aligned pattern.
inline bool isPatternA() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AA_FIRST, BL_PIPE_FETCH_TYPE_PATTERN_AA_LAST); }
//! Tests whether the fetch-type is a "FracBi" pattern style.
inline bool isPatternF() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AU_FIRST, BL_PIPE_FETCH_TYPE_PATTERN_AU_LAST); }
//! Tests whether the fetch-type is a "FracBiX" pattern style.
inline bool isPatternFx() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_FX_FIRST, BL_PIPE_FETCH_TYPE_PATTERN_FX_LAST); }
//! Tests whether the fetch-type is a "FracBiY" pattern style.
inline bool isPatternFy() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_FY_FIRST, BL_PIPE_FETCH_TYPE_PATTERN_FY_LAST); }
//! Tests whether the fetch-type is a "FracBiXY" pattern style.
inline bool isPatternFxFy() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_FX_FY_FIRST, BL_PIPE_FETCH_TYPE_PATTERN_FX_FY_LAST); }
//! Tests whether the fetch is pattern style that has fractional `x` or `x & y`.
inline bool hasFracX() const noexcept { return isPatternFx() || isPatternFxFy(); }
//! Tests whether the fetch is pattern style that has fractional `y` or `x & y`.
inline bool hasFracY() const noexcept { return isPatternFy() || isPatternFxFy(); }
//! Returns the extend-x mode.
inline uint32_t extendX() const noexcept { return _extendX; }
void _initPart(x86::Gp& x, x86::Gp& y) noexcept override;
void _finiPart() noexcept override;
void advanceY() noexcept override;
void startAtX(x86::Gp& x) noexcept override;
void advanceX(x86::Gp& x, x86::Gp& diff) noexcept override;
void advanceXByOne() noexcept;
void repeatOrReflectX() noexcept;
void prefetchAccX() noexcept;
// NOTE: We don't do prefetch here. Since the prefetch we need is the same
// for `prefetch1()` and `prefetchN()` we always prefetch by `prefetchAccX()`
// during `startAtX()` and `advanceX()`.
void fetch1(PixelARGB& p, uint32_t flags) noexcept override;
void enterN() noexcept override;
void leaveN() noexcept override;
void prefetchN() noexcept override;
void postfetchN() noexcept override;
void fetch4(PixelARGB& p, uint32_t flags) noexcept override;
void fetch8(PixelARGB& p, uint32_t flags) noexcept override;
};
// ============================================================================
// [BLPipeGen::FetchAffinePatternPart]
// ============================================================================
//! Affine pattern fetch part.
class FetchAffinePatternPart : public FetchPatternPart {
public:
BL_NONCOPYABLE(FetchAffinePatternPart)
struct AffineRegs : public CommonRegs {
//! Horizontal X/Y increments.
x86::Xmm xx_xy;
//! Vertical X/Y increments.
x86::Xmm yx_yy;
x86::Xmm tx_ty;
x86::Xmm px_py;
x86::Xmm ox_oy;
//! Normalization after `px_py` gets out of bounds.
x86::Xmm rx_ry;
//! Like `px_py` but one pixel ahead [fetch4].
x86::Xmm qx_qy;
//! Advance twice (like `xx_xy`, but doubled) [fetch4].
x86::Xmm xx2_xy2;
//! Pad minimum coords.
x86::Xmm minx_miny;
//! Pad maximum coords.
x86::Xmm maxx_maxy;
//! Correction .
x86::Xmm corx_cory;
//! Pattern width and height as doubles.
x86::Xmm tw_th;
//! Vector of pattern indexes.
x86::Xmm vIdx;
//! Vector containing multipliers for Y/X pairs.
x86::Xmm vAddrMul;
};
BLWrap<AffineRegs> f;
FetchAffinePatternPart(PipeCompiler* pc, uint32_t fetchType, uint32_t fetchPayload, uint32_t format) noexcept;
inline bool isAffineNn() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_NN_ANY) | isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_NN_OPT); }
inline bool isAffineBi() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_BI_ANY) | isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_BI_OPT); }
inline bool isOptimized() const noexcept { return isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_NN_OPT) | isFetchType(BL_PIPE_FETCH_TYPE_PATTERN_AFFINE_BI_OPT); }
void _initPart(x86::Gp& x, x86::Gp& y) noexcept override;
void _finiPart() noexcept override;
void advanceY() noexcept override;
void startAtX(x86::Gp& x) noexcept override;
void advanceX(x86::Gp& x, x86::Gp& diff) noexcept override;
void advancePxPy(x86::Xmm& px_py, const x86::Gp& i) noexcept;
void normalizePxPy(x86::Xmm& px_py) noexcept;
void prefetch1() noexcept override;
void fetch1(PixelARGB& p, uint32_t flags) noexcept override;
void enterN() noexcept override;
void leaveN() noexcept override;
void prefetchN() noexcept override;
void postfetchN() noexcept override;
void fetch4(PixelARGB& p, uint32_t flags) noexcept override;
enum ClampStep : uint32_t {
kClampStepA_NN,
kClampStepA_BI,
kClampStepB_NN,
kClampStepB_BI,
kClampStepC_NN,
kClampStepC_BI
};
void clampVIdx32(x86::Xmm& dst, const x86::Xmm& src, uint32_t step) noexcept;
};
} // {BLPipeGen}
//! \}
//! \endcond
#endif // BLEND2D_PIPEGEN_BLFETCHPATTERNPART_P_H
|
#include <iostream>
#include "llist.h"
struct Pokemon {
int id;
std::string name;
Pokemon()
: id(0), name("") { }
Pokemon(int i, std::string n)
: id(i), name(n) { }
};
int main() {
LList<Pokemon> pokedex;
pokedex.push_back(Pokemon(1, "Bulbasaur"));
pokedex.push_back(Pokemon(4, "Charmander"));
pokedex.push_back(Pokemon(7, "Squirtle"));
for (int i = 0; i < pokedex.size(); i++) {
std::cout << pokedex.get(i).id << " " << pokedex.get(i).name << std::endl;
}
pokedex.clear();
pokedex.push_back(Pokemon(144, "Articuno"));
pokedex.push_back(Pokemon(145, "Zapdos"));
pokedex.push_back(Pokemon(146, "Moltres"));
for (int i = 0; i < pokedex.size(); i++) {
std::cout << pokedex.get(i).id << " " << pokedex.get(i).name << std::endl;
}
return 0;
}
|
def clear_bl():
    """Clear the busy light by writing its freshly-initialised state and closing the device."""
    bl = BusyLight()
    bl.write()
    bl.close()
//we do not call this method with parse data from inet_aton or single segment strings, so the cast to int is fine.
//this is only for addresses with standard segment counts, although we do allow compressed.
func (parseData *parsedIPAddress) isPrefixSubnet(networkPrefixLength BitCount) bool {
var bytesPerSegment int
var max SegInt
var bitsPerSegment BitCount
if parseData.isProvidingIPv4() {
bytesPerSegment = IPv4BytesPerSegment
bitsPerSegment = IPv4BitsPerSegment
max = IPv4MaxValuePerSegment
} else {
bytesPerSegment = IPv6BytesPerSegment
bitsPerSegment = IPv6BitsPerSegment
max = IPv6MaxValuePerSegment
}
addressParseData := parseData.getAddressParseData()
segmentCount := addressParseData.getSegmentCount()
if parseData.isCompressed() {
compressedCount := IPv6SegmentCount - segmentCount
compressedIndex := addressParseData.getConsecutiveSeparatorSegmentIndex()
return isPrefixSubnet(
func(segmentIndex int) SegInt {
if segmentIndex >= compressedIndex {
if segmentIndex-compressedIndex < compressedCount {
return 0
}
segmentIndex -= compressedCount
}
return SegInt(parseData.getValue(segmentIndex, keyLower))
},
func(segmentIndex int) SegInt {
if segmentIndex >= compressedIndex {
if segmentIndex-compressedIndex < compressedCount {
return 0
}
segmentIndex -= compressedCount
}
return SegInt(parseData.getValue(segmentIndex, keyUpper))
},
segmentCount+compressedCount,
bytesPerSegment,
bitsPerSegment,
max,
networkPrefixLength,
zerosOrFullRange)
}
return isPrefixSubnet(
func(segmentIndex int) SegInt {
return SegInt(parseData.getValue(segmentIndex, keyLower))
},
func(segmentIndex int) SegInt {
return SegInt(parseData.getValue(segmentIndex, keyUpper))
},
segmentCount,
bytesPerSegment,
bitsPerSegment,
max,
networkPrefixLength,
zerosOrFullRange)
} |
extern crate dotenv;
extern crate env_logger;
extern crate thruster;
extern crate futures;
extern crate serde;
extern crate serde_json;
extern crate tokio;
extern crate tokio_proto;
extern crate tokio_service;
extern crate uuid;
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate diesel;
pub mod schema;
pub mod models;
mod context;
mod util;
use std::env;
use std::boxed::Box;
use dotenv::dotenv;
use futures::{future, Future};
use thruster::{middleware, App, MiddlewareChain, MiddlewareReturnValue};
use thruster::server::Server;
use thruster::ThrusterServer;
use std::time::Instant;
use crate::context::{generate_context, Ctx};
fn profiling(context: Ctx, next: impl Fn(Ctx) -> MiddlewareReturnValue<Ctx> + Send + Sync) -> MiddlewareReturnValue<Ctx> {
let start_time = Instant::now();
let ctx_future = next(context)
.and_then(move |ctx| {
let elapsed_time = start_time.elapsed();
println!("[{}μs] {} -- {}",
elapsed_time.as_micros(),
ctx.request.method(),
ctx.request.path());
future::ok(ctx)
});
Box::new(ctx_future)
}
fn ping(mut context: Ctx, _next: impl Fn(Ctx) -> MiddlewareReturnValue<Ctx> + Send + Sync) -> MiddlewareReturnValue<Ctx> {
let val = "pong";
context.body(val);
Box::new(future::ok(context))
}
fn not_found(mut context: Ctx, _next: impl Fn(Ctx) -> MiddlewareReturnValue<Ctx> + Send + Sync) -> MiddlewareReturnValue<Ctx> {
context.body("Whoops! Nothing here!");
context.status(404);
Box::new(future::ok(context))
}
fn main() {
dotenv().ok();
std::env::set_var("RUST_LOG", "actix_server=info,actix_web=error");
let mut app = App::create(generate_context);
//app.use_middleware("/", middleware![Ctx => profiling]);
app.get("/v1/secret", middleware![Ctx => ping]);
app.set404(middleware![Ctx => not_found]);
let host = env::var("HOST")
.unwrap_or("0.0.0.0".to_string());
let port = env::var("PORT")
.unwrap_or("8080".to_string());
println!("Running on {}:{}", &host, &port);
let server = Server::new(app);
server.start(&host, port.parse::<u16>().unwrap());
}
|
import { IProtocolType } from "./IProtocolType";
import { WireTypeVarint32, WireType } from "../../WireType";
import { ISubSource } from "../source/ISubSource";
import { CodedInputStream } from "../source/CodedInputStream";
/**
* protocol type specific to a 64-bit signed numeric value (concerning output, may be shorter when encoded)<br>
*
* @author h.fleischer
* @since 26.07.2019
*/
export class ProtocolTypeSint64 implements IProtocolType<number, WireTypeVarint32> {
decode(source: ISubSource): number {
return CodedInputStream.decodeZigZag(source.readRawVarint64());
}
getWireType(): WireTypeVarint32 {
return WireType.get(WireType.INDEX_________VARINT32);
}
} |
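// Hedged illustration (not part of the library above): ZigZag is the standard
// protobuf mapping from signed integers to unsigned varints, so small negative
// values stay short on the wire. This standalone sketch uses bigint; the real
// decoding path goes through CodedInputStream.decodeZigZag.
export const zigZagDecode64 = (encoded: bigint): bigint => (encoded >> 1n) ^ -(encoded & 1n);
// zigZagDecode64(0n) === 0n, zigZagDecode64(1n) === -1n, zigZagDecode64(2n) === 1n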
PHYSIOLOGICAL RESPONSES OF PEANUT CROPS TO IRRIGATION WITH BRACKISH WATERS AND APPLICATION OF ORGANO-MINERAL FERTILIZERS
ABSTRACT The use of organo-mineral fertilizer is an alternative measure to mitigate salt stress in semiarid regions. Thus, the objective of this work was to evaluate the physiological indexes of peanut crops under irrigations with fresh and brackish waters and applications of organo-mineral fertilizers. The experiment was conducted from June to September, 2019, at the Universidade da Integração Internacional da Lusofonia Afro-Brasileira (UNILAB), in Redenção, state of Ceará, Brazil, using a completely randomized experimental design in a 5 × 2 factorial arrangement, with four replications. The treatments consisted of five soil fertilizers (F1= 100% NPK mineral fertilizer at the recommended rate; F2= 100% bovine manure-based biofertilizer; F3= 100% plant ash; F4= 50% mineral fertilizer and 50% bovine manure-based biofertilizer; and F5= 50% mineral fertilizer and 50% plant ash); and two salinity levels (electrical conductivities) of the irrigation water (1.0 and 5.0 dS m-1). Photosynthetic rate, transpiration, stomatal conductance, leaf temperature, internal CO2 concentration, water use efficiency, and chlorophyll index of the plants were evaluated at 40 and 54 days after sowing (DAS). Plants irrigated with fresh water presented higher stomatal conductance, photosynthetic rate, and transpiration, regardless of the fertilizer used. The use of 100% bovine manure-based biofertilizer resulted in decreases in salt stress and increases in water use efficiency at 40 DAS, and decreases in leaf temperature and increases in relative chlorophyll content at 54 DAS. |
import { RestEndpointMethodTypes } from '@octokit/plugin-rest-endpoint-methods';
// https://github.com/octokit/plugin-rest-endpoint-methods.js
export type PullsListParams = RestEndpointMethodTypes['pulls']['list']['parameters'];
export type IssuesListForRepoParams = RestEndpointMethodTypes['issues']['listForRepo']['parameters'];
export type IssuesListCommentsParams = RestEndpointMethodTypes['issues']['listComments']['parameters'];
export type PullsListCommitsParams = RestEndpointMethodTypes['pulls']['listCommits']['parameters'];
export type GetContentsResponse = RestEndpointMethodTypes['repos']['getContent']['response'] & GitHubSymlink;
export type ReposGetResponseData = RestEndpointMethodTypes['repos']['get']['response'];
// fix type as the target is missing https://developer.github.com/v3/repos/contents/#response-if-content-is-a-symlink
type GitHubSymlink = {
data: {
target: string;
};
};
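// Hedged usage sketch (not part of the original module): the extracted parameter
// types can annotate plain option objects before they are passed to Octokit;
// the owner/repo values below are placeholders.
//
//   const openPullsQuery: PullsListParams = {
//     owner: 'octocat',
//     repo: 'hello-world',
//     state: 'open',
//     per_page: 50,
//   };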
|
CYP2D6 genotyping and the use of tamoxifen in breast cancer.
Donald Berry in his recent editorial (1) regarding the role of CYP2D6 and tamoxifen highlights several controversies that exist in the clinical and biomarker field for women with breast cancer. The International Tamoxifen Pharmacogenomics Consortium (ITPC) was formed with the intent to aggregate, curate, and analyze the data available in published breast cancer studies with the hope of answering the question, “Should CYP2D6 genotyping guide the use of tamoxifen in breast cancer?” The ITPC analyzed nearly 5000 patients regarding the association between CYP2D6 and clinical outcomes (2). These data demonstrated that CYP2D6 genotype was associated with the risk of recurrence or death in those patients who received tamoxifen monotherapy for 5 years but not in patients who received different doses or duration of tamoxifen or those who received chemotherapy along with tamoxifen. We are in agreement with Berry that further research is required to understand these complex relationships, and our intent has always been that others will be able to use the ITPC dataset to understand the data as fully as possible in conjunction with their own research efforts. The complete dataset of genotypes and clinical variables, the analysis code, and the full analyses are transparently available at PharmGKB (http://www.pharmgkb. org). Although we disagree with Berry about his conclusion in answering the allimportant question of whether CYP2D6guided genotype therapy is appropriate, we invite other researchers and practitioners to examine the data from their own perspectives and engage in ongoing efforts to use these data to the benefit of patients with breast cancer. |
# Count the distinct non-empty items in an input such as "[a, b, b, c]".
a = input()[1:-1].split(", ")
seen = []
count = 0
for char in a:
    if char not in seen and char != "":
        seen.append(char)
        count += 1
print(count)
|
use core::cmp::min;
use std::collections::{hash_map::DefaultHasher, HashMap};
use std::hash::{Hash, Hasher};
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
enum Acre {
OpenGround,
Trees,
Lumberyard,
}
fn parse(input: &str) -> (Vec<Acre>, usize) {
let acres = input
.chars()
.filter_map(|c| match c {
'.' => Some(Acre::OpenGround),
'|' => Some(Acre::Trees),
'#' => Some(Acre::Lumberyard),
_ => None,
})
.collect::<Vec<_>>();
// Integer square root...
let size = (1..acres.len()).find(|i| i * i == acres.len()).expect("Invalid size");
(acres, size)
}
#[allow(dead_code)]
fn draw(acres: &[Acre], size: usize) {
for y in 0..size {
let line = (0..size)
.map(|x| match acres[x + size * y] {
Acre::OpenGround => '.',
Acre::Trees => '|',
Acre::Lumberyard => '#',
})
.collect::<String>();
println!("{}", line);
}
}
#[allow(dead_code)]
fn solve(input: &str, iterations: usize) -> usize {
let (acres, size) = parse(input);
let mut index_by_hash = HashMap::<u64, usize>::new();
let mut answers_by_index = Vec::<usize>::new();
let mut next_acres = acres.clone();
let mut prev_acres = acres;
let mut i = 0;
loop {
for y in 0..size {
for x in 0..size {
let mut trees_count = 0;
let mut lumberyard_count = 0;
let mut count = |x, y| match prev_acres[x + size * y] {
Acre::Trees => trees_count += 1,
Acre::Lumberyard => lumberyard_count += 1,
_ => {}
};
#[allow(clippy::needless_range_loop)]
for y2 in if y == 0 { 0 } else { y - 1 }..min(y + 2, size) {
for x2 in if x == 0 { 0 } else { x - 1 }..min(x + 2, size) {
if (x, y) != (x2, y2) {
count(x2, y2);
}
}
}
next_acres[x + size * y] = match prev_acres[x + size * y] {
Acre::OpenGround if trees_count >= 3 => Acre::Trees,
Acre::Trees if lumberyard_count >= 3 => Acre::Lumberyard,
Acre::Lumberyard if trees_count == 0 || lumberyard_count == 0 => {
Acre::OpenGround
}
acre => acre,
};
}
}
let mut trees_count = 0;
let mut lumberyard_count = 0;
for acre in next_acres.iter() {
match acre {
Acre::Trees => trees_count += 1,
Acre::Lumberyard => lumberyard_count += 1,
_ => {}
}
}
let answer = trees_count * lumberyard_count;
if i + 1 == iterations {
return answer;
}
let hash = {
let mut hasher = DefaultHasher::new();
Hash::hash_slice(&next_acres, &mut hasher);
hasher.finish()
};
if let Some(i0) = index_by_hash.get(&hash) {
let repeating_slice = &answers_by_index[*i0..i];
return repeating_slice[(iterations - i0 - 1) % repeating_slice.len()];
}
answers_by_index.push(answer);
index_by_hash.insert(hash, i);
prev_acres.clone_from(&next_acres);
i += 1;
}
}
#[cfg(test)]
mod tests {
use super::*;
const TEST_INPUT: &str = include_str!("test_input");
const INPUT: &str = include_str!("input");
#[test]
fn part1_works() {
assert_eq!(solve(TEST_INPUT, 10), 1147);
assert_eq!(solve(INPUT, 10), 511_000);
}
#[test]
fn part2_works() {
assert_eq!(solve(INPUT, 542), 211_050);
assert_eq!(solve(INPUT, 1_000_000_000), 194_934);
}
}
|
/**
* Returns true if the given profile_row is applied.
*/
static bool
is_row_applied(const struct ovsrec_qos *profile_row)
{
if (profile_row == NULL) {
return false;
}
const struct ovsrec_system *system_row = ovsrec_system_first(idl);
if (system_row->qos == profile_row) {
return true;
}
const struct ovsrec_port *port_row;
OVSREC_PORT_FOR_EACH(port_row, idl) {
if (port_row->qos == profile_row) {
return true;
}
}
return false;
} |
package com.jsplec.wp.command;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import com.jsplec.wp.dao.LoginSubDao;
public class LoginSubCommand implements ACommand {
@Override
public void execute(HttpServletRequest request, HttpServletResponse response, HttpSession session){
//String id = (String)session.getAttribute("USERID");
int userno = (Integer)session.getAttribute("USERNO");
LoginSubDao LoginSubDao = new LoginSubDao();
int subjud = LoginSubDao.subjud(userno);
session.setAttribute("subjud", subjud);
}
}
package ecommerce.firstcart.util;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jms.annotation.JmsListener;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ecommerce.cartservice.entity.Product;
import ecommerce.cartservice.repository.ProductRepository;
@Component
public class JmsConsumer {
@Autowired
ProductRepository productRepository;
// @JmsListener(destination = "${product.jms.destination}")
public void consumeMessage(String data) {
try {
ObjectMapper mapper = new ObjectMapper();
Product product = mapper.readValue(data, Product.class);
productRepository.save(product);
} catch (JsonMappingException e) {
e.printStackTrace();
} catch (JsonProcessingException e) {
e.printStackTrace();
}
}
}
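# Hedged configuration sketch (not part of the repository): the commented-out
# @JmsListener above resolves its destination from a property such as the one
# below; the value is an assumption for illustration only.
product.jms.destination=product.updates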
|
// Copyright 2018 The GeoRust Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use json::{Deserialize, Deserializer, JsonObject, Serialize, Serializer};
use serde_json;
use {util, Arc, Bbox, Error, NamedGeometry, TopoJson};
/// Transforms
///
/// [TopoJSON Format Specification § 2.1.2](https://github.com/topojson/topojson-specification#212-transforms)
#[derive(Clone, Debug, PartialEq)]
pub struct TransformParams {
pub scale: [f64; 2],
pub translate: [f64; 2],
}
impl<'a> From<&'a TransformParams> for JsonObject {
fn from(transform: &'a TransformParams) -> JsonObject {
let mut map = JsonObject::new();
map.insert(
String::from("scale"),
::serde_json::to_value(&transform.scale).unwrap(),
);
map.insert(
String::from("translate"),
::serde_json::to_value(&transform.translate).unwrap(),
);
map
}
}
impl TransformParams {
pub fn from_json_object(mut object: JsonObject) -> Result<Self, Error> {
let scale_translate = util::get_scale_translate(&mut object)?;
Ok(scale_translate.unwrap())
}
}
impl Serialize for TransformParams {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
JsonObject::from(self).serialize(serializer)
}
}
impl<'de> Deserialize<'de> for TransformParams {
fn deserialize<D>(deserializer: D) -> Result<TransformParams, D::Error>
where
D: Deserializer<'de>,
{
use serde::de::Error as SerdeError;
let val = JsonObject::deserialize(deserializer)?;
TransformParams::from_json_object(val).map_err(D::Error::custom)
}
}
/// Topology object
///
/// [TopoJSON Format Specification § 2.1](https://github.com/topojson/topojson-specification#21-topology-objects)
#[derive(Clone, Debug, PartialEq)]
pub struct Topology {
pub bbox: Option<Bbox>,
pub objects: Vec<NamedGeometry>,
pub transform: Option<TransformParams>,
pub arcs: Vec<Arc>,
pub foreign_members: Option<JsonObject>,
}
impl<'a> From<&'a Topology> for JsonObject {
fn from(topo: &'a Topology) -> JsonObject {
let mut map = JsonObject::new();
map.insert(String::from("type"), json!("Topology"));
map.insert(
String::from("arcs"),
serde_json::to_value(&topo.arcs).unwrap(),
);
let mut objects = JsonObject::new();
for named_geom in topo.objects.iter() {
objects.insert(
named_geom.name.clone(),
serde_json::to_value(named_geom.geometry.clone()).unwrap(),
);
}
map.insert(String::from("objects"), serde_json::Value::Object(objects));
if let Some(ref bbox) = topo.bbox {
map.insert(String::from("bbox"), serde_json::to_value(bbox).unwrap());
}
if let Some(ref transform_params) = topo.transform {
map.insert(
String::from("transform"),
serde_json::to_value(transform_params).unwrap(),
);
}
if let Some(ref foreign_members) = topo.foreign_members {
for (key, value) in foreign_members {
map.insert(key.to_owned(), value.to_owned());
}
}
map
}
}
impl Topology {
pub fn from_json_object(mut object: JsonObject) -> Result<Self, Error> {
match util::expect_type(&mut object)? {
ref type_ if type_ == "Topology" => Ok(Topology {
bbox: util::get_bbox(&mut object)?,
objects: util::get_objects(&mut object)?,
transform: util::get_scale_translate(&mut object)?,
arcs: util::get_arcs_position(&mut object)?,
foreign_members: util::get_foreign_members(object)?,
}),
type_ => Err(Error::ExpectedType {
expected: "Topology".to_owned(),
actual: type_,
}),
}
}
pub fn list_names(&self) -> Vec<String> {
self.objects
.iter()
.cloned()
.map(|g| g.name)
.collect::<Vec<String>>()
}
}
impl Serialize for Topology {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
JsonObject::from(self).serialize(serializer)
}
}
impl<'de> Deserialize<'de> for Topology {
fn deserialize<D>(deserializer: D) -> Result<Topology, D::Error>
where
D: Deserializer<'de>,
{
use serde::de::Error as SerdeError;
let val = JsonObject::deserialize(deserializer)?;
Topology::from_json_object(val).map_err(D::Error::custom)
}
}
impl Into<Option<Topology>> for TopoJson {
fn into(self) -> Option<Topology> {
match self {
TopoJson::Topology(i) => Some(i),
_ => None,
}
}
}
#[cfg(test)]
mod tests {
use json::JsonObject;
use serde_json;
use {Error, Geometry, NamedGeometry, TopoJson, Topology, TransformParams, Value};
fn encode(topo: &Topology) -> String {
serde_json::to_string(&topo).unwrap()
}
fn decode(json_string: String) -> TopoJson {
json_string.parse().unwrap()
}
#[test]
fn encode_decode_topology_arcs() {
let topo_json_str =
"{\"arcs\":[[[2.2,2.2],[3.3,3.3]]],\"objects\":{},\"type\":\"Topology\"}";
let topo = Topology {
arcs: vec![vec![vec![2.2, 2.2], vec![3.3, 3.3]]],
objects: vec![],
bbox: None,
transform: None,
foreign_members: None,
};
// Test encode
let json_string = encode(&topo);
assert_eq!(json_string, topo_json_str);
// Test decode
let decoded_topo = match decode(json_string) {
TopoJson::Topology(t) => t,
_ => unreachable!(),
};
assert_eq!(decoded_topo, topo);
}
#[test]
fn decode_invalid_topology_no_objects() {
let topo_json_str = "{\"arcs\":[[[2.2,2.2],[3.3,3.3]]],\"type\":\"Topology\"}";
// Decode should fail due to the absence of the 'objects' member:
let result = topo_json_str.to_string().parse::<TopoJson>();
assert!(result.is_err());
match result {
Err(e) => assert_eq!(e, Error::TopologyExpectedObjects),
_ => panic!(),
}
}
#[test]
fn decode_invalid_topology_no_arcs() {
let topo_json_str = "{\"objects\":{},\"type\":\"Topology\"}";
// Decode should fail due to the absence of the 'objects' member:
let result = topo_json_str.to_string().parse::<TopoJson>();
assert!(result.is_err());
match result {
Err(e) => assert_eq!(e, Error::TopologyExpectedArcs),
_ => panic!(),
}
}
#[test]
fn decode_invalid_topology_bad_type() {
let topo_json_str = "{\"arcs\":[[[2.2,2.2],[3.3,3.3]]],\"type\":\"foo\",\"objects\":{\"example\":{\"arcs\":[0],\"type\":\"LineString\"}}}";
// Decode should fail due to the absence of the 'objects' member:
let result = topo_json_str.to_string().parse::<TopoJson>();
assert!(result.is_err());
match result {
Err(e) => assert_eq!(e, Error::TopoJsonUnknownType),
_ => panic!(),
}
}
#[test]
fn list_names_objects() {
let topo = Topology {
arcs: vec![vec![vec![2.2, 2.2], vec![3.3, 3.3]]],
objects: vec![NamedGeometry {
name: String::from("example"),
geometry: Geometry {
value: Value::LineString(vec![0]),
bbox: None,
id: None,
properties: None,
foreign_members: None,
},
}],
bbox: None,
transform: None,
foreign_members: None,
};
let names = topo.list_names();
assert_eq!(names.len(), 1);
assert_eq!(names[0], "example");
}
#[test]
fn encode_decode_topology_arcs_object() {
let topo_json_str = "{\"arcs\":[[[2.2,2.2],[3.3,3.3]]],\"objects\":{\"example\":{\"arcs\":[0],\"type\":\"LineString\"}},\"type\":\"Topology\"}";
let topo = Topology {
arcs: vec![vec![vec![2.2, 2.2], vec![3.3, 3.3]]],
objects: vec![NamedGeometry {
name: String::from("example"),
geometry: Geometry {
value: Value::LineString(vec![0]),
bbox: None,
id: None,
properties: None,
foreign_members: None,
},
}],
bbox: None,
transform: None,
foreign_members: None,
};
// Test encode
let json_string = encode(&topo);
assert_eq!(json_string, topo_json_str);
// Test decode
let decoded_topo = match decode(json_string) {
TopoJson::Topology(t) => t,
_ => unreachable!(),
};
assert_eq!(decoded_topo, topo);
}
#[test]
fn encode_decode_topology_arcs_transform() {
let topo_json_str = "{\"arcs\":[[[2.2,2.2],[3.3,3.3]]],\"objects\":{},\"transform\":{\"scale\":[0.12,0.12],\"translate\":[1.1,1.1]},\"type\":\"Topology\"}";
let topo = Topology {
arcs: vec![vec![vec![2.2, 2.2], vec![3.3, 3.3]]],
objects: vec![],
bbox: None,
transform: Some(TransformParams {
scale: [0.12, 0.12],
translate: [1.1, 1.1],
}),
foreign_members: None,
};
// Test encode
let json_string = encode(&topo);
assert_eq!(json_string, topo_json_str);
// Test decode
let decoded_topo = match decode(json_string) {
TopoJson::Topology(t) => t,
_ => unreachable!(),
};
assert_eq!(decoded_topo, topo);
}
#[test]
fn encode_decode_topology_arcs_transform_geom_collection() {
let topo_json_str = "{\"arcs\":[[[2.2,2.2],[3.3,3.3]]],\"objects\":{\"example\":{\"geometries\":[{\"coordinates\":[100.0,0.0],\"properties\":{\"prop1\":1},\"type\":\"Point\"},{\"arcs\":[0],\"type\":\"LineString\"}],\"properties\":{\"prop0\":0},\"type\":\"GeometryCollection\"}},\"transform\":{\"scale\":[0.12,0.12],\"translate\":[1.1,1.1]},\"type\":\"Topology\"}";
// Properties for the geometry collection:
let mut properties0 = JsonObject::new();
properties0.insert(String::from("prop0"), serde_json::to_value(0).unwrap());
// Properties for one the geometry in the geometry collection:
let mut properties1 = JsonObject::new();
properties1.insert(String::from("prop1"), serde_json::to_value(1).unwrap());
let topo = Topology {
arcs: vec![vec![vec![2.2, 2.2], vec![3.3, 3.3]]],
objects: vec![NamedGeometry {
name: String::from("example"),
geometry: Geometry {
bbox: None,
id: None,
value: Value::GeometryCollection(vec![
Geometry {
bbox: None,
id: None,
value: Value::Point(vec![100.0, 0.0]),
properties: Some(properties1),
foreign_members: None,
},
Geometry {
bbox: None,
id: None,
value: Value::LineString(vec![0]),
properties: None,
foreign_members: None,
},
]),
properties: Some(properties0),
foreign_members: None,
},
}],
bbox: None,
foreign_members: None,
transform: Some(TransformParams {
scale: [0.12, 0.12],
translate: [1.1, 1.1],
}),
};
// Test encode
let json_string = encode(&topo);
assert_eq!(json_string, topo_json_str);
// Test decode
let decoded_topo = match decode(json_string) {
TopoJson::Topology(t) => t,
_ => unreachable!(),
};
assert_eq!(decoded_topo, topo);
}
#[test]
fn encode_decode_topology_example_quantized_specifications() {
let topo_json_str = "{\"arcs\":[[[4000.0,0.0],[1999.0,9999.0],[2000.0,-9999.0],[2000.0,9999.0]],[[0.0,0.0],[0.0,9999.0],[2000.0,0.0],[0.0,-9999.0],[-2000.0,0.0]]],\"objects\":{\"example\":{\"geometries\":[{\"coordinates\":[4000.0,5000.0],\"properties\":{\"prop0\":\"value0\"},\"type\":\"Point\"},{\"arcs\":[0],\"properties\":{\"prop0\":\"value0\",\"prop1\":0},\"type\":\"LineString\"},{\"arcs\":[[1]],\"properties\":{\"prop0\":\"value0\",\"prop1\":{\"this\":\"that\"}},\"type\":\"Polygon\"}],\"type\":\"GeometryCollection\"}},\"transform\":{\"scale\":[0.0005000500050005,0.0001000100010001],\"translate\":[100.0,0.0]},\"type\":\"Topology\"}";
// Properties for the 1st geometry of the geometry collection:
let mut properties0 = JsonObject::new();
properties0.insert(
String::from("prop0"),
serde_json::to_value("value0").unwrap(),
);
// Properties for the 2nd geometry of the geometry collection:
let mut properties1 = properties0.clone();
properties1.insert(String::from("prop1"), serde_json::to_value(0).unwrap());
// Properties for the 3rd geometry of the geometry collection:
let mut properties2 = properties0.clone();
let mut inner_prop = JsonObject::new();
inner_prop.insert(
String::from("this"),
serde_json::to_value(String::from("that")).unwrap(),
);
properties2.insert(
String::from("prop1"),
serde_json::to_value(inner_prop).unwrap(),
);
let topo = Topology {
bbox: None,
objects: vec![NamedGeometry {
name: String::from("example"),
geometry: Geometry {
bbox: None,
id: None,
value: Value::GeometryCollection(vec![
Geometry {
bbox: None,
id: None,
value: Value::Point(vec![4000.0, 5000.0]),
properties: Some(properties0),
foreign_members: None,
},
Geometry {
bbox: None,
id: None,
value: Value::LineString(vec![0]),
properties: Some(properties1),
foreign_members: None,
},
Geometry {
bbox: None,
id: None,
value: Value::Polygon(vec![vec![1]]),
properties: Some(properties2),
foreign_members: None,
},
]),
properties: None,
foreign_members: None,
},
}],
transform: Some(TransformParams {
scale: [0.0005000500050005, 0.0001000100010001],
translate: [100.0, 0.0],
}),
arcs: vec![
vec![
vec![4000.0, 0.0],
vec![1999.0, 9999.0],
vec![2000.0, -9999.0],
vec![2000.0, 9999.0],
],
vec![
vec![0.0, 0.0],
vec![0.0, 9999.0],
vec![2000.0, 0.0],
vec![0.0, -9999.0],
vec![-2000.0, 0.0],
],
],
foreign_members: None,
};
// Test encode
let json_string = encode(&topo);
assert_eq!(json_string, topo_json_str);
// Test decode
let decoded_topo = match decode(json_string) {
TopoJson::Topology(t) => t,
_ => unreachable!(),
};
assert_eq!(decoded_topo, topo);
}
}
|
/*-----------------------------------------------------------------------------
xlnx_scal_context_init: Initializes the scaler context with default values.
Parameters:
scal_ctx: Scaler context
-----------------------------------------------------------------------------*/
void xlnx_scal_context_init(XlnxScalerCtx *scal_ctx)
{
scal_ctx->scal_props.nb_outputs = 0;
scal_ctx->frames_out = 0;
scal_ctx->session_nb_outputs[0] = 0;
scal_ctx->session_nb_outputs[1] = 0;
scal_ctx->scal_props.out_width[0] = 1280;
scal_ctx->scal_props.out_height[0] = 720;
scal_ctx->scal_props.out_width[1] = 852;
scal_ctx->scal_props.out_height[1] = 480;
scal_ctx->scal_props.out_width[2] = 640;
scal_ctx->scal_props.out_height[2] = 360;
scal_ctx->scal_props.out_width[3] = 424;
scal_ctx->scal_props.out_height[3] = 240;
scal_ctx->scal_props.enable_pipeline = 0;
scal_ctx->scal_props.log_level = 0;
scal_ctx->scal_props.p_mixrate_session = 0;
scal_ctx->scal_props.latency_logging = 0;
scal_ctx->session_frame = 0;
scal_ctx->scal_props.fr_num = 25;
scal_ctx->scal_props.fr_den = 1;
for (int32_t i = 0; i < SCAL_MAX_ABR_CHANNELS; i++) {
memset (scal_ctx->out_frame[i], 0, sizeof (XmaFrame));
scal_ctx->out_frame[i]->data[0].buffer_type = XMA_DEVICE_BUFFER_TYPE;
strcpy(scal_ctx->scal_props.out_rate[i], "full");
}
return;
} |
package commands
import (
"encoding/json"
"fmt"
"github.com/NexonSU/telegram-go-chatbot/utils"
tele "gopkg.in/telebot.v3"
)
//Return message on /debug command
func Debug(context tele.Context) error {
err := utils.Bot.Delete(context.Message())
if err != nil {
return err
}
var message = context.Message()
if context.Message().ReplyTo != nil {
message = context.Message().ReplyTo
}
MarshalledMessage, _ := json.MarshalIndent(message, "", " ")
_, err = utils.Bot.Send(context.Sender(), fmt.Sprintf("<pre>%v</pre>", string(MarshalledMessage)))
return err
}
|
/**
* <p>Java class for CommonColumnDefnType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="CommonColumnDefnType">
* <complexContent>
* <extension base="{http://www.pharmml.org/2013/03/CommonTypes}PharmMLRootType">
* <attribute name="columnNum" use="required" type="{http://www.w3.org/2001/XMLSchema}positiveInteger" />
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@SuppressWarnings("deprecation")
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CommonColumnDefnType")
@XmlSeeAlso({
ColumnDefinition.class,
DataSetTableDefnType.class
})
public class CommonColumnDefinition
extends PharmMLRootType
{
@XmlAttribute(name = "columnNum", required = true) // TODO: must be positive
protected Integer columnNum;
/**
* Gets the value of the columnNum property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getColumnNum() {
return columnNum;
}
/**
* Sets the value of the columnNum property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setColumnNum(Integer value) {
this.columnNum = value;
}
/**
* Sets the value of the columnNum property.
*
* @param value
* allowed object is
* {@link BigInteger }
*
* @deprecated PharmML now uses xs:int values. Use {@link #setColumnNum(Integer)}.
*/
@Deprecated
public void setColumnNum(BigInteger value) {
this.columnNum = value.intValue();
}
@Override
protected List<TreeNode> listChildren() {
return Collections.emptyList();
}
@Override
public void accept(Visitor visitor) {
visitor.visit(this);
}
} |
// Knuth-Morris-Pratt substring search: prints the first index of substr in str,
// or -1 if it does not occur (e.g. str="abcabd", substr="abd" prints 3).
#include <iostream>
#include <string>
#include <vector>
using namespace std;
int main()
{
    string str, substr;
    cin >> str >> substr;
    // d[i] = length of the longest proper prefix of substr that is also a
    // suffix of substr[0..i] (the KMP failure function). Size it by the
    // pattern, not the text.
    vector<int> d(substr.length());
    d[0] = 0;
    for(int k = 0, i = 1; i < (int)substr.length(); i++)
    {
        while(k > 0 && substr[i] != substr[k])
            k = d[k - 1];
        if(substr[i] == substr[k])
            k++;
        d[i] = k;
    }
    // Scan the text, falling back through the failure function on mismatches.
    bool found = false;
    for(int k = 0, i = 0; i < (int)str.length(); i++)
    {
        while(k > 0 && substr[k] != str[i])
            k = d[k - 1];
        if(substr[k] == str[i])
            k++;
        if(k == (int)substr.length())
        {
            found = true;
            cout << i - (int)substr.length() + 1 << '\n'; // 0-based start of the match
            break;
        }
    }
    if(!found)
        cout << -1 << '\n';
    return 0;
}
|
import React from 'react'
import { ImgPhoto } from './components/img'
import Context from './context/context'
import { useStateGateway } from 'use-linked-state'
import { CropDefault } from './types and defaults/crop-default'
import { SourceDefault } from './types and defaults/source-default'
import { SrcManager } from './components/src-manager'
import { ImgSrcDefault } from './types and defaults/img-src-default'
import { CanvasSrcDefault } from './types and defaults/canvas-src-default'
import { Draw } from './components/draw'
export { Photo } from './components/photo'
export { Avatar } from './components/avatar'
// export { PhotoLoader } from './components/photo-loader'
export interface ProfilePhoto {
readonly className?: string
readonly src: string
}
const ProfilePhoto: React.FC<ProfilePhoto> = ({ className, src, children }) => {
const cropGateway = useStateGateway(CropDefault)
const sourceGateway = useStateGateway(SourceDefault)
const imgSrcGateway = useStateGateway(ImgSrcDefault)
const canvasSrcGateway = useStateGateway(CanvasSrcDefault)
return (
<div className={className}>
<Context.Provider
value={{
cropGateway,
sourceGateway,
imgSrcGateway,
canvasSrcGateway
}}
>
<SrcManager photoSrc={src} />
<ImgPhoto />
<Draw />
{children}
</Context.Provider>
</div>
)
}
export default ProfilePhoto
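// Hedged usage sketch (not part of the package source): the default export only
// needs a src (plus an optional className); the import specifier and image path
// are placeholders, and passing Avatar as a child is an assumption based on the
// children prop above.
//
//   import ProfilePhoto, { Avatar } from 'react-profile-photo'
//
//   const Example = () => (
//     <ProfilePhoto className="profile-editor" src="/images/me.jpg">
//       <Avatar />
//     </ProfilePhoto>
//   )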
|
def testTurbiniaTaskRunWrapperExceptionThrown(self):
self.setResults()
self.task.run = mock.MagicMock(side_effect=TurbiniaException)
new_result = self.task.run_wrapper(self.evidence.__dict__)
new_result = TurbiniaTaskResult.deserialize(new_result)
self.assertEqual(type(new_result), TurbiniaTaskResult)
self.assertIn('failed', new_result.status) |
// NewDecoder initializes a Decoder that will decode CBOR-encoded bytes from the
// given io.Reader.
//
// It sets the given address as the owner (can be `nil`).
//
func NewDecoder(r io.Reader, owner *common.Address) (*Decoder, error) {
decMode, err := cbor.DecOptions{}.DecModeWithTags(cborTagSet)
if err != nil {
return nil, err
}
return &Decoder{
decoder: decMode.NewDecoder(r),
owner: owner,
}, nil
} |
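// Hedged usage sketch (not part of the package): build a Decoder over an
// in-memory CBOR payload with no owner. The payload variable is assumed to
// exist at the call site.
//
//	dec, err := NewDecoder(bytes.NewReader(payload), nil)
//	if err != nil {
//		return err
//	}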
# This file is part of the faebryk project
# SPDX-License-Identifier: MIT
import faebryk.library.traits.component
import faebryk.library.traits.footprint
import faebryk.library.traits.interface
import faebryk.library.traits.link
import faebryk.library.traits.parameter
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.paloaltonetworks.ngfw.implementation;
import com.azure.core.http.rest.Response;
import com.azure.core.management.SystemData;
import com.azure.core.util.Context;
import com.azure.resourcemanager.paloaltonetworks.ngfw.fluent.models.LocalRulesResourceInner;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.ActionEnum;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.BooleanEnum;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.Category;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.DecryptionRuleTypeEnum;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.DestinationAddr;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.LocalRulesResource;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.ProvisioningState;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.RuleCounter;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.RuleCounterReset;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.SourceAddr;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.StateEnum;
import com.azure.resourcemanager.paloaltonetworks.ngfw.models.TagInfo;
import java.util.Collections;
import java.util.List;
public final class LocalRulesResourceImpl
implements LocalRulesResource, LocalRulesResource.Definition, LocalRulesResource.Update {
private LocalRulesResourceInner innerObject;
private final com.azure.resourcemanager.paloaltonetworks.ngfw.PaloAltoNetworksNgfwManager serviceManager;
public String id() {
return this.innerModel().id();
}
public String name() {
return this.innerModel().name();
}
public String type() {
return this.innerModel().type();
}
public SystemData systemData() {
return this.innerModel().systemData();
}
public String etag() {
return this.innerModel().etag();
}
public String ruleName() {
return this.innerModel().ruleName();
}
public Integer priority() {
return this.innerModel().priority();
}
public String description() {
return this.innerModel().description();
}
public StateEnum ruleState() {
return this.innerModel().ruleState();
}
public SourceAddr source() {
return this.innerModel().source();
}
public BooleanEnum negateSource() {
return this.innerModel().negateSource();
}
public DestinationAddr destination() {
return this.innerModel().destination();
}
public BooleanEnum negateDestination() {
return this.innerModel().negateDestination();
}
public List<String> applications() {
List<String> inner = this.innerModel().applications();
if (inner != null) {
return Collections.unmodifiableList(inner);
} else {
return Collections.emptyList();
}
}
public Category category() {
return this.innerModel().category();
}
public String protocol() {
return this.innerModel().protocol();
}
public List<String> protocolPortList() {
List<String> inner = this.innerModel().protocolPortList();
if (inner != null) {
return Collections.unmodifiableList(inner);
} else {
return Collections.emptyList();
}
}
public String inboundInspectionCertificate() {
return this.innerModel().inboundInspectionCertificate();
}
public String auditComment() {
return this.innerModel().auditComment();
}
public ActionEnum actionType() {
return this.innerModel().actionType();
}
public StateEnum enableLogging() {
return this.innerModel().enableLogging();
}
public DecryptionRuleTypeEnum decryptionRuleType() {
return this.innerModel().decryptionRuleType();
}
public List<TagInfo> tags() {
List<TagInfo> inner = this.innerModel().tags();
if (inner != null) {
return Collections.unmodifiableList(inner);
} else {
return Collections.emptyList();
}
}
public ProvisioningState provisioningState() {
return this.innerModel().provisioningState();
}
public String resourceGroupName() {
return resourceGroupName;
}
public LocalRulesResourceInner innerModel() {
return this.innerObject;
}
private com.azure.resourcemanager.paloaltonetworks.ngfw.PaloAltoNetworksNgfwManager manager() {
return this.serviceManager;
}
private String resourceGroupName;
private String localRulestackName;
private String priority;
public LocalRulesResourceImpl withExistingLocalRulestack(String resourceGroupName, String localRulestackName) {
this.resourceGroupName = resourceGroupName;
this.localRulestackName = localRulestackName;
return this;
}
public LocalRulesResource create() {
this.innerObject =
serviceManager
.serviceClient()
.getLocalRules()
.createOrUpdate(resourceGroupName, localRulestackName, priority, this.innerModel(), Context.NONE);
return this;
}
public LocalRulesResource create(Context context) {
this.innerObject =
serviceManager
.serviceClient()
.getLocalRules()
.createOrUpdate(resourceGroupName, localRulestackName, priority, this.innerModel(), context);
return this;
}
LocalRulesResourceImpl(
String name, com.azure.resourcemanager.paloaltonetworks.ngfw.PaloAltoNetworksNgfwManager serviceManager) {
this.innerObject = new LocalRulesResourceInner();
this.serviceManager = serviceManager;
this.priority = name;
}
public LocalRulesResourceImpl update() {
return this;
}
public LocalRulesResource apply() {
this.innerObject =
serviceManager
.serviceClient()
.getLocalRules()
.createOrUpdate(resourceGroupName, localRulestackName, priority, this.innerModel(), Context.NONE);
return this;
}
public LocalRulesResource apply(Context context) {
this.innerObject =
serviceManager
.serviceClient()
.getLocalRules()
.createOrUpdate(resourceGroupName, localRulestackName, priority, this.innerModel(), context);
return this;
}
LocalRulesResourceImpl(
LocalRulesResourceInner innerObject,
com.azure.resourcemanager.paloaltonetworks.ngfw.PaloAltoNetworksNgfwManager serviceManager) {
this.innerObject = innerObject;
this.serviceManager = serviceManager;
this.resourceGroupName = Utils.getValueFromIdByName(innerObject.id(), "resourceGroups");
this.localRulestackName = Utils.getValueFromIdByName(innerObject.id(), "localRulestacks");
this.priority = Utils.getValueFromIdByName(innerObject.id(), "localRules");
}
public LocalRulesResource refresh() {
this.innerObject =
serviceManager
.serviceClient()
.getLocalRules()
.getWithResponse(resourceGroupName, localRulestackName, priority, Context.NONE)
.getValue();
return this;
}
public LocalRulesResource refresh(Context context) {
this.innerObject =
serviceManager
.serviceClient()
.getLocalRules()
.getWithResponse(resourceGroupName, localRulestackName, priority, context)
.getValue();
return this;
}
public Response<RuleCounter> getCountersWithResponse(String firewallName, Context context) {
return serviceManager
.localRules()
.getCountersWithResponse(resourceGroupName, localRulestackName, priority, firewallName, context);
}
public RuleCounter getCounters() {
return serviceManager.localRules().getCounters(resourceGroupName, localRulestackName, priority);
}
public Response<Void> refreshCountersWithResponse(String firewallName, Context context) {
return serviceManager
.localRules()
.refreshCountersWithResponse(resourceGroupName, localRulestackName, priority, firewallName, context);
}
public void refreshCounters() {
serviceManager.localRules().refreshCounters(resourceGroupName, localRulestackName, priority);
}
public Response<RuleCounterReset> resetCountersWithResponse(String firewallName, Context context) {
return serviceManager
.localRules()
.resetCountersWithResponse(resourceGroupName, localRulestackName, priority, firewallName, context);
}
public RuleCounterReset resetCounters() {
return serviceManager.localRules().resetCounters(resourceGroupName, localRulestackName, priority);
}
public LocalRulesResourceImpl withRuleName(String ruleName) {
this.innerModel().withRuleName(ruleName);
return this;
}
public LocalRulesResourceImpl withTags(List<TagInfo> tags) {
this.innerModel().withTags(tags);
return this;
}
public LocalRulesResourceImpl withEtag(String etag) {
this.innerModel().withEtag(etag);
return this;
}
public LocalRulesResourceImpl withDescription(String description) {
this.innerModel().withDescription(description);
return this;
}
public LocalRulesResourceImpl withRuleState(StateEnum ruleState) {
this.innerModel().withRuleState(ruleState);
return this;
}
public LocalRulesResourceImpl withSource(SourceAddr source) {
this.innerModel().withSource(source);
return this;
}
public LocalRulesResourceImpl withNegateSource(BooleanEnum negateSource) {
this.innerModel().withNegateSource(negateSource);
return this;
}
public LocalRulesResourceImpl withDestination(DestinationAddr destination) {
this.innerModel().withDestination(destination);
return this;
}
public LocalRulesResourceImpl withNegateDestination(BooleanEnum negateDestination) {
this.innerModel().withNegateDestination(negateDestination);
return this;
}
public LocalRulesResourceImpl withApplications(List<String> applications) {
this.innerModel().withApplications(applications);
return this;
}
public LocalRulesResourceImpl withCategory(Category category) {
this.innerModel().withCategory(category);
return this;
}
public LocalRulesResourceImpl withProtocol(String protocol) {
this.innerModel().withProtocol(protocol);
return this;
}
public LocalRulesResourceImpl withProtocolPortList(List<String> protocolPortList) {
this.innerModel().withProtocolPortList(protocolPortList);
return this;
}
public LocalRulesResourceImpl withInboundInspectionCertificate(String inboundInspectionCertificate) {
this.innerModel().withInboundInspectionCertificate(inboundInspectionCertificate);
return this;
}
public LocalRulesResourceImpl withAuditComment(String auditComment) {
this.innerModel().withAuditComment(auditComment);
return this;
}
public LocalRulesResourceImpl withActionType(ActionEnum actionType) {
this.innerModel().withActionType(actionType);
return this;
}
public LocalRulesResourceImpl withEnableLogging(StateEnum enableLogging) {
this.innerModel().withEnableLogging(enableLogging);
return this;
}
public LocalRulesResourceImpl withDecryptionRuleType(DecryptionRuleTypeEnum decryptionRuleType) {
this.innerModel().withDecryptionRuleType(decryptionRuleType);
return this;
}
}
|
export function parseProps(props?: Record<string,unknown> | null) {
if (!props) {
return ''
}
return Object.keys(props)
.map((name: string) => ` ${name}="${props[name]}"`).join('')
} |
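// Hedged usage sketch (not in the original file): parseProps flattens a props
// record into an HTML-style attribute string, with a leading space per entry.
//
//   parseProps({ id: 'photo', width: 120 })  // ' id="photo" width="120"'
//   parseProps(null)                         // ''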
Ocular findings in 34 patients with Alport syndrome: correlation of the findings to mutations in COL4A5 gene.
PURPOSE
To describe the incidence and type of ocular findings of 34 patients with Alport syndrome and to analyze the association of gene defect in COL4A5 gene to ocular abnormalities found.
METHODS
A nationwide search of Alport syndrome patients was performed in Finland, and patients were invited to take part in a thorough ophthalmologic investigation.
RESULTS
A total of 34 Alport syndrome patients from 14 different pedigrees were examined, and ocular abnormalities were found in 32% of them. The visual acuities were normal except in 4 of the 34 patients. Six individuals had retinal flecks and 4 men had anterior lenticonus. In 57% of the pedigrees the defect in COL4A5 gene was known.
CONCLUSION
Ocular abnormalities were rare in childhood and increased with age. There was no correlation between the type of mutation and the type of ocular changes. In addition, the penetrance of the ocular findings varied considerably within most families. |
<reponame>DVSR1966/par4all
/* Impact of dynamic aliasing */
int pointer03()
{
int i = 3;
int * ip = &i;
*ip = 4;
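/* ip aliases i, so the assignment above changes i and the function returns 4 */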
return i;
}
|
# ptf --test-dir saitests copp_tests --qlen=100000 --platform nn -t "verbose=True;target_port=3" --device-socket 0-3@tcp://127.0.0.1:10900 --device-socket 1-3@tcp://10.3.147.47:10900
# or
# ptf --test-dir saitests copp_tests --qlen=100000 --platform nn -t "verbose=True;target_port=10" --device-socket 0-10@tcp://127.0.0.1:10900 --device-socket 1-10@tcp://10.3.147.47:10900
#
# copp_test.${name_test}
#
# ARPTest
# DHCPTest
# DHCPTopoT1Test
# LLDPTest
# BGPTest
# LACPTest
# SNMPTest
# SSHTest
# IP2METest
# DefaultTest
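#
# In the device-socket examples above, device 0 is the local ptf_nn_agent
# (127.0.0.1) and device 1 is the remote agent on the DUT side: the tests
# send on (0, target_port) and count what arrives on (1, target_port).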
import datetime
import os
import ptf
import signal
import threading
import time
import ptf.testutils as testutils
from ptf.base_tests import BaseTest
from ptf import config
class ControlPlaneBaseTest(BaseTest):
MAX_PORTS = 128
PPS_LIMIT = 600
PPS_LIMIT_MIN = PPS_LIMIT * 0.9
PPS_LIMIT_MAX = PPS_LIMIT * 1.3
NO_POLICER_LIMIT = PPS_LIMIT * 1.4
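# With PPS_LIMIT = 600 these derive to PPS_LIMIT_MIN = 540, PPS_LIMIT_MAX = 780
# and NO_POLICER_LIMIT = 840: policed traps must land within [540, 780] pps,
# while unpoliced traps are expected to exceed 840 pps.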
TARGET_PORT = "3" # Historically we have port 3 as a target port
TASK_TIMEOUT = 300 # Wait up to 5 minutes for tasks to complete
DEFAULT_PRE_SEND_INTERVAL_SEC = 1
DEFAULT_SEND_INTERVAL_SEC = 10
DEFAULT_RECEIVE_WAIT_TIME = 3
DEFAULT_SERVER_SEND_RATE_LIMIT_PPS = 2000
def __init__(self):
BaseTest.__init__(self)
self.log_fp = open('/tmp/copp.log', 'a')
test_params = testutils.test_params_get()
self.verbose = 'verbose' in test_params and test_params['verbose']
target_port_str = test_params.get('target_port', self.TARGET_PORT)
self.target_port = int(target_port_str)
self.timeout_thr = None
self.myip = test_params.get('myip', None)
self.peerip = test_params.get('peerip', None)
self.needPreSend = None
def log(self, message, debug=False):
current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
if (debug and self.verbose) or (not debug):
print("%s : %s" % (current_time, message))
self.log_fp.write("%s : %s\n" % (current_time, message))
def setUp(self):
self.dataplane = ptf.dataplane_instance
self.my_mac = {}
self.peer_mac = {}
for port_id, port in self.dataplane.ports.iteritems():
if port_id[0] == 0:
self.my_mac[port_id[1]] = port.mac()
elif port_id[0] == 1:
self.peer_mac[port_id[1]] = port.mac()
else:
assert False, "Unexpected device ID: %d" % port_id[0]
self.dataplane.flush()
if config["log_dir"] is not None:
filename = os.path.join(config["log_dir"], str(self)) + ".pcap"
self.dataplane.start_pcap(filename)
def tearDown(self):
if config["log_dir"] is not None:
self.dataplane.stop_pcap()
self.log_fp.close()
def timeout(self, seconds, message):
def timeout_exception(self, message):
self.log('Timeout is reached: %s' % message)
self.tearDown()
os.kill(os.getpid(), signal.SIGINT)
if self.timeout_thr is None:
self.timeout_thr = threading.Timer(seconds, timeout_exception, args=(self, message))
self.timeout_thr.start()
else:
raise Exception("Timeout already set")
def cancel_timeout(self):
if self.timeout_thr is not None:
self.timeout_thr.cancel()
self.timeout_thr = None
def copp_test(self, packet, send_intf, recv_intf):
'''
Send the given packet at full rate on send_intf for DEFAULT_SEND_INTERVAL_SEC seconds
and count how many copies arrive on recv_intf. When needPreSend is set, packets are
first sent for one second to absorb the policer's CBS (burst) capacity.
'''
if self.needPreSend:
pre_send_count = 0
end_time = datetime.datetime.now() + datetime.timedelta(seconds=self.DEFAULT_PRE_SEND_INTERVAL_SEC)
while datetime.datetime.now() < end_time:
testutils.send_packet(self, send_intf, packet)
pre_send_count += 1
rcv_pkt_cnt = testutils.count_matched_packets(self, packet, recv_intf[1], recv_intf[0], timeout=0.01)
self.log("Send %d and receive %d packets in the first second (PolicyTest)" % (pre_send_count, rcv_pkt_cnt))
self.dataplane.flush()
pre_test_ptf_tx_counter = self.dataplane.get_counters(*send_intf)
pre_test_ptf_rx_counter = self.dataplane.get_counters(*recv_intf)
pre_test_nn_tx_counter = self.dataplane.get_nn_counters(*send_intf)
pre_test_nn_rx_counter = self.dataplane.get_nn_counters(*recv_intf)
start_time = datetime.datetime.now()
end_time = datetime.datetime.now() + datetime.timedelta(seconds=self.DEFAULT_SEND_INTERVAL_SEC)
send_count = 0
while datetime.datetime.now() < end_time:
testutils.send_packet(self, send_intf, packet)
send_count += 1
# Depending on the server/platform combination it is possible for the server to
# overwhelm the DUT, so we add an artificial delay here to rate-limit the server.
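# With DEFAULT_SERVER_SEND_RATE_LIMIT_PPS = 2000 this sleeps 0.5 ms per packet,
# capping the sender at roughly 2000 pps.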
time.sleep(1.0 / self.DEFAULT_SERVER_SEND_RATE_LIMIT_PPS)
self.log("Sent out %d packets in %ds" % (send_count, self.DEFAULT_SEND_INTERVAL_SEC))
time.sleep(self.DEFAULT_RECEIVE_WAIT_TIME) # Wait a little bit for all the packets to make it through
recv_count = testutils.count_matched_packets(self, packet, recv_intf[1], recv_intf[0])
post_test_ptf_tx_counter = self.dataplane.get_counters(*send_intf)
post_test_ptf_rx_counter = self.dataplane.get_counters(*recv_intf)
post_test_nn_tx_counter = self.dataplane.get_nn_counters(*send_intf)
post_test_nn_rx_counter = self.dataplane.get_nn_counters(*recv_intf)
ptf_tx_count = int(post_test_ptf_tx_counter[1] - pre_test_ptf_tx_counter[1])
nn_tx_count = int(post_test_nn_tx_counter[1] - pre_test_nn_tx_counter[1])
ptf_rx_count = int(post_test_ptf_rx_counter[0] - pre_test_ptf_rx_counter[0])
nn_rx_count = int(post_test_nn_rx_counter[0] - pre_test_nn_rx_counter[0])
self.log("", True)
self.log("Counters before the test:", True)
self.log("If counter (0, n): %s" % str(pre_test_ptf_tx_counter), True)
self.log("NN counter (0, n): %s" % str(pre_test_nn_tx_counter), True)
self.log("If counter (1, n): %s" % str(pre_test_ptf_rx_counter), True)
self.log("NN counter (1, n): %s" % str(pre_test_nn_rx_counter), True)
self.log("", True)
self.log("Counters after the test:", True)
self.log("If counter (0, n): %s" % str(post_test_ptf_tx_counter), True)
self.log("NN counter (0, n): %s" % str(post_test_nn_tx_counter), True)
self.log("If counter (1, n): %s" % str(post_test_ptf_rx_counter), True)
self.log("NN counter (1, n): %s" % str(post_test_nn_rx_counter), True)
self.log("")
self.log("Sent through NN to local ptf_nn_agent: %d" % ptf_tx_count)
self.log("Sent through If to remote ptf_nn_agent: %d" % nn_tx_count)
self.log("Recv from If on remote ptf_nn_agent: %d" % ptf_rx_count)
self.log("Recv from NN on from remote ptf_nn_agent: %d" % nn_rx_count)
time_delta = end_time - start_time
time_delta_ms = (time_delta.microseconds + time_delta.seconds * 10**6) / 1000
tx_pps = int(send_count / (float(time_delta_ms) / 1000))
rx_pps = int(recv_count / (float(time_delta_ms) / 1000))
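# Example: 20000 packets counted over a 10000 ms window gives 2000 pps.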
return send_count, recv_count, time_delta, time_delta_ms, tx_pps, rx_pps
def contruct_packet(self, port_number):
raise NotImplementedError
def check_constraints(self, send_count, recv_count, time_delta_ms, rx_pps):
raise NotImplementedError
def one_port_test(self, port_number):
packet = self.contruct_packet(port_number)
send_count, recv_count, time_delta, time_delta_ms, tx_pps, rx_pps = \
self.copp_test(str(packet), (0, port_number), (1, port_number))
self.printStats(send_count, recv_count, time_delta, tx_pps, rx_pps)
self.check_constraints(send_count, recv_count, time_delta_ms, rx_pps)
# FIXME: better to make this a decorator
def run_suite(self):
self.timeout(self.TASK_TIMEOUT, "The test case hasn't been completed in %d seconds" % self.TASK_TIMEOUT)
self.one_port_test(self.target_port)
self.cancel_timeout()
def printStats(self, pkt_send_count, recv_count, time_delta, tx_pps, rx_pps):
self.log("")
self.log('test stats')
self.log('Packet sent = %10d' % pkt_send_count)
self.log('Packet rcvd = %10d' % recv_count)
self.log('Test time = %s' % str(time_delta))
self.log('TX PPS = %d' % tx_pps)
self.log('RX PPS = %d' % rx_pps)
class NoPolicyTest(ControlPlaneBaseTest):
def __init__(self):
ControlPlaneBaseTest.__init__(self)
self.needPreSend = False
def check_constraints(self, send_count, recv_count, time_delta_ms, rx_pps):
pkt_rx_limit = send_count * 0.90
self.log("")
self.log("Checking constraints (NoPolicy):")
self.log(
"rx_pps (%d) > NO_POLICER_LIMIT (%d): %s" %
(int(rx_pps), int(self.NO_POLICER_LIMIT), str(rx_pps > self.NO_POLICER_LIMIT))
)
self.log(
"recv_count (%d) > pkt_rx_limit (%d): %s" %
(int(recv_count), int(pkt_rx_limit), str(recv_count > pkt_rx_limit))
)
assert(rx_pps > self.NO_POLICER_LIMIT)
assert(recv_count > pkt_rx_limit)
class PolicyTest(ControlPlaneBaseTest):
def __init__(self):
ControlPlaneBaseTest.__init__(self)
self.needPreSend = True
def check_constraints(self, send_count, recv_count, time_delta_ms, rx_pps):
self.log("")
self.log("Checking constraints (PolicyApplied):")
self.log(
"PPS_LIMIT_MIN (%d) <= rx_pps (%d) <= PPS_LIMIT_MAX (%d): %s" %
(int(self.PPS_LIMIT_MIN),
int(rx_pps),
int(self.PPS_LIMIT_MAX),
str(self.PPS_LIMIT_MIN <= rx_pps <= self.PPS_LIMIT_MAX))
)
assert(self.PPS_LIMIT_MIN <= rx_pps <= self.PPS_LIMIT_MAX)
# SONIC config contains policer CIR=600 for ARP
class ARPTest(PolicyTest):
def __init__(self):
PolicyTest.__init__(self)
def runTest(self):
self.log("ARPTest")
self.run_suite()
def contruct_packet(self, port_number):
src_mac = self.my_mac[port_number]
src_ip = self.myip
dst_ip = self.peerip
packet = testutils.simple_arp_packet(
eth_dst='ff:ff:ff:ff:ff:ff',
eth_src=src_mac,
arp_op=1,
ip_snd=src_ip,
ip_tgt=dst_ip,
hw_snd=src_mac,
hw_tgt='ff:ff:ff:ff:ff:ff'
)
return packet
# SONIC configuration does not trap DHCP packets to the CPU in the T1 topology
class DHCPTopoT1Test(PolicyTest):
def __init__(self):
PolicyTest.__init__(self)
# In the T1 topology no DHCP packets reach the CPU, so the expected policed rate is 0
self.PPS_LIMIT_MIN = 0
self.PPS_LIMIT_MAX = 0
def runTest(self):
self.log("DHCPTopoT1Test")
self.run_suite()
def contruct_packet(self, port_number):
src_mac = self.my_mac[port_number]
packet = testutils.simple_udp_packet(
pktlen=100,
eth_dst='ff:ff:ff:ff:ff:ff',
eth_src=src_mac,
dl_vlan_enable=False,
vlan_vid=0,
vlan_pcp=0,
dl_vlan_cfi=0,
ip_src='0.0.0.0',
ip_dst='255.255.255.255',
ip_tos=0,
ip_ttl=64,
udp_sport=68,
udp_dport=67,
ip_ihl=None,
ip_options=False,
with_udp_chksum=True
)
return packet
# SONIC configuration has no policer limiting for DHCP
class DHCPTest(NoPolicyTest):
def __init__(self):
NoPolicyTest.__init__(self)
def runTest(self):
self.log("DHCPTest")
self.run_suite()
def contruct_packet(self, port_number):
src_mac = self.my_mac[port_number]
packet = testutils.simple_udp_packet(
pktlen=100,
eth_dst='ff:ff:ff:ff:ff:ff',
eth_src=src_mac,
dl_vlan_enable=False,
vlan_vid=0,
vlan_pcp=0,
dl_vlan_cfi=0,
ip_src='0.0.0.0',
ip_dst='255.255.255.255',
ip_tos=0,
ip_ttl=64,
udp_sport=68,
udp_dport=67,
ip_ihl=None,
ip_options=False,
with_udp_chksum=True
)
return packet
# SONIC configuration has no policer limiting for LLDP
class LLDPTest(NoPolicyTest):
def __init__(self):
NoPolicyTest.__init__(self)
def runTest(self):
self.log("LLDPTest")
self.run_suite()
def contruct_packet(self, port_number):
src_mac = self.my_mac[port_number]
packet = testutils.simple_eth_packet(
eth_dst='01:80:c2:00:00:0e',
eth_src=src_mac,
eth_type=0x88cc
)
return packet
# SONIC configuration has no policer limiting for UDLD
class UDLDTest(NoPolicyTest):
def __init__(self):
NoPolicyTest.__init__(self)
def runTest(self):
self.log("UDLDTest")
self.run_suite()
# UDLD uses the Ethernet multicast address 01-00-0c-cc-cc-cc
# as its destination MAC address. For an Ethernet 802.3 frame,
# eth_type carries the length of the data, so pktlen
# = 117 = 103 (0x67) + 6 (dst MAC) + 6 (src MAC) + 2 (len).
def contruct_packet(self, port_number):
src_mac = self.my_mac[port_number]
packet = testutils.simple_eth_packet(
pktlen=117,
eth_dst='01:00:0c:cc:cc:cc',
eth_src=src_mac,
eth_type=0x0067
)
return packet
# SONIC configuration has no policer limiting for BGP
class BGPTest(NoPolicyTest):
def __init__(self):
NoPolicyTest.__init__(self)
def runTest(self):
self.log("BGPTest")
self.run_suite()
def contruct_packet(self, port_number):
dst_mac = self.peer_mac[port_number]
dst_ip = self.peerip
packet = testutils.simple_tcp_packet(
eth_dst=dst_mac,
ip_dst=dst_ip,
ip_ttl=1,
tcp_dport=179
)
return packet
# SONIC configuration has no policer limiting for LACP
class LACPTest(NoPolicyTest):
def __init__(self):
NoPolicyTest.__init__(self)
def runTest(self):
self.log("LACPTest")
self.run_suite()
def contruct_packet(self, port_number):
packet = testutils.simple_eth_packet(
pktlen=14,
eth_dst='01:80:c2:00:00:02',
eth_type=0x8809
) / (chr(0x01)*50)
return packet
# SNMP packets are trapped as IP2ME packets.
# IP2ME configuration in SONIC contains policer CIR=600
class SNMPTest(PolicyTest): # FIXME: trapped as IP2ME; Mellanox should add support for an SNMP trap
def __init__(self):
PolicyTest.__init__(self)
def runTest(self):
self.log("SNMPTest")
self.run_suite()
def contruct_packet(self, port_number):
src_mac = self.my_mac[port_number]
dst_mac = self.peer_mac[port_number]
dst_ip = self.peerip
packet = testutils.simple_udp_packet(
eth_dst=dst_mac,
ip_dst=dst_ip,
eth_src=src_mac,
udp_dport=161
)
return packet
# SONIC config contains policer CIR=600 for SSH
class SSHTest(PolicyTest):
def __init__(self):
PolicyTest.__init__(self)
def runTest(self):
self.log("SSHTest")
self.run_suite()
def contruct_packet(self, port_number):
dst_mac = self.peer_mac[port_number]
src_ip = self.myip
dst_ip = self.peerip
packet = testutils.simple_tcp_packet(
eth_dst=dst_mac,
ip_dst=dst_ip,
ip_src=src_ip,
tcp_flags='F',
tcp_sport=22,
tcp_dport=22
)
return packet
# IP2ME configuration in SONIC contains policer CIR=600
class IP2METest(PolicyTest):
def __init__(self):
PolicyTest.__init__(self)
def runTest(self):
self.log("IP2METest")
self.run_suite()
def one_port_test(self, port_number):
for port in self.dataplane.ports.iterkeys():
if port[0] == 0:
continue
packet = self.contruct_packet(port[1])
send_count, recv_count, time_delta, time_delta_ms, tx_pps, rx_pps = \
self.copp_test(str(packet), (0, port_number), (1, port_number))
self.printStats(send_count, recv_count, time_delta, tx_pps, rx_pps)
self.check_constraints(send_count, recv_count, time_delta_ms, rx_pps)
def contruct_packet(self, port_number):
src_mac = self.my_mac[port_number]
dst_mac = self.peer_mac[port_number]
dst_ip = self.peerip
packet = testutils.simple_tcp_packet(
eth_src=src_mac,
eth_dst=dst_mac,
ip_dst=dst_ip
)
return packet
class DefaultTest(PolicyTest):
def __init__(self):
PolicyTest.__init__(self)
def runTest(self):
self.log("DefaultTest")
self.run_suite()
def contruct_packet(self, port_number):
dst_mac = self.peer_mac[port_number]
src_ip = self.myip
dst_ip = self.peerip
packet = testutils.simple_tcp_packet(
eth_dst=dst_mac,
ip_dst=dst_ip,
ip_src=src_ip,
tcp_sport=10000,
tcp_dport=10000,
ip_ttl=1
)
return packet
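# Illustrative only, not part of the test suite: a hypothetical, self-contained
# sketch of the rate math and policer bounds exercised above, assuming the default
# 10-second send window and a 600 pps policer.
def _example_policer_bounds(send_count, recv_count, window_ms=10000, pps_limit=600):
    """Hypothetical helper mirroring the tx/rx PPS computation and the PolicyTest bounds."""
    tx_pps = int(send_count / (float(window_ms) / 1000))
    rx_pps = int(recv_count / (float(window_ms) / 1000))
    within_policer_bounds = pps_limit * 0.9 <= rx_pps <= pps_limit * 1.3
    return tx_pps, rx_pps, within_policer_bounds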
|