content
stringlengths 10
4.9M
|
---|
/* Add two (positive) bignums */
/* Destructively computes dst += src, one digit cell at a time, least
 * significant digit first.  'x' always trails one cell behind 'dst' so a
 * final carry can be appended as a new most-significant digit cell.
 * NOTE(review): POSDIG appears to mask a tag/sign bit off the first
 * digit -- confirm against the macro definitions. */
void bigAdd(any dst, any src) {
   any x;
   word n, carry;
   ASSERT(isBig(dst) && isBig(src));
   /* Add the first digits; carry is set iff the unsigned sum wrapped */
   carry = (POSDIG(unDigBig(src)) > num(setDig(dst, POSDIG(unDigBig(src)) + POSDIG(unDigBig(dst)))));
   src = cdr(numCell(src));
   dst = cdr(numCell(x = dst));
   for (;;) {
      if (!isNum(src)) {
         /* 'src' is exhausted: only propagate the carry through 'dst' */
         while (isNum(dst)) {
            if (!carry)
               return;
            /* carry survives only if the increment wrapped to zero */
            carry = 0 == num(setDig(dst, 1 + unDigBig(dst)));
            dst = cdr(numCell(x = dst));
         }
         break;
      }
      if (!isNum(dst)) {
         /* 'dst' is exhausted: append the remaining 'src' digits + carry */
         do {
            carry = unDigBig(src) > (n = carry + unDigBig(src));
            x = cdr(numCell(x)) = BOX(n);
         } while (isNum(src = cdr(numCell(src))));
         break;
      }
      /* Both numbers still have digits: add with carry.  If carry+src
       * itself wraps (n < carry), the digit contribution is zero with
       * carry staying 1, so 'dst' is deliberately left unchanged. */
      if ((n = carry + unDigBig(src)) >= carry) {
         carry = unDigBig(dst) > (n += unDigBig(dst));
         setDig(dst,n);
      }
      src = cdr(numCell(src));
      dst = cdr(numCell(x = dst));
   }
   /* Overflow out of the top digit: grow the number by one cell */
   if (carry)
      cdr(numCell(x)) = BOX(1);
}
/**
*
* @author pi
* @param <T>
*/
public class ComboBoxDialogBuilder<T extends IItemLabel>
extends AbstractDialogBuilder<ComboBoxDialogBuilder<T>, ComboBoxDialog<T>> {
private final List<T> content;
private TerminalSize comboBoxSize;
public ComboBoxDialogBuilder() {
super("ComboBoxDialog");
this.comboBoxSize = null;
this.content = new ArrayList<>();
this.description = "";
}
@Override
protected ComboBoxDialogBuilder<T> self() {
return this;
}
@Override
protected ComboBoxDialog<T> buildDialog() {
return new ComboBoxDialog<>(
title,
description,
this.comboBoxSize,
this.content);
}
/**
* Sets the size of the list box in the dialog, scrollbars will be used if
* there is not enough space to draw all items. If set to {@code null}, the
* dialog will ask for enough space to be able to draw all items.
*
* @param listBoxSize Size of the list box in the dialog
* @return Itself
*/
public ComboBoxDialogBuilder<T> setListBoxSize(TerminalSize listBoxSize) {
this.comboBoxSize = listBoxSize;
return this;
}
/**
* Size of the list box in the dialog or {@code null} if the dialog will ask
* for enough space to draw all items
*
* @return Size of the list box in the dialog or {@code null} if the dialog
* will ask for enough space to draw all items
*/
public TerminalSize getListBoxSize() {
return comboBoxSize;
}
/**
* Adds an item to the list box at the end
*
* @param item Item to add to the list box
* @return Itself
*/
public ComboBoxDialogBuilder<T> addListItem(T item) {
this.content.add(item);
return this;
}
/**
* Adds a list of items to the list box at the end, in the order they are
* passed in
*
* @param items Items to add to the list box
* @return Itself
*/
@SafeVarargs
public final ComboBoxDialogBuilder<T> addListItems(T... items) {
this.content.addAll(Arrays.asList(items));
return this;
}
/**
* Returns a copy of the list of items in the list box
*
* @return Copy of the list of items in the list box
*/
public List<T> getListItems() {
return new ArrayList<>(content);
}
} |
<reponame>nickdobson/xina-ts
import { XUser } from '../../element'
import { toSpecifier } from '../api'
import { XAction } from './action'
/**
 * Base class for "alter" API actions.  Concrete subclasses supply the target
 * kind (getAlter), the operation (getOp) and any operation-specific payload
 * (buildRestRest); buildRest merges all three into one request body.
 */
abstract class XAlterAction extends XAction<void> {
  getAction() {
    return 'alter'
  }

  abstract getAlter(): string

  abstract getOp(): string

  abstract buildRestRest(pretty: boolean): Record<string, unknown>

  buildRest(pretty: boolean) {
    const head = { alter: this.getAlter(), op: this.getOp() }
    return Object.assign(head, this.buildRestRest(pretty))
  }
}
/**
 * Base class for alter actions that target a user record.  The user may be
 * given as an XUser element, a name string, or a numeric id.
 */
abstract class XAlterUserAction extends XAlterAction {
  user?: XUser | string | number

  /** Sets the target user; returns this for chaining. */
  setUser(user: XUser | string | number) {
    this.user = user
    return this
  }

  getAlter() {
    return 'user'
  }
}
/**
 * Alter action that replaces a user's "objects" payload.  setObjects takes a
 * shallow copy so later mutation of the caller's record does not leak in.
 */
export class XAlterUserObjectsAction extends XAlterUserAction {
  objects?: Record<string, unknown>

  getOp() {
    return 'objects'
  }

  /** Stores a shallow copy of the given objects record; chainable. */
  setObjects(objects: Record<string, unknown>) {
    this.objects = Object.assign({}, objects)
    return this
  }

  buildRestRest(pretty: boolean) {
    const rest: Record<string, unknown> = {}
    rest.user = toSpecifier(this.user, pretty)
    rest.objects = this.objects
    return rest
  }
}
|
<reponame>ichbk/outlierhub<filename>src/outlier_hub/datasets/toy_datasets/gaussian/factory.py
#!/usr/bin/env python3
from data_stack.dataset.factory import BaseDatasetFactory
from data_stack.dataset.iterator import DatasetIteratorIF
from data_stack.dataset.meta import MetaFactory
from outlier_hub.datasets.toy_datasets.gaussian.iterator import GaussianIterator
from typing import Dict, Any, Tuple
import numpy as np
class GaussianFactory(BaseDatasetFactory):
    """Builds a parameterizable Gaussian dataset."""

    def __init__(self):
        super().__init__()

    def _get_iterator(self, split: str, class_label: int, seed: int, num_samples: int,
                      covariance: np.ndarray, mean: Tuple[int, int]):
        """Create a Gaussian sample iterator plus its meta information.

        Args:
            split: dataset split name (not used by the iterator itself here).
            class_label: target label attached to every sample.
            seed: RNG seed for reproducible sampling.
            num_samples: number of points to draw.
            covariance: covariance matrix of the Gaussian.  Annotated as
                ``np.ndarray`` (``np.array`` is a function, not a type);
                nested lists are presumably accepted too -- TODO confirm
                against GaussianIterator.
            mean: center of the Gaussian.

        Returns:
            Tuple of (GaussianIterator, iterator meta).
        """
        meta = MetaFactory.get_iterator_meta(sample_pos=0, target_pos=1, tag_pos=2)
        return GaussianIterator(seed, class_label, num_samples, covariance, mean), meta

    def get_dataset_iterator(self, config: Dict[str, Any] = None) -> DatasetIteratorIF:
        """Build an iterator from a config dict whose keys match ``_get_iterator``.

        Note: ``config`` must not actually be None -- the default only mirrors
        the base-class signature; ``**config`` would raise on None.
        """
        return self._get_iterator(**config)
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    import torch

    # Demo: draw a 2-D Gaussian toy dataset and scatter-plot the samples.
    config = {
        "split": "train",
        "seed": 1,
        "class_label": 1,
        "num_samples": 10000,
        "covariance": [[5, -5], [-5, 5]],
        "mean": (-5, 5),
    }
    factory = GaussianFactory()
    iterator, meta = factory.get_dataset_iterator(config)
    samples, targets = zip(*[(sample, target) for sample, target, _ in iterator])
    sample_tensor = torch.stack(samples)
    plt.scatter(*list(zip(*sample_tensor)), color='red', s=1)
    plt.show()
|
Phase II study of metronomic chemotherapy for recurrent malignant gliomas in adults.
Preclinical evidence suggests that continuous low-dose daily (metronomic) chemotherapy may inhibit tumor endothelial cell proliferation (angiogenesis) and prevent tumor growth. This phase II study evaluated the feasibility of this antiangiogenic chemotherapy regimen in adults with recurrent malignant gliomas. The regimen consisted of low-dose etoposide (35 mg/m2 daily for 21 days), alternating every 21 days with cyclophosphamide (2 mg/kg daily for 21 days), in combination with daily thalidomide and celecoxib, in adult patients with recurrent malignant gliomas. Serum and urine samples were collected for measurement of angiogenic peptides. Forty-eight patients were enrolled (15 female, 33 male). Twenty-eight patients had glioblastoma multiforme (GBMs), and 20 had anaplastic gliomas (AGs). Median age was 53 years (range, 33-74 years), and median KPS was 70 (range, 60-100). Therapy was reasonably well tolerated in this heavily pretreated population. Two percent of patients had partial response, 9% had a minor response, 59% had stable disease, and 30% had progressive disease. For GBM patients, median progression-free survival (PFS) was 11 weeks, six-month PFS (6M-PFS) was 9%, and median overall survival (OS) was 21 weeks. For AG patients, median PFS was 14 weeks, 6M-PFS was 26%, and median OS was 41.5 weeks. In a limited subset of patients, serum and urine angiogenic peptides did not correlate with response or survival (p > 0.05). Although there were some responders, this four-drug, oral metronomic regimen did not significantly improve OS in this heavily pretreated group of patients who were generally not eligible for conventional protocols. While metronomic chemotherapy may not be useful in patients with advanced disease, further studies using metronomic chemotherapy combined with more potent antiangiogenic agents in patients with less advanced disease may be warranted. |
def format_corefs_dygiepp(equiv_rels_list):
    """Convert equivalence relations into DyGIE++-style coref clusters.

    Each relation's arguments become one cluster: a list of
    ``[tok_start, tok_end]`` span pairs.

    :param equiv_rels_list: iterable of relations, each with an ``args``
        sequence whose items expose ``tok_start`` and ``tok_end``
    :returns: list of clusters (one per relation)
    """
    return [
        [[span.tok_start, span.tok_end] for span in relation.args]
        for relation in equiv_rels_list
    ]
from apps import transform
@transform
def length(value):
    """Return the length of ``value``.

    Integers are passed through unchanged; anything else is measured with
    ``len()``.  Values that support neither yield ``None``.
    """
    if isinstance(value, int):
        return value
    try:
        return len(value)
    except TypeError:
        # value has no length (e.g. a float); mirror the "no answer" case
        return None
@transform
def linear_scale(value, min_value, max_value, low_scale, high_scale):
    """Linearly map ``value`` from [min_value, max_value] onto [low_scale, high_scale].

    The original expression ``max(min(value - min_value, min_value) / (...), max_value)``
    mangled the intended clamp: it compared against the range bounds instead of
    clamping the computed fraction, so results escaped the target scale entirely.
    This version computes the fraction of the source range and clamps it to
    [0, 1] before interpolating, so the result always lies within
    [low_scale, high_scale].

    :param value: input to scale
    :param min_value: lower bound of the source range
    :param max_value: upper bound of the source range (must differ from min_value)
    :param low_scale: output at or below min_value
    :param high_scale: output at or above max_value
    :returns: the interpolated (and clamped) value
    """
    fraction = (value - min_value) / (max_value - min_value)
    fraction = max(0.0, min(1.0, fraction))
    return low_scale + fraction * (high_scale - low_scale)
@transform
def divide(value, divisor):
    """Return ``value`` divided by ``divisor`` (true division)."""
    quotient = value / divisor
    return quotient
@transform
def multiply(value, multiplier):
    """Return the product of ``value`` and ``multiplier``."""
    product = value * multiplier
    return product
@transform
def add(num1, num2):
    """Return the sum of ``num1`` and ``num2``."""
    total = num1 + num2
    return total
@transform
def subtract(value, subtrahend):
    """Return ``value`` minus ``subtrahend``."""
    difference = value - subtrahend
    return difference
@transform
def json_select(json_in, element):
    """Return the entry stored under ``element`` in ``json_in``."""
    selected = json_in[element]
    return selected
@transform
def list_select(list_in, index):
    """Deserialize a JSON array string and return the item at ``index``.

    :param list_in: JSON-encoded list, e.g. ``'[1, 2, 3]'``
    :param index: zero-based position to select
    :returns: the decoded element at that position
    :raises json.JSONDecodeError: if ``list_in`` is not valid JSON
    :raises IndexError: if ``index`` is out of range
    """
    # Local import: no module-level `import json` is visible in this file,
    # so the original would raise NameError at call time.
    import json
    return json.loads(list_in)[index]
|
<reponame>raff/wails
package main
import (
"fmt"
"github.com/wailsapp/wails/cmd"
)
// init registers the "init" subcommand with the CLI, declaring its flags and
// the action that scaffolds a new Wails project.
// NOTE(review): `app` and `logger` are package-level values defined elsewhere
// in this package.
func init() {
	projectHelper := cmd.NewProjectHelper()
	projectOptions := projectHelper.NewProjectOptions()
	commandDescription := `Generates a new Wails project using the given flags.
Any flags that are required and not given will be prompted for.`
	// Declare the command and bind each flag directly to a projectOptions field.
	initCommand := app.Command("init", "Initialises a new Wails project").
		LongDescription(commandDescription).
		BoolFlag("f", "Use defaults", &projectOptions.UseDefaults).
		StringFlag("dir", "Directory to create project in", &projectOptions.OutputDirectory).
		// StringFlag("template", "Template name", &projectOptions.Template).
		StringFlag("name", "Project name", &projectOptions.Name).
		StringFlag("description", "Project description", &projectOptions.Description).
		StringFlag("output", "Output binary name", &projectOptions.BinaryName)
	initCommand.Action(func() error {
		logger.PrintSmallBanner("Initialising project")
		fmt.Println()
		// Check if the system is initialised
		system := cmd.NewSystemHelper()
		err := system.CheckInitialised()
		if err != nil {
			return err
		}
		// On dependency failure err may be nil or non-nil; it is returned as-is.
		success, err := cmd.CheckDependenciesSilent(logger)
		if !success {
			return err
		}
		// Do we want to just force defaults?
		if projectOptions.UseDefaults {
			// Use defaults
			projectOptions.Defaults()
		} else {
			err = projectOptions.PromptForInputs()
			if err != nil {
				return err
			}
		}
		// Generate the project
		err = projectHelper.GenerateProject(projectOptions)
		if err != nil {
			logger.Error(err.Error())
		}
		return err
	})
}
|
// Class that handles the events
private class TheHandler implements ActionListener{
public void actionPerformed(ActionEvent event){
String string = "";
if(event.getSource()==item1)
// Set string to input of field 1 text box
string = String.format("field 1: %s", event.getActionCommand());
else if(event.getSource()==item2)
// Set string to input of field 2 text box
string = String.format("field 2: %s", event.getActionCommand());
else if(event.getSource()==item3)
// Set string to input of field 3 text box
string = String.format("field 3: %s", event.getActionCommand());
else if(event.getSource()==passwordField)
// Set string to input of password field text box
string = String.format("passwordfield: %s", event.getActionCommand());
// Show popup with string value - null positions the window in the center
JOptionPane.showMessageDialog(null, string);
}
} |
Comments by Barry Eichengreen, on Preceding Three Papers
Barry Eichengreen: After several decades of rapid growth, much of emerging Asia appears to have succumbed to the malaise of slow economic growth. The aspiration of countries such as Thailand and Indonesia to achieve per capita incomes of US$ 20,000 (2010 dollars) by 2020 looks to be disappointed. This raises the question: Were those aspirations unrealistic? Or have their economies underperformed? Or both? |
Following the announcement by U.S. President Donald Trump to move the country’s Israeli embassy to Jerusalem and recognise it as the capital of Israel, Prime Minister Theresa May has vowed to confront him, arguing Jerusalem must be shared between Israelis and Palestinians.
The Prime Minister made her remarks on Wednesday afternoon at Prime Minister’s Questions in the House of Commons, saying she was “intending to speak to President Trump about this matter but our position has not changed.”
Mrs May added: “The status of Jerusalem should be determined in a negotiated settlement between the Israelis and the Palestinians. Jerusalem should ultimately form a shared capital between the Israeli and Palestinian states.”
The statement from the Prime Minister could prolong the diplomatic row that began last week when the U.S. President shared several tweets purporting to show acts of violence committed by Muslims in Europe and the Middle East.
Mrs May condemned the President’s retweets through a spokesman, who said the group that originally posted them spread “hateful narratives which peddle lies and stoke tensions.”
.@Theresa_May, don’t focus on me, focus on the destructive Radical Islamic Terrorism that is taking place within the United Kingdom. We are doing just fine! — Donald J. Trump (@realDonaldTrump) November 30, 2017
Trump fired back at Mrs May and other British politicians who had condemned his actions on Twitter, writing: “don’t focus on me, focus on the destructive Radical Islamic Terrorism that is taking place within the United Kingdom. We are doing just fine!”
Some have pointed out the absurdity of the hysteria around President Trump’s retweets, when earlier this week UK police arrested a pair of radical Islamic extremists who allegedly hatched a plot to bomb Downing Street and assassinate the Prime Minister.
May’s stance on Israel was called into question last month when she forced former international development secretary and noted Brexit campaigner Priti Patel to resign after she had met with Israeli Prime Minister Benjamin Netanyahu without informing the Foreign and Commonwealth Office.
Israeli officials were baffled by the row, according to the Israeli newspaper Haaretz, which quoted an Israeli official as saying, “She met with Netanyahu without telling the ambassador? That’s a reason to fire her? It must be an anti-Israel reaction.”
While there has been an outcry across the European political establishment regarding President Trump’s bold move on Jerusalem, some have commended him.
Dutch populist Geert Wilders expressed joy at the news, writing on Twitter: “All civilized countries should follow the example of the brave USA and recognize #Jerusalem as the eternal and undivided capital of #Israel!” |
//---------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation
//
// File: olecontrol.cpp
//
// History:
// 7-31-96 by dli
//------------------------------------------------------------------------
#include "priv.h"
class COleControlHost;
//---------------------------------------------------------------------------
// Event sink
// Minimal IDispatch implementation used to sink an OLE control's default
// event (source) interface and forward notifications to the host windows.
// Only Invoke is really implemented; the type-info methods return E_NOTIMPL.
class CEventSink : public IDispatch
//---------------------------------------------------------------------------
{
public:
    CEventSink( BOOL bAutoDelete = FALSE ) ;
    // Connect/disconnect
    BOOL Connect( HWND hwndOwner, HWND hwndSite, LPUNKNOWN punkOC ) ;
    BOOL Disconnect() ;
    // IUnknown methods
    STDMETHOD (QueryInterface)( REFIID riid, void** ppvObj ) ;
    STDMETHOD_(ULONG, AddRef)() ;
    STDMETHOD_(ULONG, Release)() ;
    // IDispatch methods (type info is not provided by this sink)
    STDMETHOD (GetTypeInfoCount)( UINT *pctinfo )
        { return E_NOTIMPL ; }
    STDMETHOD (GetTypeInfo)( UINT iTInfo, LCID lcid, ITypeInfo **ppTInfo )
        { return E_NOTIMPL ; }
    STDMETHOD (GetIDsOfNames)( REFIID riid, LPOLESTR *rgszNames, UINT cNames,
                               LCID lcid, DISPID *rgDispId )
        { return E_NOTIMPL ; }
    STDMETHOD (Invoke)(
        IN DISPID dispIdMember,
        IN REFIID riid,
        IN LCID lcid,
        IN WORD wFlags,
        IN OUT DISPPARAMS *pDispParams,
        OUT VARIANT *pVarResult,
        OUT EXCEPINFO *pExcepInfo,
        OUT UINT *puArgErr) ;
private:
    static HRESULT _GetDefaultEventIID( LPUNKNOWN punkOC, IID* piid ) ;
    BOOL _Connect( HWND hwndOwner, HWND hwndSite, LPUNKNOWN punkOC, REFIID iid ) ;
    BOOL _IsConnected( REFIID iid ) ;
    ULONG _dwCookie ;   // connection cookie
    IID _iid ;          // connection interface
    IID _iidDefault ;   // OC's default event dispatch interface
    LPUNKNOWN _punkOC ; // OC's unknown
    LONG _cRef ;        // ref count
    HWND _hwndSite,     //
         _hwndOwner ;
    BOOL _bAutoDelete ;
} ;
// IDocHostUIHandler proxy embedded in COleControlHost (as _xuih); it lets the
// host forward Trident UI-customization callbacks to the parent's handler.
class CProxyUIHandler :
    public IDocHostUIHandler
{
public:
    // *** IUnknown methods ***
    STDMETHODIMP QueryInterface(REFIID riid, LPVOID * ppvObj);
    STDMETHODIMP_(ULONG) AddRef(void);
    STDMETHODIMP_(ULONG) Release(void);
    // *** IDocHostUIHandler methods ***
    virtual STDMETHODIMP ShowContextMenu(DWORD dwID, POINT *ppt, IUnknown *pcmdtReserved, IDispatch *pdispReserved);
    virtual STDMETHODIMP GetHostInfo(DOCHOSTUIINFO *pInfo);
    virtual STDMETHODIMP ShowUI(DWORD dwID, IOleInPlaceActiveObject *pActiveObject, IOleCommandTarget *pCommandTarget, IOleInPlaceFrame *pFrame, IOleInPlaceUIWindow *pDoc);
    virtual STDMETHODIMP HideUI();
    virtual STDMETHODIMP UpdateUI();
    virtual STDMETHODIMP EnableModeless(BOOL fActivate);
    virtual STDMETHODIMP OnDocWindowActivate(BOOL fActivate);
    virtual STDMETHODIMP OnFrameWindowActivate(BOOL fActivate);
    virtual STDMETHODIMP ResizeBorder(LPCRECT prcBorder, IOleInPlaceUIWindow *pUIWindow, BOOL fRameWindow);
    virtual STDMETHODIMP TranslateAccelerator(LPMSG lpMsg, const GUID *pguidCmdGroup, DWORD nCmdID);
    virtual STDMETHODIMP GetOptionKeyPath(LPOLESTR *pchKey, DWORD dw);
    virtual STDMETHODIMP GetDropTarget(IDropTarget *pDropTarget, IDropTarget **ppDropTarget);
    virtual STDMETHODIMP GetExternal(IDispatch **ppDispatch);
    virtual STDMETHODIMP TranslateUrl(DWORD dwTranslate, OLECHAR *pchURLIn, OLECHAR **ppchURLOut);
    virtual STDMETHODIMP FilterDataObject( IDataObject *pDO, IDataObject **ppDORet);
};
//---------------------------------------------------------------------------
// Ole control container object
// Ole control container object
// Hosts a single OLE control inside a plain HWND, acting as its client site,
// in-place site/frame, advise sink, service provider and command target.
class COleControlHost :
    public IOleClientSite,
    public IAdviseSink,
    public IOleInPlaceSite,
    public IOleInPlaceFrame,
    public IServiceProvider,
    public IOleCommandTarget
{
    friend CProxyUIHandler;
protected:
    static LRESULT CALLBACK OCHostWndProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
    // Lifecycle/paint helpers driven by the window procedure
    HRESULT _Draw(HDC hdc);
    HRESULT _PersistInit();
    HRESULT _Init();
    HRESULT _Activate();
    HRESULT _Deactivate();
    HRESULT _DoVerb(long iVerb, LPMSG lpMsg);
    HRESULT _Exit();
    HRESULT _InitOCStruct(LPOCHINITSTRUCT lpocs);
    // Window-message handlers
    LRESULT _OnPaint();
    LRESULT _OnSize(HWND hwnd, LPARAM lParam);
    LRESULT _OnCreate(HWND hwnd, LPCREATESTRUCT);
    LRESULT _OnDestroy();
    LRESULT _OnQueryInterface(WPARAM wParam, LPARAM lParam);
    LRESULT _SetOwner(IUnknown * punkOwner);
    LRESULT _ConnectEvents( LPUNKNOWN punkOC, BOOL bConnect ) ;
    LRESULT _SendNotify(UINT code, LPNMHDR pnmhdr);
    // IUnknown
    UINT _cRef;
    DWORD _dwAspect;           // display aspect used for drawing/advises
    DWORD _dwMiscStatus;       // OLE misc status
    DWORD _dwConnection;       // Token for Advisory connections
    BOOL _bInPlaceActive;      // Flag indicating if the OC is in place active
    HWND _hwnd;
    HWND _hwndParent;
    CLSID _clsidOC;            // class of the hosted control
    IUnknown *_punkOC;         // the hosted control itself
    IViewObject *_pIViewObject;
    IOleObject *_pIOleObject;
    IOleInPlaceObject *_pIOleIPObject;
    IUnknown *_punkOwner;      // owner supplied via _SetOwner (service chain)
    CEventSink _eventSink ;
    CProxyUIHandler _xuih;     // forwards IDocHostUIHandler to the parent
    IDocHostUIHandler *_pIDocHostUIParent;
public:
    COleControlHost(HWND hwnd);
    static void _RegisterClass();
    // *** IUnknown methods ***
    STDMETHODIMP QueryInterface(REFIID riid, LPVOID * ppvObj);
    STDMETHODIMP_(ULONG) AddRef(void);
    STDMETHODIMP_(ULONG) Release(void);
    // IServiceProvider
    STDMETHODIMP QueryService(REFGUID guidService, REFIID riid, LPVOID* ppvObj);
    // *** IOleClientSite methods ***
    STDMETHOD (SaveObject)();
    STDMETHOD (GetMoniker)(DWORD, DWORD, LPMONIKER *);
    STDMETHOD (GetContainer)(LPOLECONTAINER *);
    STDMETHOD (ShowObject)();
    STDMETHOD (OnShowWindow)(BOOL);
    STDMETHOD (RequestNewObjectLayout)();
    // *** IAdviseSink methods ***
    STDMETHOD_(void,OnDataChange)(FORMATETC *, STGMEDIUM *);
    STDMETHOD_(void,OnViewChange)(DWORD, LONG);
    STDMETHOD_(void,OnRename)(LPMONIKER);
    STDMETHOD_(void,OnSave)();
    STDMETHOD_(void,OnClose)();
    // *** IOleWindow Methods ***
    STDMETHOD (GetWindow) (HWND * phwnd);
    STDMETHOD (ContextSensitiveHelp) (BOOL fEnterMode);
    // *** IOleInPlaceSite Methods ***
    STDMETHOD (CanInPlaceActivate) (void);
    STDMETHOD (OnInPlaceActivate) (void);
    STDMETHOD (OnUIActivate) (void);
    STDMETHOD (GetWindowContext) (IOleInPlaceFrame ** ppFrame, IOleInPlaceUIWindow ** ppDoc, LPRECT lprcPosRect, LPRECT lprcClipRect, LPOLEINPLACEFRAMEINFO lpFrameInfo);
    STDMETHOD (Scroll) (SIZE scrollExtent);
    STDMETHOD (OnUIDeactivate) (BOOL fUndoable);
    STDMETHOD (OnInPlaceDeactivate) (void);
    STDMETHOD (DiscardUndoState) (void);
    STDMETHOD (DeactivateAndUndo) (void);
    STDMETHOD (OnPosRectChange) (LPCRECT lprcPosRect);
    // IOleInPlaceUIWindow methods.
    STDMETHOD (GetBorder)(LPRECT lprectBorder);
    STDMETHOD (RequestBorderSpace)(LPCBORDERWIDTHS lpborderwidths);
    STDMETHOD (SetBorderSpace)(LPCBORDERWIDTHS lpborderwidths);
    STDMETHOD (SetActiveObject)(IOleInPlaceActiveObject * pActiveObject,
                                LPCOLESTR lpszObjName);
    // IOleInPlaceFrame methods
    STDMETHOD (InsertMenus)(HMENU hmenuShared, LPOLEMENUGROUPWIDTHS lpMenuWidths);
    STDMETHOD (SetMenu)(HMENU hmenuShared, HOLEMENU holemenu, HWND hwndActiveObject);
    STDMETHOD (RemoveMenus)(HMENU hmenuShared);
    STDMETHOD (SetStatusText)(LPCOLESTR pszStatusText);
    STDMETHOD (EnableModeless)(BOOL fEnable);
    STDMETHOD (TranslateAccelerator)(LPMSG lpmsg, WORD wID);
    // IOleCommandTarget
    virtual STDMETHODIMP QueryStatus(const GUID *pguid, ULONG cCmds, MSOCMD rgCmds[], MSOCMDTEXT *pcmdtext);
    virtual STDMETHODIMP Exec(const GUID *pguid, DWORD nCmdID, DWORD nCmdexecopt, VARIANTARG *pvarargIn, VARIANTARG *pvarargOut);
};
// Paints the hosted control into hdc via OleDraw.  Only meaningful while the
// control is NOT in-place active; active controls own a window and paint
// themselves.
HRESULT COleControlHost::_Draw(HDC hdc)
{
    if (!_hwnd || !_punkOC || _bInPlaceActive)
        return E_FAIL;

    RECT rcClient;
    GetClientRect(_hwnd, &rcClient);
    return OleDraw(_punkOC, _dwAspect, hdc, &rcClient);
}
// Initializes the freshly created control's persistent state: prefers
// IPersistStreamInit::InitNew, falling back to IPersistStorage::InitNew on a
// temporary docfile.  The host window may take over via OCN_PERSISTINIT.
HRESULT COleControlHost::_PersistInit()
{
    IPersistStreamInit * pIPersistStreamInit;
    // Host handled persistence itself -- nothing to do here.
    if (_SendNotify(OCN_PERSISTINIT, NULL) == OCNPERSISTINIT_HANDLED)
        return S_FALSE;
    HRESULT hr = _punkOC->QueryInterface(IID_IPersistStreamInit, (void **)&pIPersistStreamInit);
    if (SUCCEEDED(hr))
    {
        hr = pIPersistStreamInit->InitNew();
        pIPersistStreamInit->Release();
    }
    else
    {
        // No stream persistence: fall back to storage-based initialization.
        IPersistStorage * pIPersistStorage;
        hr = _punkOC->QueryInterface(IID_IPersistStorage, (void **)&pIPersistStorage);
        if (SUCCEEDED(hr))
        {
            // Create a zero sized ILockBytes.
            ILockBytes *pILockBytes;
            hr = CreateILockBytesOnHGlobal(NULL, TRUE, &pILockBytes);
            if (SUCCEEDED(hr)) {
                // Use the ILockBytes to create a storage.
                IStorage *pIStorage;
                hr = StgCreateDocfileOnILockBytes(pILockBytes,
                                                  STGM_CREATE |
                                                  STGM_READWRITE |
                                                  STGM_SHARE_EXCLUSIVE,
                                                  0, &pIStorage);
                if (SUCCEEDED(hr)) {
                    // Call InitNew to initialize the object.
                    hr = pIPersistStorage->InitNew(pIStorage);
                    // Clean up
                    pIStorage->Release();
                } // IStorage
                pILockBytes->Release();
            } // ILockBytes
            pIPersistStorage->Release();
        }
    }
    return hr;
}
// Creates the hosted OLE control (or lets the host window create it via
// OCN_COCREATEINSTANCE), initializes its persistence, and hooks this object
// up as its client site and advise sink.
// Returns S_OK on success; the first failing HRESULT otherwise.
HRESULT COleControlHost::_Init()
{
    HRESULT hr = E_FAIL;
    OCNCOCREATEMSG ocm = {0};
    ocm.clsidOC = _clsidOC;
    ocm.ppunk = &_punkOC;
    // Give the host window first crack at creating the control itself.
    if(_SendNotify(OCN_COCREATEINSTANCE, &ocm.nmhdr) != OCNCOCREATE_HANDLED)
    {
        hr = CoCreateInstance(_clsidOC, NULL, CLSCTX_INPROC_SERVER | CLSCTX_LOCAL_SERVER,
                              IID_IUnknown, (LPVOID *)&_punkOC);
        if (FAILED(hr))
        {
            // NOTE(review): passing the CLSID struct to %lX is dubious varargs
            // usage inherited from the original -- confirm TraceMsg copes.
            TraceMsg(TF_OCCONTROL, "_Init: Unable to CoCreateInstance this Class ID -- hr = %lX -- hr = %lX", _clsidOC, hr);
            return hr;
        }
    }
    ASSERT(_punkOC != NULL);
    if (_punkOC == NULL)
        return E_FAIL;
    hr = _punkOC->QueryInterface(IID_IOleObject, (void **)&_pIOleObject);
    if (FAILED(hr))
    {
        // Fixed format specifier: hr is an HRESULT, not a string; the original
        // "%s" would dereference the failure code as a char pointer.
        TraceMsg(TF_OCCONTROL, "_Init: Unable to QueryInterface IOleObject -- hr = %lX", hr);
        return hr;
    }
    hr = _pIOleObject->GetMiscStatus(_dwAspect, &_dwMiscStatus);
    // Set the inplace active flag here
    // Honor OLEMISC_SETCLIENTSITEFIRST: some controls require a client site
    // before persistent-state initialization; others the other way round.
    // If this fails, we will assume that we can setclientsite later
    if (_dwMiscStatus & OLEMISC_SETCLIENTSITEFIRST)
    {
        hr = _pIOleObject->SetClientSite(this);
        _PersistInit();
    }
    else
    {
        _PersistInit();
        hr = _pIOleObject->SetClientSite(this);
    }
    if (FAILED(hr))
    {
        TraceMsg(TF_OCCONTROL, "_Init: Unable to set client site -- hr = %lX", hr);
        return hr;
    }
    // Register for view-change notifications so OnViewChange can repaint a
    // non-active control.
    if (SUCCEEDED(_punkOC->QueryInterface(IID_IViewObject, (void **)&_pIViewObject)))
    {
        _pIViewObject->SetAdvise(_dwAspect, 0, this);
    }
    //BUGBUG: this is not really useful because we do not handle the cases, yet
    _pIOleObject->Advise(this, &_dwConnection);
    _pIOleObject->SetHostNames(TEXTW("OC Host Window"), TEXTW("OC Host Window"));
    return S_OK;
}
//
// Activates the hosted control in place, then explicitly shows it.
HRESULT COleControlHost::_Activate()
{
    HRESULT hr = E_FAIL;
    RECT rcClient;
    ASSERT(_hwnd);
    _SendNotify(OCN_ACTIVATE, NULL);
    // Fall back to an empty rect if the client rect is unavailable.
    if (!GetClientRect(_hwnd, &rcClient))
        SetRectEmpty(&rcClient);
    hr = _pIOleObject->DoVerb(OLEIVERB_INPLACEACTIVATE, NULL, this, 0, _hwnd, &rcClient);
    if (SUCCEEDED(hr))
        _bInPlaceActive = TRUE;
    // Calling second DoVerb with OLEIVERB_SHOW because:
    // 1. If the above DoVerb fails, this is a back up activation call
    // 2. If the above DoVerb succeeds, this is also necessary because
    //    Some embeddings needs to be explicitly told to show themselves.
    if (!(_dwMiscStatus & OLEMISC_INVISIBLEATRUNTIME))
        hr = _pIOleObject->DoVerb(OLEIVERB_SHOW, NULL, this, 0, _hwnd, &rcClient);
    if (FAILED(hr))
        TraceMsg(TF_OCCONTROL, "_Activate: %d Unable to DoVerb! Error = %lX", _bInPlaceActive, hr);
    return hr;
}
// Takes the control out of its in-place active state.  Returns S_FALSE when
// there was no in-place object to deactivate.
HRESULT COleControlHost::_Deactivate()
{
    _SendNotify(OCN_DEACTIVATE, NULL);
    if (!_pIOleIPObject)
        return S_FALSE;

    _pIOleIPObject->InPlaceDeactivate();
    // OnInPlaceDeactivate releases and clears the pointer during the call above.
    ASSERT(_pIOleIPObject == NULL);
    return S_OK;
}
// Invokes an OLE verb (UI-activate or in-place-activate) on the hosted
// control, marking the host in-place active on success.
HRESULT COleControlHost::_DoVerb(long iVerb, LPMSG lpMsg)
{
    HRESULT hr = E_FAIL;
    RECT rcClient;
    ASSERT(_hwnd && IsWindow(_hwnd));
    // simply because others haven't been tested
    ASSERT(iVerb == OLEIVERB_UIACTIVATE || iVerb == OLEIVERB_INPLACEACTIVATE);
    if (!GetClientRect(_hwnd, &rcClient))
        SetRectEmpty(&rcClient);
    hr = _pIOleObject->DoVerb(iVerb, lpMsg, this, 0, _hwnd, &rcClient);
    if (SUCCEEDED(hr))
        _bInPlaceActive = TRUE;
#if 0 // we'll count on DocHost::DoVerb to do this if needed (or our caller?)
    // BUGBUG note that DocHost does this always (no OLEMISC_* check)
    if (iVerb == OLEIVERB_INPLACEACTIVATE) {
        // Calling second DoVerb with OLEIVERB_SHOW because:
        // 1. If the above DoVerb fails, this is a back up activation call
        // 2. If the above DoVerb succeeds, this is also necessary because
        //    Some embeddings needs to be explicitly told to show themselves.
        if (!(_dwMiscStatus & OLEMISC_INVISIBLEATRUNTIME))
            hr = _pIOleObject->DoVerb(OLEIVERB_SHOW, lpMsg, this, 0, _hwnd, &rcClient);
    }
#endif
    if (FAILED(hr))
        TraceMsg(TF_OCCONTROL, "_Activate: %d Unable to DoVerb! Error = %lX", _bInPlaceActive, hr);
    return hr;
}
// Clean up and Release all of interface pointers used in this object
// Tears down the advisory connections, closes the control, and drops every
// cached interface pointer.  Safe to call with partially-initialized state.
HRESULT COleControlHost::_Exit()
{
    _SendNotify(OCN_EXIT, NULL);
    if (_pIViewObject)
    {
        // Cancel the view advise set up in _Init before releasing.
        _pIViewObject->SetAdvise(_dwAspect, 0, NULL);
        _pIViewObject->Release();
        _pIViewObject = NULL;
    }
    if (_pIOleObject)
    {
        if (_dwConnection)
        {
            _pIOleObject->Unadvise(_dwConnection);
            _dwConnection = 0;
        }
        _pIOleObject->Close(OLECLOSE_NOSAVE);
        _pIOleObject->SetClientSite(NULL);
        _pIOleObject->Release();
        _pIOleObject = NULL;
    }
    if (_punkOC)
    {
        ULONG ulRef;
        ulRef = _punkOC->Release();
        _punkOC = NULL;
        // A nonzero count here means someone else still holds the control.
        if (ulRef != 0)
            TraceMsg(TF_OCCONTROL, "OCHOST _Exit: After last release ref = %d > 0", ulRef);
    }
    ATOMICRELEASE(_pIDocHostUIParent);
    if (_punkOwner) {
        _punkOwner->Release();
        _punkOwner = NULL;
    }
    return S_OK;
}
// Constructor: starts with one reference and DVASPECT_CONTENT drawing;
// everything else relies on zero-initialization (asserted below).
COleControlHost::COleControlHost(HWND hwnd)
    : _cRef(1), _dwAspect(DVASPECT_CONTENT), _hwnd(hwnd)
{
    // These variables should be initialized to zeros automatically
    ASSERT(_dwMiscStatus == 0);
    ASSERT(_dwConnection == 0);
    ASSERT(_bInPlaceActive == FALSE);
    ASSERT(_pIDocHostUIParent == NULL);
    ASSERT(_clsidOC == CLSID_NULL);
    ASSERT(_punkOC == NULL);
    ASSERT(_pIViewObject == NULL);
    ASSERT(_pIOleIPObject == NULL);
    ASSERT(_hwnd);
}
#ifdef DEBUG
#define _AddRef(psz) { ++_cRef; TraceMsg(TF_OCCONTROL, "CDocObjectHost(%x)::QI(%s) is AddRefing _cRef=%lX", this, psz, _cRef); }
#else
#define _AddRef(psz) ++_cRef
#endif
// *** IUnknown Methods ***
// *** IUnknown Methods ***
// Hands out the interfaces this host implements.  IDocHostUIHandler is only
// exposed (via the embedded proxy) when the parent supplied its own handler.
HRESULT COleControlHost::QueryInterface(REFIID riid, LPVOID * ppvObj)
{
    // ppvObj must not be NULL
    ASSERT(ppvObj != NULL);
    if (ppvObj == NULL)
        return E_INVALIDARG;
    *ppvObj = NULL;
    if ((IsEqualIID(riid, IID_IUnknown)) ||
        (IsEqualIID(riid, IID_IOleWindow)) ||
        (IsEqualIID(riid, IID_IOleInPlaceUIWindow)) ||
        (IsEqualIID(riid, IID_IOleInPlaceFrame)))
    {
        *ppvObj = SAFECAST(this, IOleInPlaceFrame *);
        TraceMsg(TF_OCCONTROL, "QI IOleInPlaceFrame succeeded");
    }
    else if (IsEqualIID(riid, IID_IServiceProvider))
    {
        *ppvObj = SAFECAST(this, IServiceProvider *);
        TraceMsg(TF_OCCONTROL, "QI IServiceProvider succeeded");
    }
    else if (IsEqualIID(riid, IID_IOleClientSite))
    {
        *ppvObj = SAFECAST(this, IOleClientSite *);
        TraceMsg(TF_OCCONTROL, "QI IOleClientSite succeeded");
    }
    else if (IsEqualIID(riid, IID_IAdviseSink))
    {
        *ppvObj = SAFECAST(this, IAdviseSink *);
        TraceMsg(TF_OCCONTROL, "QI IAdviseSink succeeded");
    }
    else if (IsEqualIID(riid, IID_IOleInPlaceSite))
    {
        *ppvObj = SAFECAST(this, IOleInPlaceSite *);
        TraceMsg(TF_OCCONTROL, "QI IOleInPlaceSite succeeded");
    }
    else if (IsEqualIID(riid, IID_IOleCommandTarget))
    {
        *ppvObj = SAFECAST(this, IOleCommandTarget *);
        TraceMsg(TF_OCCONTROL, "QI IOleCommandTarget succeeded");
    }
    else if (NULL != _pIDocHostUIParent &&
             IsEqualIID(riid, IID_IDocHostUIHandler))
    {
        // only implement this if the host implements it
        *ppvObj = SAFECAST(&_xuih, IDocHostUIHandler *);
        TraceMsg(TF_OCCONTROL, "QI IDocHostUIHandler succeeded");
    }
    else
        return E_NOINTERFACE;  // Otherwise, don't delegate to HTMLObj!!
    // All success paths share a single AddRef on the host object.
    _AddRef(TEXT("IOleInPlaceSite"));
    return S_OK;
}
// Standard COM reference bump with debug tracing.
ULONG COleControlHost::AddRef()
{
    ++_cRef;
    TraceMsg(TF_OCCONTROL, "COleControlHost(%x)::AddRef called, new _cRef=%lX", this, _cRef);
    return _cRef;
}
// Standard COM release; deletes the host when the last reference drops.
ULONG COleControlHost::Release()
{
    --_cRef;
    TraceMsg(TF_OCCONTROL, "COleControlHost(%x)::Release called, new _cRef=%lX", this, _cRef);
    if (_cRef == 0)
    {
        delete this;
        return 0;
    }
    return _cRef;
}
// ServiceProvider interfaces
HRESULT COleControlHost::QueryService(REFGUID guidService,
REFIID riid, void **ppvObj)
{
HRESULT hres = E_FAIL;
*ppvObj = NULL;
if (_punkOwner) {
IServiceProvider *psp;
_punkOwner->QueryInterface(IID_IServiceProvider, (LPVOID*)&psp);
if (psp) {
hres = psp->QueryService(guidService, riid, ppvObj);
psp->Release();
}
}
return hres;
}
// ************************ IOleClientSite methods ******************
HRESULT COleControlHost::SaveObject()
{
//BUGBUG: default set to E_NOTIMPL may not be correct
HRESULT hr = E_NOTIMPL;
IStorage * pIs;
if (SUCCEEDED(_punkOC->QueryInterface(IID_IStorage, (void **)&pIs)))
{
IPersistStorage *pIps;
if (SUCCEEDED(_punkOC->QueryInterface(IID_IPersistStorage, (void **)&pIps)))
{
OleSave(pIps, pIs, TRUE);
pIps->SaveCompleted(NULL);
pIps->Release();
hr = S_OK;
}
pIs->Release();
}
return hr;
}
// Monikers are not supported by this host.
HRESULT COleControlHost::GetMoniker(DWORD dwAssign, DWORD dwWhichMoniker, LPMONIKER * ppMk)
{
    return E_NOTIMPL;
}
// This host is not an OLE document container.
HRESULT COleControlHost::GetContainer(LPOLECONTAINER * ppContainer)
{
    *ppContainer = NULL;
    return E_NOINTERFACE;
}
// Nothing to do: the control is always visible inside the host window.
HRESULT COleControlHost::ShowObject()
{
    // RECTL rcl;
    // POINT pt1, pt2;
    return S_OK;
}
// Open/close window notifications are acknowledged but ignored.
HRESULT COleControlHost::OnShowWindow(BOOL fShow)
{
    return S_OK;
}
// The host does not renegotiate layout on the control's request.
HRESULT COleControlHost::RequestNewObjectLayout()
{
    return E_NOTIMPL;
}
// ************************ IAdviseSink methods *********************
void COleControlHost::OnDataChange(FORMATETC * pFmt, STGMEDIUM * pStgMed)
{
// NOTES: This is optional
return;
}
// Repaints the hosted control when its view changes, but only for our display
// aspect and only while the control is not in-place active (active controls
// own a window and paint themselves).
// NOTE(review): this can fire while DoVerb is still executing, before
// _bInPlaceActive is final (see the original BUGBUG) -- works for now.
void COleControlHost::OnViewChange(DWORD dwAspect, LONG lIndex)
{
    if (!_hwnd || _bInPlaceActive || dwAspect != _dwAspect)
        return;

    HDC hdc = GetDC(_hwnd);
    _Draw(hdc);
    ReleaseDC(_hwnd, hdc);
}
// Rename notifications are ignored (no moniker support).
void COleControlHost::OnRename(LPMONIKER pMoniker)
{
    return;
}
// Save notifications are ignored.
void COleControlHost::OnSave()
{
    // NOTES: This is optional
    return;
}
// Close notifications are ignored.
void COleControlHost::OnClose()
{
    // BUGBUG: need to let the container know the colors might have changed
    return;
}
// ************************ IOleWindow Methods **********************
HRESULT COleControlHost::GetWindow(HWND * lphwnd)
{
*lphwnd = _hwnd;
return S_OK;
}
// Context-sensitive help mode is not supported.
HRESULT COleControlHost::ContextSensitiveHelp(BOOL fEnterMode)
{
    // NOTES: This is optional
    return E_NOTIMPL;
}
// *********************** IOleInPlaceSite Methods *****************
HRESULT COleControlHost::CanInPlaceActivate(void)
{
return S_OK;
}
HRESULT COleControlHost::OnInPlaceActivate(void)
{
if (!_pIOleIPObject)
return (_punkOC->QueryInterface(IID_IOleInPlaceObject, (void **)&_pIOleIPObject));
else
return S_OK;
}
HRESULT COleControlHost::OnUIActivate(void)
{
LRESULT lres;
OCNONUIACTIVATEMSG oam = {0};
oam.punk = _punkOC;
lres = _SendNotify(OCN_ONUIACTIVATE, &oam.nmhdr);
return S_OK;
}
HRESULT COleControlHost::GetWindowContext (IOleInPlaceFrame ** ppFrame, IOleInPlaceUIWindow ** ppIIPUIWin,
LPRECT lprcPosRect, LPRECT lprcClipRect, LPOLEINPLACEFRAMEINFO lpFrameInfo)
{
*ppFrame = this;
_AddRef("GetWindowContext");
// This is set to NULL because the document window is the same as the frame
// window
*ppIIPUIWin = NULL;
ASSERT(_hwnd);
if (!GetClientRect(_hwnd, lprcPosRect))
SetRectEmpty(lprcPosRect);
// Set the clip rectangle to be the same as the position rectangle
CopyRect(lprcClipRect, lprcPosRect);
lpFrameInfo->cb = sizeof(OLEINPLACEFRAMEINFO);
#ifdef MDI
lpFrameInfo->fMDIApp = TRUE;
#else
lpFrameInfo->fMDIApp = FALSE;
#endif
lpFrameInfo->hwndFrame = _hwnd;
lpFrameInfo->haccel = 0;
lpFrameInfo->cAccelEntries = 0;
return S_OK;
}
HRESULT COleControlHost::Scroll(SIZE scrollExtent)
{
// Should implement later
return E_NOTIMPL;
}
HRESULT COleControlHost::OnUIDeactivate(BOOL fUndoable)
{
return E_NOTIMPL;
}
HRESULT COleControlHost::OnInPlaceDeactivate(void)
{
if (_pIOleIPObject)
{
_pIOleIPObject->Release();
_pIOleIPObject = NULL;
}
return S_OK;
}
HRESULT COleControlHost::DiscardUndoState(void)
{
// Should implement later
return E_NOTIMPL;
}
HRESULT COleControlHost::DeactivateAndUndo(void)
{
// Should implement later
return E_NOTIMPL;
}
HRESULT COleControlHost::OnPosRectChange(LPCRECT lprcPosRect)
{
// We do not allow the children to change the size themselves
OCNONPOSRECTCHANGEMSG opcm = {0};
opcm.prcPosRect = lprcPosRect;
_SendNotify(OCN_ONPOSRECTCHANGE, &opcm.nmhdr);
return S_OK;
}
// ************************ IOleInPlaceUIWindow methods *************
HRESULT COleControlHost::GetBorder(LPRECT lprectBorder)
{
return E_NOTIMPL;
}
HRESULT COleControlHost::RequestBorderSpace(LPCBORDERWIDTHS lpborderwidths)
{
return E_NOTIMPL;
}
HRESULT COleControlHost::SetBorderSpace(LPCBORDERWIDTHS lpborderwidths)
{
return E_NOTIMPL;
}
HRESULT COleControlHost::SetActiveObject(IOleInPlaceActiveObject * pActiveObject,
LPCOLESTR lpszObjName)
{
return E_NOTIMPL;
}
// *********************** IOleInPlaceFrame Methods *****************
// Menu merging is not supported by this host.
HRESULT COleControlHost::InsertMenus(HMENU hmenuShared, LPOLEMENUGROUPWIDTHS lpMenuWidths)
{
    // Should implement later
    return E_NOTIMPL;
}
HRESULT COleControlHost::SetMenu(HMENU hmenuShared, HOLEMENU holemenu, HWND hwndActiveObject)
{
    // Should implement later
    return E_NOTIMPL;
}
HRESULT COleControlHost::RemoveMenus(HMENU hmenuShared)
{
    // Should implement later
    return E_NOTIMPL;
}
// Forwards the object's status text to the parent via OCN_ONSETSTATUSTEXT.
HRESULT COleControlHost::SetStatusText(LPCOLESTR pszStatusText)
{
    OCNONSETSTATUSTEXTMSG osst = {0};
    osst.pwszStatusText = pszStatusText;
    _SendNotify(OCN_ONSETSTATUSTEXT, &osst.nmhdr);
    return S_OK;
}
// Modeless-dialog enable/disable is not implemented.
HRESULT COleControlHost::EnableModeless(BOOL fEnable)
{
    // Should implement later
    return E_NOTIMPL;
}
// The host does not translate frame accelerators for the object.
HRESULT COleControlHost::TranslateAccelerator(LPMSG lpmsg, WORD wID)
{
    // Should implement later
    return E_NOTIMPL;
}
// ************************ IOleCommandTarget Methods *************
// Command routing is delegated wholesale to the owner (if it supports
// IOleCommandTarget; the IUnknown_* helpers handle the QI and null owner).
HRESULT COleControlHost::QueryStatus(const GUID *pguid, ULONG cCmds, MSOCMD rgCmds[], MSOCMDTEXT *pcmdtext)
{
    return IUnknown_QueryStatus(_punkOwner, pguid, cCmds, rgCmds, pcmdtext);
}
HRESULT COleControlHost::Exec(const GUID *pguid, DWORD nCmdID, DWORD nCmdexecopt, VARIANTARG *pvarargIn, VARIANTARG *pvarargOut)
{
    return IUnknown_Exec(_punkOwner, pguid, nCmdID, nCmdexecopt, pvarargIn, pvarargOut);
}
// Initializes the host from an OCHINITSTRUCT: validates the struct, records
// the control CLSID and owner, then creates (_Init) and activates (_Activate)
// the control. Returns S_FALSE if a control is already hosted, E_FAIL on bad
// or missing arguments.
HRESULT COleControlHost::_InitOCStruct(LPOCHINITSTRUCT lpocs)
{
    HRESULT hres = E_FAIL;
    if (_punkOC)
        return S_FALSE;                      // already initialized; don't re-create
    if (lpocs)
    {
        if (lpocs->cbSize != SIZEOF(OCHINITSTRUCT))
            return hres;                     // struct-version mismatch
        if (lpocs->clsidOC == CLSID_NULL)
            return hres;                     // no control class supplied
        _clsidOC = lpocs->clsidOC;
        _SetOwner(lpocs->punkOwner);
    }
    else
        return hres;
    hres = _Init();
    if (SUCCEEDED(hres))
        hres = _Activate();
    return hres;
}
// WM_PAINT handler: paints the control via its view object (_Draw).
LRESULT COleControlHost::_OnPaint()
{
    ASSERT(_hwnd);
    PAINTSTRUCT ps;
    HDC hdc = BeginPaint(_hwnd, &ps);
    _Draw(hdc);
    EndPaint(_hwnd, &ps);
    return 0;
}
// WM_SIZE handler: keeps the in-place-active object's rectangles in sync
// with the host window's new client size.
LRESULT COleControlHost::_OnSize(HWND hwnd, LPARAM lParam)
{
    if (_pIOleIPObject)
    {
        RECT rcPos, rcClip ;
        SetRect( &rcPos, 0, 0, LOWORD(lParam), HIWORD(lParam) ) ;
        rcClip = rcPos ;
        _pIOleIPObject->SetObjectRects(&rcPos, &rcClip);
    }
    return 0;
}
// WM_CREATE handler: stashes 'this' in the window extra bytes, then
// initializes the control either from the CREATESTRUCT's OCHINITSTRUCT or,
// failing that, by parsing the window text as a CLSID string.
// Returns -1 (abort creation) on failure.
LRESULT COleControlHost::_OnCreate(HWND hwnd, LPCREATESTRUCT lpcs)
{
    TCHAR szClsid[50];
    _hwndParent = GetParent(hwnd);
    SetWindowLongPtr(hwnd, 0, (LONG_PTR)this);
    LPOCHINITSTRUCT lpois = (LPOCHINITSTRUCT)lpcs->lpCreateParams;
    HRESULT hres = S_OK;
    if (lpois)
        hres = _InitOCStruct(lpois);
    else if (GetWindowText(hwnd, szClsid, ARRAYSIZE(szClsid)))
    {
        // Fallback: the window title carries the control's CLSID as text.
        OCHINITSTRUCT ois;
        ois.cbSize = SIZEOF(OCHINITSTRUCT);
        if (FAILED(SHCLSIDFromString(szClsid, &ois.clsidOC)))
            ois.clsidOC = CLSID_NULL;
        ois.punkOwner = NULL;
        hres = _InitOCStruct(&ois);
    }
    if (FAILED(hres))
        return -1;
    return 0;
}
// WM_DESTROY handler: unhooks 'this' from the window, disconnects the event
// sink, deactivates and tears down the control, then drops the window's
// reference on the host object.
LRESULT COleControlHost::_OnDestroy()
{
    ASSERT(_hwnd);
    SetWindowLongPtr(_hwnd, 0, 0);
    _ConnectEvents( _punkOC, FALSE ) ;
    _Deactivate();
    _Exit();
    Release();
    return 0;
}
// OCM_QUERYINTERFACE handler: lets the parent QI the hosted control through
// a QIMSG. Returns the HRESULT from QI, or -1 if no QIMSG was supplied.
LRESULT COleControlHost::_OnQueryInterface(WPARAM wParam, LPARAM lParam)
{
    if (lParam)
    {
        QIMSG * qiMsg = (QIMSG *)lParam;
        return _punkOC->QueryInterface(*qiMsg->qiid, qiMsg->ppvObject);
    }
    return -1;
}
// Replaces the owner (ref-counted) and refreshes the cached owner
// IDocHostUIHandler used by the delegating CProxyUIHandler wrapper.
LRESULT COleControlHost::_SetOwner(IUnknown * punkNewOwner)
{
    if (_punkOwner)
        _punkOwner->Release();
    _punkOwner = punkNewOwner;
    if (_punkOwner)
        _punkOwner->AddRef();
    ATOMICRELEASE(_pIDocHostUIParent);
    // Query if owner supports IDocHostUIHandler, if so then
    // we turn on our delegating wrapper
    if (punkNewOwner)
        punkNewOwner->QueryInterface(IID_IDocHostUIHandler, (LPVOID *)&_pIDocHostUIParent);
    return 0;
}
// Connects or disconnects the event sink to/from the control's default
// event source, depending on bConnect.
LRESULT COleControlHost::_ConnectEvents( LPUNKNOWN punkOC, BOOL bConnect )
{
    if( bConnect )
    {
        ASSERT( punkOC ) ;
        return _eventSink.Connect( _hwndParent, _hwnd, punkOC ) ;
    }
    return _eventSink.Disconnect() ;
}
// Sends a WM_NOTIFY with the given code to the parent window, filling in the
// standard NMHDR fields. Uses a local NMHDR when the caller passes none.
LRESULT COleControlHost::_SendNotify(UINT code, LPNMHDR pnmhdr)
{
    NMHDR nmhdr;
    ASSERT(_hwnd);
    if (!_hwndParent)
        return 0;
    if (!pnmhdr)
        pnmhdr = &nmhdr;
    pnmhdr->hwndFrom = _hwnd;
    pnmhdr->idFrom = GetDlgCtrlID( _hwnd ) ;
    pnmhdr->code = code;
    return SendMessage(_hwndParent, WM_NOTIFY, 0, (LPARAM)pnmhdr);
}
// Window procedure for the OCHost window class. The COleControlHost instance
// is created on WM_CREATE, stored in the window extra bytes, and destroyed
// via WM_DESTROY -> _OnDestroy() -> Release().
LRESULT CALLBACK COleControlHost::OCHostWndProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    COleControlHost *pcoch = (COleControlHost *)GetWindowPtr(hwnd, 0);
    // Before WM_CREATE (or after WM_DESTROY clears the slot) there is no host
    // object yet; fall through to the default window procedure.
    if (!pcoch && (uMsg != WM_CREATE))
        return DefWindowProcWrap(hwnd, uMsg, wParam, lParam);
    switch(uMsg)
    {
    case WM_CREATE:
        pcoch = new COleControlHost(hwnd);
        if (pcoch)
            return pcoch->_OnCreate(hwnd, (LPCREATESTRUCT)lParam);
        return -1;      // allocation failed: abort window creation
    case WM_ERASEBKGND:
        if (pcoch->_punkOC && pcoch->_bInPlaceActive)
        {
            // Now tell windows we don't need no stinkin'
            // erased background because our view object
            // is in-place active and he/she will be
            // taking over from here.
            return TRUE;
        }
        break;
    case WM_PAINT:
        return pcoch->_OnPaint();
    case WM_SIZE:
        return pcoch->_OnSize(hwnd, lParam);
    case WM_DESTROY:
        return pcoch->_OnDestroy();
    case OCM_QUERYINTERFACE:
        return pcoch->_OnQueryInterface(wParam, lParam);
    case OCM_INITIALIZE:
        return pcoch->_InitOCStruct((LPOCHINITSTRUCT)lParam);
    case OCM_SETOWNER:
        return pcoch->_SetOwner((IUnknown*)lParam);
    case OCM_DOVERB:
        return pcoch->_DoVerb((long)wParam, (LPMSG)lParam);
    case OCM_ENABLEEVENTS:
        return pcoch->_ConnectEvents( pcoch->_punkOC, (BOOL)wParam ) ;
    case WM_PALETTECHANGED:
        if (pcoch->_pIOleIPObject) {
            // Forward palette changes to the in-place object's own window.
            // NOTE(review): this local deliberately shadows the 'hwnd'
            // parameter; only the object's window receives the message.
            HWND hwnd;
            if (SUCCEEDED(pcoch->_pIOleIPObject->GetWindow(&hwnd))) {
                SendMessage(hwnd, WM_PALETTECHANGED, wParam, lParam);
            }
        }
        break;
    case WM_SETFOCUS:
        // OC doesn't respond to OLEIVERB_UIACTIVATE ?
        if( pcoch->_dwMiscStatus & OLEMISC_NOUIACTIVATE )
        {
            // so explicitly assign focus
            HWND hwndObj ;
            if( pcoch->_pIOleIPObject &&
                SUCCEEDED( pcoch->_pIOleIPObject->GetWindow( &hwndObj ) ) )
                SetFocus( hwndObj ) ;
        }
        else
            pcoch->_DoVerb( OLEIVERB_UIACTIVATE, NULL ) ;
        break ;
    default:
        return DefWindowProcWrap(hwnd, uMsg, wParam, lParam);
    }
    return 0;
}
// Registers the OCHOST window class. One LPVOID of window extra bytes holds
// the COleControlHost pointer (see OCHostWndProc / _OnCreate).
void COleControlHost::_RegisterClass()
{
    WNDCLASS wc = {0};
    wc.style = CS_GLOBALCLASS;
    wc.lpfnWndProc = OCHostWndProc;
    //wc.cbClsExtra = 0;
    wc.cbWndExtra = SIZEOF(LPVOID);
    wc.hInstance = HINST_THISDLL;
    //wc.hIcon = NULL;
    wc.hCursor = LoadCursor (NULL, IDC_ARROW);
    wc.hbrBackground = (HBRUSH) (COLOR_BACKGROUND + 1);
    //wc.lpszMenuName = NULL;
    wc.lpszClassName = OCHOST_CLASS;
    SHRegisterClass(&wc);
}
// CProxyUIHandler is a tear-off IDocHostUIHandler that lives inside
// COleControlHost (recovered via IToClass on the _xuih member). IUnknown is
// delegated to the outer host; every IDocHostUIHandler method is forwarded to
// the owner's handler (_pIDocHostUIParent) when present, otherwise E_NOTIMPL.
HRESULT CProxyUIHandler::QueryInterface(REFIID riid, LPVOID * ppvObj)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->QueryInterface(riid, ppvObj);
};
ULONG CProxyUIHandler::AddRef(void)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->AddRef();
};
ULONG CProxyUIHandler::Release(void)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->Release();
};
HRESULT CProxyUIHandler::ShowContextMenu(DWORD dwID, POINT *ppt, IUnknown *pcmdtReserved, IDispatch *pdispReserved)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->ShowContextMenu(dwID, ppt, pcmdtReserved, pdispReserved) : E_NOTIMPL;
}
HRESULT CProxyUIHandler::GetHostInfo(DOCHOSTUIINFO *pInfo)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->GetHostInfo(pInfo) : E_NOTIMPL;
}
HRESULT CProxyUIHandler::ShowUI(DWORD dwID, IOleInPlaceActiveObject *pActiveObject, IOleCommandTarget *pCommandTarget, IOleInPlaceFrame *pFrame, IOleInPlaceUIWindow *pDoc)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->ShowUI(dwID, pActiveObject, pCommandTarget, pFrame, pDoc): E_NOTIMPL;
}
HRESULT CProxyUIHandler::HideUI()
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->HideUI(): E_NOTIMPL;
}
HRESULT CProxyUIHandler::UpdateUI()
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->UpdateUI(): E_NOTIMPL;
}
HRESULT CProxyUIHandler::EnableModeless(BOOL fActivate)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->EnableModeless(fActivate): E_NOTIMPL;
}
HRESULT CProxyUIHandler::OnDocWindowActivate(BOOL fActivate)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->OnDocWindowActivate(fActivate): E_NOTIMPL;
}
HRESULT CProxyUIHandler::OnFrameWindowActivate(BOOL fActivate)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->OnFrameWindowActivate(fActivate): E_NOTIMPL;
}
HRESULT CProxyUIHandler::ResizeBorder(LPCRECT prcBorder, IOleInPlaceUIWindow *pUIWindow, BOOL fRameWindow)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->ResizeBorder(prcBorder, pUIWindow, fRameWindow): E_NOTIMPL;
}
HRESULT CProxyUIHandler::TranslateAccelerator(LPMSG lpMsg, const GUID *pguidCmdGroup, DWORD nCmdID)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->TranslateAccelerator(lpMsg, pguidCmdGroup, nCmdID): E_NOTIMPL;
}
HRESULT CProxyUIHandler::GetOptionKeyPath(LPOLESTR *pchKey, DWORD dw)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->GetOptionKeyPath(pchKey, dw): E_NOTIMPL;
}
HRESULT CProxyUIHandler::GetDropTarget(IDropTarget *pDropTarget, IDropTarget **ppDropTarget)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->GetDropTarget(pDropTarget, ppDropTarget) : E_NOTIMPL;
}
HRESULT CProxyUIHandler::GetExternal(IDispatch **ppDispatch)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->GetExternal(ppDispatch) : E_NOTIMPL;
}
HRESULT CProxyUIHandler::TranslateUrl(DWORD dwTranslate, OLECHAR *pchURLIn, OLECHAR **ppchURLOut)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->TranslateUrl(dwTranslate, pchURLIn, ppchURLOut) : E_NOTIMPL;
}
HRESULT CProxyUIHandler::FilterDataObject( IDataObject *pDO, IDataObject **ppDORet)
{
    COleControlHost *poch = IToClass(COleControlHost, _xuih, this);
    return poch->_pIDocHostUIParent ? poch->_pIDocHostUIParent->FilterDataObject(pDO, ppDORet) : E_NOTIMPL;
}
// Exported helper: registers this DLL's window classes on demand.
// Validates the SHDRC struct (size and known flags) and registers the
// OCHost class when SHDRCF_OCHOST is requested.
STDAPI_(BOOL) DllRegisterWindowClasses(const SHDRC * pshdrc)
{
    if (pshdrc && pshdrc->cbSize == SIZEOF(SHDRC) && !(pshdrc->dwFlags & ~SHDRCF_ALL))
    {
        if (pshdrc->dwFlags & SHDRCF_OCHOST)
        {
            COleControlHost::_RegisterClass();
            return TRUE;
        }
    }
    return FALSE;
}
//---------------------------------------------------------------------------
// CEventSink constructor.
// Starts disconnected (no cookie, no OC, null IIDs) with a refcount of 1.
// bAutoDelete controls whether Release() deletes the object at refcount 0
// (FALSE when the sink is embedded in another object, e.g. COleControlHost).
CEventSink::CEventSink( BOOL bAutoDelete )
    :   _hwndSite(NULL),
        _hwndOwner(NULL),
        _punkOC(NULL),
        _dwCookie(0),
        _cRef(1),
        _bAutoDelete( bAutoDelete )
{
    _iid = _iidDefault = IID_NULL ;
}
// CEventSink IUnknown impl
// Answers IUnknown, IDispatch, and the OC's default event dispinterface
// (_iidDefault), since the sink implements that dispinterface via IDispatch.
STDMETHODIMP CEventSink::QueryInterface( REFIID riid, void** ppvObj )
{
    *ppvObj = NULL ;
    if( IsEqualGUID( riid, IID_IUnknown ) ||
        IsEqualGUID( riid, IID_IDispatch )||
        IsEqualGUID( riid, _iidDefault ) )
    {
        *ppvObj = this ;
        // COM contract: a successful QueryInterface must AddRef the returned
        // pointer (the original code handed out a pointer without a reference,
        // which unbalances the refcount when callers Release it).
        AddRef() ;
        return S_OK ;
    }
    return E_NOINTERFACE ;
}
// Thread-safe refcount increment.
STDMETHODIMP_(ULONG) CEventSink::AddRef()
{
    return InterlockedIncrement( &_cRef ) ;
}
// Thread-safe refcount decrement; deletes the sink at zero when auto-delete
// is enabled.
STDMETHODIMP_(ULONG) CEventSink::Release()
{
    // Capture the interlocked result: re-reading _cRef after the decrement
    // (as the original code did on its return path) races with other threads
    // and can touch freed memory once the count hits zero.
    LONG cRef = InterlockedDecrement( &_cRef ) ;
    if( cRef <= 0 )
    {
        if( _bAutoDelete )
            delete this ;
        return 0 ;
    }
    return cRef ;
}
// Connects the sink to the OC's default event dispatch interface.
// Looks up the control's default source dispinterface IID, remembers it,
// and establishes the advise connection on it. Returns FALSE if the control
// exposes no discoverable default event interface.
BOOL CEventSink::Connect( HWND hwndOwner, HWND hwndSite, LPUNKNOWN punkOC )
{
    ASSERT( punkOC ) ;
    IID iidDefault = IID_NULL ;
    if( SUCCEEDED( _GetDefaultEventIID( punkOC, &iidDefault ) ) )
    {
        _iidDefault = iidDefault ;
        return _Connect( hwndOwner, hwndSite, punkOC, iidDefault ) ;
    }
    return FALSE ;
}
// Establishes advise connection on the specified interface.
// No-op (returns TRUE) if already connected on that IID; any existing
// connection on a different IID is dropped first. On success the sink
// remembers the cookie, windows, and takes a reference on the control.
BOOL CEventSink::_Connect( HWND hwndOwner, HWND hwndSite, LPUNKNOWN punkOC, REFIID iid )
{
    LPCONNECTIONPOINTCONTAINER pcpc;
    ASSERT(punkOC != NULL) ;
    HRESULT hr = CONNECT_E_CANNOTCONNECT ;
    if( _IsConnected( iid ) )
        return TRUE ;
    if( _dwCookie )
        Disconnect() ;
    if( punkOC &&
        SUCCEEDED( punkOC->QueryInterface(IID_IConnectionPointContainer, (LPVOID*)&pcpc )))
    {
        LPCONNECTIONPOINT pcp = NULL;
        DWORD dwCookie = 0;
        ASSERT(pcpc != NULL);
        if( SUCCEEDED(pcpc->FindConnectionPoint( iid, &pcp )))
        {
            ASSERT(pcp != NULL);
            hr = pcp->Advise( this, &dwCookie ) ;
            if( SUCCEEDED( hr ) )
            {
                // Record connection state only after Advise succeeds.
                _iid       = iid ;
                _dwCookie  = dwCookie ;
                _hwndOwner = hwndOwner ;
                _hwndSite  = hwndSite ;
                _punkOC    = punkOC ;
                _punkOC->AddRef() ;
            }
            pcp->Release();
        }
        pcpc->Release();
    }
    return SUCCEEDED( hr ) ;
}
// Retrieves default event dispatch interface from the OC.
// Fast path: IProvideClassInfo2::GetGUID. Fallback: walk the coclass type
// info from IProvideClassInfo looking for the impl type flagged
// [default, source] (and not restricted) and extract its guid.
// Fails (E_FAIL) if the resulting IID is still IID_NULL.
HRESULT CEventSink::_GetDefaultEventIID( LPUNKNOWN punkOC, IID* piid )
{
    HRESULT hr ;
    ASSERT( punkOC ) ;
    ASSERT( piid ) ;
    IProvideClassInfo  *pci ;
    IProvideClassInfo2 *pci2 ;
    *piid = IID_NULL ;
#define IMPLTYPE_MASK \
    (IMPLTYPEFLAG_FDEFAULT|IMPLTYPEFLAG_FSOURCE|IMPLTYPEFLAG_FRESTRICTED)
#define IMPLTYPE_DEFAULTSOURCE \
    (IMPLTYPEFLAG_FDEFAULT|IMPLTYPEFLAG_FSOURCE)
    // Retrieve default outbound dispatch IID using OC's IProvideClassInfo2
    if( SUCCEEDED( (hr = punkOC->QueryInterface( IID_IProvideClassInfo2, (void**)&pci2 )) ) )
    {
        hr = pci2->GetGUID( GUIDKIND_DEFAULT_SOURCE_DISP_IID, piid ) ;
        pci2->Release() ;
    }
    else // no IProvideClassInfo2; try IProvideClassInfo:
    if( SUCCEEDED( (hr = punkOC->QueryInterface( IID_IProvideClassInfo, (void**)&pci )) ) )
    {
        ITypeInfo* pClassInfo = NULL;
        if( SUCCEEDED( (hr = pci->GetClassInfo( &pClassInfo )) ) )
        {
            LPTYPEATTR pClassAttr;
            ASSERT( pClassInfo );
            if( SUCCEEDED( (hr = pClassInfo->GetTypeAttr( &pClassAttr )) ) )
            {
                ASSERT( pClassAttr ) ;
                ASSERT( pClassAttr->typekind == TKIND_COCLASS ) ;
                // Enumerate implemented interfaces looking for default source IID.
                HREFTYPE hRefType;
                int nFlags;
                for( UINT i = 0; i < pClassAttr->cImplTypes; i++ )
                {
                    if( SUCCEEDED( (hr = pClassInfo->GetImplTypeFlags( i, &nFlags )) ) &&
                        ((nFlags & IMPLTYPE_MASK) == IMPLTYPE_DEFAULTSOURCE) )
                    {
                        // Got the interface, now retrieve its IID:
                        ITypeInfo* pEventInfo = NULL ;
                        if( SUCCEEDED( (hr = pClassInfo->GetRefTypeOfImplType( i, &hRefType )) ) &&
                            SUCCEEDED( (hr = pClassInfo->GetRefTypeInfo( hRefType, &pEventInfo )) ) )
                        {
                            LPTYPEATTR pEventAttr;
                            ASSERT( pEventInfo ) ;
                            if( SUCCEEDED( (hr = pEventInfo->GetTypeAttr( &pEventAttr )) ) )
                            {
                                *piid = pEventAttr->guid ;
                                pEventInfo->ReleaseTypeAttr(pEventAttr);
                            }
                            pEventInfo->Release();
                        }
                        break;
                    }
                }
                pClassInfo->ReleaseTypeAttr(pClassAttr);
            }
            pClassInfo->Release();
        }
        pci->Release() ;
    }
    // A "successful" walk that found no source interface is still a failure.
    if( SUCCEEDED( hr ) && IsEqualIID( *piid, IID_NULL ) )
        hr = E_FAIL ;
    return hr ;
}
// reports whether the sink is connected to the indicated sink
// (i.e. we hold a nonzero advise cookie for exactly this IID).
BOOL CEventSink::_IsConnected( REFIID iid )
{
    return _dwCookie != 0L &&
           IsEqualIID( iid, _iid ) ;
}
// disconnects the sink: Unadvises the connection point identified by the
// cached IID/cookie, releases the control, and resets all connection state.
// Returns FALSE if there was no live connection to tear down.
BOOL CEventSink::Disconnect()
{
    LPCONNECTIONPOINTCONTAINER pcpc;
    if( _dwCookie != 0 &&
        _punkOC &&
        SUCCEEDED( _punkOC->QueryInterface(IID_IConnectionPointContainer, (LPVOID*)&pcpc)))
    {
        LPCONNECTIONPOINT pcp = NULL;
        ASSERT(pcpc != NULL);
        if (SUCCEEDED(pcpc->FindConnectionPoint(_iid, &pcp)))
        {
            ASSERT(pcp != NULL);
            pcp->Unadvise(_dwCookie);
            pcp->Release();
            // Reset connection state only after a successful Unadvise path.
            _iid       = IID_NULL ;
            _dwCookie  = 0L ;
            _hwndOwner = NULL ;
            _hwndSite  = NULL ;
            _punkOC->Release() ;
            _punkOC    = NULL ;
        }
        pcpc->Release();
        return TRUE ;
    }
    return FALSE ;
}
// CEventSink IDispatch interface
// Every event the control fires lands here. The dispatch arguments are packed
// into an NMOCEVENT block and forwarded synchronously to the owner window as
// a WM_NOTIFY(OCN_OCEVENT), letting the parent handle (and optionally answer)
// the event. Always reports S_OK back to the control.
STDMETHODIMP CEventSink::Invoke(
    IN DISPID dispIdMember,
    IN REFIID riid,
    IN LCID lcid,
    IN WORD wFlags,
    IN OUT DISPPARAMS *pDispParams,
    OUT VARIANT *pVarResult,
    OUT EXCEPINFO *pExcepInfo,
    OUT UINT *puArgErr)
{
    // Copy method args to notification block
    NMOCEVENT event ;
    ZeroMemory( &event, sizeof(event) ) ;
    event.hdr.hwndFrom = _hwndSite;
    event.hdr.idFrom   = GetDlgCtrlID( _hwndSite ) ;
    event.hdr.code     = OCN_OCEVENT ;
    event.dispID       = dispIdMember ;
    event.iid          = riid ;
    event.lcid         = lcid ;
    event.wFlags       = wFlags ;
    event.pDispParams  = pDispParams ;
    event.pVarResult   = pVarResult ;
    event.pExepInfo    = pExcepInfo ;
    event.puArgErr     = puArgErr ;
    // Notify parent of event
    ::SendMessage( _hwndOwner, WM_NOTIFY, event.hdr.idFrom, (LPARAM)&event ) ;
    // Cleanup args: any result the handler left behind is cleared before
    // returning control to the firing object.
    if (pVarResult != NULL)
        VariantClear( pVarResult ) ;
    return S_OK ;
}
|
/*
    Function: PreviousFlag
    Setter: stores the "use previous frame" flag on the dehazing object.
    Parameter
        bPrevFlag - new flag value
*/
void dehazing::PreviousFlag(bool bPrevFlag)
{
    // Simple cached assignment; no other side effects.
    m_bPreviousFlag = bPrevFlag;
}
/**
* Defines a parameter specification, or the information about a parameter (where it occurs, what is
* its type, etc).
*
* @author Steve Ebersole
*/
public class ParameterExpressionImpl<T>
extends ExpressionImpl<T>
implements ParameterExpression<T>, Serializable {
private final String name;
private final Integer position;
public ParameterExpressionImpl(
CriteriaBuilderImpl criteriaBuilder,
Class<T> javaType,
String name) {
super( criteriaBuilder, javaType );
this.name = name;
this.position = null;
}
public ParameterExpressionImpl(
CriteriaBuilderImpl criteriaBuilder,
Class<T> javaType,
Integer position) {
super( criteriaBuilder, javaType );
this.name = null;
this.position = position;
}
public ParameterExpressionImpl(
CriteriaBuilderImpl criteriaBuilder,
Class<T> javaType) {
super( criteriaBuilder, javaType );
this.name = null;
this.position = null;
}
public String getName() {
return name;
}
public Integer getPosition() {
return position;
}
public Class<T> getParameterType() {
return getJavaType();
}
public void registerParameters(ParameterRegistry registry) {
registry.registerParameter( this );
}
public String render(CriteriaQueryCompiler.RenderingContext renderingContext) {
final String jpaqlParamName = renderingContext.registerExplicitParameter( this );
return ':' + jpaqlParamName;
}
public String renderProjection(CriteriaQueryCompiler.RenderingContext renderingContext) {
return render( renderingContext );
}
} |
HOTCHKISS — Trains loaded with shiny black coal used to generate electricity are thundering out of Colorado’s North Fork Valley again — as many as two a day — raising hopes for the mining comeback President Donald Trump promised on the campaign trail.
The coal carved from deep under pristine forests in the last surviving West Elk Mine is bound for other countries to be burned. Environmentalists, worried about global warming, are steamed. But laid-off Colorado miners see the trains as a signal that they might get back to work.
Those train cars reflect a roughly sevenfold increase from two trains a week last year, when statewide coal production hit record lows. Colorado mines produced about 12.8 million tons of coal in 2016, down from 18.7 million the year before, according to Colorado Division of Reclamation, Mining and Safety data. Since 2004, coal production is down 67 percent. Coal jobs have dwindled to about 1,200 statewide from 2,118 in 2003, when Colorado ranked among the top U.S. coal producers.
Even coal industry executives acknowledge the rise of wind, solar and gas as cheaper, cleaner alternatives that employ more workers, and question whether a comeback is possible.
Yet the rumbling, clanging and flashing lights as coal trains snake down the valley promise otherwise — music to many in western Colorado, where coal once sustained a solid middle class. Tourism hasn’t paid as well, and residents are resisting a full embrace of the marijuana boom. Voters here strongly backed Trump after his Oct. 18 campaign rally in a Grand Junction airport hangar, where he promised to put coal miners in Colorado back to work.
“My husband and I love that man. It’s going to be a good year,” Hotchkiss Inn manager Kris Bartol said, sharing her cellphone videos of Trump. She and her husband, Andy, miss the income they once had from mining contractors who rented a block of rooms.
Over the past few months, the number of unemployed miners trekking into the Compliance Staffing Agency, which helps mines fill positions, has tripled to 15 a week, CSA president Steve West said. “Some are just not finding anything else. They’re saying that, with Trump, they might find work now. They think the industry might be coming back.”
Starting her sedan in Somerset, coal miner’s daughter Myrna Ungaro lamented how, after two mines closed last year, “this valley died, the shops, the restaurants.” Yet now the loaded coal trains soothe and inspire her in her own search for work, Ungaro said. “This is the way it’s supposed to be.”
There are indeed signs. West Elk manager Jim Miller has hired a few miners, mostly temporary, bringing the workforce to about 220. Corporate parent Arch Coal emerged from bankruptcy in December. Federal forest managers, pressed by Colorado officials, finalized an exception to the nation’s rule for protecting “roadless” forests, clearing an obstacle for the West Elk Mine to expand and produce more coal.
Arch Coal spokeswoman Logan Bonacorsi confirmed contracts locked through the first half of this year supplying “international markets.” Arch and West Elk officials wouldn’t say more.
Any expansion is likely to face resistance.
“Mining and burning more coal will mean more smog, more toxic mercury in the air, more needless methane emissions and more climate pollution,” EarthJustice attorney Ted Zukoski said. “We’re going to continue to oppose coal mining that puts our forests, wildlife and climate at risk.”
Colorado Mining Association president Stan Dempsey said Trump’s drive to dismantle environmental regulations will be crucial. “We’ve bottomed out. We’re seeing a stabilization in Colorado. We’re seeing a slow increase in production. If we don’t see any more boneheaded policy decisions, such as Colorado’s 2010 Clean Air Clean Jobs Act, then coal is going to make a strong recovery.”
But energy industry analysts — pointing to the overall drop in production and the federal and state efforts to close coal-fired power plants — contend coal still will fade in favor of the cleaner, cheaper alternatives. Even as Trump posed at the White House with coal miners and ordered dismantling of the Clean Power Plan to cut heat-trapping carbon pollution, a technology shift away from coal continued with more plants shutting each month.
A Chinese government curb on coal production in China, driven by air pollution and domestic economy concerns, drove up the price of coal to make U.S. exports profitable, said Clark Williams-Derry of the Seattle-based Sightline Institute energy think tank.
“What you’re seeing now is just a temporary uptick due to Chinese policy and higher natural gas prices. Be cautious,” Williams-Derry said. “We’re now at the mercy of Chinese policymakers. All the optimism in the world isn’t going to reverse the tide of this shift.”
Colorado coal-fired power plants still will close as scheduled, and uprooted coal miners will need help, Gov. John Hickenlooper said after White House officials announced they would kill the national Clean Power Plan.
“He cannot force people to spend more money to mine more coal,” Hickenlooper said.
Here in the North Fork Valley, Oxbow Mining president Mike Ludlow, a 44-year coal industry veteran who ran the now-closed Elk Creek Mine, contends the current export surge will at most slow the pace of decline. If any coal mine can survive competition from cheaper clean sources, he said, it would be the West Elk, “a really, really well-run mine.”
“But, even with Trump, I don’t see any utility trying to permit a new coal-fired facility. When wind is blowing, that is low-cost energy. Utilities are going to want to take low-cost energy,” he said. “With the coal for power plants being shut down, it will never come back.”
State agencies have pumped about $4 million in grants to help unemployed miners and their families adapt, said Delta County Administrator Robbie LeValley, who has championed economic diversification. Hickenlooper touts faster internet connections as a key, and about 20 former miners were hired to install fiber-optic cables in Hotchkiss and other towns, LeValley said.
Hundreds of laid-off miners moved out of the valley to work in other states, she said. “Some of their families are still living here. A couple went back to school.”
County officials are “certainly optimistic” to see increasing numbers of coal trains and hope for steady employment of about 250 miners if the West Elk can expand, LeValley said. “The capacity that the West Elk has certainly will benefit our area. But we’re still down by two of three mines. And the full suite of environmental regulations and production constraints are still in place.”
Railroad transport costs, she added, weigh more heavily on Arch Coal now because other companies that once shared costs have gone out of business.
Since 1864, coal mining in western Colorado has built communities and helped create a shared identity, pride and purpose. Miners could afford to buy homes, and their families could afford lifestyles that included recreation with snowmobiles, boats and all-terrain vehicles. As many as six mines operated in the valley. Over the past two years, more than 1,000 jobs disappeared. Longtime residents speak of an emptiness.
The sound of coal trains rolling past the Living Farm Cafe in Paonia “is comforting,” server Casey Branson said. “Honestly, I would be OK if all of the coal jobs came back.”
Even if they don’t, a feeling that Trump will do everything possible puts people who have been hurting in a more positive mood.
“I’m not sure how much one president can do,” said Wanda Buskirk, office manager for the Ragged Mountain Fire Protection District, whose husband worked in a coal mine and now has leukemia.
Trump may not be able to fully deliver on his promise to reverse coal’s decline, she said. “There are still two other branches of government involved. But I hope so.” |
from typing import Any, Dict
import pytest
from nuplan.common.utils.testing.nuplan_test import NUPLAN_TEST_PLUGIN, nuplan_test
from nuplan.planning.metrics.evaluation_metrics.common.distance_to_baseline import DistanceToBaselineStatistics
from nuplan.planning.metrics.utils.testing_utils import metric_statistic_test
@nuplan_test(path='json/distance_to_baseline/distance_to_baseline.json')
def test_distance_to_baseline(scene: Dict[str, Any]) -> None:
    """
    Tests the distance-to-baseline metric on a made-up scenario.

    (The previous docstring described the drivable-area violation metric;
    this test exercises DistanceToBaselineStatistics.)
    :param scene: the json scene.
    """
    metric = DistanceToBaselineStatistics('distance_to_baseline', 'Planning')
    metric_statistic_test(scene=scene, metric=metric)
# Allow running this module directly through the nuPlan pytest plugin.
if __name__ == '__main__':
    raise SystemExit(pytest.main([__file__], plugins=[NUPLAN_TEST_PLUGIN]))
|
/**
* Holds a list of ACK and RESEND messages that should be sent out.
*
* @author rbygrave
*/
public class AckResendMessages {
ArrayList<Message> messages = new ArrayList<Message>();
public String toString() {
return messages.toString();
}
public int size() {
return messages.size();
}
/**
* Add a ACK message to send.
*/
public void add(MessageAck ack){
messages.add(ack);
}
/**
* Add a RESEND message to send.
*/
public void add(MessageResend resend){
messages.add(resend);
}
/**
* Return all the messages to be sent out.
*/
public List<Message> getMessages() {
return messages;
}
} |
/**
* Create an instance of shape containing a geometry specified by type. Arguments
* are treated according to geometry type.
*
* none: No geometry is created. Arguments are ignored.
* arrow: arg1=length, arg2=width
* ball: arg1=samples, arg2=radius
* box: arg1=xExtent, arg2=yExtent, arg3=zExtent
* cone: arg1=samples, arg2=radius, arg3=height
* cylinder: arg1=samples, arg2=radius, arg3=height
* dart: arg1=samples, arg2=radius, arg3=height
* disk: arg1=samples, arg2=radius
* dome: arg1=samples, arg2=radius
* flag: arg1=height
* pyramid: arg1=width, arg2=height
* quad: arg1=size
* rod: arg1=cellCount, arg2=radius, arg3=length
* sphere: arg1=samples, arg2=radius
* teapot: arg1=size
* torus: arg1=samples, arg2=tubeRadius, arg3=centerRadius
*
* @param name
* @param shapeType
* @param arg1
* @param arg2
* @param arg3
* @return
*/
public static Shape createShape(String name, ShapeType shapeType, float arg1, float arg2, float arg3) {
Spatial geometry = null;
switch (shapeType) {
case none:
break;
case arrow:
geometry = new Arrow("_arrow", arg1, arg2);
break;
case ball:
geometry = new Sphere("_sphere", (int)arg1, (int)arg1, arg2);
geometry.setTranslation(0, 0, arg1 * 0.5f);
break;
case box:
geometry = new Box("_box", new Vector3(), 0.5f * arg1, 0.5f * arg2, 0.5f * arg3);
geometry.setTranslation(0, 0, 0.5f * arg3);
break;
case cone:
geometry = new Cone("_cone", (int)arg1, (int)arg1, arg2, arg3, true);
geometry.setRotation(new Matrix3().fromAngles(Math.PI, 0, 0));
geometry.setTranslation(0, 0, 0.5f * arg3);
break;
case cylinder:
geometry = new Cylinder("_cylinder", (int)arg1, (int)arg1, arg2, arg3, true);
geometry.setTranslation(0, 0, 0.5f * arg3);
break;
case dart:
geometry = new Cone("_dart", (int)arg1, (int)arg1, arg2, arg3, true);
geometry.setTranslation(0, 0, 0.5f * arg3);
break;
case disk:
geometry = new Disk("_disk", (int)arg1, (int)arg1, arg2);
break;
case dome:
geometry = new Dome("_dome", (int)arg1/2, (int)arg1, arg2);
geometry.setRotation(new Matrix3().fromAngles(Math.PI / 2, 0, 0));
break;
case flag:
geometry = new Flag("_flag", arg1);
geometry.setTranslation(0, 0, 0.5f*arg1);
break;
case pyramid:
geometry = new Pyramid("_pyramid", arg1, arg2);
geometry.setRotation(new Matrix3().fromAngles(Math.PI / 2, 0, 0));
geometry.setTranslation(0, 0, arg1 * 0.5f);
break;
case quad:
geometry = new Quad("_quad", arg1, arg1);
break;
case rod:
geometry = new Rod("_rod", (int)arg1, 30, arg2, arg3);
geometry.setTranslation(0, 0, arg2);
break;
case sphere:
geometry = new Sphere("_sphere", (int)arg1, (int)arg1, arg2);
break;
case teapot:
geometry = new Teapot("_teapot");
geometry.setRotation(new Matrix3().fromAngles(Math.PI / 2, 0, 0));
geometry.setScale(new Vector3(arg1, arg1, arg1));
break;
case torus:
geometry = new Torus("_torus", (int)arg1, (int)arg1, arg2, arg3);
break;
}
if (geometry == null) {
return(new Shape(name, ShapeType.none, null));
}
return(new Shape(name, shapeType, geometry));
} |
/**
 * Checks that only the TRUE state yields a true outcome;
 * FALSE and INDIFFERENT must both report a false outcome.
 */
@Test
public void outcomeTest() {
    // NOTE(review): assertFalse(...) would read better than assertTrue(!...),
    // but only assertTrue is confirmed to be in scope in this file.
    assertTrue(!TripleState.FALSE.getOutcome());
    assertTrue(!TripleState.INDIFFERENT.getOutcome());
    assertTrue(TripleState.TRUE.getOutcome());
}
def mean_log_Gaussian_like(y_true, parameters,c,m ):
    """Mean negative log-likelihood of a mixture-of-isotropic-Gaussians output.

    `parameters` is the flat output of a mixture density network; it is
    reshaped to (batch, c + 2, m) where, for each of the m mixture components:
    rows [0, c) are the component means, row c is the (shared, isotropic)
    standard deviation, and row c + 1 is the mixture weight.

    Args:
        y_true: target values; expanded to (batch, c, 1) for broadcasting,
            so it is assumed to have shape (batch, c) — TODO confirm.
        parameters: network output, reshapeable to (batch, c + 2, m).
        c: dimensionality of the target.
        m: number of mixture components.

    Returns:
        Scalar tensor: the mean over the batch of the negative
        log-likelihood (suitable as a loss to minimize).

    NOTE(review): uses `tf.log`, i.e. the TensorFlow 1.x API (`tf.math.log`
    in TF 2.x), and an external `log_sum_exp` helper defined elsewhere.
    """
    components = tf.reshape(parameters, [-1, c + 2, m])
    mu = components[:, :c, :]
    sigma = components[:, c, :]
    # clip to avoid log(0) and division by zero below
    sigma = tf.clip_by_value(sigma, 1e-15,1e15)
    alpha = components[:, c + 1, :]
    alpha = tf.clip_by_value(alpha, 1e-8, 1.)
    # log(alpha_k * N(y | mu_k, sigma_k^2 I)) per component
    exponent = tf.log(alpha) - 0.5 * c * tf.log(2 * np.pi) \
               - c * tf.log(sigma) \
               - tf.reduce_sum((tf.expand_dims(y_true, 2) - mu) ** 2, axis=1) / (2.0 * (sigma) ** 2.0)
    # log-sum-exp over components gives the log mixture density per example
    log_gauss = log_sum_exp(exponent, axis=1)
    res = - tf.reduce_mean(log_gauss)
    return res
/**
* Reads a single delimited line and returns its values as a String array.
* Returns null if the end of file is reached.
*
* @return the delimited text as a String array, or null if EOF is reached
* @throws IOException
* on exception
*/
public final String[] readLine() throws IOException {
final String line = in.readLine();
if (line != null) {
return parse(line);
}
return null;
} |
WASHINGTON, Aug. 6 (UPI) -- The backlash against sand used in fracking operations should be a warning to countries looking to replicate North American success, an opposition group said.
Energy companies in North America use a silica crystal during hydraulic fracturing, or fracking, operations. The practice involves water mixed with the fine particles and trace amounts of chemicals to create fissures in shale that release trapped oil and gas deposits.
Companies mining the sand are growing, though there's been a backlash against their operations. Last week, the Minnesota Department of Natural Resources ordered a mine operator to shut down because of environmental concerns.
The Price of Oil, a group critical of the oil and gas industry, said there have been corresponding health concerns with so-called frac-sand. The fine silica sand mined for fracking can cause lung damage, it said.
The group said Wednesday the outcry over fracking now extends to sand, which should serve as "a warning to countries such as the U.K., which recently opened up half their country to fracking."
The British shale industry is in its infancy, though the government said natural gas from shale could provide a source of economic stimulus and energy security. |
import gpytorch
import torch
import torch.nn as nn
import numpy as np
class NNKernel(gpytorch.kernels.Kernel):
    """Kernel defined as the inner product of learned neural-network embeddings:
    k(x1, x2) = <f(x1), f(x2)> where f is an MLP mapping input_dim -> output_dim.
    """
    def __init__(self, input_dim, output_dim, num_layers, hidden_dim, flatten=False, **kwargs):
        # input_dim: feature dimension of each input point
        # output_dim: embedding dimension produced by the network
        # num_layers: number of hidden layers (must be >= 1)
        # hidden_dim: width of each hidden layer
        # flatten: if True, prepend nn.Flatten() (for image-shaped inputs)
        super(NNKernel, self).__init__(**kwargs)
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.num_layers = num_layers
        self.hidden_dim = hidden_dim
        self.flatten = flatten
        self.model = self.create_model()
    def create_model(self):
        # Build an MLP: Linear+ReLU x num_layers, then a final Linear projection.
        assert self.num_layers >= 1, "Number of hidden layers must be at least 1"
        modules = [nn.Linear(self.input_dim, self.hidden_dim), nn.ReLU()]
        if self.flatten:
            modules = [nn.Flatten()] + modules
        for i in range(self.num_layers - 1):
            modules.append(nn.Linear(self.hidden_dim, self.hidden_dim))
            modules.append(nn.ReLU())
        modules.append(nn.Linear(self.hidden_dim, self.output_dim))
        model = nn.Sequential(*modules)
        return model
    def forward(self, x1, x2, diag=False, last_dim_is_batch=False, full_covar=True, **params):
        r"""
        Computes the covariance between x1 and x2.
        This method should be implemented by all Kernel subclasses.
        Args:
            :attr:`x1` (Tensor `n x d` or `b x n x d`):
                First set of data
            :attr:`x2` (Tensor `m x d` or `b x m x d`):
                Second set of data
            :attr:`diag` (bool):
                Should the Kernel compute the whole kernel, or just the diag?
            :attr:`last_dim_is_batch` (tuple, optional):
                If this is true, it treats the last dimension of the data as another batch dimension.
                (Useful for additive structure over the dimensions). Default: False
        Returns:
            :class:`Tensor` or :class:`gpytorch.lazy.LazyTensor`.
                The exact size depends on the kernel's evaluation mode:
                * `full_covar`: `n x m` or `b x n x m`
                * `full_covar` with `last_dim_is_batch=True`: `k x n x m` or `b x k x n x m`
                * `diag`: `n` or `b x n`
                * `diag` with `last_dim_is_batch=True`: `k x n` or `b x k x n`
        """
        if last_dim_is_batch:
            raise NotImplementedError()
        else:
            # Embed both inputs with the shared network, then take inner products.
            z1 = self.model(x1)
            z2 = self.model(x2)
            # NOTE(review): z2.T transposes *all* dims; this only yields the
            # documented `b x n x m` result for 2-D (unbatched) inputs.
            # Batched inputs would need z2.transpose(-2, -1) — confirm callers.
            out = torch.matmul(z1, z2.T)
            if diag:
                return torch.diag(out)
            else:
                return out
class PositiveLinear(nn.Module):
    """Bias-free linear layer whose effective weights are constrained positive.

    The raw weight tensor is unconstrained; a softplus transform is applied on
    every forward pass, so the matrix used in the affine map is always > 0.
    """
    def __init__(self, in_features, out_features):
        super(PositiveLinear, self).__init__()
        self.out_features = out_features
        self.in_features = in_features
        self.weight = nn.Parameter(torch.Tensor(out_features, in_features))
        self.reset_parameters()
    def reset_parameters(self):
        # Xavier/Glorot initialization of the raw (pre-softplus) weights.
        nn.init.xavier_uniform_(self.weight)
    def forward(self, input):
        # Softplus keeps the effective weight matrix strictly positive.
        return nn.functional.linear(input, nn.functional.softplus(self.weight))
class NNKernelNoInner(gpytorch.kernels.Kernel):
    """Kernel whose entries are produced directly by a positive-weight network.

    Instead of an inner product of embeddings, each covariance entry
    k(x1[i], x2[j]) is the scalar output of an MLP (PositiveLinear layers with
    sigmoid activations) applied to the concatenation [x1[i], x2[j]].

    NOTE(review): positive weights do not by themselves guarantee a positive
    semi-definite kernel matrix (the original author left eigenvalue-check
    debug code here) — confirm downstream usage tolerates this.
    """
    def __init__(self, input_dim, num_layers, hidden_dim, flatten=False, **kwargs):
        # input_dim: dimension of a single point; the network consumes a
        #     concatenated pair, i.e. 2 * input_dim features.
        # num_layers: number of hidden layers (must be >= 1)
        # hidden_dim: width of each hidden layer
        # flatten: if True, prepend nn.Flatten() to the network
        super(NNKernelNoInner, self).__init__(**kwargs)
        self.input_dim = input_dim * 2
        self.output_dim = 1
        self.num_layers = num_layers
        self.hidden_dim = hidden_dim
        self.flatten = flatten
        self.model = self.create_model()
    def create_model(self):
        # PositiveLinear + Sigmoid stack ending in a scalar output.
        assert self.num_layers >= 1, "Number of hidden layers must be at least 1"
        modules = [PositiveLinear(self.input_dim, self.hidden_dim), nn.Sigmoid()]
        if self.flatten:
            modules = [nn.Flatten()] + modules
        for i in range(self.num_layers - 1):
            modules.append(PositiveLinear(self.hidden_dim, self.hidden_dim))
            modules.append(nn.Sigmoid())
        modules.append(PositiveLinear(self.hidden_dim, self.output_dim))
        model = nn.Sequential(*modules)
        return model
    def forward(self, x1, x2, diag=False, last_dim_is_batch=False, full_covar=True, **params):
        r"""
        Computes the covariance between x1 and x2.
        This method should be implemented by all Kernel subclasses.
        Args:
            :attr:`x1` (Tensor `n x d` or `b x n x d`):
                First set of data
            :attr:`x2` (Tensor `m x d` or `b x m x d`):
                Second set of data
            :attr:`diag` (bool):
                Should the Kernel compute the whole kernel, or just the diag?
            :attr:`last_dim_is_batch` (tuple, optional):
                If this is true, it treats the last dimension of the data as another batch dimension.
                (Useful for additive structure over the dimensions). Default: False
        Returns:
            :class:`Tensor` or :class:`gpytorch.lazy.LazyTensor`.
                The exact size depends on the kernel's evaluation mode:
                * `full_covar`: `n x m` or `b x n x m`
                * `diag`: `n` or `b x n`
        """
        if last_dim_is_batch:
            raise NotImplementedError()
        else:
            n = x1.shape[0]
            m = x2.shape[0]
            # BUG FIX: Tensor.get_device() returns -1 for CPU tensors, which is
            # not a valid device for torch.zeros(); Tensor.device works for
            # both CPU and CUDA tensors.
            out = torch.zeros((n, m), device=x1.device)
            # Fill the lower triangle and mirror it.
            # NOTE(review): this assumes x1 and x2 are the same set (square,
            # symmetric kernel matrix) — out[j, i] is out of bounds otherwise.
            for i in range(n):
                for j in range(i + 1):
                    out[i, j] = self.model(torch.cat((x1[i], x2[j]))).view(-1)
                    if i != j:
                        out[j, i] = out[i, j]
            if diag:
                return torch.diag(out)
            else:
                return out
class MultiNNKernel(gpytorch.kernels.Kernel):
    """Multitask kernel built from one embedding network (NNKernel) per task.

    The (task i, task j) cross-covariance block is the inner product of the
    task-i embedding of x1 with the task-j embedding of x2; blocks are
    interleaved so row r of the result corresponds to (point r // num_tasks,
    task r % num_tasks).
    """
    def __init__(self, num_tasks, kernels, **kwargs):
        # num_tasks: number of output tasks
        # kernels: list of NNKernel-like modules, one per task (each must
        #     expose a `.model` embedding network)
        super(MultiNNKernel, self).__init__(**kwargs)
        assert isinstance(kernels, list), "kernels must be a list of kernels"
        self.num_tasks = num_tasks
        self.kernels = nn.ModuleList(kernels)
    def num_outputs_per_input(self, x1, x2):
        """
        Given `n` data points `x1` and `m` datapoints `x2`, this multitask
        kernel returns an `(n*num_tasks) x (m*num_tasks)` covariance matrix.
        """
        return self.num_tasks
    def forward(self, x1, x2, diag=False, last_dim_is_batch=False, full_covar=True, **params):
        r"""
        Computes the covariance between x1 and x2.
        This method should be implemented by all Kernel subclasses.
        Args:
            :attr:`x1` (Tensor `n x d` or `b x n x d`):
                First set of data
            :attr:`x2` (Tensor `m x d` or `b x m x d`):
                Second set of data
            :attr:`diag` (bool):
                Should the Kernel compute the whole kernel, or just the diag?
            :attr:`last_dim_is_batch` (tuple, optional):
                If this is true, it treats the last dimension of the data as another batch dimension.
                (Useful for additive structure over the dimensions). Default: False
        Returns:
            :class:`Tensor` or :class:`gpytorch.lazy.LazyTensor`.
                The exact size depends on the kernel's evaluation mode:
                * `full_covar`: `(n*num_tasks) x (m*num_tasks)`
                * `diag`: `n*num_tasks`
        """
        if last_dim_is_batch:
            raise NotImplementedError()
        else:
            n = x1.shape[0]
            m = x2.shape[0]
            # BUG FIX: Tensor.get_device() returns -1 for CPU tensors, which is
            # not a valid device for torch.zeros(); Tensor.device works for
            # both CPU and CUDA tensors.
            out = torch.zeros((n * self.num_tasks, m * self.num_tasks), device=x1.device)
            for i in range(self.num_tasks):
                # z1 does not depend on j: compute it once per task i
                z1 = self.kernels[i].model(x1)
                for j in range(self.num_tasks):
                    z2 = self.kernels[j].model(x2)
                    # strided assignment interleaves tasks within each point
                    out[i:n*self.num_tasks:self.num_tasks, j:m*self.num_tasks:self.num_tasks] = torch.matmul(z1, z2.T)
            if diag:
                return torch.diag(out)
            else:
                return out
/**
* Unit test for the implementation of the group request manager service
*/
@RunWith(SpringRunner.class)
public class GroupRequestManagerServiceImplUnitTest {
@MockBean
private GroupRequestRepository groupRequestRepository;
@MockBean
private FilterStatementRepository filterStatementRepository;
private GroupRequestManagerServiceImpl groupRequestManagerService;
@Before
public void setUp() {
setUpRepo();
groupRequestManagerService = new GroupRequestManagerServiceImpl(groupRequestRepository, filterStatementRepository);
}
@After
public void tearDown() {
groupRequestManagerService = null;
}
/**
* Set up the group request repository and the filter statement repository for the unit test
*/
private void setUpRepo() {
// set up the group request repository
GroupRequest request1 = new GroupRequest();
GroupRequest request2 = new GroupRequest();
GroupRequest request3 = new GroupRequest();
request1.setId(1L);
request1.setThirdPartyId(1L);
request1.setCreationTimestamp(new Timestamp(0));
request1.setStatus(RequestStatus.ACCEPTED);
request1.setAggregatorOperator(AggregatorOperator.COUNT);
request1.setRequestType(RequestType.ALL);
request2.setId(2L);
request2.setThirdPartyId(1L);
request2.setCreationTimestamp(new Timestamp(0));
request2.setStatus(RequestStatus.UNDER_ANALYSIS);
request2.setAggregatorOperator(AggregatorOperator.MAX);
request2.setRequestType(RequestType.PRESSURE_MIN);
request3.setId(3L);
request3.setThirdPartyId(2L);
request3.setCreationTimestamp(new Timestamp(0));
request3.setStatus(RequestStatus.REFUSED);
request3.setAggregatorOperator(AggregatorOperator.AVG);
request3.setRequestType(RequestType.HEART_BEAT);
List<GroupRequest> groupRequestList1 = new ArrayList<>();
groupRequestList1.add(request1);
groupRequestList1.add(request2);
Mockito.when(groupRequestRepository.findById(1L)).thenReturn(java.util.Optional.ofNullable(request1));
Mockito.when(groupRequestRepository.findById(2L)).thenReturn(java.util.Optional.ofNullable(request2));
Mockito.when(groupRequestRepository.findById(3L)).thenReturn(java.util.Optional.ofNullable(request3));
Mockito.when(groupRequestRepository.findAllByThirdPartyId(1L)).thenReturn(groupRequestList1);
Mockito.when(groupRequestRepository.findAllByThirdPartyId(2L)).thenReturn(Collections.singletonList(request3));
// Set up the filter statement repository
FilterStatement filterStatement1 = new FilterStatement();
FilterStatement filterStatement2 = new FilterStatement();
FilterStatement filterStatement3 = new FilterStatement();
filterStatement1.setId(1L);
filterStatement1.setGroupRequest(request1);
filterStatement1.setColumn(FieldType.HEART_BEAT);
filterStatement1.setComparisonSymbol(ComparisonSymbol.LESS);
filterStatement1.setValue("100");
filterStatement2.setId(2L);
filterStatement2.setGroupRequest(request1);
filterStatement2.setColumn(FieldType.PRESSURE_MAX);
filterStatement2.setComparisonSymbol(ComparisonSymbol.EQUALS);
filterStatement2.setValue("125");
filterStatement3.setId(3L);
filterStatement3.setGroupRequest(request2);
filterStatement3.setColumn(FieldType.PRESSURE_MIN);
filterStatement3.setComparisonSymbol(ComparisonSymbol.GREATER);
filterStatement3.setValue("86");
Mockito.when(filterStatementRepository.findById(1L)).thenReturn(java.util.Optional.ofNullable(filterStatement1));
Mockito.when(filterStatementRepository.findById(2L)).thenReturn(java.util.Optional.ofNullable(filterStatement2));
Mockito.when(filterStatementRepository.findById(3L)).thenReturn(java.util.Optional.ofNullable(filterStatement3));
List<FilterStatement> list = new ArrayList<>();
list.add(filterStatement1);
list.add(filterStatement2);
Mockito.when(filterStatementRepository.findAllByGroupRequest_Id(1L)).thenReturn(list);
Mockito.when(filterStatementRepository.findAllByGroupRequest_Id(2L)).thenReturn(Collections.singletonList(filterStatement3));
}
/**
* Test the get of group request by id, when the request with the specified id is present
*/
@Test
public void testGetGroupRequestById() {
GroupRequest groupRequest = groupRequestManagerService.getById(1L).getGroupRequest();
assertEquals(new Long(1), groupRequest.getId());
}
/**
* Test the get of group request by id, when the request with the specified id is not present
*/
@Test(expected = GroupRequestNotFoundException.class)
public void testGetGroupRequestByIdWhenNotPresent() {
groupRequestManagerService.getById(100L);
}
/**
* Test get group request by third party id
*/
@Test
public void testGetRequestByThirdPartyId() {
List<GroupRequestWrapper> groupRequestWrapperList = groupRequestManagerService.getByThirdPartyId(1L);
for(GroupRequestWrapper groupRequestWrapper : groupRequestWrapperList) {
assertEquals(new Long(1), groupRequestWrapper.getGroupRequest().getThirdPartyId());
for(FilterStatement filterStatement : groupRequestWrapper.getFilterStatementList()) {
assertEquals(groupRequestWrapper.getGroupRequest().getId(), filterStatement.getGroupRequest().getId());
}
}
}
/**
* Test get group request by third party it when the third party is present
*/
@Test
public void testGetRequestByThirdPartyIdWhenNoRequest() {
assertTrue(groupRequestManagerService.getByThirdPartyId(100L).isEmpty());
}
/**
* Test the add of a new group request
*/
@Test
public void testAddRequestTest() {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setId(4L);
groupRequest.setThirdPartyId(2L);
groupRequest.setCreationTimestamp(new Timestamp(0));
groupRequest.setStatus(RequestStatus.REFUSED);
groupRequest.setAggregatorOperator(AggregatorOperator.COUNT);
groupRequest.setRequestType(RequestType.ALL);
FilterStatement filterStatement1 = new FilterStatement();
FilterStatement filterStatement2 = new FilterStatement();
filterStatement1.setGroupRequest(groupRequest);
filterStatement1.setId(1L);
filterStatement1.setColumn(FieldType.HEART_BEAT);
filterStatement1.setComparisonSymbol(ComparisonSymbol.LESS);
filterStatement1.setValue("100");
filterStatement2.setGroupRequest(groupRequest);
filterStatement1.setId(2L);
filterStatement2.setColumn(FieldType.PRESSURE_MAX);
filterStatement2.setComparisonSymbol(ComparisonSymbol.EQUALS);
filterStatement2.setValue("125");
List<FilterStatement> filterStatements = new ArrayList<>();
filterStatements.add(filterStatement1);
filterStatements.add(filterStatement2);
Mockito.when(groupRequestRepository.saveAndFlush(any(GroupRequest.class))).thenReturn(groupRequest);
Mockito.when(filterStatementRepository.save(filterStatement1)).thenReturn(filterStatement1);
Mockito.when(filterStatementRepository.save(filterStatement2)).thenReturn(filterStatement2);
Mockito.when(filterStatementRepository.findAllByGroupRequest_Id(1L)).thenReturn(filterStatements);
groupRequestManagerService.addGroupRequest(new GroupRequestWrapper(groupRequest, filterStatements));
verify(groupRequestRepository, times(1)).saveAndFlush(any(GroupRequest.class));
verify(filterStatementRepository, times(2)).save(any(FilterStatement.class));
}
/**
* Test the add of a new group request when the aggregator can be matched with the type of the request
*/
@Test
public void testAddRequestWithValidOperatorAndRequestType() {
List<AggregatorOperator> aggregatorOperatorList = new ArrayList<>();
aggregatorOperatorList.add(AggregatorOperator.COUNT);
aggregatorOperatorList.add(AggregatorOperator.DISTINCT_COUNT);
for(AggregatorOperator aggregatorOperator : AggregatorOperator.values()) {
for(RequestType requestType : RequestType.values()) {
if(requestType.isNumber() || !requestType.isNumber() && aggregatorOperatorList.contains(aggregatorOperator)) {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setId(4L);
groupRequest.setThirdPartyId(2L);
groupRequest.setCreationTimestamp(new Timestamp(0));
groupRequest.setStatus(RequestStatus.REFUSED);
groupRequest.setAggregatorOperator(AggregatorOperator.COUNT);
groupRequest.setRequestType(RequestType.ALL);
Mockito.when(groupRequestRepository.saveAndFlush(any(GroupRequest.class))).thenReturn(groupRequest);
Mockito.when(filterStatementRepository.findAllByGroupRequest_Id(1L)).thenReturn(new ArrayList<>());
groupRequestManagerService.addGroupRequest(new GroupRequestWrapper(groupRequest, new ArrayList<>()));
verify(groupRequestRepository).saveAndFlush(any(GroupRequest.class));
reset(groupRequestRepository);
}
}
}
}
/**
* Test the add of a new group request when the aggregator operator can't be matched with the type of request type
*/
@Test
public void testAddRequestWithInvalidOperatorAndRequestType() {
GroupRequest groupRequest;
List<AggregatorOperator> criticalAggregatorOperators = new ArrayList<>();
criticalAggregatorOperators.add(AggregatorOperator.MAX);
criticalAggregatorOperators.add(AggregatorOperator.MIN);
criticalAggregatorOperators.add(AggregatorOperator.AVG);
List<RequestType> criticalRequestType = new ArrayList<>();
criticalRequestType.add(RequestType.ALL);
criticalRequestType.add(RequestType.BIRTH_CITY);
criticalRequestType.add(RequestType.USER_SSN);
for (AggregatorOperator aggregatorOperator : criticalAggregatorOperators) {
for(RequestType requestType : criticalRequestType) {
groupRequest = new GroupRequest();
groupRequest.setId(4L);
groupRequest.setThirdPartyId(2L);
groupRequest.setCreationTimestamp(new Timestamp(0));
groupRequest.setStatus(RequestStatus.REFUSED);
groupRequest.setAggregatorOperator(aggregatorOperator);
groupRequest.setRequestType(requestType);
try {
groupRequestManagerService.addGroupRequest(new GroupRequestWrapper(groupRequest, new ArrayList<>()));
fail("Exception expected");
} catch(BadOperatorRequestTypeException e) {
}
}
}
}
} |
/**
* The parser is used to convert a SQL statement string to an command object.
*
* @author Thomas Mueller
* @author Noel Grandin
* @author Nicolas Fortin, Atelier SIG, IRSTV FR CNRS 24888
*/
public class Parser {
    private static final String WITH_STATEMENT_SUPPORTS_LIMITED_SUB_STATEMENTS =
            "WITH statement supports only SELECT, TABLE, VALUES, " +
            "CREATE TABLE, INSERT, UPDATE, MERGE or DELETE statements";
    // used during the tokenizer phase
    private static final int CHAR_END = 1, CHAR_VALUE = 2, CHAR_QUOTED = 3;
    private static final int CHAR_NAME = 4, CHAR_SPECIAL_1 = 5,
            CHAR_SPECIAL_2 = 6;
    private static final int CHAR_STRING = 7, CHAR_DOT = 8,
            CHAR_DOLLAR_QUOTED_STRING = 9;
    // these are token types, see also types in ParserUtil
    /**
     * Token with parameter.
     */
    private static final int PARAMETER = WITH + 1;
    /**
     * End of input.
     */
    private static final int END = PARAMETER + 1;
    /**
     * Token with value.
     */
    private static final int VALUE = END + 1;
    /**
     * The token "=".
     */
    private static final int EQUAL = VALUE + 1;
    /**
     * The token ">=".
     */
    private static final int BIGGER_EQUAL = EQUAL + 1;
    /**
     * The token ">".
     */
    private static final int BIGGER = BIGGER_EQUAL + 1;
    /**
     * The token "<".
     */
    private static final int SMALLER = BIGGER + 1;
    /**
     * The token "<=".
     */
    private static final int SMALLER_EQUAL = SMALLER + 1;
    /**
     * The token "<>" or "!=".
     */
    private static final int NOT_EQUAL = SMALLER_EQUAL + 1;
    /**
     * The token "@".
     */
    private static final int AT = NOT_EQUAL + 1;
    /**
     * The token "-".
     */
    private static final int MINUS_SIGN = AT + 1;
    /**
     * The token "+".
     */
    private static final int PLUS_SIGN = MINUS_SIGN + 1;
    /**
     * The token "||".
     */
    private static final int STRING_CONCAT = PLUS_SIGN + 1;
    /**
     * The token "(".
     */
    private static final int OPEN_PAREN = STRING_CONCAT + 1;
    /**
     * The token ")".
     */
    private static final int CLOSE_PAREN = OPEN_PAREN + 1;
    /**
     * The token "&&".
     */
    private static final int SPATIAL_INTERSECTS = CLOSE_PAREN + 1;
    /**
     * The token "*".
     */
    private static final int ASTERISK = SPATIAL_INTERSECTS + 1;
    /**
     * The token ",".
     */
    private static final int COMMA = ASTERISK + 1;
    /**
     * The token ".".
     */
    private static final int DOT = COMMA + 1;
    /**
     * The token "{".
     */
    private static final int OPEN_BRACE = DOT + 1;
    /**
     * The token "}".
     */
    private static final int CLOSE_BRACE = OPEN_BRACE + 1;
    /**
     * The token "/".
     */
    private static final int SLASH = CLOSE_BRACE + 1;
    /**
     * The token "%".
     */
    private static final int PERCENT = SLASH + 1;
    /**
     * The token ";".
     */
    private static final int SEMICOLON = PERCENT + 1;
    /**
     * The token ":".
     */
    private static final int COLON = SEMICOLON + 1;
    /**
     * The token "[".
     */
    private static final int OPEN_BRACKET = COLON + 1;
    /**
     * The token "]".
     */
    private static final int CLOSE_BRACKET = OPEN_BRACKET + 1;
    /**
     * The token "~".
     */
    private static final int TILDE = CLOSE_BRACKET + 1;
    /**
     * The token "::".
     */
    private static final int COLON_COLON = TILDE + 1;
    /**
     * The token ":=".
     */
    private static final int COLON_EQ = COLON_COLON + 1;
    /**
     * The token "!~".
     */
    private static final int NOT_TILDE = COLON_EQ + 1;
    // Text of each token type, indexed by the constants above; entries are
    // null for token types that have no fixed text (keywords, identifiers,
    // values). Must stay in sync with the constants and with ParserUtil.
    private static final String[] TOKENS = {
            // Unused
            null,
            // KEYWORD
            null,
            // IDENTIFIER
            null,
            // ALL
            "ALL",
            // ARRAY
            "ARRAY",
            // CASE
            "CASE",
            // CHECK
            "CHECK",
            // CONSTRAINT
            "CONSTRAINT",
            // CROSS
            "CROSS",
            // CURRENT_DATE
            "CURRENT_DATE",
            // CURRENT_TIME
            "CURRENT_TIME",
            // CURRENT_TIMESTAMP
            "CURRENT_TIMESTAMP",
            // CURRENT_USER
            "CURRENT_USER",
            // DISTINCT
            "DISTINCT",
            // EXCEPT
            "EXCEPT",
            // EXISTS
            "EXISTS",
            // FALSE
            "FALSE",
            // FETCH
            "FETCH",
            // FOR
            "FOR",
            // FOREIGN
            "FOREIGN",
            // FROM
            "FROM",
            // FULL
            "FULL",
            // GROUP
            "GROUP",
            // HAVING
            "HAVING",
            // IF
            "IF",
            // INNER
            "INNER",
            // INTERSECT
            "INTERSECT",
            // INTERSECTS
            "INTERSECTS",
            // INTERVAL
            "INTERVAL",
            // IS
            "IS",
            // JOIN
            "JOIN",
            // LIKE
            "LIKE",
            // LIMIT
            "LIMIT",
            // LOCALTIME
            "LOCALTIME",
            // LOCALTIMESTAMP
            "LOCALTIMESTAMP",
            // MINUS
            "MINUS",
            // NATURAL
            "NATURAL",
            // NOT
            "NOT",
            // NULL
            "NULL",
            // OFFSET
            "OFFSET",
            // ON
            "ON",
            // ORDER
            "ORDER",
            // PRIMARY
            "PRIMARY",
            // QUALIFY
            "QUALIFY",
            // ROW
            "ROW",
            // _ROWID_
            "_ROWID_",
            // ROWNUM
            "ROWNUM",
            // SELECT
            "SELECT",
            // TABLE
            "TABLE",
            // TRUE
            "TRUE",
            // UNION
            "UNION",
            // UNIQUE
            "UNIQUE",
            // VALUES
            "VALUES",
            // WHERE
            "WHERE",
            // WINDOW
            "WINDOW",
            // WITH
            "WITH",
            // PARAMETER
            "?",
            // END
            null,
            // VALUE
            null,
            // EQUAL
            "=",
            // BIGGER_EQUAL
            ">=",
            // BIGGER
            ">",
            // SMALLER
            "<",
            // SMALLER_EQUAL
            "<=",
            // NOT_EQUAL
            "<>",
            // AT
            "@",
            // MINUS_SIGN
            "-",
            // PLUS_SIGN
            "+",
            // STRING_CONCAT
            "||",
            // OPEN_PAREN
            "(",
            // CLOSE_PAREN
            ")",
            // SPATIAL_INTERSECTS
            "&&",
            // ASTERISK
            "*",
            // COMMA
            ",",
            // DOT
            ".",
            // OPEN_BRACE
            "{",
            // CLOSE_BRACE
            "}",
            // SLASH
            "/",
            // PERCENT
            "%",
            // SEMICOLON
            ";",
            // COLON
            ":",
            // OPEN_BRACKET
            "[",
            // CLOSE_BRACKET
            "]",
            // TILDE
            "~",
            // COLON_COLON
            "::",
            // COLON_EQ
            ":=",
            // NOT_TILDE
            "!~",
            // End
    };
    /** Orders table filters by their position in the FROM clause. */
    private static final Comparator<TableFilter> TABLE_FILTER_COMPARATOR =
            new Comparator<TableFilter>() {
        @Override
        public int compare(TableFilter o1, TableFilter o2) {
            if (o1 == o2)
                return 0;
            assert o1.getOrderInFrom() != o2.getOrderInFrom();
            return o1.getOrderInFrom() > o2.getOrderInFrom() ? 1 : -1;
        }
    };
    private final Database database;
    private final Session session;
    /**
     * @see org.h2.engine.DbSettings#databaseToLower
     */
    private final boolean identifiersToLower;
    /**
     * @see org.h2.engine.DbSettings#databaseToUpper
     */
    private final boolean identifiersToUpper;
    /** indicates character-type for each char in sqlCommand */
    private int[] characterTypes;
    /** type of the current token (one of the token constants above) */
    private int currentTokenType;
    /** text of the current token */
    private String currentToken;
    /** whether the current token was a quoted identifier */
    private boolean currentTokenQuoted;
    /** literal value of the current token, if currentTokenType == VALUE */
    private Value currentValue;
    /** original, unmodified SQL statement text */
    private String originalSQL;
    /** copy of originalSQL, with comments blanked out */
    private String sqlCommand;
    /** cached array of chars from sqlCommand */
    private char[] sqlCommandChars;
    /** index into sqlCommand of previous token */
    private int lastParseIndex;
    /** index into sqlCommand of current token */
    private int parseIndex;
    private CreateView createView;
    private Prepared currentPrepared;
    private Select currentSelect;
    private ArrayList<Parameter> parameters;
    private String schemaName;
    /** tokens expected at the failure point; only collected on the slow,
     * detailed parse pass (see parse(String, boolean)) */
    private ArrayList<String> expectedList;
    private boolean rightsChecked;
    private boolean recompileAlways;
    private boolean literalsChecked;
    /** parameters addressed by index (?1 style); filled in parsePrepared */
    private ArrayList<Parameter> indexedParameterList;
    private int orderInFrom;
    /** externally supplied parameter list, copied into indexedParameterList */
    private ArrayList<Parameter> suppliedParameterList;
    /**
     * Creates a parser for the given session; identifier-casing flags are
     * taken from the database settings.
     *
     * @param session the session (also provides the database)
     */
    public Parser(Session session) {
        this.database = session.getDatabase();
        this.identifiersToLower = database.getSettings().databaseToLower;
        this.identifiersToUpper = database.getSettings().databaseToUpper;
        this.session = session;
    }
    /**
     * Parse the statement and prepare it for execution.
     *
     * @param sql the SQL statement to parse
     * @return the prepared object
     * @throws DbException with a syntax error if unparsed tokens remain
     *         after the statement (unlike prepareCommand, even a ';')
     */
    public Prepared prepare(String sql) {
        Prepared p = parse(sql);
        p.prepare();
        // exactly one statement is allowed here; anything left over is an error
        if (currentTokenType != END) {
            throw getSyntaxError();
        }
        return p;
    }
    /**
     * Parse a statement or a list of statements, and prepare it for execution.
     *
     * @param sql the SQL statement to parse
     * @return the command object
     */
    public Command prepareCommand(String sql) {
        try {
            Prepared p = parse(sql);
            // a ';' after the first statement means more statements may follow
            boolean hasMore = isToken(SEMICOLON);
            if (!hasMore && currentTokenType != END) {
                throw getSyntaxError();
            }
            try {
                p.prepare();
            } catch (Throwable t) {
                // drop any common table expressions registered during parsing,
                // then rethrow the original failure
                CommandContainer.clearCTE(session, p);
                throw t;
            }
            Command c = new CommandContainer(session, sql, p);
            if (hasMore) {
                String remaining = originalSQL.substring(parseIndex);
                if (!StringUtils.isWhitespaceOrEmpty(remaining)) {
                    // remaining statements are parsed lazily by CommandList
                    c = new CommandList(session, sql, c, remaining);
                }
            }
            return c;
        } catch (DbException e) {
            throw e.addSQL(originalSQL);
        }
    }
/**
* Parse the statement, but don't prepare it for execution.
*
* @param sql the SQL statement to parse
* @return the prepared object
*/
Prepared parse(String sql) {
Prepared p;
try {
// first, try the fast variant
p = parse(sql, false);
} catch (DbException e) {
if (e.getErrorCode() == ErrorCode.SYNTAX_ERROR_1) {
// now, get the detailed exception
p = parse(sql, true);
} else {
throw e.addSQL(sql);
}
}
p.setPrepareAlways(recompileAlways);
p.setParameterList(parameters);
return p;
}
private Prepared parse(String sql, boolean withExpectedList) {
initialize(sql);
if (withExpectedList) {
expectedList = new ArrayList<>();
} else {
expectedList = null;
}
parameters = Utils.newSmallArrayList();
currentSelect = null;
currentPrepared = null;
createView = null;
recompileAlways = false;
indexedParameterList = suppliedParameterList;
read();
return parsePrepared();
}
    /**
     * Parses one statement and returns the corresponding Prepared object.
     * Statement kind is decided by the current token; unquoted identifiers
     * are dispatched on their first character to limit readIf() probes.
     */
    private Prepared parsePrepared() {
        // remember where this statement starts so setSQL() can attach its text
        int start = lastParseIndex;
        Prepared c = null;
        switch (currentTokenType) {
        case END:
        case SEMICOLON:
            // empty statement
            c = new NoOperation(session);
            setSQL(c, null, start);
            return c;
        case PARAMETER:
            // read the ? as a parameter
            readTerm();
            // this is an 'out' parameter - set a dummy value
            parameters.get(0).setValue(ValueNull.INSTANCE);
            read(EQUAL);
            read("CALL");
            c = parseCall();
            break;
        case OPEN_PAREN:
        case FROM:
        case SELECT:
        case TABLE:
        case VALUES:
            c = parseSelect();
            break;
        case WITH:
            read();
            c = parseWithStatementOrQuery();
            break;
        case IDENTIFIER:
            if (currentTokenQuoted) {
                break;
            }
            // dispatch on the first character of the command keyword
            switch (currentToken.charAt(0)) {
            case 'a':
            case 'A':
                if (readIf("ALTER")) {
                    c = parseAlter();
                } else if (readIf("ANALYZE")) {
                    c = parseAnalyze();
                }
                break;
            case 'b':
            case 'B':
                if (readIf("BACKUP")) {
                    c = parseBackup();
                } else if (readIf("BEGIN")) {
                    c = parseBegin();
                }
                break;
            case 'c':
            case 'C':
                if (readIf("COMMIT")) {
                    c = parseCommit();
                } else if (readIf("CREATE")) {
                    c = parseCreate();
                } else if (readIf("CALL")) {
                    c = parseCall();
                } else if (readIf("CHECKPOINT")) {
                    c = parseCheckpoint();
                } else if (readIf("COMMENT")) {
                    c = parseComment();
                }
                break;
            case 'd':
            case 'D':
                if (readIf("DELETE")) {
                    c = parseDelete();
                } else if (readIf("DROP")) {
                    c = parseDrop();
                } else if (readIf("DECLARE")) {
                    // support for DECLARE GLOBAL TEMPORARY TABLE...
                    c = parseCreate();
                } else if (readIf("DEALLOCATE")) {
                    c = parseDeallocate();
                }
                break;
            case 'e':
            case 'E':
                if (readIf("EXPLAIN")) {
                    c = parseExplain();
                } else if (readIf("EXECUTE")) {
                    c = parseExecute();
                }
                break;
            case 'g':
            case 'G':
                if (readIf("GRANT")) {
                    c = parseGrantRevoke(CommandInterface.GRANT);
                }
                break;
            case 'h':
            case 'H':
                if (readIf("HELP")) {
                    c = parseHelp();
                }
                break;
            case 'i':
            case 'I':
                if (readIf("INSERT")) {
                    c = parseInsert();
                }
                break;
            case 'm':
            case 'M':
                if (readIf("MERGE")) {
                    c = parseMerge();
                }
                break;
            case 'p':
            case 'P':
                if (readIf("PREPARE")) {
                    c = parsePrepare();
                }
                break;
            case 'r':
            case 'R':
                if (readIf("ROLLBACK")) {
                    c = parseRollback();
                } else if (readIf("REVOKE")) {
                    c = parseGrantRevoke(CommandInterface.REVOKE);
                } else if (readIf("RUNSCRIPT")) {
                    c = parseRunScript();
                } else if (readIf("RELEASE")) {
                    c = parseReleaseSavepoint();
                } else if (readIf("REPLACE")) {
                    c = parseReplace();
                }
                break;
            case 's':
            case 'S':
                if (readIf("SET")) {
                    c = parseSet();
                } else if (readIf("SAVEPOINT")) {
                    c = parseSavepoint();
                } else if (readIf("SCRIPT")) {
                    c = parseScript();
                } else if (readIf("SHUTDOWN")) {
                    c = parseShutdown();
                } else if (readIf("SHOW")) {
                    c = parseShow();
                }
                break;
            case 't':
            case 'T':
                if (readIf("TRUNCATE")) {
                    c = parseTruncate();
                }
                break;
            case 'u':
            case 'U':
                if (readIf("UPDATE")) {
                    c = parseUpdate();
                } else if (readIf("USE")) {
                    c = parseUse();
                }
                break;
            }
        }
        if (c == null) {
            throw getSyntaxError();
        }
        if (indexedParameterList != null) {
            // indexed parameters (?1 style): fill unreferenced slots with
            // dummy parameters so the list has no null gaps
            for (int i = 0, size = indexedParameterList.size();
                    i < size; i++) {
                if (indexedParameterList.get(i) == null) {
                    indexedParameterList.set(i, new Parameter(i));
                }
            }
            parameters = indexedParameterList;
        }
        if (readIf(OPEN_BRACE)) {
            // optional "{ <index>: <expression>, ... }" suffix assigns literal
            // values to the statement's parameters (1-based indexes)
            do {
                int index = (int) readLong() - 1;
                if (index < 0 || index >= parameters.size()) {
                    throw getSyntaxError();
                }
                Parameter p = parameters.get(index);
                if (p == null) {
                    throw getSyntaxError();
                }
                read(COLON);
                Expression expr = readExpression();
                expr = expr.optimize(session);
                p.setValue(expr.getValue(session));
            } while (readIf(COMMA));
            read(CLOSE_BRACE);
            for (Parameter p : parameters) {
                p.checkSet();
            }
            // all parameters are bound now, so the statement exposes none
            parameters.clear();
        }
        setSQL(c, null, start);
        return c;
    }
private DbException getSyntaxError() {
if (expectedList == null || expectedList.isEmpty()) {
return DbException.getSyntaxError(sqlCommand, parseIndex);
}
return DbException.getSyntaxError(sqlCommand, parseIndex,
StringUtils.join(new StringBuilder(), expectedList, ", ").toString());
}
private Prepared parseBackup() {
BackupCommand command = new BackupCommand(session);
read("TO");
command.setFileName(readExpression());
return command;
}
private Prepared parseAnalyze() {
Analyze command = new Analyze(session);
if (readIf(TABLE)) {
Table table = readTableOrView();
command.setTable(table);
}
if (readIf("SAMPLE_SIZE")) {
command.setTop(readNonNegativeInt());
}
return command;
}
private TransactionCommand parseBegin() {
TransactionCommand command;
if (!readIf("WORK")) {
readIf("TRANSACTION");
}
command = new TransactionCommand(session, CommandInterface.BEGIN);
return command;
}
private TransactionCommand parseCommit() {
TransactionCommand command;
if (readIf("TRANSACTION")) {
command = new TransactionCommand(session,
CommandInterface.COMMIT_TRANSACTION);
command.setTransactionName(readUniqueIdentifier());
return command;
}
command = new TransactionCommand(session,
CommandInterface.COMMIT);
readIf("WORK");
return command;
}
private TransactionCommand parseShutdown() {
int type = CommandInterface.SHUTDOWN;
if (readIf("IMMEDIATELY")) {
type = CommandInterface.SHUTDOWN_IMMEDIATELY;
} else if (readIf("COMPACT")) {
type = CommandInterface.SHUTDOWN_COMPACT;
} else if (readIf("DEFRAG")) {
type = CommandInterface.SHUTDOWN_DEFRAG;
} else {
readIf("SCRIPT");
}
return new TransactionCommand(session, type);
}
/**
 * Parses ROLLBACK TRANSACTION {name} (two-phase commit),
 * ROLLBACK TO SAVEPOINT {name}, or ROLLBACK [WORK].
 *
 * @return the rollback command
 */
private TransactionCommand parseRollback() {
    if (readIf("TRANSACTION")) {
        // rollback of a prepared (2PC) transaction by name
        TransactionCommand named = new TransactionCommand(session,
                CommandInterface.ROLLBACK_TRANSACTION);
        named.setTransactionName(readUniqueIdentifier());
        return named;
    }
    TransactionCommand command;
    if (readIf("TO")) {
        read("SAVEPOINT");
        command = new TransactionCommand(session,
                CommandInterface.ROLLBACK_TO_SAVEPOINT);
        command.setSavepointName(readUniqueIdentifier());
    } else {
        // optional noise word
        readIf("WORK");
        command = new TransactionCommand(session,
                CommandInterface.ROLLBACK);
    }
    return command;
}
/**
 * Parses PREPARE COMMIT {transactionName} (two-phase commit) or
 * PREPARE {procedureName} [(column, ...)] AS {statement}.
 *
 * @return the prepared-commit transaction command or the procedure
 *         definition command
 */
private Prepared parsePrepare() {
    if (readIf("COMMIT")) {
        TransactionCommand command = new TransactionCommand(session,
                CommandInterface.PREPARE_COMMIT);
        command.setTransactionName(readUniqueIdentifier());
        return command;
    }
    String procedureName = readAliasIdentifier();
    if (readIf(OPEN_PAREN)) {
        // The optional column list is parsed only to advance the tokenizer;
        // the resulting columns were never attached to the procedure, so do
        // not accumulate them into a throw-away list (dead code removed).
        for (int i = 0;; i++) {
            parseColumnForTable("C" + i, true, false);
            if (!readIfMore(true)) {
                break;
            }
        }
    }
    read("AS");
    Prepared prep = parsePrepared();
    PrepareProcedure command = new PrepareProcedure(session);
    command.setProcedureName(procedureName);
    command.setPrepared(prep);
    return command;
}
/**
 * Parses SAVEPOINT {savepointName}.
 *
 * @return the savepoint command
 */
private TransactionCommand parseSavepoint() {
    TransactionCommand savepoint = new TransactionCommand(session,
            CommandInterface.SAVEPOINT);
    savepoint.setSavepointName(readUniqueIdentifier());
    return savepoint;
}
/**
 * Parses RELEASE [SAVEPOINT] {savepointName}. The statement is accepted
 * for compatibility but is a no-op: the name is read and discarded.
 *
 * @return a no-operation command
 */
private Prepared parseReleaseSavepoint() {
    Prepared noOp = new NoOperation(session);
    readIf("SAVEPOINT");
    readUniqueIdentifier();
    return noOp;
}
/**
 * Looks up a schema by name, returning {@code null} when the name is
 * {@code null} or no such schema exists. The special name SESSION maps
 * to the session's current schema (used for local temporary tables).
 *
 * @param schemaName the schema name, may be {@code null}
 * @return the schema, or {@code null} if not found
 */
private Schema findSchema(String schemaName) {
    if (schemaName == null) {
        return null;
    }
    Schema schema = database.findSchema(schemaName);
    if (schema == null && equalsToken("SESSION", schemaName)) {
        // local temporary tables live in the session's current schema
        schema = database.getSchema(session.getCurrentSchemaName());
    }
    return schema;
}
/**
 * Looks up a schema by name, throwing when it does not exist.
 *
 * @param schemaName the schema name, may be {@code null}
 * @return the schema, or {@code null} when {@code schemaName} is {@code null}
 * @throws DbException with SCHEMA_NOT_FOUND_1 when no such schema exists
 */
private Schema getSchema(String schemaName) {
    if (schemaName == null) {
        return null;
    }
    Schema schema = findSchema(schemaName);
    if (schema != null) {
        return schema;
    }
    throw DbException.get(ErrorCode.SCHEMA_NOT_FOUND_1, schemaName);
}
/**
 * Returns the schema referenced by the {@code schemaName} parser field
 * (set by the last qualified-identifier read), throwing when it does not
 * exist; returns {@code null} when no schema name is set.
 */
private Schema getSchema() {
    return getSchema(schemaName);
}
/*
* Gets the current schema for scenarios that need a guaranteed, non-null schema object.
*
* This routine is solely here
* because of the function readIdentifierWithSchema(String defaultSchemaName) - which
* is often called with a null parameter (defaultSchemaName) - then 6 lines into the function
* that routine nullifies the state field schemaName - which I believe is a bug.
*
* There are about 7 places where "readIdentifierWithSchema(null)" is called in this file.
*
* In other words when is it legal to not have an active schema defined by schemaName ?
* I don't think it's ever a valid case. I don't understand when that would be allowed.
* I spent a long time trying to figure this out.
* As another proof of this point, the command "SET SCHEMA=NULL" is not a valid command.
*
* I did try to fix this in readIdentifierWithSchema(String defaultSchemaName)
* - but every fix I tried cascaded so many unit test errors - so
 * I gave up. I think this needs a bigger effort to fix this, as part of a bigger, dedicated story.
*
*/
/**
 * Returns a guaranteed non-null schema: when the parser's
 * {@code schemaName} field is unset, it is first filled in with the
 * session's current schema name.
 *
 * @return the resolved schema (never {@code null})
 */
private Schema getSchemaWithDefault() {
    if (schemaName == null) {
        // fall back to the session's current schema when none was parsed
        schemaName = session.getCurrentSchemaName();
    }
    return getSchema(schemaName);
}
/**
 * Reads a possibly qualified column reference for the given table filter.
 * Accepts up to four levels of qualification
 * (database.schema.table.column) and the _ROWID_ pseudo-column at any
 * level; every qualifier is validated against the filter's table.
 *
 * @param filter the table filter the column must belong to
 * @return the resolved column, or the row id column when _ROWID_ was read
 * @throws DbException when a qualifier does not match the database,
 *         schema, or table alias of the filter
 */
private Column readTableColumn(TableFilter filter) {
    boolean rowId = false;
    String columnName = null;
    if (currentTokenType == _ROWID_) {
        read();
        rowId = true;
    } else {
        columnName = readColumnIdentifier();
        if (readIf(DOT)) {
            // at least alias.column; keep peeling off qualifiers
            String tableAlias = columnName;
            if (currentTokenType == _ROWID_) {
                read();
                rowId = true;
            } else {
                columnName = readColumnIdentifier();
                if (readIf(DOT)) {
                    // schema.table.column (or deeper)
                    String schema = tableAlias;
                    tableAlias = columnName;
                    if (currentTokenType == _ROWID_) {
                        read();
                        rowId = true;
                    } else {
                        columnName = readColumnIdentifier();
                        if (readIf(DOT)) {
                            // database.schema.table.column: the leading
                            // qualifier must be this database's name
                            if (!equalsToken(schema, database.getShortName())) {
                                throw DbException.get(ErrorCode.DATABASE_NOT_FOUND_1, schema);
                            }
                            schema = tableAlias;
                            tableAlias = columnName;
                            if (currentTokenType == _ROWID_) {
                                read();
                                rowId = true;
                            } else {
                                columnName = readColumnIdentifier();
                            }
                        }
                    }
                    if (!equalsToken(schema, filter.getTable().getSchema().getName())) {
                        throw DbException.get(ErrorCode.SCHEMA_NOT_FOUND_1, schema);
                    }
                }
                if (!equalsToken(tableAlias, filter.getTableAlias())) {
                    throw DbException.get(ErrorCode.TABLE_OR_VIEW_NOT_FOUND_1, tableAlias);
                }
            }
        }
    }
    return rowId ? filter.getRowIdColumn() : filter.getTable().getColumn(columnName);
}
/**
 * Parses an UPDATE statement: the target table, then the SET clause via
 * {@link #parseUpdateSetClause} with compatibility extensions
 * (ORDER BY / LIMIT) allowed.
 *
 * @return the update command
 */
private Update parseUpdate() {
    Update update = new Update(session);
    currentPrepared = update;
    // remember where the statement text starts for setSQL
    int start = lastParseIndex;
    TableFilter targetFilter = readSimpleTableFilter(0, null);
    update.setTableFilter(targetFilter);
    parseUpdateSetClause(update, targetFilter, start, true);
    return update;
}
/**
 * Parses the SET clause of an UPDATE (also reused by MERGE ... WHEN
 * MATCHED THEN UPDATE). Supports both the row-value form
 * SET (a, b) = (expr) and the plain form SET a = expr, b = expr,
 * plus an optional WHERE clause and, when {@code allowExtensions} is
 * set, MySQL-compatibility ORDER BY (ignored) and LIMIT clauses.
 *
 * @param command the update command to fill in
 * @param filter the target table filter columns are resolved against
 * @param start the SQL text offset where the statement began
 * @param allowExtensions whether ORDER BY / LIMIT extensions are allowed
 */
private void parseUpdateSetClause(Update command, TableFilter filter, int start, boolean allowExtensions) {
    read("SET");
    if (readIf(OPEN_PAREN)) {
        // row-value form: SET (c1, c2, ...) = expression
        ArrayList<Column> columns = Utils.newSmallArrayList();
        do {
            Column column = readTableColumn(filter);
            columns.add(column);
        } while (readIfMore(true));
        read(EQUAL);
        Expression expression = readExpression();
        if (columns.size() == 1 && (expression.getType() == null || expression.getType().getValueType() != Value.ROW)) {
            // the expression is parsed as a simple value
            command.setAssignment(columns.get(0), expression);
        } else {
            // distribute the row value: column i gets ARRAY_GET(expr, i+1)
            for (int i = 0, size = columns.size(); i < size; i++) {
                Column column = columns.get(i);
                Function f = Function.getFunction(database, "ARRAY_GET");
                f.setParameter(0, expression);
                f.setParameter(1, ValueExpression.get(ValueInt.get(i + 1)));
                f.doneWithParameters();
                command.setAssignment(column, f);
            }
        }
    } else {
        // plain form: SET c1 = e1, c2 = e2, ...
        do {
            Column column = readTableColumn(filter);
            read(EQUAL);
            command.setAssignment(column, readExpressionOrDefault());
        } while (readIf(COMMA));
    }
    if (readIf(WHERE)) {
        Expression condition = readExpression();
        command.setCondition(condition);
    }
    if (allowExtensions) {
        if (readIf(ORDER)) {
            // for MySQL compatibility
            // (this syntax is supported, but ignored)
            read("BY");
            parseSimpleOrderList();
        }
        if (readIf(LIMIT)) {
            Expression limit = readTerm().optimize(session);
            command.setLimit(limit);
        }
    }
    setSQL(command, "UPDATE", start);
}
/**
 * Reads a table (or view) reference with an optional alias, without join
 * support. An identifier following the table name is taken as an alias
 * unless it is SET or one of the caller-supplied excluded tokens.
 *
 * @param orderInFrom position of this filter in the FROM clause
 * @param excludeTokens identifiers that must not be consumed as an alias,
 *        or {@code null}
 * @return the table filter
 */
private TableFilter readSimpleTableFilter(int orderInFrom, Collection<String> excludeTokens) {
    Table table = readTableOrView();
    String alias = null;
    if (readIf("AS")) {
        alias = readAliasIdentifier();
    } else if (currentTokenType == IDENTIFIER) {
        if (!equalsTokenIgnoreCase(currentToken, "SET")
                && (excludeTokens == null || !isTokenInList(excludeTokens))) {
            // SET is not a keyword (PostgreSQL supports it as a table name)
            alias = readAliasIdentifier();
        }
    }
    return new TableFilter(session, table, alias, rightsChecked,
            currentSelect, orderInFrom, null);
}
/**
 * Parses a DELETE statement, including the TOP n prefix and trailing
 * LIMIT n compatibility forms (TOP wins when both are present, since
 * LIMIT is only read when no TOP was given). In MySQL mode the FROM
 * keyword may be preceded by a redundant table name
 * (DELETE tbl FROM tbl ...).
 *
 * @return the delete command
 */
private Delete parseDelete() {
    Delete command = new Delete(session);
    Expression limit = null;
    if (readIf("TOP")) {
        limit = readTerm().optimize(session);
    }
    currentPrepared = command;
    int start = lastParseIndex;
    if (!readIf(FROM) && database.getMode().getEnum() == ModeEnum.MySQL) {
        // MySQL multi-table syntax: DELETE tbl FROM tbl — skip the first name
        readIdentifierWithSchema();
        read(FROM);
    }
    TableFilter filter = readSimpleTableFilter(0, null);
    command.setTableFilter(filter);
    if (readIf(WHERE)) {
        command.setCondition(readExpression());
    }
    if (limit == null && readIf(LIMIT)) {
        limit = readTerm().optimize(session);
    }
    command.setLimit(limit);
    setSQL(command, "DELETE", start);
    return command;
}
/**
 * Parses a parenthesized list of index columns, each with an optional
 * sort specification (ASC/DESC, NULLS FIRST/LAST), consuming the closing
 * parenthesis.
 *
 * @return the parsed index columns
 */
private IndexColumn[] parseIndexColumnList() {
    ArrayList<IndexColumn> result = Utils.newSmallArrayList();
    do {
        IndexColumn indexColumn = new IndexColumn();
        indexColumn.columnName = readColumnIdentifier();
        indexColumn.sortType = parseSortType();
        result.add(indexColumn);
    } while (readIfMore(true));
    return result.toArray(new IndexColumn[0]);
}
/**
 * Parses a full sort specification: ASC/DESC followed by an optional
 * NULLS FIRST / NULLS LAST modifier.
 *
 * @return the combined {@link SortOrder} bit mask
 */
private int parseSortType() {
    int result = parseSimpleSortType();
    if (readIf("NULLS")) {
        if (readIf("FIRST")) {
            result |= SortOrder.NULLS_FIRST;
        } else {
            // only FIRST or LAST are valid after NULLS
            read("LAST");
            result |= SortOrder.NULLS_LAST;
        }
    }
    return result;
}
/**
 * Parses an optional ASC or DESC keyword.
 *
 * @return {@link SortOrder#DESCENDING} when DESC was read, otherwise
 *         {@link SortOrder#ASCENDING} (ASC is the default)
 */
private int parseSimpleSortType() {
    if (readIf("ASC")) {
        return SortOrder.ASCENDING;
    }
    if (readIf("DESC")) {
        return SortOrder.DESCENDING;
    }
    return SortOrder.ASCENDING;
}
/**
 * Parses a comma-separated list of column names, consuming the closing
 * parenthesis (a trailing comma before it is tolerated).
 *
 * @return the column names in parse order
 */
private String[] parseColumnList() {
    ArrayList<String> names = Utils.newSmallArrayList();
    do {
        names.add(readColumnIdentifier());
    } while (readIfMore(false));
    return names.toArray(new String[0]);
}
/**
 * Parses a possibly empty, comma-separated list of columns of the given
 * table, consuming the closing parenthesis and rejecting duplicates.
 *
 * @param table the table the columns must belong to
 * @return the resolved columns in parse order
 * @throws DbException with DUPLICATE_COLUMN_NAME_1 on a repeated column
 */
private Column[] parseColumnList(Table table) {
    ArrayList<Column> result = Utils.newSmallArrayList();
    HashSet<Column> seen = new HashSet<>();
    if (!readIf(CLOSE_PAREN)) {
        do {
            Column column = parseColumn(table);
            if (!seen.add(column)) {
                // each column may appear at most once in the list
                throw DbException.get(ErrorCode.DUPLICATE_COLUMN_NAME_1, column.getSQL(false));
            }
            result.add(column);
        } while (readIfMore(false));
    }
    return result.toArray(new Column[0]);
}
/**
 * Reads a single column of the given table, accepting the _ROWID_
 * pseudo-column.
 *
 * @param table the table to resolve the column against
 * @return the resolved column
 */
private Column parseColumn(Table table) {
    if (currentTokenType != _ROWID_) {
        return table.getColumn(readColumnIdentifier());
    }
    read();
    return table.getRowIdColumn();
}
/**
 * Reads the list separator: either a comma (more elements follow) or the
 * closing parenthesis (end of list).
 *
 * @param strict
 *            if {@code false}, a trailing comma directly before the
 *            closing parenthesis is tolerated
 * @return {@code true} if a comma was read, {@code false} if the closing
 *         parenthesis was read
 */
private boolean readIfMore(boolean strict) {
    if (!readIf(COMMA)) {
        read(CLOSE_PAREN);
        return false;
    }
    // comma consumed; in lenient mode a closing parenthesis may follow it
    return strict || !readIf(CLOSE_PAREN);
}
/**
 * Parses a HELP statement: builds a SELECT * over
 * INFORMATION_SCHEMA.HELP where UPPER(TOPIC) matches every remaining
 * token as a LIKE '%token%' condition.
 *
 * @return the initialized select over the HELP table
 */
private Prepared parseHelp() {
    Select select = new Select(session, null);
    select.setWildcard();
    String informationSchema = database.sysIdentifier("INFORMATION_SCHEMA");
    Table table = database.getSchema(informationSchema)
            .resolveTableOrView(session, database.sysIdentifier("HELP"));
    // match case-insensitively by uppercasing the TOPIC column
    Function function = Function.getFunction(database, "UPPER");
    function.setParameter(0, new ExpressionColumn(database, informationSchema,
            database.sysIdentifier("HELP"), database.sysIdentifier("TOPIC"), false));
    function.doneWithParameters();
    TableFilter filter = new TableFilter(session, table, null, rightsChecked, select, 0, null);
    select.addTableFilter(filter, true);
    // every remaining token narrows the result with another LIKE condition
    while (currentTokenType != END) {
        String s = currentToken;
        read();
        CompareLike like = new CompareLike(database, function,
                ValueExpression.get(ValueString.get('%' + s + '%')), null, false);
        select.addCondition(like);
    }
    select.init();
    return select;
}
/**
 * Parses a SHOW statement (PostgreSQL / MySQL compatibility) by
 * translating it into an equivalent SELECT, which is then prepared with
 * literals temporarily allowed.
 *
 * @return the prepared translated SELECT
 */
private Prepared parseShow() {
    ArrayList<Value> paramValues = Utils.newSmallArrayList();
    StringBuilder buff = new StringBuilder("SELECT ");
    if (readIf("CLIENT_ENCODING")) {
        // for PostgreSQL compatibility
        buff.append("'UNICODE' AS CLIENT_ENCODING FROM DUAL");
    } else if (readIf("DEFAULT_TRANSACTION_ISOLATION")) {
        // for PostgreSQL compatibility
        buff.append("'read committed' AS DEFAULT_TRANSACTION_ISOLATION " +
                "FROM DUAL");
    } else if (readIf("TRANSACTION")) {
        // for PostgreSQL compatibility
        read("ISOLATION");
        read("LEVEL");
        buff.append("'read committed' AS TRANSACTION_ISOLATION " +
                "FROM DUAL");
    } else if (readIf("DATESTYLE")) {
        // for PostgreSQL compatibility
        buff.append("'ISO' AS DATESTYLE FROM DUAL");
    } else if (readIf("SERVER_VERSION")) {
        // for PostgreSQL compatibility
        buff.append("'" + Constants.PG_VERSION + "' AS SERVER_VERSION FROM DUAL");
    } else if (readIf("SERVER_ENCODING")) {
        // for PostgreSQL compatibility
        buff.append("'UTF8' AS SERVER_ENCODING FROM DUAL");
    } else if (readIf("TABLES")) {
        // for MySQL compatibility
        String schema = database.getMainSchema().getName();
        if (readIf(FROM)) {
            schema = readUniqueIdentifier();
        }
        buff.append("TABLE_NAME, TABLE_SCHEMA FROM "
                + "INFORMATION_SCHEMA.TABLES "
                + "WHERE TABLE_SCHEMA=? ORDER BY TABLE_NAME");
        paramValues.add(ValueString.get(schema));
    } else if (readIf("COLUMNS")) {
        // for MySQL compatibility
        read(FROM);
        String tableName = readIdentifierWithSchema();
        String schemaName = getSchema().getName();
        // first parameter: table name (schema parameter is added below,
        // after the optional FROM <schema> override)
        paramValues.add(ValueString.get(tableName));
        if (readIf(FROM)) {
            schemaName = readUniqueIdentifier();
        }
        buff.append("C.COLUMN_NAME FIELD, "
                + "C.TYPE_NAME || '(' || C.NUMERIC_PRECISION || ')' TYPE, "
                + "C.IS_NULLABLE \"NULL\", "
                + "CASE (SELECT MAX(I.INDEX_TYPE_NAME) FROM "
                + "INFORMATION_SCHEMA.INDEXES I "
                + "WHERE I.TABLE_SCHEMA=C.TABLE_SCHEMA "
                + "AND I.TABLE_NAME=C.TABLE_NAME "
                + "AND I.COLUMN_NAME=C.COLUMN_NAME)"
                + "WHEN 'PRIMARY KEY' THEN 'PRI' "
                + "WHEN 'UNIQUE INDEX' THEN 'UNI' ELSE '' END KEY, "
                + "IFNULL(COLUMN_DEFAULT, 'NULL') DEFAULT "
                + "FROM INFORMATION_SCHEMA.COLUMNS C "
                + "WHERE C.TABLE_NAME=? AND C.TABLE_SCHEMA=? "
                + "ORDER BY C.ORDINAL_POSITION");
        paramValues.add(ValueString.get(schemaName));
    } else if (readIf("DATABASES") || readIf("SCHEMAS")) {
        // for MySQL compatibility
        buff.append("SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA");
    }
    boolean b = session.getAllowLiterals();
    try {
        // need to temporarily enable it, in case we are in
        // ALLOW_LITERALS_NUMBERS mode
        session.setAllowLiterals(true);
        return prepare(session, buff.toString(), paramValues);
    } finally {
        session.setAllowLiterals(b);
    }
}
/**
 * Prepares the given SQL in the session and binds the supplied values to
 * its parameters, in order.
 *
 * @param s the session to prepare in
 * @param sql the SQL text
 * @param paramValues the values for the statement's parameters, in order
 * @return the prepared statement with parameters bound
 */
private static Prepared prepare(Session s, String sql,
        ArrayList<Value> paramValues) {
    Prepared prep = s.prepare(sql);
    ArrayList<Parameter> params = prep.getParameters();
    if (params != null) {
        int i = 0;
        for (Parameter param : params) {
            param.setValue(paramValues.get(i++));
        }
    }
    return prep;
}
/**
 * Looks ahead to decide whether the upcoming tokens start a query
 * (SELECT, FROM, VALUES, WITH, or TABLE not followed by an opening
 * parenthesis), then rewinds the parser to where it was.
 *
 * @return {@code true} if a query follows
 */
private boolean isSelect() {
    // remember the position so the look-ahead can be undone
    int start = lastParseIndex;
    while (readIf(OPEN_PAREN)) {
        // need to read ahead, it could be a nested union:
        // ((select 1) union (select 1))
    }
    boolean select;
    switch (currentTokenType) {
    case FROM:
    case SELECT:
    case VALUES:
    case WITH:
        select = true;
        break;
    case TABLE:
        // TABLE t is a query; TABLE( ... ) is a table function
        read();
        select = !readIf(OPEN_PAREN);
        break;
    default:
        select = false;
    }
    // rewind: restore the parse position and re-read the current token
    parseIndex = start;
    read();
    return select;
}
/**
 * Parses a MERGE statement. MERGE INTO target USING ... is delegated to
 * {@link #parseMergeUsing}; otherwise this is H2's simple MERGE
 * (upsert): optional column list or sub-query, optional KEY column list,
 * then VALUES rows or a query.
 *
 * @return the merge (or merge-using) command
 */
private Prepared parseMerge() {
    int start = lastParseIndex;
    read("INTO");
    // USING and KEY must not be consumed as a table alias here
    List<String> excludeIdentifiers = Arrays.asList("USING", "KEY");
    TableFilter targetTableFilter = readSimpleTableFilter(0, excludeIdentifiers);
    if (readIf("USING")) {
        return parseMergeUsing(targetTableFilter, start);
    }
    Merge command = new Merge(session);
    currentPrepared = command;
    command.setTargetTableFilter(targetTableFilter);
    Table table = command.getTargetTable();
    if (readIf(OPEN_PAREN)) {
        // either MERGE INTO t (query) or MERGE INTO t (col, ...)
        if (isSelect()) {
            command.setQuery(parseSelect());
            read(CLOSE_PAREN);
            return command;
        }
        Column[] columns = parseColumnList(table);
        command.setColumns(columns);
    }
    if (readIf("KEY")) {
        read(OPEN_PAREN);
        Column[] keys = parseColumnList(table);
        command.setKeys(keys);
    }
    if (readIf(VALUES)) {
        parseValuesForCommand(command);
    } else {
        command.setQuery(parseSelect());
    }
    return command;
}
/**
 * Parses the standard MERGE INTO target USING source ON condition
 * WHEN [NOT] MATCHED ... form. The source is either a parenthesized
 * query (wrapped in a temporary CTE view) or a plain table (wrapped in a
 * generated SELECT *).
 *
 * @param targetTableFilter the already parsed merge target
 * @param start the SQL text offset where the statement began
 * @return the merge-using command
 */
private MergeUsing parseMergeUsing(TableFilter targetTableFilter, int start) {
    MergeUsing command = new MergeUsing(session, targetTableFilter);
    currentPrepared = command;
    if (readIf(OPEN_PAREN)) {
        /* a select query is supplied */
        if (isSelect()) {
            command.setQuery(parseSelect());
            read(CLOSE_PAREN);
        }
        String queryAlias = readFromAlias(null, null);
        if (queryAlias == null) {
            // generate a unique alias when none was given
            queryAlias = Constants.PREFIX_QUERY_ALIAS + parseIndex;
        }
        command.setQueryAlias(queryAlias);
        // materialize the source query as a temporary CTE view so it can
        // be joined like a table
        String[] querySQLOutput = {null};
        List<Column> columnTemplateList = TableView.createQueryColumnTemplateList(null, command.getQuery(),
                querySQLOutput);
        TableView temporarySourceTableView = createCTEView(
                queryAlias, querySQLOutput[0],
                columnTemplateList, false/* no recursion */,
                false/* do not add to session */,
                true /* isTemporary */
        );
        TableFilter sourceTableFilter = new TableFilter(session,
                temporarySourceTableView, queryAlias,
                rightsChecked, (Select) command.getQuery(), 0, null);
        command.setSourceTableFilter(sourceTableFilter);
    } else {
        /* It's a table name; simulate a query by building a SELECT * for the table */
        TableFilter sourceTableFilter = readSimpleTableFilter(0, null);
        command.setSourceTableFilter(sourceTableFilter);
        Select preparedQuery = new Select(session, null);
        preparedQuery.setWildcard();
        TableFilter filter = new TableFilter(session, sourceTableFilter.getTable(),
                sourceTableFilter.getTableAlias(), rightsChecked, preparedQuery, 0, null);
        preparedQuery.addTableFilter(filter, true);
        preparedQuery.init();
        command.setQuery(preparedQuery);
    }
    read(ON);
    Expression condition = readExpression();
    command.setOnCondition(condition);
    // at least one WHEN clause is required; parse all of them
    read("WHEN");
    do {
        boolean matched = readIf("MATCHED");
        if (matched) {
            parseWhenMatched(command);
        } else {
            parseWhenNotMatched(command);
        }
    } while (readIf("WHEN"));
    setSQL(command, "MERGE", start);
    return command;
}
/**
 * Parses a WHEN MATCHED [AND cond] THEN clause of MERGE USING. Requires
 * an UPDATE action, a DELETE action, or both (UPDATE first); otherwise
 * a syntax error is thrown.
 *
 * @param command the merge-using command to add the clause to
 */
private void parseWhenMatched(MergeUsing command) {
    Expression and = readIf("AND") ? readExpression() : null;
    read("THEN");
    int startMatched = lastParseIndex;
    Update updateCommand = null;
    if (readIf("UPDATE")) {
        updateCommand = new Update(session);
        TableFilter filter = command.getTargetTableFilter();
        updateCommand.setTableFilter(filter);
        // no ORDER BY / LIMIT extensions inside MERGE
        parseUpdateSetClause(updateCommand, filter, startMatched, false);
        // a DELETE may follow the UPDATE; its SQL text starts here
        startMatched = lastParseIndex;
    }
    Delete deleteCommand = null;
    if (readIf("DELETE")) {
        deleteCommand = new Delete(session);
        deleteCommand.setTableFilter(command.getTargetTableFilter());
        if (readIf(WHERE)) {
            deleteCommand.setCondition(readExpression());
        }
        setSQL(deleteCommand, "DELETE", startMatched);
    }
    if (updateCommand != null || deleteCommand != null) {
        MergeUsing.WhenMatched when = new MergeUsing.WhenMatched(command);
        when.setAndCondition(and);
        when.setUpdateCommand(updateCommand);
        when.setDeleteCommand(deleteCommand);
        command.addWhen(when);
    } else {
        // THEN must be followed by UPDATE and/or DELETE
        throw getSyntaxError();
    }
}
/**
 * Parses a WHEN NOT MATCHED [AND cond] THEN INSERT clause of MERGE
 * USING; the leading WHEN was already consumed by the caller.
 *
 * @param command the merge-using command to add the clause to
 */
private void parseWhenNotMatched(MergeUsing command) {
    read(NOT);
    read("MATCHED");
    Expression and = readIf("AND") ? readExpression() : null;
    read("THEN");
    if (readIf("INSERT")) {
        Insert insertCommand = new Insert(session);
        insertCommand.setTable(command.getTargetTable());
        parseInsertGivenTable(insertCommand, command.getTargetTable());
        MergeUsing.WhenNotMatched when = new MergeUsing.WhenNotMatched(command);
        when.setAndCondition(and);
        when.setInsertCommand(insertCommand);
        command.addWhen(when);
    } else {
        // only INSERT is allowed after WHEN NOT MATCHED ... THEN
        throw getSyntaxError();
    }
}
/**
 * Parses an INSERT statement, including the MySQL-compatibility
 * INSERT IGNORE prefix and ON DUPLICATE KEY UPDATE suffix (assignment
 * targets there may be qualified up to schema.table.column and are
 * validated against the target table).
 *
 * @return the insert command
 */
private Insert parseInsert() {
    Insert command = new Insert(session);
    currentPrepared = command;
    if (database.getMode().onDuplicateKeyUpdate && readIf("IGNORE")) {
        command.setIgnore(true);
    }
    read("INTO");
    Table table = readTableOrView();
    command.setTable(table);
    Insert returnedCommand = parseInsertGivenTable(command, table);
    if (returnedCommand != null) {
        // the column list turned out to be a sub-query; nothing may follow
        return returnedCommand;
    }
    if (database.getMode().onDuplicateKeyUpdate) {
        if (readIf(ON)) {
            read("DUPLICATE");
            read("KEY");
            read("UPDATE");
            do {
                String columnName = readColumnIdentifier();
                if (readIf(DOT)) {
                    // qualified assignment target: [schema.]table.column
                    String schemaOrTableName = columnName;
                    String tableOrColumnName = readColumnIdentifier();
                    if (readIf(DOT)) {
                        if (!table.getSchema().getName().equals(schemaOrTableName)) {
                            throw DbException.get(ErrorCode.SCHEMA_NAME_MUST_MATCH);
                        }
                        columnName = readColumnIdentifier();
                    } else {
                        // only two parts: they were table.column
                        columnName = tableOrColumnName;
                        tableOrColumnName = schemaOrTableName;
                    }
                    if (!table.getName().equals(tableOrColumnName)) {
                        throw DbException.get(ErrorCode.TABLE_OR_VIEW_NOT_FOUND_1, tableOrColumnName);
                    }
                }
                Column column = table.getColumn(columnName);
                read(EQUAL);
                command.addAssignmentForDuplicate(column, readExpressionOrDefault());
            } while (readIf(COMMA));
        }
    }
    if (database.getMode().isolationLevelInSelectOrInsertStatement) {
        parseIsolationClause();
    }
    return command;
}
/**
 * Parses the body of an INSERT after the target table: optional column
 * list or sub-query, DIRECT/SORTED hints, then DEFAULT VALUES, a VALUES
 * list, a MySQL-style SET list, or a query.
 *
 * @param command the insert command to fill in
 * @param table the target table
 * @return the command itself when the parenthesized part was a sub-query
 *         (caller must stop parsing), or {@code null} to continue
 */
private Insert parseInsertGivenTable(Insert command, Table table) {
    Column[] columns = null;
    if (readIf(OPEN_PAREN)) {
        // either INSERT INTO t (query) or INSERT INTO t (col, ...)
        if (isSelect()) {
            command.setQuery(parseSelect());
            read(CLOSE_PAREN);
            return command;
        }
        columns = parseColumnList(table);
        command.setColumns(columns);
    }
    if (readIf("DIRECT")) {
        command.setInsertFromSelect(true);
    }
    if (readIf("SORTED")) {
        command.setSortedInsertMode(true);
    }
    if (readIf("DEFAULT")) {
        // DEFAULT VALUES: a single all-defaults row
        read(VALUES);
        command.addRow(new Expression[0]);
    } else if (readIf(VALUES)) {
        parseValuesForCommand(command);
    } else if (readIf("SET")) {
        // MySQL style: INSERT INTO t SET a = 1, b = 2 — incompatible with
        // an explicit column list
        if (columns != null) {
            throw getSyntaxError();
        }
        ArrayList<Column> columnList = Utils.newSmallArrayList();
        ArrayList<Expression> values = Utils.newSmallArrayList();
        do {
            columnList.add(parseColumn(table));
            read(EQUAL);
            values.add(readExpressionOrDefault());
        } while (readIf(COMMA));
        command.setColumns(columnList.toArray(new Column[0]));
        command.addRow(values.toArray(new Expression[0]));
    } else {
        command.setQuery(parseSelect());
    }
    return null;
}
/**
 * MySQL compatibility. REPLACE is similar to MERGE: it parses an
 * optional column list or sub-query, then VALUES rows or a query.
 *
 * @return the replace command
 */
private Replace parseReplace() {
    Replace command = new Replace(session);
    currentPrepared = command;
    read("INTO");
    Table table = readTableOrView();
    command.setTable(table);
    if (readIf(OPEN_PAREN)) {
        // either REPLACE INTO t (query) or REPLACE INTO t (col, ...)
        if (isSelect()) {
            command.setQuery(parseSelect());
            read(CLOSE_PAREN);
            return command;
        }
        Column[] columns = parseColumnList(table);
        command.setColumns(columns);
    }
    if (readIf(VALUES)) {
        parseValuesForCommand(command);
    } else {
        command.setQuery(parseSelect());
    }
    return command;
}
/**
 * Parses the rows of a VALUES clause for INSERT/MERGE/REPLACE. Each row
 * is either ROW (e, ...), a parenthesized (e, ...) list (possibly
 * empty), or a single bare expression; DEFAULT entries are stored as
 * {@code null} expressions.
 *
 * @param command the command to add the parsed rows to
 */
private void parseValuesForCommand(CommandWithValues command) {
    ArrayList<Expression> values = Utils.newSmallArrayList();
    do {
        // the buffer is reused across rows
        values.clear();
        boolean multiColumn;
        if (readIf(ROW)) {
            read(OPEN_PAREN);
            multiColumn = true;
        } else {
            multiColumn = readIf(OPEN_PAREN);
        }
        if (multiColumn) {
            if (!readIf(CLOSE_PAREN)) {
                do {
                    values.add(readIf("DEFAULT") ? null : readExpression());
                } while (readIfMore(false));
            }
        } else {
            // single-column row without parentheses
            values.add(readIf("DEFAULT") ? null : readExpression());
        }
        command.addRow(values.toArray(new Expression[0]));
    } while (readIf(COMMA));
}
/**
 * Reads one table reference of a FROM clause: a parenthesized query
 * (materialized as a temporary view), a parenthesized join, a VALUES
 * table, a TABLE(...) function, a range/table function, or a plain
 * table/view name — followed by optional alias, derived column names,
 * Sybase/MSSQL-style index hints, and WITH table hints.
 *
 * @return the parsed table filter
 */
private TableFilter readTableFilter() {
    Table table;
    String alias = null;
    // the label allows the DUAL special case below to skip alias handling
    // fall-through into the common tail
    label: if (readIf(OPEN_PAREN)) {
        if (isSelect()) {
            // ( query ): wrap it in a temporary view
            Query query = parseSelectUnion();
            read(CLOSE_PAREN);
            query.setParameterList(new ArrayList<>(parameters));
            query.init();
            Session s;
            if (createView != null) {
                // views are compiled in the system session
                s = database.getSystemSession();
            } else {
                s = session;
            }
            alias = session.getNextSystemIdentifier(sqlCommand);
            table = TableView.createTempView(s, session.getUser(), alias,
                    query, currentSelect);
        } else {
            // ( join ... ): parse the nested join and return it directly
            TableFilter top;
            top = readTableFilter();
            top = readJoin(top);
            read(CLOSE_PAREN);
            alias = readFromAlias(null);
            if (alias != null) {
                top.setAlias(alias);
                ArrayList<String> derivedColumnNames = readDerivedColumnNames();
                if (derivedColumnNames != null) {
                    top.setDerivedColumns(derivedColumnNames);
                }
            }
            return top;
        }
    } else if (readIf(VALUES)) {
        table = parseValuesTable(0).getTable();
    } else if (readIf(TABLE)) {
        // TABLE(...) table function
        read(OPEN_PAREN);
        table = readTableFunction("TABLE", null, database.getMainSchema());
    } else {
        String tableName = readIdentifierWithSchema(null);
        Schema schema;
        if (schemaName == null) {
            schema = null;
        } else {
            schema = findSchema(schemaName);
            if (schema == null) {
                if (isDualTable(tableName)) {
                    // Oracle-style DUAL without a real schema
                    table = getDualTable(false);
                    break label;
                }
                throw DbException.get(ErrorCode.SCHEMA_NOT_FOUND_1, schemaName);
            }
        }
        boolean foundLeftBracket = readIf(OPEN_PAREN);
        if (foundLeftBracket && readIf("INDEX")) {
            // Sybase compatibility with
            // "select * from test (index table1_index)"
            readIdentifierWithSchema(null);
            read(CLOSE_PAREN);
            foundLeftBracket = false;
        }
        if (foundLeftBracket) {
            Schema mainSchema = database.getMainSchema();
            if (equalsToken(tableName, RangeTable.NAME)
                    || equalsToken(tableName, RangeTable.ALIAS)) {
                // SYSTEM_RANGE(min, max [, step])
                Expression min = readExpression();
                read(COMMA);
                Expression max = readExpression();
                if (readIf(COMMA)) {
                    Expression step = readExpression();
                    read(CLOSE_PAREN);
                    table = new RangeTable(mainSchema, min, max, step,
                            false);
                } else {
                    read(CLOSE_PAREN);
                    table = new RangeTable(mainSchema, min, max, false);
                }
            } else {
                table = readTableFunction(tableName, schema, mainSchema);
            }
        } else {
            table = readTableOrView(tableName);
        }
    }
    ArrayList<String> derivedColumnNames = null;
    IndexHints indexHints = null;
    // for backward compatibility, handle case where USE is a table alias
    if (readIf("USE")) {
        if (readIf("INDEX")) {
            indexHints = parseIndexHints(table);
        } else {
            alias = "USE";
            derivedColumnNames = readDerivedColumnNames();
        }
    } else {
        alias = readFromAlias(alias);
        if (alias != null) {
            derivedColumnNames = readDerivedColumnNames();
            // if alias present, a second chance to parse index hints
            if (readIf("USE")) {
                read("INDEX");
                indexHints = parseIndexHints(table);
            }
        }
    }
    if (database.getMode().discardWithTableHints) {
        discardWithTableHints();
    }
    // inherit alias for CTE as views from table name
    if (table.isView() && table.isTableExpression() && alias == null) {
        alias = table.getName();
    }
    TableFilter filter = new TableFilter(session, table, alias, rightsChecked,
            currentSelect, orderInFrom++, indexHints);
    if (derivedColumnNames != null) {
        filter.setDerivedColumns(derivedColumnNames);
    }
    return filter;
}
/**
 * Reads a table-function call and wraps it in a {@link FunctionTable}.
 *
 * @param tableName the function name already consumed from the input
 * @param schema the schema of the function, or {@code null}
 * @param mainSchema the schema to create the function table in
 * @return the function table
 * @throws DbException when the parsed expression is not a function call
 */
private Table readTableFunction(String tableName, Schema schema, Schema mainSchema) {
    Expression functionExpr = readFunction(schema, tableName);
    if (!(functionExpr instanceof FunctionCall)) {
        throw getSyntaxError();
    }
    FunctionCall call = (FunctionCall) functionExpr;
    if (!call.isDeterministic()) {
        // non-deterministic table functions must be re-evaluated each time
        recompileAlways = true;
    }
    return new FunctionTable(mainSchema, session, functionExpr, call);
}
/**
 * Parses a USE INDEX (name, ...) hint list for the given table. The
 * special hash-join pseudo-index name is accepted case-insensitively;
 * all other names must resolve to an existing index of the table.
 *
 * @param table the table the hinted indexes belong to
 * @return the parsed index hints (possibly with an empty name set)
 */
private IndexHints parseIndexHints(Table table) {
    read(OPEN_PAREN);
    LinkedHashSet<String> names = new LinkedHashSet<>();
    if (!readIf(CLOSE_PAREN)) {
        do {
            String indexName = readIdentifierWithSchema();
            if (HashJoinIndex.HASH_JOIN_IDX.equalsIgnoreCase(indexName)) {
                names.add(HashJoinIndex.HASH_JOIN_IDX);
            } else {
                // resolves the name and throws when the index is unknown
                names.add(table.getIndex(indexName).getName());
            }
        } while (readIfMore(true));
    }
    return IndexHints.createUseIndexHints(names);
}
/**
 * Reads an optional alias: AS {identifier}, or a bare identifier that is
 * not in the exclusion list.
 *
 * @param alias the value to return when no alias is present
 * @param excludeIdentifiers identifiers that must not be consumed as an
 *        alias, or {@code null}
 * @return the parsed alias, or the given default
 */
private String readFromAlias(String alias, List<String> excludeIdentifiers) {
    if (readIf("AS")) {
        return readAliasIdentifier();
    }
    if (currentTokenType == IDENTIFIER
            && (excludeIdentifiers == null || !isTokenInList(excludeIdentifiers))) {
        return readAliasIdentifier();
    }
    return alias;
}
/**
 * Reads an optional alias, refusing to consume LEFT or RIGHT: they are
 * not keywords (they are also function names) but must remain available
 * to the join parser.
 *
 * @param alias the value to return when no alias is present
 * @return the parsed alias, or the given default
 */
private String readFromAlias(String alias) {
    return readFromAlias(alias, Arrays.asList("LEFT", "RIGHT"));
}
/**
 * Reads an optional parenthesized list of derived column names following
 * a table alias.
 *
 * @return the names, or {@code null} when no opening parenthesis follows
 */
private ArrayList<String> readDerivedColumnNames() {
    if (!readIf(OPEN_PAREN)) {
        return null;
    }
    ArrayList<String> names = new ArrayList<>();
    do {
        names.add(readAliasIdentifier());
    } while (readIfMore(true));
    return names;
}
/**
 * Consumes and discards a WITH ( hint, ... ) table-hint list
 * (MSSQL compatibility); does nothing when no WITH follows.
 */
private void discardWithTableHints() {
    if (!readIf(WITH)) {
        return;
    }
    read(OPEN_PAREN);
    do {
        discardTableHint();
    } while (readIfMore(true));
}
/**
 * Consumes and discards a single table hint: either
 * INDEX ( expr, ... ), INDEX = expr, or a bare expression.
 */
private void discardTableHint() {
    if (!readIf("INDEX")) {
        readExpression();
        return;
    }
    if (readIf(OPEN_PAREN)) {
        do {
            readExpression();
        } while (readIfMore(true));
    } else {
        read(EQUAL);
        readExpression();
    }
}
/**
 * Parses TRUNCATE TABLE {name} [CONTINUE IDENTITY | RESTART IDENTITY].
 * The default (no modifier) behaves like CONTINUE IDENTITY.
 *
 * @return the truncate command
 */
private Prepared parseTruncate() {
    read(TABLE);
    Table table = readTableOrView();
    boolean restartIdentity = false;
    if (readIf("CONTINUE")) {
        read("IDENTITY");
    } else if (readIf("RESTART")) {
        read("IDENTITY");
        restartIdentity = true;
    }
    TruncateTable command = new TruncateTable(session);
    command.setTable(table);
    command.setRestart(restartIdentity);
    return command;
}
/**
 * Consumes an IF EXISTS clause when present.
 *
 * @param ifExists the current flag value
 * @return {@code true} when IF EXISTS was read here or the flag was
 *         already set
 */
private boolean readIfExists(boolean ifExists) {
    if (readIf(IF)) {
        // IF must be followed by EXISTS
        read(EXISTS);
        return true;
    }
    return ifExists;
}
/**
 * Parses COMMENT ON {objectType} {name} IS {expression}. For COMMENT ON
 * COLUMN the dotted name is read manually (up to
 * database.schema.table.column) because the generic schema-qualified
 * reader cannot handle a database name equal to the schema name.
 *
 * @return the set-comment command
 */
private Prepared parseComment() {
    int type = 0;
    read(ON);
    boolean column = false;
    if (readIf(TABLE) || readIf("VIEW")) {
        type = DbObject.TABLE_OR_VIEW;
    } else if (readIf("COLUMN")) {
        column = true;
        type = DbObject.TABLE_OR_VIEW;
    } else if (readIf("CONSTANT")) {
        type = DbObject.CONSTANT;
    } else if (readIf(CONSTRAINT)) {
        type = DbObject.CONSTRAINT;
    } else if (readIf("ALIAS")) {
        type = DbObject.FUNCTION_ALIAS;
    } else if (readIf("INDEX")) {
        type = DbObject.INDEX;
    } else if (readIf("ROLE")) {
        type = DbObject.ROLE;
    } else if (readIf("SCHEMA")) {
        type = DbObject.SCHEMA;
    } else if (readIf("SEQUENCE")) {
        type = DbObject.SEQUENCE;
    } else if (readIf("TRIGGER")) {
        type = DbObject.TRIGGER;
    } else if (readIf("USER")) {
        type = DbObject.USER;
    } else if (readIf("DOMAIN")) {
        type = DbObject.DOMAIN;
    } else {
        throw getSyntaxError();
    }
    SetComment command = new SetComment(session);
    String objectName;
    if (column) {
        // can't use readIdentifierWithSchema() because
        // it would not read schema.table.column correctly
        // if the db name is equal to the schema name
        ArrayList<String> list = Utils.newSmallArrayList();
        do {
            list.add(readUniqueIdentifier());
        } while (readIf(DOT));
        schemaName = session.getCurrentSchemaName();
        if (list.size() == 4) {
            // four parts: the first must be this database's name
            if (!equalsToken(database.getShortName(), list.remove(0))) {
                throw DbException.getSyntaxError(sqlCommand, parseIndex,
                        "database name");
            }
        }
        if (list.size() == 3) {
            // three parts: the first is the schema
            schemaName = list.remove(0);
        }
        if (list.size() != 2) {
            throw DbException.getSyntaxError(sqlCommand, parseIndex,
                    "table.column");
        }
        objectName = list.get(0);
        command.setColumn(true);
        command.setColumnName(list.get(1));
    } else {
        objectName = readIdentifierWithSchema();
    }
    command.setSchemaName(schemaName);
    command.setObjectName(objectName);
    command.setObjectType(type);
    read(IS);
    command.setCommentExpression(readExpression());
    return command;
}
/**
 * Parses a DROP statement; the object kind keyword selects the concrete
 * drop command. IF EXISTS may appear before or after the object name
 * (both positions are probed via {@link #readIfExists}); some kinds also
 * accept CASCADE/RESTRICT or other trailing modifiers.
 *
 * @return the drop command
 * @throws DbException syntax error when no known object kind follows DROP
 */
private Prepared parseDrop() {
    if (readIf(TABLE)) {
        boolean ifExists = readIfExists(false);
        String tableName = readIdentifierWithSchema();
        DropTable command = new DropTable(session, getSchema());
        command.setTableName(tableName);
        // multiple tables may be dropped in one statement
        while (readIf(COMMA)) {
            tableName = readIdentifierWithSchema();
            DropTable next = new DropTable(session, getSchema());
            next.setTableName(tableName);
            command.addNextDropTable(next);
        }
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        if (readIf("CASCADE")) {
            command.setDropAction(ConstraintActionType.CASCADE);
            readIf("CONSTRAINTS");
        } else if (readIf("RESTRICT")) {
            command.setDropAction(ConstraintActionType.RESTRICT);
        } else if (readIf("IGNORE")) {
            // IGNORE is mapped to SET DEFAULT
            command.setDropAction(ConstraintActionType.SET_DEFAULT);
        }
        return command;
    } else if (readIf("INDEX")) {
        boolean ifExists = readIfExists(false);
        String indexName = readIdentifierWithSchema();
        DropIndex command = new DropIndex(session, getSchema());
        command.setIndexName(indexName);
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        //Support for MySQL: DROP INDEX index_name ON tbl_name
        if (readIf(ON)) {
            readIdentifierWithSchema();
        }
        return command;
    } else if (readIf("USER")) {
        boolean ifExists = readIfExists(false);
        DropUser command = new DropUser(session);
        command.setUserName(readUniqueIdentifier());
        ifExists = readIfExists(ifExists);
        // CASCADE is accepted but ignored for users
        readIf("CASCADE");
        command.setIfExists(ifExists);
        return command;
    } else if (readIf("SEQUENCE")) {
        boolean ifExists = readIfExists(false);
        String sequenceName = readIdentifierWithSchema();
        DropSequence command = new DropSequence(session, getSchema());
        command.setSequenceName(sequenceName);
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        return command;
    } else if (readIf("CONSTANT")) {
        boolean ifExists = readIfExists(false);
        String constantName = readIdentifierWithSchema();
        DropConstant command = new DropConstant(session, getSchema());
        command.setConstantName(constantName);
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        return command;
    } else if (readIf("TRIGGER")) {
        boolean ifExists = readIfExists(false);
        String triggerName = readIdentifierWithSchema();
        DropTrigger command = new DropTrigger(session, getSchema());
        command.setTriggerName(triggerName);
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        return command;
    } else if (readIf("VIEW")) {
        boolean ifExists = readIfExists(false);
        String viewName = readIdentifierWithSchema();
        DropView command = new DropView(session, getSchema());
        command.setViewName(viewName);
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        ConstraintActionType dropAction = parseCascadeOrRestrict();
        if (dropAction != null) {
            command.setDropAction(dropAction);
        }
        return command;
    } else if (readIf("ROLE")) {
        boolean ifExists = readIfExists(false);
        DropRole command = new DropRole(session);
        command.setRoleName(readUniqueIdentifier());
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        return command;
    } else if (readIf("ALIAS")) {
        boolean ifExists = readIfExists(false);
        String aliasName = readIdentifierWithSchema();
        DropFunctionAlias command = new DropFunctionAlias(session,
                getSchema());
        command.setAliasName(aliasName);
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        return command;
    } else if (readIf("SCHEMA")) {
        boolean ifExists = readIfExists(false);
        DropSchema command = new DropSchema(session);
        command.setSchemaName(readUniqueIdentifier());
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        ConstraintActionType dropAction = parseCascadeOrRestrict();
        if (dropAction != null) {
            command.setDropAction(dropAction);
        }
        return command;
    } else if (readIf(ALL)) {
        // DROP ALL OBJECTS [DELETE FILES]
        read("OBJECTS");
        DropDatabase command = new DropDatabase(session);
        command.setDropAllObjects(true);
        if (readIf("DELETE")) {
            read("FILES");
            command.setDeleteFiles(true);
        }
        return command;
    } else if (readIf("DOMAIN") || readIf("TYPE") || readIf("DATATYPE")) {
        return parseDropDomain();
    } else if (readIf("AGGREGATE")) {
        return parseDropAggregate();
    } else if (readIf("SYNONYM")) {
        boolean ifExists = readIfExists(false);
        String synonymName = readIdentifierWithSchema();
        DropSynonym command = new DropSynonym(session, getSchema());
        command.setSynonymName(synonymName);
        ifExists = readIfExists(ifExists);
        command.setIfExists(ifExists);
        return command;
    }
    throw getSyntaxError();
}
private DropDomain parseDropDomain() {
boolean ifExists = readIfExists(false);
DropDomain command = new DropDomain(session);
command.setTypeName(readUniqueIdentifier());
ifExists = readIfExists(ifExists);
command.setIfExists(ifExists);
ConstraintActionType dropAction = parseCascadeOrRestrict();
if (dropAction != null) {
command.setDropAction(dropAction);
}
return command;
}
private DropAggregate parseDropAggregate() {
boolean ifExists = readIfExists(false);
DropAggregate command = new DropAggregate(session);
command.setName(readUniqueIdentifier());
ifExists = readIfExists(ifExists);
command.setIfExists(ifExists);
return command;
}
    /**
     * Parses the chain of join clauses (RIGHT/LEFT [OUTER], INNER, CROSS and
     * NATURAL JOIN) that follow a table filter and wires the filters
     * together.
     *
     * @param top the left-most filter parsed so far
     * @return the top filter of the resulting join chain
     */
    private TableFilter readJoin(TableFilter top) {
        TableFilter last = top;
        while (true) {
            TableFilter join;
            if (readIf("RIGHT")) {
                readIf("OUTER");
                read(JOIN);
                // the right hand side is the 'inner' table usually
                join = readTableFilter();
                join = readJoin(join);
                Expression on = null;
                if (readIf(ON)) {
                    on = readExpression();
                }
                // a RIGHT join is represented as a LEFT join with the sides
                // swapped, so the new filter becomes the top filter
                addJoin(join, top, true, on);
                top = join;
            } else if (readIf("LEFT")) {
                readIf("OUTER");
                read(JOIN);
                join = readTableFilter();
                join = readJoin(join);
                Expression on = null;
                if (readIf(ON)) {
                    on = readExpression();
                }
                addJoin(top, join, true, on);
            } else if (readIf(FULL)) {
                // FULL OUTER JOIN is not supported
                throw getSyntaxError();
            } else if (readIf(INNER)) {
                read(JOIN);
                join = readTableFilter();
                top = readJoin(top);
                Expression on = null;
                if (readIf(ON)) {
                    on = readExpression();
                }
                addJoin(top, join, false, on);
            } else if (readIf(JOIN)) {
                join = readTableFilter();
                top = readJoin(top);
                Expression on = null;
                if (readIf(ON)) {
                    on = readExpression();
                }
                addJoin(top, join, false, on);
            } else if (readIf(CROSS)) {
                read(JOIN);
                join = readTableFilter();
                // a cross join has no join condition
                addJoin(top, join, false, null);
            } else if (readIf(NATURAL)) {
                read(JOIN);
                join = readTableFilter();
                Column[] tableCols = last.getTable().getColumns();
                Column[] joinCols = join.getTable().getColumns();
                String tableSchema = last.getTable().getSchema().getName();
                String joinSchema = join.getTable().getSchema().getName();
                // build an equality condition for every column name that
                // occurs in both tables
                Expression on = null;
                for (Column tc : tableCols) {
                    String tableColumnName = tc.getName();
                    for (Column c : joinCols) {
                        String joinColumnName = c.getName();
                        if (equalsToken(tableColumnName, joinColumnName)) {
                            join.addNaturalJoinColumn(c);
                            Expression tableExpr = new ExpressionColumn(
                                    database, tableSchema,
                                    last.getTableAlias(), tableColumnName, false);
                            Expression joinExpr = new ExpressionColumn(
                                    database, joinSchema, join.getTableAlias(),
                                    joinColumnName, false);
                            Expression equal = new Comparison(session,
                                    Comparison.EQUAL, tableExpr, joinExpr);
                            if (on == null) {
                                on = equal;
                            } else {
                                on = new ConditionAndOr(ConditionAndOr.AND, on,
                                        equal);
                            }
                        }
                    }
                }
                addJoin(top, join, false, on);
            } else {
                break;
            }
            last = join;
        }
        return top;
    }
/**
* Add one join to another. This method creates nested join between them if
* required.
*
* @param top parent join
* @param join child join
* @param outer if child join is an outer join
* @param on the join condition
* @see TableFilter#addJoin(TableFilter, boolean, Expression)
*/
private void addJoin(TableFilter top, TableFilter join, boolean outer, Expression on) {
if (join.getJoin() != null) {
String joinTable = Constants.PREFIX_JOIN + parseIndex;
TableFilter n = new TableFilter(session, getDualTable(true),
joinTable, rightsChecked, currentSelect, join.getOrderInFrom(),
null);
n.setNestedJoin(join);
join = n;
}
top.addJoin(join, outer, on);
}
private Prepared parseExecute() {
ExecuteProcedure command = new ExecuteProcedure(session);
String procedureName = readAliasIdentifier();
Procedure p = session.getProcedure(procedureName);
if (p == null) {
throw DbException.get(ErrorCode.FUNCTION_ALIAS_NOT_FOUND_1,
procedureName);
}
command.setProcedure(p);
if (readIf(OPEN_PAREN)) {
for (int i = 0;; i++) {
command.setExpression(i, readExpression());
if (!readIfMore(true)) {
break;
}
}
}
return command;
}
private DeallocateProcedure parseDeallocate() {
readIf("PLAN");
String procedureName = readAliasIdentifier();
DeallocateProcedure command = new DeallocateProcedure(session);
command.setProcedureName(procedureName);
return command;
}
private Explain parseExplain() {
Explain command = new Explain(session);
if (readIf("ANALYZE")) {
command.setExecuteCommand(true);
} else {
if (readIf("PLAN")) {
readIf(FOR);
}
}
switch (currentTokenType) {
case FROM:
case SELECT:
case TABLE:
case VALUES:
case WITH:
case OPEN_PAREN:
Query query = parseSelect();
query.setNeverLazy(true);
command.setCommand(query);
break;
default:
if (readIf("DELETE")) {
command.setCommand(parseDelete());
} else if (readIf("UPDATE")) {
command.setCommand(parseUpdate());
} else if (readIf("INSERT")) {
command.setCommand(parseInsert());
} else if (readIf("MERGE")) {
command.setCommand(parseMerge());
} else {
throw getSyntaxError();
}
}
return command;
}
private Query parseSelect() {
int paramIndex = parameters.size();
Query command = parseSelectUnion();
int size = parameters.size();
ArrayList<Parameter> params = new ArrayList<>(size);
for (int i = paramIndex; i < size; i++) {
params.add(parameters.get(i));
}
command.setParameterList(params);
command.init();
return command;
}
private Prepared parseWithStatementOrQuery() {
int paramIndex = parameters.size();
Prepared command = parseWith();
int size = parameters.size();
ArrayList<Parameter> params = new ArrayList<>(size);
for (int i = paramIndex; i < size; i++) {
params.add(parameters.get(i));
}
command.setParameterList(params);
if (command instanceof Query) {
Query query = (Query) command;
query.init();
}
return command;
}
    /**
     * Parses a query, possibly combined with further queries via UNION
     * [ALL|DISTINCT], EXCEPT/MINUS or INTERSECT, followed by the common
     * end-of-query clauses (ORDER BY, OFFSET/FETCH, LIMIT, FOR UPDATE, ...).
     *
     * @return the parsed query
     */
    private Query parseSelectUnion() {
        int start = lastParseIndex;
        Query command = parseSelectSub();
        for (;;) {
            SelectUnion.UnionType type;
            if (readIf(UNION)) {
                if (readIf(ALL)) {
                    type = SelectUnion.UnionType.UNION_ALL;
                } else {
                    // UNION DISTINCT is the same as plain UNION
                    readIf(DISTINCT);
                    type = SelectUnion.UnionType.UNION;
                }
            } else if (readIf(EXCEPT) || readIf(MINUS)) {
                type = SelectUnion.UnionType.EXCEPT;
            } else if (readIf(INTERSECT)) {
                type = SelectUnion.UnionType.INTERSECT;
            } else {
                break;
            }
            command = new SelectUnion(session, type, command, parseSelectSub());
        }
        parseEndOfQuery(command);
        setSQL(command, null, start);
        return command;
    }
    /**
     * Parses the clauses that may follow any query: ORDER BY, standard SQL
     * OFFSET / FETCH, MySQL-style LIMIT / OFFSET, SAMPLE_SIZE, FOR UPDATE /
     * FOR READ ONLY and, in some compatibility modes, an isolation clause.
     *
     * @param command the query to attach the parsed clauses to
     */
    private void parseEndOfQuery(Query command) {
        if (readIf(ORDER)) {
            read("BY");
            Select oldSelect = currentSelect;
            if (command instanceof Select) {
                currentSelect = (Select) command;
            }
            ArrayList<SelectOrderBy> orderList = Utils.newSmallArrayList();
            do {
                boolean canBeNumber = !readIf(EQUAL);
                SelectOrderBy order = new SelectOrderBy();
                Expression expr = readExpression();
                if (canBeNumber && expr instanceof ValueExpression && expr.getType().getValueType() == Value.INT) {
                    // ORDER BY <column index>
                    order.columnIndexExpr = expr;
                } else if (expr instanceof Parameter) {
                    // the column index is only known at execution time
                    recompileAlways = true;
                    order.columnIndexExpr = expr;
                } else {
                    order.expression = expr;
                }
                order.sortType = parseSortType();
                orderList.add(order);
            } while (readIf(COMMA));
            command.setOrder(orderList);
            currentSelect = oldSelect;
        }
        if (command.getLimit() == null) {
            // make sure aggregate functions will not work here
            Select temp = currentSelect;
            currentSelect = null;
            boolean hasOffsetOrFetch = false;
            // Standard SQL OFFSET / FETCH
            if (readIf(OFFSET)) {
                hasOffsetOrFetch = true;
                command.setOffset(readExpression().optimize(session));
                if (!readIf(ROW)) {
                    readIf("ROWS");
                }
            }
            if (readIf(FETCH)) {
                hasOffsetOrFetch = true;
                if (!readIf("FIRST")) {
                    read("NEXT");
                }
                if (readIf(ROW) || readIf("ROWS")) {
                    // FETCH FIRST ROW ONLY means a limit of one row
                    command.setLimit(ValueExpression.get(ValueInt.get(1)));
                } else {
                    Expression limit = readExpression().optimize(session);
                    command.setLimit(limit);
                    if (readIf("PERCENT")) {
                        command.setFetchPercent(true);
                    }
                    if (!readIf(ROW)) {
                        read("ROWS");
                    }
                }
                if (readIf(WITH)) {
                    read("TIES");
                    command.setWithTies(true);
                } else {
                    read("ONLY");
                }
            }
            // MySQL-style LIMIT / OFFSET
            if (!hasOffsetOrFetch && readIf(LIMIT)) {
                Expression limit = readExpression().optimize(session);
                command.setLimit(limit);
                if (readIf(OFFSET)) {
                    Expression offset = readExpression().optimize(session);
                    command.setOffset(offset);
                } else if (readIf(COMMA)) {
                    // MySQL: [offset, ] rowcount
                    Expression offset = limit;
                    limit = readExpression().optimize(session);
                    command.setOffset(offset);
                    command.setLimit(limit);
                }
            }
            if (readIf("SAMPLE_SIZE")) {
                Expression sampleSize = readExpression().optimize(session);
                command.setSampleSize(sampleSize);
            }
            currentSelect = temp;
        }
        if (readIf(FOR)) {
            if (readIf("UPDATE")) {
                if (readIf("OF")) {
                    // the column list is parsed but otherwise ignored
                    do {
                        readIdentifierWithSchema();
                    } while (readIf(COMMA));
                } else if (readIf("NOWAIT")) {
                    // TODO parser: select for update nowait: should not wait
                }
                command.setForUpdate(true);
            } else if (readIf("READ") || readIf(FETCH)) {
                read("ONLY");
            }
        }
        if (database.getMode().isolationLevelInSelectOrInsertStatement) {
            parseIsolationClause();
        }
    }
    /**
     * Parses (and ignores) a DB2-style isolation clause such as
     * {@code WITH RR USE AND KEEP SHARE LOCKS} or {@code WITH CS}.
     */
    private void parseIsolationClause() {
        if (readIf(WITH)) {
            if (readIf("RR") || readIf("RS")) {
                // concurrent-access-resolution clause
                if (readIf("USE")) {
                    read("AND");
                    read("KEEP");
                    if (readIf("SHARE") || readIf("UPDATE") ||
                            readIf("EXCLUSIVE")) {
                        // ignore
                    }
                    read("LOCKS");
                }
            } else if (readIf("CS") || readIf("UR")) {
                // ignore
            }
        }
    }
    /**
     * Parses a single query term: a parenthesized query, a WITH query, or a
     * simple SELECT.
     *
     * @return the parsed query
     */
    private Query parseSelectSub() {
        if (readIf(OPEN_PAREN)) {
            Query command = parseSelectUnion();
            read(CLOSE_PAREN);
            return command;
        }
        if (readIf(WITH)) {
            Query query;
            try {
                // parseWith() may also return a DML command, which is not
                // allowed in this position
                query = (Query) parseWith();
            } catch (ClassCastException e) {
                throw DbException.get(ErrorCode.SYNTAX_ERROR_1,
                        "WITH statement supports only SELECT (query) in this context");
            }
            // recursive can not be lazy
            query.setNeverLazy(true);
            return query;
        }
        return parseSelectSimple();
    }
private void parseSelectSimpleFromPart(Select command) {
do {
TableFilter filter = readTableFilter();
parseJoinTableFilter(filter, command);
} while (readIf(COMMA));
// Parser can reorder joined table filters, need to explicitly sort them
// to get the order as it was in the original query.
if (session.isForceJoinOrder()) {
Collections.sort(command.getTopFilters(), TABLE_FILTER_COMPARATOR);
}
}
    /**
     * Parses the join chain for the given filter and registers every
     * resulting filter with the select. Inner joins of a chain without outer
     * joins are flattened: their join conditions are moved into the WHERE
     * condition so the optimizer can reorder the tables.
     *
     * @param top the filter to start from
     * @param command the select to add the filters to
     */
    private void parseJoinTableFilter(TableFilter top, final Select command) {
        top = readJoin(top);
        command.addTableFilter(top, true);
        boolean isOuter = false;
        while (true) {
            TableFilter n = top.getNestedJoin();
            if (n != null) {
                // register all filters inside a nested join as non-top filters
                n.visit(new TableFilterVisitor() {
                    @Override
                    public void accept(TableFilter f) {
                        command.addTableFilter(f, false);
                    }
                });
            }
            TableFilter join = top.getJoin();
            if (join == null) {
                break;
            }
            // once an outer join is seen, the rest of the chain must keep
            // its structure
            isOuter = isOuter | join.isJoinOuter();
            if (isOuter) {
                command.addTableFilter(join, false);
            } else {
                // make flat so the optimizer can work better
                Expression on = join.getJoinCondition();
                if (on != null) {
                    command.addCondition(on);
                }
                join.removeJoinCondition();
                top.removeJoin();
                command.addTableFilter(join, true);
            }
            top = join;
        }
    }
    /**
     * Parses the part of a simple SELECT between the SELECT keyword and FROM:
     * non-standard TOP / LIMIT, DISTINCT [ON (...)] / ALL, and the select
     * expression list with optional aliases.
     *
     * @param command the select to configure
     */
    private void parseSelectSimpleSelectPart(Select command) {
        Select temp = currentSelect;
        // make sure aggregate functions will not work in TOP and LIMIT
        currentSelect = null;
        if (readIf("TOP")) {
            // can't read more complex expressions here because
            // SELECT TOP 1 +? A FROM TEST could mean
            // SELECT TOP (1+?) A FROM TEST or
            // SELECT TOP 1 (+?) AS A FROM TEST
            Expression limit = readTerm().optimize(session);
            command.setLimit(limit);
            if (readIf("PERCENT")) {
                command.setFetchPercent(true);
            }
            if (readIf(WITH)) {
                read("TIES");
                command.setWithTies(true);
            }
        } else if (readIf(LIMIT)) {
            // non-standard SELECT LIMIT offset rowcount
            Expression offset = readTerm().optimize(session);
            command.setOffset(offset);
            Expression limit = readTerm().optimize(session);
            command.setLimit(limit);
        }
        currentSelect = temp;
        if (readIf(DISTINCT)) {
            if (readIf(ON)) {
                // PostgreSQL-style DISTINCT ON (expr, ...)
                read(OPEN_PAREN);
                ArrayList<Expression> distinctExpressions = Utils.newSmallArrayList();
                do {
                    distinctExpressions.add(readExpression());
                } while (readIfMore(true));
                command.setDistinct(distinctExpressions.toArray(new Expression[0]));
            } else {
                command.setDistinct();
            }
        } else {
            // ALL is the default
            readIf(ALL);
        }
        ArrayList<Expression> expressions = Utils.newSmallArrayList();
        do {
            if (readIf(ASTERISK)) {
                expressions.add(parseWildcard(null, null));
            } else {
                Expression expr = readExpression();
                // an identifier after the expression is an alias, with or
                // without the AS keyword
                if (readIf("AS") || currentTokenType == IDENTIFIER) {
                    String alias = readAliasIdentifier();
                    boolean aliasColumnName = database.getSettings().aliasColumnName;
                    aliasColumnName |= database.getMode().aliasColumnName;
                    expr = new Alias(expr, alias, aliasColumnName);
                }
                expressions.add(expr);
            }
        } while (readIf(COMMA));
        command.setExpressions(expressions);
    }
    /**
     * Parses a simple query: SELECT ..., the from-first form FROM ...
     * SELECT ..., the shorthand TABLE name, or VALUES (...).
     *
     * @return the parsed select
     */
    private Select parseSelectSimple() {
        boolean fromFirst;
        if (readIf(SELECT)) {
            fromFirst = false;
        } else if (readIf(FROM)) {
            fromFirst = true;
        } else if (readIf(TABLE)) {
            // TABLE name is a shorthand for SELECT * FROM name
            int start = lastParseIndex;
            Table table = readTableOrView();
            Select command = new Select(session, currentSelect);
            TableFilter filter = new TableFilter(session, table, null, rightsChecked,
                    command, orderInFrom++, null);
            command.addTableFilter(filter, true);
            ArrayList<Expression> expressions = new ArrayList<>();
            expressions.add(new Wildcard(null, null));
            command.setExpressions(expressions);
            setSQL(command, "TABLE", start);
            return command;
        } else if (readIf(VALUES)) {
            return parseValues();
        } else {
            throw getSyntaxError();
        }
        Select command = new Select(session, currentSelect);
        int start = lastParseIndex;
        Select oldSelect = currentSelect;
        Prepared oldPrepared = currentPrepared;
        currentSelect = command;
        currentPrepared = command;
        if (fromFirst) {
            parseSelectSimpleFromPart(command);
            read(SELECT);
            parseSelectSimpleSelectPart(command);
        } else {
            parseSelectSimpleSelectPart(command);
            if (!readIf(FROM)) {
                // select without FROM: convert to SELECT ... FROM
                // SYSTEM_RANGE(1,1)
                Table dual = getDualTable(false);
                TableFilter filter = new TableFilter(session, dual, null,
                        rightsChecked, currentSelect, 0,
                        null);
                command.addTableFilter(filter, true);
            } else {
                parseSelectSimpleFromPart(command);
            }
        }
        if (readIf(WHERE)) {
            Expression condition = readExpression();
            command.addCondition(condition);
        }
        // the group by is read for the outer select (or not a select)
        // so that columns that are not grouped can be used
        currentSelect = oldSelect;
        if (readIf(GROUP)) {
            read("BY");
            command.setGroupQuery();
            ArrayList<Expression> list = Utils.newSmallArrayList();
            do {
                Expression expr = readExpression();
                list.add(expr);
            } while (readIf(COMMA));
            command.setGroupBy(list);
        }
        currentSelect = command;
        if (readIf(HAVING)) {
            command.setGroupQuery();
            Expression condition = readExpression();
            command.setHaving(condition);
        }
        if (readIf(WINDOW)) {
            // named window definitions: WINDOW name AS (...), ...
            do {
                int index = parseIndex;
                String name = readAliasIdentifier();
                read("AS");
                Window w = readWindowSpecification();
                if (!currentSelect.addWindow(name, w)) {
                    throw DbException.getSyntaxError(sqlCommand, index, "unique identifier");
                }
            } while (readIf(COMMA));
        }
        if (readIf(QUALIFY)) {
            command.setWindowQuery();
            Expression condition = readExpression();
            command.setQualify(condition);
        }
        command.setParameterList(parameters);
        currentSelect = oldSelect;
        currentPrepared = oldPrepared;
        setSQL(command, "SELECT", start);
        return command;
    }
private Table getDualTable(boolean noColumns) {
Schema main = database.getMainSchema();
Expression one = ValueExpression.get(ValueLong.get(1));
return new RangeTable(main, one, one, noColumns);
}
private void setSQL(Prepared command, String start, int startIndex) {
int endIndex = lastParseIndex;
String sql;
if (start != null) {
StringBuilder builder = new StringBuilder(start.length() + endIndex - startIndex + 1)
.append(start).append(' ');
sql = StringUtils.trimSubstring(builder, originalSQL, startIndex, endIndex).toString();
} else {
sql = StringUtils.trimSubstring(originalSQL, startIndex, endIndex);
}
command.setSQL(sql);
}
private Expression readExpressionOrDefault() {
if (readIf("DEFAULT")) {
return ValueExpression.getDefault();
}
return readExpression();
}
private Expression readExpression() {
Expression r = readAnd();
while (readIf("OR")) {
r = new ConditionAndOr(ConditionAndOr.OR, r, readAnd());
}
return r;
}
private Expression readAnd() {
Expression r = readCondition();
while (readIf("AND")) {
r = new ConditionAndOr(ConditionAndOr.AND, r, readCondition());
}
return r;
}
    /**
     * Parses a condition: NOT, EXISTS, INTERSECTS, or a comparison-level
     * expression possibly followed by [NOT] LIKE / ILIKE / REGEXP / IS /
     * IN / BETWEEN or a comparison operator (optionally quantified with
     * ALL / ANY / SOME).
     *
     * @return the parsed condition
     */
    private Expression readCondition() {
        if (readIf(NOT)) {
            return new ConditionNot(readCondition());
        }
        if (readIf(EXISTS)) {
            read(OPEN_PAREN);
            Query query = parseSelect();
            // can not reduce expression because it might be a union except
            // query with distinct
            read(CLOSE_PAREN);
            return new ConditionExists(query);
        }
        if (readIf(INTERSECTS)) {
            read(OPEN_PAREN);
            Expression r1 = readConcat();
            read(COMMA);
            Expression r2 = readConcat();
            read(CLOSE_PAREN);
            return new Comparison(session, Comparison.SPATIAL_INTERSECTS, r1,
                    r2);
        }
        Expression r = readConcat();
        while (true) {
            // special case: NOT NULL is not part of an expression (as in CREATE
            // TABLE TEST(ID INT DEFAULT 0 NOT NULL))
            int backup = parseIndex;
            boolean not = readIf(NOT);
            if (not && isToken(NULL)) {
                // this really only works for NOT NULL!
                parseIndex = backup;
                currentToken = "NOT";
                currentTokenType = NOT;
                break;
            }
            if (readIf(LIKE)) {
                Expression b = readConcat();
                Expression esc = null;
                if (readIf("ESCAPE")) {
                    esc = readConcat();
                }
                recompileAlways = true;
                r = new CompareLike(database, r, b, esc, false);
            } else if (readIf("ILIKE")) {
                // case-insensitive LIKE: cast the left hand side to
                // VARCHAR_IGNORECASE
                Function function = Function.getFunction(database, "CAST");
                function.setDataType(new Column("X", Value.STRING_IGNORECASE));
                function.setParameter(0, r);
                r = function;
                Expression b = readConcat();
                Expression esc = null;
                if (readIf("ESCAPE")) {
                    esc = readConcat();
                }
                recompileAlways = true;
                r = new CompareLike(database, r, b, esc, false);
            } else if (readIf("REGEXP")) {
                Expression b = readConcat();
                recompileAlways = true;
                r = new CompareLike(database, r, b, null, true);
            } else if (readIf(IS)) {
                if (readIf(NOT)) {
                    if (readIf(NULL)) {
                        r = new Comparison(session, Comparison.IS_NOT_NULL, r,
                                null);
                    } else if (readIf(DISTINCT)) {
                        // IS NOT DISTINCT FROM is a null-safe equality
                        read(FROM);
                        r = new Comparison(session, Comparison.EQUAL_NULL_SAFE,
                                r, readConcat());
                    } else if (readIf("OF")) {
                        r = readTypePredicate(r, true);
                    } else {
                        r = new Comparison(session,
                                Comparison.NOT_EQUAL_NULL_SAFE, r, readConcat());
                    }
                } else if (readIf(NULL)) {
                    r = new Comparison(session, Comparison.IS_NULL, r, null);
                } else if (readIf(DISTINCT)) {
                    // IS DISTINCT FROM is a null-safe inequality
                    read(FROM);
                    r = new Comparison(session, Comparison.NOT_EQUAL_NULL_SAFE,
                            r, readConcat());
                } else if (readIf("OF")) {
                    r = readTypePredicate(r, false);
                } else {
                    r = new Comparison(session, Comparison.EQUAL_NULL_SAFE, r,
                            readConcat());
                }
            } else if (readIf("IN")) {
                read(OPEN_PAREN);
                if (readIf(CLOSE_PAREN)) {
                    // an empty IN list is always false, unless the
                    // compatibility mode prohibits it
                    if (database.getMode().prohibitEmptyInPredicate) {
                        throw getSyntaxError();
                    }
                    r = ValueExpression.get(ValueBoolean.FALSE);
                } else {
                    if (isSelect()) {
                        Query query = parseSelect();
                        r = new ConditionInSelect(database, r, query, false,
                                Comparison.EQUAL);
                    } else {
                        ArrayList<Expression> v = Utils.newSmallArrayList();
                        Expression last;
                        do {
                            last = readExpression();
                            v.add(last);
                        } while (readIf(COMMA));
                        if (v.size() == 1 && (last instanceof Subquery)) {
                            // IN (subquery) with an extra level of parentheses
                            Subquery s = (Subquery) last;
                            Query q = s.getQuery();
                            r = new ConditionInSelect(database, r, q, false,
                                    Comparison.EQUAL);
                        } else {
                            r = new ConditionIn(database, r, v);
                        }
                    }
                    read(CLOSE_PAREN);
                }
            } else if (readIf("BETWEEN")) {
                // X BETWEEN low AND high is rewritten as
                // low <= X AND high >= X
                Expression low = readConcat();
                read("AND");
                Expression high = readConcat();
                Expression condLow = new Comparison(session,
                        Comparison.SMALLER_EQUAL, low, r);
                Expression condHigh = new Comparison(session,
                        Comparison.BIGGER_EQUAL, high, r);
                r = new ConditionAndOr(ConditionAndOr.AND, condLow, condHigh);
            } else {
                if (not) {
                    throw getSyntaxError();
                }
                int compareType = getCompareType(currentTokenType);
                if (compareType < 0) {
                    break;
                }
                read();
                int start = lastParseIndex;
                if (readIf(ALL)) {
                    read(OPEN_PAREN);
                    if (isSelect()) {
                        Query query = parseSelect();
                        r = new ConditionInSelect(database, r, query, true, compareType);
                        read(CLOSE_PAREN);
                    } else {
                        // ALL was an identifier, not a quantifier: re-read
                        parseIndex = start;
                        read();
                        r = new Comparison(session, compareType, r, readConcat());
                    }
                } else if (readIf("ANY") || readIf("SOME")) {
                    read(OPEN_PAREN);
                    if (currentTokenType == PARAMETER && compareType == 0) {
                        Parameter p = readParameter();
                        r = new ConditionInParameter(database, r, p);
                        read(CLOSE_PAREN);
                    } else if (isSelect()) {
                        Query query = parseSelect();
                        r = new ConditionInSelect(database, r, query, false, compareType);
                        read(CLOSE_PAREN);
                    } else {
                        // ANY / SOME was an identifier, not a quantifier
                        parseIndex = start;
                        read();
                        r = new Comparison(session, compareType, r, readConcat());
                    }
                } else {
                    r = new Comparison(session, compareType, r, readConcat());
                }
            }
            if (not) {
                // NOT LIKE / NOT IN / NOT BETWEEN ...: negate the result
                r = new ConditionNot(r);
            }
        }
        return r;
    }
private TypePredicate readTypePredicate(Expression r, boolean not) {
read(OPEN_PAREN);
ArrayList<TypeInfo> typeList = Utils.newSmallArrayList();
do {
typeList.add(parseColumnWithType(null, false).getType());
} while (readIfMore(true));
return new TypePredicate(r, not, typeList.toArray(new TypeInfo[0]));
}
private Expression readConcat() {
Expression r = readSum();
while (true) {
if (readIf(STRING_CONCAT)) {
r = new BinaryOperation(OpType.CONCAT, r, readSum());
} else if (readIf(TILDE)) {
if (readIf(ASTERISK)) {
Function function = Function.getFunction(database, "CAST");
function.setDataType(new Column("X",
Value.STRING_IGNORECASE));
function.setParameter(0, r);
r = function;
}
r = new CompareLike(database, r, readSum(), null, true);
} else if (readIf(NOT_TILDE)) {
if (readIf(ASTERISK)) {
Function function = Function.getFunction(database, "CAST");
function.setDataType(new Column("X",
Value.STRING_IGNORECASE));
function.setParameter(0, r);
r = function;
}
r = new ConditionNot(new CompareLike(database, r, readSum(),
null, true));
} else {
return r;
}
}
}
private Expression readSum() {
Expression r = readFactor();
while (true) {
if (readIf(PLUS_SIGN)) {
r = new BinaryOperation(OpType.PLUS, r, readFactor());
} else if (readIf(MINUS_SIGN)) {
r = new BinaryOperation(OpType.MINUS, r, readFactor());
} else {
return r;
}
}
}
private Expression readFactor() {
Expression r = readTerm();
while (true) {
if (readIf(ASTERISK)) {
r = new BinaryOperation(OpType.MULTIPLY, r, readTerm());
} else if (readIf(SLASH)) {
r = new BinaryOperation(OpType.DIVIDE, r, readTerm());
} else if (readIf(PERCENT)) {
r = new BinaryOperation(OpType.MODULUS, r, readTerm());
} else {
return r;
}
}
}
    /**
     * Parses the arguments of a built-in aggregate function after its
     * opening parenthesis. Handles COUNT(*), the LISTAGG / GROUP_CONCAT /
     * STRING_AGG variants, ARRAY_AGG, hypothetical-set and inverse
     * distribution functions (WITHIN GROUP), MODE, and generic
     * single-argument aggregates, followed by optional FILTER and OVER
     * clauses.
     *
     * @param aggregateType the type of the aggregate
     * @param aggregateName the function name as written in the statement
     * @return the parsed aggregate (or window function for argument-less
     *         RANK-family invocations)
     */
    private Expression readAggregate(AggregateType aggregateType, String aggregateName) {
        if (currentSelect == null) {
            // aggregates are only valid inside a select
            throw getSyntaxError();
        }
        Aggregate r;
        switch (aggregateType) {
        case COUNT:
            if (readIf(ASTERISK)) {
                r = new Aggregate(AggregateType.COUNT_ALL, new Expression[0], currentSelect, false);
            } else {
                boolean distinct = readDistinctAgg();
                Expression on = readExpression();
                if (on instanceof Wildcard && !distinct) {
                    // PostgreSQL compatibility: count(t.*)
                    r = new Aggregate(AggregateType.COUNT_ALL, new Expression[0], currentSelect, false);
                } else {
                    r = new Aggregate(AggregateType.COUNT, new Expression[] { on }, currentSelect, distinct);
                }
            }
            break;
        case LISTAGG: {
            boolean distinct = readDistinctAgg();
            Expression arg = readExpression(), separator = null;
            ArrayList<SelectOrderBy> orderByList = null;
            if (equalsToken("STRING_AGG", aggregateName)) {
                // PostgreSQL compatibility: string_agg(expression, delimiter)
                read(COMMA);
                separator = readExpression();
                if (readIf(ORDER)) {
                    read("BY");
                    orderByList = parseSimpleOrderList();
                }
            } else if (equalsToken("GROUP_CONCAT", aggregateName)){
                // MySQL compatibility:
                // group_concat(expr [ORDER BY ...] [SEPARATOR ...])
                if (readIf(ORDER)) {
                    read("BY");
                    orderByList = parseSimpleOrderList();
                }
                if (readIf("SEPARATOR")) {
                    separator = readExpression();
                }
            } else {
                // standard LISTAGG(expr [, separator] [ON OVERFLOW ERROR])
                if (readIf(COMMA)) {
                    separator = readExpression();
                }
                if (readIf(ON)) {
                    read("OVERFLOW");
                    read("ERROR");
                }
            }
            Expression[] args = separator == null ? new Expression[] { arg } : new Expression[] { arg, separator };
            int index = lastParseIndex;
            read(CLOSE_PAREN);
            if (orderByList == null && isToken("WITHIN")) {
                r = readWithinGroup(aggregateType, args, distinct, false);
            } else {
                // no WITHIN GROUP: restore the position so the final
                // read(CLOSE_PAREN) below consumes the closing parenthesis
                parseIndex = index;
                read();
                r = new Aggregate(AggregateType.LISTAGG, args, currentSelect, distinct);
                if (orderByList != null) {
                    r.setOrderByList(orderByList);
                }
            }
            break;
        }
        case ARRAY_AGG: {
            boolean distinct = readDistinctAgg();
            r = new Aggregate(AggregateType.ARRAY_AGG, new Expression[] { readExpression() }, currentSelect, distinct);
            if (readIf(ORDER)) {
                read("BY");
                r.setOrderByList(parseSimpleOrderList());
            }
            break;
        }
        case RANK:
        case DENSE_RANK:
        case PERCENT_RANK:
        case CUME_DIST: {
            if (isToken(CLOSE_PAREN)) {
                // without arguments these are window functions
                return readWindowFunction(aggregateName);
            }
            ArrayList<Expression> expressions = Utils.newSmallArrayList();
            do {
                expressions.add(readExpression());
            } while (readIfMore(true));
            r = readWithinGroup(aggregateType, expressions.toArray(new Expression[0]), false, true);
            break;
        }
        case PERCENTILE_CONT:
        case PERCENTILE_DISC: {
            Expression num = readExpression();
            read(CLOSE_PAREN);
            r = readWithinGroup(aggregateType, new Expression[] { num }, false, false);
            break;
        }
        case MODE: {
            if (readIf(CLOSE_PAREN)) {
                r = readWithinGroup(AggregateType.MODE, new Expression[0], false, false);
            } else {
                Expression expr = readExpression();
                r = new Aggregate(aggregateType, new Expression[0], currentSelect, false);
                if (readIf(ORDER)) {
                    read("BY");
                    // the ORDER BY expression must match the argument
                    Expression expr2 = readExpression();
                    String sql = expr.getSQL(true), sql2 = expr2.getSQL(true);
                    if (!sql.equals(sql2)) {
                        throw DbException.getSyntaxError(ErrorCode.IDENTICAL_EXPRESSIONS_SHOULD_BE_USED, sqlCommand,
                                lastParseIndex, sql, sql2);
                    }
                    readAggregateOrder(r, expr, true);
                } else {
                    readAggregateOrder(r, expr, false);
                }
            }
            break;
        }
        default:
            boolean distinct = readDistinctAgg();
            r = new Aggregate(aggregateType, new Expression[] { readExpression() }, currentSelect, distinct);
            break;
        }
        read(CLOSE_PAREN);
        readFilterAndOver(r);
        return r;
    }
    /**
     * Parses a WITHIN GROUP (ORDER BY ...) clause and creates the aggregate.
     * The closing parenthesis of the ORDER BY group is not consumed here.
     *
     * @param aggregateType the type of the aggregate
     * @param args the arguments parsed so far
     * @param distinct whether DISTINCT was specified
     * @param forHypotheticalSet whether this is a hypothetical set function
     *            (RANK etc.), which needs one ORDER BY term per argument
     * @return the parsed aggregate
     */
    private Aggregate readWithinGroup(AggregateType aggregateType, Expression[] args, boolean distinct,
            boolean forHypotheticalSet) {
        read("WITHIN");
        read(GROUP);
        read(OPEN_PAREN);
        read(ORDER);
        read("BY");
        Aggregate r = new Aggregate(aggregateType, args, currentSelect, distinct);
        if (forHypotheticalSet) {
            int count = args.length;
            ArrayList<SelectOrderBy> orderList = new ArrayList<>(count);
            for (int i = 0; i < count; i++) {
                if (i > 0) {
                    read(COMMA);
                }
                SelectOrderBy order = new SelectOrderBy();
                order.expression = readExpression();
                order.sortType = parseSimpleSortType();
                orderList.add(order);
            }
            r.setOrderByList(orderList);
        } else {
            readAggregateOrder(r, readExpression(), true);
        }
        return r;
    }
private void readAggregateOrder(Aggregate r, Expression expr, boolean parseSortType) {
ArrayList<SelectOrderBy> orderList = new ArrayList<>(1);
SelectOrderBy order = new SelectOrderBy();
order.expression = expr;
if (parseSortType) {
order.sortType = parseSimpleSortType();
}
orderList.add(order);
r.setOrderByList(orderList);
}
private ArrayList<SelectOrderBy> parseSimpleOrderList() {
ArrayList<SelectOrderBy> orderList = Utils.newSmallArrayList();
do {
SelectOrderBy order = new SelectOrderBy();
order.expression = readExpression();
order.sortType = parseSortType();
orderList.add(order);
} while (readIf(COMMA));
return orderList;
}
private JavaFunction readJavaFunction(Schema schema, String functionName, boolean throwIfNotFound) {
FunctionAlias functionAlias;
if (schema != null) {
functionAlias = schema.findFunction(functionName);
} else {
functionAlias = findFunctionAlias(session.getCurrentSchemaName(),
functionName);
}
if (functionAlias == null) {
if (throwIfNotFound) {
throw DbException.get(ErrorCode.FUNCTION_NOT_FOUND_1, functionName);
} else {
return null;
}
}
Expression[] args;
ArrayList<Expression> argList = Utils.newSmallArrayList();
if (!readIf(CLOSE_PAREN)) {
do {
argList.add(readExpression());
} while (readIfMore(true));
}
args = argList.toArray(new Expression[0]);
return new JavaFunction(functionAlias, args);
}
private JavaAggregate readJavaAggregate(UserAggregate aggregate) {
boolean distinct = readDistinctAgg();
ArrayList<Expression> params = Utils.newSmallArrayList();
do {
params.add(readExpression());
} while (readIfMore(true));
Expression[] list = params.toArray(new Expression[0]);
JavaAggregate agg = new JavaAggregate(aggregate, list, currentSelect, distinct);
readFilterAndOver(agg);
return agg;
}
private boolean readDistinctAgg() {
if (readIf(DISTINCT)) {
return true;
}
readIf(ALL);
return false;
}
private void readFilterAndOver(AbstractAggregate aggregate) {
if (readIf("FILTER")) {
read(OPEN_PAREN);
read(WHERE);
Expression filterCondition = readExpression();
read(CLOSE_PAREN);
aggregate.setFilterCondition(filterCondition);
}
readOver(aggregate);
}
private void readOver(DataAnalysisOperation operation) {
if (readIf("OVER")) {
operation.setOverCondition(readWindowNameOrSpecification());
currentSelect.setWindowQuery();
} else if (operation.isAggregate()) {
currentSelect.setGroupQuery();
} else {
throw getSyntaxError();
}
}
private Window readWindowNameOrSpecification() {
return isToken(OPEN_PAREN) ? readWindowSpecification() : new Window(readAliasIdentifier(), null, null, null);
}
    /**
     * Parses an inline window specification:
     * ( [parent name] [PARTITION BY ...] [ORDER BY ...] [frame clause] ).
     *
     * @return the parsed window
     */
    private Window readWindowSpecification() {
        read(OPEN_PAREN);
        String parent = null;
        if (currentTokenType == IDENTIFIER) {
            String token = currentToken;
            // an identifier here names the parent window, unless it is one
            // of the (unquoted) clause keywords
            if (currentTokenQuoted || ( //
                    !equalsToken(token, "PARTITION") //
                    && !equalsToken(token, "ROWS") //
                    && !equalsToken(token, "RANGE") //
                    && !equalsToken(token, "GROUPS"))) {
                parent = token;
                read();
            }
        }
        ArrayList<Expression> partitionBy = null;
        if (readIf("PARTITION")) {
            read("BY");
            partitionBy = Utils.newSmallArrayList();
            do {
                Expression expr = readExpression();
                partitionBy.add(expr);
            } while (readIf(COMMA));
        }
        ArrayList<SelectOrderBy> orderBy = null;
        if (readIf(ORDER)) {
            read("BY");
            orderBy = parseSimpleOrderList();
        }
        WindowFrame frame = readWindowFrame();
        read(CLOSE_PAREN);
        return new Window(parent, partitionBy, orderBy, frame);
    }
    /**
     * Parses an optional window frame clause: ROWS / RANGE / GROUPS with
     * either a single starting bound or BETWEEN ... AND ..., followed by an
     * optional EXCLUDE clause.
     *
     * @return the parsed frame, or {@code null} if no frame clause is
     *         present
     */
    private WindowFrame readWindowFrame() {
        WindowFrameUnits units;
        if (readIf("ROWS")) {
            units = WindowFrameUnits.ROWS;
        } else if (readIf("RANGE")) {
            units = WindowFrameUnits.RANGE;
        } else if (readIf("GROUPS")) {
            units = WindowFrameUnits.GROUPS;
        } else {
            return null;
        }
        WindowFrameBound starting, following;
        if (readIf("BETWEEN")) {
            starting = readWindowFrameRange();
            read("AND");
            following = readWindowFrameRange();
        } else {
            starting = readWindowFrameStarting();
            following = null;
        }
        // remember the position for the error message of an invalid frame
        int idx = lastParseIndex;
        WindowFrameExclusion exclusion = WindowFrameExclusion.EXCLUDE_NO_OTHERS;
        if (readIf("EXCLUDE")) {
            if (readIf("CURRENT")) {
                read(ROW);
                exclusion = WindowFrameExclusion.EXCLUDE_CURRENT_ROW;
            } else if (readIf(GROUP)) {
                exclusion = WindowFrameExclusion.EXCLUDE_GROUP;
            } else if (readIf("TIES")) {
                exclusion = WindowFrameExclusion.EXCLUDE_TIES;
            } else {
                read("NO");
                read("OTHERS");
            }
        }
        WindowFrame frame = new WindowFrame(units, starting, following, exclusion);
        if (!frame.isValid()) {
            throw DbException.getSyntaxError(sqlCommand, idx);
        }
        return frame;
    }
private WindowFrameBound readWindowFrameStarting() {
if (readIf("UNBOUNDED")) {
read("PRECEDING");
return new WindowFrameBound(WindowFrameBoundType.UNBOUNDED_PRECEDING, null);
}
if (readIf("CURRENT")) {
read(ROW);
return new WindowFrameBound(WindowFrameBoundType.CURRENT_ROW, null);
}
Expression value = readExpression();
read("PRECEDING");
return new WindowFrameBound(WindowFrameBoundType.PRECEDING, value);
}
private WindowFrameBound readWindowFrameRange() {
if (readIf("UNBOUNDED")) {
if (readIf("PRECEDING")) {
return new WindowFrameBound(WindowFrameBoundType.UNBOUNDED_PRECEDING, null);
}
read("FOLLOWING");
return new WindowFrameBound(WindowFrameBoundType.UNBOUNDED_FOLLOWING, null);
}
if (readIf("CURRENT")) {
read(ROW);
return new WindowFrameBound(WindowFrameBoundType.CURRENT_ROW, null);
}
Expression value = readExpression();
if (readIf("PRECEDING")) {
return new WindowFrameBound(WindowFrameBoundType.PRECEDING, value);
}
read("FOLLOWING");
return new WindowFrameBound(WindowFrameBoundType.FOLLOWING, value);
}
private AggregateType getAggregateType(String name) {
if (!identifiersToUpper) {
// if not yet converted to uppercase, do it now
name = StringUtils.toUpperEnglish(name);
}
return Aggregate.getAggregateType(name);
}
/**
 * Parses a function invocation; the name and the opening parenthesis have
 * already been read. Resolution order for unqualified names: built-in
 * alias overrides (if enabled), built-in aggregates, built-in functions,
 * window functions, user-defined aggregates, and finally Java function
 * aliases.
 *
 * @param schema the schema of a schema-qualified name, or {@code null}
 * @param name the function name
 * @return the parsed function expression
 */
private Expression readFunction(Schema schema, String name) {
    if (schema != null) {
        // a schema-qualified name can only reference a Java function alias
        return readJavaFunction(schema, name, true);
    }
    boolean allowOverride = database.isAllowBuiltinAliasOverride();
    if (allowOverride) {
        // user-defined aliases may shadow built-in functions
        JavaFunction jf = readJavaFunction(null, name, false);
        if (jf != null) {
            return jf;
        }
    }
    AggregateType agg = getAggregateType(name);
    if (agg != null) {
        return readAggregate(agg, name);
    }
    Function function = Function.getFunction(database, name);
    if (function == null) {
        WindowFunction windowFunction = readWindowFunction(name);
        if (windowFunction != null) {
            return windowFunction;
        }
        UserAggregate aggregate = database.findAggregate(name);
        if (aggregate != null) {
            return readJavaAggregate(aggregate);
        }
        if (allowOverride) {
            // the alias lookup above has already failed
            throw DbException.get(ErrorCode.FUNCTION_NOT_FOUND_1, name);
        }
        // throws FUNCTION_NOT_FOUND_1 if there is no such alias either
        return readJavaFunction(null, name, true);
    }
    // built-in functions with special argument syntax
    switch (function.getFunctionType()) {
    case Function.CAST: {
        // CAST(expression AS dataType)
        function.setParameter(0, readExpression());
        read("AS");
        Column type = parseColumnWithType(null, false);
        function.setDataType(type);
        read(CLOSE_PAREN);
        break;
    }
    case Function.CONVERT: {
        // MS SQL Server uses CONVERT(type, expr), others CONVERT(expr, type)
        if (database.getMode().swapConvertFunctionParameters) {
            Column type = parseColumnWithType(null, false);
            function.setDataType(type);
            read(COMMA);
            function.setParameter(0, readExpression());
            read(CLOSE_PAREN);
        } else {
            function.setParameter(0, readExpression());
            read(COMMA);
            Column type = parseColumnWithType(null, false);
            function.setDataType(type);
            read(CLOSE_PAREN);
        }
        break;
    }
    case Function.EXTRACT: {
        // EXTRACT(field FROM expression); the field name is passed
        // through as a string parameter
        function.setParameter(0, ValueExpression.get(ValueString.get(currentToken)));
        read();
        read(FROM);
        function.setParameter(1, readExpression());
        read(CLOSE_PAREN);
        break;
    }
    case Function.DATE_ADD:
    case Function.DATE_DIFF: {
        // the datetime field may appear as a string literal or bare name
        if (currentTokenType == VALUE) {
            function.setParameter(0, ValueExpression.get(currentValue.convertTo(Value.STRING)));
        } else {
            function.setParameter(0, ValueExpression.get(ValueString.get(currentToken)));
        }
        read();
        read(COMMA);
        function.setParameter(1, readExpression());
        read(COMMA);
        function.setParameter(2, readExpression());
        read(CLOSE_PAREN);
        break;
    }
    case Function.SUBSTRING: {
        // Different variants include:
        // SUBSTRING(X,1)
        // SUBSTRING(X,1,1)
        // SUBSTRING(X FROM 1 FOR 1) -- Postgres
        // SUBSTRING(X FROM 1) -- Postgres
        // SUBSTRING(X FOR 1) -- Postgres
        function.setParameter(0, readExpression());
        if (readIf(FROM)) {
            function.setParameter(1, readExpression());
            if (readIf(FOR)) {
                function.setParameter(2, readExpression());
            }
        } else if (readIf(FOR)) {
            // SUBSTRING(X FOR 1) means from position 0
            function.setParameter(1, ValueExpression.get(ValueInt.get(0)));
            function.setParameter(2, readExpression());
        } else {
            read(COMMA);
            function.setParameter(1, readExpression());
            if (readIf(COMMA)) {
                function.setParameter(2, readExpression());
            }
        }
        read(CLOSE_PAREN);
        break;
    }
    case Function.POSITION: {
        // can't read expression because IN would be read too early
        function.setParameter(0, readConcat());
        if (!readIf(COMMA)) {
            read("IN");
        }
        function.setParameter(1, readExpression());
        read(CLOSE_PAREN);
        break;
    }
    case Function.TRIM: {
        // TRIM([LEADING|TRAILING|BOTH] [chars] FROM string) maps to
        // LTRIM/RTRIM/TRIM with an optional characters argument
        Expression space = null;
        if (readIf("LEADING")) {
            function = Function.getFunction(database, "LTRIM");
            if (!readIf(FROM)) {
                space = readExpression();
                read(FROM);
            }
        } else if (readIf("TRAILING")) {
            function = Function.getFunction(database, "RTRIM");
            if (!readIf(FROM)) {
                space = readExpression();
                read(FROM);
            }
        } else if (readIf("BOTH")) {
            if (!readIf(FROM)) {
                space = readExpression();
                read(FROM);
            }
        }
        Expression p0 = readExpression();
        if (readIf(COMMA)) {
            // TRIM(string, chars) legacy form
            space = readExpression();
        } else if (readIf(FROM)) {
            // TRIM(chars FROM string): swap the operands
            space = p0;
            p0 = readExpression();
        }
        function.setParameter(0, p0);
        if (space != null) {
            function.setParameter(1, space);
        }
        read(CLOSE_PAREN);
        break;
    }
    case Function.TABLE:
    case Function.TABLE_DISTINCT: {
        // TABLE(name type = expression, ...)
        int i = 0;
        ArrayList<Column> columns = Utils.newSmallArrayList();
        do {
            String columnName = readAliasIdentifier();
            Column column = parseColumnWithType(columnName, false);
            columns.add(column);
            read(EQUAL);
            function.setParameter(i, readExpression());
            i++;
        } while (readIfMore(true));
        TableFunction tf = (TableFunction) function;
        tf.setColumns(columns);
        break;
    }
    case Function.UNNEST: {
        // UNNEST(array, ...) [WITH ORDINALITY]
        ArrayList<Column> columns = Utils.newSmallArrayList();
        if (!readIf(CLOSE_PAREN)) {
            int i = 0;
            do {
                function.setParameter(i++, readExpression());
                // result columns are named C1, C2, ...
                columns.add(new Column("C" + i, Value.NULL));
            } while (readIfMore(true));
        }
        if (readIf(WITH)) {
            read("ORDINALITY");
            // extra ordinal-number column
            columns.add(new Column("NORD", Value.INT));
        }
        TableFunction tf = (TableFunction) function;
        tf.setColumns(columns);
        break;
    }
    default:
        // ordinary comma-separated argument list
        if (!readIf(CLOSE_PAREN)) {
            int i = 0;
            do {
                function.setParameter(i++, readExpression());
            } while (readIfMore(true));
        }
    }
    function.doneWithParameters();
    return function;
}
/**
 * Tries to parse a window function with the specified name; the opening
 * parenthesis has already been read.
 *
 * @param name the function name, possibly not yet upper-cased
 * @return the parsed window function, or {@code null} if the name does
 *         not denote a window function
 */
private WindowFunction readWindowFunction(String name) {
    if (!identifiersToUpper) {
        // if not yet converted to uppercase, do it now
        name = StringUtils.toUpperEnglish(name);
    }
    WindowFunctionType type = WindowFunctionType.get(name);
    if (type == null) {
        return null;
    }
    if (currentSelect == null) {
        // window functions are only valid within a SELECT
        throw getSyntaxError();
    }
    int numArgs = WindowFunction.getMinArgumentCount(type);
    Expression[] args = null;
    if (numArgs > 0) {
        // There is no functions with numArgs == 0 && numArgsMax > 0
        int numArgsMax = WindowFunction.getMaxArgumentCount(type);
        args = new Expression[numArgsMax];
        if (numArgs == numArgsMax) {
            // fixed argument count: read exactly numArgs expressions
            for (int i = 0; i < numArgs; i++) {
                if (i > 0) {
                    read(COMMA);
                }
                args[i] = readExpression();
            }
        } else {
            // variable argument count: keep reading while commas follow
            int i = 0;
            while (i < numArgsMax) {
                if (i > 0 && !readIf(COMMA)) {
                    break;
                }
                args[i] = readExpression();
                i++;
            }
            if (i < numArgs) {
                // fewer arguments than the minimum required
                throw getSyntaxError();
            }
            if (i != numArgsMax) {
                // shrink to the actual argument count
                args = Arrays.copyOf(args, i);
            }
        }
    }
    read(CLOSE_PAREN);
    WindowFunction function = new WindowFunction(type, currentSelect, args);
    if (type == WindowFunctionType.NTH_VALUE) {
        // optional FROM FIRST / FROM LAST clause
        readFromFirstOrLast(function);
    }
    switch (type) {
    case LEAD:
    case LAG:
    case FIRST_VALUE:
    case LAST_VALUE:
    case NTH_VALUE:
        // these functions optionally accept RESPECT / IGNORE NULLS
        readRespectOrIgnoreNulls(function);
        //$FALL-THROUGH$
    default:
        // Avoid warning
    }
    readOver(function);
    return function;
}
/**
 * Reads an optional FROM FIRST / FROM LAST clause of NTH_VALUE.
 * FROM FIRST is the default and leaves the function unchanged.
 *
 * @param function the window function to configure
 */
private void readFromFirstOrLast(WindowFunction function) {
    if (readIf(FROM)) {
        if (!readIf("FIRST")) {
            read("LAST");
            function.setFromLast(true);
        }
    }
}
/**
 * Reads an optional RESPECT NULLS / IGNORE NULLS clause.
 * RESPECT NULLS is the default and leaves the function unchanged.
 *
 * @param function the window function to configure
 */
private void readRespectOrIgnoreNulls(WindowFunction function) {
    // RESPECT must be probed first to keep the expected-token list stable
    boolean respect = readIf("RESPECT");
    if (respect || readIf("IGNORE")) {
        read("NULLS");
        if (!respect) {
            function.setIgnoreNulls(true);
        }
    }
}
/**
 * Reads a keyword-style function such as CURRENT_TIMESTAMP that may be
 * used either with or without a parenthesized argument list.
 *
 * @param name the function name
 * @return the parsed function expression
 */
private Expression readKeywordFunction(String name) {
    return readIf(OPEN_PAREN)
            ? readFunction(null, name)
            : readFunctionWithoutParameters(name);
}
/**
 * Builds a parameterless function call such as CURRENT_DATE. A
 * user-defined alias in the current schema takes precedence when
 * built-in alias overriding is enabled.
 *
 * @param name the function name
 * @return the parsed function expression
 */
private Expression readFunctionWithoutParameters(String name) {
    if (database.isAllowBuiltinAliasOverride()) {
        FunctionAlias alias = database.getSchema(session.getCurrentSchemaName()).findFunction(name);
        if (alias != null) {
            return new JavaFunction(alias, new Expression[0]);
        }
    }
    Function f = Function.getFunction(database, name);
    f.doneWithParameters();
    return f;
}
/**
 * Parses the part after "objectName." when it is a wildcard ({@code *}),
 * the _ROWID_ pseudo-column, or a sequence pseudo-column
 * (NEXTVAL / CURRVAL).
 *
 * @param schema the schema name, or {@code null} for the current schema
 * @param objectName the object (table alias or sequence) name
 * @return the parsed expression, or {@code null} if the next token does
 *         not match any of the supported forms
 */
private Expression readWildcardRowidOrSequenceValue(String schema, String objectName) {
    if (readIf(ASTERISK)) {
        return parseWildcard(schema, objectName);
    }
    if (readIf(_ROWID_)) {
        return new ExpressionColumn(database, schema, objectName, Column.ROWID, true);
    }
    if (schema == null) {
        schema = session.getCurrentSchemaName();
    }
    if (readIf("NEXTVAL")) {
        Sequence sequence = findSequence(schema, objectName);
        if (sequence != null) {
            return new SequenceValue(sequence);
        }
    } else if (readIf("CURRVAL")) {
        Sequence sequence = findSequence(schema, objectName);
        if (sequence != null) {
            // CURRVAL is implemented as a built-in function taking the
            // schema and sequence names as string arguments
            Function function = Function.getFunction(database, "CURRVAL");
            function.setParameter(0, ValueExpression.get(ValueString
                    .get(sequence.getSchema().getName())));
            function.setParameter(1, ValueExpression.get(ValueString
                    .get(sequence.getName())));
            function.doneWithParameters();
            return function;
        }
    }
    return null;
}
/**
 * Parses a wildcard ({@code *} or {@code t.*}), optionally followed by an
 * EXCEPT (...) list of columns to exclude.
 *
 * @param schema the schema name, or {@code null}
 * @param objectName the table alias, or {@code null}
 * @return the parsed wildcard
 */
private Wildcard parseWildcard(String schema, String objectName) {
    Wildcard wildcard = new Wildcard(schema, objectName);
    if (readIf(EXCEPT)) {
        read(OPEN_PAREN);
        ArrayList<ExpressionColumn> exceptColumns = Utils.newSmallArrayList();
        do {
            // each entry may be column, table.column, schema.table.column
            // or catalog.schema.table.column; s = schema, t = table
            String s = null, t = null;
            String name = readColumnIdentifier();
            if (readIf(DOT)) {
                t = name;
                name = readColumnIdentifier();
                if (readIf(DOT)) {
                    s = t;
                    t = name;
                    name = readColumnIdentifier();
                    if (readIf(DOT)) {
                        // four parts: the first must name this database
                        if (!equalsToken(database.getShortName(), s)) {
                            throw DbException.get(ErrorCode.DATABASE_NOT_FOUND_1, s);
                        }
                        s = t;
                        t = name;
                        name = readColumnIdentifier();
                    }
                }
            }
            exceptColumns.add(new ExpressionColumn(database, s, t, name, false));
        } while (readIfMore(true));
        wildcard.setExceptColumns(exceptColumns);
    }
    return wildcard;
}
/**
 * Parses a term that started with "objectName." — a qualified column,
 * a schema-qualified function call, a wildcard, _ROWID_, or a sequence
 * pseudo-column, with up to catalog.schema.object.column nesting.
 *
 * @param objectName the identifier before the first dot
 * @return the parsed expression
 */
private Expression readTermObjectDot(String objectName) {
    Expression expr = readWildcardRowidOrSequenceValue(null, objectName);
    if (expr != null) {
        return expr;
    }
    String name = readColumnIdentifier();
    Schema s = database.findSchema(objectName);
    if (readIf(OPEN_PAREN)) {
        // objectName.name(...) — a schema-qualified function call
        return readFunction(s, name);
    } else if (readIf(DOT)) {
        // at least three parts: schema.object.?
        String schema = objectName;
        objectName = name;
        expr = readWildcardRowidOrSequenceValue(schema, objectName);
        if (expr != null) {
            return expr;
        }
        name = readColumnIdentifier();
        if (readIf(OPEN_PAREN)) {
            // database.schema.function(...) — first part must be this db
            String databaseName = schema;
            if (!equalsToken(database.getShortName(), databaseName)) {
                throw DbException.get(ErrorCode.DATABASE_NOT_FOUND_1,
                        databaseName);
            }
            schema = objectName;
            return readFunction(database.getSchema(schema), name);
        } else if (readIf(DOT)) {
            // four parts: database.schema.object.column
            String databaseName = schema;
            if (!equalsToken(database.getShortName(), databaseName)) {
                throw DbException.get(ErrorCode.DATABASE_NOT_FOUND_1,
                        databaseName);
            }
            schema = objectName;
            objectName = name;
            expr = readWildcardRowidOrSequenceValue(schema, objectName);
            if (expr != null) {
                return expr;
            }
            name = readColumnIdentifier();
            return new ExpressionColumn(database, schema, objectName, name, false);
        }
        return new ExpressionColumn(database, schema, objectName, name, false);
    }
    // two parts: object.column
    return new ExpressionColumn(database, null, objectName, name, false);
}
/**
 * Parses a parameter marker: either an unindexed {@code ?} or an indexed
 * {@code ?n} form. Indexed and unindexed parameters must not be mixed in
 * one statement.
 *
 * @return the parameter expression (shared for repeated indexed uses)
 */
private Parameter readParameter() {
    // there must be no space between ? and the number
    boolean indexed = Character.isDigit(sqlCommandChars[parseIndex]);
    Parameter p;
    if (indexed) {
        readParameterIndex();
        if (indexedParameterList == null) {
            if (parameters == null) {
                // this can occur when parsing expressions only (for
                // example check constraints)
                throw getSyntaxError();
            } else if (!parameters.isEmpty()) {
                throw DbException
                        .get(ErrorCode.CANNOT_MIX_INDEXED_AND_UNINDEXED_PARAMS);
            }
            indexedParameterList = Utils.newSmallArrayList();
        }
        // indexes are 1-based in SQL, 0-based internally
        int index = currentValue.getInt() - 1;
        if (index < 0 || index >= Constants.MAX_PARAMETER_INDEX) {
            throw DbException.getInvalidValueException(
                    "parameter index", index + 1);
        }
        if (indexedParameterList.size() <= index) {
            // pad the list with nulls up to the requested index
            indexedParameterList.ensureCapacity(index + 1);
            while (indexedParameterList.size() <= index) {
                indexedParameterList.add(null);
            }
        }
        p = indexedParameterList.get(index);
        if (p == null) {
            // first use of this index: create and remember the parameter
            p = new Parameter(index);
            indexedParameterList.set(index, p);
        }
        read();
    } else {
        read();
        if (indexedParameterList != null) {
            throw DbException
                    .get(ErrorCode.CANNOT_MIX_INDEXED_AND_UNINDEXED_PARAMS);
        }
        // unindexed parameters are numbered in order of appearance
        p = new Parameter(parameters.size());
    }
    parameters.add(p);
    return p;
}
/**
 * Parses a term: the highest-precedence unit of an expression — literals,
 * variables, parameters, column references, function calls, subqueries,
 * parenthesized expressions, CASE, ARRAY and INTERVAL constructors —
 * followed by optional array indexing ({@code [n]}) and PostgreSQL-style
 * casts ({@code ::type}).
 *
 * @return the parsed expression
 */
private Expression readTerm() {
    Expression r;
    switch (currentTokenType) {
    case AT:
        // @var, optionally with := assignment
        read();
        r = new Variable(session, readAliasIdentifier());
        if (readIf(COLON_EQ)) {
            Expression value = readExpression();
            Function function = Function.getFunction(database, "SET");
            function.setParameter(0, r);
            function.setParameter(1, value);
            r = function;
        }
        break;
    case PARAMETER:
        r = readParameter();
        break;
    case SELECT:
    case FROM:
    case WITH:
        r = new Subquery(parseSelect());
        break;
    case TABLE:
        // either the TABLE(...) function or a TABLE query
        int index = lastParseIndex;
        read();
        if (readIf(OPEN_PAREN)) {
            r = readFunction(null, "TABLE");
        } else {
            // not a function call: re-parse as a TABLE statement
            parseIndex = index;
            read();
            r = new Subquery(parseSelect());
        }
        break;
    case IDENTIFIER:
        String name = currentToken;
        if (currentTokenQuoted) {
            // quoted identifiers never get special literal treatment
            read();
            if (readIf(OPEN_PAREN)) {
                r = readFunction(null, name);
            } else if (readIf(DOT)) {
                r = readTermObjectDot(name);
            } else {
                r = new ExpressionColumn(database, null, null, name, false);
            }
        } else {
            read();
            if (readIf(DOT)) {
                r = readTermObjectDot(name);
            } else if (readIf(OPEN_PAREN)) {
                r = readFunction(null, name);
            } else {
                // may be a special literal prefix (DATE, TIME, X, ...)
                r = readTermWithIdentifier(name);
            }
        }
        break;
    case MINUS_SIGN:
        read();
        if (currentTokenType == VALUE) {
            // negate the literal directly so boundary values stay in range
            r = ValueExpression.get(currentValue.negate());
            int rType = r.getType().getValueType();
            if (rType == Value.LONG &&
                    r.getValue(session).getLong() == Integer.MIN_VALUE) {
                // convert Integer.MIN_VALUE to type 'int'
                // (Integer.MAX_VALUE+1 is of type 'long')
                r = ValueExpression.get(ValueInt.get(Integer.MIN_VALUE));
            } else if (rType == Value.DECIMAL &&
                    r.getValue(session).getBigDecimal().compareTo(Value.MIN_LONG_DECIMAL) == 0) {
                // convert Long.MIN_VALUE to type 'long'
                // (Long.MAX_VALUE+1 is of type 'decimal')
                r = ValueExpression.get(ValueLong.MIN);
            }
            read();
        } else {
            r = new UnaryOperation(readTerm());
        }
        break;
    case PLUS_SIGN:
        // unary plus is a no-op
        read();
        r = readTerm();
        break;
    case OPEN_PAREN:
        read();
        if (readIf(CLOSE_PAREN)) {
            // () is an empty row value
            r = ValueExpression.get(ValueRow.getEmpty());
        } else {
            r = readExpression();
            if (readIfMore(true)) {
                // (a, b, ...) is a row value expression list
                ArrayList<Expression> list = Utils.newSmallArrayList();
                list.add(r);
                if (!readIf(CLOSE_PAREN)) {
                    do {
                        list.add(readExpression());
                    } while (readIfMore(false));
                }
                r = new ExpressionList(list.toArray(new Expression[0]), false);
            }
        }
        break;
    case ARRAY:
        // ARRAY[e1, e2, ...] constructor
        read();
        read(OPEN_BRACKET);
        if (readIf(CLOSE_BRACKET)) {
            r = ValueExpression.get(ValueArray.getEmpty());
        } else {
            ArrayList<Expression> list = Utils.newSmallArrayList();
            list.add(readExpression());
            while (readIf(COMMA)) {
                list.add(readExpression());
            }
            read(CLOSE_BRACKET);
            r = new ExpressionList(list.toArray(new Expression[0]), true);
        }
        break;
    case INTERVAL:
        read();
        r = readInterval();
        break;
    case ROW: {
        // ROW(e1, e2, ...) constructor
        read();
        read(OPEN_PAREN);
        if (readIf(CLOSE_PAREN)) {
            r = ValueExpression.get(ValueRow.getEmpty());
        } else {
            ArrayList<Expression> list = Utils.newSmallArrayList();
            do {
                list.add(readExpression());
            } while (readIfMore(true));
            r = new ExpressionList(list.toArray(new Expression[0]), false);
        }
        break;
    }
    case TRUE:
        read();
        r = ValueExpression.get(ValueBoolean.TRUE);
        break;
    case FALSE:
        read();
        r = ValueExpression.get(ValueBoolean.FALSE);
        break;
    case ROWNUM:
        // ROWNUM or ROWNUM(); requires an enclosing statement
        read();
        if (readIf(OPEN_PAREN)) {
            read(CLOSE_PAREN);
        }
        if (currentSelect == null && currentPrepared == null) {
            throw getSyntaxError();
        }
        r = new Rownum(currentSelect == null ? currentPrepared
                : currentSelect);
        break;
    case NULL:
        read();
        r = ValueExpression.getNull();
        break;
    case _ROWID_:
        read();
        r = new ExpressionColumn(database, null, null, Column.ROWID, true);
        break;
    case VALUE:
        r = ValueExpression.get(currentValue);
        read();
        break;
    case VALUES:
        if (database.getMode().onDuplicateKeyUpdate) {
            // MySQL compatibility: VALUES(col) in ON DUPLICATE KEY UPDATE
            read();
            r = readKeywordFunction("VALUES");
        } else {
            r = new Subquery(parseSelect());
        }
        break;
    case CASE:
        read();
        r = readCase();
        break;
    case CURRENT_DATE:
        read();
        r = readKeywordFunction("CURRENT_DATE");
        break;
    case CURRENT_TIME:
        read();
        r = readKeywordFunction("CURRENT_TIME");
        break;
    case CURRENT_TIMESTAMP:
        read();
        r = readKeywordFunction("CURRENT_TIMESTAMP");
        break;
    case CURRENT_USER:
        read();
        r = readKeywordFunction("USER");
        break;
    case LOCALTIME:
        read();
        r = readKeywordFunction("LOCALTIME");
        break;
    case LOCALTIMESTAMP:
        read();
        r = readKeywordFunction("LOCALTIMESTAMP");
        break;
    default:
        throw getSyntaxError();
    }
    if (readIf(OPEN_BRACKET)) {
        // array element access: term[index]
        Function function = Function.getFunction(database, "ARRAY_GET");
        function.setParameter(0, r);
        function.setParameter(1, readExpression());
        r = function;
        read(CLOSE_BRACKET);
    }
    if (readIf(COLON_COLON)) {
        // PostgreSQL compatibility
        if (isToken("PG_CATALOG")) {
            read("PG_CATALOG");
            read(DOT);
        }
        if (readIf("REGCLASS")) {
            // ::regclass requires the PG_GET_OID alias to be installed
            FunctionAlias f = findFunctionAlias(database.getMainSchema().getName(), "PG_GET_OID");
            if (f == null) {
                throw getSyntaxError();
            }
            Expression[] args = { r };
            r = new JavaFunction(f, args);
        } else {
            // ::type is equivalent to CAST(term AS type)
            Column col = parseColumnWithType(null, false);
            Function function = Function.getFunction(database, "CAST");
            function.setDataType(col);
            function.setParameter(0, r);
            r = function;
        }
    }
    return r;
}
/**
 * Parses a term that started with an unquoted identifier which is neither
 * a function call nor a dotted name. Handles special literal prefixes
 * such as DATE '...', TIME '...', TIMESTAMP '...', E'...', N'...',
 * X'...', NEXT VALUE FOR, DB2 special registers and Oracle-style
 * SYSDATE/SYSTIMESTAMP; everything else becomes a column reference.
 *
 * @param name the already-read identifier
 * @return the parsed expression
 */
private Expression readTermWithIdentifier(String name) {
    // Unquoted identifier is never empty
    char ch = name.charAt(0);
    if (!identifiersToUpper) {
        /*
         * Convert a-z to A-Z. This method is safe, because only A-Z
         * characters are considered below.
         */
        ch &= 0xffdf;
    }
    switch (ch) {
    case 'C':
        if (database.getMode().getEnum() == ModeEnum.DB2 && equalsToken("CURRENT", name)) {
            // DB2 special registers: CURRENT TIMESTAMP etc.
            return parseDB2SpecialRegisters(name);
        }
        break;
    case 'D':
        // DATE '...' or the ODBC shorthand D '...'
        if (currentTokenType == VALUE && currentValue.getValueType() == Value.STRING &&
                (equalsToken("DATE", name) || equalsToken("D", name))) {
            String date = currentValue.getString();
            read();
            return ValueExpression.get(ValueDate.parse(date));
        }
        break;
    case 'E':
        if (currentTokenType == VALUE && currentValue.getValueType() == Value.STRING && equalsToken("E", name)) {
            String text = currentValue.getString();
            // the PostgreSQL ODBC driver uses
            // LIKE E'PROJECT\\_DATA' instead of LIKE
            // 'PROJECT\_DATA'
            // N: SQL-92 "National Language" strings
            text = StringUtils.replaceAll(text, "\\\\", "\\");
            read();
            return ValueExpression.get(ValueString.get(text));
        }
        break;
    case 'N':
        if (equalsToken("NEXT", name) && readIf("VALUE")) {
            // NEXT VALUE FOR sequence
            read(FOR);
            return new SequenceValue(readSequence());
        } else if (currentTokenType == VALUE && currentValue.getValueType() == Value.STRING
                && equalsToken("N", name)) {
            // SQL-92 "National Language" strings
            String text = currentValue.getString();
            read();
            return ValueExpression.get(ValueString.get(text));
        }
        break;
    case 'S':
        // Oracle compatibility functions
        if (equalsToken("SYSDATE", name)) {
            return readFunctionWithoutParameters("CURRENT_TIMESTAMP");
        } else if (equalsToken("SYSTIME", name)) {
            return readFunctionWithoutParameters("CURRENT_TIME");
        } else if (equalsToken("SYSTIMESTAMP", name)) {
            return readFunctionWithoutParameters("CURRENT_TIMESTAMP");
        }
        break;
    case 'T':
        if (equalsToken("TIME", name)) {
            // TIME [WITHOUT TIME ZONE] '...'
            boolean without = readIf("WITHOUT");
            if (without) {
                read("TIME");
                read("ZONE");
            }
            if (currentTokenType == VALUE && currentValue.getValueType() == Value.STRING) {
                String time = currentValue.getString();
                read();
                return ValueExpression.get(ValueTime.parse(time));
            } else if (without) {
                // WITHOUT TIME ZONE requires a string literal to follow
                throw getSyntaxError();
            }
        } else if (equalsToken("TIMESTAMP", name)) {
            if (readIf(WITH)) {
                // TIMESTAMP WITH TIME ZONE '...'
                read("TIME");
                read("ZONE");
                if (currentTokenType != VALUE || currentValue.getValueType() != Value.STRING) {
                    throw getSyntaxError();
                }
                String timestamp = currentValue.getString();
                read();
                return ValueExpression.get(ValueTimestampTimeZone.parse(timestamp));
            } else {
                // TIMESTAMP [WITHOUT TIME ZONE] '...'
                boolean without = readIf("WITHOUT");
                if (without) {
                    read("TIME");
                    read("ZONE");
                }
                if (currentTokenType == VALUE && currentValue.getValueType() == Value.STRING) {
                    String timestamp = currentValue.getString();
                    read();
                    return ValueExpression.get(ValueTimestamp.parse(timestamp, database.getMode()));
                } else if (without) {
                    throw getSyntaxError();
                }
            }
        } else if (equalsToken("TODAY", name)) {
            return readFunctionWithoutParameters("CURRENT_DATE");
        } else if (currentTokenType == VALUE && currentValue.getValueType() == Value.STRING) {
            // ODBC shorthands T '...' and TS '...'
            if (equalsToken("T", name)) {
                String time = currentValue.getString();
                read();
                return ValueExpression.get(ValueTime.parse(time));
            } else if (equalsToken("TS", name)) {
                String timestamp = currentValue.getString();
                read();
                return ValueExpression.get(ValueTimestamp.parse(timestamp, database.getMode()));
            }
        }
        break;
    case 'X':
        // X'hex' binary literal
        if (currentTokenType == VALUE && currentValue.getValueType() == Value.STRING && equalsToken("X", name)) {
            byte[] buffer = StringUtils.convertHexToBytes(currentValue.getString());
            read();
            return ValueExpression.get(ValueBytes.getNoCopy(buffer));
        }
        break;
    }
    // no special form matched: treat as a plain column reference
    return new ExpressionColumn(database, null, null, name, false);
}
/**
 * Parses an INTERVAL literal after the INTERVAL keyword: an optional
 * sign, a string value and an interval qualifier such as YEAR TO MONTH
 * or DAY TO SECOND.
 *
 * @return the parsed interval value expression
 */
private Expression readInterval() {
    boolean negative = readIf(MINUS_SIGN);
    if (!negative) {
        // an explicit plus sign is allowed and ignored
        readIf(PLUS_SIGN);
    }
    String s = readString();
    IntervalQualifier qualifier;
    if (readIf("YEAR")) {
        if (readIf("TO")) {
            read("MONTH");
            qualifier = IntervalQualifier.YEAR_TO_MONTH;
        } else {
            qualifier = IntervalQualifier.YEAR;
        }
    } else if (readIf("MONTH")) {
        qualifier = IntervalQualifier.MONTH;
    } else if (readIf("DAY")) {
        if (readIf("TO")) {
            if (readIf("HOUR")) {
                qualifier = IntervalQualifier.DAY_TO_HOUR;
            } else if (readIf("MINUTE")) {
                qualifier = IntervalQualifier.DAY_TO_MINUTE;
            } else {
                read("SECOND");
                qualifier = IntervalQualifier.DAY_TO_SECOND;
            }
        } else {
            qualifier = IntervalQualifier.DAY;
        }
    } else if (readIf("HOUR")) {
        if (readIf("TO")) {
            if (readIf("MINUTE")) {
                qualifier = IntervalQualifier.HOUR_TO_MINUTE;
            } else {
                read("SECOND");
                qualifier = IntervalQualifier.HOUR_TO_SECOND;
            }
        } else {
            qualifier = IntervalQualifier.HOUR;
        }
    } else if (readIf("MINUTE")) {
        if (readIf("TO")) {
            read("SECOND");
            qualifier = IntervalQualifier.MINUTE_TO_SECOND;
        } else {
            qualifier = IntervalQualifier.MINUTE;
        }
    } else {
        read("SECOND");
        qualifier = IntervalQualifier.SECOND;
    }
    try {
        return ValueExpression.get(IntervalUtils.parseInterval(qualifier, negative, s));
    } catch (Exception e) {
        // surface malformed interval strings as a datetime-constant error
        throw DbException.get(ErrorCode.INVALID_DATETIME_CONSTANT_2, e, "INTERVAL", s);
    }
}
/**
 * Parses DB2 special registers such as CURRENT TIMESTAMP, CURRENT TIME
 * and CURRENT DATE. If no register matches, CURRENT is treated as a
 * plain column name.
 *
 * @param name the already-read identifier (only "CURRENT" is supported)
 * @return the parsed expression
 */
private Expression parseDB2SpecialRegisters(String name) {
    if (readIf("TIMESTAMP")) {
        if (readIf(WITH)) {
            read("TIME");
            read("ZONE");
            return readKeywordFunction("CURRENT_TIMESTAMP");
        }
        return readKeywordFunction("LOCALTIMESTAMP");
    }
    if (readIf("TIME")) {
        // Time with fractional seconds is not supported by DB2
        return readFunctionWithoutParameters("CURRENT_TIME");
    }
    if (readIf("DATE")) {
        return readFunctionWithoutParameters("CURRENT_DATE");
    }
    // no register matched: fall back to a column named CURRENT
    return new ExpressionColumn(database, null, null, name, false);
}
/**
 * Parses a CASE expression after the CASE keyword has been read. Supports
 * the searched form (CASE WHEN condition THEN ...), the simple form
 * (CASE expr WHEN value THEN ...), and the degenerate forms with only an
 * ELSE part or nothing at all, which evaluate to the ELSE value or NULL.
 * An optional trailing CASE keyword (END CASE) is accepted.
 *
 * @return the parsed expression
 */
private Expression readCase() {
    if (readIf("END")) {
        // CASE END [CASE] evaluates to NULL
        readIf(CASE);
        return ValueExpression.getNull();
    }
    if (readIf("ELSE")) {
        // CASE ELSE expr END [CASE]: unconditionally the ELSE value
        Expression elsePart = readExpression().optimize(session);
        read("END");
        readIf(CASE);
        return elsePart;
    }
    int i;
    Function function;
    if (readIf("WHEN")) {
        // searched CASE: WHEN condition THEN result ...
        function = Function.getFunction(database, "CASE");
        function.setParameter(0, null);
        i = 1;
        do {
            function.setParameter(i++, readExpression());
            read("THEN");
            function.setParameter(i++, readExpression());
        } while (readIf("WHEN"));
    } else {
        // simple CASE: CASE expr WHEN value THEN result ...
        Expression expr = readExpression();
        if (readIf("END")) {
            readIf(CASE);
            return ValueExpression.getNull();
        }
        if (readIf("ELSE")) {
            Expression elsePart = readExpression().optimize(session);
            read("END");
            readIf(CASE);
            return elsePart;
        }
        function = Function.getFunction(database, "CASE");
        function.setParameter(0, expr);
        i = 1;
        read("WHEN");
        do {
            function.setParameter(i++, readExpression());
            read("THEN");
            function.setParameter(i++, readExpression());
        } while (readIf("WHEN"));
    }
    if (readIf("ELSE")) {
        function.setParameter(i, readExpression());
    }
    read("END");
    // use the CASE token constant, consistent with the call sites above
    // (readIf("CASE") and readIf(CASE) match exactly the same tokens)
    readIf(CASE);
    function.doneWithParameters();
    return function;
}
/**
 * Reads an integer and rejects negative values.
 *
 * @return the non-negative integer
 */
private int readNonNegativeInt() {
    int value = readInt();
    if (value >= 0) {
        return value;
    }
    throw DbException.getInvalidValueException("non-negative integer", value);
}
/**
 * Reads an integer literal with an optional leading sign.
 *
 * @return the parsed integer
 */
private int readInt() {
    boolean negative = false;
    if (currentTokenType == MINUS_SIGN) {
        negative = true;
        read();
    } else if (currentTokenType == PLUS_SIGN) {
        read();
    }
    if (currentTokenType != VALUE) {
        throw DbException.getSyntaxError(sqlCommand, parseIndex, "integer");
    }
    if (negative) {
        // negate the token value, not the int result: otherwise
        // Integer.MIN_VALUE would not be representable
        currentValue = currentValue.negate();
    }
    int result = currentValue.getInt();
    read();
    return result;
}
/**
 * Reads a long and rejects negative values.
 *
 * @return the non-negative long
 */
private long readNonNegativeLong() {
    long value = readLong();
    if (value >= 0) {
        return value;
    }
    throw DbException.getInvalidValueException("non-negative long", value);
}
/**
 * Reads a long literal with an optional leading sign.
 *
 * @return the parsed long
 */
private long readLong() {
    boolean negative = false;
    if (currentTokenType == MINUS_SIGN) {
        negative = true;
        read();
    } else if (currentTokenType == PLUS_SIGN) {
        read();
    }
    if (currentTokenType != VALUE) {
        throw DbException.getSyntaxError(sqlCommand, parseIndex, "long");
    }
    if (negative) {
        // negate the token value, not the long result: otherwise
        // Long.MIN_VALUE would not be representable
        currentValue = currentValue.negate();
    }
    long result = currentValue.getLong();
    read();
    return result;
}
/**
 * Reads a boolean setting value: ON/TRUE, OFF/FALSE, or a literal value
 * converted to boolean.
 *
 * @return the parsed boolean
 */
private boolean readBooleanSetting() {
    switch (currentTokenType) {
    case ON:
    case TRUE:
        read();
        return true;
    case FALSE:
        read();
        return false;
    case VALUE: {
        // e.g. 0/1 or 'true'/'false' literals
        boolean value = currentValue.getBoolean();
        read();
        return value;
    }
    }
    if (!readIf("OFF")) {
        throw getSyntaxError();
    }
    return false;
}
/**
 * Reads an expression that must be constant-foldable to a string value.
 *
 * @return the string value of the constant expression
 */
private String readString() {
    Expression expr = readExpression().optimize(session);
    if (expr instanceof ValueExpression) {
        return expr.getValue(session).getString();
    }
    // non-constant expressions are not allowed here
    throw DbException.getSyntaxError(sqlCommand, parseIndex, "string");
}
// TODO: why does this function allow defaultSchemaName=null - which resets
// the parser schemaName for everyone ?
/**
 * Reads an identifier that may be qualified by a schema (and optionally
 * by the database name, which must match this database). As a side
 * effect the parser's {@code schemaName} field is set to the parsed or
 * default schema.
 *
 * @param defaultSchemaName the schema to assume when none is given
 * @return the unqualified identifier
 */
private String readIdentifierWithSchema(String defaultSchemaName) {
    String s = readColumnIdentifier();
    schemaName = defaultSchemaName;
    if (readIf(DOT)) {
        // the first part was actually the schema
        schemaName = s;
        s = readColumnIdentifier();
    }
    if (currentTokenType == DOT) {
        // database.schema.identifier: only consume the dot when the
        // first part names this database
        if (equalsToken(schemaName, database.getShortName())) {
            read();
            schemaName = s;
            s = readColumnIdentifier();
        }
    }
    return s;
}
/**
 * Reads a possibly schema-qualified identifier, defaulting to the
 * session's current schema.
 *
 * @return the unqualified identifier
 */
private String readIdentifierWithSchema() {
    String defaultSchema = session.getCurrentSchemaName();
    return readIdentifierWithSchema(defaultSchema);
}
/**
 * Reads an alias identifier; aliases follow the same rules as column
 * identifiers.
 *
 * @return the identifier
 */
private String readAliasIdentifier() {
    return readColumnIdentifier();
}
/**
 * Reads a unique-object identifier; these follow the same rules as
 * column identifiers.
 *
 * @return the identifier
 */
private String readUniqueIdentifier() {
    return readColumnIdentifier();
}
/**
 * Reads a column identifier.
 *
 * <p>During database startup, keywords are also accepted as identifiers:
 * new keywords are introduced over time, and databases created with older
 * versions (including PageStore's LobStorageBackend for databases created
 * in 1.4.197 and earlier) may use them as object names.</p>
 *
 * @return the identifier
 */
private String readColumnIdentifier() {
    if (currentTokenType != IDENTIFIER) {
        boolean keywordAllowed = session.getDatabase().isStarting()
                && isKeyword(currentToken);
        if (!keywordAllowed) {
            throw DbException.getSyntaxError(sqlCommand, parseIndex, "identifier");
        }
    }
    String identifier = currentToken;
    read();
    return identifier;
}
/**
 * Consumes the expected (unquoted) token or fails with a syntax error.
 *
 * @param expected the required token text
 */
private void read(String expected) {
    if (!currentTokenQuoted && equalsToken(expected, currentToken)) {
        read();
        return;
    }
    addExpected(expected);
    throw getSyntaxError();
}
/**
 * Consumes a token of the expected type or fails with a syntax error.
 *
 * @param tokenType the required token type
 */
private void read(int tokenType) {
    if (currentTokenType == tokenType) {
        read();
        return;
    }
    addExpected(tokenType);
    throw getSyntaxError();
}
/**
 * Consumes the given (unquoted) token if it is next; otherwise records it
 * in the expected-token list for error reporting.
 *
 * @param token the token text to try
 * @return whether the token was consumed
 */
private boolean readIf(String token) {
    if (currentTokenQuoted || !equalsToken(token, currentToken)) {
        addExpected(token);
        return false;
    }
    read();
    return true;
}
/**
 * Consumes a token of the given type if it is next; otherwise records it
 * in the expected-token list for error reporting.
 *
 * @param tokenType the token type to try
 * @return whether the token was consumed
 */
private boolean readIf(int tokenType) {
    if (currentTokenType != tokenType) {
        addExpected(tokenType);
        return false;
    }
    read();
    return true;
}
/**
 * Tests (without consuming) whether the next unquoted token matches;
 * on mismatch records the token in the expected-token list.
 *
 * @param token the token text to test
 * @return whether the current token matches
 */
private boolean isToken(String token) {
    if (currentTokenQuoted || !equalsToken(token, currentToken)) {
        addExpected(token);
        return false;
    }
    return true;
}
/**
 * Tests (without consuming) whether the next token has the given type;
 * on mismatch records the token in the expected-token list.
 *
 * @param tokenType the token type to test
 * @return whether the current token matches
 */
private boolean isToken(int tokenType) {
    if (currentTokenType != tokenType) {
        addExpected(tokenType);
        return false;
    }
    return true;
}
/**
 * Compares two token texts, ignoring case when identifiers are not
 * upper-cased by this parser. Null-safe.
 *
 * @param a the first token, may be {@code null}
 * @param b the second token, may be {@code null}
 * @return whether the tokens are considered equal
 */
private boolean equalsToken(String a, String b) {
    if (a == null) {
        return b == null;
    }
    if (a.equals(b)) {
        return true;
    }
    // case-insensitive comparison only in case-preserving mode
    return !identifiersToUpper && a.equalsIgnoreCase(b);
}
/**
 * Compares two token texts case-insensitively. Null-safe.
 *
 * @param a the first token, may be {@code null}
 * @param b the second token, may be {@code null}
 * @return whether the tokens are equal ignoring case
 */
private static boolean equalsTokenIgnoreCase(String a, String b) {
    if (a == null || b == null) {
        return a == b;
    }
    return a.equals(b) || a.equalsIgnoreCase(b);
}
/**
 * Tests whether the current token, upper-cased, is contained in the given
 * collection of upper-case token texts. Does not consume the token and
 * does not touch the expected-token list.
 *
 * @param upperCaseTokenList the candidate tokens, all upper-case
 * @return whether the current token is in the list
 */
private boolean isTokenInList(Collection<String> upperCaseTokenList) {
    // use English upper-casing, consistent with the rest of this parser:
    // String.toUpperCase() is locale-dependent (e.g. Turkish dotted I)
    // and could fail to match tokens containing 'i'
    String upperCaseCurrentToken = StringUtils.toUpperEnglish(currentToken);
    return upperCaseTokenList.contains(upperCaseCurrentToken);
}
/**
 * Records a token in the expected-token list (used for syntax-error
 * messages), if that list is being collected.
 *
 * @param token the expected token text
 */
private void addExpected(String token) {
    if (expectedList == null) {
        return;
    }
    expectedList.add(token);
}
/**
 * Records a token type in the expected-token list, if that list is being
 * collected, by delegating to the text-based overload.
 *
 * @param tokenType the expected token type
 */
private void addExpected(int tokenType) {
    addExpected(TOKENS[tokenType]);
}
/**
 * Advances to the next token. Dispatches on the pre-computed character
 * class of the first non-whitespace character and fills in
 * {@code currentToken}, {@code currentTokenType}, {@code currentValue}
 * (for literals), {@code currentTokenQuoted}, {@code parseIndex} and
 * {@code lastParseIndex}.
 */
private void read() {
    currentTokenQuoted = false;
    if (expectedList != null) {
        expectedList.clear();
    }
    int[] types = characterTypes;
    lastParseIndex = parseIndex;
    int i = parseIndex;
    // skip whitespace (character type 0)
    int type = types[i];
    while (type == 0) {
        type = types[++i];
    }
    int start = i;
    char[] chars = sqlCommandChars;
    char c = chars[i++];
    currentToken = "";
    switch (type) {
    case CHAR_NAME:
        // identifier or keyword: letters followed by letters/digits
        while (true) {
            type = types[i];
            if (type != CHAR_NAME && type != CHAR_VALUE) {
                break;
            }
            i++;
        }
        currentTokenType = ParserUtil.getSaveTokenType(sqlCommand, !identifiersToUpper, start, i, false);
        if (currentTokenType == IDENTIFIER) {
            currentToken = StringUtils.cache(sqlCommand.substring(start, i));
        } else {
            // keywords use the canonical token text
            currentToken = TOKENS[currentTokenType];
        }
        parseIndex = i;
        return;
    case CHAR_QUOTED: {
        // quoted identifier; a doubled quote char is an escaped quote
        String result = null;
        while (true) {
            for (int begin = i;; i++) {
                if (chars[i] == c) {
                    if (result == null) {
                        result = sqlCommand.substring(begin, i);
                    } else {
                        // begin - 1 keeps one of the doubled quotes
                        result += sqlCommand.substring(begin - 1, i);
                    }
                    break;
                }
            }
            if (chars[++i] != c) {
                break;
            }
            i++;
        }
        currentToken = StringUtils.cache(result);
        parseIndex = i;
        currentTokenQuoted = true;
        currentTokenType = IDENTIFIER;
        return;
    }
    case CHAR_SPECIAL_2:
        // possibly a two-character operator such as <= or ::
        if (types[i] == CHAR_SPECIAL_2) {
            char c1 = chars[i++];
            currentTokenType = getSpecialType2(c, c1);
        } else {
            currentTokenType = getSpecialType1(c);
        }
        parseIndex = i;
        return;
    case CHAR_SPECIAL_1:
        // single-character operator
        currentTokenType = getSpecialType1(c);
        parseIndex = i;
        return;
    case CHAR_VALUE:
        if (c == '0' && (chars[i] == 'X' || chars[i] == 'x')) {
            // hex number
            long number = 0;
            start += 2;
            i++;
            while (true) {
                c = chars[i];
                if (c >= '0' && c <= '9') {
                    number = (number << 4) + c - '0';
                } else if (c >= 'A' && c <= 'F') {
                    number = (number << 4) + c - ('A' - 10);
                } else if (c >= 'a' && c <= 'f') {
                    number = (number << 4) + c - ('a' - 10);
                } else {
                    // end of hex digits: the value fits in an int
                    checkLiterals(false);
                    currentValue = ValueInt.get((int) number);
                    currentTokenType = VALUE;
                    currentToken = "0";
                    parseIndex = i;
                    return;
                }
                if (number > Integer.MAX_VALUE) {
                    // too large for int: re-read as a decimal value
                    readHexDecimal(start, i);
                    return;
                }
                i++;
            }
        }
        // decimal number
        long number = c - '0';
        loop: while (true) {
            c = chars[i];
            if (c < '0' || c > '9') {
                switch (c) {
                case '.':
                case 'E':
                case 'e':
                    // fractional part or exponent follows
                    readDecimal(start, i, false);
                    break loop;
                case 'L':
                case 'l':
                    // explicit long suffix
                    readDecimal(start, i, true);
                    break loop;
                }
                checkLiterals(false);
                currentValue = ValueInt.get((int) number);
                currentTokenType = VALUE;
                currentToken = "0";
                parseIndex = i;
                break;
            }
            number = number * 10 + (c - '0');
            if (number > Integer.MAX_VALUE) {
                // too large for int: continue as long/decimal
                readDecimal(start, i, true);
                break;
            }
            i++;
        }
        return;
    case CHAR_DOT:
        if (types[i] != CHAR_VALUE) {
            // a plain dot token (qualified name separator)
            currentTokenType = DOT;
            currentToken = ".";
            parseIndex = i;
            return;
        }
        // a number starting with a dot, e.g. ".5"
        readDecimal(i - 1, i, false);
        return;
    case CHAR_STRING: {
        // string literal; '' inside is an escaped quote
        String result = null;
        while (true) {
            for (int begin = i;; i++) {
                if (chars[i] == '\'') {
                    if (result == null) {
                        result = sqlCommand.substring(begin, i);
                    } else {
                        // begin - 1 keeps one of the doubled quotes
                        result += sqlCommand.substring(begin - 1, i);
                    }
                    break;
                }
            }
            if (chars[++i] != '\'') {
                break;
            }
            i++;
        }
        currentToken = "'";
        checkLiterals(true);
        currentValue = ValueString.get(result, database.getMode().treatEmptyStringsAsNull);
        parseIndex = i;
        currentTokenType = VALUE;
        return;
    }
    case CHAR_DOLLAR_QUOTED_STRING: {
        // PostgreSQL-style $$...$$ string; the body was pre-classified
        int begin = i - 1;
        while (types[i] == CHAR_DOLLAR_QUOTED_STRING) {
            i++;
        }
        String result = sqlCommand.substring(begin, i);
        currentToken = "'";
        checkLiterals(true);
        currentValue = ValueString.get(result, database.getMode().treatEmptyStringsAsNull);
        parseIndex = i;
        currentTokenType = VALUE;
        return;
    }
    case CHAR_END:
        currentTokenType = END;
        parseIndex = i;
        return;
    default:
        throw getSyntaxError();
    }
}
/**
 * Scans the digits directly after a {@code ?} parameter marker and stores
 * the index as the current VALUE token. Rejects indexes beyond
 * Integer.MAX_VALUE.
 */
private void readParameterIndex() {
    char[] chars = sqlCommandChars;
    int pos = parseIndex;
    // the caller guarantees the first character is a digit
    long value = chars[pos++] - '0';
    for (char ch = chars[pos]; ch >= '0' && ch <= '9'; ch = chars[pos]) {
        value = value * 10 + (ch - '0');
        if (value > Integer.MAX_VALUE) {
            throw DbException.getInvalidValueException(
                    "parameter index", value);
        }
        pos++;
    }
    currentValue = ValueInt.get((int) value);
    currentTokenType = VALUE;
    currentToken = "0";
    parseIndex = pos;
}
/**
 * Verifies that literals are permitted at this point; depending on the
 * ALLOW_LITERALS setting, text literals (or all literals) may be
 * forbidden to mitigate SQL injection.
 *
 * @param text whether the literal is a text literal
 */
private void checkLiterals(boolean text) {
    if (literalsChecked || session.getAllowLiterals()) {
        return;
    }
    int allowed = database.getAllowLiterals();
    boolean forbidden = allowed == Constants.ALLOW_LITERALS_NONE
            || (text && allowed != Constants.ALLOW_LITERALS_ALL);
    if (forbidden) {
        throw DbException.get(ErrorCode.LITERALS_ARE_NOT_ALLOWED);
    }
}
/**
 * Finishes reading a hex literal that exceeded the int range, producing a
 * DECIMAL value token.
 *
 * @param start index of the first hex digit (after "0x")
 * @param i index of the digit currently being processed
 */
private void readHexDecimal(int start, int i) {
    char[] chars = sqlCommandChars;
    // advance past the remaining hex digits (only digits and upper-case
    // A-F are accepted at this point)
    char ch = chars[++i];
    while ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'F')) {
        ch = chars[++i];
    }
    parseIndex = i;
    String hex = sqlCommand.substring(start, i);
    checkLiterals(false);
    currentValue = ValueDecimal.get(new BigDecimal(new BigInteger(hex, 16)));
    currentTokenType = VALUE;
}
/**
 * Reads a decimal number literal (plain integer, fixed point, or
 * scientific notation) and stores it as the current token value. Plain
 * integers that fit into a long become LONG values; everything else
 * becomes a DECIMAL value.
 *
 * @param start the index of the first character of the literal
 * @param i the current scan position
 * @param integer whether the literal is (so far) a plain integer
 */
private void readDecimal(int start, int i, boolean integer) {
    char[] chars = sqlCommandChars;
    int[] types = characterTypes;
    // go until the first non-number
    while (true) {
        int t = types[i];
        if (t == CHAR_DOT) {
            // a decimal point makes it a fixed-point number
            integer = false;
        } else if (t != CHAR_VALUE) {
            break;
        }
        i++;
    }
    char c = chars[i];
    if (c == 'E' || c == 'e') {
        // scientific notation: exponent with optional sign
        integer = false;
        c = chars[++i];
        if (c == '+' || c == '-') {
            i++;
        }
        if (types[i] != CHAR_VALUE) {
            throw getSyntaxError();
        }
        while (types[++i] == CHAR_VALUE) {
            // go until the first non-number
        }
    }
    parseIndex = i;
    checkLiterals(false);
    BigDecimal bd;
    // at most 19 digits can possibly fit into a long
    if (integer && i - start <= 19) {
        BigInteger bi = new BigInteger(sqlCommand.substring(start, i));
        if (bi.compareTo(ValueLong.MAX_BI) <= 0) {
            // parse constants like "10000000L"
            c = chars[i];
            if (c == 'L' || c == 'l') {
                parseIndex++;
            }
            currentValue = ValueLong.get(bi.longValue());
            currentTokenType = VALUE;
            return;
        }
        bd = new BigDecimal(bi);
    } else {
        try {
            bd = new BigDecimal(sqlCommandChars, start, i - start);
        } catch (NumberFormatException e) {
            throw DbException.get(ErrorCode.DATA_CONVERSION_ERROR_1, e, sqlCommand.substring(start, i));
        }
    }
    currentValue = ValueDecimal.get(bd);
    currentTokenType = VALUE;
}
/**
 * Pre-scans the SQL command: classifies every character into a token
 * class (stored in {@code characterTypes}), blanks out comments,
 * normalizes identifier case and mode-specific quoting, and appends a
 * sentinel so the tokenizer never reads past the end.
 *
 * @param sql the SQL command to parse (null is treated as empty)
 */
private void initialize(String sql) {
    if (sql == null) {
        sql = "";
    }
    originalSQL = sql;
    sqlCommand = sql;
    // one extra slot for the trailing sentinel character
    int len = sql.length() + 1;
    char[] command = new char[len];
    int[] types = new int[len];
    len--;
    sql.getChars(0, len, command, 0);
    // set when characters were rewritten, so sqlCommand must be rebuilt
    boolean changed = false;
    command[len] = ' ';
    int startLoop = 0;
    int lastType = 0;
    for (int i = 0; i < len; i++) {
        char c = command[i];
        int type = 0;
        switch (c) {
        case '/':
            if (command[i + 1] == '*') {
                // block comment
                changed = true;
                command[i] = ' ';
                command[i + 1] = ' ';
                startLoop = i;
                i += 2;
                checkRunOver(i, len, startLoop);
                while (command[i] != '*' || command[i + 1] != '/') {
                    command[i++] = ' ';
                    checkRunOver(i, len, startLoop);
                }
                command[i] = ' ';
                command[i + 1] = ' ';
                i++;
            } else if (command[i + 1] == '/') {
                // single line comment
                changed = true;
                startLoop = i;
                while (true) {
                    c = command[i];
                    if (c == '\n' || c == '\r' || i >= len - 1) {
                        break;
                    }
                    command[i++] = ' ';
                    checkRunOver(i, len, startLoop);
                }
            } else {
                type = CHAR_SPECIAL_1;
            }
            break;
        case '-':
            if (command[i + 1] == '-') {
                // single line comment
                changed = true;
                startLoop = i;
                while (true) {
                    c = command[i];
                    if (c == '\n' || c == '\r' || i >= len - 1) {
                        break;
                    }
                    command[i++] = ' ';
                    checkRunOver(i, len, startLoop);
                }
            } else {
                type = CHAR_SPECIAL_1;
            }
            break;
        case '$':
            // "$$" only starts a dollar quoted string at the start of
            // the command or after whitespace
            if (command[i + 1] == '$' && (i == 0 || command[i - 1] <= ' ')) {
                // dollar quoted string
                changed = true;
                command[i] = ' ';
                command[i + 1] = ' ';
                startLoop = i;
                i += 2;
                checkRunOver(i, len, startLoop);
                while (command[i] != '$' || command[i + 1] != '$') {
                    types[i++] = CHAR_DOLLAR_QUOTED_STRING;
                    checkRunOver(i, len, startLoop);
                }
                command[i] = ' ';
                command[i + 1] = ' ';
                i++;
            } else {
                if (lastType == CHAR_NAME || lastType == CHAR_VALUE) {
                    // $ inside an identifier is supported
                    type = CHAR_NAME;
                } else {
                    // but not at the start, to support PostgreSQL $1
                    type = CHAR_SPECIAL_1;
                }
            }
            break;
        case '(':
        case ')':
        case '{':
        case '}':
        case '*':
        case ',':
        case ';':
        case '+':
        case '%':
        case '?':
        case '@':
        case ']':
            type = CHAR_SPECIAL_1;
            break;
        case '!':
        case '<':
        case '>':
        case '|':
        case '=':
        case ':':
        case '&':
        case '~':
            // may combine with a following character to a two-char token
            type = CHAR_SPECIAL_2;
            break;
        case '.':
            type = CHAR_DOT;
            break;
        case '\'':
            type = types[i] = CHAR_STRING;
            startLoop = i;
            while (command[++i] != '\'') {
                checkRunOver(i, len, startLoop);
            }
            break;
        case '[':
            if (database.getMode().squareBracketQuotedNames) {
                // SQL Server alias for "
                command[i] = '"';
                changed = true;
                type = types[i] = CHAR_QUOTED;
                startLoop = i;
                while (command[++i] != ']') {
                    checkRunOver(i, len, startLoop);
                }
                command[i] = '"';
            } else {
                type = CHAR_SPECIAL_1;
            }
            break;
        case '`':
            // MySQL alias for ", but not case sensitive
            type = types[i] = CHAR_QUOTED;
            startLoop = i;
            while (command[++i] != '`') {
                checkRunOver(i, len, startLoop);
                c = command[i];
                if (identifiersToUpper || identifiersToLower) {
                    char u = identifiersToUpper ? Character.toUpperCase(c) : Character.toLowerCase(c);
                    if (u != c) {
                        command[i] = u;
                        changed = true;
                    }
                }
            }
            break;
        case '"':
            type = types[i] = CHAR_QUOTED;
            startLoop = i;
            while (command[++i] != '"') {
                checkRunOver(i, len, startLoop);
            }
            break;
        case '_':
            type = CHAR_NAME;
            break;
        case '#':
            if (database.getMode().supportPoundSymbolForColumnNames) {
                type = CHAR_NAME;
            } else {
                type = CHAR_SPECIAL_1;
            }
            break;
        default:
            if (c >= 'a' && c <= 'z') {
                if (identifiersToUpper) {
                    command[i] = (char) (c - ('a' - 'A'));
                    changed = true;
                }
                type = CHAR_NAME;
            } else if (c >= 'A' && c <= 'Z') {
                if (identifiersToLower) {
                    command[i] = (char) (c + ('a' - 'A'));
                    changed = true;
                }
                type = CHAR_NAME;
            } else if (c >= '0' && c <= '9') {
                type = CHAR_VALUE;
            } else {
                if (c <= ' ' || Character.isSpaceChar(c)) {
                    // whitespace
                } else if (Character.isJavaIdentifierPart(c)) {
                    type = CHAR_NAME;
                    if (identifiersToUpper || identifiersToLower) {
                        char u = identifiersToUpper ? Character.toUpperCase(c) : Character.toLowerCase(c);
                        if (u != c) {
                            command[i] = u;
                            changed = true;
                        }
                    }
                } else {
                    type = CHAR_SPECIAL_1;
                }
            }
        }
        types[i] = type;
        lastType = type;
    }
    sqlCommandChars = command;
    // sentinel so the tokenizer terminates without bounds checks
    types[len] = CHAR_END;
    characterTypes = types;
    if (changed) {
        sqlCommand = new String(command);
    }
    parseIndex = 0;
}
/**
 * Raises a syntax error when the scan position has run past the end of
 * the command (e.g. an unterminated comment or quoted string), pointing
 * the error at the construct's start.
 */
private void checkRunOver(int i, int len, int startLoop) {
    if (i < len) {
        return;
    }
    parseIndex = startLoop;
    throw getSyntaxError();
}
/**
 * Maps a single special character to its token type, grouped by
 * category (brackets, separators, operators, placeholders).
 */
private int getSpecialType1(char c0) {
    switch (c0) {
    case '(':
        return OPEN_PAREN;
    case ')':
        return CLOSE_PAREN;
    case '[':
        return OPEN_BRACKET;
    case ']':
        return CLOSE_BRACKET;
    case '{':
        return OPEN_BRACE;
    case '}':
        return CLOSE_BRACE;
    case ',':
        return COMMA;
    case ';':
        return SEMICOLON;
    case ':':
        return COLON;
    case '+':
        return PLUS_SIGN;
    case '-':
        return MINUS_SIGN;
    case '*':
        return ASTERISK;
    case '/':
        return SLASH;
    case '%':
        return PERCENT;
    case '~':
        return TILDE;
    case '<':
        return SMALLER;
    case '>':
        return BIGGER;
    case '=':
        return EQUAL;
    case '@':
        return AT;
    case '?':
    case '$':
        return PARAMETER;
    default:
        throw getSyntaxError();
    }
}
/**
 * Maps a two-character operator to its token type; throws a syntax
 * error for any unrecognized combination.
 */
private int getSpecialType2(char c0, char c1) {
    if (c0 == ':') {
        if (c1 == ':') {
            return COLON_COLON;
        }
        if (c1 == '=') {
            return COLON_EQ;
        }
    } else if (c0 == '>') {
        if (c1 == '=') {
            return BIGGER_EQUAL;
        }
    } else if (c0 == '<') {
        if (c1 == '=') {
            return SMALLER_EQUAL;
        }
        if (c1 == '>') {
            return NOT_EQUAL;
        }
    } else if (c0 == '!') {
        if (c1 == '=') {
            return NOT_EQUAL;
        }
        if (c1 == '~') {
            return NOT_TILDE;
        }
    } else if (c0 == '|') {
        if (c1 == '|') {
            return STRING_CONCAT;
        }
    } else if (c0 == '&') {
        if (c1 == '&') {
            return SPATIAL_INTERSECTS;
        }
    }
    throw getSyntaxError();
}
/**
 * Checks whether the given token is a SQL keyword, honoring the
 * identifier case setting.
 */
private boolean isKeyword(String s) {
    boolean canBeLowerCase = !identifiersToUpper;
    return ParserUtil.isKeyword(s, canBeLowerCase);
}
/**
 * Parses a full column definition for CREATE TABLE / domain creation:
 * the identity/serial shortcuts or a typed column, followed by
 * visibility, nullability, computed/default expressions, identity
 * options, ON UPDATE, and column metadata (sequence, selectivity,
 * comment).
 *
 * @param columnName the name of the column
 * @param defaultNullable whether the column is nullable when no
 *            NULL / NOT NULL constraint is present
 * @param forTable whether the column is parsed for a table (as opposed
 *            to a domain definition)
 * @return the parsed column
 */
private Column parseColumnForTable(String columnName,
        boolean defaultNullable, boolean forTable) {
    Column column;
    boolean isIdentity = readIf("IDENTITY");
    if (isIdentity || readIf("BIGSERIAL")) {
        // Check if any of them are disallowed in the current Mode
        if (isIdentity && database.getMode().
                disallowedTypes.contains("IDENTITY")) {
            throw DbException.get(ErrorCode.UNKNOWN_DATA_TYPE_1,
                    currentToken);
        }
        column = new Column(columnName, Value.LONG);
        column.setOriginalSQL("IDENTITY");
        parseAutoIncrement(column);
        // PostgreSQL compatibility
        if (!database.getMode().serialColumnIsNotPK) {
            column.setPrimaryKey(true);
        }
    } else if (readIf("SERIAL")) {
        column = new Column(columnName, Value.INT);
        column.setOriginalSQL("SERIAL");
        parseAutoIncrement(column);
        // PostgreSQL compatibility
        if (!database.getMode().serialColumnIsNotPK) {
            column.setPrimaryKey(true);
        }
    } else {
        column = parseColumnWithType(columnName, forTable);
    }
    if (readIf("INVISIBLE")) {
        column.setVisible(false);
    } else if (readIf("VISIBLE")) {
        column.setVisible(true);
    }
    NullConstraintType nullConstraint = parseNotNullConstraint();
    switch (nullConstraint) {
    case NULL_IS_ALLOWED:
        column.setNullable(true);
        break;
    case NULL_IS_NOT_ALLOWED:
        column.setNullable(false);
        break;
    case NO_NULL_CONSTRAINT_FOUND:
        // domains may be defined as not nullable
        column.setNullable(defaultNullable & column.isNullable());
        break;
    default:
        throw DbException.get(ErrorCode.UNKNOWN_MODE_1,
                "Internal Error - unhandled case: " + nullConstraint.name());
    }
    if (readIf("AS")) {
        if (isIdentity) {
            // Bug fix: the syntax error was previously created but not
            // thrown, so "IDENTITY ... AS <expr>" was silently accepted
            throw getSyntaxError();
        }
        Expression expr = readExpression();
        column.setComputedExpression(expr);
    } else if (readIf("DEFAULT")) {
        Expression defaultExpression = readExpression();
        column.setDefaultExpression(session, defaultExpression);
    } else if (readIf("GENERATED")) {
        // GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( options ) ]
        if (!readIf("ALWAYS")) {
            read("BY");
            read("DEFAULT");
        }
        read("AS");
        read("IDENTITY");
        SequenceOptions options = new SequenceOptions();
        if (readIf(OPEN_PAREN)) {
            parseSequenceOptions(options, null, true);
            read(CLOSE_PAREN);
        }
        column.setAutoIncrementOptions(options);
    }
    if (readIf(ON)) {
        read("UPDATE");
        Expression onUpdateExpression = readExpression();
        column.setOnUpdateExpression(session, onUpdateExpression);
    }
    // NOT NULL may also appear after the default clause
    if (NullConstraintType.NULL_IS_NOT_ALLOWED == parseNotNullConstraint()) {
        column.setNullable(false);
    }
    if (readIf("AUTO_INCREMENT") || readIf("BIGSERIAL") || readIf("SERIAL")) {
        parseAutoIncrement(column);
        parseNotNullConstraint();
    } else if (readIf("IDENTITY")) {
        parseAutoIncrement(column);
        column.setPrimaryKey(true);
        parseNotNullConstraint();
    }
    if (readIf("NULL_TO_DEFAULT")) {
        column.setConvertNullToDefault(true);
    }
    if (readIf("SEQUENCE")) {
        Sequence sequence = readSequence();
        column.setSequence(sequence);
    }
    if (readIf("SELECTIVITY")) {
        int value = readNonNegativeInt();
        column.setSelectivity(value);
    }
    String comment = readCommentIf();
    if (comment != null) {
        column.setComment(comment);
    }
    return column;
}
/**
 * Parses optional identity options of the form "(start [, increment])"
 * and attaches them to the column.
 */
private void parseAutoIncrement(Column column) {
    SequenceOptions options = new SequenceOptions();
    if (readIf(OPEN_PAREN)) {
        long startValue = readLong();
        options.setStartValue(ValueExpression.get(ValueLong.get(startValue)));
        if (readIf(COMMA)) {
            long increment = readLong();
            options.setIncrement(ValueExpression.get(ValueLong.get(increment)));
        }
        read(CLOSE_PAREN);
    }
    column.setAutoIncrementOptions(options);
}
/**
 * Reads an optional "COMMENT [IS] <string>" clause.
 *
 * @return the comment text, or null if no COMMENT clause is present
 */
private String readCommentIf() {
    if (!readIf("COMMENT")) {
        return null;
    }
    readIf(IS);
    return readString();
}
/**
 * Parses a column type specification: multi-word type names, optional
 * precision/scale, interval qualifiers, mode-specific types (ENUM,
 * GEOMETRY, FLOAT(p), SQL Server datetime variants), domains, and
 * compatibility suffixes (FOR BIT DATA, UNSIGNED).
 *
 * @param columnName the name of the column
 * @param forTable whether the column is parsed for a table (affects how
 *            a domain reference is rendered and recorded)
 * @return the parsed column with its full type information
 */
private Column parseColumnWithType(String columnName, boolean forTable) {
    String original = currentToken;
    boolean regular = false;
    int originalPrecision = -1, originalScale = -1;
    // Multi-word type names: consume the extra keywords and build the
    // canonical "original" SQL name
    if (readIf("LONG")) {
        if (readIf("RAW")) {
            original = "LONG RAW";
        }
    } else if (readIf("DOUBLE")) {
        if (readIf("PRECISION")) {
            original = "DOUBLE PRECISION";
        }
    } else if (readIf("CHARACTER")) {
        if (readIf("VARYING")) {
            original = "CHARACTER VARYING";
        } else if (readIf("LARGE")) {
            read("OBJECT");
            original = "CHARACTER LARGE OBJECT";
        }
    } else if (readIf("BINARY")) {
        if (readIf("VARYING")) {
            original = "BINARY VARYING";
        } else if (readIf("LARGE")) {
            read("OBJECT");
            original = "BINARY LARGE OBJECT";
        }
    } else if (readIf("TIME")) {
        if (readIf(OPEN_PAREN)) {
            originalScale = readNonNegativeInt();
            if (originalScale > ValueTime.MAXIMUM_SCALE) {
                throw DbException.get(ErrorCode.INVALID_VALUE_SCALE_PRECISION, Integer.toString(originalScale));
            }
            read(CLOSE_PAREN);
        }
        if (readIf("WITHOUT")) {
            read("TIME");
            read("ZONE");
            original = "TIME WITHOUT TIME ZONE";
        }
    } else if (readIf("TIMESTAMP")) {
        if (readIf(OPEN_PAREN)) {
            originalScale = readNonNegativeInt();
            // Allow non-standard TIMESTAMP(..., ...) syntax
            if (readIf(COMMA)) {
                originalScale = readNonNegativeInt();
            }
            if (originalScale > ValueTimestamp.MAXIMUM_SCALE) {
                throw DbException.get(ErrorCode.INVALID_VALUE_SCALE_PRECISION, Integer.toString(originalScale));
            }
            read(CLOSE_PAREN);
        }
        if (readIf(WITH)) {
            read("TIME");
            read("ZONE");
            original = "TIMESTAMP WITH TIME ZONE";
        } else if (readIf("WITHOUT")) {
            read("TIME");
            read("ZONE");
            original = "TIMESTAMP WITHOUT TIME ZONE";
        }
    } else if (readIf(INTERVAL)) {
        // INTERVAL <field> [TO <field>] with optional leading precision
        // and (for seconds) fractional scale
        if (readIf("YEAR")) {
            if (readIf(OPEN_PAREN)) {
                originalPrecision = readNonNegativeInt();
                read(CLOSE_PAREN);
            }
            if (readIf("TO")) {
                read("MONTH");
                original = "INTERVAL YEAR TO MONTH";
            } else {
                original = "INTERVAL YEAR";
            }
        } else if (readIf("MONTH")) {
            if (readIf(OPEN_PAREN)) {
                originalPrecision = readNonNegativeInt();
                read(CLOSE_PAREN);
            }
            original = "INTERVAL MONTH";
        } else if (readIf("DAY")) {
            if (readIf(OPEN_PAREN)) {
                originalPrecision = readNonNegativeInt();
                read(CLOSE_PAREN);
            }
            if (readIf("TO")) {
                if (readIf("HOUR")) {
                    original = "INTERVAL DAY TO HOUR";
                } else if (readIf("MINUTE")) {
                    original = "INTERVAL DAY TO MINUTE";
                } else {
                    read("SECOND");
                    if (readIf(OPEN_PAREN)) {
                        originalScale = readNonNegativeInt();
                        read(CLOSE_PAREN);
                    }
                    original = "INTERVAL DAY TO SECOND";
                }
            } else {
                original = "INTERVAL DAY";
            }
        } else if (readIf("HOUR")) {
            if (readIf(OPEN_PAREN)) {
                originalPrecision = readNonNegativeInt();
                read(CLOSE_PAREN);
            }
            if (readIf("TO")) {
                if (readIf("MINUTE")) {
                    original = "INTERVAL HOUR TO MINUTE";
                } else {
                    read("SECOND");
                    if (readIf(OPEN_PAREN)) {
                        originalScale = readNonNegativeInt();
                        read(CLOSE_PAREN);
                    }
                    original = "INTERVAL HOUR TO SECOND";
                }
            } else {
                original = "INTERVAL HOUR";
            }
        } else if (readIf("MINUTE")) {
            if (readIf(OPEN_PAREN)) {
                originalPrecision = readNonNegativeInt();
                read(CLOSE_PAREN);
            }
            if (readIf("TO")) {
                read("SECOND");
                if (readIf(OPEN_PAREN)) {
                    originalScale = readNonNegativeInt();
                    read(CLOSE_PAREN);
                }
                original = "INTERVAL MINUTE TO SECOND";
            } else {
                original = "INTERVAL MINUTE";
            }
        } else {
            read("SECOND");
            if (readIf(OPEN_PAREN)) {
                originalPrecision = readNonNegativeInt();
                if (readIf(COMMA)) {
                    originalScale = readNonNegativeInt();
                }
                read(CLOSE_PAREN);
            }
            original = "INTERVAL SECOND";
        }
    } else {
        // a single-token type name; consumed later via read()
        regular = true;
    }
    long precision = -1;
    ExtTypeInfo extTypeInfo = null;
    int scale = -1;
    String comment = null;
    Column templateColumn = null;
    DataType dataType;
    if (!identifiersToUpper) {
        original = StringUtils.toUpperEnglish(original);
    }
    // A domain with this name takes precedence over built-in types
    Domain domain = database.findDomain(original);
    if (domain != null) {
        templateColumn = domain.getColumn();
        TypeInfo type = templateColumn.getType();
        dataType = DataType.getDataType(type.getValueType());
        comment = templateColumn.getComment();
        original = forTable ? domain.getSQL(true) : templateColumn.getOriginalSQL();
        precision = type.getPrecision();
        scale = type.getScale();
        extTypeInfo = type.getExtTypeInfo();
    } else {
        Mode mode = database.getMode();
        dataType = DataType.getTypeByName(original, mode);
        if (dataType == null || mode.disallowedTypes.contains(original)) {
            throw DbException.get(ErrorCode.UNKNOWN_DATA_TYPE_1,
                    currentToken);
        }
    }
    if (database.getIgnoreCase() && dataType.type == Value.STRING &&
            !equalsToken("VARCHAR_CASESENSITIVE", original)) {
        original = "VARCHAR_IGNORECASE";
        dataType = DataType.getTypeByName(original, database.getMode());
    }
    if (regular) {
        // consume the single-token type name now
        read();
    }
    precision = precision == -1 ? dataType.defaultPrecision : precision;
    scale = scale == -1 ? dataType.defaultScale : scale;
    if (dataType.supportsPrecision || dataType.supportsScale) {
        int t = dataType.type;
        if (t == Value.TIME || t == Value.TIMESTAMP || t == Value.TIMESTAMP_TZ) {
            if (originalScale >= 0) {
                scale = originalScale;
                switch (t) {
                case Value.TIME:
                    if (original.equals("TIME WITHOUT TIME ZONE")) {
                        original = "TIME(" + originalScale + ") WITHOUT TIME ZONE";
                    } else {
                        original = original + '(' + originalScale + ')';
                    }
                    break;
                case Value.TIMESTAMP:
                    if (original.equals("TIMESTAMP WITHOUT TIME ZONE")) {
                        original = "TIMESTAMP(" + originalScale + ") WITHOUT TIME ZONE";
                    } else {
                        original = original + '(' + originalScale + ')';
                    }
                    break;
                case Value.TIMESTAMP_TZ:
                    original = "TIMESTAMP(" + originalScale + ") WITH TIME ZONE";
                    break;
                }
            } else if (original.equals("DATETIME") || original.equals("DATETIME2")) {
                // SQL Server compatibility types with optional scale
                if (readIf(OPEN_PAREN)) {
                    originalScale = readNonNegativeInt();
                    if (originalScale > ValueTime.MAXIMUM_SCALE) {
                        throw DbException.get(ErrorCode.INVALID_VALUE_SCALE_PRECISION,
                                Integer.toString(originalScale));
                    }
                    read(CLOSE_PAREN);
                    scale = originalScale;
                    original = original + '(' + originalScale + ')';
                }
            } else if (original.equals("SMALLDATETIME")) {
                scale = 0;
            }
        } else if (DataType.isIntervalType(t)) {
            if (originalPrecision >= 0 || originalScale >= 0) {
                IntervalQualifier qualifier = IntervalQualifier.valueOf(t - Value.INTERVAL_YEAR);
                original = qualifier.getTypeName(originalPrecision, originalScale);
                if (originalPrecision >= 0) {
                    if (originalPrecision <= 0 || originalPrecision > ValueInterval.MAXIMUM_PRECISION) {
                        throw DbException.get(ErrorCode.INVALID_VALUE_SCALE_PRECISION,
                                Integer.toString(originalPrecision));
                    }
                    precision = originalPrecision;
                }
                if (originalScale >= 0) {
                    if (originalScale > ValueInterval.MAXIMUM_SCALE) {
                        throw DbException.get(ErrorCode.INVALID_VALUE_SCALE_PRECISION,
                                Integer.toString(originalScale));
                    }
                    scale = originalScale;
                }
            }
        } else if (readIf(OPEN_PAREN)) {
            // generic "(precision [, scale])"; "MAX" keeps the defaults
            if (!readIf("MAX")) {
                long p = readPrecision();
                original += "(" + p;
                if (dataType.supportsScale) {
                    if (readIf(COMMA)) {
                        scale = readInt();
                        original += ", " + scale;
                    } else {
                        scale = 0;
                    }
                }
                precision = p;
                original += ")";
            }
            read(CLOSE_PAREN);
        }
    } else if (dataType.type == Value.DOUBLE && original.equals("FLOAT")) {
        // FLOAT(p): p <= 24 maps to single precision
        if (readIf(OPEN_PAREN)) {
            int p = readNonNegativeInt();
            read(CLOSE_PAREN);
            if (p > 53) {
                throw DbException.get(ErrorCode.INVALID_VALUE_SCALE_PRECISION, Integer.toString(p));
            }
            if (p <= 24) {
                dataType = DataType.getDataType(Value.FLOAT);
            }
            original = original + '(' + p + ')';
        }
    } else if (dataType.type == Value.ENUM) {
        if (extTypeInfo == null) {
            // read the list of enumerator string literals
            String[] enumerators = null;
            if (readIf(OPEN_PAREN)) {
                java.util.List<String> enumeratorList = new ArrayList<>();
                String enumerator0 = readString();
                enumeratorList.add(enumerator0);
                while (readIfMore(true)) {
                    String enumeratorN = readString();
                    enumeratorList.add(enumeratorN);
                }
                enumerators = enumeratorList.toArray(new String[0]);
            }
            try {
                extTypeInfo = new ExtTypeInfoEnum(enumerators);
            } catch (DbException e) {
                throw e.addSQL(original);
            }
            original += extTypeInfo.getCreateSQL();
        }
    } else if (dataType.type == Value.GEOMETRY) {
        if (extTypeInfo == null) {
            // optional "(geometryType [dimensionSystem] [, srid])"
            if (readIf(OPEN_PAREN)) {
                int type = 0;
                if (currentTokenType != IDENTIFIER || currentTokenQuoted) {
                    throw getSyntaxError();
                }
                if (!readIf("GEOMETRY")) {
                    try {
                        type = EWKTUtils.parseGeometryType(currentToken);
                        read();
                        if (type / 1_000 == 0 && currentTokenType == IDENTIFIER && !currentTokenQuoted) {
                            type += EWKTUtils.parseDimensionSystem(currentToken) * 1_000;
                            read();
                        }
                    } catch (IllegalArgumentException ex) {
                        throw getSyntaxError();
                    }
                }
                Integer srid = null;
                if (readIf(COMMA)) {
                    srid = readInt();
                }
                read(CLOSE_PAREN);
                extTypeInfo = new ExtTypeInfoGeometry(type, srid);
                original += extTypeInfo.getCreateSQL();
            }
        }
    } else if (readIf(OPEN_PAREN)) {
        // Support for MySQL: INT(11), MEDIUMINT(8) and so on.
        // Just ignore the precision.
        readNonNegativeInt();
        read(CLOSE_PAREN);
    }
    if (readIf(FOR)) {
        // DB2 compatibility: CHAR ... FOR BIT DATA maps to BINARY
        read("BIT");
        read("DATA");
        if (dataType.type == Value.STRING) {
            dataType = DataType.getTypeByName("BINARY", database.getMode());
        }
    }
    // MySQL compatibility
    readIf("UNSIGNED");
    int type = dataType.type;
    if (scale > precision && dataType.supportsPrecision && dataType.supportsScale
            && !DataType.isIntervalType(type)) {
        throw DbException.get(ErrorCode.INVALID_VALUE_SCALE_PRECISION,
                Integer.toString(scale), Long.toString(precision));
    }
    Column column = new Column(columnName, TypeInfo.getTypeInfo(type, precision, scale, extTypeInfo));
    if (templateColumn != null) {
        // inherit properties from the domain's template column
        column.setNullable(templateColumn.isNullable());
        column.setDefaultExpression(session,
                templateColumn.getDefaultExpression());
        int selectivity = templateColumn.getSelectivity();
        if (selectivity != Constants.SELECTIVITY_DEFAULT) {
            column.setSelectivity(selectivity);
        }
        Expression checkConstraint = templateColumn.getCheckConstraint(
                session, columnName);
        column.addCheckConstraint(session, checkConstraint);
    }
    column.setComment(comment);
    column.setOriginalSQL(original);
    if (forTable) {
        column.setDomain(domain);
    }
    return column;
}
/**
 * Reads a precision value with an optional binary size suffix
 * (K/M/G/T/P) and an optional char length unit
 * (CHARACTERS/OCTETS/CHAR/BYTE), which is read and ignored.
 *
 * @return the precision, multiplied by the suffix factor if present
 */
private long readPrecision() {
    long p = readNonNegativeLong();
    if (currentTokenType == IDENTIFIER && !currentTokenQuoted && currentToken.length() == 1) {
        long mul;
        char ch = currentToken.charAt(0);
        // when identifiersToUpper is set the token is already uppercase
        switch (identifiersToUpper ? ch : Character.toUpperCase(ch)) {
        case 'K':
            mul = 1L << 10;
            break;
        case 'M':
            mul = 1L << 20;
            break;
        case 'G':
            mul = 1L << 30;
            break;
        case 'T':
            mul = 1L << 40;
            break;
        case 'P':
            mul = 1L << 50;
            break;
        default:
            throw getSyntaxError();
        }
        // reject values that would overflow a long after scaling
        if (p > Long.MAX_VALUE / mul) {
            throw DbException.getInvalidValueException("precision", p + currentToken);
        }
        p *= mul;
        read();
    }
    if (currentTokenType == IDENTIFIER && !currentTokenQuoted) {
        // Standard char length units
        if (!readIf("CHARACTERS") && !readIf("OCTETS") &&
                // Oracle syntax
                !readIf("CHAR")) {
            // Oracle syntax
            readIf("BYTE");
        }
    }
    return p;
}
/**
 * Parses a CREATE statement and dispatches to the specific parser for
 * the created object type (view, alias, sequence, user, trigger, role,
 * schema, constant, domain, aggregate, table, synonym, or index).
 *
 * @return the prepared CREATE command
 */
private Prepared parseCreate() {
    boolean orReplace = false;
    if (readIf("OR")) {
        read("REPLACE");
        orReplace = true;
    }
    boolean force = readIf("FORCE");
    if (readIf("VIEW")) {
        return parseCreateView(force, orReplace);
    } else if (readIf("ALIAS")) {
        return parseCreateFunctionAlias(force);
    } else if (readIf("SEQUENCE")) {
        return parseCreateSequence();
    } else if (readIf("USER")) {
        return parseCreateUser();
    } else if (readIf("TRIGGER")) {
        return parseCreateTrigger(force);
    } else if (readIf("ROLE")) {
        return parseCreateRole();
    } else if (readIf("SCHEMA")) {
        return parseCreateSchema();
    } else if (readIf("CONSTANT")) {
        return parseCreateConstant();
    } else if (readIf("DOMAIN") || readIf("TYPE") || readIf("DATATYPE")) {
        return parseCreateDomain();
    } else if (readIf("AGGREGATE")) {
        return parseCreateAggregate(force);
    } else if (readIf("LINKED")) {
        return parseCreateLinkedTable(false, false, force);
    }
    // tables or linked tables
    boolean memory = false, cached = false;
    if (readIf("MEMORY")) {
        memory = true;
    } else if (readIf("CACHED")) {
        cached = true;
    }
    if (readIf("LOCAL")) {
        read("TEMPORARY");
        if (readIf("LINKED")) {
            return parseCreateLinkedTable(true, false, force);
        }
        read(TABLE);
        return parseCreateTable(true, false, cached);
    } else if (readIf("GLOBAL")) {
        read("TEMPORARY");
        if (readIf("LINKED")) {
            return parseCreateLinkedTable(true, true, force);
        }
        read(TABLE);
        return parseCreateTable(true, true, cached);
    } else if (readIf("TEMP") || readIf("TEMPORARY")) {
        // plain TEMPORARY is treated like GLOBAL TEMPORARY
        if (readIf("LINKED")) {
            return parseCreateLinkedTable(true, true, force);
        }
        read(TABLE);
        return parseCreateTable(true, true, cached);
    } else if (readIf(TABLE)) {
        if (!cached && !memory) {
            cached = database.getDefaultTableType() == Table.TYPE_CACHED;
        }
        return parseCreateTable(false, false, cached);
    } else if (readIf("SYNONYM")) {
        return parseCreateSynonym(orReplace);
    } else {
        // remaining possibility: CREATE [PRIMARY KEY|UNIQUE|HASH|
        // SPATIAL] INDEX
        boolean hash = false, primaryKey = false;
        boolean unique = false, spatial = false;
        String indexName = null;
        Schema oldSchema = null;
        boolean ifNotExists = false;
        if (readIf(PRIMARY)) {
            read("KEY");
            if (readIf("HASH")) {
                hash = true;
            }
            primaryKey = true;
            if (!isToken(ON)) {
                ifNotExists = readIfNotExists();
                indexName = readIdentifierWithSchema(null);
                oldSchema = getSchema();
            }
        } else {
            if (readIf(UNIQUE)) {
                unique = true;
            }
            if (readIf("HASH")) {
                hash = true;
            }
            if (readIf("SPATIAL")) {
                spatial = true;
            }
            if (readIf("INDEX")) {
                if (!isToken(ON)) {
                    ifNotExists = readIfNotExists();
                    indexName = readIdentifierWithSchema(null);
                    oldSchema = getSchema();
                }
            } else {
                throw getSyntaxError();
            }
        }
        read(ON);
        String tableName = readIdentifierWithSchema();
        checkSchema(oldSchema);
        CreateIndex command = new CreateIndex(session, getSchema());
        command.setIfNotExists(ifNotExists);
        command.setPrimaryKey(primaryKey);
        command.setTableName(tableName);
        command.setUnique(unique);
        command.setIndexName(indexName);
        command.setComment(readCommentIf());
        read(OPEN_PAREN);
        command.setIndexColumns(parseIndexColumnList());
        // MySQL compatibility: USING BTREE | RTREE | HASH, not allowed
        // when HASH or SPATIAL was already specified
        if (readIf("USING")) {
            if (hash) {
                throw getSyntaxError();
            }
            if (spatial) {
                throw getSyntaxError();
            }
            if (readIf("BTREE")) {
                // default
            } else if (readIf("RTREE")) {
                spatial = true;
            } else if (readIf("HASH")) {
                hash = true;
            } else {
                throw getSyntaxError();
            }
        }
        command.setHash(hash);
        command.setSpatial(spatial);
        return command;
    }
}
/**
 * Reads one GRANT/REVOKE item and adds it to the command: either a
 * table-level right (SELECT, DELETE, INSERT, UPDATE, ALL), a special
 * right (ALTER ANY SCHEMA), an ignored Oracle right (CONNECT,
 * RESOURCE), or a role name.
 *
 * @param command the GRANT/REVOKE command to add the item to
 * @return true if we expect to see a TABLE clause
 */
private boolean addRoleOrRight(GrantRevoke command) {
    if (readIf(SELECT)) {
        command.addRight(Right.SELECT);
        return true;
    } else if (readIf("DELETE")) {
        command.addRight(Right.DELETE);
        return true;
    } else if (readIf("INSERT")) {
        command.addRight(Right.INSERT);
        return true;
    } else if (readIf("UPDATE")) {
        command.addRight(Right.UPDATE);
        return true;
    } else if (readIf(ALL)) {
        command.addRight(Right.ALL);
        return true;
    } else if (readIf("ALTER")) {
        read("ANY");
        read("SCHEMA");
        command.addRight(Right.ALTER_ANY_SCHEMA);
        command.addTable(null);
        return false;
    } else if (readIf("CONNECT")) {
        // ignore this right
        return true;
    } else if (readIf("RESOURCE")) {
        // ignore this right
        return true;
    } else {
        // anything else is a role name
        command.addRoleName(readUniqueIdentifier());
        return false;
    }
}
/**
 * Parses a GRANT or REVOKE statement: a comma-separated list of rights
 * or roles (the two may not be mixed), an optional ON clause with a
 * schema or tables, and the grantee.
 *
 * @param operationType CommandInterface.GRANT or CommandInterface.REVOKE
 * @return the prepared command
 */
private GrantRevoke parseGrantRevoke(int operationType) {
    GrantRevoke command = new GrantRevoke(session);
    command.setOperationType(operationType);
    // the first item decides whether an ON TABLE clause may follow
    boolean tableClauseExpected = addRoleOrRight(command);
    while (readIf(COMMA)) {
        addRoleOrRight(command);
        if (command.isRightMode() && command.isRoleMode()) {
            throw DbException
                    .get(ErrorCode.ROLES_AND_RIGHT_CANNOT_BE_MIXED);
        }
    }
    if (tableClauseExpected) {
        if (readIf(ON)) {
            if (readIf("SCHEMA")) {
                Schema schema = database.getSchema(readAliasIdentifier());
                command.setSchema(schema);
            } else {
                do {
                    Table table = readTableOrView();
                    command.addTable(table);
                } while (readIf(COMMA));
            }
        }
    }
    if (operationType == CommandInterface.GRANT) {
        read("TO");
    } else {
        read(FROM);
    }
    command.setGranteeName(readUniqueIdentifier());
    return command;
}
/**
 * Parses a VALUES list as a SELECT * over an inline function table.
 */
private Select parseValues() {
    Select select = new Select(session, currentSelect);
    currentSelect = select;
    TableFilter valuesFilter = parseValuesTable(0);
    select.setWildcard();
    select.addTableFilter(valuesFilter, true);
    return select;
}
/**
 * Parses the rows of a VALUES list into an inline TABLE() function:
 * reads each row (single- or multi-column), infers a common column type
 * per position, validates that all rows have the same column count, and
 * wraps the result in a table filter.
 *
 * @param orderInFrom the position of this table in the FROM clause
 * @return a table filter over the generated function table
 */
private TableFilter parseValuesTable(int orderInFrom) {
    Schema mainSchema = database.getMainSchema();
    TableFunction tf = (TableFunction) Function.getFunction(database, "TABLE");
    ArrayList<Column> columns = Utils.newSmallArrayList();
    ArrayList<ArrayList<Expression>> rows = Utils.newSmallArrayList();
    do {
        int i = 0;
        ArrayList<Expression> row = Utils.newSmallArrayList();
        boolean multiColumn;
        // ROW (...) or plain (...) introduces a multi-column row
        if (readIf(ROW)) {
            read(OPEN_PAREN);
            multiColumn = true;
        } else {
            multiColumn = readIf(OPEN_PAREN);
        }
        do {
            Expression expr = readExpression();
            expr = expr.optimize(session);
            TypeInfo type = expr.getType();
            Column column;
            // generated column names are C1, C2, ...
            String columnName = "C" + (i + 1);
            if (rows.isEmpty()) {
                // first row defines the initial column types
                if (type.getValueType() == Value.UNKNOWN) {
                    type = TypeInfo.TYPE_STRING;
                }
                column = new Column(columnName, type);
                columns.add(column);
            } else {
                if (i >= columns.size()) {
                    throw DbException.get(ErrorCode.COLUMN_COUNT_DOES_NOT_MATCH);
                }
                // widen the column type to cover this row's value
                type = Value.getHigherType(columns.get(i).getType(), type);
                column = new Column(columnName, type);
                columns.set(i, column);
            }
            row.add(expr);
            i++;
        } while (multiColumn && readIfMore(true));
        rows.add(row);
    } while (readIf(COMMA));
    int columnCount = columns.size();
    int rowCount = rows.size();
    for (ArrayList<Expression> row : rows) {
        if (row.size() != columnCount) {
            throw DbException.get(ErrorCode.COLUMN_COUNT_DOES_NOT_MATCH);
        }
    }
    for (int i = 0; i < columnCount; i++) {
        Column c = columns.get(i);
        if (c.getType().getValueType() == Value.UNKNOWN) {
            c = new Column(c.getName(), Value.STRING);
            columns.set(i, c);
        }
        // transpose rows into one expression list per column
        Expression[] array = new Expression[rowCount];
        for (int j = 0; j < rowCount; j++) {
            array[j] = rows.get(j).get(i);
        }
        ExpressionList list = new ExpressionList(array, false);
        tf.setParameter(i, list);
    }
    tf.setColumns(columns);
    tf.doneWithParameters();
    Table table = new FunctionTable(mainSchema, session, tf, tf);
    return new TableFilter(session, table, null, rightsChecked, currentSelect, orderInFrom, null);
}
/**
 * Parses a CALL statement.
 */
private Call parseCall() {
    Call call = new Call(session);
    currentPrepared = call;
    Expression expression = readExpression();
    call.setExpression(expression);
    return call;
}
/**
 * Parses CREATE ROLE [IF NOT EXISTS] name.
 */
private CreateRole parseCreateRole() {
    CreateRole createRole = new CreateRole(session);
    boolean ifNotExists = readIfNotExists();
    createRole.setIfNotExists(ifNotExists);
    createRole.setRoleName(readUniqueIdentifier());
    return createRole;
}
/**
 * Parses CREATE SCHEMA with optional AUTHORIZATION and WITH
 * table-engine parameters.
 */
private CreateSchema parseCreateSchema() {
    CreateSchema createSchema = new CreateSchema(session);
    createSchema.setIfNotExists(readIfNotExists());
    createSchema.setSchemaName(readUniqueIdentifier());
    // default to the current user when no AUTHORIZATION is given
    String authorization = readIf("AUTHORIZATION")
            ? readUniqueIdentifier()
            : session.getUser().getName();
    createSchema.setAuthorization(authorization);
    if (readIf(WITH)) {
        createSchema.setTableEngineParams(readTableEngineParams());
    }
    return createSchema;
}
/**
 * Reads a comma-separated list of table engine parameters.
 */
private ArrayList<String> readTableEngineParams() {
    ArrayList<String> engineParams = Utils.newSmallArrayList();
    engineParams.add(readUniqueIdentifier());
    while (readIf(COMMA)) {
        engineParams.add(readUniqueIdentifier());
    }
    return engineParams;
}
/**
 * Parses CREATE SEQUENCE with its options.
 */
private CreateSequence parseCreateSequence() {
    boolean ifNotExists = readIfNotExists();
    String name = readIdentifierWithSchema();
    CreateSequence createSequence = new CreateSequence(session, getSchema());
    createSequence.setIfNotExists(ifNotExists);
    createSequence.setSequenceName(name);
    SequenceOptions options = new SequenceOptions();
    parseSequenceOptions(options, createSequence, true);
    createSequence.setOptions(options);
    return createSequence;
}
/**
 * Reads an optional "IF NOT EXISTS" clause.
 *
 * @return true if the clause was present
 */
private boolean readIfNotExists() {
    if (!readIf(IF)) {
        return false;
    }
    read(NOT);
    read(EXISTS);
    return true;
}
/**
 * Reads an optional AFFINITY or SHARD keyword.
 *
 * @return true if either keyword was present
 */
private boolean readIfAffinity() {
    if (readIf("AFFINITY")) {
        return true;
    }
    return readIf("SHARD");
}
/**
 * Parses CREATE CONSTANT name VALUE expression.
 */
private CreateConstant parseCreateConstant() {
    boolean ifNotExists = readIfNotExists();
    String name = readIdentifierWithSchema();
    Schema schema = getSchema();
    // a keyword can never be used as a constant name
    if (isKeyword(name)) {
        throw DbException.get(ErrorCode.CONSTANT_ALREADY_EXISTS_1, name);
    }
    read("VALUE");
    Expression value = readExpression();
    CreateConstant createConstant = new CreateConstant(session, schema);
    createConstant.setConstantName(name);
    createConstant.setExpression(value);
    createConstant.setIfNotExists(ifNotExists);
    return createConstant;
}
/**
 * Parses CREATE AGGREGATE name FOR className.
 */
private CreateAggregate parseCreateAggregate(boolean force) {
    boolean ifNotExists = readIfNotExists();
    CreateAggregate createAggregate = new CreateAggregate(session);
    createAggregate.setForce(force);
    String name = readIdentifierWithSchema();
    // the name must not collide with keywords, built-in functions or
    // registered aggregates
    boolean nameTaken = isKeyword(name)
            || Function.getFunction(database, name) != null
            || getAggregateType(name) != null;
    if (nameTaken) {
        throw DbException.get(ErrorCode.FUNCTION_ALIAS_ALREADY_EXISTS_1,
                name);
    }
    createAggregate.setName(name);
    createAggregate.setSchema(getSchema());
    createAggregate.setIfNotExists(ifNotExists);
    read(FOR);
    createAggregate.setJavaClassMethod(readUniqueIdentifier());
    return createAggregate;
}
/**
 * Parses CREATE DOMAIN / TYPE / DATATYPE name AS columnDefinition
 * [CHECK expression].
 */
private CreateDomain parseCreateDomain() {
    boolean ifNotExists = readIfNotExists();
    CreateDomain createDomain = new CreateDomain(session);
    createDomain.setTypeName(readUniqueIdentifier());
    read("AS");
    // "VALUE" is the placeholder column name inside a domain definition
    Column column = parseColumnForTable("VALUE", true, false);
    if (readIf(CHECK)) {
        column.addCheckConstraint(session, readExpression());
    }
    column.rename(null);
    createDomain.setColumn(column);
    createDomain.setIfNotExists(ifNotExists);
    return createDomain;
}
/**
 * Parses CREATE TRIGGER: timing (INSTEAD OF / BEFORE / AFTER), a list
 * of events (INSERT, UPDATE, DELETE, SELECT, ROLLBACK), the target
 * table, row/statement granularity, queue size, and either a source
 * string (AS) or a trigger class name (CALL).
 *
 * @param force whether to create the trigger even if the class is
 *            missing
 * @return the prepared command
 */
private CreateTrigger parseCreateTrigger(boolean force) {
    boolean ifNotExists = readIfNotExists();
    String triggerName = readIdentifierWithSchema(null);
    Schema schema = getSchema();
    boolean insteadOf, isBefore;
    if (readIf("INSTEAD")) {
        read("OF");
        isBefore = true;
        insteadOf = true;
    } else if (readIf("BEFORE")) {
        insteadOf = false;
        isBefore = true;
    } else {
        read("AFTER");
        insteadOf = false;
        isBefore = false;
    }
    int typeMask = 0;
    boolean onRollback = false;
    // events may be separated by commas, or by OR in PostgreSQL mode
    do {
        if (readIf("INSERT")) {
            typeMask |= Trigger.INSERT;
        } else if (readIf("UPDATE")) {
            typeMask |= Trigger.UPDATE;
        } else if (readIf("DELETE")) {
            typeMask |= Trigger.DELETE;
        } else if (readIf(SELECT)) {
            typeMask |= Trigger.SELECT;
        } else if (readIf("ROLLBACK")) {
            onRollback = true;
        } else {
            throw getSyntaxError();
        }
    } while (readIf(COMMA)
            || (database.getMode().getEnum() == ModeEnum.PostgreSQL
                    && readIf("OR")));
    read(ON);
    String tableName = readIdentifierWithSchema();
    checkSchema(schema);
    CreateTrigger command = new CreateTrigger(session, getSchema());
    command.setForce(force);
    command.setTriggerName(triggerName);
    command.setIfNotExists(ifNotExists);
    command.setInsteadOf(insteadOf);
    command.setBefore(isBefore);
    command.setOnRollback(onRollback);
    command.setTypeMask(typeMask);
    command.setTableName(tableName);
    if (readIf(FOR)) {
        read("EACH");
        read(ROW);
        command.setRowBased(true);
    } else {
        command.setRowBased(false);
    }
    if (readIf("QUEUE")) {
        command.setQueueSize(readNonNegativeInt());
    }
    command.setNoWait(readIf("NOWAIT"));
    if (readIf("AS")) {
        command.setTriggerSource(readString());
    } else {
        read("CALL");
        command.setTriggerClassName(readUniqueIdentifier());
    }
    return command;
}
/**
 * Parses a CREATE USER statement (after the CREATE USER prefix). The
 * credentials are given either as PASSWORD, as SALT/HASH, or in the
 * IDENTIFIED BY compatibility form.
 *
 * @return the prepared CreateUser command
 */
private CreateUser parseCreateUser() {
    CreateUser user = new CreateUser(session);
    user.setIfNotExists(readIfNotExists());
    user.setUserName(readUniqueIdentifier());
    user.setComment(readCommentIf());
    if (readIf("PASSWORD")) {
        user.setPassword(readExpression());
    } else if (readIf("SALT")) {
        // Pre-hashed credentials: SALT ... HASH ...
        user.setSalt(readExpression());
        read("HASH");
        user.setHash(readExpression());
    } else if (readIf("IDENTIFIED")) {
        read("BY");
        // The password is read as an identifier: uppercased unless quoted
        user.setPassword(ValueExpression.get(ValueString
                .get(readColumnIdentifier())));
    } else {
        throw getSyntaxError();
    }
    if (readIf("ADMIN")) {
        user.setAdmin(true);
    }
    return user;
}
/**
 * Parses a CREATE ALIAS statement for a user-defined function (after the
 * CREATE [FORCE] ALIAS prefix).
 *
 * @param force whether FORCE was specified (create the alias even if the
 *            referenced method cannot be resolved)
 * @return the prepared CreateFunctionAlias command
 */
private CreateFunctionAlias parseCreateFunctionAlias(boolean force) {
    boolean ifNotExists = readIfNotExists();
    String aliasName;
    if (currentTokenType != IDENTIFIER) {
        // Allow non-identifier tokens (e.g. keywords) as alias names; such
        // an alias is always created in the current schema.
        aliasName = currentToken;
        read();
        schemaName = session.getCurrentSchemaName();
    } else {
        aliasName = readIdentifierWithSchema();
    }
    final boolean newAliasSameNameAsBuiltin = Function.getFunction(database, aliasName) != null;
    if (database.isAllowBuiltinAliasOverride() && newAliasSameNameAsBuiltin) {
        // fine
    } else if (isKeyword(aliasName) ||
            newAliasSameNameAsBuiltin ||
            getAggregateType(aliasName) != null) {
        // Reject names that would shadow a keyword, a built-in function or
        // an aggregate, unless overriding built-ins is explicitly enabled.
        throw DbException.get(ErrorCode.FUNCTION_ALIAS_ALREADY_EXISTS_1,
                aliasName);
    }
    CreateFunctionAlias command = new CreateFunctionAlias(session,
            getSchema());
    command.setForce(force);
    command.setAliasName(aliasName);
    command.setIfNotExists(ifNotExists);
    command.setDeterministic(readIf("DETERMINISTIC"));
    // NOBUFFER disables buffering the result set into a local temp object
    command.setBufferResultSetToLocalTemp(!readIf("NOBUFFER"));
    if (readIf("AS")) {
        // Inline Java source code
        command.setSource(readString());
    } else {
        // Reference to an existing Java method: FOR "class.method"
        read(FOR);
        command.setJavaClassMethod(readUniqueIdentifier());
    }
    return command;
}
/**
 * Parses a WITH statement. Any CTE views registered before a failure are
 * dropped again so a broken statement leaves no residue behind.
 *
 * @return the prepared statement
 */
private Prepared parseWith() {
    List<TableView> cteViews = new ArrayList<>();
    try {
        return parseWith1(cteViews);
    } catch (Throwable t) {
        // Undo partially-created common table expressions, then rethrow
        CommandContainer.clearCTE(session, cteViews);
        throw t;
    }
}
/**
 * Parses the body of a WITH statement: one or more common table
 * expressions, followed by the statement that uses them (a query, DML, or
 * a limited form of CREATE TABLE).
 *
 * @param viewsCreated receives the CTE views as they are created, so the
 *            caller can clean them up on failure
 * @return the prepared statement
 */
private Prepared parseWith1(List<TableView> viewsCreated) {
    // RECURSIVE is accepted but ignored; recursion is auto-detected later
    readIf("RECURSIVE");
    // This WITH statement is not a temporary view - it is part of a persistent view
    // as in CREATE VIEW abc AS WITH my_cte - this auto detects that condition.
    final boolean isTemporary = !session.isParsingCreateView();
    do {
        viewsCreated.add(parseSingleCommonTableExpression(isTemporary));
    } while (readIf(COMMA));
    Prepared p;
    // Reverse the order of constructed CTE views - as the destruction order
    // (since later created view may depend on previously created views -
    // we preserve that dependency order in the destruction sequence )
    // used in setCteCleanups.
    Collections.reverse(viewsCreated);
    // The main statement may be wrapped in any number of parentheses
    int parentheses = 0;
    while (readIf(OPEN_PAREN)) {
        parentheses++;
    }
    if (isToken(SELECT) || isToken(VALUES)) {
        p = parseWithQuery();
    } else if (isToken(TABLE)) {
        // TABLE <name> is allowed; TABLE(...) (a table function) is not.
        // Remember the position so the TABLE token can be re-read by
        // parseWithQuery after the lookahead.
        int index = lastParseIndex;
        read();
        if (!isToken(OPEN_PAREN)) {
            parseIndex = index;
            read();
            p = parseWithQuery();
        } else {
            throw DbException.get(ErrorCode.SYNTAX_ERROR_1, WITH_STATEMENT_SUPPORTS_LIMITED_SUB_STATEMENTS);
        }
    } else if (readIf("INSERT")) {
        p = parseInsert();
        p.setPrepareAlways(true);
    } else if (readIf("UPDATE")) {
        p = parseUpdate();
        p.setPrepareAlways(true);
    } else if (readIf("MERGE")) {
        p = parseMerge();
        p.setPrepareAlways(true);
    } else if (readIf("DELETE")) {
        p = parseDelete();
        p.setPrepareAlways(true);
    } else if (readIf("CREATE")) {
        if (!isToken(TABLE)) {
            throw DbException.get(ErrorCode.SYNTAX_ERROR_1,
                    WITH_STATEMENT_SUPPORTS_LIMITED_SUB_STATEMENTS);
        }
        p = parseCreate();
        p.setPrepareAlways(true);
    } else {
        throw DbException.get(ErrorCode.SYNTAX_ERROR_1,
                WITH_STATEMENT_SUPPORTS_LIMITED_SUB_STATEMENTS);
    }
    // Close any parentheses that wrapped the main statement
    for (; parentheses > 0; parentheses--) {
        read(CLOSE_PAREN);
    }
    // Clean up temporary views starting with last to first (in case of
    // dependencies) - but only if they are not persistent.
    if (isTemporary) {
        p.setCteCleanups(viewsCreated);
    }
    return p;
}
/**
 * Parses the query part of a WITH statement. Such queries are always
 * re-prepared and are never evaluated lazily.
 *
 * @return the prepared query
 */
private Prepared parseWithQuery() {
    Query body = parseSelectUnion();
    body.setPrepareAlways(true);
    body.setNeverLazy(true);
    return body;
}
/**
 * Parses one common table expression of a WITH clause:
 * {@code name [(col, ...)] AS (query)}.
 *
 * @param isTemporary whether the resulting view is session-local (plain
 *            WITH statement) rather than part of a persistent CREATE VIEW
 * @return the view created for this CTE
 */
private TableView parseSingleCommonTableExpression(boolean isTemporary) {
    String cteViewName = readIdentifierWithSchema();
    Schema schema = getSchema();
    ArrayList<Column> columns = Utils.newSmallArrayList();
    String[] cols = null;
    // column names are now optional - they can be inferred from the named
    // query, if not supplied by user
    if (readIf(OPEN_PAREN)) {
        cols = parseColumnList();
        for (String c : cols) {
            // we don't really know the type of the column, so STRING will
            // have to do, UNKNOWN does not work here
            columns.add(new Column(c, Value.STRING));
        }
    }
    // An existing CTE view with the same name is replaced below; any other
    // kind of object with that name is an error.
    Table oldViewFound;
    if (!isTemporary) {
        oldViewFound = getSchema().findTableOrView(session, cteViewName);
    } else {
        oldViewFound = session.findLocalTempTable(cteViewName);
    }
    // this persistent check conflicts with check 10 lines down
    if (oldViewFound != null) {
        if (!(oldViewFound instanceof TableView)) {
            throw DbException.get(ErrorCode.TABLE_OR_VIEW_ALREADY_EXISTS_1,
                    cteViewName);
        }
        TableView tv = (TableView) oldViewFound;
        if (!tv.isTableExpression()) {
            throw DbException.get(ErrorCode.TABLE_OR_VIEW_ALREADY_EXISTS_1,
                    cteViewName);
        }
        // Drop the old CTE view so it can be re-created
        if (!isTemporary) {
            oldViewFound.lock(session, true, true);
            database.removeSchemaObject(session, oldViewFound);
        } else {
            session.removeLocalTempTable(oldViewFound);
        }
    }
    /*
     * This table is created as a workaround because recursive table
     * expressions need to reference something that look like themselves to
     * work (its removed after creation in this method). Only create table
     * data and table if we don't have a working CTE already.
     */
    Table recursiveTable = TableView.createShadowTableForRecursiveTableExpression(
            isTemporary, session, cteViewName, schema, columns, database);
    List<Column> columnTemplateList;
    String[] querySQLOutput = {null};
    try {
        read("AS");
        read(OPEN_PAREN);
        Query withQuery = parseSelect();
        if (!isTemporary) {
            withQuery.session = session;
        }
        read(CLOSE_PAREN);
        // Derive column templates (and the query SQL text) from the parsed
        // query, honouring any user-supplied column names.
        columnTemplateList = TableView.createQueryColumnTemplateList(cols, withQuery, querySQLOutput);
    } finally {
        // The shadow table is always removed, even if parsing failed
        TableView.destroyShadowTableForRecursiveExpression(isTemporary, session, recursiveTable);
    }
    return createCTEView(cteViewName,
            querySQLOutput[0], columnTemplateList,
            true/* allowRecursiveQueryDetection */,
            true/* add to session */,
            isTemporary);
}
/**
 * Creates the view object for a parsed common table expression and,
 * optionally, registers it with the schema or the session.
 *
 * @param cteViewName name of the CTE view
 * @param querySQL SQL text of the underlying query
 * @param columnTemplateList column templates derived from the query
 * @param allowRecursiveQueryDetection whether the first compile may treat
 *            the query as recursive
 * @param addViewToSession whether to register the created view
 * @param isTemporary whether the view is session-local
 * @return the created view
 */
private TableView createCTEView(String cteViewName, String querySQL,
        List<Column> columnTemplateList, boolean allowRecursiveQueryDetection,
        boolean addViewToSession, boolean isTemporary) {
    Database db = session.getDatabase();
    Schema schema = getSchemaWithDefault();
    int id = db.allocateObjectId();
    Column[] columnTemplateArray = columnTemplateList.toArray(new Column[0]);
    // No easy way to determine if this is a recursive query up front, so we just compile
    // it twice - once without the flag set, and if we didn't see a recursive term,
    // then we just compile it again.
    TableView view;
    synchronized (session) {
        view = new TableView(schema, id, cteViewName, querySQL,
                parameters, columnTemplateArray, session,
                allowRecursiveQueryDetection, false /* literalsChecked */, true /* isTableExpression */,
                isTemporary);
        if (!view.isRecursiveQueryDetected() && allowRecursiveQueryDetection) {
            // Not recursive after all: discard the first compile and
            // re-create the view without recursion support.
            if (!isTemporary) {
                db.addSchemaObject(session, view);
                view.lock(session, true, true);
                db.removeSchemaObject(session, view);
            } else {
                session.removeLocalTempTable(view);
            }
            view = new TableView(schema, id, cteViewName, querySQL, parameters,
                    columnTemplateArray, session,
                    false/* assume recursive */, false /* literalsChecked */, true /* isTableExpression */,
                    isTemporary);
        }
        // both removeSchemaObject and removeLocalTempTable hold meta locks
        db.unlockMeta(session);
    }
    view.setTableExpression(true);
    view.setTemporary(isTemporary);
    view.setHidden(true);
    view.setOnCommitDrop(false);
    if (addViewToSession) {
        if (!isTemporary) {
            db.addSchemaObject(session, view);
            view.unlock(session);
            db.unlockMeta(session);
        } else {
            session.addLocalTempTable(view);
        }
    }
    return view;
}
/**
 * Parses a CREATE VIEW statement (after CREATE [OR REPLACE] [FORCE] VIEW).
 *
 * @param force whether FORCE was specified: keep the view even when its
 *            query does not currently parse
 * @param orReplace whether OR REPLACE was specified
 * @return the prepared CreateView command
 */
private CreateView parseCreateView(boolean force, boolean orReplace) {
    boolean ifNotExists = readIfNotExists();
    boolean isTableExpression = readIf("TABLE_EXPRESSION");
    String viewName = readIdentifierWithSchema();
    CreateView command = new CreateView(session, getSchema());
    this.createView = command;
    command.setViewName(viewName);
    command.setIfNotExists(ifNotExists);
    command.setComment(readCommentIf());
    command.setOrReplace(orReplace);
    command.setForce(force);
    command.setTableExpression(isTableExpression);
    if (readIf(OPEN_PAREN)) {
        String[] cols = parseColumnList();
        command.setColumnNames(cols);
    }
    // Capture the raw remaining SQL so it can be stored verbatim if FORCE
    // is used and the query fails to parse or prepare.
    String select = StringUtils.cache(sqlCommand
            .substring(parseIndex));
    read("AS");
    try {
        Query query;
        // Mark the session while parsing so nested WITH clauses know they
        // belong to a persistent view (see parseWith1).
        session.setParsingCreateView(true, viewName);
        try {
            query = parseSelect();
            query.prepare();
        } finally {
            session.setParsingCreateView(false, viewName);
        }
        command.setSelect(query);
    } catch (DbException e) {
        if (force) {
            // FORCE: keep the raw SQL and skip all remaining tokens
            command.setSelectSQL(select);
            while (currentTokenType != END) {
                read();
            }
        } else {
            throw e;
        }
    }
    return command;
}
/**
 * Parses a CHECKPOINT statement; CHECKPOINT SYNC additionally forces a
 * sync to disk.
 *
 * @return the prepared transaction command
 */
private TransactionCommand parseCheckpoint() {
    int type = readIf("SYNC")
            ? CommandInterface.CHECKPOINT_SYNC
            : CommandInterface.CHECKPOINT;
    return new TransactionCommand(session, type);
}
/**
 * Parses an ALTER statement, dispatching on the object type that follows
 * the ALTER keyword.
 *
 * @return the prepared statement
 */
private Prepared parseAlter() {
    if (readIf(TABLE)) {
        return parseAlterTable();
    }
    if (readIf("USER")) {
        return parseAlterUser();
    }
    if (readIf("INDEX")) {
        return parseAlterIndex();
    }
    if (readIf("SCHEMA")) {
        return parseAlterSchema();
    }
    if (readIf("SEQUENCE")) {
        return parseAlterSequence();
    }
    if (readIf("VIEW")) {
        return parseAlterView();
    }
    throw getSyntaxError();
}
/**
 * Verifies that an optional schema prefix read afterwards matches the
 * schema that was resolved earlier in the same statement.
 *
 * @param old the previously resolved schema, or null to skip the check
 */
private void checkSchema(Schema old) {
    if (old == null) {
        return;
    }
    if (getSchema() != old) {
        throw DbException.get(ErrorCode.SCHEMA_NAME_MUST_MATCH);
    }
}
/**
 * Parses ALTER INDEX [IF EXISTS] name RENAME TO newName. The new name must
 * be in the same schema as the old one.
 *
 * @return the prepared rename command
 */
private AlterIndexRename parseAlterIndex() {
    boolean ifExists = readIfExists(false);
    String oldName = readIdentifierWithSchema();
    Schema oldSchema = getSchema();
    AlterIndexRename rename = new AlterIndexRename(session);
    rename.setOldSchema(oldSchema);
    rename.setOldName(oldName);
    rename.setIfExists(ifExists);
    read("RENAME");
    read("TO");
    String newName = readIdentifierWithSchema(oldSchema.getName());
    // A schema prefix on the new name must match the old schema
    checkSchema(oldSchema);
    rename.setNewName(newName);
    return rename;
}
/**
 * Parses ALTER VIEW [IF EXISTS] name, which is either RENAME TO newName or
 * RECOMPILE.
 *
 * @return the prepared command
 */
private DefineCommand parseAlterView() {
    boolean ifExists = readIfExists(false);
    String viewName = readIdentifierWithSchema();
    Schema schema = getSchema();
    Table existing = schema.findTableOrView(session, viewName);
    // Unless IF EXISTS was given, the name must refer to an actual view
    if (!(existing instanceof TableView) && !ifExists) {
        throw DbException.get(ErrorCode.VIEW_NOT_FOUND_1, viewName);
    }
    if (readIf("RENAME")) {
        read("TO");
        String newName = readIdentifierWithSchema(schema.getName());
        checkSchema(schema);
        AlterTableRename rename = new AlterTableRename(session, getSchema());
        rename.setOldTableName(viewName);
        rename.setNewTableName(newName);
        rename.setIfTableExists(ifExists);
        return rename;
    }
    read("RECOMPILE");
    AlterView recompile = new AlterView(session);
    recompile.setIfExists(ifExists);
    recompile.setView((TableView) existing);
    return recompile;
}
/**
 * Parses ALTER SCHEMA [IF EXISTS] name RENAME TO newName.
 *
 * @return the prepared command, or a no-operation when IF EXISTS was given
 *         and the schema does not exist
 */
private Prepared parseAlterSchema() {
    boolean ifExists = readIfExists(false);
    String oldName = readIdentifierWithSchema();
    Schema old = getSchema();
    read("RENAME");
    read("TO");
    String newName = readIdentifierWithSchema(old.getName());
    Schema toRename = findSchema(oldName);
    if (toRename == null) {
        if (ifExists) {
            return new NoOperation(session);
        }
        throw DbException.get(ErrorCode.SCHEMA_NOT_FOUND_1, oldName);
    }
    AlterSchemaRename rename = new AlterSchemaRename(session);
    rename.setOldSchema(toRename);
    // A schema prefix on the new name must match the old schema
    checkSchema(old);
    rename.setNewName(newName);
    return rename;
}
/**
 * Parses ALTER SEQUENCE [IF EXISTS] name followed by sequence options
 * (RESTART WITH, INCREMENT BY, ...).
 *
 * @return the prepared command
 */
private AlterSequence parseAlterSequence() {
    boolean ifExists = readIfExists(false);
    String name = readIdentifierWithSchema();
    AlterSequence alter = new AlterSequence(session, getSchema());
    alter.setSequenceName(name);
    alter.setIfExists(ifExists);
    SequenceOptions opts = new SequenceOptions();
    parseSequenceOptions(opts, null, false);
    alter.setOptions(opts);
    return alter;
}
/**
 * Parses sequence options (START/RESTART WITH, INCREMENT BY, MINVALUE,
 * MAXVALUE, CYCLE, CACHE and their negated forms) until a token that is
 * not an option is reached.
 *
 * @param options the options object to fill in
 * @param command the CREATE SEQUENCE command, or null when altering a
 *            sequence (create-only options are then not accepted)
 * @param forCreate whether this is CREATE SEQUENCE (accepts START) rather
 *            than ALTER SEQUENCE (accepts RESTART)
 */
private void parseSequenceOptions(SequenceOptions options, CreateSequence command, boolean forCreate) {
    for (;;) {
        if (readIf(forCreate ? "START" : "RESTART")) {
            readIf(WITH);
            options.setStartValue(readExpression());
        } else if (readIf("INCREMENT")) {
            readIf("BY");
            options.setIncrement(readExpression());
        } else if (readIf("MINVALUE")) {
            options.setMinValue(readExpression());
        } else if (readIf("NOMINVALUE")) {
            // NULL means "no limit"
            options.setMinValue(ValueExpression.getNull());
        } else if (readIf("MAXVALUE")) {
            options.setMaxValue(readExpression());
        } else if (readIf("NOMAXVALUE")) {
            options.setMaxValue(ValueExpression.getNull());
        } else if (readIf("CYCLE")) {
            options.setCycle(true);
        } else if (readIf("NOCYCLE")) {
            options.setCycle(false);
        } else if (readIf("NO")) {
            // Standard SQL spelling: NO MINVALUE / NO MAXVALUE / NO CYCLE /
            // NO CACHE
            if (readIf("MINVALUE")) {
                options.setMinValue(ValueExpression.getNull());
            } else if (readIf("MAXVALUE")) {
                options.setMaxValue(ValueExpression.getNull());
            } else if (readIf("CYCLE")) {
                options.setCycle(false);
            } else if (readIf("CACHE")) {
                // a cache size of 1 disables caching
                options.setCacheSize(ValueExpression.get(ValueLong.get(1)));
            } else {
                // NOTE(review): the NO token has already been consumed at
                // this point when we break out of the loop
                break;
            }
        } else if (readIf("CACHE")) {
            options.setCacheSize(readExpression());
        } else if (readIf("NOCACHE")) {
            options.setCacheSize(ValueExpression.get(ValueLong.get(1)));
        } else if (command != null) {
            // Options that are only valid for CREATE SEQUENCE
            if (readIf("BELONGS_TO_TABLE")) {
                command.setBelongsToTable(true);
            } else if (readIf(ORDER)) {
                // Oracle compatibility
            } else {
                break;
            }
        } else {
            break;
        }
    }
}
/**
 * Parses an ALTER USER statement: SET PASSWORD / SET SALT...HASH,
 * RENAME TO, or ADMIN TRUE/FALSE.
 *
 * @return the prepared command
 */
private AlterUser parseAlterUser() {
    String userName = readUniqueIdentifier();
    if (readIf("SET")) {
        AlterUser command = new AlterUser(session);
        command.setType(CommandInterface.ALTER_USER_SET_PASSWORD);
        command.setUser(database.getUser(userName));
        if (readIf("PASSWORD")) {
            command.setPassword(readExpression());
        } else if (readIf("SALT")) {
            // SALT ... HASH ... supplies pre-hashed credentials
            command.setSalt(readExpression());
            read("HASH");
            command.setHash(readExpression());
        } else {
            throw getSyntaxError();
        }
        return command;
    } else if (readIf("RENAME")) {
        read("TO");
        AlterUser command = new AlterUser(session);
        command.setType(CommandInterface.ALTER_USER_RENAME);
        command.setUser(database.getUser(userName));
        String newName = readUniqueIdentifier();
        command.setNewName(newName);
        return command;
    } else if (readIf("ADMIN")) {
        AlterUser command = new AlterUser(session);
        command.setType(CommandInterface.ALTER_USER_ADMIN);
        User user = database.getUser(userName);
        command.setUser(user);
        if (readIf(TRUE)) {
            command.setAdmin(true);
        } else if (readIf(FALSE)) {
            command.setAdmin(false);
        } else {
            throw getSyntaxError();
        }
        return command;
    }
    throw getSyntaxError();
}
/**
 * Consumes an optional '=' or TO between a setting name and its value;
 * both separators are accepted and neither is required.
 */
private void readIfEqualOrTo() {
    boolean equalConsumed = readIf(EQUAL);
    if (!equalConsumed) {
        readIf("TO");
    }
}
/**
 * Parses a SET statement. Most settings map to a Set command with a type
 * from SetTypes; a number of settings exist only for compatibility with
 * other databases or with database URL options and are accepted but
 * ignored (see readAndIgnoreSettingValue).
 *
 * @return the prepared command
 */
private Prepared parseSet() {
    if (readIf(AT)) {
        // SET @VAR = expr: a session-scoped user variable
        Set command = new Set(session, SetTypes.VARIABLE);
        command.setString(readAliasIdentifier());
        readIfEqualOrTo();
        command.setExpression(readExpression());
        return command;
    } else if (readIf("AUTOCOMMIT")) {
        readIfEqualOrTo();
        boolean value = readBooleanSetting();
        int setting = value ? CommandInterface.SET_AUTOCOMMIT_TRUE
                : CommandInterface.SET_AUTOCOMMIT_FALSE;
        return new TransactionCommand(session, setting);
    } else if (readIf("MVCC")) {
        // Legacy setting: the value is read but ignored
        readIfEqualOrTo();
        readBooleanSetting();
        return new NoOperation(session);
    } else if (readIf("EXCLUSIVE")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.EXCLUSIVE);
        command.setExpression(readExpression());
        return command;
    } else if (readIf("IGNORECASE")) {
        readIfEqualOrTo();
        boolean value = readBooleanSetting();
        Set command = new Set(session, SetTypes.IGNORECASE);
        command.setInt(value ? 1 : 0);
        return command;
    } else if (readIf("PASSWORD")) {
        // SET PASSWORD changes the password of the current user
        readIfEqualOrTo();
        AlterUser command = new AlterUser(session);
        command.setType(CommandInterface.ALTER_USER_SET_PASSWORD);
        command.setUser(session.getUser());
        command.setPassword(readExpression());
        return command;
    } else if (readIf("SALT")) {
        // SET SALT ... HASH ... supplies pre-hashed credentials
        readIfEqualOrTo();
        AlterUser command = new AlterUser(session);
        command.setType(CommandInterface.ALTER_USER_SET_PASSWORD);
        command.setUser(session.getUser());
        command.setSalt(readExpression());
        read("HASH");
        command.setHash(readExpression());
        return command;
    } else if (readIf("MODE")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.MODE);
        command.setString(readAliasIdentifier());
        return command;
    } else if (readIf("COMPRESS_LOB")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.COMPRESS_LOB);
        // The algorithm may be a string literal or an identifier
        if (currentTokenType == VALUE) {
            command.setString(readString());
        } else {
            command.setString(readUniqueIdentifier());
        }
        return command;
    } else if (readIf("DATABASE")) {
        // SET DATABASE COLLATION is an alias for SET COLLATION
        readIfEqualOrTo();
        read("COLLATION");
        return parseSetCollation();
    } else if (readIf("COLLATION")) {
        readIfEqualOrTo();
        return parseSetCollation();
    } else if (readIf("BINARY_COLLATION")) {
        readIfEqualOrTo();
        return parseSetBinaryCollation(SetTypes.BINARY_COLLATION);
    } else if (readIf("UUID_COLLATION")) {
        readIfEqualOrTo();
        return parseSetBinaryCollation(SetTypes.UUID_COLLATION);
    } else if (readIf("CLUSTER")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.CLUSTER);
        command.setString(readString());
        return command;
    } else if (readIf("DATABASE_EVENT_LISTENER")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.DATABASE_EVENT_LISTENER);
        command.setString(readString());
        return command;
    } else if (readIf("ALLOW_LITERALS")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.ALLOW_LITERALS);
        if (readIf("NONE")) {
            command.setInt(Constants.ALLOW_LITERALS_NONE);
        } else if (readIf(ALL)) {
            command.setInt(Constants.ALLOW_LITERALS_ALL);
        } else if (readIf("NUMBERS")) {
            command.setInt(Constants.ALLOW_LITERALS_NUMBERS);
        } else {
            command.setInt(readNonNegativeInt());
        }
        return command;
    } else if (readIf("DEFAULT_TABLE_TYPE")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.DEFAULT_TABLE_TYPE);
        if (readIf("MEMORY")) {
            command.setInt(Table.TYPE_MEMORY);
        } else if (readIf("CACHED")) {
            command.setInt(Table.TYPE_CACHED);
        } else {
            command.setInt(readNonNegativeInt());
        }
        return command;
    } else if (readIf("CREATE")) {
        // Derby compatibility (CREATE=TRUE in the database URL)
        return readAndIgnoreSettingValue();
    } else if (readIf("HSQLDB.DEFAULT_TABLE_TYPE")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("PAGE_STORE")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("CACHE_TYPE")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("FILE_LOCK")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("DB_CLOSE_ON_EXIT")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("AUTO_SERVER")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("AUTO_SERVER_PORT")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("AUTO_RECONNECT")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("ASSERT")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("ACCESS_MODE_DATA")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("OPEN_NEW")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("JMX")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("PAGE_SIZE")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("RECOVER")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("NAMES")) {
        // Quercus PHP MySQL driver compatibility
        return readAndIgnoreSettingValue();
    } else if (readIf("SCOPE_GENERATED_KEYS")) {
        return readAndIgnoreSettingValue();
    } else if (readIf("SCHEMA")) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.SCHEMA);
        command.setString(readAliasIdentifier());
        return command;
    } else if (readIf("DATESTYLE")) {
        // PostgreSQL compatibility; only the ISO style is accepted
        readIfEqualOrTo();
        if (!readIf("ISO")) {
            String s = readString();
            if (!equalsToken(s, "ISO")) {
                throw getSyntaxError();
            }
        }
        return new NoOperation(session);
    } else if (readIf("SEARCH_PATH") ||
            readIf(SetTypes.getTypeName(SetTypes.SCHEMA_SEARCH_PATH))) {
        readIfEqualOrTo();
        Set command = new Set(session, SetTypes.SCHEMA_SEARCH_PATH);
        ArrayList<String> list = Utils.newSmallArrayList();
        do {
            list.add(readAliasIdentifier());
        } while (readIf(COMMA));
        command.setStringArray(list.toArray(new String[0]));
        return command;
    } else if (readIf("JAVA_OBJECT_SERIALIZER")) {
        readIfEqualOrTo();
        return parseSetJavaObjectSerializer();
    } else {
        // Generic settings: SET <name> <expr>, where <name> is one of the
        // SetTypes names (with a couple of compatibility aliases).
        if (isToken("LOGSIZE")) {
            // HSQLDB compatibility
            currentToken = SetTypes.getTypeName(SetTypes.MAX_LOG_SIZE);
        }
        if (isToken("FOREIGN_KEY_CHECKS")) {
            // MySQL compatibility
            currentToken = SetTypes
                    .getTypeName(SetTypes.REFERENTIAL_INTEGRITY);
        }
        String typeName = currentToken;
        if (!identifiersToUpper) {
            typeName = StringUtils.toUpperEnglish(typeName);
        }
        int type = SetTypes.getType(typeName);
        if (type < 0) {
            throw getSyntaxError();
        }
        read();
        readIfEqualOrTo();
        Set command = new Set(session, type);
        command.setExpression(readExpression());
        return command;
    }
}

/**
 * Consumes an optional '=' or TO followed by a single value token for a
 * setting that is accepted for compatibility but has no effect.
 *
 * @return a no-operation command
 */
private Prepared readAndIgnoreSettingValue() {
    readIfEqualOrTo();
    read();
    return new NoOperation(session);
}
/**
 * Parses a USE statement; USE &lt;schema&gt; behaves like
 * SET SCHEMA &lt;schema&gt;.
 *
 * @return the prepared command
 */
private Prepared parseUse() {
    readIfEqualOrTo();
    Set setSchema = new Set(session, SetTypes.SCHEMA);
    setSchema.setString(readAliasIdentifier());
    return setSchema;
}
/**
 * Parses the collation name (and optional STRENGTH) of a SET COLLATION
 * statement.
 *
 * @return the prepared Set command
 */
private Set parseSetCollation() {
    Set command = new Set(session, SetTypes.COLLATION);
    String name = readAliasIdentifier();
    command.setString(name);
    // OFF disables collation; no collator needs to exist for it
    if (equalsToken(name, CompareMode.OFF)) {
        return command;
    }
    Collator collator = CompareMode.getCollator(name);
    if (collator == null) {
        throw DbException.getInvalidValueException("collation", name);
    }
    if (!readIf("STRENGTH")) {
        // No explicit strength: use the collator's default
        command.setInt(collator.getStrength());
        return command;
    }
    if (readIf(PRIMARY)) {
        command.setInt(Collator.PRIMARY);
    } else if (readIf("SECONDARY")) {
        command.setInt(Collator.SECONDARY);
    } else if (readIf("TERTIARY")) {
        command.setInt(Collator.TERTIARY);
    } else if (readIf("IDENTICAL")) {
        command.setInt(Collator.IDENTICAL);
    }
    return command;
}
/**
 * Parses the value of SET BINARY_COLLATION or SET UUID_COLLATION; only
 * SIGNED and UNSIGNED are accepted.
 *
 * @param type SetTypes.BINARY_COLLATION or SetTypes.UUID_COLLATION
 * @return the prepared Set command
 */
private Set parseSetBinaryCollation(int type) {
    String name = readAliasIdentifier();
    boolean valid = equalsToken(name, CompareMode.UNSIGNED)
            || equalsToken(name, CompareMode.SIGNED);
    if (!valid) {
        throw DbException.getInvalidValueException(SetTypes.getTypeName(type), name);
    }
    Set command = new Set(session, type);
    command.setString(name);
    return command;
}
/**
 * Parses the value of SET JAVA_OBJECT_SERIALIZER; the serializer is given
 * as a string.
 *
 * @return the prepared Set command
 */
private Set parseSetJavaObjectSerializer() {
    Set command = new Set(session, SetTypes.JAVA_OBJECT_SERIALIZER);
    command.setString(readString());
    return command;
}
/**
 * Parses a RUNSCRIPT statement: FROM &lt;file&gt; with optional
 * COMPRESSION, CIPHER/PASSWORD and CHARSET clauses.
 *
 * @return the prepared command
 */
private RunScriptCommand parseRunScript() {
    RunScriptCommand script = new RunScriptCommand(session);
    read(FROM);
    script.setFileNameExpr(readExpression());
    if (readIf("COMPRESSION")) {
        script.setCompressionAlgorithm(readUniqueIdentifier());
    }
    if (readIf("CIPHER")) {
        // The password only applies when a cipher was specified
        script.setCipher(readUniqueIdentifier());
        if (readIf("PASSWORD")) {
            script.setPassword(readExpression());
        }
    }
    if (readIf("CHARSET")) {
        script.setCharset(Charset.forName(readString()));
    }
    return script;
}
/**
 * Parses a SCRIPT statement, which exports the database (or selected
 * schemas/tables) as SQL, optionally written to a file.
 *
 * @return the prepared command
 */
private ScriptCommand parseScript() {
    ScriptCommand command = new ScriptCommand(session);
    boolean data = true, passwords = true, settings = true;
    boolean dropTables = false, simple = false, withColumns = false;
    if (readIf("NODATA")) {
        data = false;
    } else {
        // SIMPLE and COLUMNS only make sense when data is included
        if (readIf("SIMPLE")) {
            simple = true;
        }
        if (readIf("COLUMNS")) {
            withColumns = true;
        }
    }
    if (readIf("NOPASSWORDS")) {
        passwords = false;
    }
    if (readIf("NOSETTINGS")) {
        settings = false;
    }
    if (readIf("DROP")) {
        dropTables = true;
    }
    if (readIf("BLOCKSIZE")) {
        long blockSize = readLong();
        command.setLobBlockSize(blockSize);
    }
    command.setData(data);
    command.setPasswords(passwords);
    command.setSettings(settings);
    command.setDrop(dropTables);
    command.setSimple(simple);
    command.setWithColumns(withColumns);
    if (readIf("TO")) {
        // Write to a file, optionally compressed and/or encrypted
        command.setFileNameExpr(readExpression());
        if (readIf("COMPRESSION")) {
            command.setCompressionAlgorithm(readUniqueIdentifier());
        }
        if (readIf("CIPHER")) {
            command.setCipher(readUniqueIdentifier());
            if (readIf("PASSWORD")) {
                command.setPassword(readExpression());
            }
        }
        if (readIf("CHARSET")) {
            command.setCharset(Charset.forName(readString()));
        }
    }
    // Optional filters: restrict the script to specific schemas or tables
    if (readIf("SCHEMA")) {
        HashSet<String> schemaNames = new HashSet<>();
        do {
            schemaNames.add(readUniqueIdentifier());
        } while (readIf(COMMA));
        command.setSchemaNames(schemaNames);
    } else if (readIf(TABLE)) {
        ArrayList<Table> tables = Utils.newSmallArrayList();
        do {
            tables.add(readTableOrView());
        } while (readIf(COMMA));
        command.setTables(tables);
    }
    return command;
}
/**
 * Is this the Oracle DUAL table or the IBM/DB2 SYSIBM table?
 *
 * @param tableName table name.
 * @return {@code true} if the table is DUAL special table. Otherwise returns {@code false}.
 * @see <a href="https://en.wikipedia.org/wiki/DUAL_table">Wikipedia: DUAL table</a>
 */
boolean isDualTable(String tableName) {
    // Oracle: DUAL, optionally qualified with SYS
    if ((schemaName == null || equalsToken(schemaName, "SYS")) && equalsToken("DUAL", tableName)) {
        return true;
    }
    // DB2: SYSDUMMY1, optionally qualified with SYSIBM, but only in
    // compatibility modes that enable it
    return database.getMode().sysDummy1
            && (schemaName == null || equalsToken(schemaName, "SYSIBM"))
            && equalsToken("SYSDUMMY1", tableName);
}
/**
 * Reads an optionally schema-qualified table name and resolves it.
 *
 * @return the resolved table or view
 */
private Table readTableOrView() {
    String name = readIdentifierWithSchema(null);
    return readTableOrView(name);
}
/**
 * Resolves a table or view by name. When a schema prefix was read, only
 * that schema is searched; otherwise the current schema and then the
 * session's schema search path are tried.
 *
 * @param tableName the table name
 * @return the resolved table or view
 * @throws DbException with TABLE_OR_VIEW_NOT_FOUND_1 if nothing matches
 */
private Table readTableOrView(String tableName) {
    if (schemaName != null) {
        // Explicit schema: resolve there only
        Table found = getSchema().resolveTableOrView(session, tableName);
        if (found != null) {
            return found;
        }
    } else {
        // Current schema first
        Table found = database.getSchema(session.getCurrentSchemaName())
                .resolveTableOrView(session, tableName);
        if (found != null) {
            return found;
        }
        // Then the session's schema search path, in order
        String[] searchPath = session.getSchemaSearchPath();
        if (searchPath != null) {
            for (String candidate : searchPath) {
                found = database.getSchema(candidate)
                        .resolveTableOrView(session, tableName);
                if (found != null) {
                    return found;
                }
            }
        }
    }
    // DUAL / SYSDUMMY1 compatibility tables resolve to the built-in dual
    if (isDualTable(tableName)) {
        return getDualTable(false);
    }
    throw DbException.get(ErrorCode.TABLE_OR_VIEW_NOT_FOUND_1, tableName);
}
/**
 * Looks up a function alias in the given schema, falling back to the
 * session's schema search path.
 *
 * @param schema the schema to search first
 * @param aliasName the alias name
 * @return the alias, or null if not found anywhere
 */
private FunctionAlias findFunctionAlias(String schema, String aliasName) {
    FunctionAlias alias = database.getSchema(schema).findFunction(aliasName);
    if (alias == null) {
        String[] searchPath = session.getSchemaSearchPath();
        if (searchPath != null) {
            for (String candidate : searchPath) {
                alias = database.getSchema(candidate).findFunction(aliasName);
                if (alias != null) {
                    break;
                }
            }
        }
    }
    return alias;
}
/**
 * Looks up a sequence in the given schema, falling back to the session's
 * schema search path.
 *
 * @param schema the schema to search first
 * @param sequenceName the sequence name
 * @return the sequence, or null if not found anywhere
 */
private Sequence findSequence(String schema, String sequenceName) {
    Sequence found = database.getSchema(schema).findSequence(sequenceName);
    if (found == null) {
        String[] searchPath = session.getSchemaSearchPath();
        if (searchPath != null) {
            for (String candidate : searchPath) {
                found = database.getSchema(candidate).findSequence(sequenceName);
                if (found != null) {
                    break;
                }
            }
        }
    }
    return found;
}
/**
 * Reads an optionally schema-qualified sequence name and resolves it,
 * mirroring the resolution order of readTableOrView.
 *
 * @return the resolved sequence
 * @throws DbException with SEQUENCE_NOT_FOUND_1 if nothing matches
 */
private Sequence readSequence() {
    // same algorithm as readTableOrView
    String sequenceName = readIdentifierWithSchema(null);
    if (schemaName != null) {
        // Explicit schema: resolve there only
        return getSchema().getSequence(sequenceName);
    }
    Sequence sequence = findSequence(session.getCurrentSchemaName(),
            sequenceName);
    if (sequence == null) {
        throw DbException.get(ErrorCode.SEQUENCE_NOT_FOUND_1, sequenceName);
    }
    return sequence;
}
private Prepared parseAlterTable() {
boolean ifTableExists = readIfExists(false);
String tableName = readIdentifierWithSchema();
Schema schema = getSchema();
if (readIf("ADD")) {
Prepared command = parseAlterTableAddConstraintIf(tableName,
schema, ifTableExists);
if (command != null) {
return command;
}
return parseAlterTableAddColumn(tableName, schema, ifTableExists);
} else if (readIf("SET")) {
read("REFERENTIAL_INTEGRITY");
int type = CommandInterface.ALTER_TABLE_SET_REFERENTIAL_INTEGRITY;
boolean value = readBooleanSetting();
AlterTableSet command = new AlterTableSet(session,
schema, type, value);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
if (readIf(CHECK)) {
command.setCheckExisting(true);
} else if (readIf("NOCHECK")) {
command.setCheckExisting(false);
}
return command;
} else if (readIf("RENAME")) {
if (readIf("COLUMN")) {
// PostgreSQL syntax
String columnName = readColumnIdentifier();
read("TO");
AlterTableRenameColumn command = new AlterTableRenameColumn(
session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setOldColumnName(columnName);
String newName = readColumnIdentifier();
command.setNewColumnName(newName);
return command;
} else if (readIf(CONSTRAINT)) {
String constraintName = readIdentifierWithSchema(schema.getName());
checkSchema(schema);
read("TO");
AlterTableRenameConstraint command = new AlterTableRenameConstraint(
session, schema);
command.setConstraintName(constraintName);
String newName = readColumnIdentifier();
command.setNewConstraintName(newName);
return commandIfTableExists(schema, tableName, ifTableExists, command);
} else {
read("TO");
String newName = readIdentifierWithSchema(schema.getName());
checkSchema(schema);
AlterTableRename command = new AlterTableRename(session,
getSchema());
command.setOldTableName(tableName);
command.setNewTableName(newName);
command.setIfTableExists(ifTableExists);
command.setHidden(readIf("HIDDEN"));
return command;
}
} else if (readIf("DROP")) {
if (readIf(CONSTRAINT)) {
boolean ifExists = readIfExists(false);
String constraintName = readIdentifierWithSchema(schema.getName());
ifExists = readIfExists(ifExists);
checkSchema(schema);
AlterTableDropConstraint command = new AlterTableDropConstraint(
session, getSchema(), ifExists);
command.setConstraintName(constraintName);
return commandIfTableExists(schema, tableName, ifTableExists, command);
} else if (readIf(FOREIGN)) {
// MySQL compatibility
read("KEY");
String constraintName = readIdentifierWithSchema(schema.getName());
checkSchema(schema);
AlterTableDropConstraint command = new AlterTableDropConstraint(
session, getSchema(), false);
command.setConstraintName(constraintName);
return commandIfTableExists(schema, tableName, ifTableExists, command);
} else if (readIf("INDEX")) {
// MySQL compatibility
String indexOrConstraintName = readIdentifierWithSchema(schema.getName());
final SchemaCommand command;
if (schema.findIndex(session, indexOrConstraintName) != null) {
DropIndex dropIndexCommand = new DropIndex(session, getSchema());
dropIndexCommand.setIndexName(indexOrConstraintName);
command = dropIndexCommand;
} else {
AlterTableDropConstraint dropCommand = new AlterTableDropConstraint(
session, getSchema(), false/*ifExists*/);
dropCommand.setConstraintName(indexOrConstraintName);
command = dropCommand;
}
return commandIfTableExists(schema, tableName, ifTableExists, command);
} else if (readIf(PRIMARY)) {
read("KEY");
Table table = tableIfTableExists(schema, tableName, ifTableExists);
if (table == null) {
return new NoOperation(session);
}
Index idx = table.getPrimaryKey();
DropIndex command = new DropIndex(session, schema);
command.setIndexName(idx.getName());
return command;
} else {
readIf("COLUMN");
boolean ifExists = readIfExists(false);
ArrayList<Column> columnsToRemove = new ArrayList<>();
Table table = tableIfTableExists(schema, tableName, ifTableExists);
// For Oracle compatibility - open bracket required
boolean openingBracketDetected = readIf(OPEN_PAREN);
do {
String columnName = readColumnIdentifier();
if (table != null) {
if (!ifExists || table.doesColumnExist(columnName)) {
Column column = table.getColumn(columnName);
columnsToRemove.add(column);
}
}
} while (readIf(COMMA));
if (openingBracketDetected) {
// For Oracle compatibility - close bracket
read(CLOSE_PAREN);
}
if (table == null || columnsToRemove.isEmpty()) {
return new NoOperation(session);
}
AlterTableAlterColumn command = new AlterTableAlterColumn(session, schema);
command.setType(CommandInterface.ALTER_TABLE_DROP_COLUMN);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setColumnsToRemove(columnsToRemove);
return command;
}
} else if (readIf("CHANGE")) {
// MySQL compatibility
readIf("COLUMN");
String columnName = readColumnIdentifier();
String newColumnName = readColumnIdentifier();
Column column = columnIfTableExists(schema, tableName, columnName, ifTableExists);
boolean nullable = column == null ? true : column.isNullable();
// new column type ignored. RENAME and MODIFY are
// a single command in MySQL but two different commands in H2.
parseColumnForTable(newColumnName, nullable, true);
AlterTableRenameColumn command = new AlterTableRenameColumn(session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setOldColumnName(columnName);
command.setNewColumnName(newColumnName);
return command;
} else if (readIf("MODIFY")) {
// MySQL compatibility (optional)
readIf("COLUMN");
// Oracle specifies (but will not require) an opening parenthesis
boolean hasOpeningBracket = readIf(OPEN_PAREN);
String columnName = readColumnIdentifier();
AlterTableAlterColumn command;
NullConstraintType nullConstraint = parseNotNullConstraint();
switch (nullConstraint) {
case NULL_IS_ALLOWED:
case NULL_IS_NOT_ALLOWED:
command = new AlterTableAlterColumn(session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
Column column = columnIfTableExists(schema, tableName, columnName, ifTableExists);
command.setOldColumn(column);
if (nullConstraint == NullConstraintType.NULL_IS_ALLOWED) {
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_DROP_NOT_NULL);
} else {
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_NOT_NULL);
}
break;
case NO_NULL_CONSTRAINT_FOUND:
command = parseAlterTableAlterColumnType(schema, tableName, columnName, ifTableExists);
break;
default:
throw DbException.get(ErrorCode.UNKNOWN_MODE_1,
"Internal Error - unhandled case: " + nullConstraint.name());
}
if(hasOpeningBracket) {
read(CLOSE_PAREN);
}
return command;
} else if (readIf("ALTER")) {
readIf("COLUMN");
String columnName = readColumnIdentifier();
Column column = columnIfTableExists(schema, tableName, columnName, ifTableExists);
if (readIf("RENAME")) {
read("TO");
AlterTableRenameColumn command = new AlterTableRenameColumn(
session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setOldColumnName(columnName);
String newName = readColumnIdentifier();
command.setNewColumnName(newName);
return command;
} else if (readIf("DROP")) {
if (readIf("DEFAULT")) {
AlterTableAlterColumn command = new AlterTableAlterColumn(session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setOldColumn(column);
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_DEFAULT);
command.setDefaultExpression(null);
return command;
}
if (readIf(ON)) {
read("UPDATE");
AlterTableAlterColumn command = new AlterTableAlterColumn(session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setOldColumn(column);
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_ON_UPDATE);
command.setDefaultExpression(null);
return command;
}
read(NOT);
read(NULL);
AlterTableAlterColumn command = new AlterTableAlterColumn(
session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setOldColumn(column);
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_DROP_NOT_NULL);
return command;
} else if (readIf("TYPE")) {
// PostgreSQL compatibility
return parseAlterTableAlterColumnDataType(schema, tableName, columnName, ifTableExists);
} else if (readIf("SET")) {
if (readIf("DATA")) {
read("TYPE");
return parseAlterTableAlterColumnDataType(schema, tableName, columnName, ifTableExists);
}
AlterTableAlterColumn command = new AlterTableAlterColumn(
session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setOldColumn(column);
NullConstraintType nullConstraint = parseNotNullConstraint();
switch (nullConstraint) {
case NULL_IS_ALLOWED:
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_DROP_NOT_NULL);
break;
case NULL_IS_NOT_ALLOWED:
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_NOT_NULL);
break;
case NO_NULL_CONSTRAINT_FOUND:
if (readIf("DEFAULT")) {
Expression defaultExpression = readExpression();
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_DEFAULT);
command.setDefaultExpression(defaultExpression);
} else if (readIf(ON)) {
read("UPDATE");
Expression onUpdateExpression = readExpression();
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_ON_UPDATE);
command.setDefaultExpression(onUpdateExpression);
} else if (readIf("INVISIBLE")) {
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_VISIBILITY);
command.setVisible(false);
} else if (readIf("VISIBLE")) {
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_VISIBILITY);
command.setVisible(true);
}
break;
default:
throw DbException.get(ErrorCode.UNKNOWN_MODE_1,
"Internal Error - unhandled case: " + nullConstraint.name());
}
return command;
} else if (readIf("RESTART")) {
readIf(WITH);
Expression start = readExpression();
AlterSequence command = new AlterSequence(session, schema);
command.setColumn(column);
SequenceOptions options = new SequenceOptions();
options.setStartValue(start);
command.setOptions(options);
return commandIfTableExists(schema, tableName, ifTableExists, command);
} else if (readIf("SELECTIVITY")) {
AlterTableAlterColumn command = new AlterTableAlterColumn(
session, schema);
command.setTableName(tableName);
command.setIfTableExists(ifTableExists);
command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_SELECTIVITY);
command.setOldColumn(column);
command.setSelectivity(readExpression());
return command;
} else {
return parseAlterTableAlterColumnType(schema, tableName, columnName, ifTableExists);
}
}
throw getSyntaxError();
}
/**
 * Resolves a table or view by name in the given schema.
 *
 * @param schema the schema to search
 * @param tableName the table or view name
 * @param ifTableExists whether a missing table is tolerated
 * @return the table, or {@code null} when it is missing and
 *         {@code ifTableExists} is set
 * @throws DbException if the table is missing and {@code ifTableExists}
 *         is {@code false}
 */
private Table tableIfTableExists(Schema schema, String tableName, boolean ifTableExists) {
    Table resolved = schema.resolveTableOrView(session, tableName);
    if (resolved != null || ifTableExists) {
        return resolved;
    }
    throw DbException.get(ErrorCode.TABLE_OR_VIEW_NOT_FOUND_1, tableName);
}
/**
 * Resolves a column of the named table, or {@code null} when the table
 * does not exist and {@code ifTableExists} is set.
 */
private Column columnIfTableExists(Schema schema, String tableName,
        String columnName, boolean ifTableExists) {
    Table table = tableIfTableExists(schema, tableName, ifTableExists);
    if (table == null) {
        return null;
    }
    return table.getColumn(columnName);
}
/**
 * Returns the given command when the table exists, otherwise a no-op
 * (the table may legitimately be absent when {@code ifTableExists} is set).
 */
private Prepared commandIfTableExists(Schema schema, String tableName,
        boolean ifTableExists, Prepared commandIfTableExists) {
    if (tableIfTableExists(schema, tableName, ifTableExists) != null) {
        return commandIfTableExists;
    }
    return new NoOperation(session);
}
/**
 * Parses a full column re-definition for
 * {@code ALTER TABLE ... ALTER COLUMN <name> <new definition>}.
 * An optional trailing CHECK constraint is attached to the new column.
 *
 * @param schema the schema of the table
 * @param tableName the table name
 * @param columnName the name of the column being changed
 * @param ifTableExists whether a missing table is tolerated
 * @return the prepared change-type command
 */
private AlterTableAlterColumn parseAlterTableAlterColumnType(Schema schema,
        String tableName, String columnName, boolean ifTableExists) {
    // May be null when the table is absent and ifTableExists is set
    Column oldColumn = columnIfTableExists(schema, tableName, columnName, ifTableExists);
    // Carry the old column's nullability over as the default for the new one
    Column newColumn = parseColumnForTable(columnName,
            oldColumn == null ? true : oldColumn.isNullable(), true);
    if (readIf(CHECK)) {
        // Optional column-level CHECK constraint on the new definition
        Expression expr = readExpression();
        newColumn.addCheckConstraint(session, expr);
    }
    AlterTableAlterColumn command = new AlterTableAlterColumn(session, schema);
    command.setTableName(tableName);
    command.setIfTableExists(ifTableExists);
    command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_CHANGE_TYPE);
    command.setOldColumn(oldColumn);
    command.setNewColumn(newColumn);
    return command;
}
/**
 * Parses a new data type for a column (used for the PostgreSQL-style
 * {@code ALTER COLUMN ... TYPE} and {@code SET DATA TYPE} forms).
 * Unlike a full re-definition, the old column's other attributes
 * (nullability, visibility, default, ON UPDATE expression, check
 * constraint and comment) are copied onto the new column.
 *
 * @param schema the schema of the table
 * @param tableName the table name
 * @param columnName the name of the column being changed
 * @param ifTableExists whether a missing table is tolerated
 * @return the prepared change-type command
 */
private AlterTableAlterColumn parseAlterTableAlterColumnDataType(Schema schema,
        String tableName, String columnName, boolean ifTableExists) {
    Column oldColumn = columnIfTableExists(schema, tableName, columnName, ifTableExists);
    Column newColumn = parseColumnWithType(columnName, true);
    if (oldColumn != null) {
        // Preserve everything except the data type
        if (!oldColumn.isNullable()) {
            newColumn.setNullable(false);
        }
        if (!oldColumn.getVisible()) {
            newColumn.setVisible(false);
        }
        Expression e = oldColumn.getDefaultExpression();
        if (e != null) {
            newColumn.setDefaultExpression(session, e);
        }
        e = oldColumn.getOnUpdateExpression();
        if (e != null) {
            newColumn.setOnUpdateExpression(session, e);
        }
        e = oldColumn.getCheckConstraint(session, columnName);
        if (e != null) {
            newColumn.addCheckConstraint(session, e);
        }
        String c = oldColumn.getComment();
        if (c != null) {
            newColumn.setComment(c);
        }
    }
    AlterTableAlterColumn command = new AlterTableAlterColumn(session, schema);
    command.setTableName(tableName);
    command.setIfTableExists(ifTableExists);
    command.setType(CommandInterface.ALTER_TABLE_ALTER_COLUMN_CHANGE_TYPE);
    command.setOldColumn(oldColumn);
    command.setNewColumn(newColumn);
    return command;
}
/**
 * Parses {@code ALTER TABLE ... ADD [COLUMN]}. Supports a parenthesized
 * list of column definitions or a single definition, plus the
 * MySQL-style BEFORE / AFTER / FIRST placement clauses.
 *
 * @param tableName the table name
 * @param schema the schema of the table
 * @param ifTableExists whether a missing table is tolerated
 * @return the prepared add-column command
 */
private AlterTableAlterColumn parseAlterTableAddColumn(String tableName,
        Schema schema, boolean ifTableExists) {
    readIf("COLUMN");
    AlterTableAlterColumn command = new AlterTableAlterColumn(session,
            schema);
    command.setType(CommandInterface.ALTER_TABLE_ADD_COLUMN);
    command.setTableName(tableName);
    command.setIfTableExists(ifTableExists);
    if (readIf(OPEN_PAREN)) {
        // ADD (col1 ..., col2 ...): IF NOT EXISTS is not allowed here
        command.setIfNotExists(false);
        do {
            parseTableColumnDefinition(command, schema, tableName, false);
        } while (readIfMore(true));
    } else {
        // Single column form, optionally with IF NOT EXISTS
        boolean ifNotExists = readIfNotExists();
        command.setIfNotExists(ifNotExists);
        parseTableColumnDefinition(command, schema, tableName, false);
    }
    // Optional placement of the added column(s)
    if (readIf("BEFORE")) {
        command.setAddBefore(readColumnIdentifier());
    } else if (readIf("AFTER")) {
        command.setAddAfter(readColumnIdentifier());
    } else if (readIf("FIRST")) {
        command.setAddFirst();
    }
    return command;
}
/**
 * Parses a referential action: CASCADE, RESTRICT, NO ACTION
 * (treated as RESTRICT), SET NULL or SET DEFAULT.
 *
 * @return the parsed action type
 */
private ConstraintActionType parseAction() {
    ConstraintActionType action = parseCascadeOrRestrict();
    if (action == null) {
        if (readIf("NO")) {
            read("ACTION");
            action = ConstraintActionType.RESTRICT;
        } else {
            read("SET");
            if (readIf(NULL)) {
                action = ConstraintActionType.SET_NULL;
            } else {
                read("DEFAULT");
                action = ConstraintActionType.SET_DEFAULT;
            }
        }
    }
    return action;
}
/**
 * Parses an optional CASCADE or RESTRICT keyword.
 *
 * @return the matching action type, or {@code null} if neither keyword
 *         is present (no tokens are consumed in that case)
 */
private ConstraintActionType parseCascadeOrRestrict() {
    if (readIf("CASCADE")) {
        return ConstraintActionType.CASCADE;
    }
    if (readIf("RESTRICT")) {
        return ConstraintActionType.RESTRICT;
    }
    return null;
}
/**
 * Parses a constraint or index definition if one starts at the current
 * position, as used by CREATE TABLE and ALTER TABLE ... ADD. Recognizes
 * [CONSTRAINT name] followed by PRIMARY KEY, CHECK, UNIQUE or FOREIGN
 * KEY, plus MySQL-style inline INDEX/KEY definitions and Apache Ignite
 * AFFINITY keys when the current compatibility mode allows them.
 *
 * @param tableName the table the definition applies to
 * @param schema the schema of the table
 * @param ifTableExists whether a missing table is tolerated
 * @return the parsed command, or {@code null} if no constraint or index
 *         definition is present here
 */
private DefineCommand parseAlterTableAddConstraintIf(String tableName,
        Schema schema, boolean ifTableExists) {
    String constraintName = null, comment = null;
    boolean ifNotExists = false;
    boolean allowIndexDefinition = database.getMode().indexDefinitionInCreateTable;
    boolean allowAffinityKey = database.getMode().allowAffinityKey;
    if (readIf(CONSTRAINT)) {
        ifNotExists = readIfNotExists();
        constraintName = readIdentifierWithSchema(schema.getName());
        checkSchema(schema);
        comment = readCommentIf();
        // a named constraint may also use the index definition syntax
        allowIndexDefinition = true;
    }
    if (readIf(PRIMARY)) {
        read("KEY");
        AlterTableAddConstraint command = new AlterTableAddConstraint(
                session, schema, ifNotExists);
        command.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_PRIMARY_KEY);
        command.setComment(comment);
        command.setConstraintName(constraintName);
        command.setTableName(tableName);
        command.setIfTableExists(ifTableExists);
        if (readIf("HASH")) {
            command.setPrimaryKeyHash(true);
        }
        read(OPEN_PAREN);
        command.setIndexColumns(parseIndexColumnList());
        if (readIf("INDEX")) {
            // an existing index backs the primary key
            String indexName = readIdentifierWithSchema();
            command.setIndex(getSchema().findIndex(session, indexName));
        }
        return command;
    } else if (allowIndexDefinition && (isToken("INDEX") || isToken("KEY"))) {
        // MySQL
        // need to read ahead, as it could be a column name
        int start = lastParseIndex;
        read();
        if (DataType.getTypeByName(currentToken, database.getMode()) != null) {
            // known data type: this was a column named INDEX/KEY, not an
            // index definition -- rewind and let the caller parse a column
            parseIndex = start;
            read();
            return null;
        }
        CreateIndex command = new CreateIndex(session, schema);
        command.setComment(comment);
        command.setTableName(tableName);
        command.setIfTableExists(ifTableExists);
        if (!readIf(OPEN_PAREN)) {
            // named index: INDEX name (cols)
            command.setIndexName(readUniqueIdentifier());
            read(OPEN_PAREN);
        }
        command.setIndexColumns(parseIndexColumnList());
        // MySQL compatibility
        if (readIf("USING")) {
            read("BTREE");
        }
        return command;
    } else if (allowAffinityKey && readIfAffinity()) {
        // Apache Ignite AFFINITY KEY (cols)
        read("KEY");
        read(OPEN_PAREN);
        CreateIndex command = createAffinityIndex(schema, tableName, parseIndexColumnList());
        command.setIfTableExists(ifTableExists);
        return command;
    }
    AlterTableAddConstraint command;
    if (readIf(CHECK)) {
        command = new AlterTableAddConstraint(session, schema, ifNotExists);
        command.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_CHECK);
        command.setCheckExpression(readExpression());
    } else if (readIf(UNIQUE)) {
        readIf("KEY");
        readIf("INDEX");
        command = new AlterTableAddConstraint(session, schema, ifNotExists);
        command.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_UNIQUE);
        if (!readIf(OPEN_PAREN)) {
            constraintName = readUniqueIdentifier();
            read(OPEN_PAREN);
        }
        command.setIndexColumns(parseIndexColumnList());
        if (readIf("INDEX")) {
            String indexName = readIdentifierWithSchema();
            command.setIndex(getSchema().findIndex(session, indexName));
        }
        // MySQL compatibility
        if (readIf("USING")) {
            read("BTREE");
        }
    } else if (readIf(FOREIGN)) {
        command = new AlterTableAddConstraint(session, schema, ifNotExists);
        command.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_REFERENTIAL);
        read("KEY");
        read(OPEN_PAREN);
        command.setIndexColumns(parseIndexColumnList());
        if (readIf("INDEX")) {
            String indexName = readIdentifierWithSchema();
            command.setIndex(schema.findIndex(session, indexName));
        }
        read("REFERENCES");
        parseReferences(command, schema, tableName);
    } else {
        if (constraintName != null) {
            // CONSTRAINT name was read but no constraint body followed
            throw getSyntaxError();
        }
        return null;
    }
    // NOCHECK skips checking existing rows against the new constraint
    if (readIf("NOCHECK")) {
        command.setCheckExisting(false);
    } else {
        readIf(CHECK);
        command.setCheckExisting(true);
    }
    command.setTableName(tableName);
    command.setIfTableExists(ifTableExists);
    command.setConstraintName(constraintName);
    command.setComment(comment);
    return command;
}
/**
 * Parses the REFERENCES part of a foreign key definition: the referenced
 * table (or the owning table for a self-reference) and columns, an
 * optional backing index, ON DELETE / ON UPDATE actions, and the
 * deferrability clause (parsed but otherwise ignored).
 *
 * @param command the foreign key command to populate
 * @param schema the schema of the referencing table
 * @param tableName the name of the referencing table
 */
private void parseReferences(AlterTableAddConstraint command,
        Schema schema, String tableName) {
    if (readIf(OPEN_PAREN)) {
        // Self-reference: REFERENCES (cols) refers to the same table
        command.setRefTableName(schema, tableName);
        command.setRefIndexColumns(parseIndexColumnList());
    } else {
        String refTableName = readIdentifierWithSchema(schema.getName());
        command.setRefTableName(getSchema(), refTableName);
        if (readIf(OPEN_PAREN)) {
            command.setRefIndexColumns(parseIndexColumnList());
        }
    }
    if (readIf("INDEX")) {
        // an existing index backing the referenced columns
        String indexName = readIdentifierWithSchema();
        command.setRefIndex(getSchema().findIndex(session, indexName));
    }
    // ON DELETE / ON UPDATE may appear in any order
    while (readIf(ON)) {
        if (readIf("DELETE")) {
            command.setDeleteAction(parseAction());
        } else {
            read("UPDATE");
            command.setUpdateAction(parseAction());
        }
    }
    // [NOT] DEFERRABLE is accepted for compatibility but has no effect
    if (readIf(NOT)) {
        read("DEFERRABLE");
    } else {
        readIf("DEFERRABLE");
    }
}
/**
 * Parses CREATE LINKED TABLE name(driver, url, user, password,
 * [originalSchema,] originalTable) with the optional trailing
 * EMIT UPDATES or READONLY clause.
 *
 * @param temp whether the linked table is temporary
 * @param globalTemp whether it is a global temporary table
 * @param force whether to create it even when the target is unreachable
 * @return the prepared command
 */
private CreateLinkedTable parseCreateLinkedTable(boolean temp,
        boolean globalTemp, boolean force) {
    read(TABLE);
    boolean ifNotExists = readIfNotExists();
    String tableName = readIdentifierWithSchema();
    CreateLinkedTable command = new CreateLinkedTable(session, getSchema());
    command.setTemporary(temp);
    command.setGlobalTemporary(globalTemp);
    command.setForce(force);
    command.setIfNotExists(ifNotExists);
    command.setTableName(tableName);
    command.setComment(readCommentIf());
    read(OPEN_PAREN);
    command.setDriver(readString());
    read(COMMA);
    command.setUrl(readString());
    read(COMMA);
    command.setUser(readString());
    read(COMMA);
    command.setPassword(readString());
    read(COMMA);
    String originalTable = readString();
    if (readIf(COMMA)) {
        // six-argument form: the fifth argument was the schema name
        command.setOriginalSchema(originalTable);
        originalTable = readString();
    }
    command.setOriginalTable(originalTable);
    read(CLOSE_PAREN);
    if (readIf("EMIT")) {
        read("UPDATES");
        command.setEmitUpdates(true);
    } else if (readIf("READONLY")) {
        command.setReadOnly(true);
    }
    return command;
}
/**
 * Parses the body of a CREATE TABLE statement: table name, column and
 * constraint definitions, and the various vendor compatibility clauses
 * (MySQL COMMENT / ENGINE / AUTO_INCREMENT / CHARSET / ROW_FORMAT,
 * temporary table ON COMMIT behavior, NOT PERSISTENT, HIDDEN, and
 * CREATE TABLE ... AS SELECT).
 *
 * @param temp whether this is a temporary table
 * @param globalTemp whether this is a global temporary table
 * @param persistIndexes whether indexes should be persisted
 * @return the prepared CreateTable command
 */
private CreateTable parseCreateTable(boolean temp, boolean globalTemp,
        boolean persistIndexes) {
    boolean ifNotExists = readIfNotExists();
    String tableName = readIdentifierWithSchema();
    if (temp && globalTemp && equalsToken("SESSION", schemaName)) {
        // support weird syntax: declare global temporary table session.xy
        // (...) not logged
        schemaName = session.getCurrentSchemaName();
        globalTemp = false;
    }
    Schema schema = getSchema();
    CreateTable command = new CreateTable(session, schema);
    command.setPersistIndexes(persistIndexes);
    command.setTemporary(temp);
    command.setGlobalTemporary(globalTemp);
    command.setIfNotExists(ifNotExists);
    command.setTableName(tableName);
    command.setComment(readCommentIf());
    if (readIf(OPEN_PAREN)) {
        // column and constraint definitions; "()" is an empty list
        if (!readIf(CLOSE_PAREN)) {
            do {
                parseTableColumnDefinition(command, schema, tableName, true);
            } while (readIfMore(false));
        }
    }
    // Allows "COMMENT='comment'" in DDL statements (MySQL syntax)
    if (readIf("COMMENT")) {
        if (readIf(EQUAL)) {
            // read the complete string comment, but nothing with it for now
            readString();
        }
    }
    if (readIf("ENGINE")) {
        if (readIf(EQUAL)) {
            // map MySQL engine types onto H2 behavior
            String tableEngine = readUniqueIdentifier();
            if ("InnoDb".equalsIgnoreCase(tableEngine)) {
                // ok
            } else if (!"MyISAM".equalsIgnoreCase(tableEngine)) {
                throw DbException.getUnsupportedException(tableEngine);
            }
        } else {
            // H2's own ENGINE clause (custom table engine class)
            command.setTableEngine(readUniqueIdentifier());
        }
    }
    if (readIf(WITH)) {
        command.setTableEngineParams(readTableEngineParams());
    }
    // MySQL compatibility
    if (readIf("AUTO_INCREMENT")) {
        read(EQUAL);
        // the start value must be an integer literal; it is read and ignored
        if (currentTokenType != VALUE ||
                currentValue.getValueType() != Value.INT) {
            throw DbException.getSyntaxError(sqlCommand, parseIndex,
                    "integer");
        }
        read();
    }
    readIf("DEFAULT");
    // MySQL compatibility: only the UTF8/UTF8MB4 charsets are accepted
    if (readIf("CHARSET")) {
        read(EQUAL);
        if (!readIf("UTF8")) {
            read("UTF8MB4");
        }
    }
    if (temp) {
        // temporary table options: ON COMMIT behavior and persistence
        if (readIf(ON)) {
            read("COMMIT");
            if (readIf("DROP")) {
                command.setOnCommitDrop();
            } else if (readIf("DELETE")) {
                read("ROWS");
                command.setOnCommitTruncate();
            }
        } else if (readIf(NOT)) {
            if (readIf("PERSISTENT")) {
                command.setPersistData(false);
            } else {
                read("LOGGED");
            }
        }
        if (readIf("TRANSACTIONAL")) {
            command.setTransactional(true);
        }
    } else if (!persistIndexes && readIf(NOT)) {
        read("PERSISTENT");
        command.setPersistData(false);
    }
    if (readIf("HIDDEN")) {
        command.setHidden(true);
    }
    if (readIf("AS")) {
        // CREATE TABLE ... AS SELECT ... [WITH [NO] DATA]
        if (readIf("SORTED")) {
            command.setSortedInsertMode(true);
        }
        command.setQuery(parseSelect());
        if (readIf(WITH)) {
            command.setWithNoData(readIf("NO"));
            read("DATA");
        }
    }
    // for MySQL compatibility
    if (readIf("ROW_FORMAT")) {
        if (readIf(EQUAL)) {
            // the row format name is read and ignored
            readColumnIdentifier();
        }
    }
    return command;
}
/**
 * Parses one element of a table definition: either a table-level
 * constraint / index definition, or a column definition together with
 * its inline constraints (PRIMARY KEY, AFFINITY KEY, UNIQUE, NOT NULL,
 * CHECK, REFERENCES).
 *
 * @param command the command to add the parsed column/constraints to
 * @param schema the schema of the table
 * @param tableName the name of the table
 * @param forCreateTable whether this is part of CREATE TABLE (as opposed
 *            to ALTER TABLE ... ADD COLUMN)
 */
private void parseTableColumnDefinition(CommandWithColumns command, Schema schema, String tableName,
        boolean forCreateTable) {
    // try a table-level constraint/index definition first
    DefineCommand c = parseAlterTableAddConstraintIf(tableName, schema, false);
    if (c != null) {
        command.addConstraintCommand(c);
    } else {
        String columnName = readColumnIdentifier();
        if (forCreateTable && (currentTokenType == COMMA || currentTokenType == CLOSE_PAREN)) {
            // column without a data type; type stays unknown for now
            command.addColumn(new Column(columnName, TypeInfo.TYPE_UNKNOWN));
            return;
        }
        Column column = parseColumnForTable(columnName, true, true);
        if (column.isAutoIncrement() && column.isPrimaryKey()) {
            // inline auto-increment primary key: convert the column flag
            // into an explicit PRIMARY KEY constraint command
            column.setPrimaryKey(false);
            IndexColumn[] cols = { new IndexColumn() };
            cols[0].columnName = column.getName();
            AlterTableAddConstraint pk = new AlterTableAddConstraint(
                    session, schema, false);
            pk.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_PRIMARY_KEY);
            pk.setTableName(tableName);
            pk.setIndexColumns(cols);
            command.addConstraintCommand(pk);
        }
        command.addColumn(column);
        String constraintName = null;
        if (readIf(CONSTRAINT)) {
            constraintName = readColumnIdentifier();
        }
        // For compatibility with Apache Ignite.
        boolean allowAffinityKey = database.getMode().allowAffinityKey;
        boolean affinity = allowAffinityKey && readIfAffinity();
        if (readIf(PRIMARY)) {
            read("KEY");
            boolean hash = readIf("HASH");
            IndexColumn[] cols = { new IndexColumn() };
            cols[0].columnName = column.getName();
            AlterTableAddConstraint pk = new AlterTableAddConstraint(
                    session, schema, false);
            pk.setConstraintName(constraintName);
            pk.setPrimaryKeyHash(hash);
            pk.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_PRIMARY_KEY);
            pk.setTableName(tableName);
            pk.setIndexColumns(cols);
            command.addConstraintCommand(pk);
            if (readIf("AUTO_INCREMENT")) {
                parseAutoIncrement(column);
            }
            // modes where IDENTITY after PRIMARY KEY means auto-increment
            if (database.getMode().useIdentityAsAutoIncrement) {
                if (readIf(NOT)) {
                    read(NULL);
                    column.setNullable(false);
                }
                if (readIf("IDENTITY")) {
                    parseAutoIncrement(column);
                }
            }
            if (affinity) {
                // AFFINITY before PRIMARY KEY: back it with an affinity index
                CreateIndex idx = createAffinityIndex(schema, tableName, cols);
                command.addConstraintCommand(idx);
            }
        } else if (affinity) {
            read("KEY");
            IndexColumn[] cols = { new IndexColumn() };
            cols[0].columnName = column.getName();
            CreateIndex idx = createAffinityIndex(schema, tableName, cols);
            command.addConstraintCommand(idx);
        } else if (readIf(UNIQUE)) {
            AlterTableAddConstraint unique = new AlterTableAddConstraint(
                    session, schema, false);
            unique.setConstraintName(constraintName);
            unique.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_UNIQUE);
            IndexColumn[] cols = { new IndexColumn() };
            cols[0].columnName = columnName;
            unique.setIndexColumns(cols);
            unique.setTableName(tableName);
            command.addConstraintCommand(unique);
        }
        if (NullConstraintType.NULL_IS_NOT_ALLOWED == parseNotNullConstraint()) {
            column.setNullable(false);
        }
        if (readIf(CHECK)) {
            // column-level CHECK constraint
            Expression expr = readExpression();
            column.addCheckConstraint(session, expr);
        }
        if (readIf("REFERENCES")) {
            // column-level foreign key
            AlterTableAddConstraint ref = new AlterTableAddConstraint(
                    session, schema, false);
            ref.setConstraintName(constraintName);
            ref.setType(CommandInterface.ALTER_TABLE_ADD_CONSTRAINT_REFERENTIAL);
            IndexColumn[] cols = { new IndexColumn() };
            cols[0].columnName = columnName;
            ref.setIndexColumns(cols);
            ref.setTableName(tableName);
            parseReferences(ref, schema, tableName);
            command.addConstraintCommand(ref);
        }
    }
}
/**
 * Enumeration describing the null constraint found (or not found) while
 * parsing a column definition: an explicit NULL, an explicit NOT NULL,
 * or no null constraint at all.
 */
private enum NullConstraintType {
    NULL_IS_ALLOWED, NULL_IS_NOT_ALLOWED, NO_NULL_CONSTRAINT_FOUND
}
/**
 * Parses an optional NULL / NOT NULL constraint. In Oracle mode the
 * ENABLE/DISABLE [NO]VALIDATE suffixes are also consumed and may relax
 * the constraint back to allowing NULLs.
 *
 * @return which null constraint (if any) was found
 */
private NullConstraintType parseNotNullConstraint() {
    NullConstraintType nullConstraint = NullConstraintType.NO_NULL_CONSTRAINT_FOUND;
    if (isToken(NOT) || isToken(NULL)) {
        if (readIf(NOT)) {
            read(NULL);
            nullConstraint = NullConstraintType.NULL_IS_NOT_ALLOWED;
        } else {
            read(NULL);
            nullConstraint = NullConstraintType.NULL_IS_ALLOWED;
        }
        if (database.getMode().getEnum() == ModeEnum.Oracle) {
            if (readIf("ENABLE")) {
                // Leave constraint 'as is'
                readIf("VALIDATE");
                // Turn off constraint, allow NULLs
                if (readIf("NOVALIDATE")) {
                    nullConstraint = NullConstraintType.NULL_IS_ALLOWED;
                }
            }
            // Turn off constraint, allow NULLs
            if (readIf("DISABLE")) {
                nullConstraint = NullConstraintType.NULL_IS_ALLOWED;
                // ignore validate
                readIf("VALIDATE");
                // ignore novalidate
                readIf("NOVALIDATE");
            }
        }
    }
    return nullConstraint;
}
/**
 * Parses CREATE [OR REPLACE] SYNONYM [IF NOT EXISTS] name FOR table.
 *
 * @param orReplace whether OR REPLACE was specified
 * @return the prepared command
 */
private CreateSynonym parseCreateSynonym(boolean orReplace) {
    boolean ifNotExists = readIfNotExists();
    String name = readIdentifierWithSchema();
    // schema the synonym itself lives in
    Schema synonymSchema = getSchema();
    read(FOR);
    String tableName = readIdentifierWithSchema();
    // schema of the synonym's target table
    Schema targetSchema = getSchema();
    CreateSynonym command = new CreateSynonym(session, synonymSchema);
    command.setName(name);
    command.setSynonymFor(tableName);
    command.setSynonymForSchema(targetSchema);
    command.setComment(readCommentIf());
    command.setIfNotExists(ifNotExists);
    command.setOrReplace(orReplace);
    return command;
}
/**
 * Builds a CREATE INDEX command flagged as an affinity index
 * (Apache Ignite compatibility).
 *
 * @param schema the schema of the table
 * @param tableName the table name
 * @param indexColumns the columns forming the affinity key
 * @return the prepared index command
 */
private CreateIndex createAffinityIndex(Schema schema, String tableName, IndexColumn[] indexColumns) {
    CreateIndex index = new CreateIndex(session, schema);
    index.setAffinity(true);
    index.setTableName(tableName);
    index.setIndexColumns(indexColumns);
    return index;
}
/**
 * Maps a parser token type to the corresponding {@link Comparison}
 * operator constant.
 *
 * @param tokenType the token type to map
 * @return the Comparison constant, or -1 if the token is not a
 *         comparison operator
 */
private static int getCompareType(int tokenType) {
    switch (tokenType) {
    case NOT_EQUAL:
        return Comparison.NOT_EQUAL;
    case EQUAL:
        return Comparison.EQUAL;
    case SMALLER:
        return Comparison.SMALLER;
    case SMALLER_EQUAL:
        return Comparison.SMALLER_EQUAL;
    case BIGGER:
        return Comparison.BIGGER;
    case BIGGER_EQUAL:
        return Comparison.BIGGER_EQUAL;
    case SPATIAL_INTERSECTS:
        return Comparison.SPATIAL_INTERSECTS;
    default:
        return -1;
    }
}
/**
 * Add double quotes around an identifier if required.
 *
 * @param s the identifier, may be null
 * @param alwaysQuote whether to quote even simple identifiers
 * @return the identifier, quoted when necessary; {@code ""} for null
 */
public static String quoteIdentifier(String s, boolean alwaysQuote) {
    if (s == null) {
        return "\"\"";
    }
    if (alwaysQuote || !ParserUtil.isSimpleIdentifier(s, false, false)) {
        return StringUtils.quoteIdentifier(s);
    }
    return s;
}
/**
 * Add double quotes around an identifier if required and append it to
 * the specified string builder.
 *
 * @param builder the string builder to append to
 * @param s the identifier, may be null
 * @param alwaysQuote whether to quote even simple identifiers
 * @return the specified builder
 */
public static StringBuilder quoteIdentifier(StringBuilder builder, String s, boolean alwaysQuote) {
    if (s == null) {
        return builder.append("\"\"");
    }
    if (alwaysQuote || !ParserUtil.isSimpleIdentifier(s, false, false)) {
        return StringUtils.quoteIdentifier(builder, s);
    }
    return builder.append(s);
}
/**
 * Sets whether literals in the statement have already been checked.
 */
public void setLiteralsChecked(boolean literalsChecked) {
    this.literalsChecked = literalsChecked;
}
/**
 * Sets whether access rights have already been checked.
 */
public void setRightsChecked(boolean rightsChecked) {
    this.rightsChecked = rightsChecked;
}
/**
 * Sets an externally supplied parameter list to use while parsing.
 */
public void setSuppliedParameterList(ArrayList<Parameter> suppliedParameterList) {
    this.suppliedParameterList = suppliedParameterList;
}
/**
 * Parse a SQL code snippet that represents an expression.
 *
 * @param sql the code snippet
 * @return the expression object
 */
public Expression parseExpression(String sql) {
    parameters = Utils.newSmallArrayList();
    initialize(sql);
    read(); // advance to the first token before parsing
    return readExpression();
}
/**
 * Parse a SQL code snippet that represents a table name.
 *
 * @param sql the code snippet
 * @return the table object
 */
public Table parseTableName(String sql) {
    parameters = Utils.newSmallArrayList();
    initialize(sql);
    read(); // advance to the first token before parsing
    return readTableOrView();
}
/**
 * Returns the SQL command with an asterisk marking the current parse
 * position (useful in error messages and debugging).
 */
@Override
public String toString() {
    return StringUtils.addAsterisk(sqlCommand, parseIndex);
}
} |
MEXICO CITY, March 25 -- Secretary of State Hillary Rodham Clinton traveled to Mexico on Wednesday with a blunt mea culpa, saying that decades of U.S. anti-narcotics policies have been a failure and have contributed to the explosion of drug violence south of the border.
"Clearly what we've been doing has not worked," Clinton told reporters on her plane at the start of her two-day trip, saying that U.S. policies on curbing drug use, narcotics shipments and the flow of guns have been ineffective.
"Our insatiable demand for illegal drugs fuels the drug trade," she added. "Our inability to prevent weapons from being illegally smuggled across the border to arm these criminals causes the deaths of police, of soldiers and civilians."
Clinton appeared to go further than any senior government official in recent years in accepting a U.S. role in the long-contentious issue of the Latin American narcotics trade. In the past, U.S. politicians have accused Mexico, the main gateway for cocaine, heroin and other drugs entering the United States, of not doing enough. But two years ago, President Felipe Calderón unleashed the Mexican military on traffickers, a move that has contributed to an explosion of violence by drug gangs. More than 7,000 Mexicans have been killed in the bloodletting since January 2008, with the gangs battling authorities and one another for supremacy.
Mexicans, sensitive to slights from their richer northern neighbor, have reacted with outrage in recent weeks as the U.S. Joint Forces Command and some senior U.S. officials have suggested that the drug problem is so severe that Mexico is losing control of parts of its territory.
Clinton sought to soothe the wounded feelings, praising Calderón's "courage" and announcing that the Obama administration is seeking $66 million in new funding for extra helicopters for the Mexican police. She also pledged further unspecified steps to block the movement of guns southward, and acknowledged that proceeds from drugs sold in the United States -- an estimated $15 billion to $25 billion a year -- support Mexican drug gangs.
Clinton's comments came at the start of a U.S. blitz to emphasize support for Mexico's embattled government and improve relations with Latin America. The Obama administration announced Tuesday that it is sending hundreds more agents and extra high-tech gear to the border to intercept weapons and drug proceeds heading south. U.S. border states have become alarmed about a possible spillover of the drug violence, and Congress has held hearings on the drug war.
Clinton vowed to press for swift delivery of equipment promised under the Merida Initiative, a three-year $1.4 billion package of anti-drug assistance to Mexico and Central America. Mexican officials and U.S. lawmakers say there are long lag times for helicopters and other desperately needed gear. In addition, Mexicans complain that Congress has approved only $700 million of the $950 million that the Bush administration requested for the program since it began last year.
Clinton was greeted warmly by Mexican Foreign Minister Patricia Espinosa, who called her a "close friend" at a news conference. The Mexican official said that "we recognize very much these efforts that are now being undertaken by U.S. authorities" to combat the flow of guns and drug proceeds into Mexico.
But Mexican officials have indicated that they are hoping for more U.S. action. The Obama administration is trying to draw up a broader regional strategy on the drug problem to ensure that traffickers chased from one country do not simply move to another, aides said. One reason Mexico has emerged as a major drug hub is because the routes for trafficking Colombian cocaine have shifted away from the Caribbean islands.
Clinton's visit comes as some prominent Latin Americans are urging the United States to reexamine its drug policies. Last month, former presidents of Brazil, Colombia and Mexico called on the United States in a report to consider legalizing marijuana use and focusing more on treatment for drug users. Obama has emphasized his support for expanded treatment facilities, although not for allowing marijuana use.
In addition to Clinton, Homeland Security Secretary Janet Napolitano and Attorney General Eric H. Holder Jr. plan to visit Mexico in the coming weeks, leading up to a mid-April trip by Obama. The president will then attend the Summit of the Americas, a gathering of the region's 34 democratically elected heads of state and government, on April 17 and 18 in Trinidad and Tobago. |
<filename>repl/repl.go<gh_stars>1-10
package repl
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
)
func Start(in io.Reader, out io.Writer, run func(string) (string, bool)) {
_, _ = fmt.Fprintf(out, "Welcome to the Crisp REPL!\n")
_, _ = fmt.Fprintf(out, "Type a few lines of code, then \"run\" to start it up.\n")
_, _ = fmt.Fprintf(out, "Type \"exit\" to exit the REPL.\n")
_, _ = fmt.Fprintf(out, "(Remember to use tabs for indentation!)\n")
scanner := bufio.NewScanner(in)
var lines []string
for {
var prompt string
if len(lines) < 1 {
prompt = "\n$"
} else {
prompt = strconv.Itoa(len(lines))
}
prompt += "➤ "
_, _ = fmt.Fprintf(out, prompt)
scanned := scanner.Scan()
if !scanned {
return
}
line := scanner.Text()
if line == "exit" {
return
}
if line != "run" {
lines = append(lines, line)
continue
}
code := strings.Join(lines, "\n")
lines = nil
output, ok := run(code)
if !ok {
_, _ = fmt.Fprintf(out, "Crisp encountered an error:\n")
}
_, _ = fmt.Fprintf(out, output+"\n")
}
}
|
/*
* project :BluecatFinance
* author : dluobida
* class : QuickIncomeListAdapter.java
* package : com.dluobida.bluecat.finance.modules.income.adapter.QuickIncomeListAdapter
* currentModifyTime : 2020-12-16 22:30:20
* lastModifyTime : 2020-12-16 22:30:20
* Copyright (c) 2020 dluobida .
*/
package com.dluobida.bluecat.finance.modules.income.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.dluobida.bluecat.finance.R;
import com.dluobida.bluecat.finance.core.db.table.ExpandData;
import com.dluobida.bluecat.finance.core.db.table.IncomeData;
import com.dluobida.bluecat.finance.utils.DateUtils;
import java.util.List;
/**
 * List adapter (BaseRecyclerViewAdapterHelper) that renders one
 * {@link IncomeData} record per row: money, category, date, account
 * and remark text fields.
 */
public class QuickIncomeListAdapter extends BaseQuickAdapter<IncomeData, BaseViewHolder> {

    /**
     * @param layoutResId layout resource inflated for each row
     * @param data initial list of income records; may be null
     */
    public QuickIncomeListAdapter(int layoutResId, @Nullable List<IncomeData> data) {
        super(layoutResId, data);
    }

    /**
     * Binds a single income record to its row views.
     * NOTE(review): assumes each getter returns displayable text
     * (CharSequence/String) -- confirm against IncomeData.
     */
    @Override
    protected void convert(BaseViewHolder helper, IncomeData item) {
        helper.setText(R.id.tv_money,item.getMoney());
        helper.setText(R.id.tv_catagroy,item.getCatagroy());
        helper.setText(R.id.tv_date, item.getDate());
        helper.setText(R.id.tv_account,item.getAccount());
        helper.setText(R.id.tv_remark,item.getRemark());
    }
}
|
def slice_selection_tool_gui(self, aoix1, aoiy1, aoix2, aoiy2, aoix3, aoiy3, aoix4, aoiy4, x1, y1, x2, y2):
    """Thin wrapper around the native slice-selection GUI call.

    Forwards the four AOI corner coordinates plus the current slice
    coordinates, writes the updated slice coordinates back into the
    ``.value`` attributes of ``x1``/``y1``/``x2``/``y2`` (ctypes-style
    out-parameters), and returns the native call's status code.
    """
    result = self._slice_selection_tool_gui(
        aoix1, aoiy1, aoix2, aoiy2,
        aoix3, aoiy3, aoix4, aoiy4,
        x1.value, y1.value, x2.value, y2.value)
    ret_val, x1.value, y1.value, x2.value, y2.value = result
    return ret_val
/*
 * For SH7705, we have our own implementation for ptep_get_and_clear
 * Copied from pg-sh4.c
 */
/*
 * Atomically-enough read-and-clear of a PTE: snapshot *ptep, clear the
 * entry, then drop the PG_mapped bookkeeping bit on the backing page when
 * no address_space still has it writably mapped.
 * Returns the old PTE value so callers can inspect what was unmapped.
 */
inline pte_t ptep_get_and_clear(struct mm_struct *mm, unsigned long addr, pte_t *ptep)
{
	pte_t pte = *ptep;	/* capture the old entry before it is cleared */
	pte_clear(mm, addr, ptep);
	if (!pte_not_present(pte)) {
		unsigned long pfn = pte_pfn(pte);
		if (pfn_valid(pfn)) {
			struct page *page = pfn_to_page(pfn);
			struct address_space *mapping = page_mapping(page);
			/*
			 * NOTE(review): presumably PG_mapped marks pages that may
			 * still be reachable through a writable shared mapping and
			 * thus need cache maintenance; it is only safe to clear when
			 * there is no mapping or no writable mappers — confirm
			 * against the SH cache-flush code.
			 */
			if (!mapping || !mapping_writably_mapped(mapping))
				__clear_bit(PG_mapped, &page->flags);
		}
	}
	return pte;
}
/**
 * Object dedicated to computing the fire convolution for a row of pixels.
 * Does not convert pixels to RGB palette values; only provides the final
 * convolved pixel array via {@code int[] pixelValues}.
 * <p>
 * For each x the four neighbours (row y+1 left/right, row y+2, row y+3,
 * all wrapping at the canvas edges) are summed, scaled by 128/513 using
 * shift arithmetic, and written both to {@code pixelValues[x]} and back
 * into {@code fire[y * canvasWidth + x]}.
 * @author Birdasaur
 */
public class FireConvolution {
    int y;             // row index this instance is responsible for
    int canvasHeight;
    int canvasWidth;
    public int [] pixelValues; // the results from the convolution

    /**
     * @param canvasHeight total rows in the fire buffer
     * @param canvasWidth  total columns in the fire buffer
     * @param y            the row this convolver computes
     */
    public FireConvolution(int canvasHeight, int canvasWidth, int y) {
        this.canvasHeight = canvasHeight;
        this.canvasWidth = canvasWidth;
        this.y = y;
        pixelValues = new int[canvasWidth];
    }

    /**
     * Convolves row {@code y} of the flat {@code fire} buffer
     * (length canvasHeight * canvasWidth, row-major).
     * Writes results into both {@code pixelValues} and the buffer itself.
     */
    public void convolve(int [] fire) {
        int row = y * canvasWidth;
        // All three source-row offsets are loop-invariant; the original code
        // recomputed the (y + 1) offset on every iteration and also took a
        // redundant x % canvasWidth (always == x since 0 <= x < canvasWidth).
        int rowBelow   = (y + 1) % canvasHeight * canvasWidth;
        int fireIndex1 = ((y + 2) % canvasHeight) * canvasWidth;
        int fireIndex2 = ((y + 3) % canvasHeight) * canvasWidth;
        for (int x = 0; x < canvasWidth; x++) {
            int shiftedValue = (
                (fire[rowBelow + ((x - 1 + canvasWidth) % canvasWidth)] // left neighbour, wrapped
                + fire[fireIndex1 + x]
                + fire[rowBelow + ((x + 1) % canvasWidth)]              // right neighbour, wrapped
                + fire[fireIndex2 + x])
                << 7); // multiply by constant 128
            // (v * 511) >> 18 approximates dividing the 128-scaled sum by 513.
            fire[row + x] = pixelValues[x] = ((shiftedValue << 9) - shiftedValue) >> 18;
        }
    }
}
use std::cmp::{max, min};
use super::parse::*;
use colored::*;
/// One aligned step in an edit script between two sequences.
#[derive(Debug)]
enum DiffElement<T> {
    Equal(T),        // identical item present in both sequences
    Insert1(T),      // item present only in the first sequence
    Insert2(T),      // item present only in the second sequence
    Different(T, T), // items aligned at the same position but not equal
}
/// Weighted edit distance between `text1` and `text2`.
///
/// `cmp` gives the substitution cost for a pair of items (0 when equal),
/// `icost` is the insertion/deletion cost. Returns the total distance and,
/// when `compute_back` is set, the edit script recovered by backtracking
/// through the dynamic-programming table (standard Wagner–Fischer).
fn edit_distance<T: PartialEq + Clone>(text1: &Vec<T>, text2: &Vec<T>, cmp: fn(&T, &T) -> u64, icost: u64, compute_back : bool) -> (u64, Option<Vec<DiffElement<T>>>) {
    // table[i][j] = distance between the first i items of text1 and the
    // first j items of text2.
    let mut table : Vec<Vec<u64>> = Vec::new();
    table.resize_with(text1.len() + 1, || {
        let mut v = Vec::new();
        v.resize(text2.len() + 1, 0);
        v
    });
    // Base cases: transforming a prefix into the empty sequence costs one
    // insertion/deletion per item.
    for i in 0..text1.len() {
        table[i+1][0] = icost * (1+i) as u64;
    }
    for j in 0..text2.len() {
        table[0][j+1] = icost * (1+j) as u64;
    }
    // Fill the table: delete, insert, or substitute — take the cheapest.
    for i in 1..(1+text1.len()) {
        for j in 1..(1+text2.len()) {
            table[i][j] = min(min(
                table[i-1][j] + icost,
                table[i][j-1] + icost),
                table[i-1][j-1] + cmp(&text1[i-1], &text2[j-1])
            )
        }
    }
    if ! compute_back {
        return (table[text1.len()][text2.len()], None);
    }
    // Backtrack from the bottom-right corner, preferring exact matches,
    // then substitutions, then deletions, then insertions.
    let mut walk = Vec::new();
    let mut p1 = text1.len();
    let mut p2 = text2.len();
    while p1 > 0 || p2 > 0 {
        let val = table[p1][p2];
        if p1 > 0 && p2 > 0 && val == table[p1-1][p2-1] && text1[p1-1] == text2[p2 - 1] {
            walk.push(DiffElement::Equal(text1[p1-1].clone()));
            p1 -= 1;
            p2 -= 1;
        } else if p1 > 0 && p2 > 0 && val == table[p1-1][p2-1] + cmp(&text1[p1-1], &text2[p2 - 1]) {
            walk.push(DiffElement::Different(text1[p1-1].clone(), text2[p2 - 1].clone()));
            p1 -= 1;
            p2 -= 1;
        } else if p1 > 0 && val == table[p1-1][p2] + icost {
            walk.push(DiffElement::Insert1(text1[p1-1].clone()));
            p1 -= 1;
        } else {
            // The only remaining explanation is an insertion from text2.
            assert!(val == table[p1][p2-1] + icost);
            walk.push(DiffElement::Insert2(text2[p2-1].clone()));
            p2 -= 1;
        }
    }
    // The walk was collected back-to-front; restore forward order.
    walk.reverse();
    (table[text1.len()][text2.len()], Some(walk))
}
/// Cheap pre-filter: two sentences are "too different" when fewer than half
/// of the smaller sentence's hashed words also occur in the other sentence
/// (judged via the `hash_words` bitset popcounts).
fn too_different(text1: &Sentence, text2: &Sentence) -> bool {
    let words1 = text1.hash_words.count_ones();
    let words2 = text2.hash_words.count_ones();
    let shared = (text1.hash_words & text2.hash_words).count_ones();
    let smaller = min(words1, words2);
    assert!(smaller >= shared);
    smaller - shared > smaller / 2
}
/// Word-level comparison of two sentences.
///
/// Fast paths: identical sentences cost 0 (script of all-Equal words);
/// sentences sharing too few words are treated as one wholesale
/// substitution costing the larger word count. Otherwise falls back to a
/// unit-cost word edit distance.
fn compare_sentences<'a>(text1: &'a Sentence, text2: &'a Sentence, compute_back : bool) -> (u64, Option<Vec<DiffElement<&'a str>>>) {
    if text1 == text2 {
        let script = text1
            .content
            .split_whitespace()
            .map(DiffElement::Equal)
            .collect();
        return (0, Some(script));
    }
    if too_different(text1, text2) {
        let cost: u64 = max(text1.n_words, text2.n_words).try_into().unwrap();
        return (cost,
                Some(vec![DiffElement::Different(&text1.content, &text2.content)]));
    }
    edit_distance(
        &text1.words(),
        &text2.words(),
        |a, b| u64::from(a != b),
        1,
        compute_back)
}
/// Prints a word-level edit script on one line: common words plain,
/// words only in text1 green, words only in text2 red, substitutions as a
/// fused green/red pair.
fn print_edit_script(es : &Vec<DiffElement<&str>>) {
    for element in es {
        match element {
            DiffElement::Equal(w) => print!("{} ", w),
            DiffElement::Insert1(w) => print!("{} ", w.green()),
            DiffElement::Insert2(w) => print!("{} ", w.red()),
            DiffElement::Different(w1, w2) => print!("{}{} ", w1.green(), w2.red()),
        }
    }
    println!();
}
/// Top-level diff driver: splits both texts into sentences, aligns them
/// with a sentence-level edit script, then prints added/removed sentences
/// as whole lines and a word-level diff for sentences that were modified.
pub fn compare(text1: String, text2: String) {
    let sentences1 : Vec<Sentence> = super::parse::sentences(text1);
    let sentences2 : Vec<Sentence> = super::parse::sentences(text2);
    // Insertion cost 4 at the sentence level discourages pairing sentences
    // that compare_sentences judges very different; the backtracked script
    // is always required here.
    let script = edit_distance(
        &sentences1,
        &sentences2,
        |a, b| compare_sentences(a, b, false).0,
        4,
        true).1.unwrap();
    for step in script {
        match step {
            DiffElement::Equal(_) => {}
            DiffElement::Insert1(s) => println!("+ {}", s.content.green()),
            DiffElement::Insert2(s) => println!("- {}", s.content.red()),
            DiffElement::Different(a, b) => {
                print_edit_script(&compare_sentences(&a, &b, true).1.unwrap());
            }
        }
    }
}
#[test]
fn test_compare_sentences() {
    // Helper: build a Sentence from a literal.
    let to_s = |s:&str| { Sentence::mk_sentence(s.to_string()) };
    // Identical sentences cost nothing.
    assert!(
        compare_sentences(&to_s("Hello world"), &to_s("Hello world"), false).0
            == 0);
    // One inserted word costs 1.
    assert!(
        compare_sentences(&to_s("Hello world"), &to_s("Hello cruel world"), false).0
            == 1);
    // One substitution plus one insertion costs 2.
    assert!(
        compare_sentences(&to_s("Hello world"), &to_s("Goodbye cruel world"), false).0
            == 2);
    // No words in common: too_different kicks in, cost = larger word count.
    assert!(
        compare_sentences(&to_s("Hello world"), &to_s("Goodbye mediocre Paris"), false).0
            == 3);
}
|
<filename>front-end/blog/src/router/index.ts
import { createRouter, createWebHistory, RouteRecordRaw } from 'vue-router'
import Cookies from 'js-cookie'
import { ElLoading } from 'element-plus';
import pubfunt from '/@/public';
import store from '/@/store'
// Route table for the blog single-page app.
// Layout: a single root route ('/') hosting two main areas —
//   * '/home/...'      public-facing blog pages (wrapped by Home.vue)
//   * '/backstage/...' admin pages (guarded by the login check below)
// plus a standalone '/login' page.
// NOTE(review): component paths mix the '/@/' alias and relative '../'
// imports — presumably equivalent via the bundler alias; confirm in the
// build config.
const routes: RouteRecordRaw[] = [
    {
        path: '/',
        name: 'app',
        component: () => import('/@/App.vue'),
        redirect: {
            name: 'home'
        },
        children: [
            // ---- public blog area ----
            {
                path: '/home',
                name: 'home',
                component: () => import('/@/views/Home.vue'),
                redirect: {
                    name: 'index'
                },
                children: [
                    {
                        path: '/home/index',
                        name: 'index',
                        component: () => import('../views/foreground/index.vue'),
                    },
                    {
                        path: '/home/article/:uuid',
                        name: 'article',
                        component: () => import('/@/views/foreground/article/index.vue'),
                    },
                    {
                        // Fallback page; unmatched paths are redirected here by
                        // the beforeEach guard.
                        path: '/404',
                        name: '404',
                        component: () => import('/@/views/notFind/index.vue'),
                    },
                    {
                        path: '/home/archiveArticles',
                        name: 'archiveArticles',
                        component: () => import('/@/views/foreground/archiveArticles/index.vue'),
                    },
                    {
                        path: '/home/friendlyLink',
                        name: 'friendlyLink',
                        component: () => import('/@/views/foreground/friendlyLink/index.vue'),
                    },
                    {
                        path: '/home/normalPage/:uuid',
                        name: 'normalPage',
                        component: () => import('/@/views/foreground/normalPage/index.vue'),
                    },
                ]
            },
            {
                path: '/login',
                name: 'login',
                component: () => import('/@/views/login/index.vue'),
            },
            // ---- admin (backstage) area; requires accessToken cookie ----
            {
                path: '/backstage',
                name: 'backstage',
                component: () => import('/@/views/backstage/index.vue'),
                redirect: {
                    name: 'outline'
                },
                children: [
                    {
                        path: '/backstage/outline',
                        name: 'outline',
                        component: () => import('../views/backstage/outline/index.vue'),
                    },
                    {
                        path: '/backstage/profile',
                        name: 'profile',
                        component: () => import('../views/backstage/profile/index.vue'),
                    },
                    {
                        path: '/backstage/writingArticles',
                        name: 'writingArticles',
                        component: () => import('../views/backstage/writingArticles/index.vue'),
                    },
                    {
                        path: '/backstage/basicSettings',
                        name: 'basicSettings',
                        component: () => import('../views/backstage/basicSettings/index.vue'),
                    },
                    {
                        path: '/backstage/articleList',
                        name: 'articleList',
                        component: () => import('../views/backstage/articleList/index.vue'),
                    },
                    {
                        path: '/backstage/commentList',
                        name: 'commentList',
                        component: () => import('../views/backstage/commentList/index.vue'),
                    },
                    {
                        path: '/backstage/pageList',
                        name: 'pageList',
                        component: () => import('../views/backstage/pageList/index.vue'),
                    },
                    {
                        path: '/backstage/categoryList',
                        name: 'categoryList',
                        component: () => import('../views/backstage/categoryList/index.vue'),
                    },
                    {
                        path: '/backstage/fileList',
                        name: 'fileList',
                        component: () => import('../views/backstage/fileList/index.vue'),
                    },
                    {
                        path: '/backstage/userList',
                        name: 'userList',
                        component: () => import('../views/backstage/userList/index.vue'),
                    },
                    {
                        path: '/backstage/linkList',
                        name: 'linkList',
                        component: () => import('../views/backstage/linkList/index.vue'),
                    },
                    {
                        path: '/backstage/createPage',
                        name: 'createPage',
                        component: () => import('../views/backstage/createPage/index.vue'),
                    },
                ]
            },
        ]
    },
];
// Application router: HTML5 history mode (no URL hash), routes defined above.
const router = createRouter({
    history: createWebHistory(),
    routes
});
// 路由守卫
router.beforeEach((to, from, next) => {
if (to.matched.length === 0) {// 一旦找不到链接即跳转404
next('/404');
} else {
const loading = ElLoading.service({
lock: true,
text: 'Loading',
spinner: 'el-icon-loading',
background: 'rgba(0, 0, 0, 0.7)',
});
if (/backstage/.test(to.path)) {// 检查是否登录
pubfunt.updateAccessToken();
if (Cookies.get('accessToken')) {
loading.close();
next();
console.log(to.path);
store.commit("backstage/setActiveIndex", to.path);
} else {
loading.close();
next('/login')
}
} else {
setTimeout(() => {
next();
}, 500);
loading.close();
}
}
})
export default router |
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <string>
#include <tr1/unordered_map>
using namespace std;
using namespace tr1;
//#define CUCKOO
// XXX: give a try to cuckoo hashing
// -----------------------------------------------
/*
// String encoding in a char[StrSize] with StrSize >= 2*sizeof(char*)
// if (strlen(s)<StrSize) => used as a regular string
// else we consider the memory as p:=char** and p[0]=StrTag, p[1]=pointer to actual string
// XXX: introduce sharing reference strings and possibly a big trie where to store them
// XXX: use a trie sharing strings and reference counting on them instead
// XXX: there is a BUG hampering strings performance
class str_pack {
private:
#define str_tag ((char*)0x000ff000)
#define str_size (2*sizeof(char*))
union {
char ds[str_size];
char* dp[2];
};
public:
str_pack() { dp[0]=0; }
str_pack(const char* str) { dp[0]=0; operator=(str); }
~str_pack() { if (dp[0]==str_tag) free(dp[1]); }
bool operator==(const str_pack &that) const { return strcmp(dp[0]==str_tag?dp[1]:ds,that.dp[0]==str_tag?that.dp[1]:that.ds)==0; }
void operator=(const str_pack &that) { memcpy(ds,that.ds,str_size); if (dp[0]==str_tag) dp[1]=strdup(dp[1]); }
void operator=(const char* str) {
size_t l=strlen(str);
if (dp[0]==str_tag) { if (!strcmp(dp[1],str)) return; free(dp[1]); }
if (l<str_size) { dp[0]=0; strlcpy(ds,str,str_size); }
else { dp[0]=str_tag; dp[1]=strdup(str); }
}
long hash() const {
long hash=0; const char* c=ds; if (dp[0]==str_tag) c=dp[1];
if (*c) do { hash ^= hash * 48611 + (long)c; } while(*++c); return hash;
}
const char* c_str() const { return dp[0]==str_tag ? dp[1] : ds; }
};
*/
// -----------------------------------------------
// To be provided externally
// Example composite key for the benchmark: two integers plus a string.
// NOTE: s3 is deliberately excluded from hash() (keys differing only in s3
// collide but are still distinguished by operator==, which is valid).
class Key {
public:
  long l1; long l2; string s3; //str_pack s3;
  Key() {}
  Key(long l1_, long l2_, char* s3_) { l1=l1_; l2=l2_; s3=s3_; }
  // BUG FIX: hash() and operator== are now const-qualified — Map::del takes
  // `const K&` and could not call the previous non-const versions; the
  // top-level `const` on the old return type was also meaningless.
  long hash() const { return l1 * 13834103L + l2 * 15485863L /*+ s3.hash()*/; }
  bool operator==(const Key &that) const { return l1==that.l1 && l2==that.l2 && s3==that.s3; }
};
// Value payload for the benchmark map: a plain pair of doubles.
class Val {
public:
  double d1; double d2;
  Val() {}
  Val(double d1_, double d2_) : d1(d1_), d2(d2_) {}
};
// -----------------------------------------------
// Compile-time assertion: declares an array with negative size when X is
// false, forcing a compile error.
#define ASSERT(X) extern int __assert__[-1+2*(X)];
ASSERT(sizeof(long)==8) // make sure long is 64 bits
// One stored (key, value) record. In the default (chained) configuration
// each record caches its key's hash and links to the next record in its
// bucket; the CUCKOO layout packs a validity bit with the hash instead.
template <class K, class V>
class Rec {
public:
  K key;
  V val;
#ifdef CUCKOO
  union {
    long head;
    struct {
      long valid : 1;  // slot occupancy flag
      long hash : 63;  // truncated cached hash
    };
  };
#else
  long hash;  // cached full hash of key (avoids rehashing on lookup/resize)
  Rec* next;  // next record in the same bucket chain
#endif
  Rec(K key_, V val_) { key=key_; val=val_; }
};
#define FactorGrow 0.75
#define FactorShrink 0.10
#define MapSizeMin 128
// Separately-chained hash map. Buckets hold singly-linked Rec chains; the
// table doubles at 75% load and halves at 10% load (never shrinking below
// MapSizeMin buckets, enforced by th_low == 0 at small sizes).
template <class K, class V>
class Map {
  typedef void(*fun)(K*,V*);
#ifdef CUCKOO
#else
  Rec<K,V>** data; // array of bucket-head pointers
#endif
  size_t alloc;    // allocated capacity (bucket count)
  size_t size;     // number of stored elements
  size_t th_up;    // grow if (size > th_up)
  size_t th_low;   // shrink if (size < th_low)
  // Recompute capacity and the grow/shrink thresholds for n buckets.
  inline void th(size_t n) { alloc=n; th_up=FactorGrow*n; th_low=FactorShrink*n; if (th_low <= MapSizeMin*FactorShrink) th_low=0; }
  // Delete every record in every chain (bucket array itself is kept).
  inline void release() { size_t i; for (i=0;i<alloc;++i) { Rec<K,V>* rec=data[i], *next; if (rec) do { next=rec->next; delete rec; } while((rec=next)); } }
  // Rehash every record into a fresh bucket array of the given capacity.
  inline void resize(size_t capacity) {
    Rec<K,V>** d = (Rec<K,V>**)malloc(capacity*sizeof(Rec<K,V>*)); memset(d,0,capacity*sizeof(Rec<K,V>*));
    size_t i; for (i=0;i<alloc;++i) {
      Rec<K,V>* rec=data[i], *next;
      if (rec) do {
        size_t b = rec->hash % capacity;  // relies on the cached hash
        next=rec->next; rec->next=d[b]; d[b]=rec;
      } while((rec=next));
    }
    free(data); data=d; th(capacity);
  }
public:
  Map(size_t capacity) {
    data = (Rec<K,V>**)malloc(capacity*sizeof(Rec<K,V>*));
    memset(data,0,capacity*sizeof(Rec<K,V>*));
    size = 0;  // BUG FIX: size was never initialized; only static-storage
               // instances happened to start zeroed.
    th(capacity);
  }
  ~Map() { release(); free(data); }
  void clear() { release(); memset(data,0,sizeof(Rec<K,V>*)*alloc); size=0; }
  // Apply f to every (key, value) pair. Callbacks must not insert new keys
  // (an insert could trigger resize() and invalidate the iteration).
  void foreach(fun f) const {
    size_t i; for (i=0;i<alloc;++i) { Rec<K,V>* rec=data[i];
      if (rec) do f(&(rec->key),&(rec->val)); while((rec=rec->next));
    }
  }
  const bool has(K &key) const { return get(key)!=NULL; }
  // Returns a pointer to the stored value, or NULL if the key is absent.
  const V* get(K &key) const {
    long h=key.hash(); size_t b=h%alloc; Rec<K,V>* rec=data[b];
    if (rec) do if (h==rec->hash && key==rec->key) { return &(rec->val); } while((rec=rec->next));
    return NULL;
  }
  // Insert key -> val, or overwrite the value of an existing key.
  void put(K &key, V &val) {
    long h=key.hash(); size_t b=h%alloc; Rec<K,V>* rec=data[b];
    // Update value in place when the key already exists.
    if (rec) do if (h==rec->hash && key==rec->key) {
      rec->val=val; return;
    } while((rec=rec->next));
    // Insert a new record at the head of its bucket chain.
    ++size; if (size>th_up) resize(alloc*2);
    rec = new Rec<K,V>(key,val);
    rec->hash=h; rec->next=data[b]; data[b]=rec;
  }
  // Remove the record for key, if present.
  void del(const K &key) {
    long h=key.hash(); size_t b=h%alloc;
    Rec<K,V> **ptr=(&data[b]),*rec=*ptr;
    if (rec) do {
      if (h==rec->hash && key==rec->key) {
        *ptr=rec->next;
        // BUG FIX: records are allocated with `new` in put(); the previous
        // code deleted the *addresses of the members* and then free()d the
        // node — both undefined behavior. A single `delete` runs the member
        // destructors and releases the node with the matching allocator.
        delete rec;
        --size; if (size<th_low) resize(alloc/2);
        return;
      }
      ptr=&(rec->next); rec=*ptr;
    } while(rec);
  }
};
// -----------------------------------------------------------------------------
// Benchmark fixture: one global map shared by main() and the foreach callback.
Map<Key,Val> map(128);
// foreach callback: rewrites each value in place (doubling d1, halving d2).
// NOTE(review): this calls put() during foreach(); it is only safe because
// every key already exists, so put() takes the update path and never
// resizes the table mid-iteration — confirm before reusing this pattern.
void for1(Key* k, Val* v) {
  //printf("%ld %ld %s -> %f %f\n",k->l1,k->l2,k->s3.c_str(),v->d1,v->d2);
  Val v2(v->d1*2.0,v->d2*0.5);
  map.put(*k, v2);
}
// Benchmark driver: 10 timed rounds of N sequential inserts followed by N
// random conditional inserts, with a full foreach sweep every 10th probe.
// NOTE(review): strlcpy is a BSD extension, not standard C/C++ — this file
// presumably targets a platform that provides it (macOS/BSD); confirm.
int main() {
  char str0[32]; strlcpy(str0,"Hello world",32);
  int z;
  for (z=0;z<10;++z) {
    srand(time(NULL));
    struct timeval t0,t1;
    gettimeofday (&t0,NULL);
#define N 10000
#define F 1.5
    long i,j,n=N;
    // Phase 1: insert N distinct keys.
    for (i=0;i<n;++i) {
      Key k(i,i+1,str0);
      Val v(i*3.0,i*4.0);
      map.put(k,v);
    }
    // Phase 2: N random probes over a 1.5x key range; insert misses, and
    // run a full value-rewriting sweep on every 10th key value.
    for (j=0;j<n;++j) {
      i=rand()%(long)(N*F);
      Key k(i,i+1,str0);
      Val v(i*1.0,i*1.0);
      if (!map.has(k)) map.put(k,v);
      if (i%10==0) map.foreach(for1);
    }
    gettimeofday (&t1,NULL);
    printf("Time: %0.6f\n",(t1.tv_sec-t0.tv_sec)+((double)(t1.tv_usec-t0.tv_usec))/1000000L);
    map.clear();
  }
  return 0;
}
|
/**
* Created by Dewei Xiang on 7/30/2017.
*/
/**
 * Value object describing a single account operation (e.g. a deposit,
 * withdrawal, or transfer between two accounts).
 */
public class AccountOperation {
    long accountnumber;
    long toAccountNumber; // presumably only meaningful for transfers; 0 otherwise
    double money;
    String operationType;

    /** No-arg constructor (kept for frameworks that require it). */
    public AccountOperation() {
    }

    /** Creates an operation that involves a single account. */
    public AccountOperation(long accountnumber, double money, String operationType) {
        this.accountnumber = accountnumber;
        this.money = money;
        this.operationType = operationType;
    }

    /** Creates a transfer-style operation with a destination account. */
    public AccountOperation(long accountnumber, long toAccountNumber, double money, String operationType) {
        this(accountnumber, money, operationType);
        this.toAccountNumber = toAccountNumber;
    }

    /** @return the source account number */
    public long getAccountnumber() {
        return accountnumber;
    }

    /** @return the destination account number (0 when not set) */
    public long getToAccountNumber() {
        return toAccountNumber;
    }

    /** @return the amount of money involved in the operation */
    public double getMoney() {
        return money;
    }

    /** @return the kind of operation performed */
    public String getOperationType() {
        return operationType;
    }
}
def _particle_filter_propose_and_update_log_weights_fn(
    observations,
    transition_fn,
    proposal_fn,
    observation_fn,
    num_transitions_per_observation=1):
  """Builds the per-step propose/reweight callback for a particle filter.

  The returned closure advances particles one step: it samples new particles
  from `proposal_fn` (importance-weighting against `transition_fn`) when a
  proposal is given, otherwise bootstrap-samples from the transition itself,
  then folds in the observation log-likelihood for the new step.

  Args:
    observations: observed series consumed by `_compute_observation_log_weights`.
    transition_fn: callable `(step, particles) -> transition distribution`.
    proposal_fn: optional callable `(step, particles) -> proposal
      distribution`; `None`/falsy selects the bootstrap path.
    observation_fn: callable mapping particles to an observation distribution.
    num_transitions_per_observation: number of latent transitions per
      observed timestep.

  Returns:
    A callable `(step, state, seed=None) -> WeightedParticles`.
  """
  def propose_and_update_log_weights_fn(step, state, seed=None):
    particles, log_weights = state.particles, state.log_weights
    transition_dist = transition_fn(step, particles)
    # Shape assertions guard against silently broadcasting a batch of
    # distributions against the particle weights.
    assertions = _assert_batch_shape_matches_weights(
        distribution=transition_dist,
        weights_shape=ps.shape(log_weights),
        diststr='transition')
    if proposal_fn:
      proposal_dist = proposal_fn(step, particles)
      assertions += _assert_batch_shape_matches_weights(
          distribution=proposal_dist,
          weights_shape=ps.shape(log_weights),
          diststr='proposal')
      proposed_particles = proposal_dist.sample(seed=seed)

      # Importance-sampling correction: reweight by transition/proposal
      # density ratio, then renormalize across particles (axis 0).
      log_weights += (transition_dist.log_prob(proposed_particles) -
                      proposal_dist.log_prob(proposed_particles))
      log_weights = tf.nn.log_softmax(log_weights, axis=0)
    else:
      # Bootstrap path: sampling from the transition itself leaves the
      # importance weights unchanged.
      proposed_particles = transition_dist.sample(seed=seed)

    with tf.control_dependencies(assertions):
      return smc_kernel.WeightedParticles(
          particles=proposed_particles,
          log_weights=log_weights + _compute_observation_log_weights(
              step + 1, proposed_particles, observations, observation_fn,
              num_transitions_per_observation=num_transitions_per_observation))
  return propose_and_update_log_weights_fn
// Bool is a str.ParseFunc that converts a string into a bool.
// Matching is case-insensitive; any input other than "true"/"false"
// reports failure via the second return value.
func Bool(s string) (interface{}, bool) {
	lowered := strings.ToLower(s)
	if lowered == "true" {
		return true, true
	}
	if lowered == "false" {
		return false, true
	}
	return nil, false
}
The 23-Year-Old Big Brother Winner on reality drama, breaking into Hollywood, and surviving Los Angeles!
Morgan Willett shows up to my West Hollywood apartment as I’m smoking a cigarette on the sidewalk. For about ten minutes we try to find parking in the most overpopulated neighborhood in the country. After a struggle, we eventually decide to have her park in my neighbor’s parking spot. Willett is arguably the luckiest person I know, so we risk the tow. She’s fearless, my muse, and by chance, my best friend.
Willett pops out of her vehicle, decked in The University of Texas at Austin memorabilia, in an oversized, men’s flannel shirt, yoga pants, and fuzzy sandals. Her hair is in a messy bun and she barely has makeup on. Without question, she’s the most stunning girl I’ve ever witnessed. She parodies one of those “Get You A Girl Who Can Do Both” memes. This isn’t her usual attire, but her comfort zone. We’ve been friends since she won Big Brother: Over the Top and she’s likely the person I talk to the most besides my mother. Before we sit down for this interview, I beg her to help me carry in a desk that I spray painted hours before. She helps with a smile and being realistic, if it wasn’t for her: that desk would’ve never made it up my stairs. Willett is frequently on social media exercising. She doesn’t do stunts, her results are genuine.
If you’re familiar with Willett, you already know that she hails from small-town Texas with traditional roots. Seemingly innocent, she now resides in the city of (falling) angels, Los Angeles. Her humble beginning has led to great success. Willett, time and time again, has needed no excuse to enjoy the fruits of her labor. She kicks back on my bed and manages to find a way to prop a recording device to be heard. Brains and beauty are a dangerous combination; she has them both. She cocks her head and asks me: “So, where do we start?”
Brought a lil bit of Bev Hills to San Diego ???? #basicbeach #impunnyiknow A post shared by Morgan Willett (@morganleighwillett) on Oct 8, 2017 at 5:48pm PDT
Mickey Keating: What happened after you won Big Brother? I want details, girl – tell me about the day after.
Morgan Willett: I stayed in California for a week with [my sister] Alex [Willett] and Shelby [Stockton] (her fellow houseguests). We wanted to explore Los Angeles and have fun. We had so many plans. We wanted to go to Vegas. We were finally free from CBS! But, it ended up not being like that. I remember we got out of the house and we were staying at an AirBnB. It was more weird for me [than them]. Alex and Shelby had been out of the house. They’ve experienced life outside [of Big Brother]. I remember calling my mom crying, she was like ‘What’s wrong? Why are you crying?’
MK: Your mom wasn’t at the finale?!
MW: No! She was. But, we wanted to hang out and explore California and catch up! I called her crying because of the weird sensation of being out of the house. I was telling her I was so overwhelmed by being surrounded by people, having freedom outside of a house. I wasn’t having panic attacks, but it was a sensory overload with… everything. You would think being out of the Big Brother house, you’d be excited to have your life back. I was! But, I got to the point where I had to have my mom tell me to relax and enjoy being [in the real world]. They say it takes 21 days to form a habit… I was [in Big Brother] for 65 days. It was all I knew. Then I’m [thrown] back into the real world dealing with cell phones and social media. It was incredibly overwhelming!
MK: Was your phone BLOWN up by that point?
MW: Yes. I got my phone back the day after I got out of the house, after interviews [and the like]. My phone was randomly still charged. I had so [many missed messages] upon getting out of the house, that my phone overloaded. I had people telling me they left me messages all the time, but I didn’t get any of them. It was people I haven’t heard from in years who were telling me they were watching.
MK: Coming out of the woodwork?
MW: Yes! It was weird seeing who reached out. It’s like, I haven’t talked to you since sophomore math class. Why are you [contacting] me now?
MK: Wait, you didn’t change your number?!
MW: No. [Laughs]
MK: You’re crazy!
MW: It was too difficult to change my number. Social media was weird. I went from thinking 200 likes on [Instagram] was cool, then I get out of the house and the first photo I posted gets over 3000 likes…why do they care?
MK: Are you the celebrity of Granbury, Texas?
MW: I don’t talk to many people from my hometown or even high school. I go home and see my mom and dad, but [that’s it]. With a small town, I wasn’t close with anyone. Everyone in my high school wanted to party. I had strict parents and I didn’t really want to do any of that. I had such a vision for myself outside of high school. I wanted to get out of Granbury. I knew I wanted to go to college, cheer… I mean, I skipped out on going to prom to go to a cheer competition. The majority of the people in my high school wanted to party and get drunk in a barn [Laughing]. So did I, but I couldn’t! I hated my parents at the time for it, but my mom would tell me, ‘You have a life outside of [Granbury]. Don’t screw it up by getting drunk and [doing] drugs.’ So many people from my hometown had reached out on a Facebook page. Someone even mentioned I was voted ‘Most Likely To Be Famous’ in high school. Guess that all came true, huh? I was like… ‘Thanks!’. But, I did get interviewed in a Granbury newspaper!
MK: Were you living at your parent’s home when you went onto Big Brother?
MW: I wasn’t. I knew I always wanted to be in Entertainment. I was doing local news as an Internship. But, I couldn’t do local news, it wasn’t for me.
“I quit my job on the spot…even though I haven’t even gotten on Big Brother yet.”
MK: Local news as like, a tractor tipped over?
MW: No! [Laughing] I took a job in Public Relations and I hated it. I was constantly filling out Google Docs, I was at a desk. It wasn’t for me. I wasn’t communicating with anyone. I left that job and cried every day for three weeks straight. I tried going through six months, but I physically couldn’t do it. I regretted it so much. I kept thinking I should’ve stuck with Journalism. [Why was I here?] I quit my job on the spot…even though I haven’t even gotten on Big Brother yet. [I was only auditioning.] I had to take a week off of work, and my job wouldn’t let me take a week off…because I had just started. So, I quit.
MK: Have you talked to anyone at that job since?
MW: No. I didn’t dislike any of them! They were a great PR firm, but I wasn’t meant for that job. I don’t even think they know I was on the show. I just, dipped after a week. I literally haven’t heard from them since. Once I got back to Texas, I was like ‘I have to go back to California’. I moved here in late March.
MK: Staying on track of Big Brother…let’s play: Love HA or Leave HA! Obviously, we’re not going to do Alex or Shelby- they go without saying. Let’s get the juice: Whitney Hogg.
MW: Love Ha!! Even though she flipped, I still like her. This is based out of the show. She invited me to her wedding. I didn’t go, it was far away!
MK: Were you the only one invited?
MW: No, she invited Monte [Massongill] and Shane [Chapman]. She made a decision to turn on [our alliance] and our fans did not like that. Looking back, everyone has to play her own game. It wasn’t the best move for her, at all, and I was pissed. But, she was playing.
MK: Her move was unnecessary and made people not like her.
MW: It did. But, I can’t be mad at her. I mean, I won.
MK: Did anyone send her Ballsmasher shot glasses? Fans sent them to Morgan.
MW: I don’t think they did. I don’t even know if she knows we even got Ballsmasher shot glasses.
MK: Moving on, Shane?
MW: Well, since him and Danielle [Lickey] broke up, I’m gonna say LOVE HA!! He’s made an effort to come and hang out with us.
MK: He so wants to be in your Insta Story.
MW: He seems so much more chill outside of the house. Love ha!!
MK: Our buddy Scott Dennis?
MW: [Scoffs] Love ha!!!!!!!!!
MK: I’m aware. We just have to give Scott a shout out, right? He’ll be our first Google hit.
MW: Oh, 100%. Scott will be the first person to read this article. Scott is like our dad. He just visited us in Los Angeles. We took him hiking and to the beach. I think he had fun, but he’d never tell us if he was having a bad time. It was so exciting to have him be out of Maine! He didn’t want me to do the Chubby Bunny Challenge because he didn’t want me to die. So, we did the Food Challenge.
MK: Chubby Bunny Challenge?
MW: Yes! When you take a bunch of marshmallows and put them in your mouth to see how many you can fit? He didn’t want me to choke.
MK: Monte?
MW: [Laughing] Love him…sometimes.
MK: How many times has he tried hitting on you?
MW: Okay, disclaimer! I love Monte, he’s such a sweet guy. [Laughs] Monte is Monte. I friend-zoned him pretty quickly in the house. So, he never thought there was a chance.
MK: Danielle?
MW: LEAVE HA!! I followed her…and all of my fellow houseguests on social media after the game [was finished]. Danielle just started following me on, I think Twitter? Even Jason [Roy] follows me.
MK: [Groans] Love HA or leave HA: Jason?
MW: [Perky as hell] Outside of the house, love ha!! You’re going to hate me for saying that. Disclaimer again, Jason [did some awful things] in the house. So were a lot of people. [In real life] he’s given me no reason to dislike him. There are some people who can’t give up the game, he has.
MK: Who can’t give up the game?
MW: Danielle and Neely [Jackson]. I feel like they haven’t given up the game yet. There was a rumor that Alex was going to be on Big Brother 19. It was just a rumor. Neely makes some tweet that was clearly about my sister. As a sister, I’m not going to keep my mouth shut. So, I tweeted to her and was very passive aggressive. I started a Twitter war and we haven’t spoken since then.
MK: Anyone else you’d leave?
MW: Mostly everyone has been so nice afterwards. They met my parents at finale night and always ask how they’re doing. They’ve asked if we were affected by the floods in Houston. Everyone’s been so nice. I think Kryssie [Ridolfi] was a bit confused [Laughing] in the [game], but her and her boyfriend are the sweetest people. Kryssie is [by far] the most normal from [the alliance against mine]. She is so different outside of the house.
“There are some people who know I exist. And we’ve definitely [spoke]. But, it’s funny when once you start dating someone, all of the sudden, other people don’t exist.”
I’ve never been good at being subtle… A post shared by Morgan Willett (@morganleighwillett) on Sep 11, 2017 at 6:11pm PDT
MK: Can we talk about the Big Brother veterans? Who are you friends with?
MW: Big Brother is this strange, very close, very dysfunctional family. We’re all friends no matter what happened on the show. Jason was telling us that we were about to be a part of this Big Brother family. I didn’t think so. But, the Big Brother veterans reached out to me and were congratulating me, welcoming me to the family, and [all that]. Michelle Meyer and Natalie [Negrotti] went to the Big Brother Canada finale with Alex and me. They were so sweet! We had such a cool girl squad going on. They are so fun. I’m close with Jon Pardy and Peter Brown from Big Brother Canada. And a [few Big Brother hunks].
MK: Is there any drama with the Big Brother veterans? There must be, Morgan!
MW: There are some people who know I exist. And we’ve definitely [spoke]. But, it’s funny when once you start dating someone, all of the sudden, other people don’t exist. [Cough, Cough]. It was very strange, to me. It was just like: Be a part of the Big Brother family. We all have something very strange in common. Let’s move past the petty stuff, no one is going trying to steal your man. Come on.
MK: If you could take ANY viral celebrity to live in the house with you, who would it be, and why?
MW: Easy. “Hot Cop” Daniel Rengering. If I’m ever on Big Brother again can someone please put him in that house with me?! He is really sweet, smart, so sexy, and I looooves a man in uniform![laughs]
MK: What was the best gift a Big Brother fan has sent you?
MW: [Laughing] Okay, I know [houseguests] from the regular seasons [receive] MacBooks and cruises, but I got McDonald’s gift cards…
MK: Yes! Comfort food!
MW: [Laughing] YES! I might not have a MacBook, but I have a Mac-Flurry! [Laughs] I got calories!
MK: Would you do Big Brother again?
MW: Walking out of the house, I’d say no. It was so stressful. But, thinking back: I would do it again. It’s a once in a lifetime experience that anyone would want to do.
MK: Would you win again?
MW: I don’t know how I would do on a season where America wasn’t so involved. But, I’d love to see how I would do without America’s involvement. I learned about the patience I didn’t have. You don’t have any distractions, it’s only your thoughts. It makes you realize so much you wouldn’t realize if you were living in actual society.
MK: Julie Chen herself even mentioned in an article: “You thought [Morgan] was going to be one way, but she wasn’t”.
MW: Exactly! I love that I got to play up Texas. I have a horrible resting, bitch face and seem like I’d only care about looks. But, then I open my mouth and people are surprised. I was saying how I was going to get along with all guys and no girls. I’m going to control the guys. Then, the complete opposite happened. There were…no guys in the house. I got to be myself. I’m not mean to people [in everyday life]. There was no purpose to do that. I kept my mouth shut more than I wanted too, [honestly]
“My mom even [told me] when I was little, I would tell her ‘I’m leaving you to go to LA! I’m going and I’m never looking back!’”
MK: Let’s move forward: What the hell was going through your head when you came to Los Angeles?!
MW: Oh my gosh, I moved here with a car filled to the brim with my dad! I wasn’t nervous at all. My mom was calling me every hour of the trip. But, I wanted to move to Los Angeles when I was like 5 of 6 [years old]. My mom even [told me] when I was little, I would tell her ‘I’m leaving you to go to LA! I’m going and I’m never looking back!’ I knew I was going to live with Shelby and I had a girlfriend out here from Texas, so I was excited to attempt to do what I’ve always wanted to do. Even if I failed, I could still go home saying I tried. If I would’ve stayed in Texas, I would’ve regretted [the ‘What If’]. I had a cushion where I didn’t have to worry too much.
MK: What were you coming out here to pursue?
MW: Well, I’m not stopping until I’m on E! Entertainment hosting. I want to interview celebrities knowing that they don’t have to stress talking to some stiff woman who they don’t want to talk with. I’m the best at hosting and I won’t stop until I achieve my goal. It’s what I want to do. So, here I am!
MK: What’s it like living with your former reality contestant, Shelby?
MW: She’s a hoot. She’s the most loyal person I’ve ever met besides my sister. She’s a fantastic human. Insane, yes, but she’s the best friend you could ask for.
MK: How’s dating in Los Angeles?
MW: I moved out here recently single. I had some spare time and wasn’t focusing on career, yet. The guys [in Los Angeles] are different than what I’m used to. Everyone here is out for themselves. Granted, I was probably looking in the wrong places – bars and clubs – you won’t meet someone [worthy] there. But, I missed the dating app experience, because I’ve been in a long-term relationship before coming here. Everyone was telling me to try dating apps, and I’m like ‘No! That’s so bizarre to me! I’m not going to meet a stranger offline!’ I need someone who is a little bit humble, so I’m struggling to find that. Everyone is trying to make it in the industry. They aren’t Southern boys who work on oil rigs. Good guys exist out here, I’m figuring out where to find them.
MK: A month into being in Los Angeles, what was happening?
MW: Oh, I had quite a few mental breakdowns my first few months [in Los Angeles]. I’m so used to instant success, as awful as that sounds. I’m such a hard worker and tenacious. My life before Los Angeles was easy. Now, I’m out here – and all of these girls were Prom Queens and have been on television shows: I’m not special anymore. It was frustrating. I was trying to figure out what my next step was. I want to be a host, and there’s so much [that comes with it]. People would tell me, “You need a Hosting Agent, and a Manager, and This, and That, but wait – you don’t need any of that, you need this”.
MK: That sounds extremely complicated!
MW: Yes! So, I got extremely lucky meeting my managers, Daryn [Simons] and Deborah [Del Prete]. It’s actually funny because I met you [her interviewer] from being on Big Brother and you watched the show, so you introduced me to [them] because you saw me on Big Brother. So, I have to give credit to Big Brother, for helping me with my success. My managers have been my Fairy Godmothers. Look at [the industry now], there’s creepy old men taking advantage of [aspiring and rising] young women, so to have strong female[s] to look up to and guide me through the industry, I’m so thankful. [They’ve] exposed me to so many [great] things about the industry, that I never could’ve done on my own out here.
MK: Like what?
MW: Improv Classes! I never would’ve done them on my own. [Those classes] have helped me so much with hosting. [I got my first role] in [the upcoming] Netflix film, Pay Up, as a Principal Dancer because of my managers. I am by no means a dancer, so to learn the choreography, like, I came out here with the mindset of ‘whatever comes my way, say yes’. I wouldn’t grow inside my comfort zone. I have to step outside of [what I want to do] to succeed. Was I mortified dancing in a [modest] sequin one piece and feathered boa? Yes. But, I did it and it was a great experience. Someone from Granbury, Texas will never tell you they were in a Feature Film.
“I had other people from Big Brother telling me to talk to this agent. I did – and that went nowhere. I heard about another agency…that went nowhere. It was repetitive. No one wanted to help me, but these agents wanted to meet me. I was getting down on myself”
Seriously, who goes to Universal Studios wearing matching outfits? Oh wait…. ???? #ballsmashersforlife #extraforlife A post shared by Morgan Willett (@morganleighwillett) on Aug 19, 2017 at 4:59pm PDT
MK: I’m no fool to the industry. You had to have met up with sham agents, managers, and the like. What was it like meeting with your current managers? Knowing that you had people who see you? Unfortunately, a plethora of reality stars become duped by Entertainment promises. You aren’t.
MW: Well, honestly at first…I met with a ton of people. It’s an end of the road everywhere you go. It was meeting with people who would give me advice then be like, ‘Go on your way.’ [Honestly] I had other people from Big Brother telling me to talk to this agent. I did – and that went nowhere. I heard about another agency…that went nowhere. It was repetitive. No one wanted to help me, but these agents wanted to meet me. I was getting down on myself. When I met with Daryn and Deb, I was like ‘It’s going to be the same thing: ‘Here’s a helpful tip: [go see someone else]. Good luck.’ I went in there not nervous, because [I had nothing to lose]. I don’t act, I don’t want too. I was on this weird reality show. But, we ended up chatting and we talked about my life. I thought it was so weird they seemed interested in me. Then they called me back and invited me to see them again. They wanted to sign me! I literally was quite shocked. I went in there with my college resume that had my sorority and my college GPA. I had no [Entertainment] experience. But, they saw some spark in me and some potential…enough to sign me. I was so shocked. If they saw something in me…someone who [looks like] a million other girls in Los Angeles…[maybe] other people will see it, too. I will eventually get somewhere in this crazy industry.
MK: Have you signed with any other companies?
MW: I’m working on my Theatrical experience, obviously. But, I’m great in front of a camera. Everything else helps. So, I thought, why not try Commercial acting? I met with agencies and was so willing to work with them. But, I got turned down: ‘You’re too pretty. You aren’t someone who the Average Joe would approach on the street. We want someone more Ethnically Ambiguous.’ That’s what they are looking for in commercials now. I was pretty screwed.
MK: So, you just hit a dead end?
MW: [Laughs] Thanks to my Fairy Godmother, Daryn, I just signed with a Commercial Agency. Let me tell you about it! Originally, I went into this next meeting with the mindset of ‘I’m not for them’. Same thing, again and again. I’m too blonde, I’m a Caucasian girl from Texas, there are too many of me. They had me read an audition: the role was a young girl at TJMaxx. Funny enough, I was wearing a TJMaxx shirt, so I started talking about my shirt. [Halfway through] I was like, ‘Oh, no they hate this. Because I was just, rambling: I love TJMaxx!’ They calmed me, telling me I had too much energy, which I can’t control. I reread, taking it down a notch, so I just said it like I was talking to a friend. I saw the agents smile at one another and had some high hopes. They sent me off, I was a bit unconfident with their reactions. But, they signed me! It’s another confidence boost. I’m proving to myself that experience in this industry doesn’t matter if you have the personality and work ethic. Just keep truckin’. If someone from a Webisode can make it, I don’t want to say anyone can, but keep believing in yourself. I’m the biggest self-doubter ever, but this is all so interesting [to me].
MK: Were you more nervous auditioning for commercials and movies or reality television?
MW: Commercials and movies! I wasn’t taking Big Brother seriously. I haven’t seen the show, so I was just like, I’m going to be funny and crazy. I had nothing to lose. Auditioning for film and television, I had so much riding on that. I was determined and more nervous doing that.
MK: I’m nosey. Do all Big Brother contestants experience what you’ve been going through?
MW: I’m not basing anyone’s experience but my own, but I will say common knowledge. It’s almost as if Big Brother – or reality stars – expect everything to be handed to them. I didn’t expect this, since I was on the online version.
“I know you don’t get famous from Big Brother. That’s clear. You aren’t going to continue fame from posting paid, Instagram advertisements. I mean, I do it too – it’s fun and who doesn’t like free products – but that’s not what defines me. So many people from reality shows get stuck in that loop of being social media stars.”
Serious question: why do girls like pumpkins so much ?????????? #itsfallwhyamisweating A post shared by Morgan Willett (@morganleighwillett) on Oct 22, 2017 at 6:22pm PDT
MK: Do you feel like you get flack from the Big Brother community as almost feeling less than?
MW: I’m not less than. I won. My bank account isn’t complaining. I don’t get recognized as much as the television version contestants do. I don’t have as many social media followers as they do. But, that’s okay. I know you don’t get famous from Big Brother. That’s clear. You aren’t going to continue fame from posting paid, Instagram advertisements. I mean, I do it too – it’s fun and who doesn’t like free products – but that’s not what defines me. So many people from reality shows get stuck in that loop of being social media stars.
MK: From the last season of Big Brother, contestants were even speaking of the possibility of becoming viral famous and gaining followers from appearing on the show.
MW: That’s the sad thing about reality television now. People go on with the intention to get viral famous and followers from appearing on the show. That’s all they want. Yes, they are hot right now and have a ton of followers, but that’s not a career. Think about five years from now. Your [image] doesn’t matter. Instagram might not even be a thing anymore.
MK: Right?! They’re wasting away fifteen minutes so they can go on a cruise or party in a nightclub.
MW: Exactly. I knew social media would be great, but I know Big Brother isn’t my career. I want to be the person who has a long line of success and also states they’ve been on Big Brother. In the coming years, I don’t want to introduce myself as the girl who won Big Brother.
MK: I know too many people like that.
MW: Right. No, no, no. I will proudly say I won Big Brother. But, I want everyone to know this is what I’m doing now and I won a reality show. It’s different. A lot of people get delusional about being on reality television. At the end of the day, no one really cares you’ve been on reality television. Even when I interview people for Afterbuzz, if I mention I won Big Brother, no one cares. It’s never gotten me a job. It’s just helped my bank account.
MK: Can we talk about Afterbuzz?!
MW: It’s fantastic! It’s an awesome environment! I’ve gotten to interview Big Brother contestants, Julie Chen, Derek Hough, so many people! It’s an outlet for me to experience hosting. Where else will you find this opportunity in Los Angeles to become an experienced hostess?!
MK: On top of all of this, you work at Soul Cycle!
MW: Hell yes. Everyone is confused that I have a part-time job. Yes, I do. I’ve saved my Big Brother winnings for my future. I want to ball out then, not now. I’m not going to waste all this money I earned for a year of wealth. I enjoy working. I love the company I work for, and get free workouts! It’s better than me sitting on my ass being unsocial, spending money that I won’t see again. I’m a realist. I hope some reality stars can take notice and have humility.
MK: Do you still think reality stars are aloof to reality?
MW: YES! Go do something to better yourself or the world. You aren’t doing anything remotely worth your time banking on reality television to pay your bills. What are you going to do when you’re 45 years old? I always say there is more to life than reality television. Duh, I’m going to write about it, talk about it, and tweet about it – since I was on it – but, balance that with other normal hobbies in life. There is so much more to life than reality television.
MK: Wait, clarify this for me: Do you want to be on General Hospital?
MW: [Laughs] No, I do not want to be on General Hospital. I do not want to be an extra in a television show!
MK: Wait, you do not want to be Bikini Girl #3 on an FX show?!
MW: [Laughs] No! I don’t want to be the hot blonde in a bikini who appears in the background. I want to be the intelligent blonde who is interviewing, you know, the biggest person in Hollywood. My whole entire life, I’ve been pegged as ‘just a pretty face’. Cool, looks matter a lot – but I’m so much more well-spoken than my looks portray. I never want to depend on being ‘the hot girl’.
“That’s my goal: To never stop. I’m going to continue winning. I’m going to continue sizzling, making content, I’m never going to stop.”
MK: When I first met you, you mentioned you were only going to be in Los Angeles for a year. Has your expiration date extended?
MW: I do think I’ll still be here. I’m on month seven now. I feel like I’ve done more here than in my entire life. If I can keep up my track record, keep doing what I’m doing, keep making connections…making people feel special, and stay working hard…I’m going to take every opportunity I can to stay here. My goal for 2018 is to have a solid hosting gig that I consider a full-time career. It may be a little ambitious, but America did chose me to win Big Brother. I’m never going to stop. That’s my goal: To never stop. I’m going to continue winning. I’m going to continue sizzling, making content, I’m never going to stop.
MK: And, we’re about to hear more from you. Can you please plug your upcoming podcast?!
MW: I could talk to a wall. In our society, you always need to be on top of your competitor. I have an interesting story. I want to tell everyone this what I’m doing. I’m this small-town girl from Texas. These girls aren’t alone. You’re new to a city and dating? Here’s what you should do. You want to know the hot spots to hang in Los Angeles? Let me tell you! These are all things I wish I would’ve known. I want to share with others so they aren’t stumbling around as I was my first few months in a new city. Especially if someone wants to break into the Entertainment industry. I want to make someone laugh and help them out. I’m in this with you. Take ten minutes of your day, let’s get through this together.
MK: Let’s wrap this up with a shout out to your fans. You have one particular Instagram follower who comments on all of your posts before I even get to them! Do you love HA?!
MW: I love HA! [Laughing] I have the best fans possible. They are all so supportive. I love interacting with them. I want them to be a part of my journey, so I try to talk to each of them. We entertain each other. They are the best part of my day. I want to give them a little slice of my life, in every way.
Author’s Note: Quotes have been edited for clarity. |
<filename>tests/infrastructure/WebSocketHub.test.ts<gh_stars>0
import tomcat from "../../src"
import { CandleStickCollection, CandleStickData } from "../../src/common";
/**
 * Resolve after `ms` milliseconds. Used throughout the suite to give the
 * hub and its clients time to exchange frames before asserting.
 *
 * Fix: the `ms` parameter was untyped (implicit `any`) in a TypeScript file.
 */
async function wait(ms: number): Promise<void> {
    return new Promise(resolve => {
        setTimeout(resolve, ms);
    });
}
jest.setTimeout(50000);
describe('WebSocketHub', () => {
test('ping pong', async () => {
const port = 8081;
const hub = tomcat.hosts.getHostBuilder('hub')
.addWebSocketHub()
.buildWebHost();
const client1 = tomcat.hosts.getHostBuilder('client1')
.addMessageBus(cfg => {
cfg.transports.websocket.url = `http://localhost:${port}/hub`;
})
.build();
await hub.listen(port);
await client1.start();
await wait(1000);
await client1.bus.subscribe("topic", null);
await wait(100000);
await client1.stop();
await wait(10000);
});
test('publish works between processes', async () => {
// lets spin up two servers on different ports
// const builder = new HostBuilder();
const port = 8081;
const hub = tomcat.hosts.getHostBuilder('hub')
.addWebSocketHub()
.buildWebHost();
const client1 = tomcat.hosts.getHostBuilder('client1')
.addMessageBus(cfg => {
cfg.transports.websocket.url = `http://localhost:${port}/hub`;
})
.build();
const client2 = tomcat.hosts.getHostBuilder('client2')
.addMessageBus(cfg => {
cfg.endpoint = `client`
cfg.transports.websocket.url = `http://localhost:${port}/hub`;
})
.build();
// (client1);
// (client2);
(hub);
await hub.listen(port);
const messageName = 'some-message'
let received: unknown | null = null;
client1.bus.subscribe(messageName, (ctx) => {
received = ctx.message.payload;
return ctx.reply("result");
return Promise.resolve();
});
await client1.start();
await client2.start();
await wait(500);
await client2.bus.createMessage(messageName, "hi there")
.publish();
await wait(500);
//tomcat.utils.getLogger().info(received);
await wait(10000);
await client1.stop();
await client2.stop();
await wait(10000);
await hub.stop();
expect(received).not.toBeNull();
});
test('remote execute works between processes', async () => {
// lets spin up two servers on different ports
// const builder = new HostBuilder();
const port = 8081;
const hub = tomcat.hosts.getHostBuilder('hub')
.addWebSocketHub()
.buildWebHost();
const client1 = tomcat.hosts.getHostBuilder('client1')
.addMessageBus(cfg => {
cfg.transports.websocket.url = `http://localhost:${port}/hub`;
})
.build();
const client2 = tomcat.hosts.getHostBuilder('client2')
.addMessageBus(cfg => {
cfg.endpoint = `client`
cfg.transports.websocket.url = `http://localhost:${port}/hub`;
})
.build();
// (client1);
// (client2);
(hub);
await hub.listen(port);
const messageName = 'some-message'
let received: unknown | null = null;
let call_count = 0;
client1.bus.subscribe(messageName, (ctx) => {
call_count++;
if (call_count == 1) {
received = ctx.message.payload;
return ctx.reply("hi there");
} else if (call_count == 2) {
throw 'busy';
}
else {
return ctx.reject("dont have time. come back later.");
}
});
await client1.start();
await client2.start();
await wait(500);
let response2 = null;
let response3 = null;
// The first request would work fine!
const response1 = await client2.bus.createMessage(messageName, 'hi there')
.execute((ctx) => {
(ctx);
return false
});
await client2.bus.createMessage(messageName, "hi there")
.execute()
.then()
.catch(err => {
response2 = err;
});
await client2.bus.createMessage(messageName, "hi there")
.execute()
.then()
.catch(err => {
response3 = err;
});
const response4 = await client2.bus.createMessage(messageName, "hi there")
.execute(null, 10000, true)
await wait(500);
//tomcat.utils.getLogger().info(received);
await wait(2000);
await client1.stop();
await client2.stop();
await wait(2000);
await hub.stop();
expect(received).not.toBeNull();
expect(response1).not.toBeNull();
expect(response2).not.toBeNull();
expect(response3).not.toBeNull();
expect(response4).not.toBeNull();
});
test('large data', async () => {
// lets spin up two servers on different ports
// const builder = new HostBuilder();
const port = 8081;
const hub = tomcat.hosts.getHostBuilder('hub')
.addWebSocketHub()
.buildWebHost();
const client1 = tomcat.hosts.getHostBuilder('client1')
.addMessageBus(cfg => {
cfg.transports.websocket.url = `http://localhost:${port}/hub`;
})
.build();
const client2 = tomcat.hosts.getHostBuilder('client2')
.addMessageBus(cfg => {
cfg.endpoint = `client`
cfg.transports.websocket.url = `http://localhost:${port}/hub`;
})
.build();
// (client1);
// (client2);
(hub);
await hub.listen(port);
const data = new CandleStickCollection([]);
for (let i = 0; i < 50000; i++) {
data.push(new CandleStickData(Date.now() + i * 100, 1, 1, 1, 1, 1, 1, 1, 1));
}
const messageName = 'get-data'
client1.bus.subscribe(messageName, (ctx) => {
return ctx.reply(data);
});
await client1.start();
await client2.start();
await wait(100);
// The first request would work fine!
const now = Date.now();
const response1 = await client2.bus.createMessage(messageName, 'hi there')
.execute();
const dataReceived = response1.cast<CandleStickCollection>();
const elapsed = (Date.now() - now) / 1000;
(elapsed);
(dataReceived);
await wait(100);
//tomcat.utils.getLogger().info(received);
await wait(2000);
await client1.stop();
await client2.stop();
await wait(2000);
await hub.stop();
});
}); |
import Data.Char
import Data.Bits
import Data.List
-- Entry point: read the Project Euler 59 cipher file (comma-separated
-- decimal character codes) and start the interactive decryption loop.
-- Fix: removed the trailing `return ()`, which was unreachable because
-- decryptLoop recurses forever.
main = do
    text <- readFile "p059_cipher.txt"
    decryptLoop $ convertToChars text
-- Interactive loop for breaking the XOR cipher.  Repeats forever:
--   1. asks for a "classification" character (e.g. space, the most common
--      character in English text),
--   2. for each of the three key positions (the key is three characters
--      long — see ithChars' stride of 3), prints the 10 single-character
--      keys whose decryption contains that character most often,
--   3. asks the user to type a candidate key,
--   4. prints the decryption and the sum of its character codes
--      (the Project Euler 59 answer).
decryptLoop text = do
    putStrLn "Enter char for decryption classification (space works well):"
    chars <- getLine
    -- Only the first character of the input line is used; an empty line
    -- would make `head` fail.
    let char = head chars
    let best0 = nBestDecryptions 10 char 0 text
    let best1 = nBestDecryptions 10 char 1 text
    let best2 = nBestDecryptions 10 char 2 text
    putStrLn "i=0 decryptions:"
    print best0
    putStrLn "i=1 decryptions:"
    print best1
    putStrLn "i=2 decryptions:"
    print best2
    putStrLn "Enter decrypt key:"
    key <- getLine
    putStrLn $ "key: " ++ key
    let decrypted = decryptText key text
    putStrLn $ "Decrypted text: " ++ decrypted
    -- Project Euler 59 asks for the sum of the ASCII values of the
    -- decrypted text.
    let answer = sum $ map ord decrypted
    putStrLn $ "Answer = " ++ show answer
    decryptLoop text
-- Top n single-character keys for stream offset i, ranked by how often the
-- classification character appears in the resulting decryption.
nBestDecryptions n char i = bestDecryption n char . ithChars i
-- Parse the cipher file format (comma-separated decimal character codes)
-- into the corresponding string of characters.
convertToChars :: String -> String
convertToChars text = map (chr . read) (words (map commaToSpace text))
  where
    commaToSpace c = if c == ',' then ' ' else c
-- Substitute every occurrence of `from` with `to` in a string.
replace :: Char -> Char -> String -> String
replace from to = map (replaceChar from to)

-- Substitute a single character: yields `b` when `c` matches `a`,
-- otherwise leaves `c` untouched.
replaceChar :: Char -> Char -> Char -> Char
replaceChar a b c
    | a == c    = b
    | otherwise = c
-- XOR the character codes of a key character and a ciphertext character.
decryptChar :: Char -> Char -> Char
decryptChar key encrypted = chr (ord key `xor` ord encrypted)
-- Every third element starting at offset i (indices i, i+3, i+6, ...).
-- The cipher key is three characters long, so each key position encrypts
-- exactly this sub-stream of the text.
-- Fix: the previous definition, `map (text !!) [i,(i+3)..(n-1)]`, was
-- O(n^2) because of repeated list indexing; this is a single O(n) pass.
ithChars :: Int -> [a] -> [a]
ithChars i text = go (drop i text)
  where
    go []     = []
    go (x:xs) = x : go (drop 2 xs)
-- Decrypt by XOR-ing the text against the key repeated cyclically
-- (Vigenère-style XOR).  An empty key is an error, as before (cycle []).
decryptText :: String -> String -> String
decryptText key encrypted =
    zipWith (\k e -> chr (ord k `xor` ord e)) (cycle key) encrypted
-- Number of occurrences of a character in a string.
count :: Char -> String -> Int
count c = length . filter (== c)
-- For every candidate single lowercase key, pair the occurrence count of
-- the classification character in the decryption with the key itself.
-- (The puzzle guarantees the key consists of lowercase letters.)
allDecryptions char text = map score ['a'..'z']
  where
    score k = (count char (decryptText [k] text), k)
-- The n highest-scoring (count, key) pairs, best first.  Keys are all
-- distinct, so descending sort order is unambiguous.
bestDecryption n char text =
    take n (sortBy (flip compare) (allDecryptions char text))
|
<reponame>andreisarabia/newsworthy
import { v2 as _cloudinary } from 'cloudinary';
import Config from '../config';
import { deepClone } from '../util';
// cloudinary doesn't export this type for devs to use...
// Mirrors the shape of the object resolved by cloudinary.uploader.upload;
// the index signature at the bottom absorbs any fields the API adds later.
interface UploadApiResponse {
  public_id: string;
  version: number;
  signature: string;
  width: number;
  height: number;
  format: string;
  resource_type: string;
  created_at: string;
  tags: Array<string>;
  pages: number;
  bytes: number;
  type: string;
  etag: string;
  placeholder: boolean;
  url: string;
  secure_url: string;
  access_mode: string;
  original_filename: string;
  moderation: Array<string>;
  access_control: Array<string>;
  context: object;
  metadata: object;
  // Catch-all for properties Cloudinary may add in future API versions.
  [futureKey: string]: any;
}
// Module-level singleton: either the configured Cloudinary SDK, or an
// inert stub when no credentials are available.
let cloudinary: typeof _cloudinary;

if (Config.hasCloudflareCredentials) {
  // NOTE(review): the flag is named "Cloudflare" but gates Cloudinary
  // credentials — presumably a naming slip in Config; confirm upstream.
  const {
    cloudinaryCloudName: cloud_name,
    cloudinaryApiKey: api_key,
    cloudinaryApiSecret: api_secret,
  } = Config.getAll(
    'cloudinaryCloudName',
    'cloudinaryApiKey',
    'cloudinaryApiSecret'
  );

  cloudinary = _cloudinary;
  cloudinary.config({ cloud_name, api_key, api_secret });
} else {
  // no point in exporting the whole library if no creds provided
  cloudinary = deepClone(_cloudinary);

  // if no library to use, we avoid using the sdk fns that are
  // used throughout the app
  // NOTE(review): deepClone of a module object may not preserve function
  // members, so only the methods stubbed below are guaranteed callable —
  // verify deepClone's handling of functions.
  cloudinary.uploader.upload = () => Promise.resolve({} as UploadApiResponse);
}

export default cloudinary;
|
def make_response(self, rv):
    """Convert a view function's return value into a response object.

    Accepted ``rv`` types, checked in order:
      * an instance of ``self.response_class`` -- returned unchanged;
      * a string (``basestring`` -- this code targets Python 2) -- used
        as the response body;
      * a tuple -- unpacked as positional arguments to the response
        class (e.g. body, status, headers);
      * anything else -- coerced via ``response_class.force_type`` with
        the current request's WSGI environ.

    :param rv: the view function's return value.
    :return: an instance of ``self.response_class``.
    :raises ValueError: if ``rv`` is ``None``.
    """
    if rv is None:
        raise ValueError('View function did not return a response')
    if isinstance(rv, self.response_class):
        return rv
    if isinstance(rv, basestring):
        return self.response_class(rv)
    if isinstance(rv, tuple):
        return self.response_class(*rv)
    # `request` is not defined in this chunk -- presumably a request-context
    # proxy supplying the active WSGI environ; confirm against the module's
    # imports.
    return self.response_class.force_type(rv, request.environ)
def dump_interaction_map(interaction_map,
                         indent=4,
                         filename=None,
                         write=False):
    """Serialize an interaction map to JSON text.

    The map is first normalized via ``encode_interaction_map`` and then
    dumped with ``CompactJSONEncoder`` (both defined elsewhere in this
    module).

    :param interaction_map: the interaction map to serialize.
    :param indent: indentation level passed to ``json.dumps``.
    :param filename: destination path; required when ``write`` is True.
    :param write: if True, write the JSON to ``filename`` and return
        ``None``; otherwise return the JSON text.
    :return: the JSON string, or ``None`` when ``write`` is True.
    :raises ValueError: if ``write`` is True but no ``filename`` is given
        (previously this surfaced as an opaque ``TypeError`` from
        ``open(None)``).
    """
    if write and filename is None:
        raise ValueError("filename is required when write=True")
    formatted_map = encode_interaction_map(interaction_map)
    text = json.dumps(formatted_map,
                      indent=indent,
                      cls=CompactJSONEncoder)
    if write:
        with open(filename, 'w') as f:
            f.write(text)
        return None
    return text
module Cards.Output
(
putCard,
putCards,
putHandLn,
putEquity,
putTally
) where
import Cards
import Data.List
import Numeric
import Poker
import Rainbow
import qualified Data.ByteString as BS
-- Render a floating-point number with exactly `numOfDecimals` digits after
-- the decimal point (fixed-point notation, via Numeric.showFFloat).
-- Fix: added the missing type signature (the original relied on inference).
formatFloatN :: RealFloat a => a -> Int -> String
formatFloatN floatNum numOfDecimals = showFFloat (Just numOfDecimals) floatNum ""
-- chunk functions
--
-- Builders that turn cards/hands into Rainbow `Chunk`s (coloured terminal
-- text).  Nothing here performs IO; see the put* functions below.

-- Render one card using the four-colour deck scheme (one colour per suit).
chunkCard :: Card -> Chunk String
chunkCard = fourColourDeckCardChunk

-- Render a list of cards with `sep` interspersed between them.
chunkCards :: String -> [Card] -> [Chunk String]
chunkCards sep cards = intersperse (chunk sep) $ map chunkCard cards

-- Render the community cards prefixed with a "Board: " label.
chunkBoard :: [Card] -> [Chunk String]
chunkBoard board = (chunk "Board: ") : cards
    where cards = chunkCards " " board

-- Render a pair of hole cards with a label, followed by the hand shape
-- (`handShape` comes from the Cards/Poker modules).
chunkHoleCards :: HoleCards -> [Chunk String]
chunkHoleCards h@(HoleCards c1 c2) = [ (chunk "Hole Cards: "),
                                       sep,
                                       (chunkCard c1),
                                       sep,
                                       (chunkCard c2),
                                       sep,
                                       (chunk (handShape h))
                                     ]
    where sep = chunk " "

-- Render a poker rank via its Show instance.
chunkHandRank :: PokerRank -> [Chunk String]
chunkHandRank r = [chunk (show r)]

-- Render hole cards followed by their showdown rank, space-separated.
chunkRankedHand :: (HoleCards,PokerRank) -> [Chunk String]
chunkRankedHand (h,r) = chunkHoleCards h ++ [sep] ++ chunkHandRank r
    where sep = chunk " "
-- chunk output functions

-- Emit chunks to stdout rendered as 256-colour ANSI byte strings.
putChunks :: [Chunk String] -> IO ()
putChunks = mapM_ BS.putStr . chunksToByteStrings toByteStringsColors256

-- Print a whole table: a title line, the board, then one line per ranked
-- hand.
putTable :: [Card] -> [(HoleCards,PokerRank)] -> IO ()
putTable board hands = putChunks $ title ++ boardChunks ++ handChunks
    where boardChunks = chunkBoard board ++ newline
          handChunks = concat $ map (\x -> (chunkRankedHand x) ++ newline ) hands
          newline = [chunk "\n"]
          title = [chunk "Poker Table\n"]
-- Print a single coloured card (no trailing newline).
putCard :: Card -> IO ()
putCard x = putChunks $ [chunkCard x]

-- Print a single coloured card followed by a newline.
putCardLn :: Card -> IO ()
putCardLn x = do putCard x
                 putStrLn ""

-- Print several coloured cards, unseparated, with no trailing newline.
putCards :: [Card] -> IO ()
putCards xs = putChunks $ map chunkCard xs

-- Print labelled hole cards (no trailing newline).
putHand :: HoleCards -> IO ()
putHand hand = putChunks $ chunkHoleCards hand

-- Print labelled hole cards followed by a newline.
putHandLn :: HoleCards -> IO ()
putHandLn hand = do putHand hand
                    putStrLn ""
-- Print a hand followed by its win/tie equity percentages, formatted to
-- two decimal places.
putEquity :: (HoleCards,ShowdownTally) -> IO ()
putEquity (h,t) = do putHand h
                     putStrLn $ " equity - win: " ++ winPct ++ "% - tie: " ++ tiePct ++ "%"
    where winPct = formatFloatN we 2
          tiePct = formatFloatN te 2
          -- percentTally (from Poker) yields the (win, tie) percentages.
          (we,te) = percentTally t

-- Print a hand followed by its raw showdown tally (via Show).
putTally :: (HoleCards,ShowdownTally) -> IO ()
putTally (h,t) = do putHand h
                    putStrLn $ " tally: " ++ (show t)
-- original unadorned output functions
--
-- Plain-String renderings without ANSI colouring; kept alongside the
-- Rainbow-based output above.

-- "Board: " followed by the space-separated cards.
showBoard :: [Card] -> String
showBoard xs = "Board: " ++ (intercalate " " $ map show xs)

-- "Hand: " followed by both hole cards and the hand shape.
showHand :: HoleCards -> String
showHand h@(HoleCards x y) = "Hand: " ++ (show x) ++ " " ++ (show y) ++ (handShape h)

-- A hand plus its showdown rank.
showRankedHand :: (HoleCards,PokerRank) -> String
showRankedHand (hand,rank) = (showHand hand) ++ " - rank: " ++ (show rank)

-- Print one hand (uncoloured) on its own line.
displayHand :: HoleCards -> IO ()
displayHand x = do putStrLn $ showHand x

-- Print each hand on its own line; note the base case emits one trailing
-- empty line after the last hand.
displayHands :: [HoleCards] -> IO ()
displayHands [] = putStrLn ""
displayHands (x:xs) = do displayHand x
                         displayHands xs
-- red and white colouring
-- Traditional two-colour deck: spades/clubs in white (legible on dark
-- terminals), hearts/diamonds in red.
traditionalColourDeckCardChunk :: Card -> Chunk String
traditionalColourDeckCardChunk = ansiCard white red red white

-- four-colour deck colouring
-- Four-colour deck: every suit gets a distinct colour for readability.
fourColourDeckCardChunk :: Card -> Chunk String
fourColourDeckCardChunk = ansiCard white red cyan green

-- take a card and return a chunk coloured correctly for output to a terminal
-- the four colours are accepted in order for ;
-- Spades
-- Hearts
-- Diamonds
-- Clubs
--
ansiCard :: Radiant -> Radiant -> Radiant -> Radiant -> Card -> Chunk String
ansiCard r _ _ _ c@(Card _ Spades) = chunk (show c) & fore r
ansiCard _ r _ _ c@(Card _ Hearts) = chunk (show c) & fore r
ansiCard _ _ r _ c@(Card _ Diamonds) = chunk (show c) & fore r
ansiCard _ _ _ r c@(Card _ Clubs) = chunk (show c) & fore r
|
/**
 * <p>Add extra BDD variables. Extends the current number of allocated BDD
 * variables with num extra variables.</p>
 *
 * <p>Compare to bdd_extvarnum.</p>
 *
 * @param num number of BDD variables to add; must be in [0, 0x3FFFFFFF]
 * @return old number of BDD variables
 * @throws BDDException if {@code num} is negative or exceeds 0x3FFFFFFF
 */
public int extVarNum(int num) {
    // Capture the current allocation so it can be returned to the caller.
    int start = varNum();
    // NOTE(review): 0x3FFFFFFF presumably mirrors the native bdd_extvarnum
    // limit on variable counts — confirm against the BDD library docs.
    if (num < 0 || num > 0x3FFFFFFF)
        throw new BDDException();
    setVarNum(start+num);
    return start;
}
package http
import (
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// saveFiles delegates to srv.SaveFiles and writes the standard JSON
// envelope; the data payload is nil, so the response conveys only the
// success/error status.
func saveFiles(c *bm.Context) {
	c.JSON(nil, srv.SaveFiles(c))
}
// downloadStoryFile streams the story export to the client as a plain-text
// attachment (story.txt). On service failure the error is logged and code
// -1 is written with an empty body.
func downloadStoryFile(c *bm.Context) {
	var (
		err  error
		data []byte
		code int
	)
	if data, err = srv.DownloadStoryFile(c); err != nil {
		log.Error("Download story file failed, error:%v", err)
		code = -1
	}
	// Fix: the header value was " text/plain;charset:utf-8;" — the leading
	// space and "charset:" (instead of "charset=") are malformed per the
	// RFC 7231 media-type grammar.
	contentType := "text/plain; charset=utf-8"
	c.Writer.Header().Set("content-disposition", `attachment; filename=story.txt`)
	c.Bytes(code, contentType, data)
}
// downloadChangeFile streams the change export to the client as a
// plain-text attachment (change.txt). On service failure the error is
// logged and code -1 is written with an empty body.
func downloadChangeFile(c *bm.Context) {
	var (
		err  error
		data []byte
		code int
	)
	if data, err = srv.DownloadChangeFile(c); err != nil {
		log.Error("Download change file failed, error:%v", err)
		code = -1
	}
	// Fix: was " text/plain;charset:utf-8;" — leading space and "charset:"
	// instead of "charset=" are malformed per RFC 7231.
	contentType := "text/plain; charset=utf-8"
	c.Writer.Header().Set("content-disposition", `attachment; filename=change.txt`)
	c.Bytes(code, contentType, data)
}
// downloadIterationFile streams the iteration export to the client as a
// plain-text attachment (iteration.txt). On service failure the error is
// logged and code -1 is written with an empty body.
func downloadIterationFile(c *bm.Context) {
	var (
		err  error
		data []byte
		code int
	)
	if data, err = srv.DownloadIterationFile(c); err != nil {
		log.Error("Download iteration file failed, error:%v", err)
		code = -1
	}
	// Fix: was " text/plain;charset:utf-8;" — leading space and "charset:"
	// instead of "charset=" are malformed per RFC 7231.
	contentType := "text/plain; charset=utf-8"
	c.Writer.Header().Set("content-disposition", `attachment; filename=iteration.txt`)
	c.Bytes(code, contentType, data)
}
// downloadBugFile streams the bug export to the client as a plain-text
// attachment (bug.txt). On service failure the error is logged and code -1
// is written with an empty body. Note the service method is DownBugFile
// (not DownloadBugFile) — kept as-is to match the service interface.
func downloadBugFile(c *bm.Context) {
	var (
		err  error
		data []byte
		code int
	)
	if data, err = srv.DownBugFile(c); err != nil {
		log.Error("Download bug file failed, error:%v", err)
		code = -1
	}
	// Fix: was " text/plain;charset:utf-8;" — leading space and "charset:"
	// instead of "charset=" are malformed per RFC 7231.
	contentType := "text/plain; charset=utf-8"
	c.Writer.Header().Set("content-disposition", `attachment; filename=bug.txt`)
	c.Bytes(code, contentType, data)
}
|
Electrochemical Behaviors of Guanine on Poly(L-methionine)/Graphene Modified Electrode and Its Detection
A poly(L-methionine)/graphene modified glassy carbon electrode was prepared. Experiments showed that the modified electrode exhibits an obvious electrocatalytic ability for the oxidation of guanine in 0.1 mol/L phosphate buffer solution (pH 7.0). Effects of pH value and scan rate on the electrochemical behaviors of guanine were investigated by cyclic voltammetry. The detection of guanine was performed by differential pulse voltammetry. The result indicated that the oxidation peak current of guanine was linear with its concentration in the range of 3.6 × 10⁻⁷–4.0 × 10⁻⁵ mol/L, with a correlation coefficient (r) of 0.9904 and a detection limit (S/N = 3) of 5.0 × 10⁻⁸ mol/L. The modified electrode had a good stability and reproducibility.
<reponame>giljulio/crunch
package com.giljulio.crunch.text.util;
import java.nio.BufferOverflowException;
import java.nio.BufferUnderflowException;
/**
 * A fixed-capacity FIFO queue backed by a circular array.
 *
 * <p>Elements are appended with {@link #add} and the <em>oldest</em> element
 * is discarded with {@link #removeLast} (the name is historical — it removes
 * the element at the tail of the ring, i.e. the least recently added one).
 *
 * <p>Fix: the previous implementation incremented {@code head}/{@code tail}
 * without bound, so after roughly 2^31 operations the counters overflowed
 * and {@code counter % length} produced negative array indices. The indices
 * now wrap on every update and an explicit {@code size} field is kept, which
 * also removes the dead {@code head < tail} branch the old {@code size()}
 * carried.
 *
 * <p>Not thread-safe.
 *
 * @param <T> element type
 */
public class CircularQueue<T> {

    /** Ring buffer; vacated slots are nulled so references can be GC'd. */
    private final Object[] elements;

    /** Physical index the next {@link #add} writes to; always in [0, capacity). */
    private int head;

    /** Physical index of the oldest live element; always in [0, capacity). */
    private int tail;

    /** Number of live elements currently stored. */
    private int size;

    /**
     * Creates a queue that can hold at most {@code capacity} elements.
     *
     * @param capacity maximum number of elements
     */
    public CircularQueue(int capacity) {
        elements = new Object[capacity];
    }

    /**
     * Returns the element at {@code index}, where index 0 is the oldest
     * element in the queue.
     *
     * @throws IndexOutOfBoundsException if {@code index >= size()}
     */
    @SuppressWarnings("unchecked")
    public T get(int index) {
        if (index >= size()) {
            throw new IndexOutOfBoundsException();
        }
        return (T) elements[absoluteIndex(index)];
    }

    /**
     * Appends an element at the head of the queue.
     *
     * @return always {@code true} (kept for Collection-style call sites)
     * @throws BufferOverflowException if the queue is full
     */
    public boolean add(T t) {
        if (size >= elements.length) {
            throw new BufferOverflowException();
        }
        elements[head] = t;
        head = (head + 1) % elements.length;
        size++;
        return true;
    }

    /**
     * Removes the oldest element (see the class comment about the name).
     *
     * @throws BufferUnderflowException if the queue is empty
     */
    public void removeLast() {
        if (size == 0) {
            throw new BufferUnderflowException();
        }
        elements[tail] = null; // release the reference for GC
        tail = (tail + 1) % elements.length;
        size--;
    }

    /** Returns the number of elements currently stored. */
    public int size() {
        return size;
    }

    /** Maps a logical index (0 == oldest element) to a physical array slot. */
    private int absoluteIndex(int index) {
        return (tail + index) % elements.length;
    }
}
|
package org.fabric3.samples.bigbank.account;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import java.math.BigDecimal;
import org.fabric3.api.annotation.model.Component;
import org.fabric3.api.annotation.model.EndpointUri;
import org.fabric3.api.annotation.monitor.Monitor;
import org.fabric3.api.annotation.scope.Composite;
import org.fabric3.api.binding.ws.annotation.WebServiceBinding;
import org.fabric3.samples.bigbank.api.backend.account.AccountLedger;
import org.fabric3.samples.bigbank.api.backend.account.AccountsSystem;
import org.fabric3.samples.bigbank.api.backend.account.InternalAccountData;
import org.fabric3.samples.bigbank.api.backend.account.LedgerEntry;
import org.fabric3.samples.bigbank.api.backend.account.LedgerSystem;
import org.fabric3.samples.bigbank.api.services.account.Account;
import org.oasisopen.sca.annotation.Reference;
/**
 * Controller for account resources.
 *
 * This controller is wired to legacy systems that expose WS-* (Web Services) APIs via {@code Reference} annotations. It is responsible for aggregating account
 * balance with ledger entries to provide additional information to end-users such as the "safe-to-spend" calculation.
 */
@EndpointUri("accounts")
@Path("/")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Composite
@Component
public class AccountController {
    // Scale factor applied before narrowing BigDecimal amounts to the int
    // fields of the Account DTO — presumably converting major currency
    // units to cents; confirm against Account's contract.
    // NOTE(review): intValue() truncates sub-cent fractions and narrows
    // silently above Integer.MAX_VALUE — verify the backend never produces
    // such values.
    private static final BigDecimal MULTIPLIER = BigDecimal.valueOf(100);

    // Amount excluded from "safe-to-spend": the balance floor the customer
    // is expected to keep in the account.
    private static final BigDecimal MINIMUM_BALANCE = BigDecimal.valueOf(100);

    @Monitor
    protected AccountMonitor monitor;

    // Legacy accounts system, reached over a WS-* binding.
    @Reference
    @WebServiceBinding(uri = "http://localhost:8182/accountsSystem")
    protected AccountsSystem accountsSystem;

    // Legacy ledger system, reached over a WS-* binding.
    @Reference
    @WebServiceBinding(uri = "http://localhost:8182/ledgerSystem")
    protected LedgerSystem ledgerSystem;

    /**
     * Returns the aggregated view of a single account: its balance and the
     * computed "safe-to-spend" amount, both scaled by {@link #MULTIPLIER}.
     *
     * @param number the account number taken from the URL path
     * @return the populated {@link Account} DTO
     */
    @Path("{accountNumber}")
    @GET
    public Account getAccount(@PathParam("accountNumber") String number) {
        monitor.invoked(number);

        // retrieve account information from the account system
        InternalAccountData internalData = accountsSystem.getAccountData(number);

        // retrieve recent ledger entries for the account
        AccountLedger accountLedger = ledgerSystem.getLedger(number);

        // calculate the safe-to-spend value: balance minus the minimum
        // balance, minus every debit still in PROCESSING state (i.e. not
        // yet reflected in the balance figure).
        BigDecimal balance = internalData.getBalance();
        BigDecimal safeToSpend = balance.subtract(MINIMUM_BALANCE);
        for (LedgerEntry entry : accountLedger.getEntries()) {
            if (LedgerEntry.TYPE_DEBIT == entry.getType() && LedgerEntry.STATUS_PROCESSING == entry.getStatus()) {
                safeToSpend = safeToSpend.subtract(entry.getAmount());
            }
        }

        return new Account(number, balance.multiply(MULTIPLIER).intValue(), safeToSpend.multiply(MULTIPLIER).intValue());
    }
}
|
<gh_stars>1-10
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as assert from 'assert';
import uri from 'vs/base/common/uri';
import {Match, FileMatch, SearchResult} from 'vs/workbench/parts/search/common/searchModel';
import {IInstantiationService} from 'vs/platform/instantiation/common/instantiation';
import {createInstantiationService} from 'vs/platform/instantiation/common/instantiationService';
import {SearchSorter, SearchDataSource} from 'vs/workbench/parts/search/browser/searchViewlet';
import {TestContextService} from 'vs/workbench/test/browser/servicesTestUtils';
suite('Search - Viewlet', () => {
let instantiation: IInstantiationService;
setup(() => {
instantiation = createInstantiationService({
modelService: {
getModel: () => null
},
requestService: {
getRequestUrl: () => 'file:///folder/file.txt'
},
contextService: new TestContextService()
});
});
test('Data Source', function () {
let ds = new SearchDataSource();
let result = instantiation.createInstance(SearchResult, null);
result.append([{
resource: uri.parse('file:///c:/foo'),
lineMatches: [{ lineNumber: 1, preview: 'bar', offsetAndLengths: [[0, 1]] }]
}]);
let fileMatch = result.matches()[0];
let lineMatch = fileMatch.matches()[0];
assert.equal(ds.getId(null, result), 'root');
assert.equal(ds.getId(null, fileMatch), 'file:///c%3A/foo');
assert.equal(ds.getId(null, lineMatch), 'file:///c%3A/foo>1>0');
assert(!ds.hasChildren(null, 'foo'));
assert(ds.hasChildren(null, result));
assert(ds.hasChildren(null, fileMatch));
assert(!ds.hasChildren(null, lineMatch));
});
test('Sorter', function () {
let fileMatch1 = new FileMatch(null, uri.file('C:\\foo'));
let fileMatch2 = new FileMatch(null, uri.file('C:\\with\\path'));
let fileMatch3 = new FileMatch(null, uri.file('C:\\with\\path\\foo'));
let lineMatch1 = new Match(fileMatch1, 'bar', 1, 1, 1);
let lineMatch2 = new Match(fileMatch1, 'bar', 2, 1, 1);
let lineMatch3 = new Match(fileMatch1, 'bar', 2, 1, 1);
let s = new SearchSorter();
assert(s.compare(null, fileMatch1, fileMatch2) < 0);
assert(s.compare(null, fileMatch2, fileMatch1) > 0);
assert(s.compare(null, fileMatch1, fileMatch1) === 0);
assert(s.compare(null, fileMatch2, fileMatch3) < 0);
assert(s.compare(null, lineMatch1, lineMatch2) < 0);
assert(s.compare(null, lineMatch2, lineMatch1) > 0);
assert(s.compare(null, lineMatch2, lineMatch3) === 0);
});
}); |
Two games back in the PL and two defeats and no goals. I thought Rafa had us well organised at SJP against a much superior Spurs team and we were containing them well enough with Atsu and Ritchie combining really well down the flanks and Gayle’s willingness to run all afternoon always useful for us. A moment of madness from Shelvey handed the game and points to Tottenham and despite his apology he still misses three games when we desperately need all of the quality we’ve got. It’s not the first time and he needs to grow up quickly. Off we went to Huddersfield on the back of their morale boosting win at Palace and we were beaten 1-0.
We can mutter about the narrow margins in games as well as some genuinely appalling refereeing but the truth is we were wholly unconvincing and lacked any kind of cutting edge in attack. Fundamentally, we played like many of us expected us to play if there was not some astute and well considered purchases of players in the summer. It’s too early to make any kinds of judgements of the players Rafa has been allowed to sign but one thing is for certain, the manager is far from happy at what he has been given to work with and there are well-placed reports of significant tensions behind the scenes at United. Everything changes, nothing changes.
All of the noises are that Rafa believes promises made to him by Mike Ashley have been broken. I would go further and say I believe Benitez has been lied to and Ashley has significant form for this. See enclosed:
https://www.theguardian.com/football/2009/oct/02/kevin-keegan-constructive-dismissal-newcastle
This is just my reading of the situation but I don’t see anything fundamental changing in the last two weeks of another tortuous transfer window. I can’t see Ashley – who has made a rare appearance on SKY to tell us all about how a billionaire such as he has no money and won’t be putting any dough into United – suddenly finding money to put into the club and giving Rafa what he needs and indeed what he was promised back in May.
Frankly, Rafa and the supporters have been taken for a ride by Ashley, who is a man who simply cannot be trusted. I’ve been fond of repeating that Rafa is the best thing that has happened to our club since SBR was taking us into the Champions League. He is the best chance Newcastle United has for sustainable success over the longer term but that belief is draining away with the dawning realisation that isn’t what Mike Ashley wants for the club.
I want Rafa Benitez at United, building and investing in every area of the club so that we improve year on year and move forward, no matter how difficult it is at the top of the table. But it’s not going to be possible and frankly Rafa owes it to extricate himself from Ashley sooner rather than later for the sake of his own career, self-respect and credibility. The risk for Rafa of course is to lend Ashley some remaining credibility and defence by staying at United and offering a thread of hope that things will change when we all know under Ashley, they never will.
The big question of course is what will happen if Rafa walks. I don’t doubt some desperate dope in the Pardew, McClaren mode will be delighted to get a way back into football and be prepared to take the flak in return for a large salary and a way back into football. It would not surprise me one iota if those approaches were not being made already.
It’s a very personal thing but I really don’t think I could continue to click through the turnstile at SJP to bear witness to a Zombie club going through the motions when the owner is simply interested in its increasing value (on the back of improving TV deals) whilst sucking all of the joy out of the place. I know I’ve more or less paid for my season ticket and I’ll have to write that off but I don’t think I could sit, wasting my time with the stomach churning upset of seeing something I’ve loved being turned into a commodity for the benefit of a man who generates so much contempt in me and many others.
What others do is up to them. I know some will feel like me and chuck it. Others won’t and will keep going because they haven’t reached their own low just yet. Some will feel qualified and justified to insult people like me and reckon I’m not a proper supporter and all of that. Frankly, what those type of people come up with is irrelevant to me. I’m not interested in their opinion. There might be some who want to organise, take action and make attempts to make Ashley’s ownership of United untenable. I was massively up for this after KK was pushed out because that was the point when everything we needed to know about Ashley was presented right in front of us in all of its ugly detail. But for one reason or another it didn’t happen.
Some might step forward and attempt to arrange the support into some clever and well organised action but I doubt they will get meaningful backing from those in the stands. One of the big surprises I’ve had over the last ten years is just how much our support has changed. It is by and large docile and has lost its edge. For large swathes of the support going to home games is a social occasion. It’s one where people meet up with friends for a drink, a meal and the football is just a vehicle to bring all of that together. I’m no different to be honest but the event for me has lost its sheen as so many of my match-going mates have reached their own personal points of no-return and chucked it. They all love Newcastle United and as one explains to me in painful detail, that is why he can’t bear to be inside SJP watching a fake version of his team. Whether any Rafa resignation would generate a new period of supporter activism remains to be seen but I seriously doubt it. I’m not really sure enough people really care to engage with something so admittedly difficult or have the gumption to do something that interrupts their social lives.
I’ve also heard people say things have been worse and we’ve stuck by United. In my time I’ve experienced the real lows of the McGarry and the Smith-Ardiles era. They weren’t fun but this is different and much worse. Under Ashley, the mediocrity of United is being deliberately orchestrated by him for his own personal ends i.e. the increasing value of Newcastle United because of the ever expanding TV deals and the global exposure television gives his businesses. You just couldn’t buy that amount of advertising.
We’ve got Forest this week in the League Cup. On the terms of Mike Ashley, we can’t really afford to compete in that competition when the meagre resources available to Rafa need to be deployed to gild his asset and its increasing value by staying in the PL. Does anyone think we’ll have a go at this competition? I remain to be convinced.
West Ham on Saturday could be one where the balloon goes up because if we lose the alarm bells will start to ring and the mood will change massively. It is an absolutely massive game for both clubs but it would not surprise me in the slightest if Benitez ends up in the dug-out of the Hammers in the not too distant future given Bilic being so unconvincing for so long and the well-reported interest the East Londoners have in Rafa.
*
But back to the transfer window. It’s been reported Rafa needs to get players out of the club who he doesn’t need. We do have too many players who contribute nothing and are highly unlikely to any time soon:
Riviere
Haidara
Saviet
Goode
Krul
Hanley
Gomez
Lazaar
Colback
De Jong
The problem with these players of course is they are on far too much money in relation to their talents. This is the difficulty presented by consistently failing to pay the going rate for good players and pissing what money you do have up the wall paying poor players more than they deserve. No agent is going to advise his client to leave a massive salary for a poor one so we are stuck with them unless Charnley (don’t laugh) can work some kind of financial magic and arrange deals that allow them to leave.
It really is a huge problem of Ashley’s own making but it is one which is going to threaten another relegation if it isn’t resolved (and it doesn’t look like being).
So far, Ashley’s incompetence and venality has cost Newcastle United north of £170m in lost revenue through his two relegations and with Rafa undoubtedly on the brink and a squad woefully short, the prospect of adding to that total is very real.
Other than that, things are great.
Keep On, Keepin’ On …
MICHAEL MARTIN – Follow Michael on @tfmichael1892 |
package me.luligabi.basicaiots.common.item;
import net.minecraft.client.item.TooltipContext;
import net.minecraft.item.ItemStack;
import net.minecraft.item.ToolMaterial;
import net.minecraft.text.Text;
import net.minecraft.text.TranslatableText;
import net.minecraft.util.Formatting;
import net.minecraft.world.World;
import java.util.List;
/**
 * AIOT tool variant used when both Industrial Revolution and Tech Reborn are
 * present; it only adds an explanatory note to the item's tooltip.
 */
public class IndustrialRevolutionTechRebornToolItem extends AiotToolItem {

    public IndustrialRevolutionTechRebornToolItem(float attackDamage, float attackSpeed, ToolMaterial material, Settings settings) {
        super(attackDamage, attackSpeed, material, settings);
    }

    /** Appends the blue, italic compatibility note to the tooltip list. */
    @Override
    public void appendTooltip(ItemStack itemStack, World world, List<Text> tooltip, TooltipContext tooltipContext) {
        Text note = new TranslatableText("item.basicaiots.indrev_techreborn.tooltip")
                .formatted(Formatting.BLUE, Formatting.ITALIC);
        tooltip.add(note);
    }
}
|
An inexpensive Plexiglas metabolism cage for the squirrel monkey (Saimiri sciureus).
A light, sturdy, and inexpensive metabolism cage for use with the squirrel monkey (Saimiri sciureus) is described. The design allows for efficient urine and feces separation, ease of maintenance, and little interference with individual and social behavior patterns. Standard laboratory procedures for reduction of sampling error are presented, as well as results of a pilot study using the described apparatus. The design of the cage is such that, with minor modifications, it can be utilized by other small primates.
#include<bits/stdc++.h>
#define ll long long
using namespace std;
int main(int argc, char const *argv[])
{
ll c,v0,v1,a,l;
ll day=0;
cin>>c>>v0>>v1>>a>>l;
ll p=v0;
day++;
while(p<c){
p-=l;
if(v0+day*a<=v1) p+=v0+a*day;
else p+=v1;
day++;
}
cout<<day;
return 0;
} |
import { ButtonHTMLAttributes, forwardRef } from 'react';
import { ISpacingProps } from 'types/spacing.types';
import Typography from 'components/Typography';
import { StyledButton } from './Button.styles';
/**
 * Props accepted by {@link Button}. Extends the native button attributes and
 * the shared spacing props; any extra props are forwarded to the root node.
 */
export interface IButtonProps
  extends ISpacingProps,
    ButtonHTMLAttributes<HTMLButtonElement> {
  /**
   * The variant to use.
   * @default 'primary'
   */
  variant?: 'primary' | 'secondary';
  /**
   * The component used for the root node.
   * @default 'button'
   */
  component?: 'button' | 'a' | 'span';
  /**
   * If true, the button will take up the full width of its container.
   * @default false
   */
  fullWidth?: boolean;
}
/**
 * Styled button that renders its children through the design-system
 * Typography component. The ref is forwarded to the root element.
 */
export const Button = forwardRef<HTMLButtonElement, IButtonProps>(
  (props, ref) => {
    const {
      children,
      component = 'button',
      variant = 'primary',
      fullWidth = false,
      ...restProps
    } = props;

    // Secondary buttons use white text; primary uses the themed text color.
    const textColor = variant === 'primary' ? 'textPrimary' : 'white';

    return (
      <StyledButton
        as={component}
        ref={ref}
        variant={variant}
        fullWidth={fullWidth}
        {...restProps}
      >
        <Typography variant="buttonText" color={textColor}>
          {children}
        </Typography>
      </StyledButton>
    );
  },
);
|
import React from 'react';
import { connect } from 'react-redux';
import { setEmailOperation } from '../../actions';
import SimilarButton from './SimilarButton';
import {FaReply, FaReplyAll, FaTrashAlt, FaShare} from "react-icons/fa";
export const EmailOperations = props => {
return (
<>
<SimilarButton />
<FaReply
onClick={() => {
props.setEmailOperation('reply')
}}
/>
<FaReplyAll
onClick={() => {
props.setEmailOperation('replyall')
}}
/>
<FaShare
onClick={() => {
props.setEmailOperation('forward')
}}
/>
<FaTrashAlt
onClick={() => {
//setReplyIsHidden(false);
// todo: need a delete email function that moves the email from emails array in imap to a deleted array so that it lives inside of "trash" before permanently deleting
}}
/>
</>
)
}
export default connect(null,{setEmailOperation})(EmailOperations);
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.weex.util;
import android.app.Activity;
import android.support.v7.widget.RecyclerView;
import android.test.InstrumentationTestCase;
import android.test.TouchUtils;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.ScrollView;
import com.taobao.weex.ui.view.WXTextView;
import java.util.ArrayList;
/**
 * View-tree helpers for instrumentation tests: locating views by text,
 * collecting descendants, measuring views and simulating drags.
 *
 * Created by admin on 16/3/29.
 */
public class ViewUtil {

    /**
     * Returns all views under {@code vg} whose displayed text contains
     * {@code text}. First tries the framework lookup; if that finds nothing,
     * falls back to scanning for {@link WXTextView}s, whose text is drawn
     * directly and is invisible to {@link View#findViewsWithText}.
     */
    public static ArrayList<View> findViewWithText(ViewGroup vg, String text) {
        ArrayList<View> out = new ArrayList<View>();
        if (null != vg && !TextUtils.isEmpty(text)) {
            vg.findViewsWithText(out, text, View.FIND_VIEWS_WITH_TEXT);
        }
        if (out.size() == 0) {
            // Fallback: manual scan of the whole subtree for WXTextViews.
            ArrayList<View> wxTextViewList = getAllChildViews((View) vg);
            for (View wxText : wxTextViewList) {
                if (wxText instanceof WXTextView) {
                    String value = ((WXTextView) wxText).getText().toString();
                    if (value.contains(text)) {
                        Log.e("TestFlow", "find target:" + text + "|" + "actualVale=" + value);
                        out.add(wxText);
                    }
                }
            }
        }
        return out;
    }

    /** Collects every view in the activity's decor-view hierarchy. */
    public static ArrayList<View> getAllChildViews(Activity activity) {
        View view = activity.getWindow().getDecorView();
        return getAllChildViews(view);
    }

    /** Recursively collects all descendant views of {@code view} (depth-first). */
    public static ArrayList<View> getAllChildViews(View view) {
        ArrayList<View> allchildren = new ArrayList<View>();
        if (view instanceof ViewGroup) {
            ViewGroup vp = (ViewGroup) view;
            for (int i = 0; i < vp.getChildCount(); i++) {
                View viewchild = vp.getChildAt(i);
                allchildren.add(viewchild);
                allchildren.addAll(getAllChildViews(viewchild));
            }
        }
        return allchildren;
    }

    /** Recursively collects all descendant views of the given container. */
    public static ArrayList<View> getAllChildViews(ViewGroup view) {
        ArrayList<View> allchildren = new ArrayList<View>();
        for (int i = 0; i < view.getChildCount(); i++) {
            View viewchild = view.getChildAt(i);
            allchildren.add(viewchild);
            allchildren.addAll(getAllChildViews(viewchild));
        }
        return allchildren;
    }

    /**
     * Returns the first scrollable descendant ({@link ScrollView} or
     * {@link RecyclerView}) of {@code view}, or {@code null} if none exists.
     */
    public static View getFirstChildScrollableView(ViewGroup view) {
        if (null != view) {
            for (View eachView : getAllChildViews(view)) {
                if (eachView instanceof ScrollView
                        || eachView instanceof RecyclerView) {
                    return eachView;
                }
            }
        }
        return null;
    }

    /**
     * Recursively collects every {@link EditText} in the tree rooted at
     * {@code view}. Fix: the previous implementation only inspected direct
     * children of the root (and recursed via getAllChildViews on EditTexts,
     * which are not ViewGroups), so nested EditTexts were never found.
     */
    public static ArrayList<View> getAllEditTextViews(View view) {
        ArrayList<View> allchildren = new ArrayList<View>();
        if (view instanceof ViewGroup) {
            ViewGroup vp = (ViewGroup) view;
            for (int i = 0; i < vp.getChildCount(); i++) {
                View viewchild = vp.getChildAt(i);
                if (viewchild instanceof EditText) {
                    allchildren.add(viewchild);
                }
                // Descend into nested containers as well.
                allchildren.addAll(getAllEditTextViews(viewchild));
            }
        }
        return allchildren;
    }

    /**
     * Measures {@code view} with unconstrained specs and returns its measured
     * width in pixels.
     */
    public static int getWidth(View view) {
        int w = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
        int h = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
        view.measure(w, h);
        return view.getMeasuredWidth();
    }

    /**
     * Measures {@code view} with unconstrained specs and returns its measured
     * height in pixels.
     */
    public static int getHeight(View view) {
        int w = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
        int h = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
        view.measure(w, h);
        return view.getMeasuredHeight();
    }

    /** Simulates a quarter-screen upward drag on the given activity. */
    public static void dragQuarterScreenUp(InstrumentationTestCase test, Activity activity) {
        TouchUtils.dragQuarterScreenUp(test, activity);
    }
}
|
#pragma once
#include <ctime>
#include <cstring>
#include <string>
#include <vector>
#include <tuple>
#include <array>
#include <typeinfo>
#include <chrono>
#include <mutex>
#include <atomic>
#include <functional>
#include <cassert>
#include "mysql.h"
#include "db_meta.hpp"
#include "exception.hpp"
#include "reflection.hpp"
#include "db_common.h"
namespace sqlcpp::mysql {
// Adapter allowing a Unix timestamp to be bound as a MySQL TIMESTAMP value.
struct mysql_timestamp {
    MYSQL_TIME mt{};

    // Converts the given Unix epoch (seconds) to local broken-down time.
    // NOTE(review): ::localtime returns a shared static buffer and is not
    // thread-safe, and its result is not null-checked here — consider
    // localtime_r / localtime_s; confirm the library's threading contract.
    mysql_timestamp(uint64_t timestamp) {
        time_t ts = static_cast<time_t>(timestamp);
        auto s = localtime(&ts);
        // tm_year is years since 1900; tm_mon is 0-based.
        mt.year = (unsigned int)s->tm_year + 1900;
        mt.month = (unsigned int)s->tm_mon + 1;
        mt.day = (unsigned int)s->tm_mday;
        mt.hour = (unsigned int)s->tm_hour;
        mt.minute = (unsigned int)s->tm_min;
        mt.second = (unsigned int)s->tm_sec;
    }
};
// Tag type marking a string that should be bound as a MEDIUMTEXT/MEDIUMBLOB
// column rather than an ordinary string parameter.
struct mysql_mediumtext {
    std::string content;

    mysql_mediumtext() = default;
    mysql_mediumtext(std::string&& con) : content(std::move(con)) {}
};
// RAII wrapper over one libmysqlclient connection plus a reusable prepared
// statement handle. Instances are non-copyable; a process-wide atomic counter
// tracks how many connections are alive (logged on create/destroy).
class connection {
private:
    std::string ip_;                 // server address this connection targets
    bool is_health_ = false;         // cleared whenever a mysql call fails
    MYSQL* ctx_ = nullptr;           // raw connection handle
    MYSQL_STMT* smt_ctx_ = nullptr;  // statement handle reused by query()
    scope_guard<std::function<void()>> deleter_{};  // closes stmt/conn handles
    inline static std::mutex mtx_{};                // serializes mysql_init
    inline static std::atomic<int> conn_count_ = 0; // live connection count
public:
    connection(const connection&) = delete;
    connection& operator=(const connection&) = delete;

    // Establishes the connection described by opt; throws mysql_exception on
    // failure. The scope guard is installed before any handle is created so
    // the handles are released even if a later step throws.
    connection(const connection_options& opt)
        :ip_(opt.ip)
    {
        deleter_.set_releaser([this]() {
            if (smt_ctx_) {
                mysql_stmt_close(smt_ctx_);
            }
            if (ctx_) {
                mysql_close(ctx_);
            }
        });
        // Lock held only around mysql_init — presumably because the first
        // mysql_init performs non-thread-safe library initialization; confirm
        // against the mysql_library_init documentation.
        std::unique_lock<std::mutex> lock(mtx_);
        ctx_ = mysql_init(nullptr);
        lock.unlock();
        assert(ctx_);
        connect(opt);
        smt_ctx_ = mysql_stmt_init(ctx_);
        assert(smt_ctx_);
        is_health_ = true;
        conn_count_++;
        printf("mysql create conn <%s>, count:%d\n", ip_.c_str(), conn_count_.load());
    }

    ~connection()
    {
        conn_count_--;
        printf("mysql release conn <%s>, count:%d\n", ip_.c_str(), conn_count_.load());
    }

    // Address of the connected server.
    std::string& get_ip() {
        return ip_;
    }

    // True if the server is currently reachable over this connection.
    bool ping() {
        return mysql_ping(ctx_) == 0;
    }
void execute(const std::string& sql) {
auto ret = mysql_query(ctx_, sql.c_str());
if (ret != 0) {
is_health_ = false;
auto error_msg = std::string("Failed to excute sql<") + sql + ">: " + mysql_error_msg();
throw except::mysql_exception(std::move(error_msg));
}
}
    // Session-level transaction helpers; each issues the plain SQL verb.
    void begin_transaction() {
        execute("START TRANSACTION");
    }
    void commit_transaction() {
        execute("COMMIT");
    }
    void rollback() {
        execute("ROLLBACK");
    }
    // AUTO_INCREMENT id generated by the last prepared INSERT on this handle.
    uint64_t get_last_insert_id() {
        return mysql_stmt_insert_id(smt_ctx_);
    }
    // False once any call on this connection has failed.
    bool is_health() {
        return is_health_;
    }
    // Process-wide count of live connection objects.
    auto get_conn_count() {
        return conn_count_.load();
    }
    // Executes a prepared statement and maps each result row onto ReturnType,
    // where ReturnType is a std::tuple or a reflection-enabled struct.
    // Marks the connection unhealthy and throws mysql_exception on failure.
    template<typename ReturnType, typename... Args>
    std::enable_if_t<is_tuple_v<ReturnType> || reflection::is_reflection_v<ReturnType>, std::vector<ReturnType>>
    query(std::string_view statement_sql, Args&&...args) {
        before_execute<ReturnType>(statement_sql, std::forward<Args>(args)...);
        //execute
        auto ret = mysql_stmt_execute(smt_ctx_);
        if (ret != 0) {
            is_health_ = false;
            auto error_msg = std::string("Failed to stmt_execute : ") + mysql_error_msg();
            throw except::mysql_exception(std::move(error_msg));
        }
        // Column count comes from the tuple arity or the reflected field count.
        if constexpr (is_tuple_v<ReturnType>) {
            return after_execute<std::tuple_size_v<ReturnType>, ReturnType>();
        }
        else {
            return after_execute<ReturnType::args_size_t::value, ReturnType>();
        }
    }

    // Single-column variant: every result row is one scalar/string value.
    template<typename ReturnType, typename... Args>
    std::enable_if_t<!is_tuple_v<ReturnType> && !reflection::is_reflection_v<ReturnType> && !std::is_same_v<ReturnType, void>,
        std::vector<ReturnType>> query(std::string_view statement_sql, Args&&...args) {
        before_execute<ReturnType>(statement_sql, std::forward<Args>(args)...);
        //execute
        auto ret = mysql_stmt_execute(smt_ctx_);
        if (ret != 0) {
            is_health_ = false;
            auto error_msg = std::string("failed to stmt_execute : ") + mysql_error_msg();
            throw except::mysql_exception(std::move(error_msg));
        }
        return after_execute<1, ReturnType>();
    }

    // No-result variant (INSERT/UPDATE/DELETE/DDL): executes and returns void.
    template<typename ReturnType, typename... Args>
    std::enable_if_t<std::is_same_v<ReturnType, void>>
    query(std::string_view statement_sql, Args&&...args) {
        before_execute<void>(statement_sql, std::forward<Args>(args)...);
        auto ret = mysql_stmt_execute(smt_ctx_);
        if (ret != 0) {
            is_health_ = false;
            auto error_msg = std::string("failed to stmt_execute : ") + mysql_error_msg();
            throw except::mysql_exception(std::move(error_msg));
        }
    }
private:
    // Last error text reported by the underlying connection handle.
    std::string mysql_error_msg() {
        return std::string(mysql_error(ctx_));
    }

    // Performs the real connect with a 3-second timeout and auto-reconnect
    // enabled. No default database is selected here, so SQL must qualify the
    // schema or select it later. Throws mysql_exception on failure.
    void connect(const connection_options& opt) {
        int timeout = 3; //3s
        mysql_options(ctx_, MYSQL_OPT_CONNECT_TIMEOUT, &timeout);
        char value = 1; //yes
        mysql_options(ctx_, MYSQL_OPT_RECONNECT, &value);
        auto ret = mysql_real_connect(ctx_, opt.ip.c_str(), opt.user.c_str(), opt.passwd.c_str(),
            nullptr, (unsigned int)std::atoi(opt.port.c_str()), nullptr, 0);
        if (!ret) {
            auto error_msg = std::string("Failed to connect to database: ") + mysql_error_msg();
            throw except::mysql_exception(std::move(error_msg));
        }
    }
template <typename T>
constexpr std::enable_if_t<is_optional_v<std::decay_t<T>>> build_bind_param(MYSQL_BIND& param, T&& t) {
using U = typename std::remove_cv_t<std::remove_reference_t<decltype(t)>>::value_type;
if (!t.has_value()) {
param.buffer_type = MYSQL_TYPE_NULL;
return;
}
if constexpr (std::is_arithmetic_v<U>) { //built-in types
param.buffer = const_cast<U*>(&t.value());
param.buffer_type = mysql_type_map(t.value()).first;
if constexpr (!std::is_same_v<U, float> && !std::is_same_v<U, double>) {
param.is_unsigned = mysql_type_map(t.value()).second;
}
}
else if constexpr (is_char_array_v<U>) {
/*param.buffer = const_cast<char*>(&t[0]);
param.buffer_length = sizeof(t);*/
param.buffer = const_cast<char*>(t.value());
param.buffer_length = (decltype(param.buffer_length))strlen(t.value());
param.buffer_type = mysql_type_map(t.value()).first;
}
else if constexpr (is_char_pointer_v<U>) {
param.buffer = const_cast<char*>(t.value());
param.buffer_length = (decltype(param.buffer_length))strlen(t.value());
param.buffer_type = mysql_type_map(t.value()).first;
}
else if constexpr (std::is_convertible_v<U, std::string> || std::is_same_v<U, std::string_view>) {
std::string_view str{ t.value().data(),t.value().length() };
param.buffer = (char*)(str.data());
param.buffer_length = (decltype(param.buffer_length))str.length();
param.buffer_type = mysql_type_map(t.value()).first;
}
else if constexpr (std::is_same_v<U, mysql_timestamp>) {
param.buffer = const_cast<MYSQL_TIME*>(&t.value().mt);
param.buffer_type = mysql_type_map(t.value()).first;
}
else if constexpr (std::is_same_v<U, mysql_mediumtext>) {
std::string_view str{ t.value().content.data(),t.value().content.length() };
param.buffer = (char*)(str.data());
param.buffer_length = (decltype(param.buffer_length))str.length();
param.buffer_type = mysql_type_map(t).first;
}
else {
static_assert(always_false_v<U>, "type do not match");
}
}
    // Fills one MYSQL_BIND input parameter from a plain (non-optional) value.
    // The referenced value must stay alive until the statement executes.
    template <typename T>
    constexpr std::enable_if_t<!is_optional_v<std::decay_t<T>>> build_bind_param(MYSQL_BIND& param, T&& t) {
        using U = std::remove_cv_t<std::remove_reference_t<decltype(t)>>;
        if constexpr (std::is_arithmetic_v<U>) { //built-in types
            param.buffer = const_cast<U*>(&t);
            param.buffer_type = mysql_type_map(t).first;
            // is_unsigned is only meaningful for integer buffer types.
            if constexpr (!std::is_same_v<U, float> && !std::is_same_v<U, double>) {
                param.is_unsigned = mysql_type_map(t).second;
            }
        }
        else if constexpr (is_char_array_v<U>) {
            /*param.buffer = const_cast<char*>(&t[0]);
            param.buffer_length = sizeof(t);*/
            param.buffer = const_cast<char*>(t);
            param.buffer_length = (decltype(param.buffer_length))strlen(t);
            param.buffer_type = mysql_type_map(t).first;
        }
        else if constexpr (is_char_pointer_v<U>) {
            param.buffer = const_cast<char*>(t);
            param.buffer_length = (decltype(param.buffer_length))strlen(t);
            param.buffer_type = mysql_type_map(t).first;
        }
        else if constexpr (std::is_convertible_v<U, std::string> || std::is_same_v<U, std::string_view>) {
            std::string_view str(std::forward<T>(t));
            param.buffer = (char*)(str.data());
            param.buffer_length = (decltype(param.buffer_length))str.length();
            param.buffer_type = mysql_type_map(t).first;
        }
        else if constexpr (std::is_same_v<U, mysql_timestamp>) {
            param.buffer = const_cast<MYSQL_TIME*>(&t.mt);
            param.buffer_type = mysql_type_map(t).first;
        }
        else if constexpr (std::is_same_v<U, mysql_mediumtext>) {
            std::string_view str(t.content);
            param.buffer = (char*)(str.data());
            param.buffer_length = (decltype(param.buffer_length))str.length();
            param.buffer_type = mysql_type_map(t).first;
        }
        else {
            static_assert(always_false_v<U>, "type do not match");
        }
    }
    // Fills one MYSQL_BIND output (result) parameter targeting an optional.
    // String-like columns are fetched into a temporary buffer owned by `buf`
    // (pair of storage and actual length); is_null lets the driver report NULL.
    // NOTE(review): string results use a fixed 65536-byte buffer — confirm how
    // after_execute handles values longer than that (truncation risk).
    template <typename T>
    std::enable_if_t<is_optional_v<std::decay_t<T>>>
    build_result_param(std::vector<std::pair<std::vector<char>, unsigned long>>& buf, MYSQL_BIND& param, T&& t, bool* is_null) {
        using U = typename std::remove_cv_t<std::remove_reference_t<decltype(t)>>::value_type;
        if constexpr (std::is_arithmetic_v<U>) { //built-in types
            // Materialize a default value so the driver has storage to write to.
            t.emplace(U{});
            param.buffer = &t.value();
            param.buffer_type = this->mysql_type_map(U{}).first;
            param.is_null = is_null;
        }
        else if constexpr (is_char_pointer_v<U> || is_char_array_v<U>) {
            static_assert(always_false_v<U>, "use std::string instead of char pointer or char array");
        }
        else if constexpr (std::is_convertible_v<U, std::string>) {
            std::vector<char> tmp(65536, 0);
            buf.emplace_back(std::move(tmp), -1);
            param.buffer = &(buf.back().first[0]);
            param.buffer_length = 65536;
            param.buffer_type = mysql_type_map(U{}).first;
            param.length = &(buf.back().second);
            param.is_null = is_null;
        }
        else if constexpr (std::is_same_v<U, mysql_mediumtext>) {
            // MEDIUMTEXT can hold up to 16 MiB, so reserve the full size.
            constexpr size_t size = 16 * 1024 * 1024;
            std::vector<char> tmp(size, 0);
            buf.emplace_back(std::move(tmp), -1);
            param.buffer = &(buf.back().first[0]);
            param.buffer_length = size;
            param.buffer_type = mysql_type_map(U{}).first;
            param.length = &(buf.back().second);
            param.is_null = is_null;
        }
        else {
            static_assert(always_false_v<U>, "type do not match");
        }
    }

    // Non-optional variant: binds directly into `t` for arithmetic columns,
    // otherwise into a temporary buffer owned by `buf`. NULL indicator is not
    // wired up here (non-optional targets cannot represent NULL).
    template <typename T>
    std::enable_if_t<!is_optional_v<std::decay_t<T>>>
    build_result_param(std::vector<std::pair<std::vector<char>, unsigned long>>& buf, MYSQL_BIND& param, T&& t, bool* = nullptr) {
        using U = std::remove_cv_t<std::remove_reference_t<decltype(t)>>;
        if constexpr (std::is_arithmetic_v<U>) { //built-in types
            param.buffer = &t;
            param.buffer_type = mysql_type_map(t).first;
        }
        /*else if constexpr (is_char_array_v<U>) {
            param.buffer = &t[0];
            param.buffer_length = sizeof(t);
            param.buffer_type = mysql_type_map(t).first;
        }*/
        else if constexpr (is_char_pointer_v<U> || is_char_array_v<U>) {
            static_assert(always_false_v<U>, "use std::string instead of char pointer or char array");
        }
        else if constexpr (std::is_convertible_v<U, std::string>) {
            std::vector<char> tmp(65536, 0);
            buf.emplace_back(std::move(tmp), -1);
            param.buffer = &(buf.back().first[0]);
            param.buffer_length = 65536;
            param.buffer_type = mysql_type_map(t).first;
            param.length = &(buf.back().second);
        }
        else if constexpr (std::is_same_v<U, mysql_mediumtext>) {
            constexpr size_t size = 16 * 1024 * 1024;
            std::vector<char> tmp(size, 0);
            buf.emplace_back(std::move(tmp), -1);
            param.buffer = &(buf.back().first[0]);
            param.buffer_length = size;
            param.buffer_type = mysql_type_map(U{}).first;
            param.length = &(buf.back().second);
        }
        else {
            static_assert(always_false_v<U>, "type do not match");
        }
    }
    // Compile-time map from a C++ value type to the pair
    // {MYSQL buffer type, is_unsigned flag} used when binding parameters and
    // results. Unsupported types fail with a static_assert.
    template <typename T>
    static constexpr auto mysql_type_map(T) {
        if constexpr (std::is_same_v<int8_t, T>) { //signed char
            return std::pair{ MYSQL_TYPE_TINY ,false };
        }
        else if constexpr (std::is_same_v<uint8_t, T>) { //unsigned char
            return std::pair{ MYSQL_TYPE_TINY ,true };
        }
        else if constexpr (std::is_same_v<int16_t, T>) { //short int
            return std::pair{ MYSQL_TYPE_SHORT ,false };
        }
        else if constexpr (std::is_same_v<uint16_t, T>) { //unsigned short int
            return std::pair{ MYSQL_TYPE_SHORT ,true };
        }
        else if constexpr (std::is_same_v<int32_t, T>) { //int
            return std::pair{ MYSQL_TYPE_LONG ,false };
        }
        else if constexpr (std::is_same_v<uint32_t, T>) {//unsigned int
            return std::pair{ MYSQL_TYPE_LONG ,true };
        }
        else if constexpr (std::is_same_v<float, T>) {// float
            return std::pair{ MYSQL_TYPE_FLOAT ,false };
        }
        else if constexpr (std::is_same_v<double, T>) {// double
            return std::pair{ MYSQL_TYPE_DOUBLE ,false };
        }
        else if constexpr (std::is_same_v<int64_t, T>) {//long long int
            return std::pair{ MYSQL_TYPE_LONGLONG ,false };
        }
        else if constexpr (std::is_same_v<uint64_t, T>) {//unsigned long long int
            return std::pair{ MYSQL_TYPE_LONGLONG ,true };
        }
        else if constexpr (std::is_convertible_v<T, std::string> || is_char_array_v<T>
            || std::is_same_v<T, std::string_view>) { //str
            return std::pair{ MYSQL_TYPE_STRING ,false };
        }
        else if constexpr (std::is_same_v<mysql_timestamp, T>) {
            return std::pair{ MYSQL_TYPE_TIMESTAMP ,false };
        }
        else if constexpr (std::is_same_v<mysql_mediumtext, T>) {
            return std::pair{ MYSQL_TYPE_MEDIUM_BLOB ,false };
        }
        /*else if constexpr (std::is_same_v<void, T>) {
            return std::pair{ MYSQL_TYPE_NULL ,false };
        }*/
        else {
            static_assert(always_false_v<T>, "can not map to mysql type");
        }
    }
template<typename ReturnType, typename... Args>
void before_execute(std::string_view statement_sql, Args&&...args) {
//last_active_ = std::chrono::steady_clock::now();
//prepare
auto ret = mysql_stmt_prepare(smt_ctx_, statement_sql.data(), (unsigned long)statement_sql.length());
if (ret != 0) {
is_health_ = false;
auto error_msg = std::string("Failed to stmt_prepare sql<") + std::string(statement_sql) + ">: " + mysql_error_msg();
throw except::mysql_exception(std::move(error_msg));
}
//check input size match
auto placeholder_size = mysql_stmt_param_count(smt_ctx_);
constexpr auto args_size = sizeof...(args);
if (placeholder_size != args_size) {
throw except::mysql_exception("param size do not match placeholder size");
}
//check output size match
if constexpr (!std::is_same_v<ReturnType, void>) { //tuple or reflect struct or single type
auto meta_result = std::unique_ptr<MYSQL_RES, void(*)(MYSQL_RES*)>(mysql_stmt_result_metadata(smt_ctx_), [](MYSQL_RES* p) {if (p) mysql_free_result(p); });
if (!meta_result) {
auto error_msg = std::string("Failed to stmt_result_metadata : ") + mysql_error_msg();
throw except::mysql_exception(std::move(error_msg));
}
auto column_count = mysql_num_fields(meta_result.get());
if constexpr (is_tuple_v<ReturnType>) {
if (column_count != std::tuple_size_v<ReturnType>) {
throw except::mysql_exception("columns in the query do not match tuple element size");
}
}
else if constexpr (reflection::is_reflection_v<ReturnType>) {
if (column_count != ReturnType::args_size_t::value) {
throw except::mysql_exception("columns in the query do not match struct element size");
}
}
else {
if (column_count != 1) { //single type
throw except::mysql_exception("columns size in the query must be 1");
}
}
}
if constexpr (args_size > 0) {
//initialize
std::array<MYSQL_BIND, args_size> param_binds{};
auto param_tup = std::forward_as_tuple(std::forward<Args>(args)...);
for_each_tuple([¶m_tup, ¶m_binds, this](auto index) {
this->build_bind_param(param_binds[index], std::get<index>(param_tup));
}, std::make_index_sequence<args_size>());
//bind
ret = mysql_stmt_bind_param(smt_ctx_, ¶m_binds[0]);
if (ret != 0) {
auto error_msg = std::string("Failed to stmt_bind_param : ") + mysql_error_msg();
throw except::mysql_exception(std::move(error_msg));
}
}
}
// Copies one fetched column out of the scratch buffers into `e`.
// `iter` walks the buffer list built by build_result_param and is advanced
// only for string-like columns: arithmetic columns were fetched directly
// into their destination and own no scratch buffer.
// A length slot still holding (unsigned long)-1 (its initial value) is
// treated as "no data" and leaves `e` untouched.
template<typename Element, typename BufIter>
void assign_result(bool is_field_null, Element&& e, BufIter&& iter) {
    using T = std::decay_t<Element>;
    if constexpr (is_optional_v<T>) {
        if constexpr (std::is_arithmetic_v<typename T::value_type>) {
            if (is_field_null) { // table field is NULL: reset to an empty optional instead of a default-valued one
                T temp{};
                e.swap(temp);
            }
        }
        else { //std::optional<std::string> or std::optional<mysql_mediumtext>
            if (iter->second != (unsigned long)-1) {
                if constexpr (std::is_same_v<typename T::value_type, mysql_mediumtext>) {
                    e = mysql_mediumtext{ std::string(iter->first.data(), iter->second) };
                }
                else if constexpr (std::is_same_v<typename T::value_type, std::string>) {
                    e = std::string(iter->first.data(), iter->second);
                }
            }
            // Consume this column's scratch buffer even when it was NULL.
            iter++;
        }
    }
    else {
        if constexpr (std::is_same_v<T, std::string> || std::is_same_v<T, mysql_mediumtext>) {
            if (iter->second != (unsigned long)-1) {
                e = std::string(iter->first.data(), iter->second);
            }
            iter++;
        }
    }
}
template<size_t ElementSize, typename ReturnType>
auto after_execute() {
//initialize results bind
std::array<bool, ElementSize> is_null{};
std::vector<std::pair<std::vector<char>, unsigned long>> buf_keeper; buf_keeper.reserve(ElementSize);
std::array<MYSQL_BIND, ElementSize> param_binds{};
ReturnType r{};
if constexpr (is_tuple_v<ReturnType>) {
for_each_tuple([&r, &buf_keeper, ¶m_binds, &is_null, this](auto index) {
this->build_result_param(buf_keeper, param_binds[index], std::get<index>(r), &is_null[index]);
}, std::make_index_sequence<ElementSize>());
}
else if constexpr (reflection::is_reflection_v<ReturnType>) { //reflect
constexpr auto address = ReturnType::elements_address();
for_each_tuple([&r, &buf_keeper, &address, ¶m_binds, &is_null, this](auto index) {
this->build_result_param(buf_keeper, param_binds[index], r.*std::get<index>(address), &is_null[index]);
}, std::make_index_sequence<ElementSize>());
}
else { //single type
this->build_result_param(buf_keeper, param_binds[0], r, &is_null[0]);
}
//bind
auto ret = mysql_stmt_bind_result(smt_ctx_, &(param_binds[0]));
if (ret != 0) {
auto error_msg = std::string("Failed to stmt_bind_param : ") + mysql_error_msg();
throw except::mysql_exception(std::move(error_msg));
}
//buffer all results to client
auto r_ret = mysql_stmt_store_result(smt_ctx_);
if (r_ret != 0) {
auto error_msg = std::string("Failed to stmt_store_result : ") + mysql_error_msg();
throw except::mysql_exception(std::move(error_msg));
}
//get back data
auto row_count = mysql_stmt_num_rows(smt_ctx_);
std::vector<ReturnType> back_data{};
back_data.reserve((std::size_t)row_count);
while (!mysql_stmt_fetch(smt_ctx_)) {
auto iter = buf_keeper.begin();
if constexpr (is_tuple_v<ReturnType>) {
for_each_tuple([&r, &iter, &is_null, this](auto index) {
this->assign_result(is_null[index], std::get<index>(r), iter);
}, std::make_index_sequence<ElementSize>());
}
else if constexpr (reflection::is_reflection_v<ReturnType>) {
constexpr auto address = ReturnType::elements_address();
for_each_tuple([&r, &address, &iter, &is_null, this](auto index) {
this->assign_result(is_null[index], r.*std::get<index>(address), iter);
}, std::make_index_sequence<ElementSize>());
}
else { //single type
this->assign_result(is_null[0], r, iter);
}
back_data.emplace_back(std::move(r));
}
return back_data;
}
};
} |
Parvovirus associated antigen in the synovial membrane of patients with rheumatoid arthritis.
Homogenates of synovium from patients with rheumatoid arthritis (RA) and osteoarthritis (OA) were centrifuged on caesium chloride density gradients to obtain isolates of a density similar to that of parvoviruses. Six of 11 RA isolates and none of six OA isolates reacted with an antiserum raised against a rheumatoid associated, parvovirus-like agent (RA-1 virus). An anti-B19 parvovirus antiserum did not react with any of the isolates tested. Electron microscopy of negatively stained preparations of the isolates showed that small particles of diameter 10 nm were abundant in most of the RA isolates (11/13) but absent from all OA isolates. Such particles, whose identity is unknown, were also present in RA-1 positive lysates prepared from cultured RA synovial cells. These results suggest that the RA-1 virus can be directly identified in RA synovial tissue and that the virus appears to be unrelated to the human B19 parvovirus.
IT security researchers at FireEye have discovered a malware that aims to steal sensitive information from critical cyber infrastructure including Aerospace, Defense Contractors, and Manufacturing sectors in South Korea and the United States.
Dubbed FormBook, the data stealer malware is distributed using different methods which steal clipboard contents, log keystrokes and extract data from HTTP sessions. According to FireEye’s researchers Nart Villeneuve, Randi Eitzman, Sandor Nemes and Tyler Dean:
“While FormBook is not unique in either its functionality or distribution mechanisms, its relative ease of use, affordable pricing structure, and open availability make FormBook an attractive option for cybercriminals of varying skill levels.”
FormBook is distributed in PDFs with download links; .DOC and .XLS files with malicious macros; and archive files (e.g. .ZIP & .RAR) with .EXE payloads. Upon infecting a targeted device, the malware can receive instructions from its Command & Control server, such as stealing passwords and cookies, executing files, starting processes, and shutting down or rebooting the system.
“The credentials and other data harvested by successful FormBook infections could be used for additional cybercrime activities including, but not limited to: identity theft, continued phishing operations, bank fraud, and extortion,” said FireEye.
The malware has been available for sale on several hacking forums since 2016. However, researchers have now discovered that it also downloads NanoCore, a remote access trojan (RAT) first identified in 2013 and extensively sold on the dark web. Its author, Taylor Huddleston was arrested in March 2017.
[fullsquaread][/fullsquaread]
FireEye also noted that FormBook reads Windows’ ntdll.dll module from disk into memory and calls its exported functions directly, which renders API monitoring mechanisms ineffective.
“It also features a persistence method that randomly changes the path, filename, file extension and the registry key used for persistence. The malware author does not sell the builder, but only sells the panel, and then generates the executable files as a service,” researchers explained.
Other than South Korea and the United States, the malware has hit targets in countries including Australia, Russia, France, the United Kingdom, Germany, Poland, Ukraine, the Netherlands, and Hungary, while the archive campaign targeted countries including South Korea, the United States, India, Germany, Belgium, Australia, Japan, Sweden, Saudi Arabia and France.
The top 10 industry verticals affected by the Archive campaign are manufacturing 40%, Services/Consulting 17%, Telecom 13%, Financial Services 9%, Government Federal 5%, Energy Utilities 4%, Retail 4%, High-Tech 3%, Aerospace/Defense Contractor 3% and Education 2%.
Since FormBook targets Windows devices, it is high time for high-profile institutions to either upgrade their Windows OS to the latest or move to a secure one. Moreover, don’t open any unknown or suspicious emails, don’t click links in an anonymous email and avoid downloaded attachments from the email address you are not familiar with. |
// Arguments: Ints, Doubles
#include <stan/math/prim.hpp>
using stan::math::var;
using std::numeric_limits;
using std::vector;
// Test fixture describing the Bernoulli log-CDF for the AgradCdfLogTest
// harness: valid/invalid parameter sets plus a reference implementation.
class AgradCdfLogBernoulli : public AgradCdfLogTest {
 public:
  // Supplies (n, theta) parameter pairs with their expected log-CDF values.
  void valid_values(vector<vector<double> >& parameters,
                    vector<double>& cdf_log) {
    vector<double> param(2);

    param[0] = 0;     // Successes (out of single trial)
    param[1] = 0.75;  // Probability
    parameters.push_back(param);
    cdf_log.push_back(std::log(1 - param[1]));  // expected cdf_log: P(X <= 0) = 1 - theta

    param[0] = 1;     // Successes (out of single trial)
    param[1] = 0.75;  // Probability
    parameters.push_back(param);
    cdf_log.push_back(0);  // expected cdf_log: P(X <= 1) = 1, so log(1) = 0
  }

  // Parameter values the implementation must reject
  // (probability outside [0, 1]).
  void invalid_values(vector<size_t>& index, vector<double>& value) {
    // p (Probability)
    index.push_back(1U);
    value.push_back(-1e-4);

    index.push_back(1U);
    value.push_back(1 + 1e-4);
  }

  bool has_lower_bound() { return false; }

  bool has_upper_bound() { return false; }

  // Implementation under test: forwards to stan::math::bernoulli_cdf_log.
  template <typename T_n, typename T_prob, typename T2, typename T3,
            typename T4, typename T5>
  stan::return_type_t<T_prob> cdf_log(const T_n& n, const T_prob& theta,
                                      const T2&, const T3&, const T4&,
                                      const T5&) {
    return stan::math::bernoulli_cdf_log(n, theta);
  }

  // Hand-written reference: log P(X <= n) for X ~ Bernoulli(theta).
  template <typename T_n, typename T_prob, typename T2, typename T3,
            typename T4, typename T5>
  stan::return_type_t<T_prob> cdf_log_function(const T_n& n,
                                               const T_prob& theta, const T2&,
                                               const T3&, const T4&,
                                               const T5&) {
    if (n < 0)
      return stan::math::negative_infinity();
    if (n < 1)
      return log(1 - theta);
    else
      return 0;
  }
};
|
import {ChangeDetectionStrategy, Component} from '@angular/core';
import {rxjsWayPaths} from "./rxjs-way/rxjs-way-paths";
import {ngrxWayPaths} from "./ngrx-way/ngrx-way-paths";
import {promisesWayPaths} from "./promises-way/promises-way-paths";
/**
 * Root component. Exposes the main route path of each demo implementation
 * (RxJS, NgRx, Promises), presumably for navigation links in the template
 * -- confirm against app.component.html.
 */
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.scss'],
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class AppComponent {
  // Main route path of each demo variant.
  rxjsPath = rxjsWayPaths.main;
  ngrxPath = ngrxWayPaths.main;
  promisesPath = promisesWayPaths.main;
}
|
<gh_stars>1-10
/*
*
* provide a reliable timer which is constantly running
*
*/
#include <stdint.h>
/* Millisecond counter; volatile because it is presumably updated from a
 * timer interrupt -- the updater is not visible in this file. */
volatile uint32_t millis;

/* Stub initialisers: real implementations would configure a hardware timer.
 * NOTE(review): C does not allow overloading, so defining both
 * millis_init(uint32_t) and millis_init() in one translation unit will not
 * compile as C; presumably this file is built as C++ or one definition is
 * conditionally compiled -- TODO confirm. */
void millis_init(uint32_t cpu) {
}

void millis_init() {
}

/* Always reports the timer as initialised (test stub). */
uint8_t millis_is_init() {
	return 1;
}

/* Returns a fixed timestamp (test stub). */
uint32_t millis_time() {
	return 1234;
}
|
def gaussian1d_variant2(x, *p):
    """Evaluate an unnormalised 1-D Gaussian.

    Args:
        x: Input value(s); scalars and numpy arrays both work.
        *p: Exactly three parameters ``(a, b, c)`` -- amplitude, mean and
            standard deviation.

    Returns:
        ``a * exp(-(x - b)^2 / (2 * c^2))``, same shape as ``x``.
    """
    amplitude, mean, sigma = p
    z = (x - mean) / sigma
    return amplitude * np.exp(-0.5 * z * z)
// Validate returns whether or not the given Event is valid.
// It checks, in order: the event ID, that the structure version is exactly 1,
// and finally the body; the first failure encountered is returned.
func (e *Event) Validate(v strfmt.Registry) error {
	if err := e.ID.Validate(v); err != nil {
		return err
	}
	// Only version 1 of the event structure is accepted.
	if e.Version() != 1 {
		return manifold.NewError(errors.BadRequestError, "Expected version to be 1")
	}
	return e.Body.Validate(v)
}
//MatchLength returns the number of bases in a Cigar slice that align to the reference.
func MatchLength(c []*Cigar) int {
var ans int
if c[0].Op == '*' {
log.Fatalf("Cannot calculate MatchLength from unaligned reads.")
}
for _, v := range c {
if ConsumesReference(v.Op) && ConsumesQuery(v.Op) {
ans = ans + v.RunLength
}
}
return ans
} |
import { css } from '@emotion/react';
import { space, transitions } from '@guardian/src-foundations';
import { height, width } from '@guardian/src-foundations/size';
import { textSans } from '@guardian/src-foundations/typography';
import { focusHalo } from '@guardian/src-foundations/accessibility';
import { checkboxDefault } from '@guardian/src-foundations/themes';
import { resets, appearance } from '@guardian/src-foundations/utils';
// Resets native fieldset styling and stacks the checkboxes vertically.
export const fieldset = css`
	${resets.fieldset};
	display: flex;
	justify-content: flex-start;
	flex-direction: column;
`;

// Clickable label row wrapping a checkbox; highlights the box border on hover.
export const label = (checkbox = checkboxDefault.checkbox) => css`
	position: relative;
	display: flex;
	align-items: center;
	cursor: pointer;
	min-height: ${height.inputMedium}px;
	&:hover {
		input {
			border-color: ${checkbox.borderHover};
		}
	}
`;

// Variant applied when supporting text is present: top-align the box so it
// lines up with the first line of text.
export const labelWithSupportingText = css`
	align-items: flex-start;
	margin-bottom: ${space[3]}px;
`;
// The checkbox input itself. Where `appearance: none` is supported, the
// native glyph is suppressed and the checked state is drawn by the sibling
// tick <span> (see `tick` below), driven via the `& ~ span` selectors.
export const checkbox = (checkbox = checkboxDefault.checkbox) => css`
	flex: 0 0 auto;
	box-sizing: border-box;
	display: inline-block;
	cursor: pointer;
	width: ${width.inputXsmall}px;
	height: ${height.inputXsmall}px;
	margin: 0 ${space[2]}px 0 0;
	border: 2px solid currentColor;
	position: relative;
	transition: box-shadow ${transitions.short};
	transition-delay: 0.08s;
	color: ${checkbox.border};
	&:focus {
		${focusHalo};
	}
	@supports (${appearance}) {
		appearance: none;
		&:checked {
			border: 2px solid ${checkbox.borderChecked};
			& ~ span:before {
				right: 0;
			}
			& ~ span:after {
				top: 0;
			}
		}
		&:indeterminate {
			&:after {
				${textSans.xlarge()};
				color: ${checkbox.textIndeterminate};
				content: '-';
				position: absolute;
				top: -10px;
				left: 5px;
				z-index: 5;
			}
		}
	}
`;
// Main label text next to the checkbox.
export const labelText = (checkbox = checkboxDefault.checkbox) => css`
	${textSans.medium({ lineHeight: 'regular' })};
	color: ${checkbox.textLabel};
	width: 100%;
`;

// Label text variant used when supporting text is shown beneath it.
export const labelTextWithSupportingText = css`
	${textSans.medium({ lineHeight: 'regular' })};
`;

// Smaller secondary line of text under the main label.
export const supportingText = (checkbox = checkboxDefault.checkbox) => css`
	${textSans.small({ lineHeight: 'regular' })};
	color: ${checkbox.textLabelSupporting};
`;
// The animated checkmark drawn in a sibling <span> when the input is checked
// (only where custom `appearance` is supported; otherwise the native glyph
// is used). The short and long strokes animate in sequence via the
// transition delays below.
export const tick = (checkbox = checkboxDefault.checkbox) => css`
	@supports (
		(appearance: none) or (-webkit-appearance: none) or
			(-moz-appearance: none)
	) {
		/* overall positional properties */
		position: absolute;
		width: 6px;
		height: 12px;
		transform: rotate(45deg);
		/*
        these properties are very sensitive and are overridden
        if the checkbox has a label or supporting text
        */
		top: 14px;
		left: 9px;
		/*
        this prevents simulated click events to the checkbox, eg from Selenium tests
        from being intercepted by the tick
        */
		pointer-events: none;
		/* the checkmark ✓ */
		&:after,
		&:before {
			position: absolute;
			display: block;
			background-color: ${checkbox.backgroundChecked};
			transition: all ${transitions.short} ease-in-out;
			content: '';
		}
		/* the short side */
		&:before {
			height: 2px;
			bottom: 0;
			left: 0;
			right: 100%;
			transition-delay: 0.05s;
		}
		/* the long side */
		&:after {
			bottom: 0;
			right: 0;
			top: 100%;
			width: 2px;
			transition-delay: 0.1s;
		}
	}
`;
// Tick position override when a label is present (see `tick` above for the
// default coordinates).
export const tickWithLabelText = css`
	@supports (${appearance}) {
		top: 15px;
		left: 9px;
	}
`;

// Tick position override when supporting text is present.
export const tickWithSupportingText = css`
	@supports (${appearance}) {
		top: 5px;
	}
`;

// Thicker, error-coloured border signalling a validation failure.
export const errorCheckbox = (checkbox = checkboxDefault.checkbox) => css`
	border: 4px solid ${checkbox.borderError};
`;
|
/**
* Class to control of zone
*/
export class Zone {
constructor(public map: TYPES.GameMap) {}
/**
* Check if vector in zone
* @param {SHARED.TYPES.Vector2} vector
*/
in(vector: SHARED.TYPES.Vector2): boolean {
let deltaIndex: number = this.map.area.length - 1
let inPolygon: boolean = false
this.map.area.forEach((currVector: SHARED.TYPES.Vector2, index: number) => {
const prevVector: SHARED.TYPES.Vector2 = this.map.area[deltaIndex]
const cond1: boolean = (currVector.y > vector.y) != (prevVector.y > vector.y)
const delta: number = (prevVector.x - currVector.x) * (vector.y - currVector.y) / (prevVector.y - currVector.y) + currVector.x
const cond3: boolean = vector.x < delta
if (cond1 && cond3) inPolygon = !inPolygon
deltaIndex = index
})
return inPolygon
}
/**
* Check if vector out of zone
* @param {SHARED.TYPES.Vector2} vector
*/
out(vector: SHARED.TYPES.Vector2) : boolean {
return !!!this.in(vector)
}
/**
* Return a center vector of map
* @return {SHARED.TYPES.Vector2}
*/
center(): Vector3Mp {
const x = this.map.area.map(vector => vector.x)
const y = this.map.area.map(vector => vector.y)
const centerX = (Math.min(...x) + Math.max(...x)) / 2
const centerY = (Math.min(...y) + Math.max(...y)) / 2
const { z } = this.map.spawnPoints.ATTACKERS[0]
return new mp.Vector3(centerX, centerY, z)
}
} |
package main
import (
"testing"
"./decoders"
"fmt"
"bytes"
)
// Pokemon mirrors the subset of the Pokemon API JSON payload used in tests.
type Pokemon struct {
	Id float64
}

// IP mirrors the IP-lookup API JSON payload used in tests.
type IP struct {
	Ip string
}
// TestDecodePokemon fetches the Pokemon payload and checks that the decoded
// string is long enough to have had its fields populated.
func TestDecodePokemon(t *testing.T) {
	go getJSON(URLS["Pokemon"])
	annet := <- pokeChan

	// NOTE(review): confMap is built here but never used -- candidate for removal.
	confMap := map[string]string{}
	for key, value := range annet {
		confMap[string(key)] = string(value)
	}

	/*
	   The decoded string is at least 53 characters long before any variables
	   are converted to text; nil or zero variables are not converted, making
	   the string shorter. Requiring at least 60 characters is therefore an
	   adequate sanity check that the variables were actually populated.
	*/
	actual := len(decoders.DecodePokemon(annet))
	expected := 60
	fmt.Println(actual)
	if actual < expected{
		t.Errorf("Test failed, expected, longer string!")
	}
}
// TestIP requests the client IP from the API and fails if the response is the
// empty JSON string `""`, which indicates the request failed.
func TestIP(t *testing.T) {
	go getJSON(URLS["IP"])
	ip := <-ipChan

	emptyJSON := []byte{'"', '"'}
	if bytes.Equal(ip, emptyJSON) {
		// BUG FIX: t.Errorf was previously called with a non-format string
		// plus stray arguments, which `go vet` rejects; use a proper format
		// string (dead commented-out code and unused locals removed too).
		t.Errorf("Test failed: got empty IP response %q", ip)
	}
}
// TestMultiple fires many rounds of concurrent getJSON requests against every
// configured URL and verifies the loop ran to completion.
func TestMultiple(t *testing.T) {
	counter := 0
	for count := 0 ; count <= 25; count++ {
		for key := range URLS {
			go getJSON(URLS[key])
		}
		counter = count
	}

	/*
	   26 rounds of requests are dispatched against every URL; counter holds
	   the last completed iteration, so it must reach 25.
	   NOTE(review): the goroutines are never awaited, so this exercises only
	   request dispatch, not the responses; the Errorf message below was
	   copied from the Pokemon test and does not describe this check.
	*/
	actual := counter
	expected := 25
	fmt.Println(actual)
	if actual < expected{
		t.Errorf("Test failed, expected, longer string!")
	}
}
package constants
import "github.com/google/jsonapi"
const (
	// GeoJSON is the identifier for the GeoJSON media type.
	GeoJSON = "application/geo+json"
	// JSONAPI is the identifier for the JSON API media type.
	JSONAPI = jsonapi.MediaType
	// ESIndexGeography is the ElasticSearch index holding geography documents.
	ESIndexGeography = "populin"
	// ESTypeGeography is the ElasticSearch document type for divisions.
	ESTypeGeography = "division"
)
|
package jbse.apps;
import java.util.Collection;
import java.util.Map;
import jbse.bc.ClassFile;
import jbse.common.exc.InvalidInputException;
import jbse.dec.DecisionProcedure;
import jbse.dec.DecisionProcedureDecorator;
import jbse.dec.exc.DecisionException;
import jbse.mem.Clause;
import jbse.mem.Objekt;
import jbse.val.Expression;
import jbse.val.Primitive;
import jbse.val.PrimitiveSymbolic;
import jbse.val.ReferenceSymbolic;
import jbse.val.Simplex;
/**
* A {@link DecisionProcedureDecorator} that logs the time spent
* by each method invocation for its component.
*
* @author <NAME>
*/
public final class DecisionProcedureDecoratorStats extends DecisionProcedureDecorator {
    /** Start timestamp (ms) of the call currently being timed; see {@link #startTimer()}. */
    private long start;

    /** Records the current time as the start of a timed call. */
    private void startTimer() {
        this.start = System.currentTimeMillis();
    }

    /** Returns the milliseconds elapsed since the last {@link #startTimer()}. */
    private long elapsed() {
        return System.currentTimeMillis() - this.start;
    }

    /**
     * Constructor.
     *
     * @param component the {@link DecisionProcedure} whose calls are timed.
     * @throws InvalidInputException if {@code component} is invalid for the superclass.
     */
    public DecisionProcedureDecoratorStats(DecisionProcedure component) throws InvalidInputException {
        super(component);
    }

    @Override
    public void pushAssumption(Clause c)
    throws InvalidInputException, DecisionException {
        startTimer();
        super.pushAssumption(c);
        final long elapsed = elapsed();
        System.err.println("PUSH\t" + c + "\t\t" + elapsed);
    }

    @Override
    public void clearAssumptions()
    throws DecisionException {
        startTimer();
        super.clearAssumptions();
        final long elapsed = elapsed();
        System.err.println("CLEAR\t\t\t" + elapsed);
    }

    @Override
    public void addAssumptions(Iterable<Clause> assumptionsToAdd)
    throws InvalidInputException, DecisionException {
        startTimer();
        super.addAssumptions(assumptionsToAdd);
        final long elapsed = elapsed();
        System.err.println("ADD\t\t\t" + elapsed);
    }

    @Override
    public void addAssumptions(Clause... assumptionsToAdd)
    throws InvalidInputException, DecisionException {
        startTimer();
        super.addAssumptions(assumptionsToAdd);
        final long elapsed = elapsed();
        System.err.println("ADD\t\t\t" + elapsed);
    }

    @Override
    public void setAssumptions(Collection<Clause> newAssumptions)
    throws InvalidInputException, DecisionException {
        startTimer();
        super.setAssumptions(newAssumptions);
        final long elapsed = elapsed();
        System.err.println("SETASSUMPTIONS\t\t\t" + elapsed);
    }

    @Override
    public Collection<Clause> getAssumptions()
    throws DecisionException {
        startTimer();
        final Collection<Clause> result = super.getAssumptions();
        final long elapsed = elapsed();
        System.err.println("GETASSUMPTIONS\t\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public boolean isSat(Expression exp)
    throws InvalidInputException, DecisionException {
        startTimer();
        final boolean result = super.isSat(exp);
        final long elapsed = elapsed();
        System.err.println("ISSAT\t" + exp + "\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public boolean isSatAliases(ReferenceSymbolic r, long heapPos, Objekt o)
    throws InvalidInputException, DecisionException {
        startTimer();
        final boolean result = super.isSatAliases(r, heapPos, o);
        final long elapsed = elapsed();
        System.err.println("ISSATALIASES\t" + r + "\t" + heapPos + "\t" + o + "\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public boolean isSatExpands(ReferenceSymbolic r, ClassFile classFile)
    throws InvalidInputException, DecisionException {
        startTimer();
        final boolean result = super.isSatExpands(r, classFile);
        final long elapsed = elapsed();
        System.err.println("ISSATEXPANDS\t" + r + "\t" + classFile.getClassName() + "\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public boolean isSatNull(ReferenceSymbolic r)
    throws InvalidInputException, DecisionException {
        startTimer();
        final boolean result = super.isSatNull(r);
        final long elapsed = elapsed();
        System.err.println("ISSATNULL\t" + r + "\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public boolean isSatInitialized(ClassFile classFile)
    throws InvalidInputException, DecisionException {
        startTimer();
        final boolean result = super.isSatInitialized(classFile);
        final long elapsed = elapsed();
        System.err.println("ISSATINITIALIZED\t" + classFile.getClassName() + "\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public boolean isSatNotInitialized(ClassFile classFile)
    throws InvalidInputException, DecisionException {
        startTimer();
        // BUG FIX: this method previously delegated to super.isSatInitialized,
        // returning the answer to the opposite question.
        final boolean result = super.isSatNotInitialized(classFile);
        final long elapsed = elapsed();
        System.err.println("ISSATNOTINITIALIZED\t" + classFile.getClassName() + "\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public Map<PrimitiveSymbolic, Simplex> getModel() throws DecisionException {
        startTimer();
        final Map<PrimitiveSymbolic, Simplex> result = super.getModel();
        final long elapsed = elapsed();
        System.err.println("GETMODEL\t\t" + result + "\t" + elapsed);
        return result;
    }

    @Override
    public Primitive simplify(Primitive c) throws DecisionException {
        startTimer();
        final Primitive result = super.simplify(c);
        final long elapsed = elapsed();
        System.err.println("SIMPLIFY\t" + c + "\t" + result + "\t" + elapsed);
        return result;
    }
}
|
#ifndef __X_HANDLE__
#define __X_HANDLE__
/*
* Copyright (C) 2005-2013 Team XBMC
* http://xbmc.org
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBMC; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#ifndef TARGET_WINDOWS
#include <list>
#include <string>
#include <vector>
#include "PlatformDefs.h"
#include "XHandlePublic.h"
#include "threads/Condition.h"
#include "threads/CriticalSection.h"
// Unix-side emulation of a Windows HANDLE: one object that can stand in for
// a file, event, mutex or find-file enumeration, discriminated by m_type.
struct CXHandle {

public:
  typedef enum { HND_NULL = 0, HND_FILE, HND_EVENT, HND_MUTEX, HND_FIND_FILE } HandleType;

  CXHandle();
  CXHandle(HandleType nType);
  CXHandle(const CXHandle &src);
  virtual ~CXHandle();
  // Common setup shared by the constructors (implementation not visible here).
  void Init();
  inline HandleType GetType() { return m_type; }
  void ChangeType(HandleType newType);

  // Condition variable, presumably used for event/mutex waits -- confirm in the .cpp.
  XbmcThreads::ConditionVariable *m_hCond;
  std::list<CXHandle*> m_hParents;

  // simulate mutex and critical section
  CCriticalSection *m_hMutex;
  int RecursionCount; // for mutex - for compatibility with TARGET_WINDOWS critical section

  // Underlying file descriptor for file handles.
  int fd;
  bool m_bManualEvent; // manual-reset vs auto-reset event semantics -- TODO confirm
  time_t m_tmCreation;

  // FindFirstFile/FindNextFile-style enumeration state.
  std::vector<std::string> m_FindFileResults;
  int m_nFindFileIterator;
  std::string m_FindFileDir;

  off64_t m_iOffset;
  bool m_bCDROM;
  bool m_bEventSet;
  int m_nRefCount;
  CCriticalSection *m_internalLock;

  // Prints the live-object counts kept in m_objectTracker (leak debugging).
  static void DumpObjectTracker();

protected:
  HandleType m_type;
  // One live-instance counter per HandleType slot.
  static int m_objectTracker[10];
};
#endif
#endif
|
<filename>mrec/parallel/train.py
"""
Training task to run on an ipython engine.
"""
def run(task):
    """Train a similarity model on one shard of items and write the
    similarity lists to ``<outdir>/sims.<start>-<end>.tsv``.

    ``task`` is a tuple
    ``(model, input_format, trainfile, outdir, start, end, max_similar_items)``.
    A ``<start>-<end>.SUCCESS`` marker file is touched on completion and the
    processed ``(start, end)`` range is returned.
    """
    # import modules required by engine
    import os
    import subprocess

    from mrec import load_fast_sparse_matrix

    model,input_format,trainfile,outdir,start,end,max_similar_items = task

    # initialise the model
    dataset = load_fast_sparse_matrix(input_format,trainfile)
    model._init(dataset)
    if hasattr(model,'similarity_matrix'):
        # clear out any existing similarity matrix
        model.similarity_matrix = None

    # write sims directly to file as we compute them; the with-statement
    # guarantees the file is closed even if get_similar_items() raises
    # (the original leaked the handle on error)
    outfile = os.path.join(outdir,'sims.{0}-{1}.tsv'.format(start,end))
    with open(outfile,'w') as out:
        for j in xrange(start,end):
            w = model.get_similar_items(j,max_similar_items=max_similar_items)
            for k,v in w:
                out.write('{0}\t{1}\t{2}\n'.format(j+1,k+1,v)) # write as 1-indexed

    # record success
    cmd = ['touch',os.path.join(outdir,'{0}-{1}.SUCCESS'.format(start,end))]
    subprocess.check_call(cmd)

    # return the range that we've processed
    return (start,end)
|
The Social Benefits of Intramural Sports
One of the distinguishing features of collegiate student recreational sports complexes is the sense of community that is intentionally introduced in the programs and services that occur within these facilities. Intramural sports programs provide a powerful medium for student interaction (Belch, Gebel, & Mass, 2001). This study was designed to examine the social benefits of intramural sports participation for undergraduate students at a midsized postsecondary institution. Surveys were randomly distributed to students (N = 349) participating in a variety of intramural sports programs. There were a number of significant differences in the reported social benefits of intramural sports participation between on-campus and off-campus students, first- and fourth-year students, males and females, and differences in reported social benefits based on the number of intramural sports played. Recreational sports programs should be linking participation in intramural sports with broader institutional goals of retention by emphasizing their role in socially integrating students into the university through participation in various recreational sports offerings. Suggestions for future research are made in the context of the limitations of the study. |
President Donald Trump had concerned Americans scratching their heads Sunday night after he tweeted that he now wants to pour more money into health care.
“I suggest that we add more dollars to Healthcare and make it the best anywhere,” he wrote.
I suggest that we add more dollars to Healthcare and make it the best anywhere. ObamaCare is dead - the Republicans will do much better! — Donald J. Trump (@realDonaldTrump) May 28, 2017
That was strange because Trump’s budget plan submitted just days ago called for cutting at least $610 billion from Medicaid and slicing an additional $250 billion from health care by repealing Obamacare. The White House plan also calls for cuts in future spending on health care for children from low-income households. There is no additional spending for any health care in the plan.
The weird disconnect on health care wasn’t a first for the president. When Trump met with Australian Prime Minister Malcolm Turnbull in New York City in early May, the president praised that nation’s health care system. “You have better health care than we do,” Trump said.
He was perhaps unaware that Australia has a government-run, publicly funded universal health care system — and at a fraction of the cost of American health care. It’s a system Trump would be more likely to denigrate as socialist and even worse than Obamacare. The Congressional Budget Office estimated last week that Trump’s health care proposal would leave an additional 23 million Americans with no health care coverage by 2026.
The White House later said that Trump was just being nice and not pushing for the Australian system in America.
In another puzzling tweet on Sunday, Trump boasted that his tax reform package was moving along “ahead of schedule.”
The massive TAX CUTS/REFORM that I have submitted is moving along in the process very well, actually ahead of schedule. Big benefits to all! — Donald J. Trump (@realDonaldTrump) May 29, 2017 |
<filename>caaa/AcceptorCancellationRequestV01.go
package caaa
import (
"encoding/xml"
"github.com/fgrid/iso20022"
)
// Document00500101 is the XML document root for the ISO 20022
// caaa.005.001.01 message set. It wraps a single
// AcceptorCancellationRequestV01 under the "AccptrCxlReq" element.
type Document00500101 struct {
	XMLName xml.Name                        `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.01 Document"`
	Message *AcceptorCancellationRequestV01 `xml:"AccptrCxlReq"`
}
// AddMessage allocates an empty AcceptorCancellationRequestV01, stores it
// as the document's message, and returns it so the caller can populate it.
func (d *Document00500101) AddMessage() *AcceptorCancellationRequestV01 {
	msg := &AcceptorCancellationRequestV01{}
	d.Message = msg
	return msg
}
// Scope
// The AcceptorCancellationRequest message is sent by a card acceptor to cancel a successfully completed card payment transaction. The message can be sent directly to the acquirer or through an agent.
// Usage
// The AcceptorCancellationRequest message is used when the card acceptor is unaware of the cancellation of the transaction by the acquirer.
// AcceptorCancellationRequestV01 is the message sent by a card acceptor
// to cancel a successfully completed card payment transaction, either
// directly to the acquirer or through an agent.
type AcceptorCancellationRequestV01 struct {
	// Cancellation request message management information.
	Header *iso20022.Header1 `xml:"Hdr"`
	// Information related to the cancellation request.
	CancellationRequest *iso20022.AcceptorCancellationRequest1 `xml:"CxlReq"`
	// Trailer of the message containing a MAC.
	SecurityTrailer *iso20022.ContentInformationType3 `xml:"SctyTrlr"`
}
// AddHeader allocates the message management header, attaches it to the
// message, and returns it for population.
func (a *AcceptorCancellationRequestV01) AddHeader() *iso20022.Header1 {
	hdr := &iso20022.Header1{}
	a.Header = hdr
	return hdr
}
// AddCancellationRequest allocates the cancellation request body, attaches
// it to the message, and returns it for population.
func (a *AcceptorCancellationRequestV01) AddCancellationRequest() *iso20022.AcceptorCancellationRequest1 {
	req := &iso20022.AcceptorCancellationRequest1{}
	a.CancellationRequest = req
	return req
}
// AddSecurityTrailer allocates the MAC-bearing security trailer, attaches
// it to the message, and returns it for population.
func (a *AcceptorCancellationRequestV01) AddSecurityTrailer() *iso20022.ContentInformationType3 {
	trailer := &iso20022.ContentInformationType3{}
	a.SecurityTrailer = trailer
	return trailer
}
|
<filename>client/src/config.rs
use std::{fmt::Debug, fs::File, io::BufReader, path::Path};
use eyre::{Result, WrapErr};
use iroha_config::derive::Configurable;
use iroha_crypto::{PrivateKey, PublicKey};
use iroha_data_model::prelude::*;
use iroha_logger::Configuration as LoggerConfiguration;
use serde::{Deserialize, Serialize};
/// Default address of the Torii API endpoint.
const DEFAULT_TORII_API_URL: &str = "127.0.0.1:8080";
/// Default address of the Torii status endpoint.
const DEFAULT_TORII_STATUS_URL: &str = "127.0.0.1:8180";
/// Default proposed transaction time-to-live, in milliseconds.
const DEFAULT_TRANSACTION_TIME_TO_LIVE_MS: u64 = 100_000;
/// Default timeout for waiting on a transaction status, in milliseconds.
const DEFAULT_TRANSACTION_STATUS_TIMEOUT_MS: u64 = 10_000;
/// Default cap on the number of instructions per transaction (2^12 = 4096).
const DEFAULT_MAX_INSTRUCTION_NUMBER: u64 = 2_u64.pow(12);
/// By default no nonce is added to transactions.
const DEFAULT_ADD_TRANSACTION_NONCE: bool = false;
/// `Configuration` provides an ability to define client parameters such as `TORII_URL`.
// TODO: design macro to load config from env.
#[derive(Clone, Deserialize, Serialize, Debug, Configurable)]
#[serde(rename_all = "UPPERCASE")]
#[serde(default)]
#[config(env_prefix = "IROHA_")]
pub struct Configuration {
    /// Public key of the user account.
    #[config(serde_as_str)]
    pub public_key: PublicKey,
    /// Private key of the user account.
    pub private_key: PrivateKey,
    /// User account id.
    pub account_id: AccountId,
    /// Torii API URL (`host:port` the client sends requests to).
    pub torii_api_url: String,
    /// Torii status URL (`host:port`).
    pub torii_status_url: String,
    /// Proposed transaction TTL in milliseconds.
    pub transaction_time_to_live_ms: u64,
    /// Transaction status wait timeout in milliseconds.
    pub transaction_status_timeout_ms: u64,
    /// Maximum number of instructions per transaction.
    pub max_instruction_number: u64,
    /// If `true`, add a nonce so that transactions submitted repeatedly
    /// and simultaneously still hash differently.
    pub add_transaction_nonce: bool,
    /// `Logger` configuration.
    #[config(inner)]
    pub logger_configuration: LoggerConfiguration,
}
impl Default for Configuration {
fn default() -> Self {
Self {
public_key: PublicKey::default(),
private_key: PrivateKey::default(),
account_id: AccountId::new("", ""),
torii_api_url: DEFAULT_TORII_API_URL.to_owned(),
torii_status_url: DEFAULT_TORII_STATUS_URL.to_owned(),
transaction_time_to_live_ms: DEFAULT_TRANSACTION_TIME_TO_LIVE_MS,
transaction_status_timeout_ms: DEFAULT_TRANSACTION_STATUS_TIMEOUT_MS,
max_instruction_number: DEFAULT_MAX_INSTRUCTION_NUMBER,
add_transaction_nonce: DEFAULT_ADD_TRANSACTION_NONCE,
logger_configuration: LoggerConfiguration::default(),
}
}
}
impl Configuration {
    /// Builds a `Configuration` from a *pretty*-formatted JSON file
    /// (without `:` in key names).
    ///
    /// # Errors
    /// Fails if the file cannot be opened, or if its contents cannot be
    /// deserialized into a `Configuration`.
    pub fn from_path<P: AsRef<Path> + Debug>(path: P) -> Result<Configuration> {
        let file = File::open(path).wrap_err("Failed to open the config file")?;
        serde_json::from_reader(BufReader::new(file))
            .wrap_err("Failed to deserialize json from reader")
    }
}
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use big_enum_set::*;
use big_enum_set::__internal::BigEnumSetTypePrivate;
// Enum whose 8 variants use the default discriminants 0..=7, so the
// backing bit-set needs only 8 bits.
#[allow(dead_code)]
#[derive(BigEnumSetType)]
enum EnumSmall {
    A, B, C, D, E, F, G, H,
}
// Enum with sparse discriminants from 0x40 up to 0x607, so the backing
// bit-set must cover over 1500 bits.
#[allow(dead_code)]
#[derive(BigEnumSetType)]
enum EnumLarge {
    A = 0x40, B = 0x100, C, D = 0x300, E, F = 0x600, G, H,
}
/// Benchmarks the basic set operations (`contains`, `insert`, `remove`,
/// iteration, `is_disjoint`) for both a dense enum (`EnumSmall`) and one
/// with sparse, large discriminants (`EnumLarge`).
fn enumset_ops(cr: &mut Criterion) {
    // Stamped out once per enum; benchmark names are prefixed with the
    // enum's identifier via `stringify!`.
    macro_rules! run {
        ($enum:ident, $cr:expr) => {{
            use $enum as Enum;
            let mut set = big_enum_set!(Enum::A | Enum::G | Enum::D);
            let set2 = big_enum_set!(Enum::B | Enum::C | Enum::E);
            $cr.bench_function(concat!(stringify!($enum), " set contains"), |b| b.iter(|| {
                black_box(&mut set).contains(black_box(Enum::G))
            }));
            $cr.bench_function(concat!(stringify!($enum), " set insert"), |b| b.iter(|| {
                black_box(&mut set).insert(black_box(Enum::C))
            }));
            $cr.bench_function(concat!(stringify!($enum), " set remove"), |b| b.iter(|| {
                black_box(&mut set).remove(black_box(Enum::C))
            }));
            $cr.bench_function(concat!(stringify!($enum), " set iter"), |b| b.iter(|| {
                black_box(&mut set).iter().map(|x| x as isize).sum::<isize>()
            }));
            $cr.bench_function(concat!(stringify!($enum), " set is_disjoint"), |b| b.iter(|| {
                black_box(&mut set).is_disjoint(black_box(&set2))
            }));
        }};
    }
    run!(EnumSmall, cr);
    run!(EnumLarge, cr);
}
// The same 8-variant enum under three different `#[repr]`s (u8, u64 and
// C), used to benchmark `enum_from_u16` for each underlying
// representation.
#[derive(BigEnumSetType)]
#[repr(u8)]
#[allow(dead_code)]
enum Enum8 {
    A, B, C, D, E, F, G, H,
}
#[derive(BigEnumSetType)]
#[repr(u64)]
#[allow(dead_code)]
enum Enum64 {
    A, B, C, D, E, F, G, H,
}
#[derive(BigEnumSetType)]
#[repr(C)]
#[allow(dead_code)]
enum EnumC {
    A, B, C, D, E, F, G, H,
}
/// Benchmarks `BigEnumSetTypePrivate::enum_from_u16` for enums with
/// different `#[repr]`s.
///
/// Fix: the original macro declared a `$cr` argument but its body used the
/// captured outer `c`, while the call sites passed `cr` — an identifier
/// that did not exist in this function's scope (harmless only because the
/// unused `$cr` fragment was never substituted). The parameter is now
/// named `cr` and threaded through `$cr`, matching `enumset_ops`.
fn enum_from(cr: &mut Criterion) {
    macro_rules! run {
        ($enum:ident, $cr:expr) => {{
            use $enum as Enum;
            $cr.bench_function(concat!(stringify!($enum), "::enum_from_u16"), |b| b.iter(|| {
                // Safety: `$enum::A as u16` is a valid discriminant of the enum.
                unsafe { <Enum as BigEnumSetTypePrivate>::enum_from_u16(black_box($enum::A as u16)) }
            }));
        }}
    }
    run!(Enum8, cr);
    run!(Enum64, cr);
    run!(EnumC, cr);
}
// Register both benchmark groups and generate the benchmark `main`.
criterion_group!(all, enum_from, enumset_ops);
criterion_main!(all);
|
from collections import deque
# Deque game: on each operation the two front elements A and B are
# popped; the larger goes back to the front and the smaller to the back.
# Each of the q queries gives an operation number m and asks which pair
# (A, B) was drawn on operation m.
n, q = map(int, input().split())
array = deque(map(int, input().split()))
# req[j] = [queried operation number, original 1-based query index]
req = []
for i in range(1, q+1):
    req.append([int(input()), i])
# Process queries in increasing operation order while simulating.
req.sort(key=lambda x: x[0])
ind = 0
# Once the maximum element reaches the front it wins every comparison,
# so the explicit simulation runs for at most n-1 operations.
last = max(array)
count = 1
while array[0] != last:
    a, b = array.popleft(), array.popleft()
    if a > b:
        array.appendleft(a)
        array.append(b)
    else:
        array.appendleft(b)
        array.append(a)
    # Resolve every query that asks about this operation number.
    while ind < q and req[ind][0] == count:
        req[ind][0] = (a, b)
        ind += 1
    count += 1
# From operation `count` onward the front is always `last` and its
# opponents cycle through array[1..n-1] with period n-1.
for i in range(ind, q):
    req[i][0] = (last, array[(req[i][0]-count)%(n-1) + 1])
# Re-emit the answers in the original query order.
d = {}
for el in req:
    d[el[1]] = el[0]
for i in range(1, q+1):
    print(*d[i])
module Problem28
( solve
) where
import Lib
-- | Problem 28: print the sum of the numbers on both diagonals of a
-- 1001x1001 clockwise number spiral. Diagonal values start at 1 and
-- grow by gaps of 2,2,2,2,4,4,4,4,6,... (four equal steps per ring).
solve :: IO ()
solve = print . sum . takeWhile (<= limit) $ diagonals
  where
    limit = 1001 * 1001
    diagonals = scanl (+) 1 (map step [1 .. limit])
    step x = 2 * (1 + ((x - 1) `div` 4))
|
<gh_stars>1-10
package net.silentchaos512.supermultidrills.part;
import net.minecraft.world.item.ItemStack;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.network.chat.Component;
import net.silentchaos512.gear.api.part.IPartSerializer;
import net.silentchaos512.gear.api.part.PartType;
import net.silentchaos512.gear.gear.part.AbstractGearPart;
import net.silentchaos512.gear.gear.part.PartData;
import net.silentchaos512.supermultidrills.SuperMultiDrills;
import net.silentchaos512.supermultidrills.item.DrillItem;
import net.silentchaos512.utils.Color;
import javax.annotation.Nullable;
import net.silentchaos512.gear.gear.part.AbstractGearPart.Serializer;
/**
 * Gear part representing a drill motor. Motors render untinted and can
 * only be installed in drills; all other behavior comes from
 * {@link AbstractGearPart}.
 *
 * <p>The previous {@code getDisplayNamePrefix} override only delegated to
 * {@code super} and has been removed as redundant (behavior unchanged).
 */
public class MotorPart extends AbstractGearPart {
    /** Identifier of the "motor" part type. */
    private static final ResourceLocation TYPE_ID = SuperMultiDrills.getId("motor");

    /** Serializer used to load motor parts from data files. */
    public static final IPartSerializer<MotorPart> SERIALIZER = new Serializer<>(TYPE_ID, MotorPart::new);

    /** The part type shared by all motor parts. */
    public static final PartType TYPE = PartType.create(PartType.Builder.builder(TYPE_ID));

    public MotorPart(ResourceLocation partId) {
        super(partId);
    }

    @Override
    public PartType getType() {
        return TYPE;
    }

    @Override
    public IPartSerializer<?> getSerializer() {
        return SERIALIZER;
    }

    /** Motors are always rendered plain white (no tint). */
    @Override
    public int getColor(PartData part, ItemStack gear, int layer, int animationFrame) {
        return Color.VALUE_WHITE;
    }

    /** Motors may only be added to drill items. */
    @Override
    public boolean canAddToGear(ItemStack gear, PartData part) {
        return gear.getItem() instanceof DrillItem;
    }
}
|
/*
* globals.h - global variables for zsh
*
* This file is part of zsh, the Z shell.
*
* Copyright (c) 1992-1996 <NAME>
* All rights reserved.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and to distribute modified versions of this software for any
* purpose, provided that the above copyright notice and the following
* two paragraphs appear in all copies of this software.
*
* In no event shall <NAME> or the Zsh Development Group be liable
* to any party for direct, indirect, special, incidental, or consequential
* damages arising out of the use of this software and its documentation,
* even if <NAME> and the Zsh Development Group have been advised of
* the possibility of such damage.
*
* <NAME> and the Zsh Development Group specifically disclaim any
* warranties, including, but not limited to, the implied warranties of
* merchantability and fitness for a particular purpose. The software
* provided hereunder is on an "as is" basis, and <NAME> and the
* Zsh Development Group have no obligation to provide maintenance,
* support, updates, enhancements, or modifications.
*
*/
/* GLOBALS is defined is init.c, so the global variables *
* are actually contained in init.c, and are externs in *
* the other source files. */
#ifdef GLOBALS
# define EXTERN
#else
# define EXTERN extern
#endif
#ifdef GLOBALS
/* Maps the redirection token codes (OUTANG..TRINANG, see tokstrings    *
 * below) to the corresponding redirection types; index with            *
 * (token - OUTANG).                                                    */
int redirtab[TRINANG - OUTANG + 1] =
{
    WRITE,          /* >    (OUTANG)         */
    WRITENOW,       /* >|   (OUTANGBANG)     */
    APP,            /* >>   (DOUTANG)        */
    APPNOW,         /* >>|  (DOUTANGBANG)    */
    READ,           /* <    (INANG)          */
    READWRITE,      /* <>   (INOUTANG)       */
    HEREDOC,        /* <<   (DINANG)         */
    HEREDOCDASH,    /* <<-  (DINANGDASH)     */
    MERGEIN,        /* <&   (INANGAMP)       */
    MERGEOUT,       /* >&   (OUTANGAMP)      */
    ERRWRITE,       /* &>   (AMPOUTANG)      */
    ERRWRITENOW,    /* &>|  (OUTANGAMPBANG)  */
    ERRAPP,         /* >>&  (DOUTANGAMP)     */
    ERRAPPNOW,      /* >>&| (DOUTANGAMPBANG) */
    HERESTR,        /* <<<  (TRINANG)        */
};
#else
extern int redirtab[TRINANG - OUTANG + 1];
#endif
#ifdef GLOBALS
/* A string consisting of a single Nularg token.  nulstrlen is its      *
 * length excluding the NUL terminator (i.e. 1).  NOTE(review):         *
 * presumably used to represent an empty quoted word — confirm at the   *
 * use sites.                                                           */
char nulstring[] = {Nularg, '\0'};
int nulstrlen = sizeof(nulstring) - 1;
#else
extern char nulstring[];
extern int nulstrlen;
#endif
/* NULL-terminated arrays containing path, cdpath, etc. */
EXTERN char **path; /* $path */
EXTERN char **cdpath; /* $cdpath */
EXTERN char **fpath; /* $fpath */
EXTERN char **watch; /* $watch */
EXTERN char **mailpath; /* $mailpath */
EXTERN char **manpath; /* $manpath */
EXTERN char **fignore; /* $fignore */
EXTERN char **psvar; /* $psvar */
EXTERN char *yytext;
/* used to suppress ERREXIT and *
* trapping of SIGZERR, SIGEXIT. */
EXTERN int noerrexit;
/* do not save history on exec and exit */
EXTERN int nohistsave;
/* error/break flag */
EXTERN int errflag;
/* Status of return from a trap */
EXTERN int trapreturn;
EXTERN char *tokstr;
EXTERN int tok, tokfd;
/* lexical analyzer error flag */
EXTERN int lexstop;
EXTERN struct heredocs *hdocs;
/* suppress error messages */
EXTERN int noerrs;
/* nonzero means we are not evaluating, just parsing (in math.c) */
EXTERN int noeval;
/* current history event number */
EXTERN int curhist;
/* if != 0, we are expanding the current line */
EXTERN int expanding;
/* these are used to modify the cursor position during expansion */
EXTERN int excs, exlast;
/* if != 0, this is the first line of the command */
EXTERN int isfirstln;
/* if != 0, this is the first char of the command (not including
white space) */
EXTERN int isfirstch;
/* number of history entries */
EXTERN int histentct;
/* array of history entries */
EXTERN Histent histentarr;
/* capacity of history lists */
EXTERN int histsiz;
/* if = 1, we have performed history substitution on the current line
if = 2, we have used the 'p' modifier */
EXTERN int histdone;
/* default event (usually curhist-1, that is, "!!") */
EXTERN int defev;
/* != 0 if we are about to read a command word */
EXTERN int incmdpos;
/* != 0 if we are in the middle of a [[ ... ]] */
EXTERN int incond;
/* != 0 if we are after a redirection (for ctxtlex only) */
EXTERN int inredir;
/* != 0 if we are about to read a case pattern */
EXTERN int incasepat;
/* != 0 if we just read FUNCTION */
EXTERN int infunc;
/* != 0 if we just read a newline */
EXTERN int isnewlin;
/* the lists of history events */
EXTERN LinkList histlist;
/* the directory stack */
EXTERN LinkList dirstack;
/* the zle buffer stack */
EXTERN LinkList bufstack;
/* total # of characters waiting to be read. */
EXTERN int inbufct;
/* the flags controlling the input routines in input.c: see INP_* in zsh.h */
EXTERN int inbufflags;
/* flag that an alias should be expanded after expansion ending in space */
EXTERN int inalmore;
/* != 0 if this is a subshell */
EXTERN int subsh;
/* # of break levels */
EXTERN int breaks;
/* != 0 if we have a return pending */
EXTERN int retflag;
/* how far we've hashed the PATH so far */
EXTERN char **pathchecked;
/* # of nested loops we are in */
EXTERN int loops;
/* # of continue levels */
EXTERN int contflag;
/* the job we are working on */
EXTERN int thisjob;
/* the current job (+) */
EXTERN int curjob;
/* the previous job (-) */
EXTERN int prevjob;
/* hash table containing the aliases */
EXTERN HashTable aliastab;
/* hash table containing the reserved words */
EXTERN HashTable reswdtab;
/* hash table containing the parameters */
EXTERN HashTable paramtab;
/* hash table containing the external/hashed commands */
EXTERN HashTable cmdnamtab;
/* hash table containing the shell functions */
EXTERN HashTable shfunctab;
/* hash table containing builtin commands */
EXTERN HashTable builtintab;
/* hash table for completion info for commands */
EXTERN HashTable compctltab;
/* hash table for multi-character bindings */
EXTERN HashTable keybindtab;
/* hash table for emacs multi-character bindings */
EXTERN HashTable emkeybindtab;
/* hash table for vi multi-character bindings */
EXTERN HashTable vikeybindtab;
/* hash table for named directories */
EXTERN HashTable nameddirtab;
/* default completion infos */
EXTERN struct compctl cc_compos, cc_default, cc_first, cc_dummy;
/* the job table */
EXTERN struct job jobtab[MAXJOB];
/* shell timings */
EXTERN struct tms shtms;
/* the list of sched jobs pending */
EXTERN struct schedcmd *schedcmds;
/* the last l for s/l/r/ history substitution */
EXTERN char *hsubl;
/* the last r for s/l/r/ history substitution */
EXTERN char *hsubr;
/* We cache `USERNAME' and use check cached_uid *
* so we know when to recompute it. */
EXTERN uid_t cached_uid;
EXTERN char *cached_username; /* $USERNAME */
EXTERN char *zsh_name; /* ZSH_NAME */
EXTERN char *underscore; /* $_ */
EXTERN zlong lastval; /* $? */
EXTERN zlong mypid; /* $$ */
EXTERN zlong lastpid; /* $! */
EXTERN zlong ppid; /* $PPID */
EXTERN char *ifs; /* $IFS */
EXTERN char *pwd; /* $PWD */
EXTERN char *oldpwd; /* $OLDPWD */
EXTERN zlong columns; /* $COLUMNS */
EXTERN zlong lines; /* $LINES */
EXTERN char *zoptarg; /* $OPTARG */
EXTERN zlong zoptind; /* $OPTIND */
EXTERN char *prompt; /* $PROMPT */
EXTERN char *prompt2; /* etc. */
EXTERN char *prompt3;
EXTERN char *prompt4;
EXTERN char *rprompt; /* $RPROMPT */
EXTERN char *sprompt;
EXTERN char *wordchars;
EXTERN char *rstring, *Rstring;
EXTERN char *postedit;
EXTERN char *hostnam; /* from gethostname */
EXTERN char *home; /* $HOME */
EXTERN char **pparams; /* $argv */
EXTERN pid_t mypgrp; /* the process group of the shell */
EXTERN char *argzero; /* $0 */
EXTERN char *hackzero;
EXTERN char *scriptname; /* name of script being sourced */
EXTERN zlong lineno; /* $LINENO */
EXTERN zlong shlvl; /* $SHLVL */
EXTERN long lastval2;
/* the last time we checked mail */
EXTERN time_t lastmailcheck;
/* the last time we checked the people in the WATCH variable */
EXTERN time_t lastwatch;
/* the last time we did the periodic() shell function */
EXTERN time_t lastperiodic;
/* $SECONDS = time(NULL) - shtimer.tv_sec */
EXTERN struct timeval shtimer;
/* the default command for null commands */
EXTERN char *nullcmd;
EXTERN char *readnullcmd;
/* the List of local variables we have to destroy */
EXTERN LinkList locallist;
/* what level of localness we are at */
EXTERN int locallevel;
/* what level of sourcing we are at */
EXTERN int sourcelevel;
/* The table of file descriptors. A table element is zero if the *
* corresponding fd is not used by the shell. It is greater than *
* 1 if the fd is used by a <(...) or >(...) substitution and 1 if *
* it is an internal file descriptor which must be closed before *
* executing an external command. The first ten elements of the *
* table is not used. A table element is set by movefd and cleard *
* by zclose. */
EXTERN char *fdtable;
/* The allocated size of fdtable */
EXTERN int fdtable_size;
/* The highest fd that marked with nonzero in fdtable */
EXTERN int max_zsh_fd;
/* input fd from the coprocess */
EXTERN int coprocin;
/* output fd from the coprocess */
EXTERN int coprocout;
/* the shell input fd */
EXTERN int SHIN;
/* the shell tty fd */
EXTERN int SHTTY;
/* the FILE attached to the shell tty */
EXTERN FILE *shout;
/* buffered shell input for non-interactive shells */
EXTERN FILE *bshin;
/* the FILE for xtrace output */
EXTERN FILE *xtrerr;
/* != 0 means we are reading input from a string */
EXTERN int strin;
/* != 0 means history substitution is turned off */
EXTERN int stophist;
/* this line began with a space, so junk it if HISTIGNORESPACE is on */
EXTERN int spaceflag;
/* don't do spelling correction */
EXTERN int nocorrect;
/* state of the history mechanism (see hist.c) */
EXTERN int histactive;
/* current emulation (used to decide which set of option letters is used) */
EXTERN int emulation;
/* the options; e.g. if opts[SHGLOB] != 0, SH_GLOB is turned on */
EXTERN char opts[OPT_SIZE];
EXTERN int lastbase; /* last input base we used */
#ifdef HAVE_GETRLIMIT
/* the resource limits for the shell and its children */
EXTERN struct rlimit current_limits[RLIM_NLIMITS];
EXTERN struct rlimit limits[RLIM_NLIMITS];
#endif
/* pointer into the history line */
EXTERN char *hptr;
/* the current history line */
EXTERN char *chline;
/* true if the last character returned by hgetc was an escaped bangchar
* if it is set and NOBANGHIST is unset hwaddc escapes bangchars */
EXTERN int qbang;
/* text attribute mask */
#ifdef GLOBALS
unsigned txtattrmask = 0;
#else
extern unsigned txtattrmask;
#endif
/* text change - attribute change made by prompts */
EXTERN unsigned txtchange;
EXTERN char *term; /* $TERM */
/* 0 if this $TERM setup is usable, otherwise it contains TERM_* flags */
EXTERN int termflags;
/* flag for CSHNULLGLOB */
EXTERN int badcshglob;
/* max size of histline */
EXTERN int hlinesz;
/* we have printed a 'you have stopped (running) jobs.' message */
EXTERN int stopmsg;
/* the default tty state */
EXTERN struct ttyinfo shttyinfo;
EXTERN char *ttystrname; /* $TTY */
/* 1 if ttyctl -f has been executed */
EXTERN int ttyfrozen;
/* != 0 if we are allocating in the heaplist */
EXTERN int useheap;
/* Words on the command line, for use in completion */
EXTERN int clwsize, clwnum, clwpos;
EXTERN char **clwords;
/* Non-zero if a completion list was displayed. */
EXTERN int listshown;
/* Non-zero if refresh() should clear the list below the prompt. */
EXTERN int clearlist;
/* pid of process undergoing 'process substitution' */
EXTERN pid_t cmdoutpid;
/* exit status of process undergoing 'process substitution' */
EXTERN int cmdoutval;
/* Stack to save some variables before executing a signal handler function */
EXTERN struct execstack *exstack;
/* Array describing the state of each signal: an element contains *
* 0 for the default action or some ZSIG_* flags ored together. */
EXTERN int sigtrapped[VSIGCOUNT];
/* trap functions for each signal */
EXTERN List sigfuncs[VSIGCOUNT];
#ifdef DEBUG
EXTERN int alloc_stackp;
#endif
/* Variables used by signal queueing */
EXTERN int queueing_enabled;
EXTERN sigset_t signal_mask_queue[MAX_QUEUE_SIZE];
EXTERN int signal_queue[MAX_QUEUE_SIZE];
EXTERN int queue_front;
EXTERN int queue_rear;
/* Previous values of errflag and breaks if the signal handler had to
* change them. And a flag saying if it did that. */
EXTERN int prev_errflag, prev_breaks, errbrk_saved;
/* 1 if aliases should not be expanded */
EXTERN int noaliases;
#ifdef GLOBALS
/* The literal characters corresponding to the lexer's token (meta)     *
 * codes.  NOTE(review): the ordering must match the token code enum —  *
 * confirm against the token definitions before editing.                */
char *ztokens = "#$^*()$=|{}[]`<>?~`,'\"\\";
#else
extern char *ztokens;
#endif
/* $histchars */
EXTERN unsigned char bangchar, hatchar, hashchar;
EXTERN int eofseen;
/* we are parsing a line sent to use by the editor */
EXTERN int zleparse;
EXTERN int wordbeg;
EXTERN int parbegin;
EXTERN int parend;
/* used in arrays of lists instead of NULL pointers */
EXTERN struct list dummy_list;
/* lengths of each string */
EXTERN int tclen[TC_COUNT];
EXTERN char *tcstr[TC_COUNT];
/* Values of the li and co entries */
EXTERN int tclines, tccolumns;
/* names of the strings we want */
#ifdef GLOBALS
/* Termcap capability names we look up; order must match the TC_*       *
 * indices used for tclen[] and tcstr[] above.                          */
char *tccapnams[TC_COUNT] =
{
    "cl", "le", "LE", "nd", "RI", "up", "UP", "do",
    "DO", "dc", "DC", "ic", "IC", "cd", "ce", "al", "dl", "ta",
    "md", "so", "us", "me", "se", "ue", "ch"
};
#else
extern char *tccapnams[TC_COUNT];
#endif
/* the command stack for use with %_ in prompts */
EXTERN unsigned char *cmdstack;
EXTERN int cmdsp;
#ifdef GLOBALS
/* Printable representations of the lexer tokens, indexed by token code *
 * (the numbers in the comments are the token code values).             */
char *tokstrings[WHILE + 1] = {
    NULL,	/* NULLTOK	  0  */
    ";",	/* SEPER	     */
    "\\n",	/* NEWLIN	     */
    ";",	/* SEMI		     */
    ";;",	/* DSEMI	     */
    "&",	/* AMPER	  5  */
    "(",	/* INPAR	     */
    ")",	/* OUTPAR	     */
    "||",	/* DBAR		     */
    "&&",	/* DAMPER	     */
    ">",	/* OUTANG	  10 */
    ">|",	/* OUTANGBANG	     */
    ">>",	/* DOUTANG	     */
    ">>|",	/* DOUTANGBANG	     */
    "<",	/* INANG	     */
    "<>",	/* INOUTANG	  15 */
    "<<",	/* DINANG	     */
    "<<-",	/* DINANGDASH	     */
    "<&",	/* INANGAMP	     */
    ">&",	/* OUTANGAMP	     */
    "&>",	/* AMPOUTANG	  20 */
    "&>|",	/* OUTANGAMPBANG     */
    ">>&",	/* DOUTANGAMP	     */
    ">>&|",	/* DOUTANGAMPBANG    */
    "<<<",	/* TRINANG	     */
    "|",	/* BAR		  25 */
    "|&",	/* BARAMP	     */
    "()",	/* INOUTPAR	     */
    "((",	/* DINPAR	     */
    "))",	/* DOUTPAR	     */
    "&|",	/* AMPERBANG	  30 */
};
#else
extern char *tokstrings[];
#endif
#ifdef GLOBALS
/* Names of the syntactic constructs pushed on cmdstack; used to build  *
 * the %_ prompt escape (see the cmdstack declaration above).           */
char *cmdnames[] =
{
    "for", "while", "repeat", "select",
    "until", "if", "then", "else",
    "elif", "math", "cond", "cmdor",
    "cmdand", "pipe", "errpipe", "foreach",
    "case", "function", "subsh", "cursh",
    "array", "quote", "dquote", "bquote",
    "cmdsubst", "mathsubst", "elif-then", "heredoc",
    "heredocd", "brace", "braceparam",
};
#else
extern char *cmdnames[];
#endif
#ifndef GLOBALS
extern struct zoption optns[OPT_SIZE];
#else
/* The shell option table.  Each entry holds the option's long name,    *
 * its single-letter zsh flag, its single-letter sh/ksh flag, and a     *
 * set of OPT_* flags.  The `x' macro (OPT_REV|) below presumably marks *
 * letters whose sense is inverted relative to the named option —       *
 * confirm against the option-parsing code.                             */
struct zoption optns[OPT_SIZE] = {
# define x OPT_REV|
    {NULL, 0, 0, 0},
    {"allexport", 'a', 'a', OPT_EMULATE},
    {"alwayslastprompt", 0, 0, 0},
    {"alwaystoend", 0, 0, 0},
    {"appendhistory", 0, 0, 0},
    {"autocd", 'J', 0, OPT_EMULATE},
    {"autolist", '9', 0, 0},
    {"automenu", 0, 0, 0},
    {"autonamedirs", 0, 0, 0},
    {"autoparamkeys", 0, 0, 0},
    {"autoparamslash", 0, 0, OPT_CSH},
    {"autopushd", 'N', 0, 0},
    {"autoremoveslash", 0, 0, 0},
    {"autoresume", 'W', 0, 0},
    {"badpattern", x'2', 0, OPT_EMULATE|OPT_NONBOURNE},
    {"banghist", x'K', 0, OPT_NONBOURNE},
    {"beep", x'B', 0, OPT_ALL},
    {"bgnice", '6', 0, OPT_EMULATE|OPT_NONBOURNE},
    {"braceccl", 0, 0, OPT_EMULATE},
    {"bsdecho", 0, 0, OPT_EMULATE|OPT_SH},
    {"cdablevars", 'T', 0, OPT_EMULATE},
    {"chaselinks", 'w', 0, OPT_EMULATE},
    {"clobber", x'C', x'C', OPT_EMULATE|OPT_ALL},
    {"completealiases", 0, 0, 0},
    {"completeinword", 0, 0, 0},
    {"correct", '0', 0, 0},
    {"correctall", 'O', 0, 0},
    {"cshjunkiehistory", 0, 0, OPT_EMULATE|OPT_CSH},
    {"cshjunkieloops", 0, 0, OPT_EMULATE|OPT_CSH},
    {"cshjunkiequotes", 0, 0, OPT_EMULATE|OPT_CSH},
    {"cshnullglob", 0, 0, OPT_EMULATE|OPT_CSH},
    {"equals", 0, 0, OPT_EMULATE|OPT_ZSH},
    {"errexit", 'e', 'e', OPT_EMULATE},
    {"exec", x'n', x'n', OPT_ALL},
    {"extendedglob", 0, 0, OPT_EMULATE},
    {"extendedhistory", 0, 0, OPT_CSH},
    {"flowcontrol", 0, 0, OPT_ALL},
    {"functionargzero", 0, 0, OPT_EMULATE|OPT_NONBOURNE},
    {"glob", x'F', x'f', OPT_EMULATE|OPT_ALL},
    {"globassign", 0, 0, OPT_EMULATE|OPT_CSH},
    {"globcomplete", 0, 0, 0},
    {"globdots", '4', 0, OPT_EMULATE},
    {"globsubst", 0, 0, OPT_EMULATE|OPT_NONZSH},
    {"hashcmds", 0, 0, OPT_ALL},
    {"hashdirs", 0, 0, OPT_ALL},
    {"hashlistall", 0, 0, OPT_ALL},
    {"histallowclobber", 0, 0, 0},
    {"histbeep", 0, 0, OPT_ALL},
    {"histignoredups", 'h', 0, 0},
    {"histignorespace", 'g', 0, 0},
    {"histnostore", 0, 0, 0},
    {"histreduceblanks", 0, 0, 0},
    {"histverify", 0, 0, 0},
    {"hup", 0, 0, OPT_EMULATE|OPT_ZSH},
    {"ignorebraces", 'I', 0, OPT_EMULATE|OPT_SH},
    {"ignoreeof", '7', 0, 0},
    {"interactive", 'i', 'i', OPT_SPECIAL},
    {"interactivecomments", 'k', 0, OPT_BOURNE},
    {"ksharrays", 0, 0, OPT_EMULATE|OPT_BOURNE},
    {"kshoptionprint", 0, 0, OPT_EMULATE|OPT_KSH},
    {"listambiguous", 0, 0, 0},
    {"listbeep", 0, 0, OPT_ALL},
    {"listtypes", 'X', 0, OPT_CSH},
    {"localoptions", 0, 0, OPT_EMULATE|OPT_KSH},
    {"login", 'l', 'l', OPT_SPECIAL},
    {"longlistjobs", 'R', 0, 0},
    {"magicequalsubst", 0, 0, OPT_EMULATE},
    {"mailwarning", 'U', 0, 0},
    {"markdirs", '8', 'X', 0},
    {"menucomplete", 'Y', 0, 0},
    {"monitor", 'm', 'm', OPT_SPECIAL},
    {"multios", 0, 0, OPT_EMULATE|OPT_ZSH},
    {"nomatch", x'3', 0, OPT_EMULATE|OPT_NONBOURNE},
    {"notify", '5', 'b', OPT_ZSH},
    {"nullglob", 'G', 0, OPT_EMULATE},
    {"numericglobsort", 0, 0, OPT_EMULATE},
    {"overstrike", 0, 0, 0},
    {"pathdirs", 'Q', 0, OPT_EMULATE},
    {"posixbuiltins", 0, 0, OPT_EMULATE|OPT_BOURNE},
    {"printeightbit", 0, 0, 0},
    {"printexitvalue", '1', 0, 0},
    {"privileged", 'p', 'p', OPT_SPECIAL},
    {"promptcr", x'V', 0, OPT_ALL},
    {"promptsubst", 0, 0, OPT_KSH},
    {"pushdignoredups", 0, 0, OPT_EMULATE},
    {"pushdminus", 0, 0, OPT_EMULATE},
    {"pushdsilent", 'E', 0, 0},
    {"pushdtohome", 'D', 0, OPT_EMULATE},
    {"rcexpandparam", 'P', 0, OPT_EMULATE},
    {"rcquotes", 0, 0, OPT_EMULATE},
    {"rcs", x'f', 0, OPT_ALL},
    {"recexact", 'S', 0, 0},
    {"rmstarsilent", 'H', 0, OPT_BOURNE},
    {"shfileexpansion", 0, 0, OPT_EMULATE|OPT_BOURNE},
    {"shglob", 0, 0, OPT_EMULATE|OPT_BOURNE},
    {"shinstdin", 's', 's', OPT_SPECIAL},
    {"shoptionletters", 0, 0, OPT_EMULATE|OPT_BOURNE},
    {"shortloops", 0, 0, OPT_NONBOURNE},
    {"shwordsplit", 'y', 0, OPT_EMULATE|OPT_BOURNE},
    {"singlecommand", 't', 't', OPT_SPECIAL},
    {"singlelinezle", 'M', 0, OPT_KSH},
    {"sunkeyboardhack", 'L', 0, 0},
    {"unset", x'u', x'u', OPT_EMULATE|OPT_BSHELL},
    {"verbose", 'v', 'v', 0},
    {"xtrace", 'x', 'x', 0},
    {"zle", 'Z', 0, OPT_SPECIAL},
};
# undef x
#endif
EXTERN short int typtab[256];
|
Merkle DAG-based Distributed Data Model for Content-addressed Trust-less Verifiable Data
This paper proposes a data model for a distributed environment. Properties of both the Merkle-DAG and the conventional relational distributed data model are combined to build a data model for efficient storage and retrieval of trust-less, verifiable, relational, and semi-structured data. We treat each user as a single independent server with its own storage, and focus only on its storage structure. The proposed data model increases data availability, consistency, and inherent security.
def create_nested_folder(nested_folder_path, main_folder_path):
    """Ensure the nested folder exists and return its path.

    If ``nested_folder_path`` is falsy (None/empty), it defaults to the
    ``NESTED_FOLDER_NAME`` sub-directory of ``main_folder_path``.
    """
    target = nested_folder_path or os.path.join(main_folder_path, NESTED_FOLDER_NAME)
    create_folder(target)
    return target
Time is of the Essence: Seize the Opportunity for Fulfillment in 2009
Fifty-three percent of workers worldwide surveyed in 2008 reported they would rather work and earn less, but have a happy stress-free fulfilling life. For the 78 million baby boomers and their pre- and post-boom colleagues this desire has resulted in a phenomenon that has been labeled "encore jobs," This essay describes opportunities for fulfillment for tax lawyers, young and not so young, as volunteers and fellows. |
The Joint Effect of hOGG1, APE1, and ADPRT Polymorphisms and Cooking Oil Fumes on the Risk of Lung Adenocarcinoma in Chinese Non-Smoking Females
Background The human 8-oxoguanine DNA glycosylase 1 (hOGG1), apurinic/apyrimidinic endonuclease 1 (APE1), and adenosine diphosphate ribosyl transferase (ADPRT) genes play an important role in the DNA base excision repair pathway. Single nucleotide polymorphisms (SNPs) in critical genes are suspected to be associated with the risk of lung cancer. This study aimed to identify the association between the polymorphisms of hOGG1 Ser326Cys, APE1 Asp148Glu, and ADPRT Val762Ala, and the risk of lung adenocarcinoma in the non-smoking female population, and investigated the interaction between genetic polymorphisms and environmental exposure in lung adenocarcinoma. Methods We performed a hospital-based case-control study, including 410 lung adenocarcinoma patients and 410 cancer-free hospital control subjects who were matched for age. Each case and control was interviewed to collect information by well-trained interviewers. A total of 10 ml of venous blood was collected for genotype testing. Three polymorphisms were analyzed by the polymerase chain reaction-restriction fragment length polymorphism technique. Results We found that individuals who were homozygous for the variant hOGG1 326Cys/Cys showed a significantly increased risk of lung adenocarcinoma (OR = 1.54; 95% CI: 1.01–2.36; P = 0.045). When the combined effect of variant alleles was analyzed, we found an increased OR of 1.89 (95% CI: 1.24–2.88, P = 0.003) for lung adenocarcinoma individuals with more than one homozygous variant allele. In stratified analyses, we found that the OR for the gene-environment interaction between Ser/Cys and Cys/Cys genotypes of hOGG1 codon 326 and cooking oil fumes for the risk of lung adenocarcinoma was 1.37 (95% CI: 0.77–2.44; P = 0.279) and 2.79 (95% CI: 1.50–5.18; P = 0.001), respectively. Conclusions The hOGG1 Ser326Cys polymorphism might be associated with the risk of lung adenocarcinoma in Chinese non-smoking females. 
Furthermore, there is a significant gene-environment association between cooking oil fumes and hOGG1 326 Cys/Cys genotype in lung adenocarcinoma among female non-smokers.
Introduction
Lung cancer has become a major cause of cancer mortality worldwide, especially in China, and pathological studies have found that adenocarcinoma is the main form of lung cancer in the female population. It is well known that smoking is the most important risk factor for lung cancer, but in the past 30 years, the incidence and death rate of lung cancer have been increasing in women, who have a low rate of smoking. Lung cancer may therefore be caused mainly by other factors in Chinese women; it is thus important to study the factors that affect female lung cancer, especially in non-smoking females.
Environmental exposure and genetic polymorphisms might contribute to the variation in individual lung cancer risk. Recent lung cancer studies have focused on identifying effects of single nucleotide polymorphisms (SNPs) in candidate genes; in particular, DNA repair genes are being increasingly studied. DNA repair systems play a fundamental role in the maintenance of genomic integrity and protect the human genome from damage caused by environmental carcinogens. Genetic variations in DNA repair genes are thought to affect DNA repair capacity and are suggested to be associated with a risk for lung cancer .
As one of the DNA repair pathways, the DNA base excision repair (BER) pathway plays an important role in repairing DNA damage caused by oxidation, deamination, and alkylation, and protecting cells against the damage of endogenous and exogenous carcinogens . The human 8-oxoguanine DNA glycosylase 1 (hOGG1), apurinic/apyrimidinic endonuclease 1 (APE1) and adenosine diphosphate ribosyl transferase (ADPRT) genes play an important role in the BER pathway . The hOGG1 gene encodes a DNA glycosylase and completes repair by releasing the 8-oxoG base caused by DNA reactive oxygen . The APE1 gene is located on chromosome 14q11.2-q12 and a rate -limiting enzyme in the BER pathway that repairs basic sites in DNA and functions as a redox activator, which regulates some transcription factors, and then participates in the BER process . The ADPRT gene is located at chromosome 1q41-q42 region, plays a direct and important role in the long-path BER pathway and it can bind to single-strand breaks in DNA . hOGG1 Ser326Cys, APE1 Asp148Glu and ADPRT Val762Ala are three common candidate single-nucleotide polymorphisms, and there were lots of studies investigated the association between BER SNPs and the risk of cancer . Recently, several studies have demonstrated the associations between these genetic polymorphisms and the risk of lung cancer, but the results vary in different ethnic populations and with different environmental exposure .
In the present study, we performed a case-control study to identify the association between the polymorphisms of hOGG1, APE1, and ADPRT, and the risk of lung adenocarcinoma in the non-smoking female population in Shenyang, China. We investigated the joint effects of three genes in the same pathway and the interaction between genetic polymorphisms and environmental exposure in lung adenocarcinoma.
Subjects
This hospital-based case-control study included 410 lung adenocarcinoma patients and 410 cancer-free hospital controls. All subjects were non-smoking females and they were from unrelated ethnic Han Chinese. The cases were recruited during January 2002 to January 2008 at the First Affiliated Hospital of China Medical University and Shenyang Northern Hospital. All patients were histologically confirmed to have lung adenocarcinoma before any radiotherapy and chemotherapy. During the same time, controls were selected from cancer-free patients with other lung diseases, but who were free of a history of cancer, and mainly suffered from bronchitis, pulmonary disease and emphysema. Controls were frequency-matched to case subjects for age (65 years). This study was approved by the institutional review board of China Medical University and written informed consent was obtained from all participants.
Data Collection
A total of 10 ml of venous blood was collected from each patient. Patients were interviewed to collect information for demographics and environmental exposure by well-trained interviewers at the time they were admitted to hospital. Information included demographic characteristics, dietary habit, and family history of cancer, fuel smoke exposure, passive smoking, cooking oil fumes exposure, and occupational exposure. An individual was defined as a smoker if she had consumed a total of 100 cigarettes in her lifetime; otherwise, she was considered as a non-smoker. For exposure to cooking oil fumes, participants were asked about the frequency of cooking and types of oils.
Statistical Analysis
We used Pearson's chi-square test and/or Student's t-test to compare the differences in demographic variables, environmental risk factors, and genotypes of the three genes between cases and controls. The odds ratio (OR) and 95% confidence interval (95% CI) for estimating the associations between genotypes of these genes and lung cancer were used in unconditional logistic regression analysis. The Hardy-Weinberg equilibrium was tested by performing a goodness-of-fit χ² test to compare the genotype frequencies of each SNP in the control subjects with those expected. A logistic regression model was used to evaluate gene-gene and gene-environment interactions. All data were analyzed with Statistical Product and Service Solutions (SPSS) v13.0 for Windows, if not otherwise specified. All statistical analyses were two-sided and the significance level was set at p < 0.05.
Results
The basic demographic data and environmental risk factors of the 410 lung cancer patients and 410 controls are shown in Table 1. The mean ages of cases and controls (mean ± S.D.) were similar. All cases were female lung adenocarcinoma patients. We found no significant differences in age, passive smoking, family history of cancer, fuel smoke exposure, education, and monthly income between the case and control groups. However, more cases were exposed to cooking oil fumes than controls (OR 1.62; 95% CI: 1.21-2.18; P = 0.001).
The distribution of hOGG1, APE1 and ADPRT gene polymorphisms in cases and controls, and the adjusted ORs associated with lung adenocarcinoma, are presented in Table 2. In the controls, all the genotype distributions were in agreement with the Hardy-Weinberg equilibrium (P > 0.05). Using subjects with the hOGG1 Ser/Ser genotype as the reference group, homozygous carriers of the hOGG1 326Cys/Cys genotype had a 1.54-fold risk of lung adenocarcinoma compared with the homozygous wild genotypes (95% CI: 1.01-2.36, P = 0.045). No significant association was found between APE1 148Asp/Glu polymorphisms and the risk for lung adenocarcinoma (P > 0.05). Distribution of ADPRT Val762Ala polymorphisms was not associated with a risk of lung adenocarcinoma (P > 0.05).
The effect of the combination of hOGG1 326Cys, APE1 148Glu and ADPRT 762Ala polymorphisms on the risk of lung adenocarcinoma is shown in Table 3. The reference group was individuals who were not homozygous for at least one of the variant alleles, considering the rarity of individuals with all three alleles. When patients homozygous for two or three gene variants were combined as one group, we found an increased OR of 1.89 (95% CI: 1.24-2.88; P = 0.003) for lung adenocarcinoma individuals with more than one homozygous variant allele of hOGG1 326 Cys/Cys, APE1 148Glu/Glu and ADPRT 762Ala/Ala. However, no significant effect was found in lung adenocarcinoma individuals with one homozygous variant allele.
To evaluate the potential gene-environment interaction, the association between the polymorphisms of hOGG1 Ser326Cys and exposure to cooking oil fumes was tested in a logistic regression model. The adjusted ORs for the interaction between hOGG1 Ser326Cys and cooking oil fumes for the risk of lung adenocarcinoma are shown in Table 4. In order to see the direct joint effect of cooking oil fumes and hOGG1 Ser326Cys, which means to create a joint effect variable with 6 values according to exposure to cooking oil fumes (no/yes) and the SNP (Ser/Ser, Ser/Cys, Cys/ Cys) and then compared to no exposure and ser/ser calculate ORs for the other 5 groups, we found that the ORs for the geneenvironment interaction between Ser/Cys and Cys/Cys genotypes of hOGG1 codon 326 and cooking oil fumes for the risk of lung adenocarcinoma was 1.37 (95% CI: 0.77-2.44; P = 0.279) and 2.79 (95% CI: 1.50-5.18; P = 0.001), respectively. In stratified analyses by passive smoking and fuel smoke exposure, we found no statistically significant relationships between the risk of lung adenocarcinoma and the three genetic polymorphisms.
Discussion
The incidence of lung cancer in non-smoking female population may be caused by genetic factors and other important environmental besides tobacco smoking, because of Chinese traditional cooking style. Cooking oil fumes are one of the major indoor air pollutants and are an important risk factor of lung cancer . However, the joint effect of these three genes and cooking oil fumes on the risk of lung adenocarcinoma in Chinese non-smoking females were not yet reported. Therefore, we investigated the association of three genetic polymorphisms (hOGG1 Ser326Cys, APE1 Asp148Glu, and ADPRT Val762Ala) in BER genes with the risk of lung adenocarcinoma, as well as the relationship of the three genetic polymorphisms with environmental factors (cooking oil fumes) in the Chinese non-smoking female population. We found a significant association between the hOGG1 326Cys/Cys genotype and lung adenocarcinoma. Homozygous carriers of the hOGG1 326Cys/Cys genotype had a 1.54-fold risk of lung adenocarcinoma compared with the homozygous wild genotypes ( Table 2).
Several studies have investigated the association of hOGG1 Ser326Cys polymorphisms with lung cancer . Kohno et al first identified hOGG1 Ser326Cys polymorphisms and found that the 326Cys protein has a low oh8G repair activity. Our recent meta-analysis of hOGG1 Ser326Cys using data from 18 studies showed that hOGG1 Ser326Cys polymorphisms might contribute to the risk of non-small cell lung cancer in the Asian population . Other previous studies have shown a significant positive association between the homozygous variant Cys/Cys genotype and lung cancer development, and some of these studies focused on smokers . However, there have been controversial results or no association between the hOGG1 326Ser/Cys genotype and lung adenocarcinoma reported in other studies . The reason for these different results between studies is not clear, but it may be because of differences in the size of the study population and differences in ethnicity.
APE1 plays a central role in base excision repair of DNA damage . APE1 Asp148Glu variants are the most common APE1 polymorphisms and have been extensively studied in lung cancer; however; the results are conflicting . In the current study, we found no association between APE1 Asp148Glu polymorphisms and the risk of lung adenocarcinoma among Chinese non-smoking females, which is similar to most previous studies . However, an association between APE1 As-p148Glu polymorphisms and lung cancer was reported in a Japanese study , a Chinese study , and a Belgian study ; this might be attributed to cigarette smoking exposure. The explanation for these differences between studies is unknown, but may be due to exposure to environmental factors or joint effects with other BER genes. ADPRT is a DNA-binding protein involved in the regulation of BER by detecting DNA strand breaks after DNA damage . The association between ADPRT Val762Ala polymorphisms and lung cancer has been studied, but not extensively. A Chinese study reported that the ADPRT Ala/Ala genotype is associated with a 1.68-fold (95% CI: 1.27-2.23) increased risk with lung cancer compared with the Val/Val genotype . However, no significant associations of ADPRT Val762Ala polymorphisms with lung cancer were reported in a Korean and Japanese population . We also did not find any significant association between ADPRT Val762Ala and the risk of lung adenocarcinoma among Chinese non-smoking females. However, the mechanisms for the effect of ADPRT Val762Ala polymorphisms on susceptibility to lung cancer remain unknown and require further investigation.
Many factors affect DNA repair activity and the risk of lung cancer, including multiple genetic variants. An increasing number of studies on the joint effects of more than one variant allele have shown that complex gene-gene interactions may significantly contribute to cancer susceptibility. Our results suggested that a potential combined effect among homozygous genotypes of hOGG1 326 Cys/Cys, APE1 148Glu/Glu and ADPRT 762Ala/Ala significantly increased the risk of lung adenocarcinoma in the Chinese non-smoking female population. Therefore, individuals with more than one homozygous genotype may have a higher risk for lung adenocarcinoma.
Although tobacco smoking is the main cause of lung cancer, cancer is a multifactorial disease. Some studies have suggested that cooking oil fumes may be an environmental risk factor in lung cancer in women who do not smoke . It is accepted that in addition to the individual contributions of genetic differences and environmental factors, interactions between the two are important in disease development . To evaluate potential gene-environment associations, the interaction or joint effect of hOGG1 Ser326Cys polymorphisms and cooking oil fumes on lung cancer was examined in the previous study and evidence supports an interaction between the Cys326 hOGG1 allele and environmental exposure to ROS resulting in increased risk of cancer . Based on the Chinese main traditional cooking styles, including stir-frying, decoction and deep-frying, Chinese women may inhale cooking oil fumes that contain multiple potential carcinogens during prepared food. In the present study, we found that exposure to cooking oil fumes may be an environmental risk of developing lung adenocarcinoma, which is consistent with our previous study . In addition, our results reflected the potential gene-environment interactions between Cys/Cys genotypes of hOGG1 codon 326 and cooking oil fumes. This finding suggests that carrying Cys/Cys genotypes of hOGG1 may increase the risk of lung adenocarcinoma when exposed to cooking oil fumes. Although the precise mechanism of how cooking oil fumes increase the risk in lung cancer is unclear, some studies have suggested that cooking oil fumes induce oxidative DNA damage, such as DNA adducts that are involved in lung carcinogenesis in female non-smokers and additionally, heated cooking oil increases the amount of 8-OHdG in human lung adenocarcinoma CL-3 cells, and it may directly or indirectly cause an accumulation of 8-OHdG . 
Therefore deletion, polymorphism and loss of heterozygosis of hOGG1 could affect the overall efficiency of oxidative base damage repair, resulting in hyper-mutation phenotypes including cancer . Therefore, hOGG1 may play an important role in repairing cooking oil fume-induced DNA damage .
There are several limitations in the current study. First, hospitalbased studies are likely to include some selection bias in the choice of controls that may not have provided a good representation of general population that alter the conclusions. Further studies with larger population-based studies are needed to reduce bias degree. Second, the statistical power of the study may be limited by the relatively small number of subjects and the number of studied SNPs, in addition, confounding factors by known and unknown risk factors could play a role in lung cancer risk and studies on the other BER genes are needed to confirm our findings in order to examine the possible relationship between BER genes polymorphisms and lung adenocarcinoma risk. Last, our study was limited to Chinese women, and the results cannot be extrapolated to other race populations. However the current study is one of the largest studies to investigate the association between BER gene polymorphisms and the risk of lung adenocarcinoma, and to evaluate the gene-gene and gene-environment interaction in the development of lung adenocarcinoma among female non-smokers. While the exact biological mechanism for the gene-environment interaction with BER gene polymorphisms is not well known, larger studies in non-smoker female populations are required in the future.
In conclusion, this hospital-based case-control study showed that hOGG1 Ser326Cys polymorphisms might be associated with the risk of lung adenocarcinoma in Chinese non-smoking females. Furthermore, there is a significant gene-environment association between cooking oil fumes and the hOGG1 Cys/Cys polymorphism on lung adenocarcinoma among female non-smokers. |
<reponame>AmineKheldouni/3D-Computer-Vision<filename>Fundamental/Imagine/vl/imop.h
/** @file imop.h
** @author <NAME>
** @brief Image operations
**/
/* AUTORIGHTS
Copyright (C) 2007-09 <NAME> and <NAME>
This file is part of VLFeat, available in the terms of the GNU
General Public License version 2.
*/
#ifndef VL_IMOP
#define VL_IMOP

#include "generic.h"

/** @name Convolution padding modes
 ** @{
 **/
#define VL_CONV_ZERO 0 /**< Zero padding */
#define VL_CONV_CIRC 1 /**< Circular convolution */
#define VL_CONV_CONT 2 /**< Pad by continuity */
/* @} */

/** @name Convolution
 ** @{
 **/

/** @brief Convolution with transposed result, single precision.
 ** @a mode selects the padding behaviour (::VL_CONV_ZERO,
 ** ::VL_CONV_CIRC or ::VL_CONV_CONT).
 ** NOTE(review): filter assumed to be 1-D with @a filt_width taps —
 ** confirm against the implementation in imop.c.
 **/
VL_EXPORT
void vl_convtransp_f(float *dst,
                     float const *src,
                     float const *filt,
                     int width, int height, int filt_width,
                     int mode) ;

/** @brief Convolution with transposed result, double precision.
 ** See ::vl_convtransp_f.
 **/
VL_EXPORT
void vl_convtransp_d(double *dst,
                     double const *src,
                     double const *filt,
                     int width, int height, int filt_width,
                     int mode) ;
/* @} */

/** @name Image Smoothing
 ** @{
 **/

/** @brief Gaussian smoothing, single precision.
 ** @a temp is caller-provided scratch space.
 **/
VL_EXPORT
void vl_imsmooth_f(float *dst,
                   float *temp,
                   float const *src,
                   int width, int height, double sigma) ;

/** @brief Gaussian smoothing, double precision.
 ** See ::vl_imsmooth_f.
 **/
VL_EXPORT
void vl_imsmooth_d(double *dst,
                   double *temp,
                   double const *src,
                   int width, int height, double sigma) ;
/*@}*/

#endif /* VL_IMOP */
|
<reponame>nexmoinc/gosrvlib
package httpclient
import (
"net/http"
"testing"
"time"
"github.com/stretchr/testify/require"
)
// TestWithTimeout verifies the option sets the wrapped http.Client timeout.
func TestWithTimeout(t *testing.T) {
	t.Parallel()

	client := defaultClient()
	timeout := 13 * time.Second
	WithTimeout(timeout)(client)
	require.Equal(t, timeout, client.client.Timeout)
}
// TestWithRoundTripper verifies the option installs the wrapped transport.
func TestWithRoundTripper(t *testing.T) {
	t.Parallel()

	client := defaultClient()
	wrap := func(next http.RoundTripper) http.RoundTripper { return next }
	WithRoundTripper(wrap)(client)
	require.Equal(t, wrap(http.DefaultTransport), client.client.Transport)
}
// TestWithTraceIDHeaderName verifies the option stores the header name.
func TestWithTraceIDHeaderName(t *testing.T) {
	t.Parallel()

	client := &Client{}
	headerName := "X-Test-Header"
	WithTraceIDHeaderName(headerName)(client)
	require.Equal(t, headerName, client.traceIDHeaderName)
}
// TestWithComponent verifies the option stores the component name.
func TestWithComponent(t *testing.T) {
	t.Parallel()

	client := &Client{}
	component := "test_123"
	WithComponent(component)(client)
	require.Equal(t, component, client.component)
}
// TestWithRedactFn verifies the option installs a working redaction function.
func TestWithRedactFn(t *testing.T) {
	t.Parallel()

	client := &Client{}
	redact := func(s string) string { return s + "test" }
	WithRedactFn(redact)(client)
	require.Equal(t, "alphatest", client.redactFn("alpha"))
}
|
<filename>app/Main.hs
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Codec.Picture.Types
import Control.Concurrent (threadDelay)
import Control.Monad
( unless,
(<=<),
)
import Data.List (nub)
import qualified Data.Map.Strict as Map
import Data.Maybe
( fromJust,
mapMaybe,
)
import Data.Vector.Generic (thaw)
import Foreign.C.Types (CInt)
import Game
import Graphics.Text.TrueType (loadFontFile)
import SDL
import SDL.Raw.Types (JoystickID)
import Space
( DeltaTime,
Time,
)
import System.FilePath ((</>))
import Visual
( ImageId,
backgroundColorSDL,
)
-- | Upper bound on the frame rate. The explicit signature pins the type
-- that defaulting previously chose (Double, via the Fractional use below).
maxFps :: Double
maxFps = 60

-- | Length of one frame at 'maxFps', in microseconds (the unit taken by
-- 'threadDelay').
frameInterval :: DeltaTime
frameInterval = round $ 1000000 / maxFps -- microseconds
-- | Entry point: initialise SDL (joystick + video), create the
-- fullscreen window and renderer, pre-render the static text images
-- once, then run 'gameLoop'. The pre-rendered textures are destroyed
-- after the loop exits.
main :: IO ()
main = do
    initialize [InitJoystick, InitVideo]
    window <-
        createWindow
            "twinpin"
            -- for screenshots
            -- defaultWindow { windowInitialSize = V2 882 496 }
            defaultWindow {windowMode = FullscreenDesktop}
    renderer <- createRenderer window (-1) defaultRenderer
    showWindow window
    -- loadFontFile returns Either; a Left aborts with 'fail'
    font <-
        either fail return
            =<< loadFontFile ("fonts" </> "Aller" </> "Aller_Rg.ttf")
    winSize <- get $ windowSize window
    preRenderedTextures <-
        mapM
            (toTexture renderer)
            (Map.fromList $ getStaticImages font winSize)
    gameLoop renderer preRenderedTextures winSize createGame
    mapM_ destroyTexture preRenderedTextures
-- | One iteration per frame: poll events, advance the game state, draw
-- the frame, open any newly attached joysticks, then sleep away the
-- rest of the frame budget. Recurses until 'isFinished' reports True.
gameLoop :: Renderer -> Map.Map ImageId Texture -> V2 CInt -> Game -> IO ()
gameLoop renderer preRenderedTextures winSize game = do
    updateTime <- currentTime
    events <- pollEvents
    -- unless (null events) $ print events
    -- printFps lastTime updateTime
    let updatedGame = updateGame events updateTime game
    showFrame renderer preRenderedTextures $ drawGame winSize updatedGame
    addedJoysticks <- openJoysticks $ getAddedDevices events
    let newGame = assignJoysticks addedJoysticks updatedGame
    -- timeSpent is in ms, frameInterval in µs; a negative delay argument
    -- simply makes threadDelay return immediately
    timeSpent <- currentTime `timeDifference` updateTime
    threadDelay $ frameInterval - (timeSpent * 1000)
    unless (isFinished newGame) $
        gameLoop renderer preRenderedTextures winSize newGame
-- | Render one frame: clear to the background colour, draw every image
-- in order, and present the back buffer.
showFrame ::
    Renderer ->
    Map.Map String Texture ->
    [(Rectangle CInt, Either (Image PixelRGBA8) ImageId)] ->
    IO ()
showFrame renderer preRenderedTextures images = do
    rendererDrawColor renderer $= backgroundColorSDL
    clear renderer
    mapM_ (showImage renderer preRenderedTextures) images
    present renderer
-- | Milliseconds since SDL initialisation, i.e. since game start.
currentTime :: IO Time
currentTime = fmap fromIntegral ticks
-- | Elapsed time between a previous timestamp and a freshly sampled one.
timeDifference :: IO Time -> Time -> IO DeltaTime
timeDifference current previous = subtract previous <$> current
-- | Draw one image into its destination rectangle. Generated images
-- (Left) are uploaded to a short-lived texture that is destroyed right
-- after the copy; static images (Right) are looked up by id in the
-- pre-rendered map. NOTE: the 'fromJust' assumes every referenced
-- ImageId was pre-rendered in 'main'; a missing id would crash.
showImage ::
    Renderer ->
    Map.Map String Texture ->
    (Rectangle CInt, Either (Image PixelRGBA8) ImageId) ->
    IO ()
showImage renderer preRenderedTextures (destination, generatedOrStatic) =
    either
        ( \image -> do
            texture <- toTexture renderer image
            drawInWindow texture
            destroyTexture texture
        )
        ( \imageId ->
            drawInWindow $ fromJust $ Map.lookup imageId preRenderedTextures
        )
        generatedOrStatic
  where
    -- copy with no rotation and no flipping
    drawInWindow tex =
        copyEx renderer tex Nothing (Just destination) 0 Nothing $
            V2 False False
-- the texture should be destroyed by the caller
-- | Upload a JuicyPixels RGBA image as an SDL texture: thaw the pixel
-- vector into a mutable one, wrap it in an ABGR8888 surface, convert
-- to a texture and free the intermediate surface.
toTexture :: Renderer -> Image PixelRGBA8 -> IO Texture
toTexture renderer image = do
    let rawImageData = imageData image
        width = fromIntegral $ imageWidth image
        height = fromIntegral $ imageHeight image
        size = V2 width height
        pitch = 4 * width -- row stride in bytes: 4 channels per pixel
    mutableVector <- thaw rawImageData
    surface <- createRGBSurfaceFrom mutableVector size pitch ABGR8888
    texture <- createTextureFromSurface renderer surface
    freeSurface surface
    return texture
-- | Joystick devices newly attached during this batch of SDL events
-- (deduplicated; non-matching events are skipped by the pattern).
getAddedDevices :: [Event] -> [JoystickDevice]
getAddedDevices events =
    nub
        [ JoystickDevice "" (fromIntegral deviceId)
        | Event _ (JoyDeviceEvent (JoyDeviceEventData JoyDeviceAdded deviceId)) <-
            events
        ]
-- TODO: do this in a different thread since openJoystick takes a lot of time
-- | Open each device and report the resulting joystick ids.
openJoysticks :: [JoystickDevice] -> IO [JoystickID]
openJoysticks = traverse (\device -> openJoystick device >>= getJoystickID)
-- | Debug helper: print the interval between two frame timestamps (ms)
-- and the frames-per-second figure it implies.
printFps :: Time -> Time -> IO ()
printFps prev curr = putStrLn line
  where
    diff = curr - prev
    fps = round $ 1000 / fromIntegral diff
    line =
        "time: "
            ++ show curr
            ++ " ms\t\tinterval: "
            ++ show diff
            ++ " ms\t\tfps: "
            ++ show fps
            ++ " Hz"
|
//
// UIControl+LTBlock.h
// Pods
//
// Created by yelon on 16/9/18.
//
//

#import <UIKit/UIKit.h>

/// Handler invoked when the observed control event fires; `obj` is the
/// control that triggered it.
typedef void(^ActionBlock)(id obj);

/// Predicate consulted for a guarded handler. It receives the control and
/// the action block. NOTE(review): whether a YES result runs the action,
/// or the condition itself is expected to invoke `actionBlock`, is not
/// visible from this header — confirm in the implementation.
typedef BOOL(^ConditionBlock)(id obj,ActionBlock actionBlock);

/// Block-based registration of UIControl event handlers.
@interface UIControl (LTBlock)

/// Registers `actionBlock` to run when `controlEvent` fires on this control.
- (void)lt_handleControlEvent:(UIControlEvents)controlEvent
actionBlock:(ActionBlock)actionBlock;

/// Same as above, but the handler is mediated by `conditionBlock`
/// (see the ConditionBlock typedef for the caveat on its semantics).
- (void)lt_handleControlEvent:(UIControlEvents)controlEvent
conditionBlock:(ConditionBlock)conditionBlock
actionBlock:(ActionBlock)actionBlock;

@end
|
// Computes the final score and, if it beats the lowest entry, inserts it
// into the high-score table in rank order. Table layout (inferred from the
// original shifting chain — confirm against the table's declaration):
// hiScores[1] is the best score, hiScores[5] the worst; hiScores[0] is
// scratch space for the candidate entry.
void hiScoresGo(unsigned long int score, unsigned int difficulty, unsigned int level_num) {
    short i;

    clrscr();
    hiScores[0].score = scoreCompute(score, difficulty, level_num);
    if (hiScores[0].score > hiScores[5].score) {
        // Qualifies: getName presumably records the player's name into the
        // candidate entry — confirm in its definition.
        getName(hiScores[0].score);
        // Shift lower-ranked entries down one slot until the candidate's
        // rank is found, then insert it. Replaces the original nested-if
        // chain with an equivalent loop.
        for (i = 5; i > 1 && hiScores[0].score > hiScores[i - 1].score; i--)
            hiScores[i] = hiScores[i - 1];
        hiScores[i] = hiScores[0];
    } else {
        printf("You failed to achieve\na new High Score\nYour Score was:\n%lu", hiScores[0].score);
        while (ngetchx() != KEY_ENTER);  // wait for ENTER before clearing
        clrscr();
    }
    printHiScores();
}
× Atheists sue Lehigh County over “offensive” Latin Cross seal
A national foundation dedicated to separation of state and church filed a federal lawsuit against Lehigh County this week. They claim the Latin cross found on the county seal and flag is offensive.
Four local residents, and The Freedom From Religion Foundation, first complained about the seal in November 2014. Then again, atheists opposed its message, in January of last year, creating “a minor firestorm,” according to the foundation. Lehigh County first adopted the county seal in 1944.
Lehigh County Board of Commissioners responded to the group in March 2015. Board members noted, “The cross, one of more than a dozen elements, was included to honor the original settlers of Lehigh County, who were Christian.”
According to the foundation, a supporter of the Latin cross said, “It’s do or die time. Stand up and live our national motto, ‘In God We Trust.’ And Jesus’ words ‘fear not’ should give you strength to win one for the cross. The people of the state’s prayers are with you. Go with God in your effort to prevail over this clear and present evil.”
The back and forth led to the group’s filing of the federal lawsuit. Representatives of the foundation said they “find the presence of the cross on the seal representing the entire county to be exclusionary and offensive, as the cross endorses Christianity and does not reflect the diversity of the population.”
Stephen Meholic, David Simpson, John Berry and Candace Winkler, who joined the lawsuit, endorsed the claim that Lehigh County is violating the First and 14th Amendments of the U.S. Constitution. The foundation wrote in a statement, the purpose of the Latin cross is religious, not secular, and it “has the primary effect of both advancing religion and expressing defendant’s preference for Christianity above all other religions and nonreligion.”
The plaintiffs said the “exclusionary” seal is displayed on governmental property and documents, such as on letterhead, numerous official county forms and reports, the county’s website, a display in the Board of Commissioners meeting room, and even on flags prominently displayed at the entrance of county buildings.
“Lehigh County is not a Christian county, it should be equally welcoming to all its citizens regardless of their religion or their reject of religion. A redesign to comply with the Constitution is imperative,” said foundation co-President Annie Laurie Gaylor.
The federal lawsuit was filed in the U.S. District Court for the Eastern District of Pennsylvania. |
New analysis of nine species that ‘walk’ by night on shallow reefs shows their range is much smaller than was known
Bizarre “walking sharks” are at a greater risk of extinction than previously thought, with new information about their distribution leading researchers to expect greater efforts to protect them from human threats such as fishing and climate change.
Bamboo sharks include nine species of sharks that swim and “walk” in shallow waters around northern Australia, Papua New Guinea and parts of Indonesia. In 2013 a new species of the genus was found in Indonesia.
They are harmless to humans and are only active at night, when they start to “walk” around shallow reefs, feeding on crustaceans – even sometimes walking out of the water.
Now a review of the nine species of bamboo sharks has shown their habitats are much more restricted than previously thought. The researchers expect the findings will trigger an increase in the conservation status of at least some of the unusual creatures.
Facebook Twitter Pinterest The sharks are active at night, and their walking behaviour makes them a favourite with divers. Photograph: Mark Erdmann/Conservation International
“Each of the nine species are small – less than one metre – charismatic sharks with unique and quite beautiful colour patterns,” says Mark Erdmann from Conservation International and the California Academy of Sciences. “Their walking behaviour makes them a favourite with divers, who will frequently request night dives with the explicit goal of finding a walking shark.”
Until now, it was thought that the various species had large overlapping distributions stretching all the way from northern Australia and Papua New Guinea to the Seychelles in the Indian Ocean and the Solomon Islands in the Pacific.
Erdmann says the new analysis shows conclusively the sharks have a smaller overall range and that the ranges of the nine species don’t overlap. “This obviously has huge conservation implications for the walking sharks,” Erdmann says.
'Walking shark' discovered in Indonesia Read more
“Anytime that a marine species is only found in a relatively tiny area, it means it is significantly more vulnerable to extinction than wide-ranging species. Any local threat – whether from fishing, from an oil spill, from rising temperatures, or even from physical destruction from a cyclone or tsunami – has the potential to wipe out the entire population.”
Gerald Allen from the Western Australian Museum says the sharks have an unusual breeding strategy that further limits their range, at least for a while after they hatch. “The female lays a few eggs amongst marine vegetation and these hatch into miniature adults that must forage for themselves and because of their limited swimming ability they are ‘tied’ to the immediate [area] in which they are born without the dispersal capability of most sharks.”
Erdmann says the new information is likely to prompt a reassessment of the sharks’ conservation status.
He says the International Union for the Conservation of Nature could reassess their status in its Red List, with some of the species likely to gain a more threatened status.
The Red List status doesn’t confer any direct protection to animals listed as threatened, but countries and international conventions use it in considering whether to implement greater protections.
If there is economic benefit to local communities via dive tourism, there is often increased awareness Gerald Allen, Western Australian Museum
Erdmann says the researchers are planning on communicating their findings to the Indonesian government and urging it to list the species found there as protected.
Allen says the team will be working with communities that depend on the sharks to encourage greater protection too. “Increased dive tourism is one avenue for their protection and we are personally trying to spread the word about these sharks and their need for protection in the dive industry. Successful dive tourism depends on healthy reefs and if there is economic benefit to local communities via dive tourism, there is often increased awareness and measures put in place to sustain healthy reefs.”
He says the fish evolved their ability to walk because of their feeding habits. “Their mode of locomotion is intimately tied to the exploitation of their food resources and has led to the evolution of walking rather than swimming,” he says.
“They feed mainly on small, cryptic invertebrates that hide in the reef. Walking enables them to methodically search for food in a slow and purposeful manner.” |
package com.qh.system.enums;
import com.qh.common.core.enums.CodeEnum;
/**
 * Business error codes for the system module.
 *
 * <p>Each constant wraps a {@link CodeEnum} whose code is the numeric id
 * prefixed with {@code "SYS"} (see {@link #getCodeEnum(String, String)}).
 * Message placeholders use {@code java.text.MessageFormat} style
 * ({@code {0}}), as established by {@code DATA_ASSIGNED}.
 */
public class SystemCodeEnum {

    /** "{0} is already assigned and cannot be deleted". */
    public static final CodeEnum DATA_ASSIGNED = getCodeEnum("1001", "{0}已分配,不能删除");

    /** "Department is disabled; adding is not allowed". */
    public static final CodeEnum DEPT_DISABLED = getCodeEnum("1002", "部门停用,不允许新增");

    /** "Operating on the super-administrator user is not allowed". */
    public static final CodeEnum ADMIN_CAN_NOT_OPR = getCodeEnum("1003", "不允许操作超级管理员用户");

    // NOTE(review): the constant names say EXPORT but both messages talk about
    // importing (导入). The names are kept because callers reference them, but
    // consider renaming in a coordinated change.
    /** "Imported user data must not be empty!". */
    public static final CodeEnum EXPORT_DATA_EMPTY = getCodeEnum("1004", "导入用户数据不能为空!");

    /** "Failed to import data". */
    public static final CodeEnum EXPORT_FAIL = getCodeEnum("1005", "导入数据失败");

    // Fixed: "{}" is not a MessageFormat placeholder and would be emitted
    // literally; every other message in this class uses "{0}".
    /** "{0} does not exist". */
    public static final CodeEnum NOT_EXIST = getCodeEnum("1006", "{0}不存在");

    /** "Shortcut (fast menu) count limit is not configured". */
    public static final CodeEnum NOT_CONFIG_FAST_MENU_COUNT_LIMIT = getCodeEnum("1007", "快捷方式个数限制没有配置");

    /**
     * Builds a {@link CodeEnum} whose code is {@code "SYS" + code}.
     *
     * @param code numeric error id (e.g. {@code "1001"})
     * @param msg  user-facing message, optionally with {@code {0}} placeholders
     * @return the composed error code/message pair
     */
    public static CodeEnum getCodeEnum(String code, String msg) {
        return new CodeEnum("SYS" + code, msg);
    }
}
|
def _get_res(cls, res_df, output_request):
out_req_cleaned = copy.deepcopy(output_request)
res_out = None
res_reqs = []
ti = res_df.index
for req in out_req_cleaned:
if req in res_df:
res_reqs.append(req)
if res_out is None:
res_out = {}
res_out[req] = cls.ensure_res_len(res_df[req].values, ti)
for req in res_reqs:
out_req_cleaned.remove(req)
return res_out, out_req_cleaned |
When we see Halloween decorations dominating the stores and neighborhoods, we all know Christmas decorations are not far behind.
With four Sundays left until the start of Advent, now is a great time to determine what the tone of your Christmas will be. Will it be all about decorations and shopping, gifts and food? Or will Jesus, “the reason for the season,” be central to your celebration?
Prepare Now
The first Sunday of Advent 2015 is November 29. Here are 4 things you can do to prepare for Advent:
Pray that God will prepare your heart to focus on Him in a special way during Advent, and to lead you to celebrate the season in a way that will bring you closer to Him.
Decide on a reading plan. What and when will you read? Will you read Advent scriptures and light the candles weekly? Daily? Will you do your Advent readings alone or with your family? Plan time – will you read after dinner? First thing in the morning? Purposing a regular time for Advent will help you to be more consistent as well as make this time special.
Make or purchase a wreath and Advent candles. Boxes of Advent candles in the traditional colors are sold, but you can often buy individual candles less expensively. If you wait until right before Advent begins, however, the traditional colors may be hard to find. But don’t let that stop you, you can use any colors you like. Depending on the wreath you use, you may need to add the center candle separately. This one is usually white, since it reminds us of the birth of Christ and is lit on Christmas or Christmas Eve.
Consider a service project. There are many ways to give at Christmas. Perhaps you will feel led to be involved with a charitable organization that collects toys and personal items for those in need, either in your community or in a foreign country. Perhaps you will feel led to put money in the Salvation Army bucket. Or perhaps you will spend time doing something extra for a neighbor or friend who could use your help. There are always ways to help people in need, but Advent is a good time to be intentional and prayerful about how you are called to give.
import { Component, OnInit, ViewChild, ElementRef } from '@angular/core';
import { HSLColor } from 'Models/hslcolor/HSLColor';
import { ColorizedNode, ColorizedUtilities } from 'Models/tree/ColorizedTree';
import { CustomTextareaComponent } from 'Components/textedit/components/customtextarea.component';
import { Document } from 'Models/document/document';
import { DocumentService } from 'Services/document/DocumentService';
import { Tag } from 'Models/tag/Tag';
// Renders one inline tag "chip" inside the document editor. Clicking the chip
// removes the tag: its inner content is spliced back into the surrounding
// segment and the tag's features are removed from the document model.
@Component({
    selector: "doctt-tag",
    templateUrl: './tag.component.html',
    styleUrls: ['./tag.component.scss']
})
export class TagComponent implements OnInit {

    // Tooltip text for the chip; set to the tag's name in setTag().
    public tooltip: string;

    // DOM node currently rendered inside the chip (last value passed to setContent()).
    private element: Node;

    // Tag model backing this component.
    private tagNode: Tag;

    // Owning document; passed along when removing the tag's features.
    private document: Document;

    // First node ever handed to setContent(); returned unchanged by getContent().
    private og_el: Node = null;

    // Host element of the chip and its inner content container (template refs).
    @ViewChild('tag') tag: ElementRef | undefined;
    @ViewChild('tagInner') content: ElementRef | undefined;

    constructor(private documentService: DocumentService) { }

    ngOnInit(): void { }

    // Handles a click on the chip: locates the tag's inner container and the
    // enclosing segment in the event path, unwraps the tag's children back into
    // the parent, and removes the segment's features from the document model.
    // Note: bare `document` below is the global DOM document, not this.document.
    onClick(event: MouseEvent) {
        event.preventDefault();

        // Walk the composed event path to find the relevant ancestors.
        const path: any = event.composedPath();
        let path_els: Element[] = path;
        let tagInner: Element = null;
        let spanContainer: Element = null;
        let doctttag: Element = null;

        // NOTE(review): p/span/finalspan are created but never attached or
        // read anywhere below — this looks like dead code.
        const p = document.createElement('p');
        p.innerText = '';
        const span = document.createElement('span');
        span.innerHTML = '';
        const finalspan = document.createElement('span');
        finalspan.innerHTML = '';

        // First match wins for each of the three ancestors of interest.
        for (const el of path_els) {
            if (el.className == 'tag-inner' && tagInner == null) {
                tagInner = el;
            }
            if (el.className == 'segment-container' && spanContainer == null) {
                spanContainer = el;
            }
            if (el.tagName == 'DOCTT-TAG' && doctttag == null) {
                doctttag = el;
            }
        }

        // If the click landed on the chip wrapper itself, its first child is
        // the inner content container.
        const target: Element = path_els[0];
        if (target.className == 'tag') {
            tagInner = target.children[0];
        }

        // NOTE(review): spanContainer may still be null here if no
        // 'segment-container' ancestor was found — the next line would throw.
        const father = spanContainer.parentElement;

        // Collect the segment's SPAN children, substituting the tag's inner
        // nodes in place of the tag element, then move them into a fragment.
        const docFragment = document.createDocumentFragment();
        const appendToDocFragment: ChildNode[] = [];
        spanContainer.childNodes.forEach((e, k, p) => {
            const child: any = e;
            const tmp: Element = child;
            if (tmp.tagName == 'SPAN') {
                appendToDocFragment.push(tmp);
            } else {
                // NOTE(review): the callback params (e, k, p) shadow the outer
                // forEach's params of the same names.
                tagInner.childNodes.forEach((e, k, p) => {
                    appendToDocFragment.push(e);
                });
            }
        });
        for (const el of appendToDocFragment) {
            docFragment.append(el);
        }

        // Remove this segment's features from the document model.
        const id = Number.parseInt(spanContainer.getAttribute('data-segment-id'));
        console.log('removing id ', id);
        // CustomTextareaComponent.removeFeaturesBySegmentId(this.document, id);
        CustomTextareaComponent.removeFeaturesFromSegmentID(id, this.documentService, this.document);

        if (father != null) {
            // Rebuild the parent's content: siblings keep their children,
            // while the removed segment is replaced by a <br> plus the
            // unwrapped content collected above.
            const finalDocFragment = document.createDocumentFragment();
            father.childNodes.forEach((v, k, p) => {
                if (v !== spanContainer) {
                    const child: HTMLElement = v as HTMLElement;
                    const arrayToAppend: ChildNode[] = [];
                    child.childNodes.forEach((i, j, k) => {
                        arrayToAppend.push(i);
                        // debugger;
                    });
                    for (const e of arrayToAppend) {
                        finalDocFragment.append(e);
                    }
                } else {
                    finalDocFragment.append(document.createElement('br'));
                    // NOTE(review): only the first element child of docFragment
                    // is re-inserted; any additional collected nodes are dropped.
                    finalDocFragment.append(docFragment.children[0]);
                }
            });
            // NOTE(review): removing nodes while forEach iterates a live
            // NodeList skips elements; some children may survive this loop,
            // which is presumably why spanContainer is removed again below.
            // Verify against intended behavior.
            father.childNodes.forEach((v, k, p) => {
                father.removeChild(v);
            });
            father.append(finalDocFragment);
            father.removeChild(spanContainer);
        }
    }

    // Debug hover handler: logs the backing Tag model.
    onMouseOver(event: MouseEvent) {
        // console.log("Mouse over tag", event);
        console.log(this.tagNode);
    }

    // Places `element` inside the chip's inner container, remembering the very
    // first node ever supplied so getContent() can return it later.
    setContent(element: Node) {
        if (this.og_el == null) {
            this.og_el = element;
        }
        if (this.content !== undefined) {
            const nativeElement: HTMLElement = this.content.nativeElement;
            nativeElement.appendChild(element);
            this.element = element;
        }
    }

    // Applies the tag's colour as the chip background.
    // NOTE(review): unlike setContent(), this does not guard against
    // this.tag being undefined (ViewChild not yet resolved) — confirm callers
    // only invoke it after view init.
    setColor(color: HSLColor) {
        this.tag.nativeElement.style.backgroundColor = color.toCSS();
    }

    // Associates the owning document (used by onClick's feature removal).
    setDocument(document: Document) {
        this.document = document;
    }

    // Binds the Tag model: colours the chip and sets the tooltip to its name.
    setTag(tag: Tag) {
        this.setColor(tag.color);
        this.tagNode = tag;
        this.tooltip = tag.name;
    }

    // Returns the first node that was ever passed to setContent().
    getContent(): Node {
        return this.og_el;
    }
}
|
def save(self, path: str) -> None:
    """Write the generated source text to ``path``.

    The content is obtained from ``self.get_source(path=path)`` and written
    out verbatim, replacing any existing file.

    Args:
        path: Destination file path (also forwarded to ``get_source``).
    """
    client_source: str = self.get_source(path=path)
    # Explicit UTF-8: the default for open() is platform-dependent, which
    # can corrupt non-ASCII source text on some systems.
    with open(path, "w", encoding="utf-8") as model_io:
        model_io.write(client_source)
Calibrating neighbourhood preferences in the land value contour model
Neighbourhood is a spatial self-contained residential colony maintaining effective socio-economic control within a city. The study presumes that people’s preference for residential location and choice of facilities might be a robust predictor of their neighbourhood demand. Their preferences reflect in terms of land value. GIS-based spatial contour model was used to examine whether the effect of residential preferences varied in terms of residential land value. The study found significant variations in residents’ preferences for accessible facilities in a city. The finding reveals that preferences for residential choice depend on many factors. Residents are willing to pay for easily accessible facilities which could be observed in the residential land value contour model of GIS.
LAND is one of the essential needs on earth for all living beings. Human beings require land for living, working and many other socio-economic activities, either directly or indirectly 1 . Urbanization drives urban sprawl, and pushes up the value of land and the cost of service delivery for all 2 . Urbanization rate indicates the fact that the population is growing at a fast pace; therefore, demand for residential land would be more in and around urban centres. Due to limited availability of such land within urban areas, land value is scaling new heights rapidly causing property values to increase beyond logical limits in both residential and commercial segments. Land value is affected by demand factors, such as views, amenities, proximity to facilities, transport, etc. 3 . Knowledge of the factors affecting land value is an important advantage in identifying the future of urban development and anticipating probable changes.
The land value is a function of various physical, environmental and psychological factors in an urban area. Since the value of land is unique, it is often difficult to identify the appropriate factors that might explain different land values. Land is a heterogeneous good that is comprised of a bundle of unique features reflecting not only its location, but also other facilities and amenities such as quality of the neighbourhood and infrastructure 4 .
Understanding of the factors influencing residential land value would enable policymakers to allocate more efficient residential land use. It also helps local governments and private utility providers in decision-making to monitor new developments and activities in the land market to build a complementary infrastructure for development.
The spatial land value model would rationalize the valuation approach for different locations. It would be helpful in land-use proposals, real estate growth estimation, and improvement of government revenue. Thus, there is a need to work on land value and explore its spatial morphology. In-migration over the last decades has increased the demand for residential land in urban areas. Unfortunately, access to land via the government functionary has not kept pace with demand. The selection of land for residential purposes is dependent on its location and other allied benefits. The present study, therefore, seeks to unravel the main factors influencing residential land value, and their explanatory spatial and non-spatial determinants in residential land value estimation.
Research objective and methodology
A literature review shows factors having impact on urban residential land value. Empirical studies on urban land value have worked on various functions with spatial and non-spatial determinants. After understanding the role of urban facilities for neighbourhood preferences, this study examines the relationship between these facilities and their land value morphology. Therefore, the objective of the study is to calibrate neighbourhood preferences in an urban residential land value contour model. The smallest planning unit below 5000 population is conceived as a housing area. The hierarchy of urban development defines neighbourhood as a housing cluster for population of 5000-15,000 with minimum infrastructure provision. This study analyses the relationship between residential land value and identified six urban facilities functioning as factors for land value establishment. First, we show that ways in which neighbourhoods are perceived and how residents have widely differing demands regarding the extent and distinctive facilities for their own neighbourhood, with the case example of Bhopal city, Madhya Pradesh (MP), India. Second, we highlight the importance of facilities in structuring residential land value, which is the reflection of neighbourhood preferences. We conclude with suggestions as to how neighbourhood demands might be incorporated into the urban residential land value contour model.
Land value data of residential colonies published by the District Collector were collected to assess residential land value morphology for Bhopal city. This value is also known as Circle Rate, which might be different from the market rate for some locations. Collector value is the statutory published unit value of land and buildings of different uses. It was observed that only access to arterial roads had been accounted for in the premium on land values. The study identifies six factors responsible for the determination of residential land value and follows an approach to explore the relative contribution of these factors on residential land value of Bhopal city. Priorities of recent buyers from all municipal zones were identified for the six factors during selection of residential land. This was a significant phase, involving consensus building among buyers' perceptions to find their combined opinion. Land values of all 3111 urban residential colonies of Bhopal, which were listed in the published Collector land value sheet of 2019-20 were considered. The land value date were synthesized into the attribute table of Geographic Information System (GIS). Spatial analysis was performed using ArcMap 10.1 GIS software. Satellite images were collected from Google Earth at 500 m elevation to identify the location of residential colonies and facilities. These images were geo-referenced in WGS_1984_UTM_Zone_43N projected coordinate system; Transverse_Mercator projection; GCS_WGS_1984 Geographic Coordinate System; D_WGS_1984 Datum; Greenwich Prime Meridian and Degree Angular Unit. The relationship between residential land values of colonies was measured separately with each identified factor. The study examined the influence of different factors in terms of their range and threshold. A land value contour model was prepared in GIS, which could be useful to examine the residential land value morphology around the identified factors. 
We studied the significant role of the contour model in urban land value assessment. The spatial pattern of residential land value in urban areas could be elaborated with the proposed model.
Bhopal city
Bhopal city is located on the hilly terrain in central India ( Figure 1). National and State Highways link this city to many large cities of the country. Bhopal is connected by the broad gauge railway line to other metros and is also served by regular air services to Mumbai, Delhi and Indore 5 . Bhopal city is a fascinating amalgam of scenic beauty, old historic sites, educational and modern urban planning. It is the administrative and political nerve centre of MP. Nestled among lakes and hills, it is sur-rounded by forests and poor agricultural land. The city has more than 1.8 million population and covers an area of 285.9 sq. km. Development in Bhopal and surrounding areas has been rapid since the last decade. The city grew mostly in the southeast direction along the national highway 46. Not only the central area of the municipal boundary of Bhopal, but also its hinterland are considered prime residential areas. The natural diversity and greenery of the city attract migrants. These migrants increase population growth which requires additional land for their living. The rise in demand for land resulted in a rise in land value. Rapid development also accelerated the urbanization process yielding the growth rate of economic investments, trade and industry. Bhopal was notified as having the second most dynamic residential value in Residex for 2011 by the National Housing Bank of India. The city has been continuously listed in the top ten cities of Residex till 2019. The cost of residential buildings has recorded a maximum increase, indicating that Bhopal is fast emerging as one of the major residential real estate investment hubs in the country.
Bhopal is divided into 14 zones, which are further subdivided into 70 wards. The city has four distinct townships: BHEL Township, T.T. Nagar, Bairagarh and Fringe Area Development, surrounding the old city and its peripheral areas. The areas between these four townships are moderately inhabited. The existing core of the city and its surroundings is the hub of all activities and is the most congested area. The central area has a uniform skyline and forms a confined residential neighbourhood. Authorized residential colonies located within the municipal area are well connected with prominent city-level amenities. The economic class of inhabitants could be identified with their housing typologies in a neighbourhood. The inner city neighbourhoods have a majority of Muslim population, while the Hindu community lives in new Bhopal and the outskirts. The planned residential colonies have demarcated public and open spaces. On the other hand, unauthorized colonies are lacking neighbourhood-level facilities. Land values of colonies which are away from the city core and lack basic urban services are found to decrease.
Literature review
Modern studies on the impacts of agglomeration in urban centres and of the value of new infrastructure provision apply land values to measure the benefits of a certain feature, be it an infrastructure project or spillover induced by proximity to other firms, markets and workers 6 . Land values are associated with many factors, not only environmental conditions but also government policies and factors of socio-economic value 7 . These factors have been explored in many studies and their impacts elaborated using various methods. Dutt and Khan 9 analysed the spatial pattern of land values in Akron, USA, in relation to the general land-use situation, and to identify the major factors influencing land values in the city considering lot size, building value and distance from the central business district (CBD) as land-value determinants. Competitive bidding for land determines the urban land-use pattern 10 . In the long run, this competitive allocation process results in a tendency for the overall land-use pattern to adjust, so that each location is occupied by the activity which can pay the highest rent 11 . A location has its potential utility for each type of activity which could be measured by willingness to pay rent for the use of a location 12 . Neoclassical marginalist models implicitly assume that every land has its specific requirement of location depending upon the activities. The land uses which demands proximity to the centre of the city or away from the city will bid high for the land required for a specific activity 13 . The Burgess 10 model provides a degree of explanation of a bid-rent theory which was derived from empirical observations on the way in which the city of Chicago, in USA had developed. As such, it is a hybrid of idealized land-use patterns and urban social structure with a strong emphasis upon residential areas. Hoyt 10 in a study of American cities on rent levels of residential areas reveals the importance of sector form of development.
The main difference between the Hoyt and Burgess models is that the former considered direction as well as distance from the CBD to be important in determining land use. Harris and Ullman 10 extended Hoyt's subtle recognition that the CBD was not the only focus of activities, and made it explicit in their multiple nuclei model. It implies that a city has a cellular structure within which several specialized areas develop 10 . The diagrammatic model of Mann draws heavily upon the concentric zone and sector models, but it makes passing reference to separate and more specialized areas 13 .
The uneven distribution of land values has attracted numerous studies as dynamic urban development has continuously challenged the understanding of the pattern of land-value distribution and the determinants underlying these patterns. In the early 20th century Chicago, 75% of land value variations was explained by the distance variables alone. In the 1960s, however, distance variables could only explain 10% of the land value variation in Chicago 14 . According to Miller and Geltner 15 , real-world cities are not purely monocentric; they have other major activity areas besides the CBD. Large cities are sprinkled with neighbourhood business districts (NBDs) that serve the needs of local communities. The distance variable alone explained 62% of the variations, while addition of the other two variables only increased the explanatory power to about 69% (ref. 15). Recently, more scientific as well as financial analyses have been carried out to establish more relevant factors controlling land value, other than distance to the city centre. Land value around nodes, inter-state access points or light rail stops increases the most as long as zoning and other land-use regulations permit more intensive development around such sites 16 . It was reported that the actual residential land value function can exhibit complex curvilinear shapes 13 , and hence land values may not automatically decline with distance from the CBD alone.
One study prepared a land value model based on spatial determinants for the case of South Tangerang, Indonesia 7 . Uju and Iyanda 23 did comparative analysis among 21 spatial and non-spatial determinants with residential land value. Emoh et al. 8 examined various determinants of residential land value in Onitsha, Nigeria. Studies have listed a set of factors that have been commonly used in property valuation research 7,8 . These factors are social, physical, economic and environmental. The impact of regulatory provisions and allied taxes for residential development on land value has also been explained 7,8,19,20,24 . The impact of factors on residential land has been statistically mapped in terms of their accessibility 7,8,17 . Based on the literature review considering different nations, six factors having an impact on residential land value have been identified (Table 1).
Environmental and social characteristics, public facilities and related infrastructure are often used as the explanatory variables in land-value models. A study of the land-value model shows that the coefficients of these factors varied from place to place. The most influential factors were distance from the highway, arterial road and public facilities. The study of residential land value is significant with respect to its spatial variation. There are tremendous research opportunities regarding factors of residential land value determinants.
Earlier studies found the relationship between land value and its factors using statistical methods. Since the early 20th century, three approaches to estimate the market value of properties have been refined through the years and variations developed for specific appraisal problems 27 . These property value estimation approaches are: (i) sales comparison approach; (ii) cost approach, and (iii) income approach. To analyse the land value data, statistical approaches such as regression models are useful for cases having influencing factors on land value. Other studies highlighted ordinary least square (OLS) regression lacking the ability to consider spatial effects, which then led to biased and inefficient estimations 28 . Geographically weighted regression (GWR) provides an alternative approach for analysing spatial data, including land values 7 . Spatial effects could be highlighted through different analysis techniques of the GIS tool. Accessibility and proximity level are over analysed through travel-time thresholds with the use of a distance decay function.
The literature review shows that spatial as well as nonspatial factors determine land value. Some determinants are significant at the city level, while others function well at the neighbourhood and plot level. Six spatial determinants were identified having a significant role at the local level and providing facilities for the neighbourhood (Table 1). Geometrically, there are point factors, line factors and polygon factors. The influence of point factors are discussed with two dimensional concentric buffers. Whereas, influence of line and polygon factors could be explained by three dimensional contours. This study assesses land value with respect to point factors functioning as a magnetic focal point at the city or local level. Six such identified point factors having an impact on land value, including neighbourhood facilities are: (i) airport, (ii) bus stops, (iii) hospitals, (iv) malls, (v) railway station and (vi) schools, which were compared with urban land values for the case of Bhopal.
Land value assessment and results
Analysis of the relationship between neighbourhood preferences and residential land value was performed in three steps. Published Collector land value was the data source. First, facts regarding land value disparity were observed based on statistical trend assessment of data in the last two decades. Secondary data were validated with the perception of recent residential land buyers by a comparative assessment of the land value dataset. Priorities of buyers were examined through a stratified sample survey of recent buyers in the second step. Descriptive statistics gives a cross-sectional view on market conditions in terms of property and land-value effects. A contour model with 3111 residential locations was prepared in GIS, which also decreased the spatial descriptive behaviour of land value in the third step. The identified point factors were scrutinized with respect to the contour model.
Chronology of residential land value
Residential land value data since 2011 were collected from the District Collector. This gives land and property values for residential and commercial properties. Land values are given with their respective colony, society or location names in a data sheet under the municipal wards of Bhopal. Land value data of the last two decades were reviewed to assess their spatio-temporal changes. The data revealed that the land value of a particular location changes proportionally independent of any rational or theory. Land values of public non-marketable properties and slums under bridges were also noted. The increase in land value due to proximity of road for commercial and residential land use is the same, whereas the public preference for both is not same. Land speculations due to infrastructure development were not projected. The district collector land value remains same irrespective of influence of identified factor. This suggests that secondary data from the District Collector are different from the ground reality and follow a conventional approach of land-value projections. Thus, there is a need to review land value data spatially and revisit the facts observed, which are not similar to urban residential land market. Buyers' perceptions towards different factors might play a significant role in land value-related research. Residential land buyers wish to pay an additional amount for the necessary facilities. Therefore identification of factors having an impact on land value and their spatial relation might contribute to land valuation research. A GIS-based approach to prepare a land value contour model has been proposed here with six identified factors that function as neighborhood facilities.
Priority of residential land buyers
Bhopal Municipal Corporation has been classified into 14 municipal zones. Five recent residential land or property buyers from each zone were surveyed to distinguish their preferences among the six identified factors during land purchase. Their opinions regarding the importance of facilities were grouped in four classes, viz. not important, fairly important, important and very important for the identified factors based on the 'Likert scale'. Percentage frequency distribution for each factor in the four different scales of priority was calculated ( Table 2).
All identified facilities were considered important for the selection of residential land in the recent buyers' preferences. Bus stops, hospitals and schools were prime facilities during the selection of residential land, while distance from the airport was least important for the buyers. Other factors were also considered at the time of residential land selection to finalize affordable land value by a buyer. The list of factors might also increase, which has an impact on land value. These factors might vary for different cities; therefore, public and stakeholder consultation should be incorporated for the selection of factors and validation of data before land-value mapping and model preparation.
Land value contour model
After preparing the city base map, a point-shape file for colonies was created. It had 3111 residential colony points to locate different land parcels in Bhopal. A land value contour elevation was generated with the help of GIS with residential land value colony points. The elevated land value contour presents the variation of residential land value in different locations within the Bhopal municipal area. Land value contour elevation is useful to assess the relationship between the identified factors and their surrounding residential land value. The contour elevation model was prepared in three analytical stages: (1) create a TIN file in ArcMap; (ii) TIN to shape file in ArcMap and (iii) 3D modelling in ArcScene of GIS. TIN-file was created from a colony point file with the help of a 3D analyst tool which works on the triangular projection system. This could be converted into a contour shape file having a contour line of residential land value. The triangular surface could be developed by converting the TIN file into a shape file using the surface contour tool. Finally, the land value contour model was prepared by inserting base heights of Z-value in the layer property ( Figure 2). Land value contour ranges from Rs 2.2 K per square metre to Rs 110 K per square metre. Only two colony samples had land value more than Rs 100 K per square metre. These sample colonies have been proposed for commercial use in the new land-use plan 2031 of the Bhopal development plan. The land value range was grouped into four classes of size Rs 25 K each. The ele-vated land value contours were vertically divided under the four classes of Rs 0-25 K, Rs 25 K-50 K, Rs 50 K-75 K and Rs 75 K-100 K (Figure 3).
Point shape files for identified factors were also prepared. The school shape file had maximum points, while the airport shape file had only one location point. The land value of location points of six factors were represented by contour's elevation. Therefore, high altitude contours present high land value. Therefore, factor points having high residential land value in the surroundings were elevated at higher altitude contour. Each factor point was recognized with its land value contour to classify the samples into classes of Rs 0-25 K, Rs 25-50 K, Rs 50-75 K and Rs 75-100 K (Figures 4 and 5). ArcScene application of GIS was useful to perform the exercise of contour modelling and its presentation.
Location points of all factors were classified within these four land-value classes according to the spatial land value contour. The percentage frequency distribution of factor points was calculated for all four classes ( Table 3). The four-classes were assigned weights from 1 to 4 according to the increase in residential land value. Based on percentage frequency and assigned weight, weighted mean values were calculated for each factor (eq. 1). WM f = (P c1 × 1 + P c2 × 2 + P c3 × 3 + P c4 × 4)/100, (1) where WM f is the weighted mean of a factor and P cn is the percentage frequency of the nth class.
The weighted mean value of a mall was maximum, which suggests that a mall or market place is highly preferred for the selection of residential land. Land value around a mall is comparatively higher than the other factors. Commercial area, convenience shopping and other alternatives could be proposed with a residential project development to attract more buyers. Similarly, willingness to pay was higher for bus stops, as they provide easy access to other places in the city, while it was very low for the airport. The influence zone of airport is beyond the city limit. However, only a fraction of population enjoys its benefit. Other factors could be compared according to their weighted mean value, suggesting the preferences of buyers for residential location selection.
Results and conclusion
Here we have studied the factors influencing residential land value. We found that people were willing to pay an additional amount for residential land if it had easy and economic access to the neighbourhood facilities. The selection and preference of factors for residential land by buyers were also different. It depended on the buyers' personal requirements and affordability. The development of roads, commercial areas, college or university, access to public transport, provision of basic amenities, social infrastructure, transportation connectivity, environmental quality and recreational facilities were the governing factors for the high residential land value in the cities. The study findings can be discussed by answering the following questions: (i) What are the factors having an impact on land value? (ii) What is the role of the contour model in urban land value assessment? (iii) What is the spatial pattern of residential land value in the urban area of Bhopal city?
The influencing factors of residential land value could be divided into five aspects, i.e. location, transportation, environmental, social and administrative factors with measurable qualitative variables and non-measurable quantitative variables. Thus, selecting the significant factors, which are comparable with the land values, is important in residential land value model-building. The factors selected were facilities like schools, convenience of bus stops, access to malls, hospitals, distance to railway station and airport. These factors have significant impact on land value in residential colonies.
The present study provides insights into the spatial structure of land value using the contour model. The model has three-dimensional behaviour which presents the elevated surface morphology for a dataset. A contour model could be prepared in a GIS platform with land value dataset. Comparative spatial assessment for land value could be made with reference to the location of the neighbourhood facilities and other factors. It was observed that land values show a discrepancy within one municipal zone and between zones, with most of the expensive land parcels in the city centre. The higher altitude of land value contour along the arterial and subarterial roads suggests that the city has concentric growth in urban centres, while it follows a radial pattern of growth in suburban and peri-urban areas. Repeated redevelopment in the city core increases land value exponentially, as it provides new opportunities each time. One could also easily find economic land parcels whose land values were less than those of some parcels in peripheral areas of the city, reflecting the variation in property values in the central town. The contour model could portray well the uneven distribution of land values in different locations of a city. The elevated factors on contour morphology could be compared with factor density per unit area. This comparative study of land value and geographical location of factors might be mapped and a numeric regression model could also be prepared. The proposed contour model is useful to estimate the land value of a location based on accessibility to neighbourhood facilities.
Spatial variables, especially distance to the city centre, were also important in shaping land-value morphology.
The key drivers for growth of Bhopal city are its rich historical background, new state capital and industrial establishment. The central retail market and service sectors have highest buyer preference. Residential land parcels are more expensive in the west and southeast than in north Bhopal. Land value is very high in close proximity of a mall, while least near an airport. Comparative study of land value of a location and its accessibility to different facilities would be useful for an explanation of spatial land-value patterns. It will also be beneficial for investors, planners, residential developers and policy-makers in the decision-making processes.
Lack of a large, complete, accurate and representative dataset is one of the major limitations of the contour model. Bhopal functions as an educational hub and commercial centre for its region. There are many focal points within the city limits that attract people as potential residential land. It was observed that land near convenience shopping had higher value than other areas. The spatial representation of land value through the contour model highlights Bhopal as a polycentric city. The city model does not perfectly fit the concentric theory of bid rent. More accurate and dense land-value data points and information are required for the preparation of land-value morphology. Land value has a dynamic characteristic. The comprehensive land-value variation arises not only from spatial characteristics, but also reflects temporal changes. Authorities should be equipped with advanced GIS software and other technical aids to improve the land valuation method. The present conventional approach of valuation adopted by the district administration ignores the land-value impact factors. GIS-based tools could improve the accuracy of measurement and efficiently accomplish land valuation. Moreover, GIS capabilities not only facilitate the organization and management of geographic data, but also enable researchers to take full advantage of location information contained in the databases to support the application of spatial statistical and spatial econometric tools. A more accurate approach of land valuation is needed to increase government revenue and improve accessibility for citizens.
//adds the timeline value for each selected alliance robot when the defensive timer is either pressed or stopped
public void createTimelineInput(String type) {
TimerUtil.MatchTimer match_timer = new TimerUtil.MatchTimer();
Map<String, String> timeline = new HashMap<>();
timeline.put("time", match_timer.getTime());
timeline.put("type", type);
if (selectedDefensiveRobot.equals(teamOne)) timelineRobotOne.add(timeline);
if (selectedDefensiveRobot.equals(teamTwo)) timelineRobotTwo.add(timeline);
if (selectedDefensiveRobot.equals(teamThree)) timelineRobotThree.add(timeline);
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.