package ru.stqa.pft.soap;

import com.lavasoft.GeoIPService;
import org.testng.Assert;
import org.testng.annotations.Test;

public class GeoIpServiceTests {

  @Test
  public void testMyIp() {
    String ipLocation = new GeoIPService().getGeoIPServiceSoap12().getIpLocation("192.168.0.15");
    Assert.assertEquals(ipLocation, "<GeoIP><Country>US</Country><State>CA</State></GeoIP>");
  }
}
from dataclasses import dataclass, asdict, field, InitVar
import random
import collections
import shutil
from pathlib import Path
import unittest
from zensols.persist import (
    DelegateDefaults,
    DirectoryCompositeStash,
    MissingDataKeys,
    PersistableError,
)


class DcsTestStash(DirectoryCompositeStash):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, attribute_name='agg', **kwargs)


@dataclass
class DataItem(object):
    apple: int
    orange: int
    dog: str
    cat: str
    is_ordered: InitVar[bool] = field(default=False)

    def __post_init__(self, is_ordered: bool):
        data = asdict(self)
        agg = collections.OrderedDict() if is_ordered else {}
        keys = list(data.keys())
        random.shuffle(keys)
        for k in keys:
            agg[k] = data[k]
        self.agg = agg

    def __str__(self):
        return f'{super().__str__()}, agg={self.agg}'


class TestDirectoryCompStash(unittest.TestCase):
    def setUp(self):
        DelegateDefaults.CLASS_CHECK = True
        targdir = Path('target/ctmp')
        if targdir.exists():
            shutil.rmtree(targdir)
        self.targdir = targdir
        self.groups = (set('apple orange'.split()), set('dog cat'.split()))

    def test_create(self):
        path = self.targdir / 'create'
        stash = DcsTestStash(path, groups=self.groups)
        stash_path = self.targdir / 'create' / DcsTestStash.INSTANCE_DIRECTORY_NAME
        self.assertEqual(stash_path, stash.path)
        cstashes = stash.stash_by_attribute
        self.assertEqual(set(cstashes.keys()), set('apple orange dog cat'.split()))
        gnames = set(map(lambda s: s.group_name, cstashes.values()))
        self.assertEqual(gnames, set('cat-dog apple-orange '.split()))
        self.assertEqual(set(stash.stash_by_group.keys()), set('cat-dog apple-orange '.split()))

    def test_dict_to_composite(self):
        path = self.targdir / 'to_dict'
        stash = DcsTestStash(path, self.groups)
        self.assertFalse(path.exists())
        di = DataItem(1, 2, 'rover', 'fuzzy')
        composite = stash._to_composite(di.agg)[1]
        composite = dict(composite)
        self.assertEqual(set(composite.keys()), set('apple-orange cat-dog'.split()))
        s1 = composite['apple-orange']
        s2 = composite['cat-dog']
        self.assertEqual({'apple': 1, 'orange': 2}, s1)
        self.assertEqual({'dog': 'rover', 'cat': 'fuzzy'}, s2)

    def test_dump(self):
        path = self.targdir / 'dump'
        inst_path = path / DcsTestStash.INSTANCE_DIRECTORY_NAME
        stash = DcsTestStash(path, self.groups)
        di = DataItem(1, 2, 'rover', 'fuzzy')
        stash.dump('1', di)
        self.assertTrue(path.is_dir())
        self.assertTrue(inst_path.is_dir())
        self.assertTrue((inst_path / '1.dat').is_file())
        comp_path = path / DcsTestStash.COMPOSITE_DIRECTORY_NAME
        self.assertTrue((comp_path / 'apple-orange').is_dir())
        self.assertTrue((comp_path / 'apple-orange/1.dat').is_file())
        self.assertTrue((comp_path / 'cat-dog').is_dir())
        self.assertTrue((comp_path / 'cat-dog/1.dat').is_file())

    def test_load(self):
        path = self.targdir / 'load'
        stash = DcsTestStash(path, self.groups)
        di = DataItem(1, 2, 'rover', 'fuzzy')
        stash.dump('1', di)
        di2 = stash.load('1')
        self.assertEqual(di.agg, di2.agg)
        self.assertNotEqual(id(di), id(di2))

    def test_load_ordered(self):
        path = self.targdir / 'load-ordered'
        stash = DcsTestStash(path, self.groups)
        tdata = [[2, 3, 'blue', 'paws'],
                 [3, 4, 'stumpy', 'patches'],
                 [5, 10, 'rascal', 'cuddles']]
        for i, idata in enumerate(tdata):
            for ordered in (True, False):
                key = str(i)
                di = DataItem(1, 2, 'rover', 'fuzzy', ordered)
                stash.dump(key, di)
                di2 = stash.load(key)
                self.assertNotEqual(id(di), id(di2))
                self.assertEqual(di.agg, di2.agg)

    def test_load_comp_missing(self):
        path = self.targdir / 'load-comp'
        stash = DcsTestStash(path, self.groups)
        di = DataItem(1, 2, 'rover', 'fuzzy')
        stash.dump('1', di)
        comp_path = path / DcsTestStash.COMPOSITE_DIRECTORY_NAME
        dat_path = comp_path / 'apple-orange' / '1.dat'
        self.assertTrue(dat_path.is_file())
        dat_path = comp_path / 'cat-dog' / '1.dat'
        self.assertTrue(dat_path.is_file())
        dat_path.unlink()
        self.assertRaises(PersistableError, lambda: stash.load('1'))

    def test_load_comp_skip_load(self):
        path = self.targdir / 'load-comp'
        stash = DcsTestStash(path, self.groups, load_keys=set('apple'.split()))
        di = DataItem(1, 2, 'rover', 'fuzzy')
        stash.dump('1', di)
        comp_path = path / DcsTestStash.COMPOSITE_DIRECTORY_NAME
        dat_path = comp_path / 'apple-orange' / '1.dat'
        self.assertTrue(dat_path.is_file())
        dat_path = comp_path / 'cat-dog' / '1.dat'
        self.assertTrue(dat_path.is_file())
        dat_path.unlink()
        di2 = stash.load('1')
        self.assertEqual({'apple': 1}, di2.agg)

    def test_dump_missing(self):
        path = self.targdir / 'dump_missing'
        stash = DcsTestStash(path, self.groups)
        di = DataItem(1, 2, 'rover', 'fuzzy')
        del di.agg['dog']
        try:
            stash.dump('1', di)
            self.assertTrue(False, 'should have thrown exception')
        except MissingDataKeys as e:
            self.assertEqual({'dog'}, e.keys)
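The splitting behavior these tests exercise can be illustrated without the zensols library. A minimal, self-contained sketch of partitioning an attribute dict by group, using the same 'apple-orange' and 'cat-dog' fixtures as above (this is only an illustration of the idea; DirectoryCompositeStash additionally persists each partition to its own directory):

def to_composite(data, groups):
    """Partition `data` into one dict per group, keyed by the sorted group name."""
    by_group = {}
    for group in groups:
        name = '-'.join(sorted(group))
        by_group[name] = {k: v for k, v in data.items() if k in group}
    return by_group


data = {'apple': 1, 'orange': 2, 'dog': 'rover', 'cat': 'fuzzy'}
groups = ({'apple', 'orange'}, {'dog', 'cat'})
print(to_composite(data, groups))
# {'apple-orange': {'apple': 1, 'orange': 2}, 'cat-dog': {'dog': 'rover', 'cat': 'fuzzy'}}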
// Shortest path on a layered graph: each edge advances the "steps mod 3" layer,
// so reaching the target in layer 0 means the path length is a multiple of 3.
#define DEBUG(...)

#include <stdio.h>
#include <vector>
#include <queue>
using namespace std;

const int maxN = 100000;
const int INF = 1000000;

int N;
vector<int> adj[maxN * 3];
int vis[maxN * 3];
priority_queue<pair<int, int>> pq;

// Node x in layer p (p = path length mod 3).
int conv(int x, int p) { return x + p * N; }

int main() {
    int m;
    scanf("%d %d", &N, &m);
    for (int i = 0; i < N * 3; i++) {
        vis[i] = INF;
    }
    for (int i = 0; i < m; i++) {
        int a, b;
        scanf("%d %d", &a, &b);
        // An edge a -> b moves from layer j to layer (j + 1) mod 3.
        for (int j = 0; j < 3; j++) {
            adj[conv(a - 1, j)].push_back(conv(b - 1, (j + 1) % 3));
        }
    }
    int S, T;
    scanf("%d %d", &S, &T);
    // Max-heap on negated costs behaves as a min-heap on path length.
    pq.push(pair<int, int>(0, S - 1));
    while (!pq.empty()) {
        int v = pq.top().second;
        int c = pq.top().first;
        pq.pop();
        if (vis[v] != INF) continue;
        DEBUG(printf("on %d %d\n", v, c);)
        vis[v] = c;
        for (int x : adj[v]) {
            if (vis[x] == INF) {
                pq.push(pair<int, int>(c - 1, x));
            }
        }
    }
    int ans = vis[conv(T - 1, 0)];
    if (ans == INF) {
        ans = 3;  // Unreachable: -3 / 3 prints -1 below.
    }
    printf("%d\n", -ans / 3);
    return 0;
}
A Summary of the International Colloquium “French Theory in China”

An international colloquium, “French Theory in China”, was held at Shanghai University from October 11 to 13, 2019. The colloquium was jointly organized by the Department of Chinese Language and Literature, the Center for French Thought and Culture, and the Center for Critical Theory at Shanghai University. Thirty-one scholars from Chinese and foreign institutions participated in the event. The colloquium focused on the retrospective definition and evaluation of French Theory, its new developments around the world, its reception in China, and its influence on contemporary Chinese social and cultural studies.
def check_in_known_filters(istring, known_filters, exclude_filters):
    """Return True if every '-'-separated token in `istring` is a known filter.

    `exclude_filters`, when given, is treated as an additional set of
    acceptable tokens.
    """
    filter_splits = istring.split('-')
    if exclude_filters:
        all_known_filters = known_filters.union(exclude_filters)
    else:
        all_known_filters = known_filters
    return set(filter_splits).issubset(all_known_filters)
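A brief usage sketch; the filter names below are hypothetical, and the function only assumes `known_filters` is a set and `exclude_filters` is a set or None:

known = {"train", "val", "test"}
excluded = {"debug"}

print(check_in_known_filters("train-val", known, None))         # True
print(check_in_known_filters("train-debug", known, excluded))   # True
print(check_in_known_filters("train-unknown", known, excluded)) # False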
<reponame>SaladDais/LSO2-VM-Performance /** * @file llsdmessagereader.cpp * @brief LLSDMessageReader class implementation. * * $LicenseInfo:firstyear=2007&license=viewerlgpl$ * Second Life Viewer Source Code * Copyright (C) 2010, Linden Research, Inc. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License only. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA * $/LicenseInfo$ */ #include "linden_common.h" #include "llsdmessagereader.h" #include "llmessagebuilder.h" #include "llsdmessagebuilder.h" #include "llsdutil.h" #include "llsdutil_math.h" #include "v3math.h" #include "v4math.h" #include "v3dmath.h" #include "v2math.h" #include "llquaternion.h" #include "v4color.h" LLSDMessageReader::LLSDMessageReader() : mMessageName(NULL) { } //virtual LLSDMessageReader::~LLSDMessageReader() { } LLSD getLLSD(const LLSD& input, const char* block, const char* var, S32 blocknum) { // babbage: log error to LL_ERRS() if variable not found to mimic // LLTemplateMessageReader::getData behaviour if(NULL == block) { LL_ERRS() << "NULL block name" << LL_ENDL; return LLSD(); } if(NULL == var) { LL_ERRS() << "NULL var name" << LL_ENDL; return LLSD(); } if(! input[block].isArray()) { // NOTE: babbage: need to return default for missing blocks to allow // backwards/forwards compatibility - handlers must cope with default // values. LL_WARNS() << "block " << block << " not found" << LL_ENDL; return LLSD(); } LLSD result = input[block][blocknum][var]; if(result.isUndefined()) { // NOTE: babbage: need to return default for missing vars to allow // backwards/forwards compatibility - handlers must cope with default // values. LL_WARNS() << "var " << var << " not found" << LL_ENDL; } return result; } //virtual void LLSDMessageReader::getBinaryData(const char *block, const char *var, void *datap, S32 size, S32 blocknum, S32 max_size) { std::vector<U8> data = getLLSD(mMessage, block, var, blocknum); S32 data_size = (S32)data.size(); if (size && data_size != size) { return; } if (max_size < data_size) { data_size = max_size; } // Calls to memcpy will fail if data_size is not positive. 
// Phoenix 2009-02-27 if(data_size <= 0) { return; } memcpy(datap, &(data[0]), data_size); } //virtual void LLSDMessageReader::getBOOL(const char *block, const char *var, BOOL &data, S32 blocknum) { data = getLLSD(mMessage, block, var, blocknum); } //virtual void LLSDMessageReader::getS8(const char *block, const char *var, S8 &data, S32 blocknum) { data = getLLSD(mMessage, block, var, blocknum).asInteger(); } //virtual void LLSDMessageReader::getU8(const char *block, const char *var, U8 &data, S32 blocknum) { data = getLLSD(mMessage, block, var, blocknum).asInteger(); } //virtual void LLSDMessageReader::getS16(const char *block, const char *var, S16 &data, S32 blocknum) { data = getLLSD(mMessage, block, var, blocknum).asInteger(); } //virtual void LLSDMessageReader::getU16(const char *block, const char *var, U16 &data, S32 blocknum) { data = getLLSD(mMessage, block, var, blocknum).asInteger(); } //virtual void LLSDMessageReader::getS32(const char *block, const char *var, S32 &data, S32 blocknum) { data = getLLSD(mMessage, block, var, blocknum); } //virtual void LLSDMessageReader::getF32(const char *block, const char *var, F32 &data, S32 blocknum) { data = (F32)getLLSD(mMessage, block, var, blocknum).asReal(); } //virtual void LLSDMessageReader::getU32(const char *block, const char *var, U32 &data, S32 blocknum) { data = ll_U32_from_sd(getLLSD(mMessage, block, var, blocknum)); } //virtual void LLSDMessageReader::getU64(const char *block, const char *var, U64 &data, S32 blocknum) { data = ll_U64_from_sd(getLLSD(mMessage, block, var, blocknum)); } //virtual void LLSDMessageReader::getF64(const char *block, const char *var, F64 &data, S32 blocknum) { data = getLLSD(mMessage, block, var, blocknum); } //virtual void LLSDMessageReader::getVector3(const char *block, const char *var, LLVector3 &vec, S32 blocknum) { vec = ll_vector3_from_sd(getLLSD(mMessage, block, var, blocknum)); } //virtual void LLSDMessageReader::getVector4(const char *block, const char *var, LLVector4 &vec, S32 blocknum) { vec = ll_vector4_from_sd(getLLSD(mMessage, block, var, blocknum)); } //virtual void LLSDMessageReader::getVector3d(const char *block, const char *var, LLVector3d &vec, S32 blocknum) { vec = ll_vector3d_from_sd(getLLSD(mMessage, block, var, blocknum)); } //virtual void LLSDMessageReader::getQuat(const char *block, const char *var, LLQuaternion &q, S32 blocknum) { q = ll_quaternion_from_sd(getLLSD(mMessage, block, var, blocknum)); } //virtual void LLSDMessageReader::getUUID(const char *block, const char *var, LLUUID &uuid, S32 blocknum) { uuid = getLLSD(mMessage, block, var, blocknum); } //virtual void LLSDMessageReader::getIPAddr(const char *block, const char *var, U32 &ip, S32 blocknum) { ip = ll_ipaddr_from_sd(getLLSD(mMessage, block, var, blocknum)); } //virtual void LLSDMessageReader::getIPPort(const char *block, const char *var, U16 &port, S32 blocknum) { port = getLLSD(mMessage, block, var, blocknum).asInteger(); } //virtual void LLSDMessageReader::getString(const char *block, const char *var, S32 buffer_size, char *buffer, S32 blocknum) { if(buffer_size <= 0) { LL_WARNS() << "buffer_size <= 0" << LL_ENDL; return; } std::string data = getLLSD(mMessage, block, var, blocknum); S32 data_size = data.size(); if (data_size >= buffer_size) { data_size = buffer_size - 1; } memcpy(buffer, data.data(), data_size); buffer[data_size] = '\0'; } //virtual void LLSDMessageReader::getString(const char *block, const char *var, std::string& outstr, S32 blocknum) { outstr = getLLSD(mMessage, block, var, 
blocknum).asString(); } //virtual S32 LLSDMessageReader::getNumberOfBlocks(const char *blockname) { return mMessage[blockname].size(); } S32 getElementSize(const LLSD& llsd) { LLSD::Type type = llsd.type(); switch(type) { case LLSD::TypeBoolean: return sizeof(bool); case LLSD::TypeInteger: return sizeof(S32); case LLSD::TypeReal: return sizeof(F64); case LLSD::TypeString: return llsd.size(); case LLSD::TypeUUID: return sizeof(LLUUID); case LLSD::TypeDate: return sizeof(LLDate); case LLSD::TypeURI: return sizeof(LLURI); case LLSD::TypeBinary: { std::vector<U8> data = llsd; return data.size() * sizeof(U8); } case LLSD::TypeMap: case LLSD::TypeArray: case LLSD::TypeUndefined: default: // TypeLLSDTypeEnd, TypeLLSDNumTypes, etc. return 0; } //return 0; } //virtual //Mainly used to find size of binary block of data S32 LLSDMessageReader::getSize(const char *blockname, const char *varname) { return getElementSize(mMessage[blockname][0][varname]); } //virtual S32 LLSDMessageReader::getSize(const char *blockname, S32 blocknum, const char *varname) { return getElementSize(mMessage[blockname][blocknum][varname]); } //virtual void LLSDMessageReader::clearMessage() { mMessage = LLSD(); } //virtual const char* LLSDMessageReader::getMessageName() const { return mMessageName; } // virtual S32 LLSDMessageReader::getMessageSize() const { return 0; } //virtual void LLSDMessageReader::copyToBuilder(LLMessageBuilder& builder) const { builder.copyFromLLSD(mMessage); } void LLSDMessageReader::setMessage(const char* name, const LLSD& message) { mMessageName = name; // TODO: Validate mMessage = message; }
def _step_cog(self):
    def course_utm(df):
        # Azimuth of each step: bearing from the previous point to the current one.
        df.reset_index(inplace=True)
        df["Step_Azimuth"] = azimuth_utm(
            df["geometry"].shift(), df.loc[1:, "geometry"]
        )
        return df.set_index("index")

    self.gdf = self.grouped_trip.apply(course_utm)
    # The first point of each trip has no previous point; back-fill its azimuth.
    self.gdf["Step_Azimuth"].fillna(method="bfill", inplace=True)
    self.gdf["Step_Azimuth"] = round(
        self.gdf["Step_Azimuth"].apply(lambda x: angles.normalize(x, 0, 360))
    )
    # Smallest angular difference (0-180 degrees) between the reported course or
    # heading and the azimuth implied by consecutive positions.
    self.gdf["Error_COG"] = 180 - abs(
        abs(self.gdf["COG"] - self.gdf["Step_Azimuth"]) - 180
    )
    self.gdf["Error_Heading"] = 180 - abs(
        abs(self.gdf["Heading"] - self.gdf["Step_Azimuth"]) - 180
    )
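The `180 - abs(abs(a - b) - 180)` expression is the usual wraparound-safe angular difference for bearings already normalized to [0, 360), as Step_Azimuth is above. A small standalone check of that identity (the values are made up):

def angular_error(a, b):
    # Assumes a and b are already in [0, 360); result is in [0, 180].
    return 180 - abs(abs(a - b) - 180)

print(angular_error(350.0, 10.0))  # 20.0 -- wraps around north instead of 340
print(angular_error(90.0, 270.0))  # 180.0 -- directly opposite courses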
[The following text has been excerpted from Peter Schilling Jr.’s forthcoming book on the duck stories of Carl Barks, published by Uncivilized Books.]

Carl Barks consistently referred to “Lost in the Andes” as his finest work, but I would counter with “The Golden Helmet.” “Helmet,” with its almost zen-like appraisal of a peaceful life, its condemnation of greed and avarice (not to mention lawyers), a story that has humor but not too much, that actually takes itself somewhat seriously, is his finest effort, at the very least in terms of writing (though the art is brilliant as usual). Barks claimed that the gags in “Andes” were executed perfectly, the repeating jokes of the gum bubbles and the square eggs, etc., and this is true—but “Helmet,” whose themes run smoothly through this story, is less reliant on knee-slapping gags. “The Golden Helmet” isn’t just a story of adventure, a story of humor, or even bravery (though those traits exist here). “The Golden Helmet” is Barks’ most somber effort, a story of evil, the evil that lurks in everyone. Even children.

Barks’ visual style in “The Golden Helmet” also seems to suggest that we’re in for a more sobering ride. Gone are the crazy splash panels of “Vacation Time” (or any of the other stories mentioned here, almost all of which have a bent panel or two at least). For the opening scene, as with the rest of this tale, there will be not one skewed panel. Splash panels vanish until twelve pages in, and even then there will be only four of them. In fact, the opening scene is one of stasis, boredom. (see Fig. 1) Contrast the narration with the opening of “Lost in the Andes.” Both feature Donald working at a museum: in the latter, Barks wrote, “It is morning of a day destined to live long in history! At the Museum of Natural Science the third assistant janitor is giving orders to the fourth assistant janitor!” This joke just brims with energy. Donald has a huge smile on his face, even as he holds a feather duster. Here, the narration box simply reads, “Donald is the assistant guard in the Museum at Duckburg!” A guard strikes me as a more intriguing position than janitor, and yet in “The Golden Helmet,” Donald leans against a sculpture of a strange-looking “prehistoric cow” (as the placard reads), his eyes are almost closed, and he’s bored beyond belief. Donald’s opening line? “Ho hum!” The Museum doesn’t even have a name—the “Museum at Duckburg?”—and gone is the joke about multiple assistants, even though he’s an assistant guard. A Viking ship will play a large part in this tale, as Barks foreshadowed his plot points as usual—there’s a big Viking ship in the background, with a large placard describing the thing. Banality wins out: “Old Viking Ship” (there’s no more descriptive term than that?) And this: “This ancient hulk was dug up in Herring, Norway, where it was buried by the Vikings about 920 A.D.” “Hulk” suggests that this ship is merely junk, and the bland sentence communicates simply the most rote facts, and even those aren’t all that interesting.

In this opening page, Donald wanders the museum (as a guard must do), alternating between yawning and bemoaning his fate, a fate of having nothing to do. The museum offers a collection of history’s detritus: the Headless Horseman’s toupee, Lady Godiva’s laundry bag, and, perhaps most telling, a morose statue with a long face, titled “Joy.” At the close of the first page, and over the next few, Donald has circled back to the Viking ship, where he muses over how soft modern man has become.
So here the theme asserts itself: the boredom of modern man. The vessels and castoffs of history’s bravest people are trapped in these four walls and behind glass, while we simply wander about, dazed, gazing upon them. At this point, a fat, myopic man with Coke-bottle glasses asks where to find the butterfly collection, leaving Donald shaking his head. And when an effete fellow with large eyelashes says, “Mister Guard, where is the lace and tatting collection?” Donald cannot help but actually climb aboard the “hulk” and pretend, in his words, that he is a “he-man.” It is while he’s playing this game of the imagination that Donald stumbles across the villain of the piece, Azure Blue, a craggy, goatee’d sourpuss who’s breaking the ship apart looking for a map. Donald shoos him away and then, of course, discovers the map that will transport him from Duckburg to the high seas. This is quite a find. No mere treasure map, it is, instead, a “log of that old Viking ship!”, as the museum’s curator points out. The curator is a friendly soul, balding, with a bushy beard, but oddly enough he will never have a name other than “The Curator”. The scene that follows is reminiscent of the opening (though not the prologue) of Raiders of the Lost Ark: in The Curator’s office, we learn that the map reveals that the ship was helmed by one Olaf the Blue, and that in the year 901 it landed on the coast of North America, “years before Eric the Red!” (The man who truly “discovered” America.) To prove that he landed on the shore of North America, Olaf buried the eponymous helmet, and the map points to exactly where this thing is located. Excited that the Duckburg Museum has proof of exactly who discovered America, the Curator spins Donald around, happily shouting, “You’ll be famous! The museum will be famous!” Ready to dispatch an expedition to retrieve the prize, the Curator is suddenly interrupted by Azure Blue and his lawyer, Sharky, who spouts a weird Latinate legalbabble. They demand the map, claiming that Blue is the descendant of the original Olaf, and that by owning said helmet Azure will, in fact, become the King of North America. Barks concocted a wonderful historical mini-backstory, suggesting that during the time of Charlemagne the rulers of the world agreed that whoever discovers a new land shall own it, unless he claims it for his king. Olaf claimed it for himself, so now it goes to his distant relative, Azure Blue. “How can you prove he is Olaf’s nearest of kin?” The Curator shouts, to which Sharky replies, “Flickus, Flackus, Fumdeedledum!”, and then: “Which is legal language for, ‘How can you prove that he isn’t?’” And with that, they grab the map, and head out the door to retrieve the Golden Helmet.

What’s interesting is that this is essentially an adventure of attitudes. In Raiders, say, Indy is out for adventure, but he’s also going to help stop a menace that is physical—the Nazis will use the Ark of the Covenant to literally destroy other, freedom-loving armies. With the Ark, they could (had their faces not been melted off and their heads exploded) have ruled the world against everyone’s will. Here, this is a threat of perception: if Azure Blue gets the helmet, he will own North America, but his control over the continent depends solely on whether the population as a whole actually believes him to be so. It depends on armies of lawyers and politicians, each one, we imagine, angling for whatever advantage they can get from Blue.
Such is the power of this “law” that it will even dissolve boundaries—notice that the owner of the Golden Helmet is not king of America or Canada, but North America. This is what modern man has come to: if someone says they’re the king of North America, we won’t fight, but merely hope that politicians and lawyers can get us out of this mess. Even before we can get this story rolling, we need to see the worst of people, this time in the kids. Donald races home to find his nephews stupidly shooting marbles in the living room, bored, eyes half closed, making one wonder if kids back then were as challenged to get out and about as they are today. So ignorant of geography that one of them asks if Labrador is a coat, Donald runs headlong into a wall out of exasperation. But soon they’re on their way, taking an all-night flight to Labrador (with a nifty fold out queen-sized bed in it—man, travel was awesome back then). The chase is on. And we notice that the sea excited Barks more than any other landscape. Inspired by the richly detailed newspaper comic strip Prince Valiant, Barks played not only with the breathtaking visuals of a turbulent sea, but also with a rocky coastline that is as inhospitable as Mars. This is a bleak, unforgiving world that encompasses this chase, and Blue is ready for it, with his yacht, filled with reporters (to verify his finding the helmet), and a warship (strangely numbered ‘313,’ same as Donald’s jalopy) for protection. On the other hand, Donald will give chase in a smaller boat, tossed about by the waves. “It isn’t speed that’s going to win this race—it’s ruggedness!” he says, as a cold spray blasts him at the wheel of his boat. (see Fig. 2) Again, Barks revealed Donald’s impressive skills through action: he’s adept with a sextant and compass, knowing exactly where he’s going with both. Like the fire scenes in “Vacation Time,” he presses forwards, even as scores of puffins sail by in the opposite direction, away from an impending storm. Eventually, things get so rough even the destroyer turns around, and Azure’s ship is crashed upon an iceberg. We know this because Donald spies the crew of Blue’s ship, and its passengers, in lifeboats heading back south. And he is not the least bit interested in seeing if he can help, or even radioing for help, and in fact the boys dance a little jig, celebrating their victory. The race for this helmet has already stripped our heroes of their humanity. Of course, it cannot be this simple, as the luck of Azure Blue demands that, in a thick fog, he accidentally encounters the ducks and steals their boat, leaving them to survive in a life raft themselves. But they’re equally fortunate: discovering the wreckage of Azure’s boat, which includes a sail, they’re able to jerry-rig a mast and shoot across the sea and up the coast just like Vikings, and in hot pursuit. Azure reaches the coast first, the location of the helmet, and yet cannot find it, thus prompting Sharky, his lawyer to sneer that Azure should “sue somebody—anybody.” This particular comic was written in late 1951 or early 1952, during or just after the time when Barks was involved in an awful divorce from his alcoholic second wife. Lawyers no doubt left a bitter taste in his mouth, and thus Sharky provides a constantly negative (and at times tiresome) dialogue, suggesting, again and again, that Azure can sue for every setback life sends him. (see Fig. 
3) But Sharky’s important, at least in the way that this odd story is constructed: Azure can only make his claim through the power of law, which Barks seemed to believe was eminently pliable, provided you had a good lawyer. Without Sharky, this adventure loses its power, a power that becomes seriously warped as the story progresses. Donald is able to withstand the worst the sea has to offer, and though the kids endure this as well, they do so with a lot of complaining. Up and down the headland they go, vainly seeking the cross-marked spot, to no avail. Finally, Azure and Sharky literally crash into Donald and the boys yet again, this time ramming their boat and sinking them offshore. But this helps our heroes. Washed ashore, one of the kids realizes that the cross-shaped headland would in fact erode over time—and it turns out they’re standing right by the mound that contains the Golden Helmet. (And ensconced beneath a puffin’s nest—when Donald goes to retrieve it, he gets a “golden helmet” of smashed eggs on his head. Again, eggs!) Looking through binoculars, Blue sees that Donald has the helmet, and in the one mediocre plot twist, steals it back from Donald by… sneaking up and taking it from him. After Blue sneers and proclaims victory, as they march toward his boat (where the Ducks are to become his slaves), the Curator drops a stone on Blue’s head, and takes the Helmet yet again. With the evil prize safely in his possession, the Curator demands they board the ship and sail into deep waters, where he can toss the thing into the ocean. And now comes the dark heart of “The Golden Helmet.” As they sail deeper and deeper into the Atlantic, which becomes as bleak and depressing as the desert in “The Magic Hourglass,” the backgrounds become sparse, languid. The seas have waves, but the ship moves in a straight, flat line across each panel. Gone is adventure, gone is the turbulence (and beauty) of the Atlantic, and what remains is pure avarice. For the Curator, on Sharky’s advice, decides to keep the Golden Helmet. Since Sharky has proven that anyone can be Olaf the Blue’s kin, the Curator has as much right to be the owner of North America as Azure Blue, or anyone else for that matter. “Everybody will have to go to a museum twice a day!” he proclaims, and then proceeds to list off all the museum-friendly things that will occupy the lives of North Americans under his rule. But the Curator is exhausted. He collapses, and Donald grabs the Helmet. Midway to tossing it into the sea, Donald suddenly has a vivid image of himself, clad in ermine, sitting atop a gold throne that reads KING of North America. Sharky leans in, whispering, and tells our man that a good lawyer can finagle the legal angle. Donald’s full of life now, shouting, “I won’t take a thing away from them!” as the Helmet, which he’s wearing, comes down over his eyes. When Sharky inquires about what Donald expects to possess, he sneers that he’s going to force people to wear meters on their chests and pay him for the very air we breathe. (see Fig. 4) Leave it to Donald to take this to its farthest point, even growing paranoid and depositing everyone—including his nephews—on an iceberg when he thinks they’re trying to steal back the Helmet. But one of the kids sneaks the compass from Donald, leaving him, on a cloudy day, unable to read the stars to determine where he’s sailing. 
After a polar bear lands on Donald’s ship, devouring all their supplies (Sharky has joined him, of course), the Helmet and its power do little to feed either of them (much like the conclusion of “The Magic Hourglass”). Suddenly, through the fog we see a Viking ship—Huey, Dewey and Louie took an axe and shaped the iceberg like the old hulk in the museum (and well done, boys). Donald is so happy to see his nephews that he renounces ownership, which of course makes Sharky grab the Helmet and proclaim himself owner, but by now everyone’s tired of the joke (and besides, Donald was the best, evilest owner, anyway). One of the boys grabs a fish and hurls it in Sharky’s face. The Golden Helmet flies from his hand and down, down into the depths, never to be seen again.

So we come around full circle. Donald’s had his adventure, even had a close-up look at the worst instincts in his own breast, and is back to work at the museum. The lawyer is back to his job, we presume, the Curator back curating. “That rugged life had its points—but I don’t know—” Donald says, staring again at the Viking ship. His ruminations are interrupted by another effete gentleman, with giant eyelashes and weird pursed lips, who asks where to find the embroidered lampshades. As Donald begins to give directions, he gives up, choosing instead to take the man there. “Darned if I ain’t interested in embroidered lampshades, myself!”

What is the message here? Was it simply to tell people not to question their peaceful lives? The opening scenes of “The Golden Helmet” suggest a world of total banality, of boredom. But by its end the notion that the adventurous life is worth living—even if it’s not financially rewarding—has vanished, with the final panels suggesting that this life, this comfortable life in America, is worth living, dull though it may be. Barks was no doubt sick of lawyers, but perhaps “The Golden Helmet” is a note to himself, a suggestion to give up any dreams of wealth, to celebrate simply working as a Disney artist with its meager pay. Is this Zen-like resignation to the nature of our prosperous banality, or Barks giving up on life? I don’t know, but I do love the bitter aftertaste of “The Golden Helmet,” which remains, to me, a startling concoction even by Barks’ often dark standards.

Peter Schilling Jr. is the author of The End of Baseball, and writes about film and the arts for a variety of publications. He has been reading and studying Carl Barks’ entire catalogue since he was a child.
Comparison of pressure standards in the range 10 kPa to 140 kPa This report summarizes the results obtained by twelve laboratories in a comparison of pressure measurement standards in the range 10 kPa to 140 kPa, carried out under the auspices of the Consultative Committee for Mass and Related Quantities. Measurements were taken in the absolute mode and, by some participants, in the gauge mode. Good repeatability and, until near the end, reproducibility were observed in the transfer standard. The results displayed significant differences between some of the participants beyond those expected from their claimed uncertainties.
#ifndef LED_H
#define LED_H

namespace StatusLED {
void blink();
void force_on();
void force_off();
}  // namespace StatusLED

#endif
/** For Jetty, injects the filter into instances of Jetty's ServletHandler. * * We instrument the constructor of org.eclipse.jetty.servlet.ServletHandler to inject a call to * add the supplied filter. * * @author robertf */ public class JettyAdapter implements Adapter { private final InjectableFilter filter; public JettyAdapter(InjectableFilter filter) { this.filter = filter; } public ClassVisitor getClassVisitor(final ClassLoader classLoader, final ClassVisitor cv) { if (ClassLoaderUtil.isAvailable(classLoader, "org.eclipse.jetty.servlet.FilterHolder") && ClassLoaderUtil.isAvailable(classLoader, "org.eclipse.jetty.servlet.FilterMapping")) { // jetty Log.debug("jetty adapter", "injecting filter for jetty"); if (filter.isAvailable(classLoader)) return new Visitor(cv, filter); } return null; } private static class Visitor extends ClassVisitor { private final InjectableFilter filter; public Visitor(final ClassVisitor cv, final InjectableFilter filter) { super(Opcodes.ASM7, cv); this.filter = filter; } private String className = ""; @Override public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { super.visit(version, access, name, signature, superName, interfaces); this.className = name; } @Override public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) { MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions); if ((access & Opcodes.ACC_PUBLIC) == Opcodes.ACC_PUBLIC && name.equals("<init>")) return new ConstructorInstrumentor(mv, className, filter); return mv; } } private static class ConstructorInstrumentor extends MethodVisitor { private final String className; private final InjectableFilter filter; public ConstructorInstrumentor(final MethodVisitor mv, final String className, final InjectableFilter filter) { super(Opcodes.ASM7, mv); this.className = className; this.filter = filter; if (Log.DEBUG) Log.debug("jetty adapter", String.format("instrumenting %s constructor", className)); } @Override public void visitInsn(int opcode) { if (opcode == Opcodes.RETURN) { /* Essentially, we're injecting the following code: holder = new FilterHolder(filter); holder.setName(...) 
this.addFilterWithMapping(holder, "/*", FilterMapping.REQUEST) */ Type servletFilter = Type.getObjectType("javax/servlet/Filter"); Type filterHolder = Type.getObjectType("org/eclipse/jetty/servlet/FilterHolder"); Type filterMapping = Type.getObjectType("org/eclipse/jetty/servlet/FilterMapping"); Type context = Type.getObjectType("org/eclipse/jetty/servlet/ServletHandler"); // build the filter filter.constructFilter(mv); // new FilterHolder(filter) mv.visitTypeInsn(Opcodes.NEW, filterHolder.getInternalName()); mv.visitInsn(Opcodes.DUP_X1); mv.visitInsn(Opcodes.SWAP); mv.visitMethodInsn(Opcodes.INVOKESPECIAL, filterHolder.getInternalName(), "<init>", Type.getMethodDescriptor(Type.VOID_TYPE, servletFilter), false); // .setName(<filter.getName()>) mv.visitInsn(Opcodes.DUP); mv.visitLdcInsn(filter.getName()); mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, filterHolder.getInternalName(), "setName", Type.getMethodDescriptor(Type.VOID_TYPE, Type.getType(String.class)), false); // this.addFilterWithMapping(holder, "/*", FilterMapping.REQUEST) mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(Opcodes.SWAP); mv.visitLdcInsn("/*"); mv.visitFieldInsn(Opcodes.GETSTATIC, filterMapping.getInternalName(), "REQUEST", Type.INT_TYPE.getDescriptor()); mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, className, "addFilterWithMapping", Type.getMethodDescriptor(Type.VOID_TYPE, filterHolder, Type.getType(String.class), Type.INT_TYPE), false); } super.visitInsn(opcode); } } }
#ifndef HcalTestBeamData_HcalTB06BeamParameters_h
#define HcalTestBeamData_HcalTB06BeamParameters_h

#include "CondFormats/Serialization/interface/Serializable.h"

#include <string>
#include <vector>

class HcalTB06BeamParameters {
public:
  HcalTB06BeamParameters() = default;

  std::vector<std::string> wchambers_;
  std::string material_;

  COND_SERIALIZABLE;
};

#endif
# This code is part of Qiskit.
#
# (C) Copyright IBM 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

# pylint: disable=cyclic-import

"""Routines for computing expectation values from sampled distributions"""

import numpy as np

from qiskit._accelerate.sampled_exp_val import sampled_expval_float, sampled_expval_complex
from qiskit.exceptions import QiskitError

from .distributions import QuasiDistribution, ProbDistribution

# A list of valid diagonal operators
OPERS = {"Z", "I", "0", "1"}


def sampled_expectation_value(dist, oper):
    """Computes expectation value from a sampled distribution

    Note that passing a raw dict requires bit-string keys.

    Parameters:
        dist (Counts or QuasiDistribution or ProbDistribution or dict): Input sampled distribution
        oper (str or Pauli or PauliOp or PauliSumOp or SparsePauliOp): The operator for the observable

    Returns:
        float: The expectation value

    Raises:
        QiskitError: if the input distribution or operator is an invalid type
    """
    from .counts import Counts
    from qiskit.quantum_info import Pauli, SparsePauliOp
    from qiskit.opflow import PauliOp, PauliSumOp

    # This should be removed when these return bit-string keys
    if isinstance(dist, (QuasiDistribution, ProbDistribution)):
        dist = dist.binary_probabilities()

    if not isinstance(dist, (Counts, dict)):
        raise QiskitError("Invalid input distribution type")
    if isinstance(oper, str):
        oper_strs = [oper.upper()]
        coeffs = np.asarray([1.0])
    elif isinstance(oper, Pauli):
        oper_strs = [oper.to_label()]
        coeffs = np.asarray([1.0])
    elif isinstance(oper, PauliOp):
        oper_strs = [oper.primitive.to_label()]
        coeffs = np.asarray([1.0])
    elif isinstance(oper, PauliSumOp):
        spo = oper.primitive
        oper_strs = spo.paulis.to_labels()
        coeffs = np.asarray(spo.coeffs) * oper.coeff
    elif isinstance(oper, SparsePauliOp):
        oper_strs = oper.paulis.to_labels()
        coeffs = np.asarray(oper.coeffs)
    else:
        raise QiskitError("Invalid operator type")

    # Do some validation here
    bitstring_len = len(next(iter(dist)))
    if any(len(op) != bitstring_len for op in oper_strs):
        raise QiskitError(
            f"One or more operators not same length ({bitstring_len}) as input bitstrings"
        )
    for op in oper_strs:
        if set(op).difference(OPERS):
            raise QiskitError(f"Input operator {op} is not diagonal")
    # Dispatch to Rust routines
    if coeffs.dtype == np.dtype(complex).type:
        return sampled_expval_complex(oper_strs, coeffs, dist)
    else:
        return sampled_expval_float(oper_strs, coeffs, dist)
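A short usage sketch. The counts below are made up, and the import path is an assumption: this routine is typically exposed from qiskit.result in released Qiskit, so adjust the import if using this module directly.

from qiskit.quantum_info import SparsePauliOp
from qiskit.result import sampled_expectation_value  # assumed public location

# Hypothetical two-qubit measurement outcomes, normalized to probabilities.
counts = {"00": 512, "11": 488, "01": 12, "10": 12}
shots = sum(counts.values())
probs = {key: value / shots for key, value in counts.items()}

# Diagonal observables only: strings over {I, Z, 0, 1} or Pauli-type operators.
print(sampled_expectation_value(probs, "ZZ"))
print(sampled_expectation_value(probs, SparsePauliOp(["ZZ", "II"], coeffs=[0.5, 0.5])))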
Priscilla Foster has 20 full-time positions open at the McDonald’s restaurant she owns in Wexford. She’s having a hard time filling them because many applicants don’t have a car and cannot get bus service. “I can’t even tell you how many people can’t get to work because they don’t have a ride,” she said. Foster, who owns three other McDonald’s stores in the North Hills, is considering subsidizing transportation for her employees to help them get to work. She was one of about 70 people, elected officials and business owners from several North Hills municipalities who met Thursday night in the West View Fireman’s Hall on Perry Highway to discuss the lack of bus service along the corridor. The group will meet again at 7 p.m. Nov. 12 in the fire hall. Community leaders suggested ways to best organize to get transit service, and participants met in small groups to discuss ideas and develop lobbying strategies. Community College of Alle­gheny County North, Northland Public Library, North Hills School District, North Hills Community Outreach, Ross Township, Ross Business and Economic Development Group, and Pittsburghers for Public Transit hosted the meeting. “We’re losing students; it’s distressing,” said Gretchen Mullin-Sawicki, president of CCAC North. “We’re the Community College of Allegheny County, and we can’t get a bus to our campus.” The campus has specialty programs available only at that campus that many students are unable to get to, she said. Students from North Hills High School lamented that college keeps getting more expensive, especially if students have to buy cars to get to school. “It’s a great school. It’s a great college. There’s no transportation to get there,” said Cody Green, a senior at North Hills High who is thinking of attending CCAC North. He has no car and said that is making him think twice. About 23 percent of graduating North Hills seniors go to CCAC North, according to the school district. Port Authority of Allegheny County eliminated or reduced service on several bus routes. In 2007, the 11C route along Perry Highway was axed, along with several express routes to Perrysville and Franklin Park. In 2011, Route 9 was cut, which was a combination of Routes 11C and 13C that was formed in 2009. The Port Authority has said it has no plans to restore bus service and the soonest requests will be considered is in the spring of 2016. The Port Authority board meets Oct. 23 and will accept public comment. Katelyn Ferral is a staff writer for Trib Total Media.
/**
 * Does a region registerInterest on a server described by the given connection
 * <p>
 * Note that this call by-passes the RegisterInterestTracker.
 *
 * @param conn the connection to do the register interest on.
 * @param key describes what we are interested in
 * @param interestType the {@link InterestType} for this registration
 * @param policy the interest result policy for this registration
 * @param isDurable true if this registration is durable
 * @param receiveUpdatesAsInvalidates whether to act like notify-by-subscription is false.
 * @param regionDataPolicy the data policy ordinal of the region
 * @return list of keys
 */
List registerInterestOn(Connection conn, final Object key, final int interestType,
    final InterestResultPolicy policy, final boolean isDurable,
    final boolean receiveUpdatesAsInvalidates, final byte regionDataPolicy) {
  if (interestType == InterestType.KEY && key instanceof List) {
    return RegisterInterestListOp.executeOn(conn, pool, regionName, (List) key, policy,
        isDurable, receiveUpdatesAsInvalidates, regionDataPolicy);
  } else {
    return RegisterInterestOp.executeOn(conn, pool, regionName, key, interestType, policy,
        isDurable, receiveUpdatesAsInvalidates, regionDataPolicy);
  }
}
/* * Copyright (C) 2019-2020 BiiLabs Co., Ltd. and Contributors * All Rights Reserved. * This is free software; you can redistribute it and/or modify it under the * terms of the MIT license. A copy of the license can be found in the file * "LICENSE" at the root of this distribution. */ #ifndef CONN_HTTP_H #define CONN_HTTP_H #ifdef __cplusplus extern "C" { #endif #include <stdbool.h> #include "common/ta_errors.h" #include "mbedtls/certs.h" #include "mbedtls/ctr_drbg.h" #include "mbedtls/entropy.h" #include "mbedtls/error.h" #include "mbedtls/net.h" #include "mbedtls/ssl.h" typedef struct { bool https; /**< Flag for check whether SSL connection is enabled or not */ mbedtls_net_context *net_ctx; /**< mbedtls_net context */ mbedtls_entropy_context *entropy; /**< mbedtls_entropy context */ mbedtls_ctr_drbg_context *ctr_drbg; /**< mbedlts_ctr_drbg context */ mbedtls_ssl_context *ssl_ctx; /**< mbedtls_ssl context */ mbedtls_ssl_config *ssl_config; /**< mbedtls_ssl_config context */ mbedtls_x509_crt *cacert; /**< mbedtls_x509 container */ } connect_info_t; /** * @brief Initialize logger of conn_http */ void conn_http_logger_init(); /** * @brief Release logger of conn_http * * @return * - zero on success * - EXIT_FAILURE on error */ int conn_http_logger_release(); /** * @brief Open HTTP(S) connection * * @param[in, out] info Context for HTTP(S) connection * @param[in] seed_nonce Personalization data, that is device-specific identifiers. Can be NULL. * @param[in] host HTTP(S) host to connect * @param[in] port HTTP(S) port to connect * * @return * - #SC_UTILS_HTTPS_INIT_ERROR failed on HTTP(S) init error * - #SC_UTILS_HTTPS_CONN_ERROR failed on HTTP(S) connect error * - #SC_UTILS_HTTPS_X509_ERROR failed on HTTP(S) certificate setting error * - #SC_UTILS_HTTPS_SSL_ERROR failed on HTTP ssl setting error * - #SC_OK on success * @see #status_t */ status_t http_open(connect_info_t *const info, char const *const seed_nonce, char const *const host, char const *const port); /** * @brief Send request to HTTP(S) connection * * @param[in] info Context for HTTP(S) connection * @param[in] req Buffer holding the data * * @return * - #SC_OK on success * - non-zero on error * @see #status_t */ status_t http_send_request(connect_info_t *const info, const char *req); /** * @brief Read response from HTTP(S) server * * @param[in] info Context for HTTP(S) connection * @param[out] res Buffer that will hold the data * @param[out] res_len Length of res * * @return * - #SC_OK on success * - non-zero on error * @see #status_t */ status_t http_read_response(connect_info_t *const info, char *res, size_t res_len); /** * @brief Close HTTP(S) connection * * @param[in] info Context for HTTP(S) connection * * @return * - #SC_OK on success * - non-zero on error * @see #status_t */ status_t http_close(connect_info_t *const info); /** * @brief Set POST request message * * @param[in] path API path for POST request to HTTP(S) server, i.e "transaction/". It * must be in string. * @param[in] host HTTP(S) host to connect * @param[in] port HTTP(S) port to connect * @param[in] req_body Pointer of POST request body * @param[out] out POST request message * * @return * - #SC_OK on success * - #SC_OOM failed on out of memory error * - non-zero on error * @see #status_t */ status_t set_post_request(char const *const path, char const *const host, const uint32_t port, char const *const req_body, char **out); /** * @brief Set GET request message * * @param[in] path API path for GET request to HTTP(S) server, i.e "transaction/". 
It * must be in string. * @param[in] host HTTP(S) host to connect * @param[in] port HTTP(S) port to connect * @param[out] out GET request message * * @return * - #SC_OK on success * - #SC_OOM failed on out of memory error * - non-zero on error * @see #status_t */ status_t set_get_request(char const *const path, char const *const host, const uint32_t port, char **out); #ifdef __cplusplus } #endif #endif // CONN_HTTP_H
JAK kinases are required for the bacterial RNA- and poly I:C-induced tyrosine phosphorylation of PKR.

Discriminating the molecular patterns associated with RNA is central to innate immunity. The protein kinase PKR is a cytosolic sensor involved in the recognition of viral dsRNA and in triggering interferon-induced signaling. Here, we identified bacterial RNA as a novel, distinct pattern recognized by PKR. We show that the tyrosine phosphorylation of PKR induced by either bacterial RNA or poly I:C is impaired in mutant cells lacking the TYK2, JAK1, or JAK2 kinases. PKR was found to be a direct substrate for the activated JAKs. Our results indicate that the double-stranded structures of bacterial RNA are required to fully activate PKR. These results suggest that bacterial RNA signaling is analogous in some respects to that of viral RNA and interferons and may have implications for bacterial immunity.
/** Error messages related to {@link Multibinds @Multibinds} methods. */
static final class MultibindsMessages {
  static final String METHOD_MUST_RETURN_MAP_OR_SET =
      "@%s methods must return Map<K, V> or Set<T>";

  static final String PARAMETERS = "@%s methods cannot have parameters";

  private MultibindsMessages() {}
}
By Marshall Auerback, Stephanie Kelton and L. Randall Wray Should the federal government bailout Detroit? That’s the question everyone is debating. We think the discussion should be expanded well beyond this narrow question. Detroit is the canary in the coal mine, but it’s symptomatic of a bigger problem, which is the lack of jobs and decent demand in the economy. The problem is that President Obama believes we can cure our jobless problem by providing the proper incentives to the business community. So we’re sure there will be all sorts of talk about “incentive zones” for Detroit. And here he is committing one of the few big policy blunders from Lyndon Johnson’s War on Poverty. Like Johnson, who focused on retraining the unemployed for jobs that did not exist, Obama has focused on incentivizing the businesses community to hire workers to produce for customers that do not exist. Time and again, Obama has shown that he will only tinker around the edges, relying on the same tired supply-side initiatives that will not work: more incentives to build business confidence, subsidies to reduce labor costs and to promote exports, and maybe even tax cuts to please Republicans. He told a Labor Day crowd in Detroit a few years ago that he wants to match the more than 1 million construction workers with an infrastructure-related rebuilding program to improve the nation’s roads and bridges. That is an improvement over his efforts to date, but it falls far short of the 20-plus million jobs we need. So what should be done? Well, the three of us (and others) have long advocated a longer term solution to deal with all of the Detroits that are out there: The government could serve as the “employer of last resort” under a job guarantee program modeled on the WPA (the Works Progress Administration, in existence from 1935 to 1943 after being renamed the Work Projects Administration in 1939) and the CCC (Civilian Conservation Corps, 1933-1942). The program would offer a job to any American who was ready and willing to work at the federal minimum wage, plus legislated benefits. No time limits. No means testing. No minimum education or skill requirements. The program would operate like a buffer stock, absorbing and releasing workers during the economy’s natural boom-and-bust cycles. In a boom, employers would recruit workers out of the program; in a slump the safety net would allow those who had lost their jobs to continue to work to preserve good habits, making them easier to re-employ when activity picked up. The program would also take those whose education, training or job experience was initially inadequate to obtain work outside the program, enhancing their employability through on-the-job training. Work records would be maintained for all program participants and would be available for potential employers. Unemployment offices could be converted to employment offices, to match workers with jobs in the program, and to help private and public employers recruit workers. Funding for the job guarantee program must come from the federal government—and the wage should be periodically adjusted to reflect changes in the cost of living and to allow workers to share in rising national productivity so that real living standards would rise—but the administration and operation of the program should be decentralized to the state and local level. Registered not-for-profit organizations could propose projects for approval by responsible offices designated within each of the states and U.S. 
territories as well as the District of Columbia. Then the proposals should be submitted to the federal office for final approval and funding. To ensure transparency and accountability, the Labor Department should maintain a website providing details on all projects submitted, all projects approved and all projects started. To avoid simple “make-work” employment, project proposals could be evaluated on the following criteria: (a) value to the community; (b) value to the participants; (c) likelihood of successful implementation of project; (d) contribution to preparing workers for employment outside the program. The program would take workers as they were and where they were, with jobs designed so that they could be performed by workers with the education and training they already had, but it would strive to improve the education and skills of all workers as they participated in the program. Proposals would come from every community in America, to employ workers in every community. Project proposals should include provisions for part-time work and other flexible arrangements for workers who need them, including but not restricted to flexible arrangements for parents of young children. That’s the approach we would take on behalf of all of the Detroits out there.
The E3 ubiquitin ligase Itch regulates tumor suppressor protein RASSF5/NORE1 stability in an acetylation-dependent manner

R Suryaraja, M Anitha, K Anbarasu, G Kumari and S Mahalingam

Ras association (RalGDS/AF-6) domain family member RASSF5 is a non-enzymatic RAS effector super family protein, known to be involved in cell growth regulation. Expression of RASSF5 is found to be extinguished by promoter hypermethylation in different human cancers, and its ectopic expression suppresses cell proliferation and tumorigenicity. Interestingly, this role in tumorigenesis has been confounded by the fact that regulation at the molecular level remains unclear and many transformed cells actually display elevated RASSF5 expression. Here, we demonstrate that the E3 ubiquitin ligase Itch is a unique binding partner of RASSF5. Itch can interact with the PPxY motif in RASSF5 both in vivo and in vitro through its WW domains. Importantly, the overexpression of Itch induces RASSF5 degradation by poly-ubiquitination via the 26S proteasome pathway. In addition, our results indicate that the elevated levels of RASSF5 found in tumor cells are due to acetylation, which restricts its binding to Itch and results in a more stable, inert protein. Inhibition of RASSF5 acetylation permits its interaction with Itch and provokes proteasomal degradation. These data suggest that apart from promoter methylation, hyperacetylation could also be downregulating RASSF5 function in different human cancers. Finally, results from functional assays suggest that the overexpression of wild-type, but not ligase-activity-defective, Itch negatively regulates RASSF5-mediated G1 phase transition of the cell cycle as well as apoptosis, suggesting that Itch alone is sufficient to alter RASSF5 function. Collectively, the present investigation identifies the HECT class E3 ubiquitin ligase Itch as a unique negative regulator of RASSF5, and suggests acetylation as a potential therapeutic target for human cancer.

Ras association (RalGDS/AF-6) domain family member RASSF5/NORE1 is the founding member of the RAS effector super family of proteins that inhibits tumor growth by promoting G1/S arrest. RASSF5 is a proapoptotic factor of RAS and induces apoptosis via p53. 2 RASSF5 binds with components (mammalian sterile 20-like kinase, MST1/2, and yes-associated protein 1, YAP1) of the hippo pathway, and regulates cell proliferation and apoptosis. Mice deficient for RASSF5 were resistant to TNF-a-induced apoptosis. 4 Furthermore, targeted ablation of RASSF5 resulted in spontaneous immortalization of mouse embryo fibroblasts (MEFs), and the RASSF5-deficient MEFs were transformed by the GTP-bound form of K-Ras. 4 In addition, loss of RASSF5 expression by promoter hypermethylation has been linked to tumor formation. 9,10 Given its important role in tumors, studies of RASSF5 may provide new insights into the mechanisms that lead to tumorigenesis. Recent studies suggest that post-translational modifications regulate the functions of proteins that are involved in cell cycle regulation, differentiation, oxidative stress response and transcription. 3 Interestingly, acetylation of proteins has resulted in both the activation and deregulation of protein function. 24,26,27,29,30 In addition, altered expression of deacetylases has resulted in tumorigenesis. 24,27 Furthermore, acetylation of proteins modulates ubiquitination, resulting in the alteration of protein function, suggesting that deacetylation may promote ubiquitination. 27 Together, these data suggest that acetylation and ubiquitination have a critical role in regulating protein stability and function. In the present study, we have provided evidence that the E3 ubiquitin ligase Itch interacts with RASSF5 and promotes its degradation via the proteasome pathway. In addition, the acetylated form of RASSF5 fails to interact with Itch in transformed cells. These findings suggest that Itch mediates the degradation of RASSF5 by a proteasome-mediated pathway in an acetylation-dependent manner.

Results

Ubiquitin ligase Itch interacts with RASSF5. Detailed analysis of the amino acid sequences of RASSF super family proteins (RASSF1 to RASSF10) suggests the presence of the PPxY motif only in RASSF5 (Supplementary Figure S1A). It is well established that the PPxY motif is responsible for the interaction with WW domain-containing proteins. Recent reports suggest that the WW domain-containing E3 ligase Itch utilizes the PPxY motif to target substrates for ubiquitin-dependent degradation. 16,20,21 To confirm whether RASSF5 interacts with Itch, we examined glutathione S-transferase (GST)-Itch WW domains (amino acids 251-529) for their ability to associate with RASSF5-Flag in GST pull-down experiments. Interestingly, RASSF5A-Flag from human embryonic kidney 293T (HEK293T) cell lysates specifically interacts with the GST-Itch WW domain to the same extent as the positive control GST-Importin-b (Figure 1a). Despite the presence of the PPxY motif only in RASSF5, we tested whether Itch interacts with other RASSF members.
Results in Figure 1b demonstrate that Itch interacts specifically with RASSF5, and not with the other RASSF family members tested. These data suggest that Itch is a specific interacting partner of RASSF5.

Figure 1 legend: (a) HEK293T cell lysates containing RASSF5A-Flag were incubated with equal amounts of either GST, GST-Itch WW domain or GST-Importin-β, and the bound proteins were resolved by SDS-12% PAGE followed by western blotting using anti-Flag antibodies. GST-Importin-β was used as a positive control. (b) HEK293T cell lysates containing equal amounts of Flag-tagged RAS effectors (classical Ras effectors: RASSF1, RASSF2 and RASSF5; N-terminal Ras association domain-containing Ras effectors: RASSF7, RASSF8 and RASSF10) were pulled down with either GST or GST-Itch WW domain followed by western blotting using anti-Flag antibodies. Results indicate that Itch specifically interacts only with RASSF5. (c) HEK293T cell lysates containing equal amounts of the alternatively spliced forms of GFP-tagged RASSF5 (RASSF5A, RASSF5B and RASSF5C (no PPxY motif)) were incubated with either GST or GST-Itch WW domain, and the bound proteins were resolved by SDS-12% PAGE followed by western blotting using anti-GFP antibodies. (d) RASSF5A-Flag or RASSF5C-Flag was transiently co-transfected with either Itch wt or the Itch C830A mutant (ligase-activity defective) in HEK293T cells and treated with the proteasome inhibitor MG132. The expression of all the indicated proteins (top) was determined by western blot analysis using anti-Flag or anti-Itch antibodies. Cell lysates containing equal amounts of the indicated proteins (top) were used for coimmunoprecipitation with anti-Flag or anti-Itch antibodies followed by western blot analysis using anti-Itch or anti-Flag antibodies as indicated.

RASSF5 interacts with Itch both in vitro and in vivo. Comparative sequence analysis of the three alternatively spliced forms of RASSF5 indicates that the Itch interaction motif, PPxY, is present only in RASSF5A and RASSF5B, and not in RASSF5C (Supplementary Figure S1B). Results in Figure 1c indicate that the WW domain of Itch interacts with RASSF5A-green fluorescent protein (GFP) and RASSF5B-GFP, but not with RASSF5C-GFP or GFP alone. GST was used as a negative control (Figure 1c). These data suggest that the PPxY motif in RASSF5 is critical for its interaction with Itch. To verify this interaction in vivo, full-length Itch expression plasmid was co-transfected with RASSF5A-Flag or RASSF5C-Flag expression vectors in HEK293T cells. Interestingly, RASSF5C was efficiently expressed in the presence of Itch, but RASSF5A was not (Figure 1d). These data lead to the hypothesis that the interaction of Itch with RASSF5A through the PPxY motif results in the degradation of RASSF5A. To confirm this possibility, RASSF5A and RASSF5C were coexpressed with the catalytically inactive Itch mutant, Itch C830A, and their expression was checked. Figure 1d shows that the amount of RASSF5A protein was similar to that of RASSF5C in Itch C830A-coexpressed cells. These data suggest that the catalytic activity of Itch is required for RASSF5A degradation. We next checked the expression pattern of both RASSF5A and RASSF5C in the presence of the proteasome inhibitor MG132, with or without Itch wt expression. Results in Figure 1d indicate that MG132 rescues RASSF5A expression despite the overexpression of Itch wt, while the level of RASSF5C expression was not altered.
Together, these data provide evidence that Itch may mediate the degradation of RASSF5A via a poly-ubiquitination/proteasome-mediated pathway. We next performed coimmunoprecipitation experiments with HEK293T cell lysates containing RASSF5A-Flag or RASSF5C-Flag coexpressed with Itch wt or Itch C830A, in the presence or absence of MG132. Results in Figure 1d indicate that Itch physically interacts with RASSF5A in vivo; interestingly, the interaction of Itch C830A with RASSF5A suggests that the catalytic activity of Itch is not essential for this association. In addition, while comparable levels of RASSF5A associated with Itch wt or Itch C830A in the presence of MG132, no association with RASSF5C was detected even in the presence of MG132 (Figure 1d). Furthermore, reciprocal coimmunoprecipitation showed that RASSF5A was detected in Itch immunoprecipitates (Figure 1d). The faint signal observed for RASSF5A when coexpressed with Itch wt may be due to Itch-mediated degradation. These data provide evidence that Itch physically interacts with RASSF5A in vivo and mediates RASSF5A degradation. Itch is known to encode four WW domains,17 and we performed a series of GST pull-down assays to find out whether any one or all of the Itch WW domains are involved in the RASSF5A interaction. Towards this end, several GST-Itch WW domain mutants were generated (Supplementary Figure S2A), and results in Supplementary Figure S2B indicate that the Itch mutants W291,323A; W291,403A and W291,443A failed to interact with RASSF5A. Interestingly, exchange of individual tryptophan residues within the WW domains did not alter the Itch interaction, but the exchange of W291 in combination with W323, W403 or W443 completely abrogated the interaction of Itch with RASSF5A (Supplementary Figure S2B). These data suggest that Trp291 has a critical role in the interaction of Itch with RASSF5. Taken together, our results demonstrate that Itch interacts with RASSF5 both in vitro and in vivo, and this interaction is critical for RASSF5 turnover in cells.

Itch-dependent degradation of RASSF5. Itch is known to target multiple tumor suppressor proteins for ubiquitination and degradation.17,20,21 We next tested whether RASSF5A is an ubiquitin substrate for Itch. Results in Figure 2a indicate that the overexpression of Itch significantly down-modulates RASSF5A in a dose-dependent manner. In contrast, coexpression of increasing amounts of Itch C830A has no effect on RASSF5A levels (Figure 2b) despite a robust interaction (Figure 1d). These data suggest that the catalytic activity of Itch is important for RASSF5 degradation. In addition, the downregulation of RASSF5A in the presence of Itch wt was blocked by treatment with MG132 in a time-dependent manner (Figure 2c).

Figure 2 legend (partial): Results indicate that loss of ligase activity impairs the effect of Itch on RASSF5 degradation. (c) Itch mediates RASSF5 degradation via the 26S proteasome pathway. HEK293T cells were co-transfected with RASSF5A-Flag and Itch as described in Materials and Methods. Transfected cells were treated with CHX alone or with the proteasome inhibitor MG132. (d and e) CHX chase analysis of RASSF5 degradation upon Itch overexpression. HEK293T cells expressing either RASSF5A-Flag (d) or RASSF5C-Flag (e) alone or with Itch were treated with CHX to inhibit de novo protein synthesis. Cell lysates were prepared at the indicated time periods, and RASSF5A and RASSF5C levels were determined by western blot analysis using anti-Flag antibodies.
To further confirm that the downregulation of RASSF5A by Itch is specific and occurs at the protein rather than the mRNA level, we determined whether Itch can degrade RASSF5C in a cycloheximide (CHX) chase experiment. Interestingly, overexpression of Itch wt, but not Itch C830A, efficiently promotes RASSF5A degradation with and without CHX treatment (Figure 2d and Supplementary Figure S3A) in a time-dependent manner. In contrast, the RASSF5C level remains stable in the presence of CHX (Figure 2e). However, the levels of RASSF5A-GFP mRNA were unchanged with or without CHX treatment upon Itch overexpression (Supplementary Figure S3B). Furthermore, analysis of RASSF5A-GFP and RASSF5C-GFP immunofluorescence clearly indicates that Itch down-modulates RASSF5A, but not RASSF5C, in the absence of MG132 in vivo (Supplementary Figure S4). Interestingly, a nuclear RASSF5A signal was observed when it was overexpressed with Itch wt in the presence of MG132, and a similar signal was observed when RASSF5A-GFP was coexpressed with Itch C830A (Supplementary Figure S4). Collectively, these data strongly suggest that the destabilizing effect of Itch on RASSF5A is specific, occurs at the protein level and is proteasome-dependent. RASSF5A contains 10RPYP14 as well as 21PPRY24 motifs in its amino-terminus (Supplementary Figure S1), and Itch is known to recognize its substrates via the PPxY motif (Supplementary Figure S5A). RASSF5A Y13A-Flag and RASSF5A Y24A-Flag mutants (Supplementary Figure S5B) were generated, and GST pull-down assays were performed to determine the contribution of these tyrosine residues to the interaction of RASSF5 with Itch. We observed a reduced ability of RASSF5A Y13A to interact with Itch compared with RASSF5A wt; however, the RASSF5A Y24A mutant failed to interact with Itch (Figure 3a). These results suggest that the PPxY motif in RASSF5A is essential for its interaction with Itch. To further understand the role of the PPxY motif in Itch-mediated downregulation of RASSF5A, the stability of the RASSF5A mutants was determined in the presence of Itch. As expected, while RASSF5A was susceptible to degradation, the steady state level of RASSF5A Y24A-Flag was independent of Itch coexpression (Figure 3b). Interestingly, a reduced level of RASSF5A Y13A-Flag mutant protein degradation was noticed upon Itch coexpression compared with RASSF5A wt (Figure 3b). Collectively, these data provide evidence that the PPxY motif mediates the interaction of RASSF5A with Itch and regulates the steady state level of RASSF5.

Itch degrades RASSF5 via poly-ubiquitination. We next carried out an in vivo ubiquitination assay to determine whether Itch can catalyze ubiquitination of RASSF5A in cells. Results in Figure 3c indicate that high-molecular-weight poly-ubiquitinated forms of RASSF5A were observed only when Itch wt, but not Itch C830A, was coexpressed with RASSF5A. In contrast, RASSF5C and RASSF5A Y24A were not decorated with poly-ubiquitin chains when coexpressed with Itch (Figure 3c). These results suggest that both the interaction with and the catalytic activity of Itch are essential for poly-ubiquitination of RASSF5A. Interestingly, the stabilization of RASSF5 in the presence of MG132 suggests that Itch mediates RASSF5 degradation via the 26S proteasome pathway (Figure 1d). Together, these results provide evidence for the first time that Itch promotes downregulation of RASSF5A by catalyzing its poly-ubiquitination followed by 26S proteasome-mediated proteolysis.

Itch failed to degrade endogenous RASSF5 in cancer cell lines.
We next determined the steady state levels of endogenous RASSF5 (eRASSF5) in HEK293T cells. Results in Figure 4a indicate that the stability of eRASSF5 was maintained despite Itch overexpression, even after de novo protein synthesis was inhibited by CHX. Conversely, we observed destabilization of RASSF5 in human primary peripheral blood mononuclear cells (hPBMCs) treated with CHX (Supplementary Figure S6). These data suggest that eRASSF5 in HEK293T cells is resistant to Itch-mediated degradation. To further confirm this, RASSF5-GFP was coexpressed with Itch, and the stability of both the ectopically expressed and the endogenous RASSF5 was determined in the presence of CHX under the same experimental conditions. As shown in Figure 4b, Itch is able to degrade the ectopically expressed, but not the endogenous, RASSF5. Interestingly, RASSF5-GFP was degraded within 3 h of CHX treatment even without Itch overexpression (Figure 4b). Taken together, these data suggest the possibility that RASSF5 in cancer cells may undergo some post-translational modification(s) that inhibit its degradation. To understand this further, we checked whether eRASSF5 in transformed cell lines interacts with Itch. Towards this end, the levels of eRASSF5 were determined in various transformed cell lines as well as in primary cells. In addition, RASSF5A-GFP and RASSF5C-GFP were expressed in HEK293T cells as positive and negative controls for Itch interaction. Results in Figure 4c indicate equal levels of eRASSF5 expression in both transformed cells and primary hPBMCs. Surprisingly, eRASSF5 from hPBMCs, but not from transformed cells, interacted with Itch under the same experimental conditions (Figure 4c). Interestingly, an Itch interaction with eRASSF5 was observed when it was coexpressed with RASSF5A-GFP (Figure 4c), suggesting the possibility of a homotypic interaction between eRASSF5 and RASSF5A-GFP. To confirm this further, we performed coimmunoprecipitation to determine the interaction of eRASSF5 with Itch in vivo. RASSF5 and Itch complexes were coimmunoprecipitated with anti-RASSF5 antibody from lysates of hPBMCs and HEK293T cells, followed by western blotting using anti-Itch antibody. Results in Figure 4d indicate that eRASSF5 from hPBMCs, but not from HEK293T cells, interacts with Itch despite equal expression of Itch and RASSF5. Together, these data suggest that RASSF5 in transformed cells may carry a post-translational modification, or a mutation within the RASSF5 ORF, that prevents this protein-protein interaction. This was intriguing, as it may explain the persistence of RASSF5 in tumors where its expression is not silenced by promoter methylation. Results in Supplementary Figure S7A indicate that eRASSF5 from HEK293T cells is more acetylated than that from hPBMCs, despite equal amounts of RASSF5 expression. Surprisingly, no acetylation was observed for the ectopically expressed RASSF5A-GFP (Supplementary Figure S7A). To confirm the specificity of RASSF5 acetylation, RASSF5 was immuno-depleted with anti-RASSF5 antibodies, and the acetylated forms of RASSF5 were determined. Results in Supplementary Figure S7A reveal that acetylated RASSF5 disappeared from the immuno-depleted cell lysates. These results provide evidence that RASSF5 is acetylated in transformed as well as in primary cells, but that the level of RASSF5 acetylation is elevated upon transformation. We next determined whether acetylation modulates the stability of RASSF5 in transformed cell lines.
As shown in Supplementary Figure S7B, acetylation inhibitors (anacardic acid (AA) and C646) reduced RASSF5 acetylation, whereas increased RASSF5 acetylation was observed in cells treated with trichostatin A (TSA, an inhibitor of deacetylation). As the same level of inhibition of RASSF5 acetylation was observed with both AA and C646, we used C646 for further experiments. Results in Figure 5a indicate that inhibition of acetylation by C646 significantly promotes the destabilization of eRASSF5 in a time-dependent manner, in contrast to DMSO-treated cells. To confirm whether the deacetylated form of RASSF5 can interact with Itch, HEK293T cells were transfected with Itch expression plasmids and treated with C646 or TSA. Interestingly, reduced RASSF5 acetylation promoted the interaction with Itch, whereas increased acetylation attenuated its association with Itch (Figure 5b). We next determined whether the observed destabilization of non-acetylated RASSF5 is due to poly-ubiquitination. Results in Figure 5c suggest that Itch wt, but not Itch C830A, induces poly-ubiquitination of eRASSF5 in C646-treated cells, suggesting that acetylation negatively modulates the interaction of RASSF5 with Itch. To understand this further, we correlated the acetylation status as well as the levels of RASSF5 expression in various human primary and cancer cell lines. Higher levels of RASSF5 acetylation were observed in cancer cell lines compared with the indicated primary cells (Figure 5d), despite comparable levels of RASSF5 and Itch expression. To further confirm the specificity of RASSF5 degradation by Itch, human umbilical vein endothelial cells (HUVEC) were transfected with Itch siRNA, and the levels of both Itch and RASSF5 were determined. Results indicate that the RASSF5 level was stabilized when Itch was downregulated by specific siRNA compared with scrambled siRNA (Figure 5e). These results suggest that RASSF5 is degraded by Itch-mediated ubiquitination. Collectively, these data provide evidence that acetylation in transformed cell lines stabilizes RASSF5 by preventing its interaction with Itch.

Figure 3 legend (partial): The expression levels of the RASSF5A mutants and Itch were determined by western blot analysis using anti-Flag and anti-Itch antibodies, respectively. β-Actin was used as internal control. (c) HA-ubiquitin was transiently coexpressed with the indicated combinations of wild-type or mutant RASSF5 (RASSF5A wt, RASSF5A Y24A and RASSF5C) and wild-type or catalytic-activity-deficient Itch (Itch C830A) in HEK293T cells. Expression of all the indicated Itch and RASSF5 variants was determined by western blotting using anti-Itch or anti-Flag antibodies, respectively. Poly-ubiquitinated forms of RASSF5 were detected by immunoprecipitation with anti-Flag antibodies, followed by detection of ubiquitin using anti-HA antibody.

Itch negatively regulates RASSF5 function. To understand the physiological role of RASSF5 downregulation by Itch during cell cycle progression, RASSF5A was coexpressed with either Itch wt or Itch C830A in HEK293T cells. As shown in Figure 6a, transient overexpression of Itch wt, but not Itch C830A, significantly suppresses RASSF5A-mediated G1 arrest. However, the profile of the S and G2/M phases of the cell cycle was not significantly altered when RASSF5A was coexpressed with Itch (Supplementary Figures S8A, B and C). Furthermore, the C646-promoted G1 arrest was downregulated by Itch overexpression (Supplementary Figures S9A and B).
The patterns of G1 arrest inversely correlated with the acetylation status of RASSF5 (Figure 5a), suggesting that acetylation modulates RASSF5 function during the cell cycle. Interestingly, RASSF5-mediated apoptosis was impaired by Itch wt but not by Itch C830A (Figure 6b). The specificity of this regulation was further confirmed by the fact that Itch failed to alter the G1 cell cycle pattern as well as apoptosis when it was overexpressed with RASSF5A Y24A (defective for Itch interaction) (Figures 6a and b). These data indicate that the association of RASSF5 with Itch, or an additional PPxY-dependent protein interaction, may be required to elicit apoptosis (Figure 6b). This latter fact is a likely reason behind the inconsistency between RASSF5A expression in tumor cells and its role as a tumor suppressor, as high levels may indicate enhanced protein acetylation and inactivation of its ability to promote apoptosis of tumor cells.

Figure 4 legend: Itch failed to mediate the degradation of eRASSF5 in transformed cells. (a) Itch expression plasmid was transiently transfected into HEK293T cells, which were then treated with CHX for various time periods. eRASSF5 levels upon Itch overexpression were determined by western blot analysis using anti-RASSF5 antibody. (b) RASSF5A-GFP was expressed alone or together with Itch in HEK293T cells, which were then treated with CHX. At the indicated time intervals, cell lysates were prepared and the levels of ectopically expressed RASSF5-GFP and eRASSF5 were checked by western blotting using anti-GFP and anti-RASSF5 antibodies, respectively. (c) RASSF5 from human primary cells, but not from transformed cells, interacts with Itch in vitro. The indicated cell lysates (HEK293T, HeLa, A549 and hPBMC) containing equal amounts of RASSF5 (eRASSF5 as well as GFP-tagged RASSF5A and RASSF5C) were incubated with either GST or GST-Itch WW domain, and the bound proteins were detected by western blot analysis using anti-RASSF5 (eRASSF5) and anti-GFP (ectopically expressed RASSF5-GFP) antibodies. (d) RASSF5 from human primary cells, but not from transformed cells, interacts with Itch in vivo. HEK293T and hPBMC lysates containing equal amounts of eRASSF5 and Itch were subjected to coimmunoprecipitation with anti-RASSF5 antibody followed by western blotting with anti-Itch antibody.

Figure 5 legend (partial): HEK293T cells were transfected with Itch expression plasmids and treated with CHX in the presence or absence of the acetylation inhibitor C646. The acetylation status as well as the steady state level of RASSF5 were determined by western blot analysis using anti-acetyl-lysine or anti-RASSF5 antibodies, respectively. β-Actin was used as internal control. (b) Acetylation prevents RASSF5 interaction with Itch. Itch was overexpressed in HEK293T cells, which were treated with inhibitors of acetylation (C646) or deacetylation (TSA). Cell lysates containing equal amounts of eRASSF5 and Itch were subjected to coimmunoprecipitation with anti-RASSF5 antibodies followed by western blotting using anti-Itch antibodies. The acetylation status of RASSF5 was determined using anti-acetyl-lysine antibodies. (c) Inhibition of acetylation promotes ubiquitin-mediated degradation of RASSF5. HA-ubiquitin was coexpressed with wild-type or ligase-activity-defective Itch in HEK293T cells treated with C646. Poly-ubiquitinated eRASSF5 was detected by immunoprecipitation with anti-RASSF5 antibodies, followed by detection of ubiquitin using anti-HA antibody. (d) Expression and acetylation status of RASSF5 in different transformed as well as primary cells. Endogenous levels of RASSF5 and Itch expression were determined by western blot analysis using anti-RASSF5 and anti-Itch antibodies, respectively. RASSF5 acetylation was determined using acetylated-lysine antibodies. (e) Downregulation of endogenous Itch stabilizes RASSF5. HUVEC cells were transfected with the indicated siRNAs and collected at different time periods after CHX treatment. The expression levels of RASSF5 and Itch were determined by western blot analysis using anti-RASSF5 and anti-Itch antibodies, respectively.

Figure 6 legend: Itch negatively regulates RASSF5 function. Wild-type, but not catalytically defective, Itch suppresses RASSF5-mediated G1 phase cell cycle arrest (a) and apoptosis (b). HEK293T cells were transiently co-transfected with wild-type or the indicated mutant of RASSF5, alone or in combination with either Itch wt or Itch C830A, and the cell cycle profile as well as apoptosis were analyzed using flow cytometry as described in Materials and Methods. All numbers are mean and S.D. of three independent experiments (**P<0.001, statistically significant difference).

Discussion

Ras effector RASSF5 (NORE1) is found to be downregulated in many tumors, but little is known about the molecular mechanism of its function during cell proliferation. Detailed analysis of RASSF5 amino acid sequences and the data from the in vitro and in vivo protein-protein interaction experiments identified the E3 ubiquitin ligase Itch as a unique interacting partner. Results from the present study indicate that Itch promotes the poly-ubiquitination and degradation of RASSF5 by a 26S proteasome-mediated pathway. In addition, RASSF5 is acetylated in transformed cells, and its interaction with Itch depends on the status of acetylation. Furthermore, by destabilizing RASSF5, Itch showed a strong inhibitory effect on RASSF5-mediated G1 phase cell cycle arrest and apoptosis. To the best of our knowledge, Itch is the first negative regulator of RASSF5, and this downregulation depends upon the status of acetylation in transformed cells. Recent reports demonstrate that Itch downregulates LATS1, an essential component of the Hippo pathway that has an important role in the regulation of various biological processes such as cancer development, organ size control, differentiation, apoptosis and cell growth.8,31,32 On the other hand, RASSF5 interacts with MST1/2, which are essential for the activation of LATS1/2 in the Hippo pathway to regulate these fundamental cellular processes.22,25 Together, these data suggest that an interplay between RASSF5, Itch and the Hippo pathway may be essential to regulate cell growth and apoptosis. In this investigation, we demonstrated that the RASSF5-Itch interaction is mediated by the WW domains of Itch and the PPxY motif in RASSF5. Itch directly recognizes many of its substrates, such as p73, p63 and LATS1, through PPxY motifs.16,20,21 It is possible that Itch may compete with other WW domain-containing ubiquitin ligases to regulate the RASSF5 half-life. On the other hand, a recent study suggests that PPxY motifs interact with the transcriptional activator YAP1 and modulate many cellular processes.15,19,33,34 Together, these results suggest that the PPxY motif interacts not only with ubiquitin ligases but also with transcriptional regulators. It is possible to hypothesize that RASSF5 may interact with transcriptional regulators in addition to Itch to modulate downstream signaling pathways that control cell proliferation.
It is important to further examine how RASSF5 co-ordinates with ubiquitin ligases and other downstream targets to regulate various signaling pathways. It is interesting to note that, in addition to the repression of RASSF5 expression by promoter hypermethylation,9,10 yet another, previously unknown mechanism may downregulate RASSF5 function in cancer cells. Our data suggest that acetylation of RASSF5 prevents its interaction with Itch and renders it resistant to Itch-mediated degradation in transformed cells. Recent reports suggest that acetylation increases the stability of transcription factors such as FOXO3 and attenuates their transcriptional activity by impairing DNA binding.24,27 In addition, acetyl transferases such as p300 were found to be overexpressed in tumors, and this overexpression was associated with poor survival rates in patients with prostate cancer, colon carcinoma, breast cancer and non-small cell lung cancer.35,36 On the other hand, downregulation of the activity of deacetylases such as SIRT1 results in tumorigenesis.27 Interestingly, inhibition of eRASSF5 acetylation in HEK293T cells by the acetyl transferase inhibitor C646 promotes its binding to Itch and results in RASSF5 degradation. As lysine residues in proteins undergo both acetylation and ubiquitination,27 the data from the present study support the hypothesis that the acetylated form of RASSF5 may not be recognized by the ubiquitin ligase Itch, leading to the stabilization of RASSF5 in tumors. Furthermore, the acetylated form of RASSF5 may be a non-functional protein in tumor cells, suggesting the possibility that acetylation has an important role in the negative regulation of RASSF5 function during tumorigenesis. This is in agreement with recent reports demonstrating that acetylation attenuates the transcriptional activity of FOXO1 and FOXO3.24,27 Together, these observations lead to the hypothesis that acetylation may act as both a positive and a negative regulator of protein function in a context-dependent manner. Therefore, further experiments are needed to examine whether RASSF5 is a transcriptional regulator, and how acetylation regulates its function during tumorigenesis. In conclusion, the present investigation provides evidence that the E3 ubiquitin ligase Itch negatively regulates the Ras effector RASSF5 in an acetylation-dependent manner. Further characterization of their functional interaction, as well as of the role of acetylation in RASSF5 function in clinical cancer patients, will provide new insights into the diagnosis and treatment of human cancers.

Materials and Methods

Plasmids. Generation of wild-type and mutant (Y13A and Y24A) RASSF5 expression plasmids has been described previously.3,11 The plasmids encoding wild-type and variant Itch WW domains were generated by QuikChange site-directed mutagenesis according to the manufacturer's instructions (Stratagene, La Jolla, CA, USA), and were expressed as GST fusion proteins. Itch (wild-type as well as ligase-activity defective) expression plasmids were kindly provided by Dr. Pawson37 and Dr. Melino.21 The primers used for PCR amplification to create the RASSF5 and Itch fusion constructs are listed in Supplementary Table 1. All mutant constructs were sequenced to verify the integrity of each clone.

Cell culture, transfection, coimmunoprecipitation and western blotting. A549, HeLa and HEK293T cells were maintained in Dulbecco's modified Eagle's medium as described elsewhere.3,11 HUVEC were isolated from umbilical cords by digestion with collagenase as described elsewhere.38
HEK293T cells were transfected with RASSF5 or Itch expression plasmids using Lipofectin (Invitrogen Life Technologies, Grand Island, NY, USA) as described elsewhere.3,11 To check the expression of RASSF5 or Itch, transfected cells were lysed in 1X cell lysis buffer (25 mM Tris-HCl, pH 7.4, 150 mM KCl, 1 mM Na2EDTA, 1 mM EGTA, 1% Triton X-100, 2.5 mM sodium pyrophosphate, 1 mM β-glycerophosphate, 0.4 mM PMSF, 1 mM Na-fluoride, 1 mM Na-orthovanadate and 1 µg/ml each of aprotinin, leupeptin and pepstatin) at 32 h post-transfection, and the lysates were separated by SDS-12% PAGE. For coimmunoprecipitation, equal amounts of cell lysates were incubated with anti-RASSF5 (Abcam, Cambridge, UK) or anti-Itch antibodies (BD Transduction Laboratories, San Jose, CA, USA), and the bound protein complexes were eluted and resolved by SDS-12% PAGE. Separated proteins were transferred to a Hybond-P membrane (GE Healthcare, Stockholm, Sweden) and probed with the appropriate antibodies (anti-RASSF5, anti-Itch or anti-GFP). Anti-rabbit acetylated-lysine antibody (Cell Signaling, Danvers, USA) was used to determine the acetylation status of RASSF5. Protein-bound antibodies were probed with horseradish peroxidase-conjugated specific secondary antibodies and developed using the enhanced chemiluminescence-plus detection system (GE Healthcare).

Fluorescence microscopy. HeLa cells grown on chamber culture slides (BD Biosciences, Franklin Lakes, NJ, USA) were transfected with expression plasmids containing RASSF5-GFP or Itch using Lipofectin as described elsewhere.3,11 At 32 h post-transfection, cells were fixed with 3% paraformaldehyde for 20 min and incubated with anti-Itch antibodies. Alexa Fluor 594 was used as the secondary antibody, and the cells were then mounted in mounting medium (Vector Laboratories, Burlingame, CA, USA) containing 4′,6-diamidino-2-phenylindole to stain nuclei. RASSF5-GFP signals were visualized directly. Samples were viewed with an LSM710 laser scanning confocal microscope (Carl Zeiss, Jena, Germany). Image acquisition was carried out using Zen 2009 Image software (Carl Zeiss), and Adobe Photoshop CS3 was used for image processing.

Expression and purification of GST-Itch WW domain fusion proteins. Wild-type and variant Itch WW domain-containing expression vectors were transformed into Escherichia coli BL21(DE3); a single colony was grown at 37 °C in kanamycin-containing medium, and protein expression was induced for 4 h at 37 °C with 1 mM IPTG. Cells were harvested, and cell lysates were mixed with Glutathione-Sepharose beads (GE Healthcare). Bound proteins were eluted in elution buffer (10 mM reduced glutathione in 50 mM Tris-Cl, pH 7.4), and the integrity of the fusion proteins was analyzed by SDS-12% PAGE followed by staining with Coomassie blue.

Half-life determination. HEK293T cells were transfected with RASSF5A, RASSF5C and Itch expression plasmids. Twenty-four hours after transfection, cells were treated with 25 µg/ml of CHX for different time periods. Cells were collected at the indicated time points, and equal amounts of total cellular protein were loaded on SDS-12% PAGE followed by western blot analysis.

GST pull-down assay. HEK293T cells were transfected with RASSF5A-Flag (wild-type or mutant) expression constructs as described above. Cell lysates containing equal amounts of RASSF5 were incubated with glutathione-sepharose beads prebound with Itch WW domain (wild-type or mutants).
After 4 h, the glutathione-sepharose beads were washed, and the bound proteins were eluted and separated by SDS-12% PAGE followed by western blot analysis using anti-Flag or anti-RASSF5 antibodies.

In vivo ubiquitination assay. HEK293T cells were transiently transfected with plasmids expressing HA-ubiquitin and RASSF5A-Flag or RASSF5C-Flag, alone or together with Itch (wild-type or the ligase-defective mutant, Itch C830A). After 24 h of transfection, cells were treated with 20 µM MG132 for 12 h before being harvested. Cell extracts were subjected to immunoprecipitation using anti-Flag antibodies (Sigma), and ubiquitin conjugates were detected by western blot analysis using anti-HA antibodies (Santa Cruz Biotechnology, Santa Cruz, CA, USA).

RNA interference. HUVEC cells were transiently transfected with control siRNA or Itch siRNA using Oligofectamine (Invitrogen Life Technologies) according to the manufacturer's instructions. The sequences of the Itch-specific and control siRNAs (VBC Biotech, Vienna, Austria) are as follows: Itch-1: 5′-GGUGACAAAGAGCCAACAGAGdTdT-3′ and scrambled siRNA: 5′-ACAGACUUCGGAGUACCUGdTdT-3′. At 36 h post-transfection, cells were collected, and the levels of eRASSF5 and Itch, as well as the acetylation status of eRASSF5, were determined by western blot analysis using the respective antibodies as described above.

Cell cycle and apoptosis assay. HEK293T cells were transfected with RASSF5 alone or in combination with Itch (wild-type or the ligase-activity-defective mutant) using Lipofectamine. For cell cycle analysis, cells were treated with hypotonic buffer (0.1% sodium citrate, 0.1% NP40, 45 µg/ml propidium iodide, 50 µg/ml ribonuclease A, pH 7.4) for 10 min at room temperature at 36 h post-transfection. For the apoptosis assay, cells were collected at 36 h post-transfection and incubated with buffer containing 5 µl FITC-Annexin V, 5 µl propidium iodide (BD Pharmingen, San Diego, CA, USA), 0.1 M Hepes/NaOH (pH 7.4), 1.4 M NaCl and 25 mM CaCl2 for 15 min at room temperature. All samples were analyzed by flow cytometry (FACSCanto II, BD Biosciences), and the data were processed using FACS Diva software.
import java.util.Scanner;

public class Main {
    public static void main(String[] args) {
        // Read the current state of the balance, e.g. "AC|T": characters on the
        // left and right pans separated by '|', followed by the unused weights.
        Scanner input = new Scanner(System.in);
        String balance = input.next();
        String[] arr = balance.split("\\|");
        String weights = input.next();

        String a = "";
        String b = "";
        if (arr.length >= 1) a = arr[0];
        if (arr.length >= 2) b = arr[1];

        // Balancing is impossible if the pans differ by more weights than we have,
        // or if the total number of items cannot be split evenly between the pans.
        if (Math.abs(a.length() - b.length()) > weights.length()
                || (a.length() + b.length() + weights.length()) % 2 != 0) {
            System.out.println("Impossible");
        } else {
            int c = 0;
            long x = a.length();
            long y = b.length();
            // First even out the pans by adding weights to the lighter side.
            if (x < y) {
                for (int i = 0; i < (y - x); i++) {
                    a += weights.charAt(c);
                    c++;
                }
            } else {
                for (int i = 0; i < (x - y); i++) {
                    b += weights.charAt(c);
                    c++;
                }
            }
            // Distribute the remaining weights (an even count) one to each pan.
            for (int i = c; i < weights.length(); i += 2) {
                a += weights.charAt(i);
                b += weights.charAt(i + 1);
            }
            System.out.println(a + "|" + b);
        }
        input.close();
    }
}
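// A minimal usage sketch (hypothetical, not part of the original solution): it feeds a
// sample case to the solver above by redirecting System.in, assuming the two-token input
// format the solver expects (pan contents separated by '|', then the unused weights).
// The class name MainDemo and the sample values are illustrative only.
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

class MainDemo {
    public static void main(String[] args) {
        // Left pan "AC", right pan "T", spare weights "L1K": one weight evens the pans,
        // and the remaining two are split one per side, e.g. printing "AC1|TLK".
        String sample = "AC|T\nL1K\n";
        System.setIn(new ByteArrayInputStream(sample.getBytes(StandardCharsets.UTF_8)));
        Main.main(new String[0]);
    }
}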
/* * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.databasemigrationservice.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Settings for exporting data to Amazon S3. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dms-2016-01-01/S3Settings" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class S3Settings implements Serializable, Cloneable, StructuredPojo { /** * <p> * The Amazon Resource Name (ARN) used by the service to access the IAM role. The role must allow the * <code>iam:PassRole</code> action. It is a required parameter that enables DMS to write and read objects from an * S3 bucket. * </p> */ private String serviceAccessRoleArn; /** * <p> * Specifies how tables are defined in the S3 source files only. * </p> */ private String externalTableDefinition; /** * <p> * The delimiter used to separate rows in the .csv file for both source and target. The default is a carriage return * (<code>\n</code>). * </p> */ private String csvRowDelimiter; /** * <p> * The delimiter used to separate columns in the .csv file for both source and target. The default is a comma. * </p> */ private String csvDelimiter; /** * <p> * An optional parameter to set a folder name in the S3 bucket. If provided, tables are created in the path * <code> <i>bucketFolder</i>/<i>schema_name</i>/<i>table_name</i>/</code>. If this parameter isn't specified, then * the path used is <code> <i>schema_name</i>/<i>table_name</i>/</code>. * </p> */ private String bucketFolder; /** * <p> * The name of the S3 bucket. * </p> */ private String bucketName; /** * <p> * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. Either * set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This parameter applies * to both .csv and .parquet file formats. * </p> */ private String compressionType; /** * <p> * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either <code>SSE_S3</code> * (the default) or <code>SSE_KMS</code>. * </p> * <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t change the * existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>.
* </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * </ul> */ private String encryptionMode; /** * <p> * If you are using <code>SSE_KMS</code> for the <code>EncryptionMode</code>, provide the KMS key ID. The key that * you use needs an attached policy that enables Identity and Access Management (IAM) user permissions and allows * use of the key. * </p> * <p> * Here is a CLI example: * <code>aws dms create-endpoint --endpoint-identifier <i>value</i> --endpoint-type target --engine-name s3 --s3-settings ServiceAccessRoleArn=<i>value</i>,BucketFolder=<i>value</i>,BucketName=<i>value</i>,EncryptionMode=SSE_KMS,ServerSideEncryptionKmsKeyId=<i>value</i> </code> * </p> */ private String serverSideEncryptionKmsKeyId; /** * <p> * The format of the data that you want to use for output. You can choose one of the following: * </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * </ul> */ private String dataFormat; /** * <p> * The type of encoding you are using: * </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated values * more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The dictionary is * stored in a dictionary page for each column chunk. * </p> * </li> * </ul> */ private String encodingType; /** * <p> * The maximum size of an encoded dictionary page of a column. If the dictionary page exceeds this, this column is * stored using an encoding type of <code>PLAIN</code>. This parameter defaults to 1024 * 1024 bytes (1 MiB), the * maximum size of a dictionary page before it reverts to <code>PLAIN</code> encoding. This size is used for * .parquet file format only. * </p> */ private Integer dictPageSizeLimit; /** * <p> * The number of rows in a row group. A smaller row group size provides faster reads. But as the number of row * groups grows, the slower writes become. This parameter defaults to 10,000 rows. This number is used for .parquet * file format only. * </p> * <p> * If you choose a value larger than the maximum, <code>RowGroupLength</code> is set to the max row group length in * bytes (64 * 1024 * 1024). * </p> */ private Integer rowGroupLength; /** * <p> * The size of one data page in bytes. 
This parameter defaults to 1024 * 1024 bytes (1 MiB). This number is used for * .parquet file format only. * </p> */ private Integer dataPageSize; /** * <p> * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * </p> */ private String parquetVersion; /** * <p> * A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable statistics, * <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, <code>MAX</code>, and * <code>MIN</code> values. This parameter defaults to <code>true</code>. This value is used for .parquet file * format only. * </p> */ private Boolean enableStatistics; /** * <p> * A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output files only * to indicate how the rows were added to the source database. * </p> * <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information is * recorded in these output files for a full load to indicate that the rows were inserted at the source database. If * <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT is recorded as an I * annotation in the first field of the .csv file. This allows the format of your target records from a full load to * be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the <code>CdcInsertsAndUpdates</code> * parameters for output to .csv files only. For more information about how these settings work together, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps"> * Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * </note> */ private Boolean includeOpForFullLoad; /** * <p> * A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar storage * (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv or .parquet * record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate whether the row was * inserted, updated, or deleted at the source database for a CDC load to the target. * </p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the source * database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are recorded depends * on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT operation at the * source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC record is written without a * first field to indicate the INSERT operation at the source. For more information about how these settings work * together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. 
* </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. * </p> * </note> */ private Boolean cdcInsertsOnly; /** * <p> * A value that when nonblank causes DMS to add a column with timestamp information to the endpoint data for an * Amazon S3 target. * </p> * <note> * <p> * DMS supports the <code>TimestampColumnName</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * DMS includes an additional <code>STRING</code> column in the .csv or .parquet object files of your migrated data * when you set <code>TimestampColumnName</code> to a nonblank value. * </p> * <p> * For a full load, each row of this timestamp column contains a timestamp for when the data was transferred from * the source to the target by DMS. * </p> * <p> * For a change data capture (CDC) load, each row of the timestamp column contains the timestamp for the commit of * that row in the source database. * </p> * <p> * The string format for this timestamp column value is <code>yyyy-MM-dd HH:mm:ss.SSSSSS</code>. By default, the * precision of this value is in microseconds. For a CDC load, the rounding of the precision depends on the commit * timestamp supported by DMS for the source database. * </p> * <p> * When the <code>AddColumnName</code> parameter is set to <code>true</code>, DMS also includes a name for the * timestamp column that you set with <code>TimestampColumnName</code>. * </p> */ private String timestampColumnName; /** * <p> * A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an Amazon S3 * object file in .parquet format. * </p> * <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes all * <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, DMS writes * them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> values. Set * this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted only if you plan to * query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp column * value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> * </note> */ private Boolean parquetTimestampInMillisecond; /** * <p> * A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or .parquet * (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs from * the source database are migrated to the .csv or .parquet file. 
* </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to <code>true</code>, * the first field of every CDC record is set to either <code>I</code> or <code>U</code> to indicate INSERT and * UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set to <code>false</code>, CDC * records are written without an indication of INSERT or UPDATE operations at the source. For more information * about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. * </p> * </note> */ private Boolean cdcInsertsAndUpdates; /** * <p> * When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit dates. The * default value is <code>false</code>. For more information about date-based folder partitioning, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning">Using * date-based folder partitioning</a>. * </p> */ private Boolean datePartitionEnabled; /** * <p> * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> */ private String datePartitionSequence; /** * <p> * Specifies a date separating delimiter to use during folder partitioning. The default value is <code>SLASH</code>. * Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> */ private String datePartitionDelimiter; /** * <p> * This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv format. * If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue"> * <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for these * columns. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * </note> */ private Boolean useCsvNoSupValue; /** * <p> * This setting only applies if your Amazon S3 output files during a change data capture (CDC) load are written in * .csv format. If <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-UseCsvNoSupValue"> * <code>UseCsvNoSupValue</code> </a> is set to true, specify a string value that you want DMS to use for all * columns not included in the supplemental log. If you do not specify a string value, DMS uses the null value for * these columns regardless of the <code>UseCsvNoSupValue</code> setting. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. 
* </p> * </note> */ private String csvNoSupValue; /** * <p> * If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the Amazon * S3 target specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> */ private Boolean preserveTransactions; /** * <p> * Specifies the folder path of CDC files. For an S3 source, this setting is required if a task captures change * data; otherwise, it's optional. If <code>CdcPath</code> is set, DMS reads CDC files from this path and replicates * the data changes to the target endpoint. For an S3 target if you set <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-PreserveTransactions" * > <code>PreserveTransactions</code> </a> to <code>true</code>, DMS verifies that you have set this parameter to a * folder path on your S3 target where DMS can save the transaction order for the CDC load. DMS creates this CDC * folder path in either your S3 target working directory or the S3 target location specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketFolder"> * <code>BucketFolder</code> </a> and <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketName"> * <code>BucketName</code> </a>. * </p> * <p> * For example, if you specify <code>CdcPath</code> as <code>MyChangedData</code>, and you specify * <code>BucketName</code> as <code>MyTargetBucket</code> but do not specify <code>BucketFolder</code>, DMS creates * the CDC folder path following: <code>MyTargetBucket/MyChangedData</code>. * </p> * <p> * If you specify the same <code>CdcPath</code>, and you specify <code>BucketName</code> as * <code>MyTargetBucket</code> and <code>BucketFolder</code> as <code>MyTargetData</code>, DMS creates the CDC * folder path following: <code>MyTargetBucket/MyTargetData/MyChangedData</code>. * </p> * <p> * For more information on CDC including transaction order on an S3 target, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> */ private String cdcPath; /** * <p> * A value that enables DMS to specify a predefined (canned) access control list for objects created in an Amazon S3 * bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> * </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. 
* </p> */ private String cannedAclForObjects; /** * <p> * An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column name * information to the .csv output file. * </p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, <code>y</code>, * and <code>n</code>. * </p> */ private Boolean addColumnName; /** * <p> * Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. * </p> * <p> * When <code>CdcMaxBatchInterval</code> and <code>CdcMinFileSize</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 60 seconds. * </p> */ private Integer cdcMaxBatchInterval; /** * <p> * Minimum file size, defined in megabytes, to reach for a file output to Amazon S3. * </p> * <p> * When <code>CdcMinFileSize</code> and <code>CdcMaxBatchInterval</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 32 MB. * </p> */ private Integer cdcMinFileSize; /** * <p> * An optional parameter that specifies how DMS treats null values. While handling the null value, you can use this * parameter to pass a user-defined string as null when writing to the target. For example, when target columns are * not nullable, you can use this option to differentiate between the empty string value and the null value. So, if * you set this parameter value to the empty string ("" or ''), DMS treats the empty string as the null value * instead of <code>NULL</code>. * </p> * <p> * The default value is <code>NULL</code>. Valid values include any valid string. * </p> */ private String csvNullValue; /** * <p> * When this value is set to 1, DMS ignores the first row header in a .csv file. A value of 1 turns on the feature; * a value of 0 turns off the feature. * </p> * <p> * The default is 0. * </p> */ private Integer ignoreHeaderRows; /** * <p> * A value that specifies the maximum size (in KB) of any .csv file to be created while migrating to an S3 target * during full load. * </p> * <p> * The default value is 1,048,576 KB (1 GB). Valid values include 1 to 1,048,576. * </p> */ private Integer maxFileSize; /** * <p> * For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double quotation * mark has to be followed by an ending double quotation mark. This formatting complies with RFC 4180. When this * value is set to <code>false</code> or <code>n</code>, string literals are copied to the target as is. In this * case, a delimiter (row or column) signals the end of the field. Thus, you can't use a delimiter as part of the * string, because it signals the end of the value. * </p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to Amazon * S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> using Amazon S3 as * a target, if the data has quotation marks or newline characters in it, DMS encloses the entire column with an * additional pair of double quotation marks ("). Every quotation mark within the data is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. 
* </p> */ private Boolean rfc4180; /** * <p> * The Amazon Resource Name (ARN) used by the service to access the IAM role. The role must allow the * <code>iam:PassRole</code> action. It is a required parameter that enables DMS to write and read objects from an * S3 bucket. * </p> * * @param serviceAccessRoleArn * The Amazon Resource Name (ARN) used by the service to access the IAM role. The role must allow the * <code>iam:PassRole</code> action. It is a required parameter that enables DMS to write and read objects * from an S3 bucket. */ public void setServiceAccessRoleArn(String serviceAccessRoleArn) { this.serviceAccessRoleArn = serviceAccessRoleArn; } /** * <p> * The Amazon Resource Name (ARN) used by the service to access the IAM role. The role must allow the * <code>iam:PassRole</code> action. It is a required parameter that enables DMS to write and read objects from an * S3 bucket. * </p> * * @return The Amazon Resource Name (ARN) used by the service to access the IAM role. The role must allow the * <code>iam:PassRole</code> action. It is a required parameter that enables DMS to write and read objects * from an S3 bucket. */ public String getServiceAccessRoleArn() { return this.serviceAccessRoleArn; } /** * <p> * The Amazon Resource Name (ARN) used by the service to access the IAM role. The role must allow the * <code>iam:PassRole</code> action. It is a required parameter that enables DMS to write and read objects from an * S3 bucket. * </p> * * @param serviceAccessRoleArn * The Amazon Resource Name (ARN) used by the service to access the IAM role. The role must allow the * <code>iam:PassRole</code> action. It is a required parameter that enables DMS to write and read objects * from an S3 bucket. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withServiceAccessRoleArn(String serviceAccessRoleArn) { setServiceAccessRoleArn(serviceAccessRoleArn); return this; } /** * <p> * Specifies how tables are defined in the S3 source files only. * </p> * * @param externalTableDefinition * Specifies how tables are defined in the S3 source files only. */ public void setExternalTableDefinition(String externalTableDefinition) { this.externalTableDefinition = externalTableDefinition; } /** * <p> * Specifies how tables are defined in the S3 source files only. * </p> * * @return Specifies how tables are defined in the S3 source files only. */ public String getExternalTableDefinition() { return this.externalTableDefinition; } /** * <p> * Specifies how tables are defined in the S3 source files only. * </p> * * @param externalTableDefinition * Specifies how tables are defined in the S3 source files only. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withExternalTableDefinition(String externalTableDefinition) { setExternalTableDefinition(externalTableDefinition); return this; } /** * <p> * The delimiter used to separate rows in the .csv file for both source and target. The default is a carriage return * (<code>\n</code>). * </p> * * @param csvRowDelimiter * The delimiter used to separate rows in the .csv file for both source and target. The default is a carriage * return (<code>\n</code>). */ public void setCsvRowDelimiter(String csvRowDelimiter) { this.csvRowDelimiter = csvRowDelimiter; } /** * <p> * The delimiter used to separate rows in the .csv file for both source and target. The default is a carriage return * (<code>\n</code>). 
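 * </p>
 * <p>
 * For example, an illustrative configuration that keeps the default row delimiter but writes semicolon-separated
 * columns could chain the mutators defined on this class:
 * </p>
 * <pre>{@code
 * // Illustrative only; both mutators are defined on this class.
 * S3Settings delimiterSettings = new S3Settings()
 *         .withCsvDelimiter(";")
 *         .withCsvRowDelimiter("\n");
 * }</pre>
 * <p>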
* </p> * * @return The delimiter used to separate rows in the .csv file for both source and target. The default is a * carriage return (<code>\n</code>). */ public String getCsvRowDelimiter() { return this.csvRowDelimiter; } /** * <p> * The delimiter used to separate rows in the .csv file for both source and target. The default is a carriage return * (<code>\n</code>). * </p> * * @param csvRowDelimiter * The delimiter used to separate rows in the .csv file for both source and target. The default is a carriage * return (<code>\n</code>). * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCsvRowDelimiter(String csvRowDelimiter) { setCsvRowDelimiter(csvRowDelimiter); return this; } /** * <p> * The delimiter used to separate columns in the .csv file for both source and target. The default is a comma. * </p> * * @param csvDelimiter * The delimiter used to separate columns in the .csv file for both source and target. The default is a * comma. */ public void setCsvDelimiter(String csvDelimiter) { this.csvDelimiter = csvDelimiter; } /** * <p> * The delimiter used to separate columns in the .csv file for both source and target. The default is a comma. * </p> * * @return The delimiter used to separate columns in the .csv file for both source and target. The default is a * comma. */ public String getCsvDelimiter() { return this.csvDelimiter; } /** * <p> * The delimiter used to separate columns in the .csv file for both source and target. The default is a comma. * </p> * * @param csvDelimiter * The delimiter used to separate columns in the .csv file for both source and target. The default is a * comma. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCsvDelimiter(String csvDelimiter) { setCsvDelimiter(csvDelimiter); return this; } /** * <p> * An optional parameter to set a folder name in the S3 bucket. If provided, tables are created in the path * <code> <i>bucketFolder</i>/<i>schema_name</i>/<i>table_name</i>/</code>. If this parameter isn't specified, then * the path used is <code> <i>schema_name</i>/<i>table_name</i>/</code>. * </p> * * @param bucketFolder * An optional parameter to set a folder name in the S3 bucket. If provided, tables are created in the path * <code> <i>bucketFolder</i>/<i>schema_name</i>/<i>table_name</i>/</code>. If this parameter isn't * specified, then the path used is <code> <i>schema_name</i>/<i>table_name</i>/</code>. */ public void setBucketFolder(String bucketFolder) { this.bucketFolder = bucketFolder; } /** * <p> * An optional parameter to set a folder name in the S3 bucket. If provided, tables are created in the path * <code> <i>bucketFolder</i>/<i>schema_name</i>/<i>table_name</i>/</code>. If this parameter isn't specified, then * the path used is <code> <i>schema_name</i>/<i>table_name</i>/</code>. * </p> * * @return An optional parameter to set a folder name in the S3 bucket. If provided, tables are created in the path * <code> <i>bucketFolder</i>/<i>schema_name</i>/<i>table_name</i>/</code>. If this parameter isn't * specified, then the path used is <code> <i>schema_name</i>/<i>table_name</i>/</code>. */ public String getBucketFolder() { return this.bucketFolder; } /** * <p> * An optional parameter to set a folder name in the S3 bucket. If provided, tables are created in the path * <code> <i>bucketFolder</i>/<i>schema_name</i>/<i>table_name</i>/</code>. 
If this parameter isn't specified, then * the path used is <code> <i>schema_name</i>/<i>table_name</i>/</code>. * </p> * * @param bucketFolder * An optional parameter to set a folder name in the S3 bucket. If provided, tables are created in the path * <code> <i>bucketFolder</i>/<i>schema_name</i>/<i>table_name</i>/</code>. If this parameter isn't * specified, then the path used is <code> <i>schema_name</i>/<i>table_name</i>/</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withBucketFolder(String bucketFolder) { setBucketFolder(bucketFolder); return this; } /** * <p> * The name of the S3 bucket. * </p> * * @param bucketName * The name of the S3 bucket. */ public void setBucketName(String bucketName) { this.bucketName = bucketName; } /** * <p> * The name of the S3 bucket. * </p> * * @return The name of the S3 bucket. */ public String getBucketName() { return this.bucketName; } /** * <p> * The name of the S3 bucket. * </p> * * @param bucketName * The name of the S3 bucket. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withBucketName(String bucketName) { setBucketName(bucketName); return this; } /** * <p> * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. Either * set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This parameter applies * to both .csv and .parquet file formats. * </p> * * @param compressionType * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. * Either set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This * parameter applies to both .csv and .parquet file formats. * @see CompressionTypeValue */ public void setCompressionType(String compressionType) { this.compressionType = compressionType; } /** * <p> * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. Either * set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This parameter applies * to both .csv and .parquet file formats. * </p> * * @return An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. * Either set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This * parameter applies to both .csv and .parquet file formats. * @see CompressionTypeValue */ public String getCompressionType() { return this.compressionType; } /** * <p> * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. Either * set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This parameter applies * to both .csv and .parquet file formats. * </p> * * @param compressionType * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. * Either set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This * parameter applies to both .csv and .parquet file formats. * @return Returns a reference to this object so that method calls can be chained together. * @see CompressionTypeValue */ public S3Settings withCompressionType(String compressionType) { setCompressionType(compressionType); return this; } /** * <p> * An optional parameter to use GZIP to compress the target files. 
Set to GZIP to compress the target files. Either * set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This parameter applies * to both .csv and .parquet file formats. * </p> * * @param compressionType * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. * Either set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This * parameter applies to both .csv and .parquet file formats. * @see CompressionTypeValue */ public void setCompressionType(CompressionTypeValue compressionType) { withCompressionType(compressionType); } /** * <p> * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. Either * set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This parameter applies * to both .csv and .parquet file formats. * </p> * * @param compressionType * An optional parameter to use GZIP to compress the target files. Set to GZIP to compress the target files. * Either set this parameter to NONE (the default) or don't use it to leave the files uncompressed. This * parameter applies to both .csv and .parquet file formats. * @return Returns a reference to this object so that method calls can be chained together. * @see CompressionTypeValue */ public S3Settings withCompressionType(CompressionTypeValue compressionType) { this.compressionType = compressionType.toString(); return this; } /** * <p> * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either <code>SSE_S3</code> * (the default) or <code>SSE_KMS</code>. * </p> * <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t change the * existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. * </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * </ul> * * @param encryptionMode * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either * <code>SSE_S3</code> (the default) or <code>SSE_KMS</code>. </p> <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t * change the existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. 
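 * </p>
 * <p>
 * For example, an illustrative KMS-encrypted target configuration might look like the following. The role and key
 * identifiers are placeholders, and the string form of the mode is used; the <code>EncryptionModeValue</code>
 * overloads defined on this class work as well.
 * </p>
 * <pre>{@code
 * // Illustrative only: replace the role and key identifiers with your own values.
 * S3Settings kmsSettings = new S3Settings()
 *         .withServiceAccessRoleArn("arn:aws:iam::111122223333:role/dms-s3-role")
 *         .withEncryptionMode("SSE_KMS")
 *         .withServerSideEncryptionKmsKeyId("arn:aws:kms:us-east-1:111122223333:key/EXAMPLE-KEY-ID");
 * }</pre>
 * <p>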
* </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * @see EncryptionModeValue */ public void setEncryptionMode(String encryptionMode) { this.encryptionMode = encryptionMode; } /** * <p> * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either <code>SSE_S3</code> * (the default) or <code>SSE_KMS</code>. * </p> * <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t change the * existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. * </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * </ul> * * @return The type of server-side encryption that you want to use for your data. This encryption type is part of * the endpoint settings or the extra connections attributes for Amazon S3. You can choose either * <code>SSE_S3</code> (the default) or <code>SSE_KMS</code>. </p> <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t * change the existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. 
* </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to * allow <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * @see EncryptionModeValue */ public String getEncryptionMode() { return this.encryptionMode; } /** * <p> * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either <code>SSE_S3</code> * (the default) or <code>SSE_KMS</code>. * </p> * <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t change the * existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. * </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * </ul> * * @param encryptionMode * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either * <code>SSE_S3</code> (the default) or <code>SSE_KMS</code>. </p> <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t * change the existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. 
* </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. * @see EncryptionModeValue */ public S3Settings withEncryptionMode(String encryptionMode) { setEncryptionMode(encryptionMode); return this; } /** * <p> * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either <code>SSE_S3</code> * (the default) or <code>SSE_KMS</code>. * </p> * <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t change the * existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. * </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * </ul> * * @param encryptionMode * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either * <code>SSE_S3</code> (the default) or <code>SSE_KMS</code>. </p> <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t * change the existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. 
* </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * @see EncryptionModeValue */ public void setEncryptionMode(EncryptionModeValue encryptionMode) { withEncryptionMode(encryptionMode); } /** * <p> * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either <code>SSE_S3</code> * (the default) or <code>SSE_KMS</code>. * </p> * <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t change the * existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. * </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * </ul> * * @param encryptionMode * The type of server-side encryption that you want to use for your data. This encryption type is part of the * endpoint settings or the extra connections attributes for Amazon S3. You can choose either * <code>SSE_S3</code> (the default) or <code>SSE_KMS</code>. </p> <note> * <p> * For the <code>ModifyEndpoint</code> operation, you can change the existing value of the * <code>EncryptionMode</code> parameter from <code>SSE_KMS</code> to <code>SSE_S3</code>. But you can’t * change the existing value from <code>SSE_S3</code> to <code>SSE_KMS</code>. 
* </p> * </note> * <p> * To use <code>SSE_S3</code>, you need an Identity and Access Management (IAM) role with permission to allow * <code>"arn:aws:s3:::dms-*"</code> to use the following actions: * </p> * <ul> * <li> * <p> * <code>s3:CreateBucket</code> * </p> * </li> * <li> * <p> * <code>s3:ListBucket</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucket</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketLocation</code> * </p> * </li> * <li> * <p> * <code>s3:GetObject</code> * </p> * </li> * <li> * <p> * <code>s3:PutObject</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteObject</code> * </p> * </li> * <li> * <p> * <code>s3:GetObjectVersion</code> * </p> * </li> * <li> * <p> * <code>s3:GetBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:PutBucketPolicy</code> * </p> * </li> * <li> * <p> * <code>s3:DeleteBucketPolicy</code> * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. * @see EncryptionModeValue */ public S3Settings withEncryptionMode(EncryptionModeValue encryptionMode) { this.encryptionMode = encryptionMode.toString(); return this; } /** * <p> * If you are using <code>SSE_KMS</code> for the <code>EncryptionMode</code>, provide the KMS key ID. The key that * you use needs an attached policy that enables Identity and Access Management (IAM) user permissions and allows * use of the key. * </p> * <p> * Here is a CLI example: * <code>aws dms create-endpoint --endpoint-identifier <i>value</i> --endpoint-type target --engine-name s3 --s3-settings ServiceAccessRoleArn=<i>value</i>,BucketFolder=<i>value</i>,BucketName=<i>value</i>,EncryptionMode=SSE_KMS,ServerSideEncryptionKmsKeyId=<i>value</i> </code> * </p> * * @param serverSideEncryptionKmsKeyId * If you are using <code>SSE_KMS</code> for the <code>EncryptionMode</code>, provide the KMS key ID. The key * that you use needs an attached policy that enables Identity and Access Management (IAM) user permissions * and allows use of the key.</p> * <p> * Here is a CLI example: * <code>aws dms create-endpoint --endpoint-identifier <i>value</i> --endpoint-type target --engine-name s3 --s3-settings ServiceAccessRoleArn=<i>value</i>,BucketFolder=<i>value</i>,BucketName=<i>value</i>,EncryptionMode=SSE_KMS,ServerSideEncryptionKmsKeyId=<i>value</i> </code> */ public void setServerSideEncryptionKmsKeyId(String serverSideEncryptionKmsKeyId) { this.serverSideEncryptionKmsKeyId = serverSideEncryptionKmsKeyId; } /** * <p> * If you are using <code>SSE_KMS</code> for the <code>EncryptionMode</code>, provide the KMS key ID. The key that * you use needs an attached policy that enables Identity and Access Management (IAM) user permissions and allows * use of the key. * </p> * <p> * Here is a CLI example: * <code>aws dms create-endpoint --endpoint-identifier <i>value</i> --endpoint-type target --engine-name s3 --s3-settings ServiceAccessRoleArn=<i>value</i>,BucketFolder=<i>value</i>,BucketName=<i>value</i>,EncryptionMode=SSE_KMS,ServerSideEncryptionKmsKeyId=<i>value</i> </code> * </p> * * @return If you are using <code>SSE_KMS</code> for the <code>EncryptionMode</code>, provide the KMS key ID. 
The * key that you use needs an attached policy that enables Identity and Access Management (IAM) user * permissions and allows use of the key.</p> * <p> * Here is a CLI example: * <code>aws dms create-endpoint --endpoint-identifier <i>value</i> --endpoint-type target --engine-name s3 --s3-settings ServiceAccessRoleArn=<i>value</i>,BucketFolder=<i>value</i>,BucketName=<i>value</i>,EncryptionMode=SSE_KMS,ServerSideEncryptionKmsKeyId=<i>value</i> </code> */ public String getServerSideEncryptionKmsKeyId() { return this.serverSideEncryptionKmsKeyId; } /** * <p> * If you are using <code>SSE_KMS</code> for the <code>EncryptionMode</code>, provide the KMS key ID. The key that * you use needs an attached policy that enables Identity and Access Management (IAM) user permissions and allows * use of the key. * </p> * <p> * Here is a CLI example: * <code>aws dms create-endpoint --endpoint-identifier <i>value</i> --endpoint-type target --engine-name s3 --s3-settings ServiceAccessRoleArn=<i>value</i>,BucketFolder=<i>value</i>,BucketName=<i>value</i>,EncryptionMode=SSE_KMS,ServerSideEncryptionKmsKeyId=<i>value</i> </code> * </p> * * @param serverSideEncryptionKmsKeyId * If you are using <code>SSE_KMS</code> for the <code>EncryptionMode</code>, provide the KMS key ID. The key * that you use needs an attached policy that enables Identity and Access Management (IAM) user permissions * and allows use of the key.</p> * <p> * Here is a CLI example: * <code>aws dms create-endpoint --endpoint-identifier <i>value</i> --endpoint-type target --engine-name s3 --s3-settings ServiceAccessRoleArn=<i>value</i>,BucketFolder=<i>value</i>,BucketName=<i>value</i>,EncryptionMode=SSE_KMS,ServerSideEncryptionKmsKeyId=<i>value</i> </code> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withServerSideEncryptionKmsKeyId(String serverSideEncryptionKmsKeyId) { setServerSideEncryptionKmsKeyId(serverSideEncryptionKmsKeyId); return this; } /** * <p> * The format of the data that you want to use for output. You can choose one of the following: * </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * </ul> * * @param dataFormat * The format of the data that you want to use for output. You can choose one of the following: </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * @see DataFormatValue */ public void setDataFormat(String dataFormat) { this.dataFormat = dataFormat; } /** * <p> * The format of the data that you want to use for output. You can choose one of the following: * </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * </ul> * * @return The format of the data that you want to use for output. 
You can choose one of the following: </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features * efficient compression and provides faster query response. * </p> * </li> * @see DataFormatValue */ public String getDataFormat() { return this.dataFormat; } /** * <p> * The format of the data that you want to use for output. You can choose one of the following: * </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * </ul> * * @param dataFormat * The format of the data that you want to use for output. You can choose one of the following: </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. * @see DataFormatValue */ public S3Settings withDataFormat(String dataFormat) { setDataFormat(dataFormat); return this; } /** * <p> * The format of the data that you want to use for output. You can choose one of the following: * </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * </ul> * * @param dataFormat * The format of the data that you want to use for output. You can choose one of the following: </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * @see DataFormatValue */ public void setDataFormat(DataFormatValue dataFormat) { withDataFormat(dataFormat); } /** * <p> * The format of the data that you want to use for output. You can choose one of the following: * </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * </ul> * * @param dataFormat * The format of the data that you want to use for output. You can choose one of the following: </p> * <ul> * <li> * <p> * <code>csv</code> : This is a row-based file format with comma-separated values (.csv). * </p> * </li> * <li> * <p> * <code>parquet</code> : Apache Parquet (.parquet) is a columnar storage file format that features efficient * compression and provides faster query response. * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. 
* @see DataFormatValue */ public S3Settings withDataFormat(DataFormatValue dataFormat) { this.dataFormat = dataFormat.toString(); return this; } /** * <p> * The type of encoding you are using: * </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated values * more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The dictionary is * stored in a dictionary page for each column chunk. * </p> * </li> * </ul> * * @param encodingType * The type of encoding you are using: </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated * values more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The * dictionary is stored in a dictionary page for each column chunk. * </p> * </li> * @see EncodingTypeValue */ public void setEncodingType(String encodingType) { this.encodingType = encodingType; } /** * <p> * The type of encoding you are using: * </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated values * more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The dictionary is * stored in a dictionary page for each column chunk. * </p> * </li> * </ul> * * @return The type of encoding you are using: </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated * values more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The * dictionary is stored in a dictionary page for each column chunk. * </p> * </li> * @see EncodingTypeValue */ public String getEncodingType() { return this.encodingType; } /** * <p> * The type of encoding you are using: * </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated values * more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The dictionary is * stored in a dictionary page for each column chunk. * </p> * </li> * </ul> * * @param encodingType * The type of encoding you are using: </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated * values more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. 
* </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The * dictionary is stored in a dictionary page for each column chunk. * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. * @see EncodingTypeValue */ public S3Settings withEncodingType(String encodingType) { setEncodingType(encodingType); return this; } /** * <p> * The type of encoding you are using: * </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated values * more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The dictionary is * stored in a dictionary page for each column chunk. * </p> * </li> * </ul> * * @param encodingType * The type of encoding you are using: </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated * values more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The * dictionary is stored in a dictionary page for each column chunk. * </p> * </li> * @see EncodingTypeValue */ public void setEncodingType(EncodingTypeValue encodingType) { withEncodingType(encodingType); } /** * <p> * The type of encoding you are using: * </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated values * more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The dictionary is * stored in a dictionary page for each column chunk. * </p> * </li> * </ul> * * @param encodingType * The type of encoding you are using: </p> * <ul> * <li> * <p> * <code>RLE_DICTIONARY</code> uses a combination of bit-packing and run-length encoding to store repeated * values more efficiently. This is the default. * </p> * </li> * <li> * <p> * <code>PLAIN</code> doesn't use encoding at all. Values are stored as they are. * </p> * </li> * <li> * <p> * <code>PLAIN_DICTIONARY</code> builds a dictionary of the values encountered in a given column. The * dictionary is stored in a dictionary page for each column chunk. * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. * @see EncodingTypeValue */ public S3Settings withEncodingType(EncodingTypeValue encodingType) { this.encodingType = encodingType.toString(); return this; } /** * <p> * The maximum size of an encoded dictionary page of a column. If the dictionary page exceeds this, this column is * stored using an encoding type of <code>PLAIN</code>. This parameter defaults to 1024 * 1024 bytes (1 MiB), the * maximum size of a dictionary page before it reverts to <code>PLAIN</code> encoding. This size is used for * .parquet file format only. * </p> * * @param dictPageSizeLimit * The maximum size of an encoded dictionary page of a column. 
If the dictionary page exceeds this, this * column is stored using an encoding type of <code>PLAIN</code>. This parameter defaults to 1024 * 1024 * bytes (1 MiB), the maximum size of a dictionary page before it reverts to <code>PLAIN</code> encoding. * This size is used for .parquet file format only. */ public void setDictPageSizeLimit(Integer dictPageSizeLimit) { this.dictPageSizeLimit = dictPageSizeLimit; } /** * <p> * The maximum size of an encoded dictionary page of a column. If the dictionary page exceeds this, this column is * stored using an encoding type of <code>PLAIN</code>. This parameter defaults to 1024 * 1024 bytes (1 MiB), the * maximum size of a dictionary page before it reverts to <code>PLAIN</code> encoding. This size is used for * .parquet file format only. * </p> * * @return The maximum size of an encoded dictionary page of a column. If the dictionary page exceeds this, this * column is stored using an encoding type of <code>PLAIN</code>. This parameter defaults to 1024 * 1024 * bytes (1 MiB), the maximum size of a dictionary page before it reverts to <code>PLAIN</code> encoding. * This size is used for .parquet file format only. */ public Integer getDictPageSizeLimit() { return this.dictPageSizeLimit; } /** * <p> * The maximum size of an encoded dictionary page of a column. If the dictionary page exceeds this, this column is * stored using an encoding type of <code>PLAIN</code>. This parameter defaults to 1024 * 1024 bytes (1 MiB), the * maximum size of a dictionary page before it reverts to <code>PLAIN</code> encoding. This size is used for * .parquet file format only. * </p> * * @param dictPageSizeLimit * The maximum size of an encoded dictionary page of a column. If the dictionary page exceeds this, this * column is stored using an encoding type of <code>PLAIN</code>. This parameter defaults to 1024 * 1024 * bytes (1 MiB), the maximum size of a dictionary page before it reverts to <code>PLAIN</code> encoding. * This size is used for .parquet file format only. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withDictPageSizeLimit(Integer dictPageSizeLimit) { setDictPageSizeLimit(dictPageSizeLimit); return this; } /** * <p> * The number of rows in a row group. A smaller row group size provides faster reads. But as the number of row * groups grows, writes become slower. This parameter defaults to 10,000 rows. This number is used for .parquet * file format only. * </p> * <p> * If you choose a value larger than the maximum, <code>RowGroupLength</code> is set to the max row group length in * bytes (64 * 1024 * 1024). * </p> * * @param rowGroupLength * The number of rows in a row group. A smaller row group size provides faster reads. But as the number of * row groups grows, writes become slower. This parameter defaults to 10,000 rows. This number is used * for .parquet file format only. </p> * <p> * If you choose a value larger than the maximum, <code>RowGroupLength</code> is set to the max row group * length in bytes (64 * 1024 * 1024). */ public void setRowGroupLength(Integer rowGroupLength) { this.rowGroupLength = rowGroupLength; } /** * <p> * The number of rows in a row group. A smaller row group size provides faster reads. But as the number of row * groups grows, writes become slower. This parameter defaults to 10,000 rows. This number is used for .parquet * file format only.
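 * </p>
 * <p>
 * For example, an illustrative Parquet target that simply makes the documented defaults explicit could be
 * configured as follows. The string form of the data format is used; the <code>DataFormatValue</code> overloads
 * defined on this class work as well.
 * </p>
 * <pre>{@code
 * // Illustrative only: these values restate the documented defaults.
 * S3Settings parquetSettings = new S3Settings()
 *         .withDataFormat("parquet")
 *         .withRowGroupLength(10000)
 *         .withDataPageSize(1024 * 1024)
 *         .withDictPageSizeLimit(1024 * 1024)
 *         .withEnableStatistics(true);
 * }</pre>
 * <p>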
* </p> * <p> * If you choose a value larger than the maximum, <code>RowGroupLength</code> is set to the max row group length in * bytes (64 * 1024 * 1024). * </p> * * @return The number of rows in a row group. A smaller row group size provides faster reads. But as the number of * row groups grows, writes become slower. This parameter defaults to 10,000 rows. This number is used * for .parquet file format only. </p> * <p> * If you choose a value larger than the maximum, <code>RowGroupLength</code> is set to the max row group * length in bytes (64 * 1024 * 1024). */ public Integer getRowGroupLength() { return this.rowGroupLength; } /** * <p> * The number of rows in a row group. A smaller row group size provides faster reads. But as the number of row * groups grows, writes become slower. This parameter defaults to 10,000 rows. This number is used for .parquet * file format only. * </p> * <p> * If you choose a value larger than the maximum, <code>RowGroupLength</code> is set to the max row group length in * bytes (64 * 1024 * 1024). * </p> * * @param rowGroupLength * The number of rows in a row group. A smaller row group size provides faster reads. But as the number of * row groups grows, writes become slower. This parameter defaults to 10,000 rows. This number is used * for .parquet file format only. </p> * <p> * If you choose a value larger than the maximum, <code>RowGroupLength</code> is set to the max row group * length in bytes (64 * 1024 * 1024). * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withRowGroupLength(Integer rowGroupLength) { setRowGroupLength(rowGroupLength); return this; } /** * <p> * The size of one data page in bytes. This parameter defaults to 1024 * 1024 bytes (1 MiB). This number is used for * .parquet file format only. * </p> * * @param dataPageSize * The size of one data page in bytes. This parameter defaults to 1024 * 1024 bytes (1 MiB). This number is * used for .parquet file format only. */ public void setDataPageSize(Integer dataPageSize) { this.dataPageSize = dataPageSize; } /** * <p> * The size of one data page in bytes. This parameter defaults to 1024 * 1024 bytes (1 MiB). This number is used for * .parquet file format only. * </p> * * @return The size of one data page in bytes. This parameter defaults to 1024 * 1024 bytes (1 MiB). This number is * used for .parquet file format only. */ public Integer getDataPageSize() { return this.dataPageSize; } /** * <p> * The size of one data page in bytes. This parameter defaults to 1024 * 1024 bytes (1 MiB). This number is used for * .parquet file format only. * </p> * * @param dataPageSize * The size of one data page in bytes. This parameter defaults to 1024 * 1024 bytes (1 MiB). This number is * used for .parquet file format only. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withDataPageSize(Integer dataPageSize) { setDataPageSize(dataPageSize); return this; } /** * <p> * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * </p> * * @param parquetVersion * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>.
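 * <p>
 * For example, an illustrative call that opts into the newer format version, using the value exactly as it is
 * written above; the <code>ParquetVersionValue</code> overloads defined on this class work as well:
 * </p>
 * <pre>{@code
 * // Illustrative only; pairs naturally with a "parquet" DataFormat.
 * S3Settings versionSettings = new S3Settings().withParquetVersion("parquet_2_0");
 * }</pre>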
* @see ParquetVersionValue */ public void setParquetVersion(String parquetVersion) { this.parquetVersion = parquetVersion; } /** * <p> * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * </p> * * @return The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * @see ParquetVersionValue */ public String getParquetVersion() { return this.parquetVersion; } /** * <p> * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * </p> * * @param parquetVersion * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * @return Returns a reference to this object so that method calls can be chained together. * @see ParquetVersionValue */ public S3Settings withParquetVersion(String parquetVersion) { setParquetVersion(parquetVersion); return this; } /** * <p> * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * </p> * * @param parquetVersion * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * @see ParquetVersionValue */ public void setParquetVersion(ParquetVersionValue parquetVersion) { withParquetVersion(parquetVersion); } /** * <p> * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * </p> * * @param parquetVersion * The version of the Apache Parquet format that you want to use: <code>parquet_1_0</code> (the default) or * <code>parquet_2_0</code>. * @return Returns a reference to this object so that method calls can be chained together. * @see ParquetVersionValue */ public S3Settings withParquetVersion(ParquetVersionValue parquetVersion) { this.parquetVersion = parquetVersion.toString(); return this; } /** * <p> * A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable statistics, * <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, <code>MAX</code>, and * <code>MIN</code> values. This parameter defaults to <code>true</code>. This value is used for .parquet file * format only. * </p> * * @param enableStatistics * A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable * statistics, <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, * <code>MAX</code>, and <code>MIN</code> values. This parameter defaults to <code>true</code>. This value is * used for .parquet file format only. */ public void setEnableStatistics(Boolean enableStatistics) { this.enableStatistics = enableStatistics; } /** * <p> * A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable statistics, * <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, <code>MAX</code>, and * <code>MIN</code> values. This parameter defaults to <code>true</code>. This value is used for .parquet file * format only. * </p> * * @return A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable * statistics, <code>false</code> to disable. 
Statistics include <code>NULL</code>, <code>DISTINCT</code>, * <code>MAX</code>, and <code>MIN</code> values. This parameter defaults to <code>true</code>. This value * is used for .parquet file format only. */ public Boolean getEnableStatistics() { return this.enableStatistics; } /** * <p> * A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable statistics, * <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, <code>MAX</code>, and * <code>MIN</code> values. This parameter defaults to <code>true</code>. This value is used for .parquet file * format only. * </p> * * @param enableStatistics * A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable * statistics, <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, * <code>MAX</code>, and <code>MIN</code> values. This parameter defaults to <code>true</code>. This value is * used for .parquet file format only. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withEnableStatistics(Boolean enableStatistics) { setEnableStatistics(enableStatistics); return this; } /** * <p> * A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable statistics, * <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, <code>MAX</code>, and * <code>MIN</code> values. This parameter defaults to <code>true</code>. This value is used for .parquet file * format only. * </p> * * @return A value that enables statistics for Parquet pages and row groups. Choose <code>true</code> to enable * statistics, <code>false</code> to disable. Statistics include <code>NULL</code>, <code>DISTINCT</code>, * <code>MAX</code>, and <code>MIN</code> values. This parameter defaults to <code>true</code>. This value * is used for .parquet file format only. */ public Boolean isEnableStatistics() { return this.enableStatistics; } /** * <p> * A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output files only * to indicate how the rows were added to the source database. * </p> * <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information is * recorded in these output files for a full load to indicate that the rows were inserted at the source database. If * <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT is recorded as an I * annotation in the first field of the .csv file. This allows the format of your target records from a full load to * be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the <code>CdcInsertsAndUpdates</code> * parameters for output to .csv files only. For more information about how these settings work together, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps"> * Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. 
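 * </p>
 * <p>
 * For example, with this setting turned on, a full-load row lands in the .csv output with the <code>I</code>
 * annotation in the first field, alongside the migrated column values (illustrative data only):
 * </p>
 * <pre>{@code
 * // Illustrative only: enable the annotation on the settings object ...
 * S3Settings fullLoadSettings = new S3Settings().withIncludeOpForFullLoad(true);
 * // ... producing .csv records shaped like:  I,101,SMITH,BOB
 * }</pre>
 * <p>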
* </p> * </note> * * @param includeOpForFullLoad * A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output * files only to indicate how the rows were added to the source database.</p> <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information * is recorded in these output files for a full load to indicate that the rows were inserted at the source * database. If <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT * is recorded as an I annotation in the first field of the .csv file. This allows the format of your target * records from a full load to be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the <code>CdcInsertsAndUpdates</code> * parameters for output to .csv files only. For more information about how these settings work together, see * <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> */ public void setIncludeOpForFullLoad(Boolean includeOpForFullLoad) { this.includeOpForFullLoad = includeOpForFullLoad; } /** * <p> * A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output files only * to indicate how the rows were added to the source database. * </p> * <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information is * recorded in these output files for a full load to indicate that the rows were inserted at the source database. If * <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT is recorded as an I * annotation in the first field of the .csv file. This allows the format of your target records from a full load to * be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the <code>CdcInsertsAndUpdates</code> * parameters for output to .csv files only. For more information about how these settings work together, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps"> * Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * </note> * * @return A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output * files only to indicate how the rows were added to the source database.</p> <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information * is recorded in these output files for a full load to indicate that the rows were inserted at the source * database. If <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT * is recorded as an I annotation in the first field of the .csv file. 
This allows the format of your target * records from a full load to be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the * <code>CdcInsertsAndUpdates</code> parameters for output to .csv files only. For more information about * how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> */ public Boolean getIncludeOpForFullLoad() { return this.includeOpForFullLoad; } /** * <p> * A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output files only * to indicate how the rows were added to the source database. * </p> * <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information is * recorded in these output files for a full load to indicate that the rows were inserted at the source database. If * <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT is recorded as an I * annotation in the first field of the .csv file. This allows the format of your target records from a full load to * be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the <code>CdcInsertsAndUpdates</code> * parameters for output to .csv files only. For more information about how these settings work together, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps"> * Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * </note> * * @param includeOpForFullLoad * A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output * files only to indicate how the rows were added to the source database.</p> <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information * is recorded in these output files for a full load to indicate that the rows were inserted at the source * database. If <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT * is recorded as an I annotation in the first field of the .csv file. This allows the format of your target * records from a full load to be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the <code>CdcInsertsAndUpdates</code> * parameters for output to .csv files only. For more information about how these settings work together, see * <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * @return Returns a reference to this object so that method calls can be chained together. 
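     * For example, a minimal hypothetical sketch of that chaining, assuming the usual public no-argument constructor
     * of this model class (the values shown are illustrative only, not recommendations):
     * <pre>{@code
     * // illustrative values only
     * S3Settings settings = new S3Settings()
     *         .withIncludeOpForFullLoad(true)   // annotate full-load rows with a leading "I" in .csv output
     *         .withCdcInsertsOnly(false);       // keep the default so CDC rows still carry I/U/D annotations
     * }</pre>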
*/ public S3Settings withIncludeOpForFullLoad(Boolean includeOpForFullLoad) { setIncludeOpForFullLoad(includeOpForFullLoad); return this; } /** * <p> * A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output files only * to indicate how the rows were added to the source database. * </p> * <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information is * recorded in these output files for a full load to indicate that the rows were inserted at the source database. If * <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT is recorded as an I * annotation in the first field of the .csv file. This allows the format of your target records from a full load to * be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the <code>CdcInsertsAndUpdates</code> * parameters for output to .csv files only. For more information about how these settings work together, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps"> * Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * </note> * * @return A value that enables a full load to write INSERT operations to the comma-separated value (.csv) output * files only to indicate how the rows were added to the source database.</p> <note> * <p> * DMS supports the <code>IncludeOpForFullLoad</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * For full load, records can only be inserted. By default (the <code>false</code> setting), no information * is recorded in these output files for a full load to indicate that the rows were inserted at the source * database. If <code>IncludeOpForFullLoad</code> is set to <code>true</code> or <code>y</code>, the INSERT * is recorded as an I annotation in the first field of the .csv file. This allows the format of your target * records from a full load to be consistent with the target records from a CDC load. * </p> * <note> * <p> * This setting works together with the <code>CdcInsertsOnly</code> and the * <code>CdcInsertsAndUpdates</code> parameters for output to .csv files only. For more information about * how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> */ public Boolean isIncludeOpForFullLoad() { return this.includeOpForFullLoad; } /** * <p> * A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar storage * (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv or .parquet * record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate whether the row was * inserted, updated, or deleted at the source database for a CDC load to the target. * </p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the source * database are migrated to the .csv or .parquet file. 
For .csv format only, how these INSERTs are recorded depends * on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT operation at the * source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC record is written without a * first field to indicate the INSERT operation at the source. For more information about how these settings work * together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. * </p> * </note> * * @param cdcInsertsOnly * A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar * storage (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv or * .parquet record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate whether * the row was inserted, updated, or deleted at the source database for a CDC load to the target.</p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the source * database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are recorded * depends on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT operation at the * source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC record is written * without a first field to indicate the INSERT operation at the source. For more information about how these * settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. * </p> */ public void setCdcInsertsOnly(Boolean cdcInsertsOnly) { this.cdcInsertsOnly = cdcInsertsOnly; } /** * <p> * A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar storage * (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv or .parquet * record contains the letter I (INSERT), U (UPDATE), or D (DELETE). 
These values indicate whether the row was * inserted, updated, or deleted at the source database for a CDC load to the target. * </p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the source * database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are recorded depends * on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT operation at the * source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC record is written without a * first field to indicate the INSERT operation at the source. For more information about how these settings work * together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. * </p> * </note> * * @return A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar * storage (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv * or .parquet record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate * whether the row was inserted, updated, or deleted at the source database for a CDC load to the * target.</p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the * source database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are * recorded depends on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> * is set to <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT * operation at the source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC * record is written without a first field to indicate the INSERT operation at the source. For more * information about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. 
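     * As a purely illustrative sketch, an inserts-only configuration might therefore be written as follows, assuming
     * the usual public no-argument constructor of this model class (the values are placeholders):
     * </p>
     * <pre>{@code
     * S3Settings settings = new S3Settings()
     *         .withCdcInsertsOnly(true)          // migrate only INSERTs during CDC (hypothetical choice)
     *         .withIncludeOpForFullLoad(true);   // keep the leading "I" annotation in .csv output
     * // CdcInsertsAndUpdates is deliberately left unset; it must not also be true.
     * }</pre>
     * <p>
     * None of the values above are recommendations; they only demonstrate the mutual exclusivity described here.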
* </p> */ public Boolean getCdcInsertsOnly() { return this.cdcInsertsOnly; } /** * <p> * A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar storage * (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv or .parquet * record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate whether the row was * inserted, updated, or deleted at the source database for a CDC load to the target. * </p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the source * database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are recorded depends * on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT operation at the * source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC record is written without a * first field to indicate the INSERT operation at the source. For more information about how these settings work * together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. * </p> * </note> * * @param cdcInsertsOnly * A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar * storage (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv or * .parquet record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate whether * the row was inserted, updated, or deleted at the source database for a CDC load to the target.</p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the source * database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are recorded * depends on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT operation at the * source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC record is written * without a first field to indicate the INSERT operation at the source. For more information about how these * settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. 
* </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCdcInsertsOnly(Boolean cdcInsertsOnly) { setCdcInsertsOnly(cdcInsertsOnly); return this; } /** * <p> * A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar storage * (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv or .parquet * record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate whether the row was * inserted, updated, or deleted at the source database for a CDC load to the target. * </p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the source * database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are recorded depends * on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT operation at the * source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC record is written without a * first field to indicate the INSERT operation at the source. For more information about how these settings work * together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. * </p> * </note> * * @return A value that enables a change data capture (CDC) load to write only INSERT operations to .csv or columnar * storage (.parquet) output files. By default (the <code>false</code> setting), the first field in a .csv * or .parquet record contains the letter I (INSERT), U (UPDATE), or D (DELETE). These values indicate * whether the row was inserted, updated, or deleted at the source database for a CDC load to the * target.</p> * <p> * If <code>CdcInsertsOnly</code> is set to <code>true</code> or <code>y</code>, only INSERTs from the * source database are migrated to the .csv or .parquet file. For .csv format only, how these INSERTs are * recorded depends on the value of <code>IncludeOpForFullLoad</code>. If <code>IncludeOpForFullLoad</code> * is set to <code>true</code>, the first field of every CDC record is set to I to indicate the INSERT * operation at the source. If <code>IncludeOpForFullLoad</code> is set to <code>false</code>, every CDC * record is written without a first field to indicate the INSERT operation at the source. 
For more * information about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the interaction described preceding between the <code>CdcInsertsOnly</code> and * <code>IncludeOpForFullLoad</code> parameters in versions 3.1.4 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. * </p> */ public Boolean isCdcInsertsOnly() { return this.cdcInsertsOnly; } /** * <p> * A value that when nonblank causes DMS to add a column with timestamp information to the endpoint data for an * Amazon S3 target. * </p> * <note> * <p> * DMS supports the <code>TimestampColumnName</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * DMS includes an additional <code>STRING</code> column in the .csv or .parquet object files of your migrated data * when you set <code>TimestampColumnName</code> to a nonblank value. * </p> * <p> * For a full load, each row of this timestamp column contains a timestamp for when the data was transferred from * the source to the target by DMS. * </p> * <p> * For a change data capture (CDC) load, each row of the timestamp column contains the timestamp for the commit of * that row in the source database. * </p> * <p> * The string format for this timestamp column value is <code>yyyy-MM-dd HH:mm:ss.SSSSSS</code>. By default, the * precision of this value is in microseconds. For a CDC load, the rounding of the precision depends on the commit * timestamp supported by DMS for the source database. * </p> * <p> * When the <code>AddColumnName</code> parameter is set to <code>true</code>, DMS also includes a name for the * timestamp column that you set with <code>TimestampColumnName</code>. * </p> * * @param timestampColumnName * A value that when nonblank causes DMS to add a column with timestamp information to the endpoint data for * an Amazon S3 target.</p> <note> * <p> * DMS supports the <code>TimestampColumnName</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * DMS includes an additional <code>STRING</code> column in the .csv or .parquet object files of your * migrated data when you set <code>TimestampColumnName</code> to a nonblank value. * </p> * <p> * For a full load, each row of this timestamp column contains a timestamp for when the data was transferred * from the source to the target by DMS. * </p> * <p> * For a change data capture (CDC) load, each row of the timestamp column contains the timestamp for the * commit of that row in the source database. * </p> * <p> * The string format for this timestamp column value is <code>yyyy-MM-dd HH:mm:ss.SSSSSS</code>. By default, * the precision of this value is in microseconds. For a CDC load, the rounding of the precision depends on * the commit timestamp supported by DMS for the source database. * </p> * <p> * When the <code>AddColumnName</code> parameter is set to <code>true</code>, DMS also includes a name for * the timestamp column that you set with <code>TimestampColumnName</code>. 
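     * As a minimal sketch, assuming the usual public no-argument constructor of this model class, such a column could
     * be named like this (the column name below is hypothetical, not a recommendation):
     * <pre>{@code
     * S3Settings settings = new S3Settings()
     *         .withTimestampColumnName("dms_commit_ts");   // hypothetical column name added to each target row
     * }</pre>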
*/ public void setTimestampColumnName(String timestampColumnName) { this.timestampColumnName = timestampColumnName; } /** * <p> * A value that when nonblank causes DMS to add a column with timestamp information to the endpoint data for an * Amazon S3 target. * </p> * <note> * <p> * DMS supports the <code>TimestampColumnName</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * DMS includes an additional <code>STRING</code> column in the .csv or .parquet object files of your migrated data * when you set <code>TimestampColumnName</code> to a nonblank value. * </p> * <p> * For a full load, each row of this timestamp column contains a timestamp for when the data was transferred from * the source to the target by DMS. * </p> * <p> * For a change data capture (CDC) load, each row of the timestamp column contains the timestamp for the commit of * that row in the source database. * </p> * <p> * The string format for this timestamp column value is <code>yyyy-MM-dd HH:mm:ss.SSSSSS</code>. By default, the * precision of this value is in microseconds. For a CDC load, the rounding of the precision depends on the commit * timestamp supported by DMS for the source database. * </p> * <p> * When the <code>AddColumnName</code> parameter is set to <code>true</code>, DMS also includes a name for the * timestamp column that you set with <code>TimestampColumnName</code>. * </p> * * @return A value that when nonblank causes DMS to add a column with timestamp information to the endpoint data for * an Amazon S3 target.</p> <note> * <p> * DMS supports the <code>TimestampColumnName</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * DMS includes an additional <code>STRING</code> column in the .csv or .parquet object files of your * migrated data when you set <code>TimestampColumnName</code> to a nonblank value. * </p> * <p> * For a full load, each row of this timestamp column contains a timestamp for when the data was transferred * from the source to the target by DMS. * </p> * <p> * For a change data capture (CDC) load, each row of the timestamp column contains the timestamp for the * commit of that row in the source database. * </p> * <p> * The string format for this timestamp column value is <code>yyyy-MM-dd HH:mm:ss.SSSSSS</code>. By default, * the precision of this value is in microseconds. For a CDC load, the rounding of the precision depends on * the commit timestamp supported by DMS for the source database. * </p> * <p> * When the <code>AddColumnName</code> parameter is set to <code>true</code>, DMS also includes a name for * the timestamp column that you set with <code>TimestampColumnName</code>. */ public String getTimestampColumnName() { return this.timestampColumnName; } /** * <p> * A value that when nonblank causes DMS to add a column with timestamp information to the endpoint data for an * Amazon S3 target. * </p> * <note> * <p> * DMS supports the <code>TimestampColumnName</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * DMS includes an additional <code>STRING</code> column in the .csv or .parquet object files of your migrated data * when you set <code>TimestampColumnName</code> to a nonblank value. * </p> * <p> * For a full load, each row of this timestamp column contains a timestamp for when the data was transferred from * the source to the target by DMS. * </p> * <p> * For a change data capture (CDC) load, each row of the timestamp column contains the timestamp for the commit of * that row in the source database. 
* </p> * <p> * The string format for this timestamp column value is <code>yyyy-MM-dd HH:mm:ss.SSSSSS</code>. By default, the * precision of this value is in microseconds. For a CDC load, the rounding of the precision depends on the commit * timestamp supported by DMS for the source database. * </p> * <p> * When the <code>AddColumnName</code> parameter is set to <code>true</code>, DMS also includes a name for the * timestamp column that you set with <code>TimestampColumnName</code>. * </p> * * @param timestampColumnName * A value that when nonblank causes DMS to add a column with timestamp information to the endpoint data for * an Amazon S3 target.</p> <note> * <p> * DMS supports the <code>TimestampColumnName</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * DMS includes an additional <code>STRING</code> column in the .csv or .parquet object files of your * migrated data when you set <code>TimestampColumnName</code> to a nonblank value. * </p> * <p> * For a full load, each row of this timestamp column contains a timestamp for when the data was transferred * from the source to the target by DMS. * </p> * <p> * For a change data capture (CDC) load, each row of the timestamp column contains the timestamp for the * commit of that row in the source database. * </p> * <p> * The string format for this timestamp column value is <code>yyyy-MM-dd HH:mm:ss.SSSSSS</code>. By default, * the precision of this value is in microseconds. For a CDC load, the rounding of the precision depends on * the commit timestamp supported by DMS for the source database. * </p> * <p> * When the <code>AddColumnName</code> parameter is set to <code>true</code>, DMS also includes a name for * the timestamp column that you set with <code>TimestampColumnName</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withTimestampColumnName(String timestampColumnName) { setTimestampColumnName(timestampColumnName); return this; } /** * <p> * A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an Amazon S3 * object file in .parquet format. * </p> * <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes all * <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, DMS writes * them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> values. Set * this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted only if you plan to * query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp column * value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> * </note> * * @param parquetTimestampInMillisecond * A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an * Amazon S3 object file in .parquet format.</p> <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. 
* </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes * all <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, DMS * writes them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> values. * Set this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted only if * you plan to query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp * column value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> */ public void setParquetTimestampInMillisecond(Boolean parquetTimestampInMillisecond) { this.parquetTimestampInMillisecond = parquetTimestampInMillisecond; } /** * <p> * A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an Amazon S3 * object file in .parquet format. * </p> * <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes all * <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, DMS writes * them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> values. Set * this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted only if you plan to * query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp column * value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> * </note> * * @return A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an * Amazon S3 object file in .parquet format.</p> <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes * all <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, * DMS writes them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> * values. Set this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted * only if you plan to query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp * column value that is inserted by setting the <code>TimestampColumnName</code> parameter. 
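     * As an illustrative sketch only, this flag might be combined with the other Parquet-related settings documented
     * in this class, assuming the usual public no-argument constructor (all values are placeholders):
     * </p>
     * <pre>{@code
     * S3Settings settings = new S3Settings()
     *         .withParquetVersion("parquet_2_0")            // version string as documented above; "parquet_1_0" is the default
     *         .withEnableStatistics(true)                   // Parquet page and row-group statistics (the default)
     *         .withParquetTimestampInMillisecond(true);     // millisecond TIMESTAMP precision for Athena/Glue
     * }</pre>
     * <p>
     * The values above are illustrative only and are not recommendations.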
* </p> */ public Boolean getParquetTimestampInMillisecond() { return this.parquetTimestampInMillisecond; } /** * <p> * A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an Amazon S3 * object file in .parquet format. * </p> * <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes all * <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, DMS writes * them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> values. Set * this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted only if you plan to * query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp column * value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> * </note> * * @param parquetTimestampInMillisecond * A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an * Amazon S3 object file in .parquet format.</p> <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes * all <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, DMS * writes them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> values. * Set this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted only if * you plan to query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp * column value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withParquetTimestampInMillisecond(Boolean parquetTimestampInMillisecond) { setParquetTimestampInMillisecond(parquetTimestampInMillisecond); return this; } /** * <p> * A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an Amazon S3 * object file in .parquet format. * </p> * <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes all * <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, DMS writes * them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> values. 
Set * this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted only if you plan to * query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp column * value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> * </note> * * @return A value that specifies the precision of any <code>TIMESTAMP</code> column values that are written to an * Amazon S3 object file in .parquet format.</p> <note> * <p> * DMS supports the <code>ParquetTimestampInMillisecond</code> parameter in versions 3.1.4 and later. * </p> * </note> * <p> * When <code>ParquetTimestampInMillisecond</code> is set to <code>true</code> or <code>y</code>, DMS writes * all <code>TIMESTAMP</code> columns in a .parquet formatted file with millisecond precision. Otherwise, * DMS writes them with microsecond precision. * </p> * <p> * Currently, Amazon Athena and Glue can handle only millisecond precision for <code>TIMESTAMP</code> * values. Set this parameter to <code>true</code> for S3 endpoint object files that are .parquet formatted * only if you plan to query or process the data with Athena or Glue. * </p> * <note> * <p> * DMS writes any <code>TIMESTAMP</code> column values written to an S3 file in .csv format with microsecond * precision. * </p> * <p> * Setting <code>ParquetTimestampInMillisecond</code> has no effect on the string format of the timestamp * column value that is inserted by setting the <code>TimestampColumnName</code> parameter. * </p> */ public Boolean isParquetTimestampInMillisecond() { return this.parquetTimestampInMillisecond; } /** * <p> * A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or .parquet * (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs from * the source database are migrated to the .csv or .parquet file. * </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to <code>true</code>, * the first field of every CDC record is set to either <code>I</code> or <code>U</code> to indicate INSERT and * UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set to <code>false</code>, CDC * records are written without an indication of INSERT or UPDATE operations at the source. For more information * about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. 
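     * A hypothetical sketch of an inserts-and-updates configuration that respects this constraint, assuming the usual
     * public no-argument constructor of this model class (the values are placeholders):
     * </p>
     * <pre>{@code
     * S3Settings settings = new S3Settings()
     *         .withCdcInsertsAndUpdates(true)    // write only INSERTs and UPDATEs during CDC (hypothetical choice)
     *         .withIncludeOpForFullLoad(true);   // keep the leading I/U annotation in .csv output
     * // CdcInsertsOnly is deliberately left unset; it cannot also be true.
     * }</pre>
     * <p>
     * The values above only illustrate the constraint described in this note.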
* </p> * </note> * * @param cdcInsertsAndUpdates * A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or * .parquet (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs * from the source database are migrated to the .csv or .parquet file. </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to either <code>I</code> or <code>U</code> * to indicate INSERT and UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set to * <code>false</code>, CDC records are written without an indication of INSERT or UPDATE operations at the * source. For more information about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. * </p> */ public void setCdcInsertsAndUpdates(Boolean cdcInsertsAndUpdates) { this.cdcInsertsAndUpdates = cdcInsertsAndUpdates; } /** * <p> * A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or .parquet * (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs from * the source database are migrated to the .csv or .parquet file. * </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to <code>true</code>, * the first field of every CDC record is set to either <code>I</code> or <code>U</code> to indicate INSERT and * UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set to <code>false</code>, CDC * records are written without an indication of INSERT or UPDATE operations at the source. For more information * about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. 
* </p> * </note> * * @return A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or * .parquet (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs * from the source database are migrated to the .csv or .parquet file. </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to either <code>I</code> or <code>U</code> * to indicate INSERT and UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set * to <code>false</code>, CDC records are written without an indication of INSERT or UPDATE operations at * the source. For more information about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. * </p> */ public Boolean getCdcInsertsAndUpdates() { return this.cdcInsertsAndUpdates; } /** * <p> * A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or .parquet * (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs from * the source database are migrated to the .csv or .parquet file. * </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to <code>true</code>, * the first field of every CDC record is set to either <code>I</code> or <code>U</code> to indicate INSERT and * UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set to <code>false</code>, CDC * records are written without an indication of INSERT or UPDATE operations at the source. For more information * about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. 
* </p> * </note> * * @param cdcInsertsAndUpdates * A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or * .parquet (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs * from the source database are migrated to the .csv or .parquet file. </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to either <code>I</code> or <code>U</code> * to indicate INSERT and UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set to * <code>false</code>, CDC records are written without an indication of INSERT or UPDATE operations at the * source. For more information about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCdcInsertsAndUpdates(Boolean cdcInsertsAndUpdates) { setCdcInsertsAndUpdates(cdcInsertsAndUpdates); return this; } /** * <p> * A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or .parquet * (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs from * the source database are migrated to the .csv or .parquet file. * </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to <code>true</code>, * the first field of every CDC record is set to either <code>I</code> or <code>U</code> to indicate INSERT and * UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set to <code>false</code>, CDC * records are written without an indication of INSERT or UPDATE operations at the source. For more information * about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> for the * same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to <code>true</code> * for the same endpoint, but not both. 
* </p> * </note> * * @return A value that enables a change data capture (CDC) load to write INSERT and UPDATE operations to .csv or * .parquet (columnar storage) output files. The default setting is <code>false</code>, but when * <code>CdcInsertsAndUpdates</code> is set to <code>true</code> or <code>y</code>, only INSERTs and UPDATEs * from the source database are migrated to the .csv or .parquet file. </p> * <p> * For .csv file format only, how these INSERTs and UPDATEs are recorded depends on the value of the * <code>IncludeOpForFullLoad</code> parameter. If <code>IncludeOpForFullLoad</code> is set to * <code>true</code>, the first field of every CDC record is set to either <code>I</code> or <code>U</code> * to indicate INSERT and UPDATE operations at the source. But if <code>IncludeOpForFullLoad</code> is set * to <code>false</code>, CDC records are written without an indication of INSERT or UPDATE operations at * the source. For more information about how these settings work together, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.Configuring.InsertOps" * >Indicating Source DB Operations in Migrated S3 Data</a> in the <i>Database Migration Service User * Guide.</i>. * </p> * <note> * <p> * DMS supports the use of the <code>CdcInsertsAndUpdates</code> parameter in versions 3.3.1 and later. * </p> * <p> * <code>CdcInsertsOnly</code> and <code>CdcInsertsAndUpdates</code> can't both be set to <code>true</code> * for the same endpoint. Set either <code>CdcInsertsOnly</code> or <code>CdcInsertsAndUpdates</code> to * <code>true</code> for the same endpoint, but not both. * </p> */ public Boolean isCdcInsertsAndUpdates() { return this.cdcInsertsAndUpdates; } /** * <p> * When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit dates. The * default value is <code>false</code>. For more information about date-based folder partitioning, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning">Using * date-based folder partitioning</a>. * </p> * * @param datePartitionEnabled * When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit * dates. The default value is <code>false</code>. For more information about date-based folder partitioning, * see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning" * >Using date-based folder partitioning</a>. */ public void setDatePartitionEnabled(Boolean datePartitionEnabled) { this.datePartitionEnabled = datePartitionEnabled; } /** * <p> * When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit dates. The * default value is <code>false</code>. For more information about date-based folder partitioning, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning">Using * date-based folder partitioning</a>. * </p> * * @return When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit * dates. The default value is <code>false</code>. For more information about date-based folder * partitioning, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning" * >Using date-based folder partitioning</a>. 
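     *         As a minimal sketch, date-based partitioning might be enabled together with the related sequence and
     *         delimiter settings documented later in this class, assuming the usual public no-argument constructor
     *         (the values shown are the documented defaults and are illustrative only):
     *         <pre>{@code
     * S3Settings settings = new S3Settings()
     *         .withDatePartitionEnabled(true)
     *         .withDatePartitionSequence("YYYYMMDD")   // documented default sequence
     *         .withDatePartitionDelimiter("SLASH");    // documented default delimiter
     * }</pre>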
*/ public Boolean getDatePartitionEnabled() { return this.datePartitionEnabled; } /** * <p> * When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit dates. The * default value is <code>false</code>. For more information about date-based folder partitioning, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning">Using * date-based folder partitioning</a>. * </p> * * @param datePartitionEnabled * When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit * dates. The default value is <code>false</code>. For more information about date-based folder partitioning, * see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning" * >Using date-based folder partitioning</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withDatePartitionEnabled(Boolean datePartitionEnabled) { setDatePartitionEnabled(datePartitionEnabled); return this; } /** * <p> * When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit dates. The * default value is <code>false</code>. For more information about date-based folder partitioning, see <a * href="https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning">Using * date-based folder partitioning</a>. * </p> * * @return When set to <code>true</code>, this parameter partitions S3 bucket folders based on transaction commit * dates. The default value is <code>false</code>. For more information about date-based folder * partitioning, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.DatePartitioning" * >Using date-based folder partitioning</a>. */ public Boolean isDatePartitionEnabled() { return this.datePartitionEnabled; } /** * <p> * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionSequence * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @see DatePartitionSequenceValue */ public void setDatePartitionSequence(String datePartitionSequence) { this.datePartitionSequence = datePartitionSequence; } /** * <p> * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @return Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @see DatePartitionSequenceValue */ public String getDatePartitionSequence() { return this.datePartitionSequence; } /** * <p> * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionSequence * Identifies the sequence of the date format to use during folder partitioning. 
The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @return Returns a reference to this object so that method calls can be chained together. * @see DatePartitionSequenceValue */ public S3Settings withDatePartitionSequence(String datePartitionSequence) { setDatePartitionSequence(datePartitionSequence); return this; } /** * <p> * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionSequence * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @see DatePartitionSequenceValue */ public void setDatePartitionSequence(DatePartitionSequenceValue datePartitionSequence) { withDatePartitionSequence(datePartitionSequence); } /** * <p> * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionSequence * Identifies the sequence of the date format to use during folder partitioning. The default value is * <code>YYYYMMDD</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @return Returns a reference to this object so that method calls can be chained together. * @see DatePartitionSequenceValue */ public S3Settings withDatePartitionSequence(DatePartitionSequenceValue datePartitionSequence) { this.datePartitionSequence = datePartitionSequence.toString(); return this; } /** * <p> * Specifies a date separating delimiter to use during folder partitioning. The default value is <code>SLASH</code>. * Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionDelimiter * Specifies a date separating delimiter to use during folder partitioning. The default value is * <code>SLASH</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @see DatePartitionDelimiterValue */ public void setDatePartitionDelimiter(String datePartitionDelimiter) { this.datePartitionDelimiter = datePartitionDelimiter; } /** * <p> * Specifies a date separating delimiter to use during folder partitioning. The default value is <code>SLASH</code>. * Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @return Specifies a date separating delimiter to use during folder partitioning. The default value is * <code>SLASH</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @see DatePartitionDelimiterValue */ public String getDatePartitionDelimiter() { return this.datePartitionDelimiter; } /** * <p> * Specifies a date separating delimiter to use during folder partitioning. The default value is <code>SLASH</code>. * Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionDelimiter * Specifies a date separating delimiter to use during folder partitioning. The default value is * <code>SLASH</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. 
* @return Returns a reference to this object so that method calls can be chained together. * @see DatePartitionDelimiterValue */ public S3Settings withDatePartitionDelimiter(String datePartitionDelimiter) { setDatePartitionDelimiter(datePartitionDelimiter); return this; } /** * <p> * Specifies a date separating delimiter to use during folder partitioning. The default value is <code>SLASH</code>. * Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionDelimiter * Specifies a date separating delimiter to use during folder partitioning. The default value is * <code>SLASH</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @see DatePartitionDelimiterValue */ public void setDatePartitionDelimiter(DatePartitionDelimiterValue datePartitionDelimiter) { withDatePartitionDelimiter(datePartitionDelimiter); } /** * <p> * Specifies a date separating delimiter to use during folder partitioning. The default value is <code>SLASH</code>. * Use this parameter when <code>DatePartitionedEnabled</code> is set to <code>true</code>. * </p> * * @param datePartitionDelimiter * Specifies a date separating delimiter to use during folder partitioning. The default value is * <code>SLASH</code>. Use this parameter when <code>DatePartitionedEnabled</code> is set to * <code>true</code>. * @return Returns a reference to this object so that method calls can be chained together. * @see DatePartitionDelimiterValue */ public S3Settings withDatePartitionDelimiter(DatePartitionDelimiterValue datePartitionDelimiter) { this.datePartitionDelimiter = datePartitionDelimiter.toString(); return this; } /** * <p> * This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv format. * If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue"> * <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for these * columns. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * </note> * * @param useCsvNoSupValue * This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv * format. If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value * specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue" * > <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for * these columns.</p> <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> */ public void setUseCsvNoSupValue(Boolean useCsvNoSupValue) { this.useCsvNoSupValue = useCsvNoSupValue; } /** * <p> * This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv format. * If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue"> * <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for these * columns. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. 
* </p> * </note> * * @return This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv * format. If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value * specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue" * > <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for * these columns.</p> <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> */ public Boolean getUseCsvNoSupValue() { return this.useCsvNoSupValue; } /** * <p> * This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv format. * If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue"> * <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for these * columns. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * </note> * * @param useCsvNoSupValue * This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv * format. If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value * specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue" * > <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for * these columns.</p> <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withUseCsvNoSupValue(Boolean useCsvNoSupValue) { setUseCsvNoSupValue(useCsvNoSupValue); return this; } /** * <p> * This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv format. * If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue"> * <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for these * columns. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * </note> * * @return This setting applies if the S3 output files during a change data capture (CDC) load are written in .csv * format. If set to <code>true</code> for columns not included in the supplemental log, DMS uses the value * specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CsvNoSupValue" * > <code>CsvNoSupValue</code> </a>. If not set or set to <code>false</code>, DMS uses the null value for * these columns.</p> <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> */ public Boolean isUseCsvNoSupValue() { return this.useCsvNoSupValue; } /** * <p> * This setting only applies if your Amazon S3 output files during a change data capture (CDC) load are written in * .csv format. 
If <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-UseCsvNoSupValue"> * <code>UseCsvNoSupValue</code> </a> is set to true, specify a string value that you want DMS to use for all * columns not included in the supplemental log. If you do not specify a string value, DMS uses the null value for * these columns regardless of the <code>UseCsvNoSupValue</code> setting. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * </note> * * @param csvNoSupValue * This setting only applies if your Amazon S3 output files during a change data capture (CDC) load are * written in .csv format. If <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-UseCsvNoSupValue" * > <code>UseCsvNoSupValue</code> </a> is set to true, specify a string value that you want DMS to use for * all columns not included in the supplemental log. If you do not specify a string value, DMS uses the null * value for these columns regardless of the <code>UseCsvNoSupValue</code> setting.</p> <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> */ public void setCsvNoSupValue(String csvNoSupValue) { this.csvNoSupValue = csvNoSupValue; } /** * <p> * This setting only applies if your Amazon S3 output files during a change data capture (CDC) load are written in * .csv format. If <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-UseCsvNoSupValue"> * <code>UseCsvNoSupValue</code> </a> is set to true, specify a string value that you want DMS to use for all * columns not included in the supplemental log. If you do not specify a string value, DMS uses the null value for * these columns regardless of the <code>UseCsvNoSupValue</code> setting. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * </note> * * @return This setting only applies if your Amazon S3 output files during a change data capture (CDC) load are * written in .csv format. If <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-UseCsvNoSupValue" * > <code>UseCsvNoSupValue</code> </a> is set to true, specify a string value that you want DMS to use for * all columns not included in the supplemental log. If you do not specify a string value, DMS uses the null * value for these columns regardless of the <code>UseCsvNoSupValue</code> setting.</p> <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> */ public String getCsvNoSupValue() { return this.csvNoSupValue; } /** * <p> * This setting only applies if your Amazon S3 output files during a change data capture (CDC) load are written in * .csv format. If <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-UseCsvNoSupValue"> * <code>UseCsvNoSupValue</code> </a> is set to true, specify a string value that you want DMS to use for all * columns not included in the supplemental log. If you do not specify a string value, DMS uses the null value for * these columns regardless of the <code>UseCsvNoSupValue</code> setting. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * </note> * * @param csvNoSupValue * This setting only applies if your Amazon S3 output files during a change data capture (CDC) load are * written in .csv format. 
If <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-UseCsvNoSupValue" * > <code>UseCsvNoSupValue</code> </a> is set to true, specify a string value that you want DMS to use for * all columns not included in the supplemental log. If you do not specify a string value, DMS uses the null * value for these columns regardless of the <code>UseCsvNoSupValue</code> setting.</p> <note> * <p> * This setting is supported in DMS versions 3.4.1 and later. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCsvNoSupValue(String csvNoSupValue) { setCsvNoSupValue(csvNoSupValue); return this; } /** * <p> * If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the Amazon * S3 target specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> * * @param preserveTransactions * If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the * Amazon S3 target specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>.</p> <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> */ public void setPreserveTransactions(Boolean preserveTransactions) { this.preserveTransactions = preserveTransactions; } /** * <p> * If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the Amazon * S3 target specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> * * @return If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the * Amazon S3 target specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>.</p> <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. 
* </p> */ public Boolean getPreserveTransactions() { return this.preserveTransactions; } /** * <p> * If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the Amazon * S3 target specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> * * @param preserveTransactions * If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the * Amazon S3 target specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>.</p> <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withPreserveTransactions(Boolean preserveTransactions) { setPreserveTransactions(preserveTransactions); return this; } /** * <p> * If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the Amazon * S3 target specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> * * @return If set to <code>true</code>, DMS saves the transaction order for a change data capture (CDC) load on the * Amazon S3 target specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-CdcPath"> * <code>CdcPath</code> </a>. For more information, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>.</p> <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> */ public Boolean isPreserveTransactions() { return this.preserveTransactions; } /** * <p> * Specifies the folder path of CDC files. For an S3 source, this setting is required if a task captures change * data; otherwise, it's optional. If <code>CdcPath</code> is set, DMS reads CDC files from this path and replicates * the data changes to the target endpoint. For an S3 target if you set <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-PreserveTransactions" * > <code>PreserveTransactions</code> </a> to <code>true</code>, DMS verifies that you have set this parameter to a * folder path on your S3 target where DMS can save the transaction order for the CDC load. 
DMS creates this CDC * folder path in either your S3 target working directory or the S3 target location specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketFolder"> * <code>BucketFolder</code> </a> and <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketName"> * <code>BucketName</code> </a>. * </p> * <p> * For example, if you specify <code>CdcPath</code> as <code>MyChangedData</code>, and you specify * <code>BucketName</code> as <code>MyTargetBucket</code> but do not specify <code>BucketFolder</code>, DMS creates * the CDC folder path following: <code>MyTargetBucket/MyChangedData</code>. * </p> * <p> * If you specify the same <code>CdcPath</code>, and you specify <code>BucketName</code> as * <code>MyTargetBucket</code> and <code>BucketFolder</code> as <code>MyTargetData</code>, DMS creates the CDC * folder path following: <code>MyTargetBucket/MyTargetData/MyChangedData</code>. * </p> * <p> * For more information on CDC including transaction order on an S3 target, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> * * @param cdcPath * Specifies the folder path of CDC files. For an S3 source, this setting is required if a task captures * change data; otherwise, it's optional. If <code>CdcPath</code> is set, DMS reads CDC files from this path * and replicates the data changes to the target endpoint. For an S3 target if you set <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-PreserveTransactions" * > <code>PreserveTransactions</code> </a> to <code>true</code>, DMS verifies that you have set this * parameter to a folder path on your S3 target where DMS can save the transaction order for the CDC load. * DMS creates this CDC folder path in either your S3 target working directory or the S3 target location * specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketFolder" * > <code>BucketFolder</code> </a> and <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketName" * > <code>BucketName</code> </a>.</p> * <p> * For example, if you specify <code>CdcPath</code> as <code>MyChangedData</code>, and you specify * <code>BucketName</code> as <code>MyTargetBucket</code> but do not specify <code>BucketFolder</code>, DMS * creates the CDC folder path following: <code>MyTargetBucket/MyChangedData</code>. * </p> * <p> * If you specify the same <code>CdcPath</code>, and you specify <code>BucketName</code> as * <code>MyTargetBucket</code> and <code>BucketFolder</code> as <code>MyTargetData</code>, DMS creates the * CDC folder path following: <code>MyTargetBucket/MyTargetData/MyChangedData</code>. * </p> * <p> * For more information on CDC including transaction order on an S3 target, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. 
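 *        </p>
 *        <p>
 *        As a hypothetical illustration using the bucket and folder names from the example above
 *        (<code>withBucketName</code> and <code>withBucketFolder</code> are assumed to be the setters for the
 *        linked <code>BucketName</code> and <code>BucketFolder</code> parameters):
 *        </p>
 *        <pre>{@code
 *        S3Settings settings = new S3Settings()
 *                .withBucketName("MyTargetBucket")
 *                .withBucketFolder("MyTargetData")
 *                .withPreserveTransactions(true)
 *                .withCdcPath("MyChangedData");
 *        // CDC data would then land under MyTargetBucket/MyTargetData/MyChangedData
 *        }</pre>
 *        <p>
 *        The snippet is a sketch only and omits the remaining endpoint configuration.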
* </p> */ public void setCdcPath(String cdcPath) { this.cdcPath = cdcPath; } /** * <p> * Specifies the folder path of CDC files. For an S3 source, this setting is required if a task captures change * data; otherwise, it's optional. If <code>CdcPath</code> is set, DMS reads CDC files from this path and replicates * the data changes to the target endpoint. For an S3 target if you set <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-PreserveTransactions" * > <code>PreserveTransactions</code> </a> to <code>true</code>, DMS verifies that you have set this parameter to a * folder path on your S3 target where DMS can save the transaction order for the CDC load. DMS creates this CDC * folder path in either your S3 target working directory or the S3 target location specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketFolder"> * <code>BucketFolder</code> </a> and <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketName"> * <code>BucketName</code> </a>. * </p> * <p> * For example, if you specify <code>CdcPath</code> as <code>MyChangedData</code>, and you specify * <code>BucketName</code> as <code>MyTargetBucket</code> but do not specify <code>BucketFolder</code>, DMS creates * the CDC folder path following: <code>MyTargetBucket/MyChangedData</code>. * </p> * <p> * If you specify the same <code>CdcPath</code>, and you specify <code>BucketName</code> as * <code>MyTargetBucket</code> and <code>BucketFolder</code> as <code>MyTargetData</code>, DMS creates the CDC * folder path following: <code>MyTargetBucket/MyTargetData/MyChangedData</code>. * </p> * <p> * For more information on CDC including transaction order on an S3 target, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> * * @return Specifies the folder path of CDC files. For an S3 source, this setting is required if a task captures * change data; otherwise, it's optional. If <code>CdcPath</code> is set, DMS reads CDC files from this path * and replicates the data changes to the target endpoint. For an S3 target if you set <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-PreserveTransactions" * > <code>PreserveTransactions</code> </a> to <code>true</code>, DMS verifies that you have set this * parameter to a folder path on your S3 target where DMS can save the transaction order for the CDC load. * DMS creates this CDC folder path in either your S3 target working directory or the S3 target location * specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketFolder" * > <code>BucketFolder</code> </a> and <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketName" * > <code>BucketName</code> </a>.</p> * <p> * For example, if you specify <code>CdcPath</code> as <code>MyChangedData</code>, and you specify * <code>BucketName</code> as <code>MyTargetBucket</code> but do not specify <code>BucketFolder</code>, DMS * creates the CDC folder path following: <code>MyTargetBucket/MyChangedData</code>. 
* </p> * <p> * If you specify the same <code>CdcPath</code>, and you specify <code>BucketName</code> as * <code>MyTargetBucket</code> and <code>BucketFolder</code> as <code>MyTargetData</code>, DMS creates the * CDC folder path following: <code>MyTargetBucket/MyTargetData/MyChangedData</code>. * </p> * <p> * For more information on CDC including transaction order on an S3 target, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> */ public String getCdcPath() { return this.cdcPath; } /** * <p> * Specifies the folder path of CDC files. For an S3 source, this setting is required if a task captures change * data; otherwise, it's optional. If <code>CdcPath</code> is set, DMS reads CDC files from this path and replicates * the data changes to the target endpoint. For an S3 target if you set <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-PreserveTransactions" * > <code>PreserveTransactions</code> </a> to <code>true</code>, DMS verifies that you have set this parameter to a * folder path on your S3 target where DMS can save the transaction order for the CDC load. DMS creates this CDC * folder path in either your S3 target working directory or the S3 target location specified by <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketFolder"> * <code>BucketFolder</code> </a> and <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketName"> * <code>BucketName</code> </a>. * </p> * <p> * For example, if you specify <code>CdcPath</code> as <code>MyChangedData</code>, and you specify * <code>BucketName</code> as <code>MyTargetBucket</code> but do not specify <code>BucketFolder</code>, DMS creates * the CDC folder path following: <code>MyTargetBucket/MyChangedData</code>. * </p> * <p> * If you specify the same <code>CdcPath</code>, and you specify <code>BucketName</code> as * <code>MyTargetBucket</code> and <code>BucketFolder</code> as <code>MyTargetData</code>, DMS creates the CDC * folder path following: <code>MyTargetBucket/MyTargetData/MyChangedData</code>. * </p> * <p> * For more information on CDC including transaction order on an S3 target, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * </note> * * @param cdcPath * Specifies the folder path of CDC files. For an S3 source, this setting is required if a task captures * change data; otherwise, it's optional. If <code>CdcPath</code> is set, DMS reads CDC files from this path * and replicates the data changes to the target endpoint. For an S3 target if you set <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-PreserveTransactions" * > <code>PreserveTransactions</code> </a> to <code>true</code>, DMS verifies that you have set this * parameter to a folder path on your S3 target where DMS can save the transaction order for the CDC load. 
* DMS creates this CDC folder path in either your S3 target working directory or the S3 target location * specified by <a href= * "https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketFolder" * > <code>BucketFolder</code> </a> and <a * href="https://docs.aws.amazon.com/dms/latest/APIReference/API_S3Settings.html#DMS-Type-S3Settings-BucketName" * > <code>BucketName</code> </a>.</p> * <p> * For example, if you specify <code>CdcPath</code> as <code>MyChangedData</code>, and you specify * <code>BucketName</code> as <code>MyTargetBucket</code> but do not specify <code>BucketFolder</code>, DMS * creates the CDC folder path following: <code>MyTargetBucket/MyChangedData</code>. * </p> * <p> * If you specify the same <code>CdcPath</code>, and you specify <code>BucketName</code> as * <code>MyTargetBucket</code> and <code>BucketFolder</code> as <code>MyTargetData</code>, DMS creates the * CDC folder path following: <code>MyTargetBucket/MyTargetData/MyChangedData</code>. * </p> * <p> * For more information on CDC including transaction order on an S3 target, see <a href= * "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html#CHAP_Target.S3.EndpointSettings.CdcPath" * >Capturing data changes (CDC) including transaction order on the S3 target</a>. * </p> * <note> * <p> * This setting is supported in DMS versions 3.4.2 and later. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCdcPath(String cdcPath) { setCdcPath(cdcPath); return this; } /** * <p> * A value that enables DMS to specify a predefined (canned) access control list for objects created in an Amazon S3 * bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> * </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * </p> * * @param cannedAclForObjects * A value that enables DMS to specify a predefined (canned) access control list for objects created in an * Amazon S3 bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * @see CannedAclForObjectsValue */ public void setCannedAclForObjects(String cannedAclForObjects) { this.cannedAclForObjects = cannedAclForObjects; } /** * <p> * A value that enables DMS to specify a predefined (canned) access control list for objects created in an Amazon S3 * bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> * </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. 
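 * </p>
 * <p>
 * Illustrative sketch only: the <code>CannedAclForObjectsValue</code> overload defined later in this class is
 * assumed to expose the documented values as enum constants.
 * </p>
 * <pre>{@code
 * S3Settings settings = new S3Settings()
 *         .withCannedAclForObjects(CannedAclForObjectsValue.BUCKET_OWNER_FULL_CONTROL);
 * }</pre>
 * <p>
 * When no value is supplied, the default of NONE applies.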
* </p> * * @return A value that enables DMS to specify a predefined (canned) access control list for objects created in an * Amazon S3 bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * @see CannedAclForObjectsValue */ public String getCannedAclForObjects() { return this.cannedAclForObjects; } /** * <p> * A value that enables DMS to specify a predefined (canned) access control list for objects created in an Amazon S3 * bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> * </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * </p> * * @param cannedAclForObjects * A value that enables DMS to specify a predefined (canned) access control list for objects created in an * Amazon S3 bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * @return Returns a reference to this object so that method calls can be chained together. * @see CannedAclForObjectsValue */ public S3Settings withCannedAclForObjects(String cannedAclForObjects) { setCannedAclForObjects(cannedAclForObjects); return this; } /** * <p> * A value that enables DMS to specify a predefined (canned) access control list for objects created in an Amazon S3 * bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> * </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * </p> * * @param cannedAclForObjects * A value that enables DMS to specify a predefined (canned) access control list for objects created in an * Amazon S3 bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * @see CannedAclForObjectsValue */ public void setCannedAclForObjects(CannedAclForObjectsValue cannedAclForObjects) { withCannedAclForObjects(cannedAclForObjects); } /** * <p> * A value that enables DMS to specify a predefined (canned) access control list for objects created in an Amazon S3 * bucket as .csv or .parquet files. 
For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> * </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * </p> * * @param cannedAclForObjects * A value that enables DMS to specify a predefined (canned) access control list for objects created in an * Amazon S3 bucket as .csv or .parquet files. For more information about Amazon S3 canned ACLs, see <a * href="http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl">Canned ACL</a> in the * <i>Amazon S3 Developer Guide.</i> </p> * <p> * The default value is NONE. Valid values include NONE, PRIVATE, PUBLIC_READ, PUBLIC_READ_WRITE, * AUTHENTICATED_READ, AWS_EXEC_READ, BUCKET_OWNER_READ, and BUCKET_OWNER_FULL_CONTROL. * @return Returns a reference to this object so that method calls can be chained together. * @see CannedAclForObjectsValue */ public S3Settings withCannedAclForObjects(CannedAclForObjectsValue cannedAclForObjects) { this.cannedAclForObjects = cannedAclForObjects.toString(); return this; } /** * <p> * An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column name * information to the .csv output file. * </p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, <code>y</code>, * and <code>n</code>. * </p> * * @param addColumnName * An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column * name information to the .csv output file.</p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. */ public void setAddColumnName(Boolean addColumnName) { this.addColumnName = addColumnName; } /** * <p> * An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column name * information to the .csv output file. * </p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, <code>y</code>, * and <code>n</code>. * </p> * * @return An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column * name information to the .csv output file.</p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. */ public Boolean getAddColumnName() { return this.addColumnName; } /** * <p> * An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column name * information to the .csv output file. * </p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, <code>y</code>, * and <code>n</code>. * </p> * * @param addColumnName * An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column * name information to the .csv output file.</p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public S3Settings withAddColumnName(Boolean addColumnName) { setAddColumnName(addColumnName); return this; } /** * <p> * An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column name * information to the .csv output file. * </p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, <code>y</code>, * and <code>n</code>. * </p> * * @return An optional parameter that, when set to <code>true</code> or <code>y</code>, you can use to add column * name information to the .csv output file.</p> * <p> * The default value is <code>false</code>. Valid values are <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. */ public Boolean isAddColumnName() { return this.addColumnName; } /** * <p> * Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. * </p> * <p> * When <code>CdcMaxBatchInterval</code> and <code>CdcMinFileSize</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 60 seconds. * </p> * * @param cdcMaxBatchInterval * Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3.</p> * <p> * When <code>CdcMaxBatchInterval</code> and <code>CdcMinFileSize</code> are both specified, the file write * is triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 60 seconds. */ public void setCdcMaxBatchInterval(Integer cdcMaxBatchInterval) { this.cdcMaxBatchInterval = cdcMaxBatchInterval; } /** * <p> * Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. * </p> * <p> * When <code>CdcMaxBatchInterval</code> and <code>CdcMinFileSize</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 60 seconds. * </p> * * @return Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3.</p> * <p> * When <code>CdcMaxBatchInterval</code> and <code>CdcMinFileSize</code> are both specified, the file write * is triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 60 seconds. */ public Integer getCdcMaxBatchInterval() { return this.cdcMaxBatchInterval; } /** * <p> * Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. * </p> * <p> * When <code>CdcMaxBatchInterval</code> and <code>CdcMinFileSize</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 60 seconds. * </p> * * @param cdcMaxBatchInterval * Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3.</p> * <p> * When <code>CdcMaxBatchInterval</code> and <code>CdcMinFileSize</code> are both specified, the file write * is triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 60 seconds. * @return Returns a reference to this object so that method calls can be chained together. 
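 *         <p>
 *         A brief illustrative sketch (values chosen arbitrarily): when both rollover conditions are set,
 *         whichever threshold is reached first triggers the file write, as described above. The
 *         <code>withCdcMinFileSize</code> setter is defined later in this class.
 *         </p>
 *         <pre>{@code
 *         S3Settings settings = new S3Settings()
 *                 .withCdcMaxBatchInterval(120)  // seconds
 *                 .withCdcMinFileSize(64);       // megabytes
 *         }</pre>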
*/ public S3Settings withCdcMaxBatchInterval(Integer cdcMaxBatchInterval) { setCdcMaxBatchInterval(cdcMaxBatchInterval); return this; } /** * <p> * Minimum file size, defined in megabytes, to reach for a file output to Amazon S3. * </p> * <p> * When <code>CdcMinFileSize</code> and <code>CdcMaxBatchInterval</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 32 MB. * </p> * * @param cdcMinFileSize * Minimum file size, defined in megabytes, to reach for a file output to Amazon S3.</p> * <p> * When <code>CdcMinFileSize</code> and <code>CdcMaxBatchInterval</code> are both specified, the file write * is triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 32 MB. */ public void setCdcMinFileSize(Integer cdcMinFileSize) { this.cdcMinFileSize = cdcMinFileSize; } /** * <p> * Minimum file size, defined in megabytes, to reach for a file output to Amazon S3. * </p> * <p> * When <code>CdcMinFileSize</code> and <code>CdcMaxBatchInterval</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 32 MB. * </p> * * @return Minimum file size, defined in megabytes, to reach for a file output to Amazon S3.</p> * <p> * When <code>CdcMinFileSize</code> and <code>CdcMaxBatchInterval</code> are both specified, the file write * is triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 32 MB. */ public Integer getCdcMinFileSize() { return this.cdcMinFileSize; } /** * <p> * Minimum file size, defined in megabytes, to reach for a file output to Amazon S3. * </p> * <p> * When <code>CdcMinFileSize</code> and <code>CdcMaxBatchInterval</code> are both specified, the file write is * triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 32 MB. * </p> * * @param cdcMinFileSize * Minimum file size, defined in megabytes, to reach for a file output to Amazon S3.</p> * <p> * When <code>CdcMinFileSize</code> and <code>CdcMaxBatchInterval</code> are both specified, the file write * is triggered by whichever parameter condition is met first within an DMS CloudFormation template. * </p> * <p> * The default value is 32 MB. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCdcMinFileSize(Integer cdcMinFileSize) { setCdcMinFileSize(cdcMinFileSize); return this; } /** * <p> * An optional parameter that specifies how DMS treats null values. While handling the null value, you can use this * parameter to pass a user-defined string as null when writing to the target. For example, when target columns are * not nullable, you can use this option to differentiate between the empty string value and the null value. So, if * you set this parameter value to the empty string ("" or ''), DMS treats the empty string as the null value * instead of <code>NULL</code>. * </p> * <p> * The default value is <code>NULL</code>. Valid values include any valid string. * </p> * * @param csvNullValue * An optional parameter that specifies how DMS treats null values. While handling the null value, you can * use this parameter to pass a user-defined string as null when writing to the target. 
For example, when * target columns are not nullable, you can use this option to differentiate between the empty string value * and the null value. So, if you set this parameter value to the empty string ("" or ''), DMS treats the * empty string as the null value instead of <code>NULL</code>.</p> * <p> * The default value is <code>NULL</code>. Valid values include any valid string. */ public void setCsvNullValue(String csvNullValue) { this.csvNullValue = csvNullValue; } /** * <p> * An optional parameter that specifies how DMS treats null values. While handling the null value, you can use this * parameter to pass a user-defined string as null when writing to the target. For example, when target columns are * not nullable, you can use this option to differentiate between the empty string value and the null value. So, if * you set this parameter value to the empty string ("" or ''), DMS treats the empty string as the null value * instead of <code>NULL</code>. * </p> * <p> * The default value is <code>NULL</code>. Valid values include any valid string. * </p> * * @return An optional parameter that specifies how DMS treats null values. While handling the null value, you can * use this parameter to pass a user-defined string as null when writing to the target. For example, when * target columns are not nullable, you can use this option to differentiate between the empty string value * and the null value. So, if you set this parameter value to the empty string ("" or ''), DMS treats the * empty string as the null value instead of <code>NULL</code>.</p> * <p> * The default value is <code>NULL</code>. Valid values include any valid string. */ public String getCsvNullValue() { return this.csvNullValue; } /** * <p> * An optional parameter that specifies how DMS treats null values. While handling the null value, you can use this * parameter to pass a user-defined string as null when writing to the target. For example, when target columns are * not nullable, you can use this option to differentiate between the empty string value and the null value. So, if * you set this parameter value to the empty string ("" or ''), DMS treats the empty string as the null value * instead of <code>NULL</code>. * </p> * <p> * The default value is <code>NULL</code>. Valid values include any valid string. * </p> * * @param csvNullValue * An optional parameter that specifies how DMS treats null values. While handling the null value, you can * use this parameter to pass a user-defined string as null when writing to the target. For example, when * target columns are not nullable, you can use this option to differentiate between the empty string value * and the null value. So, if you set this parameter value to the empty string ("" or ''), DMS treats the * empty string as the null value instead of <code>NULL</code>.</p> * <p> * The default value is <code>NULL</code>. Valid values include any valid string. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withCsvNullValue(String csvNullValue) { setCsvNullValue(csvNullValue); return this; } /** * <p> * When this value is set to 1, DMS ignores the first row header in a .csv file. A value of 1 turns on the feature; * a value of 0 turns off the feature. * </p> * <p> * The default is 0. * </p> * * @param ignoreHeaderRows * When this value is set to 1, DMS ignores the first row header in a .csv file. A value of 1 turns on the * feature; a value of 0 turns off the feature.</p> * <p> * The default is 0. 
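 *        <p>
 *        A small illustrative sketch (not part of the service documentation) for an S3 source whose .csv files
 *        carry a single header row; <code>withCsvNullValue</code> is the setter documented above:
 *        </p>
 *        <pre>{@code
 *        S3Settings settings = new S3Settings()
 *                .withIgnoreHeaderRows(1)
 *                .withCsvNullValue("");  // empty strings in the source are then treated as the null value
 *        }</pre>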
*/ public void setIgnoreHeaderRows(Integer ignoreHeaderRows) { this.ignoreHeaderRows = ignoreHeaderRows; } /** * <p> * When this value is set to 1, DMS ignores the first row header in a .csv file. A value of 1 turns on the feature; * a value of 0 turns off the feature. * </p> * <p> * The default is 0. * </p> * * @return When this value is set to 1, DMS ignores the first row header in a .csv file. A value of 1 turns on the * feature; a value of 0 turns off the feature.</p> * <p> * The default is 0. */ public Integer getIgnoreHeaderRows() { return this.ignoreHeaderRows; } /** * <p> * When this value is set to 1, DMS ignores the first row header in a .csv file. A value of 1 turns on the feature; * a value of 0 turns off the feature. * </p> * <p> * The default is 0. * </p> * * @param ignoreHeaderRows * When this value is set to 1, DMS ignores the first row header in a .csv file. A value of 1 turns on the * feature; a value of 0 turns off the feature.</p> * <p> * The default is 0. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withIgnoreHeaderRows(Integer ignoreHeaderRows) { setIgnoreHeaderRows(ignoreHeaderRows); return this; } /** * <p> * A value that specifies the maximum size (in KB) of any .csv file to be created while migrating to an S3 target * during full load. * </p> * <p> * The default value is 1,048,576 KB (1 GB). Valid values include 1 to 1,048,576. * </p> * * @param maxFileSize * A value that specifies the maximum size (in KB) of any .csv file to be created while migrating to an S3 * target during full load.</p> * <p> * The default value is 1,048,576 KB (1 GB). Valid values include 1 to 1,048,576. */ public void setMaxFileSize(Integer maxFileSize) { this.maxFileSize = maxFileSize; } /** * <p> * A value that specifies the maximum size (in KB) of any .csv file to be created while migrating to an S3 target * during full load. * </p> * <p> * The default value is 1,048,576 KB (1 GB). Valid values include 1 to 1,048,576. * </p> * * @return A value that specifies the maximum size (in KB) of any .csv file to be created while migrating to an S3 * target during full load.</p> * <p> * The default value is 1,048,576 KB (1 GB). Valid values include 1 to 1,048,576. */ public Integer getMaxFileSize() { return this.maxFileSize; } /** * <p> * A value that specifies the maximum size (in KB) of any .csv file to be created while migrating to an S3 target * during full load. * </p> * <p> * The default value is 1,048,576 KB (1 GB). Valid values include 1 to 1,048,576. * </p> * * @param maxFileSize * A value that specifies the maximum size (in KB) of any .csv file to be created while migrating to an S3 * target during full load.</p> * <p> * The default value is 1,048,576 KB (1 GB). Valid values include 1 to 1,048,576. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withMaxFileSize(Integer maxFileSize) { setMaxFileSize(maxFileSize); return this; } /** * <p> * For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double quotation * mark has to be followed by an ending double quotation mark. This formatting complies with RFC 4180. When this * value is set to <code>false</code> or <code>n</code>, string literals are copied to the target as is. In this * case, a delimiter (row or column) signals the end of the field. Thus, you can't use a delimiter as part of the * string, because it signals the end of the value. 
* </p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to Amazon * S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> using Amazon S3 as * a target, if the data has quotation marks or newline characters in it, DMS encloses the entire column with an * additional pair of double quotation marks ("). Every quotation mark within the data is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. * </p> * * @param rfc4180 * For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double * quotation mark has to be followed by an ending double quotation mark. This formatting complies with RFC * 4180. When this value is set to <code>false</code> or <code>n</code>, string literals are copied to the * target as is. In this case, a delimiter (row or column) signals the end of the field. Thus, you can't use * a delimiter as part of the string, because it signals the end of the value.</p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to * Amazon S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> using * Amazon S3 as a target, if the data has quotation marks or newline characters in it, DMS encloses the * entire column with an additional pair of double quotation marks ("). Every quotation mark within the data * is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. */ public void setRfc4180(Boolean rfc4180) { this.rfc4180 = rfc4180; } /** * <p> * For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double quotation * mark has to be followed by an ending double quotation mark. This formatting complies with RFC 4180. When this * value is set to <code>false</code> or <code>n</code>, string literals are copied to the target as is. In this * case, a delimiter (row or column) signals the end of the field. Thus, you can't use a delimiter as part of the * string, because it signals the end of the value. * </p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to Amazon * S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> using Amazon S3 as * a target, if the data has quotation marks or newline characters in it, DMS encloses the entire column with an * additional pair of double quotation marks ("). Every quotation mark within the data is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. * </p> * * @return For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double * quotation mark has to be followed by an ending double quotation mark. This formatting complies with RFC * 4180. When this value is set to <code>false</code> or <code>n</code>, string literals are copied to the * target as is. In this case, a delimiter (row or column) signals the end of the field. 
Thus, you can't use * a delimiter as part of the string, because it signals the end of the value.</p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to * Amazon S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> * using Amazon S3 as a target, if the data has quotation marks or newline characters in it, DMS encloses * the entire column with an additional pair of double quotation marks ("). Every quotation mark within the * data is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. */ public Boolean getRfc4180() { return this.rfc4180; } /** * <p> * For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double quotation * mark has to be followed by an ending double quotation mark. This formatting complies with RFC 4180. When this * value is set to <code>false</code> or <code>n</code>, string literals are copied to the target as is. In this * case, a delimiter (row or column) signals the end of the field. Thus, you can't use a delimiter as part of the * string, because it signals the end of the value. * </p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to Amazon * S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> using Amazon S3 as * a target, if the data has quotation marks or newline characters in it, DMS encloses the entire column with an * additional pair of double quotation marks ("). Every quotation mark within the data is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. * </p> * * @param rfc4180 * For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double * quotation mark has to be followed by an ending double quotation mark. This formatting complies with RFC * 4180. When this value is set to <code>false</code> or <code>n</code>, string literals are copied to the * target as is. In this case, a delimiter (row or column) signals the end of the field. Thus, you can't use * a delimiter as part of the string, because it signals the end of the value.</p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to * Amazon S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> using * Amazon S3 as a target, if the data has quotation marks or newline characters in it, DMS encloses the * entire column with an additional pair of double quotation marks ("). Every quotation mark within the data * is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Settings withRfc4180(Boolean rfc4180) { setRfc4180(rfc4180); return this; } /** * <p> * For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double quotation * mark has to be followed by an ending double quotation mark. This formatting complies with RFC 4180. When this * value is set to <code>false</code> or <code>n</code>, string literals are copied to the target as is. 
In this * case, a delimiter (row or column) signals the end of the field. Thus, you can't use a delimiter as part of the * string, because it signals the end of the value. * </p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to Amazon * S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> using Amazon S3 as * a target, if the data has quotation marks or newline characters in it, DMS encloses the entire column with an * additional pair of double quotation marks ("). Every quotation mark within the data is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. * </p> * * @return For an S3 source, when this value is set to <code>true</code> or <code>y</code>, each leading double * quotation mark has to be followed by an ending double quotation mark. This formatting complies with RFC * 4180. When this value is set to <code>false</code> or <code>n</code>, string literals are copied to the * target as is. In this case, a delimiter (row or column) signals the end of the field. Thus, you can't use * a delimiter as part of the string, because it signals the end of the value.</p> * <p> * For an S3 target, an optional parameter used to set behavior to comply with RFC 4180 for data migrated to * Amazon S3 using .csv file format only. When this value is set to <code>true</code> or <code>y</code> * using Amazon S3 as a target, if the data has quotation marks or newline characters in it, DMS encloses * the entire column with an additional pair of double quotation marks ("). Every quotation mark within the * data is repeated twice. * </p> * <p> * The default value is <code>true</code>. Valid values include <code>true</code>, <code>false</code>, * <code>y</code>, and <code>n</code>. */ public Boolean isRfc4180() { return this.rfc4180; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getServiceAccessRoleArn() != null) sb.append("ServiceAccessRoleArn: ").append(getServiceAccessRoleArn()).append(","); if (getExternalTableDefinition() != null) sb.append("ExternalTableDefinition: ").append(getExternalTableDefinition()).append(","); if (getCsvRowDelimiter() != null) sb.append("CsvRowDelimiter: ").append(getCsvRowDelimiter()).append(","); if (getCsvDelimiter() != null) sb.append("CsvDelimiter: ").append(getCsvDelimiter()).append(","); if (getBucketFolder() != null) sb.append("BucketFolder: ").append(getBucketFolder()).append(","); if (getBucketName() != null) sb.append("BucketName: ").append(getBucketName()).append(","); if (getCompressionType() != null) sb.append("CompressionType: ").append(getCompressionType()).append(","); if (getEncryptionMode() != null) sb.append("EncryptionMode: ").append(getEncryptionMode()).append(","); if (getServerSideEncryptionKmsKeyId() != null) sb.append("ServerSideEncryptionKmsKeyId: ").append(getServerSideEncryptionKmsKeyId()).append(","); if (getDataFormat() != null) sb.append("DataFormat: ").append(getDataFormat()).append(","); if (getEncodingType() != null) sb.append("EncodingType: ").append(getEncodingType()).append(","); if (getDictPageSizeLimit() != null) sb.append("DictPageSizeLimit: ").append(getDictPageSizeLimit()).append(","); if (getRowGroupLength() != null) sb.append("RowGroupLength: ").append(getRowGroupLength()).append(","); if (getDataPageSize() != null) sb.append("DataPageSize: ").append(getDataPageSize()).append(","); if (getParquetVersion() != null) sb.append("ParquetVersion: ").append(getParquetVersion()).append(","); if (getEnableStatistics() != null) sb.append("EnableStatistics: ").append(getEnableStatistics()).append(","); if (getIncludeOpForFullLoad() != null) sb.append("IncludeOpForFullLoad: ").append(getIncludeOpForFullLoad()).append(","); if (getCdcInsertsOnly() != null) sb.append("CdcInsertsOnly: ").append(getCdcInsertsOnly()).append(","); if (getTimestampColumnName() != null) sb.append("TimestampColumnName: ").append(getTimestampColumnName()).append(","); if (getParquetTimestampInMillisecond() != null) sb.append("ParquetTimestampInMillisecond: ").append(getParquetTimestampInMillisecond()).append(","); if (getCdcInsertsAndUpdates() != null) sb.append("CdcInsertsAndUpdates: ").append(getCdcInsertsAndUpdates()).append(","); if (getDatePartitionEnabled() != null) sb.append("DatePartitionEnabled: ").append(getDatePartitionEnabled()).append(","); if (getDatePartitionSequence() != null) sb.append("DatePartitionSequence: ").append(getDatePartitionSequence()).append(","); if (getDatePartitionDelimiter() != null) sb.append("DatePartitionDelimiter: ").append(getDatePartitionDelimiter()).append(","); if (getUseCsvNoSupValue() != null) sb.append("UseCsvNoSupValue: ").append(getUseCsvNoSupValue()).append(","); if (getCsvNoSupValue() != null) sb.append("CsvNoSupValue: ").append(getCsvNoSupValue()).append(","); if (getPreserveTransactions() != null) sb.append("PreserveTransactions: ").append(getPreserveTransactions()).append(","); if (getCdcPath() != null) sb.append("CdcPath: ").append(getCdcPath()).append(","); if (getCannedAclForObjects() != null) sb.append("CannedAclForObjects: ").append(getCannedAclForObjects()).append(","); if (getAddColumnName() != null) sb.append("AddColumnName: ").append(getAddColumnName()).append(","); if (getCdcMaxBatchInterval() != null) sb.append("CdcMaxBatchInterval: 
").append(getCdcMaxBatchInterval()).append(","); if (getCdcMinFileSize() != null) sb.append("CdcMinFileSize: ").append(getCdcMinFileSize()).append(","); if (getCsvNullValue() != null) sb.append("CsvNullValue: ").append(getCsvNullValue()).append(","); if (getIgnoreHeaderRows() != null) sb.append("IgnoreHeaderRows: ").append(getIgnoreHeaderRows()).append(","); if (getMaxFileSize() != null) sb.append("MaxFileSize: ").append(getMaxFileSize()).append(","); if (getRfc4180() != null) sb.append("Rfc4180: ").append(getRfc4180()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof S3Settings == false) return false; S3Settings other = (S3Settings) obj; if (other.getServiceAccessRoleArn() == null ^ this.getServiceAccessRoleArn() == null) return false; if (other.getServiceAccessRoleArn() != null && other.getServiceAccessRoleArn().equals(this.getServiceAccessRoleArn()) == false) return false; if (other.getExternalTableDefinition() == null ^ this.getExternalTableDefinition() == null) return false; if (other.getExternalTableDefinition() != null && other.getExternalTableDefinition().equals(this.getExternalTableDefinition()) == false) return false; if (other.getCsvRowDelimiter() == null ^ this.getCsvRowDelimiter() == null) return false; if (other.getCsvRowDelimiter() != null && other.getCsvRowDelimiter().equals(this.getCsvRowDelimiter()) == false) return false; if (other.getCsvDelimiter() == null ^ this.getCsvDelimiter() == null) return false; if (other.getCsvDelimiter() != null && other.getCsvDelimiter().equals(this.getCsvDelimiter()) == false) return false; if (other.getBucketFolder() == null ^ this.getBucketFolder() == null) return false; if (other.getBucketFolder() != null && other.getBucketFolder().equals(this.getBucketFolder()) == false) return false; if (other.getBucketName() == null ^ this.getBucketName() == null) return false; if (other.getBucketName() != null && other.getBucketName().equals(this.getBucketName()) == false) return false; if (other.getCompressionType() == null ^ this.getCompressionType() == null) return false; if (other.getCompressionType() != null && other.getCompressionType().equals(this.getCompressionType()) == false) return false; if (other.getEncryptionMode() == null ^ this.getEncryptionMode() == null) return false; if (other.getEncryptionMode() != null && other.getEncryptionMode().equals(this.getEncryptionMode()) == false) return false; if (other.getServerSideEncryptionKmsKeyId() == null ^ this.getServerSideEncryptionKmsKeyId() == null) return false; if (other.getServerSideEncryptionKmsKeyId() != null && other.getServerSideEncryptionKmsKeyId().equals(this.getServerSideEncryptionKmsKeyId()) == false) return false; if (other.getDataFormat() == null ^ this.getDataFormat() == null) return false; if (other.getDataFormat() != null && other.getDataFormat().equals(this.getDataFormat()) == false) return false; if (other.getEncodingType() == null ^ this.getEncodingType() == null) return false; if (other.getEncodingType() != null && other.getEncodingType().equals(this.getEncodingType()) == false) return false; if (other.getDictPageSizeLimit() == null ^ this.getDictPageSizeLimit() == null) return false; if (other.getDictPageSizeLimit() != null && other.getDictPageSizeLimit().equals(this.getDictPageSizeLimit()) == false) return false; if (other.getRowGroupLength() == null ^ this.getRowGroupLength() == null) return false; if (other.getRowGroupLength() != null && 
other.getRowGroupLength().equals(this.getRowGroupLength()) == false) return false; if (other.getDataPageSize() == null ^ this.getDataPageSize() == null) return false; if (other.getDataPageSize() != null && other.getDataPageSize().equals(this.getDataPageSize()) == false) return false; if (other.getParquetVersion() == null ^ this.getParquetVersion() == null) return false; if (other.getParquetVersion() != null && other.getParquetVersion().equals(this.getParquetVersion()) == false) return false; if (other.getEnableStatistics() == null ^ this.getEnableStatistics() == null) return false; if (other.getEnableStatistics() != null && other.getEnableStatistics().equals(this.getEnableStatistics()) == false) return false; if (other.getIncludeOpForFullLoad() == null ^ this.getIncludeOpForFullLoad() == null) return false; if (other.getIncludeOpForFullLoad() != null && other.getIncludeOpForFullLoad().equals(this.getIncludeOpForFullLoad()) == false) return false; if (other.getCdcInsertsOnly() == null ^ this.getCdcInsertsOnly() == null) return false; if (other.getCdcInsertsOnly() != null && other.getCdcInsertsOnly().equals(this.getCdcInsertsOnly()) == false) return false; if (other.getTimestampColumnName() == null ^ this.getTimestampColumnName() == null) return false; if (other.getTimestampColumnName() != null && other.getTimestampColumnName().equals(this.getTimestampColumnName()) == false) return false; if (other.getParquetTimestampInMillisecond() == null ^ this.getParquetTimestampInMillisecond() == null) return false; if (other.getParquetTimestampInMillisecond() != null && other.getParquetTimestampInMillisecond().equals(this.getParquetTimestampInMillisecond()) == false) return false; if (other.getCdcInsertsAndUpdates() == null ^ this.getCdcInsertsAndUpdates() == null) return false; if (other.getCdcInsertsAndUpdates() != null && other.getCdcInsertsAndUpdates().equals(this.getCdcInsertsAndUpdates()) == false) return false; if (other.getDatePartitionEnabled() == null ^ this.getDatePartitionEnabled() == null) return false; if (other.getDatePartitionEnabled() != null && other.getDatePartitionEnabled().equals(this.getDatePartitionEnabled()) == false) return false; if (other.getDatePartitionSequence() == null ^ this.getDatePartitionSequence() == null) return false; if (other.getDatePartitionSequence() != null && other.getDatePartitionSequence().equals(this.getDatePartitionSequence()) == false) return false; if (other.getDatePartitionDelimiter() == null ^ this.getDatePartitionDelimiter() == null) return false; if (other.getDatePartitionDelimiter() != null && other.getDatePartitionDelimiter().equals(this.getDatePartitionDelimiter()) == false) return false; if (other.getUseCsvNoSupValue() == null ^ this.getUseCsvNoSupValue() == null) return false; if (other.getUseCsvNoSupValue() != null && other.getUseCsvNoSupValue().equals(this.getUseCsvNoSupValue()) == false) return false; if (other.getCsvNoSupValue() == null ^ this.getCsvNoSupValue() == null) return false; if (other.getCsvNoSupValue() != null && other.getCsvNoSupValue().equals(this.getCsvNoSupValue()) == false) return false; if (other.getPreserveTransactions() == null ^ this.getPreserveTransactions() == null) return false; if (other.getPreserveTransactions() != null && other.getPreserveTransactions().equals(this.getPreserveTransactions()) == false) return false; if (other.getCdcPath() == null ^ this.getCdcPath() == null) return false; if (other.getCdcPath() != null && other.getCdcPath().equals(this.getCdcPath()) == false) return false; if 
(other.getCannedAclForObjects() == null ^ this.getCannedAclForObjects() == null) return false; if (other.getCannedAclForObjects() != null && other.getCannedAclForObjects().equals(this.getCannedAclForObjects()) == false) return false; if (other.getAddColumnName() == null ^ this.getAddColumnName() == null) return false; if (other.getAddColumnName() != null && other.getAddColumnName().equals(this.getAddColumnName()) == false) return false; if (other.getCdcMaxBatchInterval() == null ^ this.getCdcMaxBatchInterval() == null) return false; if (other.getCdcMaxBatchInterval() != null && other.getCdcMaxBatchInterval().equals(this.getCdcMaxBatchInterval()) == false) return false; if (other.getCdcMinFileSize() == null ^ this.getCdcMinFileSize() == null) return false; if (other.getCdcMinFileSize() != null && other.getCdcMinFileSize().equals(this.getCdcMinFileSize()) == false) return false; if (other.getCsvNullValue() == null ^ this.getCsvNullValue() == null) return false; if (other.getCsvNullValue() != null && other.getCsvNullValue().equals(this.getCsvNullValue()) == false) return false; if (other.getIgnoreHeaderRows() == null ^ this.getIgnoreHeaderRows() == null) return false; if (other.getIgnoreHeaderRows() != null && other.getIgnoreHeaderRows().equals(this.getIgnoreHeaderRows()) == false) return false; if (other.getMaxFileSize() == null ^ this.getMaxFileSize() == null) return false; if (other.getMaxFileSize() != null && other.getMaxFileSize().equals(this.getMaxFileSize()) == false) return false; if (other.getRfc4180() == null ^ this.getRfc4180() == null) return false; if (other.getRfc4180() != null && other.getRfc4180().equals(this.getRfc4180()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getServiceAccessRoleArn() == null) ? 0 : getServiceAccessRoleArn().hashCode()); hashCode = prime * hashCode + ((getExternalTableDefinition() == null) ? 0 : getExternalTableDefinition().hashCode()); hashCode = prime * hashCode + ((getCsvRowDelimiter() == null) ? 0 : getCsvRowDelimiter().hashCode()); hashCode = prime * hashCode + ((getCsvDelimiter() == null) ? 0 : getCsvDelimiter().hashCode()); hashCode = prime * hashCode + ((getBucketFolder() == null) ? 0 : getBucketFolder().hashCode()); hashCode = prime * hashCode + ((getBucketName() == null) ? 0 : getBucketName().hashCode()); hashCode = prime * hashCode + ((getCompressionType() == null) ? 0 : getCompressionType().hashCode()); hashCode = prime * hashCode + ((getEncryptionMode() == null) ? 0 : getEncryptionMode().hashCode()); hashCode = prime * hashCode + ((getServerSideEncryptionKmsKeyId() == null) ? 0 : getServerSideEncryptionKmsKeyId().hashCode()); hashCode = prime * hashCode + ((getDataFormat() == null) ? 0 : getDataFormat().hashCode()); hashCode = prime * hashCode + ((getEncodingType() == null) ? 0 : getEncodingType().hashCode()); hashCode = prime * hashCode + ((getDictPageSizeLimit() == null) ? 0 : getDictPageSizeLimit().hashCode()); hashCode = prime * hashCode + ((getRowGroupLength() == null) ? 0 : getRowGroupLength().hashCode()); hashCode = prime * hashCode + ((getDataPageSize() == null) ? 0 : getDataPageSize().hashCode()); hashCode = prime * hashCode + ((getParquetVersion() == null) ? 0 : getParquetVersion().hashCode()); hashCode = prime * hashCode + ((getEnableStatistics() == null) ? 0 : getEnableStatistics().hashCode()); hashCode = prime * hashCode + ((getIncludeOpForFullLoad() == null) ? 
0 : getIncludeOpForFullLoad().hashCode()); hashCode = prime * hashCode + ((getCdcInsertsOnly() == null) ? 0 : getCdcInsertsOnly().hashCode()); hashCode = prime * hashCode + ((getTimestampColumnName() == null) ? 0 : getTimestampColumnName().hashCode()); hashCode = prime * hashCode + ((getParquetTimestampInMillisecond() == null) ? 0 : getParquetTimestampInMillisecond().hashCode()); hashCode = prime * hashCode + ((getCdcInsertsAndUpdates() == null) ? 0 : getCdcInsertsAndUpdates().hashCode()); hashCode = prime * hashCode + ((getDatePartitionEnabled() == null) ? 0 : getDatePartitionEnabled().hashCode()); hashCode = prime * hashCode + ((getDatePartitionSequence() == null) ? 0 : getDatePartitionSequence().hashCode()); hashCode = prime * hashCode + ((getDatePartitionDelimiter() == null) ? 0 : getDatePartitionDelimiter().hashCode()); hashCode = prime * hashCode + ((getUseCsvNoSupValue() == null) ? 0 : getUseCsvNoSupValue().hashCode()); hashCode = prime * hashCode + ((getCsvNoSupValue() == null) ? 0 : getCsvNoSupValue().hashCode()); hashCode = prime * hashCode + ((getPreserveTransactions() == null) ? 0 : getPreserveTransactions().hashCode()); hashCode = prime * hashCode + ((getCdcPath() == null) ? 0 : getCdcPath().hashCode()); hashCode = prime * hashCode + ((getCannedAclForObjects() == null) ? 0 : getCannedAclForObjects().hashCode()); hashCode = prime * hashCode + ((getAddColumnName() == null) ? 0 : getAddColumnName().hashCode()); hashCode = prime * hashCode + ((getCdcMaxBatchInterval() == null) ? 0 : getCdcMaxBatchInterval().hashCode()); hashCode = prime * hashCode + ((getCdcMinFileSize() == null) ? 0 : getCdcMinFileSize().hashCode()); hashCode = prime * hashCode + ((getCsvNullValue() == null) ? 0 : getCsvNullValue().hashCode()); hashCode = prime * hashCode + ((getIgnoreHeaderRows() == null) ? 0 : getIgnoreHeaderRows().hashCode()); hashCode = prime * hashCode + ((getMaxFileSize() == null) ? 0 : getMaxFileSize().hashCode()); hashCode = prime * hashCode + ((getRfc4180() == null) ? 0 : getRfc4180().hashCode()); return hashCode; } @Override public S3Settings clone() { try { return (S3Settings) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.databasemigrationservice.model.transform.S3SettingsMarshaller.getInstance().marshall(this, protocolMarshaller); } }
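The fluent "with" methods above are designed to be chained. As a minimal sketch of how the CSV-related settings documented in this class might be combined (the particular values are purely illustrative, and the rest of the DMS endpoint configuration is assumed to be handled elsewhere):

    S3Settings csvSettings = new S3Settings()
            .withCsvNullValue("")       // treat the empty string as the null value on the target
            .withIgnoreHeaderRows(1)    // skip the first (header) row of source .csv files
            .withMaxFileSize(32768)     // cap each full-load .csv object at 32,768 KB (32 MB)
            .withRfc4180(true);         // apply RFC 4180 quoting for embedded quotes and newlines

    System.out.println(csvSettings);    // the generated toString() prints only the fields that were set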
Dye Precursor Molecules on NiO(001) Studied by Non-Contact Atomic Force Microscopy

The properties of NiO, such as charge transport or optoelectronic characteristics, can be modified by functionalization with organic molecules. Such organic/inorganic surfaces are of great interest, in particular for the design of hybrid devices like dye-sensitized solar cells. However, a key parameter in the design of optimized interfaces is not only the choice of the compounds but also their adsorption properties. Thus, fundamental studies of such hybrid systems at the nanoscale are desirable. So far, characterization of adsorbates at ambient temperature through spectroscopy techniques, such as x-ray photoelectron spectroscopy, has been limited to large agglomerates or self-assembled molecules. Recently, first studies of the adsorption properties of single molecules on NiO, measured by force microscopy at low temperatures, have been published. This limit can be stretched to individual adsorbates measured by means of non-contact atomic force microscopy at room temperature.
import * as sinon from 'sinon'
import { expect } from 'chai'
import * as peers from '../../../packages/shared/comms/peers'
import { getVisibleAvatarsUserId } from '../../../packages/shared/sceneEvents/visibleAvatars'
import { AvatarMessageType } from '../../../packages/shared/comms/interface/types'
import * as sceneManager from '../../../packages/shared/world/parcelSceneManager'

function sendAvatarMessage(uuid: string, visible: boolean) {
  peers.receiveUserVisible(uuid, visible)
}

function removeAvatarMessage(uuid: string) {
  peers.avatarMessageObservable.notifyObservers({ type: AvatarMessageType.USER_REMOVED, uuid })
}

function mockGetUser() {
  sinon.stub(peers, 'getUser').callsFake((userId: string) => ({ userId }))
}

let sceneEventsMocked

beforeEach(() => {
  mockGetUser()
  sceneEventsMocked = sinon.stub(sceneManager, 'allScenesEvent')
})

afterEach(() => {
  // clear visible avatars cache
  const users = getVisibleAvatarsUserId()
  users.forEach(u => sendAvatarMessage(u, false))
  sinon.restore()
  sinon.reset()
})

describe('Avatar observable', () => {
  it('should return users A and B that are visible at the scene', () => {
    const userA = 'user-a'
    const userB = 'user-b'
    const userC = 'user-c'
    sendAvatarMessage(userA, true)
    sinon.assert.calledWith(sceneEventsMocked, { eventType: 'playerConnected', payload: { userId: userA } })
    sendAvatarMessage(userB, true)
    sinon.assert.calledWith(sceneEventsMocked, { eventType: 'playerConnected', payload: { userId: userB } })
    sceneEventsMocked.reset()
    sendAvatarMessage(userC, false)
    sinon.assert.notCalled(sceneEventsMocked)
    expect(getVisibleAvatarsUserId()).to.eql([userA, userB])
  })

  it('should remove user when he leaves the scene', () => {
    const userA = 'user-a'
    const userB = 'user-b'
    sendAvatarMessage(userA, true)
    sinon.assert.calledWith(sceneEventsMocked, { eventType: 'playerConnected', payload: { userId: userA } })
    sendAvatarMessage(userB, true)
    sinon.assert.calledWith(sceneEventsMocked, { eventType: 'playerConnected', payload: { userId: userB } })
    expect(getVisibleAvatarsUserId()).to.eql([userA, userB])
    sendAvatarMessage(userA, false)
    sinon.assert.calledWith(sceneEventsMocked, { eventType: 'playerDisconnected', payload: { userId: userA } })
    expect(getVisibleAvatarsUserId()).to.eql([userB])
  })

  it('should not add the users if there is no info about them (race condition)', () => {
    sinon.restore()
    const userA = 'user-a'
    sendAvatarMessage(userA, true)
    expect(getVisibleAvatarsUserId()).to.eql([])
    sinon.assert.notCalled(sceneEventsMocked)
  })

  it('should remove the user from the cache if we receive a USER_REMOVED action', () => {
    const userA = 'user-a'
    const userB = 'user-b'
    sendAvatarMessage(userA, true)
    sendAvatarMessage(userB, true)
    expect(getVisibleAvatarsUserId()).to.eql([userA, userB])
    removeAvatarMessage(userA)
    sinon.assert.calledWith(sceneEventsMocked, { eventType: 'playerDisconnected', payload: { userId: userA } })
    expect(getVisibleAvatarsUserId()).to.eql([userB])
  })
})
For his Record Club series, Beck gathers friends together to cover an entire album in a day, then posts the results to his website. So far, the Record Club choices have come from canonical or at least critically respected artists: the Velvet Underground, Skip Spence, INXS, Leonard Cohen. But now Beck and friends have made their first batshit-ass crazy selection: Live at the Acropolis, the 1993 album from mustachioed Greek new age composer Yanni. As previously reported, Beck has enlisted Thurston Moore and Tortoise in his latest Record Club endeavor. (There's something weirdly appropriate about the idea of Tortoise covering Yanni.) The first track, "Santorini", is up now, with a new track to be posted every week on Beck's website throughout the summer. Tortoise don't appear on "Santorini"; they won't show up until later in the album. Beck's website reports that "several studio musician heavyweights were brought in to read a heavily doctored score with interpolations of everything from Stravinsky to Shania Twain (look for others)." While that's happening, Beck runs around making noise and Thurston improvises new lyrics that, per Beck's website, "give the track an added urgency and pathos." And since you asked, here's a sampling of those new lyrics: "Roasted pigs! Roasted pigs! Oinkin' oinkin' oinkin' to the night sky! Orange orange disaster!" Below, you can watch Beck, Thurston, and friends take on "Santorini". For comparison's sake, we've also posted a video of the original-- part of a Yanni concert special that ran over and over on PBS in the mid 90s. It's going to be a weird summer.
Canton of Switzerland Canton The canton of Appenzell Innerrhoden ( German: [ˈapənˌtsɛl ˈɪnərˌroːdən] (); in English sometimes Appenzell Inner-Rhodes) is the smallest canton of Switzerland by population and the second smallest by area, with canton of Basel-City being the smallest. It was the last Swiss canton to grant women the right to vote on local issues, in 1991. History [ edit ] Foundation [ edit ] The name Appenzell (Latin: abbatis cella) means "cell (i.e. estate) of the abbot". This refers to the Abbey of St. Gall, which exerted a great influence on the area. By the middle of the 11th century the abbots of St. Gall had established their power in the land later called Appenzell, which, too, became thoroughly Teutonized, its early inhabitants having probably been Romanized Raetians. By about 1360, conflicts over grazing rights, taxes, and tithes were causing concern for both the abbot and the farmers of Appenzell. Both parties wanted to protect their rights and interests by joining the new Swabian League. In 1377 Appenzell was allowed to join the League with the support of the cities of Konstanz and St. Gallen (the city of St. Gallen was often at odds with the neighboring Abbey of St. Gall). With the support of the League, Appenzell refused to pay many of the gifts and tithes that the Abbot Kuno von Stoffeln demanded. In response to the loss of revenue from his estates, Kuno approached the Austrian House of Habsburg for help. In 1392 he made an agreement with the Habsburgs, which was renewed in 1402. In response, in 1401 Appenzell entered into an alliance with the city of St. Gallen to protect their rights and freedom.[3] Independence and joining the Swiss Confederation [ edit ] Battle of Vögelinsegg Following increasing conflicts between the Appenzellers and the abbot's agents, including the bailiff of Appenzell demanding that a dead body be dug up because he wanted the man's clothes,[4] the Appenzellers planned an uprising. On a pre-arranged day, throughout the abbot's lands, they attacked the bailiffs and drove them out of the land. Following unsuccessful negotiations Appenzell and St. Gallen entered into a treaty, which marked a break between the abbot and his estates. Perhaps fearing the Habsburgs, in 1402 the League expelled Appenzell. During the same year, St. Gallen reached an agreement with the abbot, and Appenzell could no longer count on St. Gallen's support. Appenzell declared itself ready to stand against the abbot, and in 1403 formed an alliance with the Canton of Schwyz, a member of the Old Swiss Confederation that had defeated the Austrians in the previous century. Glarus provided less support, but authorized any citizen who wished to support Appenzell to do so.[4] In response, the League raised an army and marched to St. Gallen before heading toward Appenzell. On 15 May 1403, they entered the pass leading to Speicher and outside the village of Vögelinsegg met the Appenzell army. A small force of Appenzell and Confederation troops defeated the League army and the two sides signed a short-lived peace treaty. Following another Appenzell victory on 17 June 1405, at Stoss Pass on the border of Appenzell town, the new canton continued to expand.[3] During the expansion, Appenzell even captured the abbot of St. Gall and in response they were excommunicated by the Bishop of Constance.[4] However, while the Bund expanded, the Austrians used the peace to regain their strength. 
On 11 September 1406 an association of nobles formed a knightly order known as the Sankt Jörgenschild (Order of St. George's Shield) to oppose the rebellious commoners of the Bund.[5] Following a defeat at Bregenz, Appenzell was unable to hold the Bund together. The city of St. Gallen and the Canton of Schwyz each paid off the Austrians to avoid an attack, and the Bund was dissolved by King Rupert of Germany on 4 April 1408.[3] As part of the peace treaty, the abbot gave up his ownership of Appenzell, but was still owed certain taxes.[4] However, it was not until 1410 that the area was at peace.[3] In 1411 Appenzell signed a defensive treaty with the entire Swiss Confederation (except Bern), which strengthened their position against the abbot. Appenzell joined the Confederation as an "Associate Member", and did not become a full member until 1513. Following another battle, in 1429, Appenzell was granted freedom from the obligations in the future. This treaty represented the end of Appenzell's last financial tie to the Abbey of St. Gall, and a movement towards closer relationships with the Confederation.[3] Division of Appenzell [ edit ] Starting in 1522, followers of Martin Luther and Huldrych Zwingli began to preach the Protestant Reformation in Appenzell. The early reformers had the most success in the outer Rhoden, a term that in the singular is said to mean a "clearing", and occurs in 1070, long before the final separation. Following the initial small success, in 1523 Joachim von Watt (also known as Joachim Vadian) began to preach the reformed version of the Acts of the Apostles to friends and fellow clergy. His preaching brought the Reformation into the forefront of public debate. In October 1523, the Council supported the Protestant principle of scriptural sermons, and on 24 April 1524 the Landsgemeinde confirmed the Cantonal Council's decision. However, the work of the Anabaptists in the Appenzell region (as well as in Zurich and St. Gallen) in 1525 led to government crackdowns. The first police action against the Anabaptists took place in June 1525, followed by the Anabaptist Disputation in Teufen in October 1529.[6] To end the confrontation between the old and new faiths, the Landesgemeinde decided in April 1525 that each parish should choose a faith, but that the principle of free movement would be supported, so that the religious minority could attend the church of their choice regardless of where they lived. The entire Appenzell Ausserrhoden converted to the Reformation in 1529. The Innerrhoden remained with the old faith. While the majority of the residents of Appenzell town remained Catholic under Pastor Diepolt Huter, there was a strong Reformed minority. In 1531, the minority were nearly successful in getting the town to ally with the Protestant Ausserrhoden. But an armed mob of angry residents from the neighboring village of Gonten prevented the abolition of the Mass in Appenzell. The Catholic victory in the Second War of Kappel in 1531 ended plans for a reformation of the entire Canton of Appenzell.[6] After the Second War of Kappel, the two religions reached a generally peaceful parity. They remained united by common business interests, the same political and legal understanding, a shared desire to form an alliance with France and a shared opposition to the city of St. Gallen. This shared opposition to St. 
Gallen was demonstrated in the so-called linen affairs (1535–42, 1579), in which the weavers throughout Appenzell supported each other when they felt that they were unfairly treated by the linen industry of St. Gallen.[6] From 1798 to 1803 Appenzell, with the other domains of the abbot of St Gall, was formed into the canton of Säntis of the Helvetic Republic, but in 1803, on the creation of the new canton of St. Gall, shrank back within its former boundaries. Women's right to vote, 1991 [ edit ] Appenzell Innerrhoden was the last canton in Switzerland to grant women the right to vote on local issues, being forced to do so only in 1990 when two women from Appenzell filed a lawsuit in the Swiss Federal Court and won. A centuries-old law forbidding women from voting was changed in 1991, when Switzerland's federal court ordered the canton to grant women the right to vote.[7] Geography [ edit ] Most of the canton is pastoral, this despite being mountainous. The Säntis peak in the Appenzell Alps is one of the main attractions of the canton. There are three small mountain lakes in the canton: Seealpsee, Sämtisersee and Fälensee. Politics [ edit ] Appenzell is the capital of this canton. The constitution was established in 1872. Citizens from the canton assemble each year on the last Sunday of April for the Landsgemeinde (general assembly) in a square in Appenzell; they elect the cantonal government and judiciary, accept the oath of the elected members, and vote on several issues on the agenda; about 4000 citizens usually participate.[8] In 1991, following a decision by the Federal Supreme Court of Switzerland, Appenzell Innerrhoden became the last Swiss canton to grant women the vote on local issues.[9] It also was the canton with the strongest support for the minaret ban in the Swiss minaret referendum, 2009, at 71%. Political subdivisions [ edit ] Appenzell Innerrhoden districts In Appenzell Innerrhoden, districts are the lowest administrative division, as the canton has no municipalities (except for the Feuerschaugemeinde, a special-purpose municipality for firefighting, energy and water for the town of Appenzell). The districts are functionally equivalent to municipalities elsewhere in Switzerland, and are generally shown as municipalities on maps etc. The canton is divided into 6 districts: Government [ edit ] The State Commission (Standeskommission) constitutes the executive government of the canton of Appenzell Inner-Rhodes and operates as a collegiate authority. It is composed of seven councillors (German: Regierungsrat/-rätin), each presiding over a department (Departement) comprising several offices and bureaus. The president of the executive department acts as an executive president (regierender Landammann). In the mandate period (Legislatur) 2015 – 2017 the State Commission is presided by regierender Landammann Roland Innauen. The regierende Landammann and the stillstehender Landammann (stationary president) are switched every two years. Departmental tasks, coordination measures and implementation of laws decreed by the canton's parliament, the Grosse Rat, and the Landsgemeinde, as well as legal judgments are carried by the State Commission. The regular (re-)election of the State Commission by the Landsgemeinde is held every year. Any resident of Appenzell Inner-Rhodes allowed to vote can be elected as a member of the State Commission. 
The assignment of the departments for the Säckelmeister (minister of finance), Landeshauptmann (country's captain), Bauherr (minister of building and civil engineering), and Landesfähnrich (country's ensign) are given by their roles. The departments of education, economics, and health and social services are assigned by the collegiate to the two Landammänner and the Statthalter.[10] As of September 2016 , Appenzell Inner-Rhodes' State Commission is made up of five members of the CVP (Christian Democratic People's Party), and two independents of whom one is currently also the president. The last regular election (Landsgemeinde) was held on 24 April 2016.[10] The State Commission (Standeskommission) of Appenzell Innerrhoden[10] Executive Councillor (Regierungsrat/-rätin) Title Party Head of Department (Departement, since) of elected since Roland Innauen regierender Landammann 2015-2017 Independent Education (Erziehungsdepartement, ?) 2013 Daniel Fässler stillstehender Landammann 2015-2017 CVP Economics (Volkswirtschaftsdepartement, ?) 2008 Antonia Fässler Frau Statthalter CVP Health and Social Services (Gesundheits- und Sozialdepartement, 2010) 2010 Thomas Rechtsteiner Säckelmeister CVP Finances (Finanzdepartement, 2011) 2011 Stefan Müller Landeshauptmann CVP Agriculture and Forestry (Land- und Forstwirtschaftsdepartement, 2015) 2015 Stefan Sutter Bauherr CVP Building and Civil Engineering (Bau- und Umweltdepartement, 2005) 2005 Martin Bürki Landesfähnrich Independent Judiciary, Police, and Military (Justiz-, Polizei- und Militärdepartement, 2012) 2012 Parliament [ edit ] Landsgemeinde [ edit ] Citizens from the canton assemble on the last Sunday of April each year for the Landsgemeinde (literally: country's assembly) in the central square in Appenzell, elect the cantonal government and judiciary, accept the oath of the elected members, and vote on several issues put forward by the government and/or the parliament. It is presided over by the regierende Landammann. Every Swiss citizen of Appenzell Inner-Rhodes older than 18 has the right to vote and to step onto the Stuhl (literally the chair, but in fact the podium) and to debate the election of the proposed members of government and judiciary, or any of the issues to be voted on. After the State Commission have taken their seats on the Stuhl and the regierende Landammann opens and welcomes the assembly,(s)he then presents the state bill[clarification needed]. Then the discussions are opened. Then the elections and the subsequent oaths take place. Finally the votes on the programmed issues are taken.[11] Grosser Rat [ edit ] The Grosser Rat or Grand Council is composed of 50 members, elected in the six districts of the canton. In five of the six districts, the election takes place once a year in the annual session of the district equivalent of the Landsgemeinde and therefore takes place in the open. In the other district, Oberegg District, which forms two small exclaves of the canton a few miles northeast of the rest, a separate ballot is held in May. The most important tasks of the Council are providing preliminary advice on the constitutional and legislative proposals to be placed before the Landsgemeinde. Also, the issuing of regulations to enforce the canton's legislation and supervision of all the cantonal authorities. 
The Council reviews and approves the annual financial statements, and sets the budget and the tax rates.[12] Federal elections [ edit ] National Council [ edit ] On 18 October 2015, in the federal election for the Swiss National Council the most popular party was the Christian Democratic People's Party (CVP) which won the only seat for the canton with 76.3% of the vote. With this result the previous National Councillor Daniel Fässler, who also holds the position of the stillhaltender Landammann of Appenzell Inner-Rhodes, was re-elected. The opponent of the SP received 18.1% of the vote. In the federal election, a total of 4,090 votes were cast, and the voter turnout was 36.7%.[13] National Councillor (Nationalrat/ -rätin) of AI[14] Councillor Party part of the National Council since votes Daniel Fässler CVP 2011 3121 Council of States [ edit ] On 18 October 2015, in the federal election State Councillor (German: Ständerat) Ivo Bischofberger, member of the Christian Democratic People's Party (CVP), was re-elected in the first round as single representative of the canton of Appenzell Inner-Rhodes in the national Council of States (German: Ständerat). He has been a member of it since 2007.[16] Demographics [ edit ] The population of the canton (as of 31 December 2017) is 16,105.[17] As of 2007 , the population included 1,510 foreigners, or about 9.76% of the total population.[18] Owing to the split of Appenzell along religious lines, the population (as of 2000 ) is nearly all Roman Catholic (81%), with a small Protestant minority (10%).[19] Economy [ edit ] Cattle breeding and dairy farming are the main agricultural activities: Appenzeller cheese is widely available throughout Switzerland. Somewhat before the early 2000s, the idyllic countryside of Appenzell Innerrhoden apparently became popular with nudists, and at the 2009 Landsgemeinde the canton's residents voted to prohibit naked hiking. Violators would be fined.[20] However nudists who appealed against their fines to the federal court have been reimbursed by the local authorities, as nudism is not a crime under Swiss federal law which takes precedence.[21] It is common for cars rented in Switzerland to be registered in Appenzell Innerrhoden, and thus having license plates starting with "AI", because of the reduced tax on cars in this canton. See also [ edit ]
/** * Add a new pattern if pattern name is null * Update the pattern if a pattern name is provided * * @param patternName - patternName to be added or updated */ private void updateOrSave(String patternName) { InjectionPatternEntity patternEntity; if (null == patternName) { patternEntity = new InjectionPatternEntity(); } else { patternEntity = entity.getPattern(patternName); } final java.util.List<String> existingPatterns = getExistingInjectionFilterPatternNames(entity.getFilterName()); final InjectionPatternEntityDialog dialog = new InjectionPatternEntityDialog(InjectionFilterEntityDialog.this, patternEntity); dialog.setLocationRelativeTo(this); dialog.setVisible(true); if (dialog.isConfirmed()) { final InjectionPatternEntity updatedPatternEntity = dialog.getEntity(); final String updatedPatternName = updatedPatternEntity.getName(); if ((null == patternName && entity.patternExists(updatedPatternName)) || (patternName != null && !patternName.equals(updatedPatternName) && existingPatterns.contains(updatedPatternName))) { JOptionPane.showMessageDialog(dialog, String.format(ResourceBundle.getBundle(GUI).getString("InjectionFilterAssertion.ui.error.msg.pattern.save"), updatedPatternName), ResourceBundle.getBundle(GUI).getString("InjectionFilterAssertion.ui.error.title"), JOptionPane.WARNING_MESSAGE); dialog.unconfirm(); } else { entity.addOrUpdatePattern(updatedPatternEntity); this.reloadTable(); } } }
The New York Times reports that the Chrysler brinkmanship continues, with some small hedge funds acting as pigs in the hope of extracting yet more concessions from the government, as the bankruptcy deadline looms. The reason the funds can play such hardball is that the Administration does not want to BK Chrysler. Despite all the cheery assertions of a speedy bankruptcy, there is no assurance that would indeed come to pass. Even in prepacks, where the creditors have an arrangement they want a judge to bless, the process can become protracted. And a Chapter 11 filing with no creditor deal runs the risk of loss of customers as uncertainty hangs over the manufacturer, and a Chapter 11 could morph into a liquidation, with not only big job losses, but damage to significant suppliers who in turn may fail, causing problems for other US automakers and transplants.

The latest update on the negotiations from the New York Times:

Last-minute efforts by the Treasury Department to win over recalcitrant Chrysler debtholders failed Wednesday night, setting up a near-certain bankruptcy filing by the American automaker, according to people briefed on the talks. Barring an agreement, which looked increasingly difficult, Chrysler was expected to seek Chapter 11 protection on Thursday, most likely in New York, these people said. The automaker, which is in talks with the Italian automaker Fiat, would file for bankruptcy first. It subsequently would present an agreement with Fiat to the court for approval, possibly on Monday, these people said. They requested anonymity because they were not authorized to speak for the government. A bankruptcy filing by Chrysler would be the first by one of Detroit’s three auto companies amid a devastating slump, and could serve as a preview of what a filing by General Motors might look like. G.M., which like Chrysler received federal assistance last year, faces a June 1 deadline for its own restructuring. To win over several hedge funds, which have been holding out for better terms, the Treasury increased its cash offer to holders of Chrysler’s secured debt by $250 million, to $2.25 billion, these people said. If all of the secured holders would agree to the new deal, which would give them the cash in exchange for retiring about $6.9 billion of debt, Chrysler would still have a chance of restructuring out of bankruptcy court. Several investment funds, however, continued to reject the Treasury’s sweetened offer at a vote of the lenders on Wednesday evening, people familiar with the talks said…. The Obama administration is adamant that every lender participate in the debt swap, according to people close to the talks. One reason is that the deal would face legal challenges.

The Wall Street Journal also sees a Chapter 11 filing as “imminent”:

Talks between the Treasury Department and lenders aimed at keeping Chrysler LLC out of bankruptcy broke down Wednesday, making it all but certain the car maker will file for Chapter 11 protection Thursday, according to people familiar with the discussions. Administration officials, who have been braced for a Chrysler bankruptcy filing for weeks, say all the pieces are in place to get the company through the court quickly, perhaps in a matter of weeks. The talks with Chrysler’s lenders broke down after the Obama administration’s automotive task force worked into the evening to persuade several hedge funds and other lenders to accept a deal to reduce Chrysler’s debt, said people involved in the talks.
The Treasury boosted its most recent offer to lenders on Wednesday by $250 million to $2.25 billion in cash for the banks and hedge funds to forgive $6.9 billion in Chrysler debt, people familiar with the matter said. J.P. Morgan Chase & Co., which leads the creditor group as Chrysler’s largest lender, gave the other 45 banks and hedge funds 90 minutes Wednesday to vote on the deal. A large number of the funds voted no and refused to budge, paving the way for an all but unavoidable trip to bankruptcy court, said people close to the talks.

How can the Administration spew such rubbish? Bankruptcies go quickly in the courthouse ONLY if there is a pre-negotiated deal with creditors. With no agreement, the fight with the lenders will continue in court.

James Kwak parsed the Administration’s dilemma:

I’ve been writing a lot about the game of chicken recently, most often in connection with the GM and Chrysler bailouts. On the Chrysler front, the game is in its last hours. Even after a consortium of large banks agreed to the proposed debt-for-equity swap, some smaller hedge funds are holding out for more money, and even the extra $250 million that Treasury agreed to kick in seems unlikely to keep Chrysler out of bankruptcy. The problem is that bankruptcy is the only weapon Chrysler and Treasury have in this fight, and it’s a strategic nuclear weapon. Bankruptcy is the only threat that can get the bondholders to agree to a swap; but because a bankruptcy carries some risk of destroying Chrysler (because control will lie in the hands of a bankruptcy judge – not Chrysler, Treasury, the UAW, or Fiat), and taking hundreds of thousands of jobs with it, everyone knows that Treasury would prefer not to use it. The bondholders are betting that they can use Treasury’s fear of a bankruptcy to extract better terms at the last minute. (And it’s even possible that the large banks agreed to the swap knowing they could count on the smaller, less politically exposed hedge funds to veto it.) But Treasury may still press the button, because it needs to make a statement in advance of the bigger GM confrontation scheduled for a month from now.

So Treasury cannot win. If it calls the banks’ bluff, it risks a slow-motion Lehman. The Times says “people briefed at the negotiation” believe Chrysler would emerge from Chapter 11. But bankruptcy, like war, has uncertain outcomes, and no automaker has emerged from bankruptcy (they have either been liquidated or sold in pieces or in their entirety). Note Lehman was not rescued for essentially political reasons: it was time to draw the line and show that there were indeed consequences for mismanagement. Here, again, some scalps need to be harvested, since the banks are now completely out of hand in the utter shamelessness of their extortion. Kwak details their intransigence on other fronts in his post.

Revenge for this sort of behavior is often served cold. Recall that Bear Stearns’ refusal to participate in the LTCM bailout created ill will that caught up with it a decade later. But that’s no remedy in real time, when this rent seeking is taking place. In the bad old days, you might have seen extra-legal measures. J. Edgar Hoover, then head of the FBI, was known to have dirt on pretty much anyone of consequence. People who take noisy political stands seem more subject than the average Joe to highly intrusive and costly IRS audits; uncooperative hedgies would seem ideal targets for that sort of harassment, and that goes double for any entities that are subject to US regulations.
But I am concerned this behavior is setting the stage for another sort of extra-legal measure: violence. I have been amazed at the vitriol directed at the banking classes. Suggestions for punishment have included the guillotine (frequent), hanging, pitchforks, even burning at the stake. Tar and feathering appears inadequate, and stoning hasn’t yet surfaced as an idea. And mind you, my readership is educated, older, typically well off (even if less so than three years ago). The fuse has to be shorter where the suffering is more acute.

But the banksters are eagerly, shamelessly, and openly harvesting their pound of flesh from financially stressed average taxpayers, and setting off a chain reaction in the auto industry which has the very real risk of creating even larger-scale unemployment than the economy already faces. It’s reckless, utterly irresponsible, over-the-top greed. And there is a tipping point, which I sense is closer than most imagine. Nassim Nicholas Taleb points out that thirteen centuries of peaceful ethnic coexistence in Lebanon exploded overnight into brutal, completely unexpected civil war. Everyone assumes America is too complacent for class warfare in the literal sense to erupt. The way the banksters are demanding to be disciplined, that assumption may prove naive.
package orchi.HHCloud.provider; public class ProviderNotFoundException extends RuntimeException { public ProviderNotFoundException(){ super(); } public ProviderNotFoundException(String msg) { super(msg); } public ProviderNotFoundException(Exception e) { super(e); } }
<filename>dialer/quic/conn.go package quic import ( "context" "net" "github.com/lucas-clemente/quic-go" ) type quicSession struct { session quic.EarlyConnection } func (session *quicSession) GetConn() (*quicConn, error) { stream, err := session.session.OpenStreamSync(context.Background()) if err != nil { return nil, err } return &quicConn{ Stream: stream, laddr: session.session.LocalAddr(), raddr: session.session.RemoteAddr(), }, nil } func (session *quicSession) Close() error { return session.session.CloseWithError(quic.ApplicationErrorCode(0), "closed") } type quicConn struct { quic.Stream laddr net.Addr raddr net.Addr } func (c *quicConn) LocalAddr() net.Addr { return c.laddr } func (c *quicConn) RemoteAddr() net.Addr { return c.raddr }
/** * :: DeveloperApi :: * A fast hash map implementation for nullable keys. This hash map supports insertions and updates, * but not deletions. This map is about 5X faster than java.util.HashMap, while using much less * space overhead. * <p> * Under the hood, it uses our OpenHashSet implementation. */ class OpenHashMap<K extends java.lang.Object, V extends java.lang.Object> implements scala.collection.Iterable<scala.Tuple2<K, V>>, scala.Serializable { // not preceding // TypeTree().setOriginal(TypeBoundsTree(TypeTree().setOriginal(Select(Select(Ident(_root_), scala), scala.Nothing)), TypeTree().setOriginal(Select(Select(Ident(_root_), scala), scala.Any)))) // TypeTree().setOriginal(TypeBoundsTree(TypeTree().setOriginal(Select(Select(Ident(_root_), scala), scala.Nothing)), TypeTree().setOriginal(Select(Select(Ident(_root_), scala), scala.Any)))) public OpenHashMap (int initialCapacity, scala.reflect.ClassTag<K> evidence$1, scala.reflect.ClassTag<V> evidence$2) { throw new RuntimeException(); } public OpenHashMap (scala.reflect.ClassTag<K> evidence$3, scala.reflect.ClassTag<V> evidence$4) { throw new RuntimeException(); } protected org.apache.spark.util.collection.OpenHashSet<K> _keySet () { throw new RuntimeException(); } private java.lang.Object _values () { throw new RuntimeException(); } private java.lang.Object _oldValues () { throw new RuntimeException(); } private boolean haveNullValue () { throw new RuntimeException(); } private V nullValue () { throw new RuntimeException(); } public int size () { throw new RuntimeException(); } /** Tests whether this map contains a binding for a key. */ public boolean contains (K k) { throw new RuntimeException(); } /** Get the value for a given key */ public V apply (K k) { throw new RuntimeException(); } /** Set the value for a key */ public void update (K k, V v) { throw new RuntimeException(); } /** * If the key doesn't exist yet in the hash map, set its value to defaultValue; otherwise, * set its value to mergeValue(oldValue). * <p> * @return the newly updated value. * @param k (undocumented) * @param defaultValue (undocumented) * @param mergeValue (undocumented) */ public V changeValue (K k, scala.Function0<V> defaultValue, scala.Function1<V, V> mergeValue) { throw new RuntimeException(); } public scala.collection.Iterator<scala.Tuple2<K, V>> iterator () { throw new RuntimeException(); } protected scala.Function1<java.lang.Object, scala.runtime.BoxedUnit> grow () { throw new RuntimeException(); } protected scala.Function2<java.lang.Object, java.lang.Object, scala.runtime.BoxedUnit> move () { throw new RuntimeException(); } }
def main(): ans = random_word() result = dash(len(ans)) turns = N_TURNS history = '' while True: if turns == 0 or ans == result: break else: print('The word looks like '+result) print('You have '+str(turns)+' guess left') your_guess = input('Your guesses: ') your_guess = your_guess.upper() history += your_guess result = new_result(ans, result, your_guess) turns = check_your_guess(ans, your_guess, turns, history) final(ans, result, turns) print('The word was '+ans)
Fuzziness and PDE based models for the segmentation of medical image This paper presents a generic strategy to facilitate the segmentation of anatomical structures in medical images the segmentation of anatomical structures in medical images. The segmentation is performed using an adapted partial differential equation (PDE) based model by fuzzy c-means classification. The PDE defines a geometrical active contour model, which also uses the fuzzy decision to converge into the final contour. Furthermore, the fuzzy reasoning exploits a priori statistical information from several knowledge sources based on histogram analysis and the intensity values of the anatomical structures under consideration. The advantage of using a geometric representation is that the algorithm can handle changes in the topology of the shape as the curve/surface evolves in time. Moreover, the fuzzy-PDE model is able to solve some drawbacks present at other PDE based models.
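The following is a minimal sketch, not the paper's implementation, of the idea described above: fuzzy c-means memberships computed on image intensities provide the "fuzzy decision" that a geometric (level-set) active contour would use as its region force. The synthetic image, the number of classes, and the way the membership map is coupled into the PDE are assumptions made purely for illustration.

import numpy as np

def fuzzy_c_means_1d(values, n_classes=2, m=2.0, n_iter=50, seed=0):
    """Fuzzy c-means on a flat array of intensities; returns (memberships, centers)."""
    rng = np.random.default_rng(seed)
    u = rng.random((n_classes, values.size))
    u /= u.sum(axis=0, keepdims=True)                      # memberships sum to 1 per pixel
    for _ in range(n_iter):
        w = u ** m
        centers = (w @ values) / w.sum(axis=1)             # weighted class centroids
        d = np.abs(values[None, :] - centers[:, None]) + 1e-9
        u = 1.0 / (d ** (2.0 / (m - 1.0)))
        u /= u.sum(axis=0, keepdims=True)
    return u, centers

# Synthetic "medical" image: a bright structure on a darker, noisy background.
img = np.zeros((64, 64))
yy, xx = np.mgrid[:64, :64]
img[(yy - 32) ** 2 + (xx - 32) ** 2 < 15 ** 2] = 1.0
img += 0.2 * np.random.default_rng(1).standard_normal(img.shape)

u, centers = fuzzy_c_means_1d(img.ravel(), n_classes=2)
target = int(np.argmax(centers))                           # class with the brighter centroid
membership = u[target].reshape(img.shape)

# The membership map plays the role of the fuzzy decision: a level-set PDE could use
# something like speed = 2*membership - 1 as its region term, and the thresholded map
# as the initial contour that the curve evolution then refines.
speed = 2.0 * membership - 1.0
initial_contour = membership > 0.5
print("object pixels in initialisation:", int(initial_contour.sum()))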
import functools import gc from abc import ABC from sources.datasets.client_dataset_definitions.client_dataset_loaders.client_dataset_loader import ClientDatasetLoader, DatasetComponents from sources.datasets.client_dataset_definitions.client_dataset_processors.client_dataset_processor import ClientDatasetProcessor from sources.utils.exception_definitions import OutsideOfContextError def throw_error_outside_context(func): @functools.wraps(func) def wrapper_decorator(self, *args, **kwargs): if not self.within_context: raise OutsideOfContextError( """Error: Tried to access client Dataset outside of context manager. This might lead to data leaks and bad use of memory. Please wrap the usage of ClientDataset.dataset_x inside a "with statement". """) else: value = func(self, *args, **kwargs) return value return wrapper_decorator class ClientDataset(ABC): def __init__(self, client_identifier: str, client_dataset_loader: ClientDatasetLoader, client_dataset_processor: ClientDatasetProcessor, ): self.client_identifier = client_identifier self.client_dataset_loader = client_dataset_loader self.client_dataset_processor = client_dataset_processor self._train_data = None self._test_data = None self._validation_data = None self.within_context = False def process_x(self, raw_x_batch): """Pre-processes each batch of features before being fed to the model.""" return self.client_dataset_processor.process_x(raw_x_batch) def process_y(self, raw_y_batch): """Pre-processes each batch of labels before being fed to the model.""" return self.client_dataset_processor.process_y(raw_y_batch) def _lazy_initialise_data(self, data, dataset_component: DatasetComponents): if data is None: data = self.client_dataset_loader.load_dataset(self.client_identifier, dataset_component) return self.process_x(data["x"]), self.process_y(data["y"]) else: return data @property @throw_error_outside_context def training_data(self): """Returns the Training Data as pair of arrays containing the samples x, and classification y""" self._train_data = self._lazy_initialise_data(self._train_data, DatasetComponents.TRAIN) return self._train_data @property @throw_error_outside_context def training_data_x(self): """Returns the Training Data as an array of samples""" self._train_data = self._lazy_initialise_data(self._train_data, DatasetComponents.TRAIN) return self._train_data[0] @property @throw_error_outside_context def training_data_y(self): """Returns the Classifications for the Training Data as array""" self._train_data = self._lazy_initialise_data(self._train_data, DatasetComponents.TRAIN) return self._train_data[1] @property @throw_error_outside_context def test_data(self): """Returns the Training Data as pair of arrays containing the samples x, and classification y""" self._test_data = self._lazy_initialise_data(self._test_data, DatasetComponents.TEST) return self._test_data @property @throw_error_outside_context def test_data_x(self): """Returns the Test Data as an array of samples""" self._test_data = self._lazy_initialise_data(self._test_data, DatasetComponents.TEST) return self._test_data[0] @property @throw_error_outside_context def test_data_y(self): """Returns the Classifications for the Test Data as array""" self._test_data = self._lazy_initialise_data(self._test_data, DatasetComponents.TEST) return self._test_data[1] @property @throw_error_outside_context def validation_data(self): """Returns the Validation Data as pair of arrays containing the samples x, and classification y""" self._validation_data = self._lazy_initialise_data( 
self._validation_data, DatasetComponents.VALIDATION) return self._validation_data @property @throw_error_outside_context def validation_data_x(self): """Returns the Validation Data as an array of samples""" self._validation_data = self._lazy_initialise_data( self._validation_data, DatasetComponents.VALIDATION) return self._validation_data[0] @property @throw_error_outside_context def validation_data_y(self): """Returns the Classifications for the Validation Data as array""" self._validation_data = self._lazy_initialise_data( self._validation_data, DatasetComponents.VALIDATION) return self._validation_data[1] def __enter__(self): self.within_context = True def __exit__(self, exc_type, exc_value, exc_traceback): self.within_context = False self._train_data = None self._test_data = None self._validation_data = None gc.collect()
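A short usage sketch for the class above. The stub loader and processor below are hypothetical stand-ins written only for illustration; real code would pass the project's own ClientDatasetLoader and ClientDatasetProcessor implementations.

class StubLoader:
    def load_dataset(self, client_identifier, dataset_component):
        # Returns the raw {"x": ..., "y": ...} mapping the real loader would produce.
        return {"x": [[0.0, 1.0], [1.0, 0.0]], "y": [0, 1]}


class StubProcessor:
    def process_x(self, raw_x_batch):
        return raw_x_batch

    def process_y(self, raw_y_batch):
        return raw_y_batch


dataset = ClientDataset("client_0", StubLoader(), StubProcessor())

# Accessing the data outside a `with` block raises OutsideOfContextError; inside the
# block the data is loaded lazily, and it is released again on exit.
with dataset:
    x = dataset.training_data_x
    y = dataset.training_data_y
    print(len(x), len(y))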
<filename>src/main/java/com/jd/jdbc/planbuilder/MemorySortPlan.java /* Copyright 2021 JD Project Authors. Licensed under Apache-2.0. Copyright 2019 The Vitess Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.jd.jdbc.planbuilder; import com.jd.jdbc.engine.MemorySortEngine; import com.jd.jdbc.engine.OrderByParams; import com.jd.jdbc.engine.PrimitiveEngine; import com.jd.jdbc.sqlparser.SqlParser; import com.jd.jdbc.sqlparser.ast.SQLExpr; import com.jd.jdbc.sqlparser.ast.SQLName; import com.jd.jdbc.sqlparser.ast.SQLOrderBy; import com.jd.jdbc.sqlparser.ast.SQLOrderingSpecification; import com.jd.jdbc.sqlparser.ast.expr.SQLLiteralExpr; import com.jd.jdbc.sqlparser.ast.statement.SQLSelectOrderByItem; import com.jd.jdbc.sqltypes.VtType; import java.sql.SQLException; import java.util.Map; import lombok.Getter; import lombok.Setter; /** * memorySort is the builder for engine.Limit. * This gets built if a limit needs to be applied * after rows are returned from an underlying * operation. Since a limit is the final operation * of a SELECT, most pushes are not applicable. */ @Getter @Setter public class MemorySortPlan extends ResultsBuilder implements Builder { private MemorySortEngine memorySortEngine; private MemorySortPlan(Builder bldr, Truncater truncater) { super(bldr, truncater); this.memorySortEngine = (MemorySortEngine) truncater; } /** * newMemorySort builds a new memorySort. * * @param bldr * @param orderBy * @return * @throws SQLException */ public static MemorySortPlan newMemorySortPlan(Builder bldr, SQLOrderBy orderBy) throws SQLException { MemorySortEngine memorySortEngine = new MemorySortEngine(); MemorySortPlan ms = new MemorySortPlan(bldr, memorySortEngine); if (orderBy != null && orderBy.getItems() != null) { for (SQLSelectOrderByItem orderByItem : orderBy.getItems()) { SQLExpr expr = orderByItem.getExpr(); int colNumber = -1; if (expr instanceof SQLLiteralExpr) { colNumber = PlanBuilder.resultFromNumber(ms.getResultColumnList(), (SQLLiteralExpr) expr); } else if (expr instanceof SQLName) { Column c = ((SQLName) expr).getMetadata(); for (int i = 0; i < ms.getResultColumnList().size(); i++) { ResultColumn rc = ms.getResultColumnList().get(i); if (rc.getColumn() == c) { colNumber = i; break; } } } else { throw new SQLException("unsupported: memory sort: complex order by expression: " + expr.toString()); } // If column is not found, then the order by is referencing // a column that's not on the select list. if (colNumber == -1) { throw new SQLException("unsupported: memory sort: order by must reference a column in the select list: " + orderByItem); } OrderByParams ob = new OrderByParams(colNumber, orderByItem.getType() == SQLOrderingSpecification.DESC); ms.memorySortEngine.getOrderByParams().add(ob); } } return ms; } /** * SetUpperLimit satisfies the builder interface. * This is a no-op because we actually call SetLimit for this primitive. * In the future, we may have to honor this call for subqueries. 
* * @param count */ @Override public void setUpperLimit(SQLExpr count) throws SQLException { this.memorySortEngine.setUpperLimit(SqlParser.newPlanValue(count)); } @Override public void wireup(Builder bldr, Jointab jt) throws SQLException { for (int i = 0; i < this.memorySortEngine.getOrderByParams().size(); i++) { OrderByParams orderBy = this.memorySortEngine.getOrderByParams().get(i); ResultColumn rc = this.getResultColumnList().get(orderBy.getCol()); if (VtType.isText(rc.getColumn().getType())) { // If a weight string was previously requested, reuse it. Map<ResultColumn, Integer> weightStrings = this.getWeightStrings(); Integer weightColNumber; if (weightStrings.containsKey(rc)) { weightColNumber = weightStrings.get(rc); this.memorySortEngine.getOrderByParams().get(i).setCol(weightColNumber); continue; } weightColNumber = this.getBldr().supplyWeightString(orderBy.getCol()); this.getWeightStrings().put(rc, weightColNumber); this.memorySortEngine.getOrderByParams().get(i).setCol(weightColNumber); this.memorySortEngine.setTruncateColumnCount(this.getResultColumnList().size()); } } this.getBldr().wireup(bldr, jt); } @Override public PrimitiveEngine getPrimitiveEngine() throws SQLException { this.memorySortEngine.setInput(this.getBldr().getPrimitiveEngine()); return this.memorySortEngine; } }
// The HierarchyLinker class allows you to link nodes into a tree according to the hierarchy. // The knowledge of geometry only is used here. class HierarchyLinker { public: using Node = tree_node::TreeNode<HierarchyPlace>; using Tree4d = m4::Tree<Node::Ptr>; explicit HierarchyLinker(Node::Ptrs && nodes); Node::Ptrs Link(); private: static Tree4d MakeTree4d(Node::Ptrs const & nodes); Node::Ptr FindPlaceParent(HierarchyPlace const & place); Node::Ptrs m_nodes; Tree4d m_tree; }
import * as gcp from "@pulumi/gcp"; import * as k8s from "@pulumi/kubernetes"; import * as pulumi from "@pulumi/pulumi"; import type {Config} from "./config"; export function makeKubernetes( config: Config, ): { cluster: gcp.container.Cluster; provider: k8s.Provider; kubeconfig: pulumi.Output<string>; } { const cluster = new gcp.container.Cluster( "kubernetes-cluster", { enableBinaryAuthorization: false, enableIntranodeVisibility: true, enableKubernetesAlpha: false, enableLegacyAbac: false, enableShieldedNodes: true, enableTpu: false, name: config.kubernetes.clusterName, network: "default", verticalPodAutoscaling: { enabled: true, }, workloadIdentityConfig: { identityNamespace: `${gcp.config.project}.svc.id.goog`, }, }, {protect: true}, ); const kubeconfig = pulumi .all([cluster.name, cluster.endpoint, cluster.masterAuth]) .apply(([name, endpoint, masterAuth]) => { const context = `${gcp.config.project}_${gcp.config.zone}_${name}`; return `apiVersion: v1 clusters: - cluster: certificate-authority-data: ${masterAuth.clusterCaCertificate} server: https://${endpoint} name: ${context} contexts: - context: cluster: ${context} user: ${context} name: ${context} current-context: ${context} kind: Config preferences: {} users: - name: ${context} user: auth-provider: config: cmd-args: config config-helper --format=json cmd-path: gcloud expiry-key: '{.credential.token_expiry}' token-key: '{.credential.access_token}' name: gcp `; }); const provider = new k8s.Provider("kubernetes-provider", { kubeconfig, }); return {cluster, provider, kubeconfig}; }
/** * If we are around a planet, render that. * * @param curOrbitalAngle */ protected void renderParentPlanet(double curOrbitalAngle) { double distanceToParent = curBody.getRelativeDistanceFromCenter().unScaledDistance; double mainBodyOrbitalAngle = Math.PI-curOrbitalAngle; double zIndex = (float) (20/distanceToParent); double distance = (float) (curBodyPlanet.getRelativeSize() / distanceToParent) * parentPlanetFactor; this.nearBodiesToRender.add( new BodyRenderTask(curBodyPlanet, mainBodyOrbitalAngle, zIndex, distance, curOrbitalAngle) ); }
/** * Check the results have a calibrated exposure time and pixel pitch. If not then show a dialog to * collect the calibration. * * @param results the results * @return True if calibrated */ private static boolean checkCalibration(MemoryPeakResults results) { if (results.getCalibration() == null || !results.getCalibrationReader().hasExposureTime() || !results.getCalibrationReader().hasNmPerPixel()) { final CalibrationWriter cal = results.getCalibrationWriterSafe(); final ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE); gd.addMessage("Uncalibrated results! Please enter the calibration:"); gd.addNumericField("Exposure_time (ms)", cal.getExposureTime(), 2); gd.addNumericField("Pixel_pitch (nm)", cal.getNmPerPixel(), 2); gd.showDialog(); if (gd.wasCanceled() || gd.invalidNumber()) { return false; } cal.setExposureTime(gd.getNextNumber()); cal.setNmPerPixel(gd.getNextNumber()); if (cal.getExposureTime() <= 0 || cal.getNmPerPixel() <= 0) { return false; } results.setCalibration(cal.getCalibration()); } return true; }
/** * Increments <code>count</code> and ends the initial phase if enough elements have been seen. */ private void incrementCount() { count++; if (count.equals(stepsUntilMerge)) { initialPhase = false; } }
/** * A parser and serializer for the 1.* JSON format. */ public final class CaV1JSONFormat implements CaDefinitionParserType, CaDefinitionSerializerType { private final ObjectMapper mapper; /** * Construct a new parser/serializer. * * @param in_mapper An object mapper */ public CaV1JSONFormat( final ObjectMapper in_mapper) { this.mapper = NullCheck.notNull(in_mapper, "Mapper"); } /** * @return The parserSupportedVersions supported by this parser */ public static SortedSet<CaFormatVersion> supported() { return TreeSet.of(CaFormatVersion.of(1, 0)); } @Override public Validation<javaslang.collection.List<CaParseError>, CaDefinitionSkeleton> parseSkeletonFromStream( final InputStream is, final URI uri) { NullCheck.notNull(is, "Input stream"); NullCheck.notNull(uri, "URI"); try { return Validation.valid( this.mapper.readValue(is, CaV1Skeleton.class).toSkeleton()); } catch (final JsonParseException e) { final Path path = Paths.get(uri.getPath()); final JsonParser proc = e.getProcessor(); final JsonLocation loc = proc.getCurrentLocation(); final javaslang.collection.List<CaParseError> xs = javaslang.collection.List.of( CaParseError.of( LexicalPosition.of( loc.getLineNr(), loc.getColumnNr(), Optional.of(path)), e.getMessage() )); return Validation.invalid(xs); } catch (final IOException e) { final Path path = Paths.get(uri.getPath()); final javaslang.collection.List<CaParseError> xs = javaslang.collection.List.of( CaParseError.of( LexicalPosition.of( -1, -1, Optional.of(path)), e.getMessage() )); return Validation.invalid(xs); } } @Override public void serializeSkeletonToStream( final CaDefinitionSkeleton skeleton, final OutputStream out) throws IOException { NullCheck.notNull(skeleton, "Skeleton"); NullCheck.notNull(out, "Out"); this.mapper.writeValue(out, CaV1Skeleton.fromCore(skeleton)); } /** * A version 1 action. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @JsonSubTypes({ @JsonSubTypes.Type(name = "curves", value = CaV1ActionCurves.class) }) @JsonDeserialize @JsonSerialize public abstract static class CaV1Action { @JsonProperty(value = "name", required = true) private final String name; @JsonProperty(value = "frames-per-second", required = true) private final int frames_per_second; protected CaV1Action( @JsonProperty(value = "name", required = true) final String in_name, @JsonProperty(value = "frames-per-second", required = true) final int in_fps) { this.name = NullCheck.notNull(in_name, "Name"); this.frames_per_second = in_fps; } /** * @param a An action * * @return An action */ public static CaV1Action fromCore( final CaDefinitionActionType a) { return a.matchAction( unit(), (ac_name, ac) -> CaV1ActionCurves.fromCore(ac)); } /** * @return An action */ public abstract CaDefinitionActionType toAction(); } /** * A version 1 action constructed from curves. 
*/ @JsonDeserialize @JsonSerialize public static final class CaV1ActionCurves extends CaV1Action { @JsonProperty(value = "curves", required = true) private final List<CaV1Curve> curves; protected CaV1ActionCurves( @JsonProperty(value = "name", required = true) final String in_name, @JsonProperty(value = "frames-per-second", required = true) final int in_fps, @JsonProperty(value = "curves", required = true) final List<CaV1Curve> in_curves) { super(in_name, in_fps); this.curves = NullCheck.notNull(in_curves, "Curves"); } /** * @param c A curve-based action * * @return A curve-based action */ public static CaV1ActionCurves fromCore( final CaDefinitionActionCurvesType c) { final List<CaV1Curve> curves = new ArrayList<>(8); c.curves().forEach( p -> p._2.forEach(curve -> { curves.add(CaV1Curve.fromCore(curve)); })); return new CaV1ActionCurves( c.name().value(), c.framesPerSecond(), curves); } @Override public CaDefinitionActionCurvesType toAction() { final CaDefinitionActionCurves.Builder act_b = CaDefinitionActionCurves.builder(); act_b.setFramesPerSecond(super.frames_per_second); act_b.setName(CaActionName.of(super.name)); act_b.setCurves(this.curves.stream() .map(CaV1Curve::toCurve) .collect(javaslang.collection.List.collector()) .groupBy(CaDefinitionCurveType::joint)); return act_b.build(); } } /** * A version 1 joint. */ @JsonDeserialize @JsonSerialize public static final class CaV1Joint { @JsonProperty(value = "name", required = true) private final String name; @JsonProperty("parent") private final Optional<String> parent; @JsonProperty(value = "translation", required = true) private final PVectorI3D<CaSpaceJointType> translation; @JsonProperty(value = "orientation-xyzw", required = true) private final QuaternionI4D orientation; @JsonProperty(value = "scale", required = true) private final VectorI3D scale; /** * Construct a joint * * @param in_name The joint name * @param in_parent The joint parent * @param in_translation The joint translation * @param in_orientation The joint orientation * @param in_scale The joint scale */ @JsonCreator public CaV1Joint( @JsonProperty(value = "name", required = true) final String in_name, @JsonProperty("parent") final Optional<String> in_parent, @JsonProperty(value = "translation", required = true) final PVectorI3D<CaSpaceJointType> in_translation, @JsonProperty(value = "orientation-xyzw", required = true) final QuaternionI4D in_orientation, @JsonProperty(value = "scale", required = true) final VectorI3D in_scale) { this.name = NullCheck.notNull(in_name, "Name"); this.parent = NullCheck.notNull(in_parent, "Parent"); this.translation = NullCheck.notNull(in_translation, "Translation"); this.orientation = NullCheck.notNull(in_orientation, "Orientation"); this.scale = NullCheck.notNull(in_scale, "Scale"); } /** * @param b A joint * * @return A joint */ public static CaV1Joint fromCore( final CaDefinitionJoint b) { return new CaV1Joint( b.name().value(), b.parent().map(CaJointNameType::value), b.translation(), b.orientation(), b.scale()); } /** * @return A joint */ public CaDefinitionJoint toJoint() { final CaDefinitionJoint.Builder bb = CaDefinitionJoint.builder(); bb.setParent(this.parent.map(CaJointName::of)); bb.setOrientation(this.orientation); bb.setTranslation(this.translation); bb.setScale(this.scale); bb.setName(CaJointName.of(this.name)); return bb.build(); } } /** * A version 1 curve. 
*/ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @JsonSubTypes({ @JsonSubTypes.Type(name = "orientation", value = CaV1CurveQuaternion.class), @JsonSubTypes.Type(name = "scale", value = CaV1CurveScale.class), @JsonSubTypes.Type(name = "translation", value = CaV1CurveTranslation.class) }) @JsonDeserialize @JsonSerialize public abstract static class CaV1Curve { @JsonProperty(value = "joint", required = true) private final String joint; protected CaV1Curve( @JsonProperty(value = "joint", required = true) final String in_joint) { this.joint = NullCheck.notNull(in_joint, "Joint"); } /** * @param curve A curve * * @return A curve */ public static CaV1Curve fromCore( final CaDefinitionCurveType curve) { return curve.matchCurve( unit(), (name, ct) -> CaV1CurveTranslation.fromCore(ct), (name, co) -> CaV1CurveQuaternion.fromCore(co), (name, cs) -> CaV1CurveScale.fromCore(cs)); } /** * @return A curve */ public abstract CaDefinitionCurveType toCurve(); } /** * A version 1 orientation curve. */ @JsonDeserialize @JsonSerialize public static final class CaV1CurveQuaternion extends CaV1Curve { @JsonProperty(value = "keyframes", required = true) private final List<CaV1KeyframeCurveOrientation> keyframes; protected CaV1CurveQuaternion( @JsonProperty(value = "joint", required = true) final String in_joint, @JsonProperty(value = "keyframes", required = true) final List<CaV1KeyframeCurveOrientation> in_keyframes) { super(in_joint); this.keyframes = NullCheck.notNull(in_keyframes, "Keyframes"); } /** * @param c A curve * * @return A serialized curve */ public static CaV1CurveQuaternion fromCore( final CaDefinitionCurveOrientationType c) { return new CaV1CurveQuaternion( c.joint().value(), c.keyframes().map(CaV1KeyframeCurveOrientation::fromCore).toJavaList()); } @Override public CaDefinitionCurveOrientationType toCurve() { final CaDefinitionCurveOrientation.Builder cb = CaDefinitionCurveOrientation.builder(); cb.setJoint(CaJointName.of(super.joint)); cb.setKeyframes(this.keyframes.stream() .map(CaV1KeyframeCurveOrientation::toKeyframe) .collect(javaslang.collection.List.collector())); return cb.build(); } } /** * A version 1 scale curve. */ @JsonDeserialize @JsonSerialize public static final class CaV1CurveScale extends CaV1Curve { @JsonProperty(value = "keyframes", required = true) private final List<CaV1KeyframeCurveScale> keyframes; protected CaV1CurveScale( @JsonProperty(value = "joint", required = true) final String in_joint, @JsonProperty(value = "keyframes", required = true) final List<CaV1KeyframeCurveScale> in_keyframes) { super(in_joint); this.keyframes = NullCheck.notNull(in_keyframes, "Keyframes"); } /** * @param c A curve * * @return A serialized curve */ public static CaV1CurveScale fromCore( final CaDefinitionCurveScaleType c) { return new CaV1CurveScale( c.joint().value(), c.keyframes().map(CaV1KeyframeCurveScale::fromCore).toJavaList()); } @Override public CaDefinitionCurveType toCurve() { final CaDefinitionCurveScale.Builder cb = CaDefinitionCurveScale.builder(); cb.setJoint(CaJointName.of(super.joint)); cb.setKeyframes(this.keyframes.stream() .map(CaV1KeyframeCurveScale::toKeyframe) .collect(javaslang.collection.List.collector())); return cb.build(); } } /** * A version 1 translation curve. 
*/ @JsonDeserialize @JsonSerialize public static final class CaV1CurveTranslation extends CaV1Curve { @JsonProperty(value = "keyframes", required = true) private final List<CaV1KeyframeCurveTranslation> keyframes; protected CaV1CurveTranslation( @JsonProperty(value = "joint", required = true) final String in_joint, @JsonProperty(value = "keyframes", required = true) final List<CaV1KeyframeCurveTranslation> in_keyframes) { super(in_joint); this.keyframes = NullCheck.notNull(in_keyframes, "Keyframes"); } /** * @param c A curve * * @return A serialized curve */ public static CaV1CurveTranslation fromCore( final CaDefinitionCurveTranslationType c) { return new CaV1CurveTranslation( c.joint().value(), c.keyframes().map(CaV1KeyframeCurveTranslation::fromCore).toJavaList()); } @Override public CaDefinitionCurveType toCurve() { final CaDefinitionCurveTranslation.Builder cb = CaDefinitionCurveTranslation.builder(); cb.setJoint(CaJointName.of(super.joint)); cb.setKeyframes(this.keyframes.stream() .map(CaV1KeyframeCurveTranslation::toKeyframe) .collect(javaslang.collection.List.collector())); return cb.build(); } } /** * A version 1 orientation curve keyframe. */ @JsonDeserialize @JsonSerialize public static final class CaV1KeyframeCurveOrientation { @JsonProperty(value = "index", required = true) private final int index; @JsonProperty(value = "interpolation", required = true) private final CaCurveInterpolation interpolation; @JsonProperty(value = "easing", required = true) private final CaCurveEasing easing; @JsonProperty(value = "quaternion-xyzw", required = true) private final QuaternionI4D quaternion; CaV1KeyframeCurveOrientation( @JsonProperty(value = "index", required = true) final int in_index, @JsonProperty(value = "interpolation", required = true) final CaCurveInterpolation in_interpolation, @JsonProperty(value = "easing", required = true) final CaCurveEasing in_easing, @JsonProperty(value = "quaternion-xyzw", required = true) final QuaternionI4D in_quaternion) { this.index = in_index; this.interpolation = NullCheck.notNull(in_interpolation, "Interpolation"); this.easing = NullCheck.notNull(in_easing, "Easing"); this.quaternion = NullCheck.notNull(in_quaternion, "Quaternion"); } /** * @param d A keyframe * * @return A serialized keyframe */ public static CaV1KeyframeCurveOrientation fromCore( final CaDefinitionCurveKeyframeOrientationType d) { return new CaV1KeyframeCurveOrientation( d.index(), d.interpolation(), d.easing(), d.orientation()); } /** * @return A keyframe */ public CaDefinitionCurveKeyframeOrientationType toKeyframe() { final CaDefinitionCurveKeyframeOrientation.Builder cb = CaDefinitionCurveKeyframeOrientation.builder(); cb.setEasing(this.easing); cb.setInterpolation(this.interpolation); cb.setIndex(this.index); cb.setOrientation(this.quaternion); return cb.build(); } } /** * A version 1 translation curve keyframe. 
*/ @JsonDeserialize @JsonSerialize public static final class CaV1KeyframeCurveTranslation { @JsonProperty(value = "index", required = true) private final int index; @JsonProperty(value = "interpolation", required = true) private final CaCurveInterpolation interpolation; @JsonProperty(value = "easing", required = true) private final CaCurveEasing easing; @JsonProperty(value = "translation", required = true) private final PVectorI3D<CaSpaceJointType> translation; CaV1KeyframeCurveTranslation( @JsonProperty(value = "index", required = true) final int in_index, @JsonProperty(value = "interpolation", required = true) final CaCurveInterpolation in_interpolation, @JsonProperty(value = "easing", required = true) final CaCurveEasing in_easing, @JsonProperty(value = "translation", required = true) final PVectorI3D<CaSpaceJointType> in_translation) { this.index = in_index; this.interpolation = NullCheck.notNull(in_interpolation, "Interpolation"); this.easing = NullCheck.notNull(in_easing, "Easing"); this.translation = NullCheck.notNull(in_translation, "Translation"); } /** * @param d A keyframe * * @return A serialized keyframe */ public static CaV1KeyframeCurveTranslation fromCore( final CaDefinitionCurveKeyframeTranslationType d) { return new CaV1KeyframeCurveTranslation( d.index(), d.interpolation(), d.easing(), d.translation()); } /** * @return A keyframe */ public CaDefinitionCurveKeyframeTranslationType toKeyframe() { final CaDefinitionCurveKeyframeTranslation.Builder cb = CaDefinitionCurveKeyframeTranslation.builder(); cb.setEasing(this.easing); cb.setInterpolation(this.interpolation); cb.setIndex(this.index); cb.setTranslation(this.translation); return cb.build(); } } /** * A version 1 scale curve keyframe. */ @JsonDeserialize @JsonSerialize public static final class CaV1KeyframeCurveScale { @JsonProperty(value = "index", required = true) private final int index; @JsonProperty(value = "interpolation", required = true) private final CaCurveInterpolation interpolation; @JsonProperty(value = "easing", required = true) private final CaCurveEasing easing; @JsonProperty(value = "scale", required = true) private final VectorI3D scale; CaV1KeyframeCurveScale( @JsonProperty(value = "index", required = true) final int in_index, @JsonProperty(value = "interpolation", required = true) final CaCurveInterpolation in_interpolation, @JsonProperty(value = "easing", required = true) final CaCurveEasing in_easing, @JsonProperty(value = "scale", required = true) final VectorI3D in_scale) { this.index = in_index; this.interpolation = NullCheck.notNull(in_interpolation, "Interpolation"); this.easing = NullCheck.notNull(in_easing, "Easing"); this.scale = NullCheck.notNull(in_scale, "Scale"); } /** * @param d A keyframe * * @return A serialized keyframe */ public static CaV1KeyframeCurveScale fromCore( final CaDefinitionCurveKeyframeScaleType d) { return new CaV1KeyframeCurveScale( d.index(), d.interpolation(), d.easing(), d.scale()); } /** * @return A keyframe */ public CaDefinitionCurveKeyframeScaleType toKeyframe() { final CaDefinitionCurveKeyframeScale.Builder cb = CaDefinitionCurveKeyframeScale.builder(); cb.setEasing(this.easing); cb.setInterpolation(this.interpolation); cb.setIndex(this.index); cb.setScale(this.scale); return cb.build(); } } /** * A version 1 skeleton. 
*/ @JsonDeserialize @JsonSerialize public static final class CaV1Skeleton { @JsonProperty(value = "name", required = true) private final String name; @JsonProperty(value = "joints", required = true) private final List<CaV1Joint> joints; @JsonProperty(value = "actions", required = true) private final List<CaV1Action> actions; @JsonCreator CaV1Skeleton( @JsonProperty(value = "name", required = true) final String in_name, @JsonProperty(value = "joints", required = true) final List<CaV1Joint> in_joints, @JsonProperty(value = "actions", required = true) final List<CaV1Action> in_actions) { this.name = NullCheck.notNull(in_name, "Name"); this.joints = NullCheck.notNull(in_joints, "Joints"); this.actions = NullCheck.notNull(in_actions, "Actions"); final Collection<String> act_names = new HashSet<>(this.actions.size()); final Collection<String> act_dup = new HashSet<>(this.actions.size()); this.actions.forEach(act -> { if (act_names.contains(act.name)) { act_dup.add(act.name); } act_names.add(act.name); }); final Collection<String> joint_names = new HashSet<>(this.joints.size()); final Collection<String> joint_dup = new HashSet<>(this.joints.size()); this.joints.forEach(act -> { if (joint_names.contains(act.name)) { joint_dup.add(act.name); } joint_names.add(act.name); }); if ((!act_dup.isEmpty()) || (!joint_dup.isEmpty())) { final StringBuilder b = new StringBuilder(128); if (!act_dup.isEmpty()) { b.append("Duplicate actions: "); b.append(act_dup.stream().collect(Collectors.joining(" "))); b.append(System.lineSeparator()); } if (!joint_dup.isEmpty()) { b.append("Duplicate joints: "); b.append(joint_dup.stream().collect(Collectors.joining(" "))); b.append(System.lineSeparator()); } throw new IllegalArgumentException(b.toString()); } } /** * @param c A skeleton * * @return A 1.* skeleton */ public static CaV1Skeleton fromCore( final CaDefinitionSkeletonType c) { return new CaV1Skeleton( c.name().value(), c.joints().values().map(CaV1Joint::fromCore).toJavaList(), c.actions().values().map(CaV1Action::fromCore).toJavaList()); } /** * @return A skeleton */ public CaDefinitionSkeleton toSkeleton() { final CaDefinitionSkeleton.Builder sk_b = CaDefinitionSkeleton.builder(); sk_b.setName(CaSkeletonName.of(this.name)); sk_b.setActions(this.actions.stream() .map(CaV1Action::toAction) .collect(javaslang.collection.List.collector()) .toMap(act -> Tuple.of(act.name(), act))); sk_b.setJoints(this.joints.stream() .map(CaV1Joint::toJoint) .collect(javaslang.collection.List.collector()) .toMap(joint -> Tuple.of(joint.name(), joint))); return sk_b.build(); } } /** * A deserializer for {@link CaCurveInterpolation} values. 
*/ public static final class CaCurveInterpolationDeserializer extends StdDeserializer<CaCurveInterpolation> { /** * Construct a deserializer */ public CaCurveInterpolationDeserializer() { super(PVectorI3D.class); } private static JsonMappingException error( final DeserializationContext ctxt, final TreeNode n) { final StringBuilder sb = new StringBuilder(128); sb.append("Received: "); sb.append(n); sb.append(System.lineSeparator()); sb.append("Expected: "); sb.append( javaslang.collection.List.of(CaCurveInterpolation.values()) .toJavaStream() .map(CaCurveInterpolation::getName) .collect(Collectors.joining("|"))); sb.append(System.lineSeparator()); return ctxt.mappingException(sb.toString()); } @Override public CaCurveInterpolation deserialize( final JsonParser p, final DeserializationContext ctxt) throws IOException, JsonProcessingException { final TreeNode n = p.getCodec().readTree(p); if (n instanceof TextNode) { try { return CaCurveInterpolation.of(((TextNode) n).asText()); } catch (final IllegalArgumentException e) { throw error(ctxt, n); } } throw error(ctxt, n); } } /** * A deserializer for {@link CaCurveEasing} values. */ public static final class CaCurveEasingDeserializer extends StdDeserializer<CaCurveEasing> { /** * Construct a deserializer */ public CaCurveEasingDeserializer() { super(PVectorI3D.class); } @Override public CaCurveEasing deserialize( final JsonParser p, final DeserializationContext ctxt) throws IOException, JsonProcessingException { final TreeNode n = p.getCodec().readTree(p); if (n instanceof TextNode) { switch (((TextNode) n).asText()) { case "in": return CaCurveEasing.CURVE_EASING_IN; case "out": return CaCurveEasing.CURVE_EASING_OUT; case "in-out": return CaCurveEasing.CURVE_EASING_IN_OUT; default: { // Nothing break; } } } throw new JsonParseException(p, "Expected: in | out | in-out"); } } /** * A serializer for {@link CaCurveInterpolation} values. */ public static final class CaCurveInterpolationSerializer extends StdSerializer<CaCurveInterpolation> { /** * Construct a serializer. */ public CaCurveInterpolationSerializer() { super(CaCurveInterpolation.class); } @Override public void serialize( final CaCurveInterpolation value, final JsonGenerator gen, final SerializerProvider provider) throws IOException { switch (value) { case CURVE_INTERPOLATION_CONSTANT: { gen.writeRawValue("\"constant\""); break; } case CURVE_INTERPOLATION_LINEAR: { gen.writeRawValue("\"linear\""); break; } case CURVE_INTERPOLATION_QUADRATIC: { gen.writeRawValue("\"quadratic\""); break; } case CURVE_INTERPOLATION_EXPONENTIAL: { gen.writeRawValue("\"exponential\""); break; } } } } /** * A serializer for {@link CaCurveEasing} values. */ public static final class CaCurveEasingSerializer extends StdSerializer<CaCurveEasing> { /** * Construct a serializer. */ public CaCurveEasingSerializer() { super(CaCurveEasing.class); } @Override public void serialize( final CaCurveEasing value, final JsonGenerator gen, final SerializerProvider provider) throws IOException { switch (value) { case CURVE_EASING_IN: { gen.writeRawValue("\"in\""); break; } case CURVE_EASING_OUT: { gen.writeRawValue("\"out\""); break; } case CURVE_EASING_IN_OUT: { gen.writeRawValue("\"in-out\""); break; } } } } }
/** * Send a delayed message so that enemy is ready to attack again after some time. * Needs to be overridden by each specific enemy class to actually send a projectile ! */ public void throwProjectile() { throwingProjectile = false; MessageManager.getInstance().dispatchMessage(timeBetweenAttacks, this, this, MessageType.READY_TO_ATTACK); }
"""Stereo sound wave generators tests."""

import numpy as np
import pytest

from waves import Sound


@pytest.mark.parametrize("fps", (44100, 22050), ids=("fps=44100", "fps=22050"))
@pytest.mark.parametrize(
    "frequencies",
    ((110, 220), (440, 880), (120, 360, 720)),
    ids=("frequencies=110,220", "frequencies=440,880", "frequencies=120,360,720"),
)
@pytest.mark.parametrize(
    "volume",
    (0, 0.5, 1),
    ids=("volume=0", "volume=0.5", "volume=1"),
)
@pytest.mark.parametrize(
    "sample_width", (2, 4), ids=("sample_width=2", "sample_width=4")
)
def test_stereo_ttf_gen(stereo_ttf_gen, fps, frequencies, volume, sample_width):
    time_to_frame = stereo_ttf_gen(
        fps=fps,
        frequencies=frequencies,
        volume=volume,
        sample_width=sample_width,
    )
    sound = Sound.from_datatimes(time_to_frame, fps=fps).with_duration(1)

    assert sound.fps == fps
    assert sound.n_channels == len(frequencies)
    assert sound.duration == 1
    assert sound.dtype is getattr(np, f"int{sample_width << 3}")
Law, Bureaucracy, and the Practice of Government and Rule This chapter explores the “logic of empire” in relation to law, bureaucracy, and the practice of government, from the Ancient world to the present. Beginning with the complex example of Haile Selassie I and the Ethiopian Empire, the chapter analyzes the many universalisms of law and empire, before moving on to a survey of different “repertories” of imperial rule. The chapter then examines the numerous ways in which empires put law “to work,” facilitating the development of multiple, normative orders and institutions far beyond the jurisdiction of their own imperial officials. The final section of the chapter briefly introduces (post)colonial legal scholarship and the concept of “legalism from below,” with a specific focus on recent archival research in courtroom records.
/**
 * One-way synchronization to the local database.
 * If the user chooses to fetch applicants from the remote database, this method retrieves the
 * more recently modified applicants from the remote database and stores or updates them in the
 * local database.
 *
 * @param event the left-mouse-click select event on the sync control (local database to remote
 *              database and remote database to local database).
 * @throws SQLException if there is any problem with the remote or local SQL database.
 */
public void GetSyncLeftMouseClicked(ActionEvent event) throws SQLException {
    MainApp.logger.info("Getting applicants from remote database...");
    offlineService.getAllApplicantsFromMysql();
    MainApp.logger.info("Sending all applicants from remote database...");
    offlineService.getAllApplicantFromSqlLite();
    if (MainApp.homeWindowLoader != null) {
        HomeWindowController controller = MainApp.homeWindowLoader.getController();
        MainApp.logger.info("Refreshing tableview...");
        controller.syncRefresh();
    }
}
use salvo::prelude::*;
use demo1::*;

#[tokio::main]
async fn main() {
    let router = router::router();
    Server::new(router).bind(([0, 0, 0, 0], 7878)).await;
}
import xlwings as xw
import math
# import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# import sympy.physics.units as u
from pint import UnitRegistry
from catalogo import CheckList

u = UnitRegistry()
# wb = xw.Book()


@xw.sub  # only required if you want to import it or run it via UDF Server
def main():
    wb = xw.Book.caller()
    wb.sheets[0].range("A1").value = "Hello xlwings!"


@xw.func
def perda_carga(L, V, D, f):
    '''
    Distributed head loss (Darcy-Weisbach).
    L (m), V (m/s), D (m), f (dimensionless)
    '''
    L = L * u.m
    V = V * u.m / u.s
    D = D * u.m
    g = 9.81 * u.m / u.s**2
    delta_H = f * L * V**2 / (D * 2 * g)
    return [delta_H.magnitude] + [str(delta_H.units)]
    # a = CheckList.teste(1)
    # return a


@xw.func
def coef_atrito(velocidade, diametro, e):
    '''
    Friction factor (explicit full-range correlation).
    velocidade (m/s), diametro (m), e (mm) = equivalent absolute roughness
    '''
    viscosidade_cinematica = 1.003 * 10**(-6) * u.m**2 / u.s  # kinematic viscosity of water at ~20 degC
    velocidade = velocidade * u.m / u.s
    diametro = diametro * u.m
    e = e * u.mm
    reynolds = velocidade * diametro / viscosidade_cinematica
    f = ((64 / reynolds)**8 + 9.5 * (np.log(e / (3.7 * diametro) + 5.74 / (reynolds**0.9)) - (2500 / reynolds)**6)**(-16))**(0.125)
    return [f.magnitude] + [str(f.units)]


@xw.func
def perda_localizada(velocidade, K):
    '''
    Local (minor) head loss.
    velocidade (m/s), K (dimensionless)
    '''
    velocidade = velocidade * u.m / u.s
    g = 9.81 * u.m / u.s**2
    delta_H = K * velocidade**2 / g
    return [delta_H.magnitude] + [str(delta_H.units)]


@xw.func
def preDimensionamento(vazao_necessaria):
    '''
    Preliminary pipe sizing from the required flow rate.
    vazao_necessaria (L/h)
    '''
    catalogo = CheckList()
    vazao_necessaria = vazao_necessaria * u.l / u.hour
    vazao_necessaria = vazao_necessaria.to('m**3/s')
    velocidade_eco = 1.5 * u.m / u.s  # economic flow velocity
    K = (4 / (np.pi * velocidade_eco))**(1 / 2)
    D = K * (vazao_necessaria)**(1 / 2)
    D = D.magnitude
    D = D * u.m
    d_catalogo = catalogo.min_diametro(D.magnitude)
    # velocidade_min = 0.5*u.m/u.s
    # velocidade_max = 3*u.m/u.s
    # g = 9.81*u.m/u.s**2
    return [K.magnitude] + [vazao_necessaria.magnitude] + [D.magnitude] + [d_catalogo.magnitude]
    # return [vazao_necessaria.magnitude] + [str(vazao_necessaria.units)]


if __name__ == "__main__":
    # xw.books.active.set_mock_caller()
    main()
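A hypothetical worked example (all values invented for illustration) chaining the UDFs above outside Excel: compute the friction factor first, then feed it into the distributed head-loss function.

f, _ = coef_atrito(velocidade=1.5, diametro=0.05, e=0.0015)   # water at ~20 degC, smooth-plastic-like roughness
dH, unit = perda_carga(L=25.0, V=1.5, D=0.05, f=f)
print(f"friction factor ~ {f:.4f}; head loss ~ {dH:.2f} {unit}")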
/* returns void because it must not fail on valid LUKS2 header */ static void _load_backup_segments(struct luks2_hdr *hdr, struct luks2_reenc_context *rh) { int segment = LUKS2_get_segment_id_by_flag(hdr, "backup-final"); if (segment >= 0) { rh->jobj_segment_new = json_object_get(LUKS2_get_segment_jobj(hdr, segment)); rh->digest_new = LUKS2_digest_by_segment(hdr, segment); } else { rh->jobj_segment_new = NULL; rh->digest_new = -ENOENT; } segment = LUKS2_get_segment_id_by_flag(hdr, "backup-previous"); if (segment >= 0) { rh->jobj_segment_old = json_object_get(LUKS2_get_segment_jobj(hdr, segment)); rh->digest_old = LUKS2_digest_by_segment(hdr, segment); } else { rh->jobj_segment_old = NULL; rh->digest_old = -ENOENT; } segment = LUKS2_get_segment_id_by_flag(hdr, "backup-moved-segment"); if (segment >= 0) rh->jobj_segment_moved = json_object_get(LUKS2_get_segment_jobj(hdr, segment)); else rh->jobj_segment_moved = NULL; }
def output(keybind="", command=""): headers = ['Keybind', 'Modifier', 'Command'] t_data = [] if binds: binds.sort() for l in binds: if keybind: if keybind in l[0].lower(): t_data.append(l) elif command: if command in l[2].lower(): t_data.append(l) else: t_data.append(l) if t_data: t_data.insert(0, headers) table = SingleTable(t_data) if table.ok: if args.verbose: print('Table ok.') print(table.table) else: fix_table(table) print(table.table) else: print('No keybinds found.')
/** * This data listener can be attached to a serial port. Whenever new data is * available, it retrieves a full line from the serial port and sends it to * {@link PrinterEmulator}. * * @author lukasklinger * */ public class DataListener implements SerialPortDataListener { private PrinterEmulator emulator; private SerialTerminal connection; public DataListener(PrinterEmulator emulator, SerialTerminal connection) { this.emulator = emulator; this.connection = connection; } @Override public int getListeningEvents() { return SerialPort.LISTENING_EVENT_DATA_AVAILABLE; } @Override public void serialEvent(SerialPortEvent arg0) { emulator.receiveChunk(connection.receiveDataFromSerial()); } }
package com.soup.memo.netty.socket;

import java.time.LocalDateTime;

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import lombok.extern.slf4j.Slf4j;

/**
 * <p>
 * Description: Netty socket client handler that logs messages received from the server and replies.
 * </p>
 *
 * @author zhaoyi
 * @date 2019-01-03 17:54
 */
@Slf4j
public class SocketClientHandler extends SimpleChannelInboundHandler<String> {

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception {
        // Address and content of the message the server sent to this client
        log.info(ctx.channel().remoteAddress() + ", " + msg);
        ctx.writeAndFlush("msg from client: " + LocalDateTime.now());
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        ctx.channel().writeAndFlush("from client request!");
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        cause.printStackTrace();
        ctx.channel().close();
    }
}
Iconic motorcycle brand Harley-Davidson is taken down a notch in this fun, simple new spot for Indian Motorcycle, which takes a hard left three-quarters of the way through. Great use of Willie Nelson by Minneapolis agency Colle+McVoy, for whom it must have been fun taking some lighthearted shots at a brand famously advertised for three decades by crosstown shop Carmichael Lynch. (Colle+McVoy tells us it has "great respect for Harley and its loyal riders" despite the mischievous approach here.) A 30-second version of the ad will air on TV next week, setting the stage for the worldwide reveal of the 2014 Indian Chief at the 2013 Sturgis Motorcycle Rally the week of Aug. 5. Credits below. CREDITS Client: Indian Motorcycle Agency: Colle+McVoy, Minneapolis Production Company: Blue Morpho Films Edit: Channel Z
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("models", "5881_fileviewer"), ] operations = [ migrations.RunSQL( sql=""" DELETE FROM card_components WHERE componentid = '4e40b397-d6bc-4660-a398-4a72c90dba07'; INSERT INTO card_components(componentid, name, description, component, componentname, defaultconfig) VALUES ('4e40b397-d6bc-4660-a398-4a72c90dba07', 'Photo Gallery Card', 'Photo gallery card UI', 'views/components/cards/photo-gallery-card', 'photo-gallery-card', '{}'); """, reverse_sql=""" UPDATE cards SET componentid = 'f05e4d3a-53c1-11e8-b0ea-784f435179ea' WHERE componentid = '4e40b397-d6bc-4660-a398-4a72c90dba07'; DELETE FROM card_components WHERE componentid = '4e40b397-d6bc-4660-a398-4a72c90dba07'; """, ), ]
#include "mlir/IR/DialectImplementation.h"
#include "llvm/ADT/TypeSwitch.h"

#include "Dialect/ZX/ZXDialect.h"
#include "Dialect/ZX/ZXTypes.h"

#define GET_TYPEDEF_CLASSES
#include "Dialect/ZX/ZXOpsTypes.cpp.inc"

using namespace mlir;
using namespace mlir::ZX;

/// Parse a type registered to this dialect.
Type ZXDialect::parseType(DialectAsmParser &parser) const {
  StringRef mnemonic;
  if (failed(parser.parseKeyword(&mnemonic)))
    return Type();
  Type type;
  generatedTypeParser(getContext(), parser, mnemonic, type);
  return type;
}

/// Print a type registered to this dialect.
void ZXDialect::printType(Type type, DialectAsmPrinter &os) const {
  (void)generatedTypePrinter(type, os);
}
def add_relation(g: SemanticGraph): new_graphs = [] if len(g.edges) > 0 and not any(e.temporal for e in g.edges): add_args = [] sorting = 'MIN' if len(argmin_markers & set(g.tokens)) > 0: add_args = arg_relations['MIN'] sorting = 'MIN' elif len(argmax_markers & set(g.tokens)) > 0: add_args = arg_relations['MAX'] sorting = 'MAX' for rel in add_args: new_g = copy(g) new_g.edges.append(Edge(leftentityid=QUESTION_VAR, rightentityid=sorting, relationid=rel)) new_graphs.append(new_g) return new_graphs
<reponame>quickstrom/specstrom {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeApplications #-} module Specstrom.Gen where import qualified Data.Aeson as JSON import qualified Data.HashMap.Strict as HashMap import qualified Data.Text as Text import qualified Data.Vector as Vector import Hedgehog (Gen) import qualified Hedgehog.Gen as Gen import qualified Hedgehog.Range as Range import qualified Specstrom.Checker.Protocol as Protocol import qualified Specstrom.Dependency as Dependency import Specstrom.Lexer (dummyPosition) import Specstrom.Syntax (Expr (..), Lit (..), Name, Selector (..), TopPattern) name :: Gen Name name = ("n" <>) . Text.pack . show @Int <$> Gen.integral (Range.linear 1 100) selector :: Gen Selector selector = Selector . ("sel-" <>) <$> Gen.text (Range.linear 1 10) Gen.alphaNum literal :: Gen Lit literal = Gen.choice [ IntLit <$> Gen.integral (Range.linear 0 10), -- FloatLit <$> Gen.double (Range.linearFrac (0) 10), StringLit <$> Gen.text (Range.linear 0 10) Gen.unicode, CharLit <$> Gen.unicode, SelectorLit <$> selector ] -- | * Expr literalExpr :: Gen (Expr TopPattern) literalExpr = Literal dummyPosition <$> literal intExpr :: Gen (Expr TopPattern) intExpr = Gen.recursive Gen.choice [ Literal dummyPosition . IntLit <$> Gen.integral (Range.linear 0 10) ] [ Gen.subterm2 intExpr intExpr (App . App (Var dummyPosition "_+_")), Gen.subterm2 intExpr intExpr (App . App (Var dummyPosition "_-_")) ] boolExpr :: Gen (Expr TopPattern) boolExpr = Gen.recursive Gen.choice [ -- Var <$> position <*> name, -- Literal <$> position <*> literal App . App (Var dummyPosition "_==_") <$> literalExpr <*> literalExpr, App . App (Var dummyPosition "_!=_") <$> literalExpr <*> literalExpr ] [ Gen.subterm boolExpr (App (Var dummyPosition "not_")), Gen.subterm2 boolExpr boolExpr (App . App (Var dummyPosition "_&&_")), Gen.subterm2 boolExpr boolExpr (App . App (Var dummyPosition "_||_")), Gen.subterm2 boolExpr boolExpr (App . App (Var dummyPosition "_==>_")) ] expr :: Gen (Expr TopPattern) expr = Gen.recursive Gen.choice [ -- Var <$> position <*> name, -- Literal <$> position <*> literal Gen.subterm boolExpr (App (Var dummyPosition "next_")), -- Gen.subterm boolExpr (App (Var dummyPosition "eventually_")), Gen.subterm2 intExpr boolExpr (App . App (Var dummyPosition "always{_}_")), Gen.subterm2 boolExpr boolExpr (App . App (Var dummyPosition "_until_")) ] [ Gen.subterm expr (App (Var dummyPosition "not_")), Gen.subterm2 expr expr (App . App (Var dummyPosition "_&&_")), Gen.subterm2 expr expr (App . App (Var dummyPosition "_||_")), Gen.subterm2 expr expr (App . App (Var dummyPosition "_==>_")) ] -- * Dep elementState :: Dependency.DepSchema -> Gen JSON.Value elementState (Dependency.DepSchema fields) | HashMap.null fields = JSON.Bool <$> Gen.bool | otherwise = JSON.Object <$> traverse elementState fields state :: Dependency.Dep -> Gen Protocol.State state (Dependency.Dep bySelector) = flip HashMap.traverseWithKey bySelector $ \(Selector s) schema -> do elements <- Gen.list (Range.linear 1 10) (elementState schema) pure ( JSON.Array ( Vector.fromList [ JSON.Object (element <> HashMap.singleton "ref" (JSON.String (s <> "-" <> Text.pack (show i)))) | (i, JSON.Object element) <- zip [0 :: Int ..] elements ] ) )
/** * Velocity LogSystem implementation for Jakarta Commons Logging. * Used by VelocityConfigurer to redirect log output. * @author Juergen Hoeller * @since 07.08.2003 * @see VelocityConfigurer */ public class CommonsLoggingLogSystem implements LogSystem { private Log logger = LogFactory.getLog(getClass()); public void init(RuntimeServices runtimeServices) { } public void logVelocityMessage(int i, String msg) { switch (i) { case ERROR_ID: logger.error(msg); break; case WARN_ID: logger.warn(msg); break; case INFO_ID: logger.info(msg); break; case DEBUG_ID: logger.debug(msg); break; } } }
Guidelines for writing a scientific paper: an address to beginners Summary: Although writing a publishable scientific paper does not require extraordinary writing talent, it does require adherence to a few basic principles. First, choosing a forum for publication is important, since books and journals tend to be highly specialized. Second, following the instructions to authors for that publication is mandatory, and reading those instructions before even beginning the manuscript is advisable. Third, writing the abstract or summary requires a painstaking distillation of background, methods, and results; one should recognize that this is the only part of the work that most readers will ever see. Fourth, the other components of the typical scientific paper: introduction, methods, results, and discussion should constitute an appropriate expansion of the abstract. Results in particular are crucial and should be presented in terms of a few specific findings logically supported by selected data, with details presented in tables and figures that are themselves cogent and easy to follow. Fifth, the writer should review the paper carefully, eliminating all wordiness and clarifying statements that are not as meaningful as they could be. (Writers not proficient in English should ask a colleague to do this.) Following these principles will not guarantee publication, but will guarantee serious consideration of the submission.
Endocytosis and exocytosis of protein in capillary endothelium The transport of proteins across continuous capillary endothelium is believed to be mediated by micropinocytic vesicles which shuttle molecules between the lumenal and abluminal plasma membrane. We have studied the ability of capillary endothelial cells isolated from rat epididymal fat to endocytose fluorescently labelled ovalbumin within micropinocytic vesicles. Net association of fluorescent ovalbumin with endothelial cells reaches an equilibrium after 40 minutes of incubation. This equilibrium is presumably due to a balance between endocytosis and subsequent exocytosis of this protein. Capillaries equilibrated with fluorescent ovalbumin exhibited rapid exocytosis of this protein when it was removed from the external medium. The rate of endocytosis was concentration dependent and obeyed the kinetics expected for adsorptive phase endocytosis. High concentrations of ovalbumin stimulated the ingestion of 14C‐sucrose, a marker of fluid endocytosis, suggesting that protein can affect the movement of vesicles within the endothelial cytoplasm. These results imply that capillary endothelium isolated from rat epididymal fat exhibits the ability to endocytose and subsequently exocytose protein. This demonstrates that the two components of endothelial vesicular transport or transcytosis can be observed and studied in a system of isolated capillary endothelium.
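The concentration dependence described above is often summarized by a saturable adsorptive-phase term plus a linear fluid-phase term. The form below is a hedged illustration of that standard model, not an equation taken from the paper; V_max, K_m and k_fluid are placeholders that the study does not report here.

\[
  \frac{dU}{dt} \;=\; \frac{V_{\max}\, C}{K_m + C} \;+\; k_{\text{fluid}}\, C
\]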
package io.bumble.slowdonkey.server.data;

import io.bumble.slowdonkey.common.model.network.client2server.WriteRequest;
import io.bumble.slowdonkey.common.util.SingletonUtil;
import io.bumble.slowdonkey.server.persistence.Snapshot;
import io.bumble.slowdonkey.server.persistence.TxnLog;
import io.bumble.slowdonkey.server.persistence.TxnLogEntry;

/**
 * Database storing the data.
 * Store data in {@code dataTree} as memory store.
 * Store data in {@code snapshot} and {@code txnLog} as persistence store.
 *
 * @author shenxiangyu on 2020/04/09
 */
public class SlowDonkeyDatabase {

    private DataTree dataTree = DataTree.getInstance();
    private Snapshot snapshot = Snapshot.getInstance();
    private TxnLog txnLog = TxnLog.getInstance();
    private boolean available;

    public static SlowDonkeyDatabase getInstance() {
        return SingletonUtil.getInstance(SlowDonkeyDatabase.class);
    }

    /**
     * Load data from disk.
     */
    public boolean load() {
        boolean loadSnapshotToDataTreeSuccess = snapshot.loadDataTreeFromSnapshotFile(dataTree);
        if (!loadSnapshotToDataTreeSuccess) {
            return false;
        }

        // Replay the transactions in transaction log to the data tree
        return txnLog.replayTransactionsToDataTree(dataTree);
    }

    /**
     * <ol>
     * <li>Use the incoming bytes array to reload the data tree.</li>
     * <li>Save the data tree to snapshot file.</li>
     * </ol>
     *
     * @param bytes data bytes array
     * @return true on success
     */
    public boolean receiveSnapshot(byte[] bytes) {
        // Reload data tree from the incoming bytes data
        boolean recoverDataTreeSuccess = dataTree.fromBytes(bytes);
        if (!recoverDataTreeSuccess) {
            return false;
        }

        // Save the data tree to snapshot file
        return snapshot.saveDataTreeToSnapshotFile(dataTree);
    }

    /**
     * Append transaction log entry to transaction log
     *
     * @param txnLogEntry log entry
     * @return true on success
     */
    public boolean append(TxnLogEntry txnLogEntry) {
        return txnLog.append(txnLogEntry);
    }

    /**
     * Append proposal data to transaction log
     *
     * @param request write request
     * @return true on success
     */
    public boolean propose(WriteRequest request) {
        return this.append(TxnLogEntry.getProposalLog(request));
    }

    /**
     * Append commit data to transaction log
     *
     * @param request write request
     * @return true on success
     */
    public boolean commit(WriteRequest request) {
        TxnLogEntry commitLogEntry = TxnLogEntry.getCommitLog(request);
        boolean appendSuccess = this.append(commitLogEntry);
        if (!appendSuccess) {
            return false;
        }

        // Update data tree
        return dataTree.update(commitLogEntry);
    }

    public Snapshot getSnapshot() {
        return snapshot;
    }

    public DataTree getDataTree() {
        return dataTree;
    }

    public boolean isAvailable() {
        return available;
    }

    public void setAvailable(boolean available) {
        this.available = available;
    }
}
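A minimal bootstrap sketch showing how the database above might be brought up at startup; the surrounding class is hypothetical, and only getInstance(), load() and setAvailable() come from SlowDonkeyDatabase itself.

// Hypothetical bootstrap; illustrates the load-then-mark-available flow only.
public class SlowDonkeyBootstrap {

    public static void main(String[] args) {
        SlowDonkeyDatabase database = SlowDonkeyDatabase.getInstance();

        // Recover the data tree from the snapshot file, then replay the transaction log.
        boolean loaded = database.load();
        database.setAvailable(loaded);

        if (!loaded) {
            System.err.println("Snapshot/transaction log recovery failed; database stays unavailable.");
        }
    }
}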
class Relationship: """A class representing a dcat:Relationship. Ref: `dcat:Relationship <https://www.w3.org/TR/vocab-dcat-2/#Class:Relationship>`_ Attributes: identifier (URI): a URI uniquely identifying the resource relation (Resource): A URI uniquely identifying related resource had_role (URI): A URI identifying the role """ slots = ("_g", "_identifier", "_relation", "_had_role", "_ref") _g: Graph _identifier: URI _relation: Resource _had_role: URI _ref: URIRef def __init__(self, identifier: Optional[str] = None) -> None: """Inits an object with default values.""" if identifier: self.identifier = identifier @property def identifier(self: Relationship) -> str: """Get/set for identifier.""" return self._identifier @identifier.setter def identifier(self: Relationship, identifier: str) -> None: self._identifier = URI(identifier) @property def had_role(self: Relationship) -> str: """Get/set for had_role.""" return self._had_role @had_role.setter def had_role(self: Relationship, had_role: str) -> None: self._had_role = URI(had_role) @property def relation(self: Relationship) -> Resource: """Get/set for relation.""" return self._relation @relation.setter def relation(self: Relationship, relation: Resource) -> None: self._relation = relation # - def to_rdf( self: Relationship, format: str = "turtle", encoding: Optional[str] = "utf-8" ) -> bytes: """Maps the relationship to rdf. Args: format: a valid format. Default: turtle encoding: the encoding to serialize into Returns: a rdf serialization as a bytes literal according to format. """ return self._to_graph().serialize(format=format, encoding=encoding) # - def _to_graph(self: Relationship) -> Graph: if not getattr(self, "identifier", None): self.identifier = Skolemizer.add_skolemization() # set up graph and namespaces: self._g = Graph() self._g.bind("dct", DCT) self._g.bind("dcat", DCAT) self._ref = URIRef(self.identifier) self._g.add((self._ref, RDF.type, DCAT.Relationship)) if getattr(self, "relation", None): self._relation_to_graph() if getattr(self, "had_role", None): self._had_role_to_graph() return self._g def _relation_to_graph(self: Relationship) -> None: self._g.add( ( self._ref, DCT.relation, URIRef(self.relation.identifier), ) ) def _had_role_to_graph(self: Relationship) -> None: self._g.add( ( self._ref, DCAT.hadRole, URIRef(self.had_role), ) )
package com.saucelabs.selenium3.changes;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.PageLoadStrategy;
import org.openqa.selenium.UnexpectedAlertBehaviour;
import org.openqa.selenium.chrome.ChromeOptions;

public class CapabilitiesMergeTest {

    @DisplayName("Selenium 3 Can merge in place")
    @Test
    public void mergesInPlace() {
        ChromeOptions options1 = new ChromeOptions();
        ChromeOptions options2 = new ChromeOptions();

        options1.setPageLoadStrategy(PageLoadStrategy.EAGER);
        options2.setUnhandledPromptBehaviour(UnexpectedAlertBehaviour.IGNORE);

        options1.merge(options2);

        Assertions.assertEquals(UnexpectedAlertBehaviour.IGNORE,
                options1.getCapability("unhandledPromptBehavior"));
    }

    @DisplayName("Selenium 3 Can merge as a new object")
    @Test
    public void mergesNewObject() {
        ChromeOptions options1 = new ChromeOptions();
        ChromeOptions options2 = new ChromeOptions();

        options1.setPageLoadStrategy(PageLoadStrategy.EAGER);
        options2.setUnhandledPromptBehaviour(UnexpectedAlertBehaviour.IGNORE);

        options1.merge(options2);

        Assertions.assertEquals(UnexpectedAlertBehaviour.IGNORE,
                options1.getCapability("unhandledPromptBehavior"));
    }
}
/** * Logger for command line interface */ public class CommandLineLogger extends Logger { private PrintStream m_aOut; private int m_nLevel; /** Creates a new instance of CommandLineLogger * * @param aOut PrintStream for messages */ public CommandLineLogger( PrintStream aOut, int nLevel ) { m_nLevel = nLevel; m_aOut = aOut; } /** * Log a message. * * @param aPrefix Message prefix * @param aMsg Message text * @param aLocation file, row and column number as text (optional) * @param nLevel the warning level (one of INFO, WARNING or ERROR) */ protected void logMessage( String aPrefix, String aMsg, String aLocation, int nLevel ) { if( nLevel <= m_nLevel ) { StringBuffer aOut = new StringBuffer( aLocation != null ? aLocation : getName() ); aOut.append( ": " ); aOut.append( aPrefix ); aOut.append( ':' ); aOut.append( aMsg ); m_aOut.println( aOut.toString() ); } } /** * Log a message. * * @param aPrefix Message prefix * @param aMsgWithLocation Message text including location information * @param nLevel the warning level (one of INFO, WARNING or ERROR) */ protected void logMessageWithLocation( String aPrefix, String aMsgWithLocation, int nLevel ) { StringBuffer aOut = new StringBuffer( aMsgWithLocation ); aOut.append( " (" ); aOut.append( aPrefix ); aOut.append( ')' ); m_aOut.println( aOut.toString() ); } }
/* * Created on 2012/02/16 * Copyright (c) 2010-2012, <NAME>. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of Wei-ju Wu nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package org.zmpp.glulx; import java.util.logging.*; import org.zmpp.glk.Glk; class AccelFuncEntry { public int callAddress; public AccelFunc func; public AccelFuncEntry(int callAddress, AccelFunc func) { this.callAddress = callAddress; this.func = func; } } class AccelSystem { public static int MaxAccelParams = 9; private GlulxVM vm; private static Logger logger = Logger.getLogger("glulx.accel"); private int[] _accelParams = new int[MaxAccelParams]; private AccelFuncEntry[] _accelFunctions = new AccelFuncEntry[100]; private int _numAccelFuncs; public Glk glk; public AccelSystem(GlulxVM vm) { this.vm = vm; } private AccelFunc accelFuncFor(int funcnum) { switch (funcnum) { case 1: return new Func1ZRegion(vm, glk, _accelParams); case 2: return new Func2CPTab(vm, glk, _accelParams); case 3: return new Func3RAPr(vm, glk, _accelParams); case 4: return new Func4RLPr(vm, glk, _accelParams); case 5: return new Func5OCCl(vm, glk, _accelParams); case 6: return new Func6RVPr(vm, glk, _accelParams); case 7: return new Func7OPPr(vm, glk, _accelParams); default: logger.warning(String.format("Unsupported Function number: %d", funcnum)); return null; } } private AccelFunc accelFuncForCallAddress(int callAddress) { for (int i = 0; i < _numAccelFuncs; i++) { if (_accelFunctions[i].callAddress == callAddress) return _accelFunctions[i].func; i++; } return null; } private void removeFuncForCallAddress(int callAddress) { int i = 0; boolean found = false; while (!found && i < _numAccelFuncs) { if (_accelFunctions[i].callAddress == callAddress) { found = true; } else i++; } // now shift everything from i to the left to fill the gap while (i < _numAccelFuncs - 1) { _accelFunctions[i] = _accelFunctions[i + 1]; i++; } _numAccelFuncs--; } private void addFuncForCallAddress(int callAddress, AccelFunc func) { _accelFunctions[_numAccelFuncs++] = new AccelFuncEntry(callAddress, func); } public void setParameter(int index, int value) { if (index >= 0 && index <= 8) _accelParams[index] = value; } public void 
setFunction(int accelFuncNum, int callAddress) { //logger.info("accelfunc #$%02x #$%02x".format(accelFuncNum, callAddress)); if (accelFuncNum == 0) removeFuncForCallAddress(callAddress); else { AccelFunc accelFunc = accelFuncFor(accelFuncNum); if (accelFunc != null) addFuncForCallAddress(callAddress, accelFunc); } } public boolean isAccelerated(int callAddress) { return _numAccelFuncs > 0 && accelFuncForCallAddress(callAddress) != null; } public void call(int callAddress, int[] args, int numArgs) { //logger.info("Function address $%02x is accelerated - REPLACE (TODO)") AccelFunc func = accelFuncForCallAddress(callAddress); int retval = func.call(args, numArgs); vm.popCallStub(retval); } }
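The class above is driven by the interpreter through a small API: setParameter for the accelparam opcode, setFunction for accelfunc (where function number 0 removes the acceleration), and isAccelerated/call on the function-call path. Below is a hedged sketch of that call pattern; the method names and dispatch code are invented for illustration and are not ZMPP/Glulx API.

// Illustrative dispatch only; accelSystem is the AccelSystem owned by the VM.
void onAccelParam(AccelSystem accelSystem, int index, int value) {
    accelSystem.setParameter(index, value);            // accelparam index, value
}

void onAccelFunc(AccelSystem accelSystem, int funcNum, int callAddress) {
    accelSystem.setFunction(funcNum, callAddress);     // funcNum == 0 removes the entry
}

void callFunction(AccelSystem accelSystem, int callAddress, int[] args, int numArgs) {
    if (accelSystem.isAccelerated(callAddress)) {
        // Run the native replacement and pop the call stub with its return value.
        accelSystem.call(callAddress, args, numArgs);
    } else {
        // ... fall back to the interpreted call path ...
    }
}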
package com.simple.creact.simple.app.data.datasource;

import com.simple.creact.library.framework.datasource.impl.BaseDataFetcher;

import retrofit2.Call;

/**
 * @author:YJJ
 * @date:2016/3/10
 * @email:<EMAIL>
 */
public abstract class RetrofitDataFetcher<R> extends BaseDataFetcher<R> {

    protected Call<R> call;

    public RetrofitDataFetcher(Call<R> call) {
        this.call = call;
    }

    public RetrofitDataFetcher() {
    }

    public Call<R> getCall() {
        return call;
    }

    public void setCall(Call<R> call) {
        this.call = call;
    }

    /**
     * Subclasses don't need to implement this method.
     */
    @Override
    public void close() {
        if (call != null) call.cancel();
    }
}
Epidemiological role of birds in the transmission and maintenance of zoonoses. The risk of zoonoses spreading from birds to humans is lower, quantitatively speaking, than the risk of transmission between other host groups, because the two taxonomic groups share fewer pathogens. Nevertheless, birds have a number of epidemiological characteristics that make them extremely important hosts in the transmission and maintenance of zoonoses, including their susceptibility to pathogens that are extremely hazardous to humans (such as highly pathogenic avian influenza virus, West Nile virus and Chlamydia psittaci) and their ability to travel long distances, especially in the case of migratory birds. The fact that the human diet includes poultry products (meat, eggs and their by-products) also means that most human cases of foodborne zoonoses are infections of avian origin. Lastly, close contact between humans and pet birds or urban birds leads to interactions of public health concern. This article sets out to describe the main factors that determine the role of birds in the epidemiology of zoonotic infections.
// An agent which will be part of a chain.
// This agent mutates the data in a received message.
class chain_agent final : public so_5::agent_t
{
public :
    chain_agent(context_t ctx,
        std::string name,
        std::string prefix,
        std::string suffix,
        so_5::mbox_t next)
        : so_5::agent_t(std::move(ctx))
        , m_name(std::move(name))
        , m_prefix(std::move(prefix))
        , m_suffix(std::move(suffix))
        , m_next(std::move(next))
    {
        so_subscribe_self().event(&chain_agent::on_envelope);
    }

private :
    const std::string m_name;
    const std::string m_prefix;
    const std::string m_suffix;
    const so_5::mbox_t m_next;

    void on_envelope(mutable_mhood_t<envelope> cmd)
    {
        std::cout << m_name << ", msg_addr=" << cmd.get()
            << ", old_content=" << cmd->m_payload;

        cmd->m_payload.insert(begin(cmd->m_payload), begin(m_prefix), end(m_prefix));
        cmd->m_payload.insert(end(cmd->m_payload), begin(m_suffix), end(m_suffix));

        std::cout << ", new_content=" << cmd->m_payload << std::endl;

        so_5::send(m_next, std::move(cmd));
    }
};
#include "Borg/List.h"
#include "Borg/Ref.h"
#include <gtest/gtest.h>

using namespace Borg;

TEST(List, Add)
{
    Ref<IList<int>> numbers = CreateRef<List<int>>();
    numbers->Add(10);
}

TEST(List, ForLoop)
{
    //Ref<IList2<int>> numbers = CreateRef<List2<int>>();
    auto numbers = CreateRef<List<int>>();
    numbers->Add(10);
    numbers->Add(1);
    numbers->Add(11);

    for(auto number: *numbers)
    {
        auto x = 0;
    }
}
def fetch_tf_map(self, attribute, products):
/** * Simple POJO providing parameter details for {@link NamedOperation}s. */ @JsonDeserialize(builder = ParameterDetail.Builder.class) public class ParameterDetail implements Serializable { private static final long serialVersionUID = -883113279877131469L; private String description; private Object defaultValue; private Class valueClass; private boolean required; private List options; public ParameterDetail(final String description, final Class clazz, final boolean required, final Object defaultValue, final List options) { if (null == description) { throw new IllegalArgumentException("description must not be empty"); } if (null == clazz) { throw new IllegalArgumentException("class must not be empty"); } if (required && null != defaultValue) { throw new IllegalArgumentException("required is true but a default value has been provided"); } this.description = description; this.required = required; this.defaultValue = defaultValue; this.valueClass = clazz; this.options = options; try { byte[] json = JSONSerialiser.serialise(defaultValue); JSONSerialiser.deserialise(json, this.valueClass); } catch (final SerialisationException e) { throw new IllegalArgumentException(e); } } public String getDescription() { return description; } public List getOptions() { return options; } public void setOptions(final List options) { this.options = options; } public Object getDefaultValue() { return defaultValue; } public boolean isRequired() { return required; } public Class getValueClass() { return valueClass; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (null == obj || getClass() != obj.getClass()) { return false; } final ParameterDetail pd = (ParameterDetail) obj; return new EqualsBuilder() .append(defaultValue, pd.defaultValue) .append(required, pd.required) .append(description, pd.description) .append(valueClass, pd.valueClass) .append(options, pd.options) .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(71, 5) .append(defaultValue) .append(required) .append(description) .append(valueClass) .append(options) .hashCode(); } @Override public String toString() { return new ToStringBuilder(this) .appendSuper(super.toString()) .append("description", description) .append("valueClass", valueClass) .append("required", required) .append("defaultValue", defaultValue) .append("options", options) .toString(); } @JsonPOJOBuilder(withPrefix = "") public static final class Builder { private String description; private Object defaultValue; private boolean required = false; private Class valueClass; private List options; public Builder defaultValue(final Object defaultValue) { this.defaultValue = defaultValue; return this; } public Builder description(final String description) { this.description = description; return this; } public Builder valueClass(final Class clazz) { this.valueClass = clazz; return this; } public Builder required(final boolean required) { this.required = required; return this; } public Builder options(final List options) { this.options = options; return this; } public ParameterDetail build() { return new ParameterDetail(description, valueClass, required, defaultValue, options); } } }
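Since instances are deserialized through the Builder, here is a short construction example; the parameter values are made up for illustration, but every call matches the Builder declared above.

// Illustrative values only.
ParameterDetail resultLimit = new ParameterDetail.Builder()
        .description("Maximum number of results to return")
        .valueClass(Long.class)
        .defaultValue(10L)
        .required(false)
        .build();

Note that, as the constructor enforces, a parameter cannot be marked required and carry a default value at the same time.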
import adblock from confvar import * import bookmark import requests import favicon from PyQt5.QtCore import * from PyQt5.QtWidgets import * from PyQt5.QtGui import * import PyQt5.QtWebEngineWidgets from PyQt5.QtWebEngineWidgets import * from PyQt5.QtPrintSupport import * from PyQt5.QtWebEngineCore import * privileges = [] def setPrivileges(p=[]): global privileges if p == "*": privileges = ["bookmarks", "filesystem"] return privileges = p def getPriveleges(): global privileges return privileges class WebChannel(QObject): def __init__(self): super().__init__() setPrivileges([]) @pyqtSlot(result=list) def getBookmarkFavicons(self): if not "bookmarks" in privileges: print("Insufficient permissions") return result = [] for index in range(0, len(bookmark.getBookmarks())): favi = favicon.get(bookmark.getBookmarks()[index])[0].url result.append(favi) return result @pyqtSlot(result=list) def getBookmarkTitles(self): if not "bookmarks" in privileges: print("Insufficient permissions") return result = [] hearders = {'headers':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:51.0) Gecko/20100101 Firefox/51.0'} for index in range(0, len(bookmark.getBookmarks())): n = requests.get(bookmark.getBookmarks()[index], headers=hearders) result.append(n.text[n.text.find('<title>') + 7 : n.text.find('</title>')]) return result @pyqtSlot(result=list) def getBookmarkUrls(self): if not "bookmarks" in privileges: print("Insufficient permissions") return return bookmark.getBookmarks() @pyqtSlot(str, result=str) def readFile(self, path): if not "filesystem" in privileges: print("Insufficient permissions") return with open(BASE_PATH + path) as F: return F.read() @pyqtSlot(str, str) def writeFile(self, path, data): if not "filesystem" in privileges: print("Insufficient permissions") return with open(BASE_PATH + path, "w") as F: F.write(data) @pyqtSlot(result=str) def locale(self): if not sparse(BROWSER_LOCALE): return "en_US" return BROWSER_LOCALE class CustomWebEnginePage(QWebEnginePage): # Hook the "add_new_tab" method def set_add_new_tab_h(self, _add_new_tab): self.add_new_tab = _add_new_tab def acceptNavigationRequest(self, url, _type, isMainFrame): modifiers = QApplication.keyboardModifiers() if _type == QWebEnginePage.NavigationTypeLinkClicked and (modifiers & Qt.ControlModifier): self.add_new_tab(QUrl(url), silent=1) return False return super().acceptNavigationRequest(url, _type, isMainFrame) class RequestInterceptor(QWebEngineUrlRequestInterceptor): def interceptRequest(self, info): url = info.requestUrl().toString() if adblock.match(url) != False: info.block(True) if BROWSER_HTTPS_ONLY: if url[:5] == "http:": info.redirect(QUrl(url.replace("http:", "https:")))
package fnug.resource;

import com.googlecode.jslint4java.JSLintResult;

/**
 * Tagging interface for entities that have a JSLint result.
 *
 * @author <NAME>
 *
 */
public interface HasJSLintResult {

    /**
     * Returns the jslint result. If the entity does not have a lint result, null is returned.
     */
    JSLintResult getJSLintResult();

}
#include <date/tz.h>

#include <cpp11/strings.hpp>

// -----------------------------------------------------------------------------

[[cpp11::register]]
cpp11::writable::strings tzdb_version_cpp() {
  const date::tzdb& db = date::get_tzdb();
  cpp11::writable::strings out{db.version};
  return out;
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.rules.coercer; import com.facebook.buck.core.cell.nameresolver.CellNameResolver; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.BuildTargetWithOutputs; import com.facebook.buck.core.model.TargetConfiguration; import com.facebook.buck.core.model.UnconfiguredBuildTargetWithOutputs; import com.facebook.buck.core.path.ForwardRelativePath; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.google.common.reflect.TypeToken; /** Coercer for {@link BuildTarget} instances that can optionally have output labels. */ public class BuildTargetWithOutputsTypeCoercer extends LeafTypeNewCoercer<UnconfiguredBuildTargetWithOutputs, BuildTargetWithOutputs> { private final TypeCoercer<UnconfiguredBuildTargetWithOutputs, UnconfiguredBuildTargetWithOutputs> unconfiguredCoercer; public BuildTargetWithOutputsTypeCoercer( TypeCoercer<UnconfiguredBuildTargetWithOutputs, UnconfiguredBuildTargetWithOutputs> unconfiguredCoercer) { this.unconfiguredCoercer = unconfiguredCoercer; } @Override public TypeToken<BuildTargetWithOutputs> getOutputType() { return TypeToken.of(BuildTargetWithOutputs.class); } @Override public TypeToken<UnconfiguredBuildTargetWithOutputs> getUnconfiguredType() { return TypeToken.of(UnconfiguredBuildTargetWithOutputs.class); } @Override public UnconfiguredBuildTargetWithOutputs coerceToUnconfigured( CellNameResolver cellRoots, ProjectFilesystem filesystem, ForwardRelativePath pathRelativeToProjectRoot, Object object) throws CoerceFailedException { return unconfiguredCoercer.coerceToUnconfigured( cellRoots, filesystem, pathRelativeToProjectRoot, object); } @Override public BuildTargetWithOutputs coerce( CellNameResolver cellRoots, ProjectFilesystem filesystem, ForwardRelativePath pathRelativeToProjectRoot, TargetConfiguration targetConfiguration, TargetConfiguration hostConfiguration, UnconfiguredBuildTargetWithOutputs object) throws CoerceFailedException { return object.configure(targetConfiguration); } }
/** * Simple example of extending XML Type. * @author jflute */ public class MyTypeOfXML extends TnAbstractValueType { public MyTypeOfXML() { super(Types.OTHER); } public Object getValue(ResultSet resultSet, int index) throws SQLException { String xmlString = resultSet.getString(index); return xmlString != null ? new MyXML().setup(xmlString) : null; } public Object getValue(ResultSet resultSet, String columnName) throws SQLException { String xmlString = resultSet.getString(columnName); return xmlString != null ? new MyXML().setup(xmlString) : null; } public Object getValue(CallableStatement cs, int index) throws SQLException { String xmlString = cs.getString(index); return xmlString != null ? new MyXML().setup(xmlString) : null; } public Object getValue(CallableStatement cs, String parameterName) throws SQLException { String xmlString = cs.getString(parameterName); return xmlString != null ? new MyXML().setup(xmlString) : null; } public void bindValue(Connection conn, PreparedStatement ps, int index, Object value) throws SQLException { if (value == null) { setNull(ps, index); } else { ps.setObject(index, value, Types.OTHER); } } public void bindValue(Connection conn, CallableStatement cs, String parameterName, Object value) throws SQLException { if (value == null) { setNull(cs, parameterName); } else { cs.setObject(parameterName, value, Types.OTHER); } } }
<reponame>jamespayor/pdfium<gh_stars>1-10 // Copyright 2016 PDFium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Original code copyright 2014 Foxit Software Inc. http://www.foxitsoftware.com #include "xfa/fxfa/parser/xfa_basic_data.h" #include "fxjs/cjx_datawindow.h" #include "fxjs/cjx_eventpseudomodel.h" #include "fxjs/cjx_hostpseudomodel.h" #include "fxjs/cjx_layoutpseudomodel.h" #include "fxjs/cjx_logpseudomodel.h" #include "fxjs/cjx_node.h" #include "fxjs/cjx_nodelist.h" #include "fxjs/cjx_signaturepseudomodel.h" #include "xfa/fxfa/fxfa_basic.h" const XFA_SCRIPTHIERARCHY g_XFAScriptIndex[] = { {0, 0, 0, 2, 316}, {0, 0, 2, 2, 316}, {0, 0, 4, 2, 316}, {0, 0, 6, 8, 316}, {0, 0, 14, 4, 316}, {0, 0, 18, 5, 317}, {0, 0, 23, 2, 316}, {0, 0, 25, 1, 316}, {0, 0, 26, 12, 316}, {0, 0, 38, 2, 316}, {0, 0, 40, 1, 316}, {0, 0, 41, 3, 316}, {0, 0, 44, 2, 316}, {0, 0, 46, 6, 316}, {0, 0, 52, 0, 316}, {0, 0, 52, 2, 319}, {0, 0, 54, 2, 316}, {0, 0, 56, 6, 316}, {0, 0, 62, 0, 316}, {0, 0, 62, 0, 316}, {0, 0, 62, 2, 316}, {0, 0, 64, 2, 316}, {0, 0, 66, 6, 316}, {0, 0, 72, 4, 316}, {0, 0, 76, 7, 316}, {0, 0, 83, 2, 320}, {0, 0, 85, 3, 316}, {0, 0, 88, 3, 316}, {0, 0, 91, 2, 320}, {0, 0, 93, 6, 320}, {0, 0, 99, 4, 316}, {0, 0, 103, 2, 316}, {0, 0, 105, 3, 319}, {0, 0, 108, 4, 316}, {0, 0, 112, 3, 316}, {0, 0, 115, 2, 316}, {0, 0, 117, 6, 316}, {0, 0, 123, 12, 316}, {0, 0, 135, 2, 316}, {0, 0, 137, 5, 316}, {0, 0, 142, 2, 316}, {0, 0, 144, 3, 316}, {0, 0, 147, 1, 316}, {0, 0, 148, 14, 316}, {0, 0, 162, 2, 316}, {0, 0, 164, 2, 316}, {0, 0, 166, 3, 316}, {0, 0, 169, 2, 316}, {0, 0, 171, 2, 316}, {0, 0, 173, 2, 316}, {0, 0, 175, 4, 316}, {0, 0, 179, 2, 316}, {0, 0, 181, 2, 316}, {0, 0, 183, 0, 319}, {0, 0, 183, 20, 316}, {0, 0, 203, 1, 316}, {0, 0, 204, 3, 316}, {0, 0, 207, 2, 316}, {0, 0, 209, 0, 316}, {0, 0, 209, 2, 316}, {0, 0, 211, 2, 316}, {0, 0, 213, 4, 320}, {0, 0, 217, 0, 316}, {0, 0, 217, 1, 316}, {0, 0, 218, 3, 316}, {0, 4, 221, 0, 312}, {4, 1, 221, 1, 316}, {5, 0, 222, 2, 316}, {5, 1, 224, 3, -1}, {6, 0, 227, 0, 316}, {6, 0, 227, 2, 316}, {6, 0, 229, 2, 316}, {6, 0, 231, 3, 316}, {6, 0, 234, 2, 316}, {6, 0, 236, 6, 316}, {6, 0, 242, 10, 317}, {6, 0, 252, 9, 316}, {6, 0, 261, 5, 318}, {6, 0, 266, 4, 318}, {6, 0, 270, 2, 316}, {6, 0, 272, 2, 316}, {6, 0, 274, 5, 316}, {6, 0, 279, 3, 316}, {6, 0, 282, 2, 316}, {6, 0, 284, 2, 316}, {6, 0, 286, 2, 316}, {6, 0, 288, 2, 316}, {6, 0, 290, 4, 316}, {6, 0, 294, 3, 316}, {6, 0, 297, 3, 316}, {6, 0, 300, 2, 316}, {6, 0, 302, 2, 316}, {6, 0, 304, 2, 316}, {6, 13, 306, 36, 317}, {19, 0, 342, 3, 316}, {19, 0, 345, 2, 316}, {19, 0, 347, 2, 316}, {19, 0, 349, 2, 316}, {19, 0, 351, 7, 317}, {19, 2, 358, 16, 312}, {21, 0, 374, 2, 320}, {21, 0, 376, 2, 316}, {21, 0, 378, 0, 316}, {21, 0, 378, 2, 316}, {21, 0, 380, 6, 316}, {21, 0, 386, 3, 316}, {21, 0, 389, 2, 316}, {21, 0, 391, 4, 316}, {21, 0, 395, 0, 316}, {21, 5, 395, 30, 316}, {26, 0, 425, 2, 320}, {26, 0, 427, 3, 316}, {26, 0, 430, 2, 320}, {26, 0, 432, 5, 316}, {26, 0, 437, 2, 316}, {26, 0, 439, 3, 316}, {26, 0, 442, 2, 316}, {26, 0, 444, 8, 316}, {26, 0, 452, 1, 316}, {26, 0, 453, 2, 316}, {26, 0, 455, 2, 316}, {26, 0, 457, 4, 316}, {26, 0, 461, 0, 316}, {26, 0, 461, 2, 316}, {26, 0, 463, 2, 316}, {26, 0, 465, 6, 318}, {26, 5, 471, 31, 317}, {31, 0, 502, 2, 320}, {31, 0, 504, 2, 316}, {31, 0, 506, 2, 316}, {31, 0, 508, 4, 320}, {31, 16, 512, 11, 312}, {47, 0, 523, 2, 316}, {47, 0, 525, 2, 316}, {47, 0, 527, 2, 316}, {47, 
0, 529, 2, 316}, {47, 0, 531, 3, 316}, {47, 0, 534, 2, 316}, {47, 0, 536, 3, 316}, {47, 0, 539, 3, 316}, {47, 0, 542, 4, 318}, {47, 0, 546, 4, 316}, {47, 0, 550, 2, 316}, {47, 0, 552, 0, 316}, {47, 0, 552, 2, 316}, {47, 0, 554, 3, 316}, {47, 0, 557, 6, 316}, {47, 0, 563, 2, 316}, {47, 0, 565, 2, 316}, {47, 0, 567, 3, 316}, {47, 19, 570, 1, 312}, {66, 0, 571, 2, 316}, {66, 0, 573, 8, 316}, {66, 0, 581, 2, 316}, {66, 0, 583, 4, 316}, {66, 0, 587, 5, 316}, {66, 0, 592, 3, 316}, {66, 0, 595, 2, 320}, {66, 0, 597, 3, 316}, {66, 0, 600, 7, 316}, {66, 0, 607, 3, 316}, {66, 0, 610, 0, 316}, {66, 0, 610, 4, 317}, {66, 0, 614, 4, 318}, {66, 0, 618, 2, 320}, {66, 0, 620, 5, 316}, {66, 0, 625, 2, 316}, {66, 0, 627, 2, 316}, {66, 0, 629, 2, 320}, {66, 0, 631, 2, 320}, {66, 0, 633, 4, 316}, {66, 0, 637, 2, 316}, {66, 0, 639, 1, 316}, {66, 0, 640, 1, 316}, {66, 0, 641, 4, 316}, {66, 0, 645, 3, 316}, {66, 7, 648, 2, 319}, {73, 0, 650, 2, 316}, {73, 0, 652, 2, 316}, {73, 0, 654, 3, 316}, {73, 4, 657, 4, 312}, {77, 0, 661, 10, 316}, {77, 0, 671, 2, 316}, {77, 0, 673, 4, 316}, {77, 0, 677, 4, 316}, {77, 0, 681, 3, 316}, {77, 0, 684, 2, 316}, {77, 0, 686, 2, 316}, {77, 0, 688, 8, 317}, {77, 0, 696, 3, 316}, {77, 0, 699, 3, 316}, {77, 0, 702, 2, 316}, {77, 0, 704, 2, 316}, {77, 0, 706, 5, 316}, {77, 0, 711, 4, 316}, {77, 0, 715, 2, 316}, {77, 0, 717, 8, 318}, {77, 0, 725, 1, 316}, {77, 0, 726, 2, 320}, {77, 0, 728, 3, 316}, {77, 1, 731, 4, 316}, {78, 0, 735, 5, 316}, {78, 0, 740, 3, 316}, {78, 0, 743, 1, 316}, {78, 0, 744, 2, 320}, {78, 0, 746, 0, 313}, {78, 0, 746, 2, 316}, {78, 5, 748, 3, 316}, {83, 0, 751, 5, 316}, {83, 0, 756, 7, 316}, {83, 0, 763, 6, 316}, {83, 0, 769, 3, 316}, {83, 0, 772, 2, 316}, {83, 0, 774, 5, 316}, {83, 0, 779, 2, 316}, {83, 0, 781, 0, 314}, {83, 0, 781, 2, 316}, {83, 0, 783, 1, 316}, {83, 0, 784, 2, 316}, {83, 0, 786, 5, 316}, {83, 0, 791, 2, 316}, {83, 0, 793, 1, 316}, {83, 0, 794, 2, 316}, {83, 0, 796, 2, 316}, {83, 0, 798, 6, 316}, {83, 0, 804, 2, 316}, {83, 0, 806, 2, 320}, {83, 0, 808, 3, 316}, {83, 0, 811, 4, 316}, {83, 0, 815, 2, 316}, {83, 0, 817, 2, 316}, {83, 0, 819, 2, 316}, {83, 0, 821, 2, 316}, {83, 0, 823, 2, 316}, {83, 0, 825, 2, 316}, {83, 0, 827, 4, 318}, {83, 1, 831, 2, 316}, {84, 0, 833, 5, 316}, {84, 0, 838, 20, 317}, {84, 0, 858, 2, 316}, {84, 0, 860, 0, 316}, {84, 0, 860, 2, 316}, {84, 0, 862, 4, 320}, {84, 0, 866, 0, 316}, {84, 0, 866, 2, 316}, {84, 0, 868, 1, 316}, {84, 0, 869, 2, 316}, {84, 0, 871, 3, 316}, {84, 0, 874, 2, 316}, {84, 0, 876, 2, 316}, {84, 0, 878, 4, 316}, {84, 0, 882, 3, 316}, {84, 0, 885, 17, 316}, {84, 6, 902, 1, 319}, {90, 0, 903, 2, 316}, {90, 0, 905, 2, 320}, {90, 0, 907, 2, 320}, {90, 0, 909, 2, 316}, {90, 0, 911, 2, 316}, {90, 0, 913, 4, 318}, {90, 0, 917, 2, 316}, {90, 5, 919, 0, 312}, {95, 0, 919, 2, 316}, {95, 0, 921, 4, 316}, {95, 0, 925, 4, 316}, {95, 0, 929, 6, 316}, {95, 0, 935, 2, 316}, {95, 0, 937, 4, 316}, {95, 0, 941, 3, 316}, {95, 0, 944, 3, 316}, {95, 0, 947, 2, 316}, {95, 0, 949, 3, 316}, {95, 0, 952, 0, 316}, {95, 0, 952, 0, 316}, {95, 0, 952, 2, 316}, {95, 0, 954, 7, 316}, {95, 0, 961, 2, 316}, {95, 0, 963, 0, 319}, {95, 0, 963, 7, 316}, {95, 0, 970, 2, 316}, {95, 0, 972, 2, 316}, {95, 3, 974, 1, 316}, {98, 0, 975, 2, 320}, {98, 0, 977, 4, 316}, {98, 0, 981, 0, 316}, {98, 0, 981, 2, 316}, {98, 0, 983, 2, 316}, {98, 0, 985, 4, 316}, {98, 0, 989, 1, 316}, {98, 0, 990, 2, 316}, {98, 0, 992, 2, 316}, {98, 0, 994, 3, 316}, {98, 0, 997, 2, 317}, {98, 0, 999, 0, 316}, {98, 0, 999, 4, 316}, {98, 0, 1003, 4, 316}, {98, 0, 
1007, 2, 316}, {98, 0, 1009, 3, 316}, {98, 0, 1012, 4, 316}, {98, 0, 1016, 7, 316}, {98, 0, 1023, 4, 316}, {98, 0, 1027, 1, 313}, {98, 17, 1028, 3, 316}, {115, 0, 1031, 5, 316}, {115, 0, 1036, 2, 316}, {115, 0, 1038, 1, 316}, {115, 0, 1039, 4, 316}, {115, 0, 1043, 2, 316}, {115, 0, 1045, 2, 316}, {115, 0, 1047, 1, 316}, {115, 0, 1048, 2, 316}, {115, 0, 1050, 5, 316}, {115, 0, 1055, 1, -1}, {115, 4, 1056, 1, 312}, {119, 1, 1057, 0, 313}, {120, 2, 1057, 8, 312}, {122, 11, 1065, 6, 315}, {133, 2, 1071, 0, 316}, {135, 0, 1071, 0, 316}, {135, 3, 1071, 2, 316}, {138, 0, 1073, 2, 316}, }; const int32_t g_iScriptIndexCount = sizeof(g_XFAScriptIndex) / sizeof(XFA_SCRIPTHIERARCHY); const XFA_METHODINFO g_SomMethodData[] = { {0x3c752495, L"verify", (XFA_METHOD_CALLBACK)&CJX_SignaturePseudoModel::Verify}, {0xa68635f1, L"sign", (XFA_METHOD_CALLBACK)&CJX_SignaturePseudoModel::Sign}, {0xa7f2c5e6, L"enumerate", (XFA_METHOD_CALLBACK)&CJX_SignaturePseudoModel::Enumerate}, {0xd8ed1467, L"clear", (XFA_METHOD_CALLBACK)&CJX_SignaturePseudoModel::Clear}, {0x4bdcce13, L"execute", (XFA_METHOD_CALLBACK)&CJX_Node::Script_WsdlConnection_Execute}, {0x1c296ba4, L"restore", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Delta_Restore}, {0x7d123a9, L"clearItems", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_ClearItems}, {0xfb0b007, L"execEvent", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_ExecEvent}, {0x6716ce97, L"execInitialize", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_ExecInitialize}, {0x7bb919c2, L"deleteItem", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_DeleteItem}, {0x9f053d5e, L"getSaveItem", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_GetSaveItem}, {0xbbd32747, L"boundItem", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_BoundItem}, {0xc492d950, L"getItemState", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_GetItemState}, {0xc6013cd3, L"execCalculate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_ExecCalculate}, {0xd8930d0e, L"setItems", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_SetItems}, {0xe0f15045, L"getDisplayItem", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_GetDisplayItem}, {0xe23acddc, L"setItemState", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_SetItemState}, {0xe2dfb2f8, L"addItem", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_AddItem}, {0xef8ce48f, L"execValidate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Field_ExecValidate}, {0x461079ef, L"emit", (XFA_METHOD_CALLBACK)&CJX_EventPseudoModel::Emit}, {0xfec90c63, L"reset", (XFA_METHOD_CALLBACK)&CJX_EventPseudoModel::Reset}, {0xfb0b007, L"execEvent", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ExclGroup_ExecEvent}, {0x3d832221, L"selectedMember", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ExclGroup_SelectedMember}, {0x6716ce97, L"execInitialize", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ExclGroup_ExecInitialize}, {0xc6013cd3, L"execCalculate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ExclGroup_ExecCalculate}, {0xef8ce48f, L"execValidate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ExclGroup_ExecValidate}, {0xfb0b007, L"execEvent", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Subform_ExecEvent}, {0x6716ce97, L"execInitialize", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Subform_ExecInitialize}, {0xc6013cd3, L"execCalculate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Subform_ExecCalculate}, {0xd9b9b1f1, L"getInvalidObjects", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Subform_GetInvalidObjects}, {0xef8ce48f, L"execValidate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Subform_ExecValidate}, {0xa366b7c, L"exportData", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::ExportData}, {0x16cc226c, L"gotoURL", 
(XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::GotoURL}, {0x1e0722f5, L"pageDown", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::PageDown}, {0x3e66cb2c, L"setFocus", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::SetFocus}, {0x4ac9faae, L"openList", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::OpenList}, {0x7b89714f, L"response", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::Response}, {0x7fd9fd58, L"documentInBatch", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::DocumentInBatch}, {0xaf1d019d, L"resetData", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::ResetData}, {0xb07be13c, L"beep", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::Beep}, {0xb1882ca0, L"getFocus", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::GetFocus}, {0xbf4ba9ee, L"messageBox", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::MessageBox}, {0xd6d4dbc1, L"documentCountInBatch", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::DocumentCountInBatch}, {0xdd7676ed, L"print", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::Print}, {0xe2f863d0, L"currentDateTime", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::CurrentDateTime}, {0xf995d0f5, L"importData", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::ImportData}, {0xfeb96b62, L"pageUp", (XFA_METHOD_CALLBACK)&CJX_HostPseudoModel::PageUp}, {0x68, L"h", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::H}, {0x77, L"w", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::W}, {0x78, L"x", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::X}, {0x79, L"y", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::Y}, {0x5460206, L"pageCount", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::PageCount}, {0x5eb5b0f, L"pageSpan", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::PageSpan}, {0x10f1b1bd, L"page", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::Page}, {0x1c1e6318, L"pageContent", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::PageContent}, {0x1c1f4a5c, L"absPageCount", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::AbsPageCount}, {0x1ec47db5, L"absPageCountInBatch", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::AbsPageCountInBatch}, {0x2e4ecbdb, L"sheetCountInBatch", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::SheetCountInBatch}, {0x2fcff4b5, L"relayout", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::Relayout}, {0x3bf1c2a5, L"absPageSpan", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::AbsPageSpan}, {0x5775c2cc, L"absPageInBatch", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::AbsPageInBatch}, {0x8c5feb32, L"sheetInBatch", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::SheetInBatch}, {0x8f3a8379, L"sheet", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::Sheet}, {0x96f3c4cb, L"relayoutPageArea", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::RelayoutPageArea}, {0xd2a4a542, L"sheetCount", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::SheetCount}, {0xe74f0653, L"absPage", (XFA_METHOD_CALLBACK)&CJX_LayoutPseudoModel::AbsPage}, {0x44c352ad, L"formNodes", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Template_FormNodes}, {0x45efb847, L"remerge", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Template_Remerge}, {0x6716ce97, L"execInitialize", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Template_ExecInitialize}, {0x712c6afa, L"createNode", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Template_CreateNode}, {0xa8a35e25, L"recalculate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Template_Recalculate}, {0xc6013cd3, L"execCalculate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Template_ExecCalculate}, {0xef8ce48f, L"execValidate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Template_ExecValidate}, {0x4cc1c0f9, L"moveCurrentRecord", (XFA_METHOD_CALLBACK)&CJX_DataWindow::MoveCurrentRecord}, {0x5779d65f, L"record", 
(XFA_METHOD_CALLBACK)&CJX_DataWindow::Record}, {0x8a476498, L"gotoRecord", (XFA_METHOD_CALLBACK)&CJX_DataWindow::GotoRecord}, {0xaac241c8, L"isRecordGroup", (XFA_METHOD_CALLBACK)&CJX_DataWindow::IsRecordGroup}, {0x1c6f4277, L"evaluate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Manifest_Evaluate}, {0x2afec2cc, L"moveInstance", (XFA_METHOD_CALLBACK)&CJX_Node::Script_InstanceManager_MoveInstance}, {0x2bf94a63, L"removeInstance", (XFA_METHOD_CALLBACK)&CJX_Node::Script_InstanceManager_RemoveInstance}, {0x303adaf4, L"setInstances", (XFA_METHOD_CALLBACK)&CJX_Node::Script_InstanceManager_SetInstances}, {0x4d76b89e, L"addInstance", (XFA_METHOD_CALLBACK)&CJX_Node::Script_InstanceManager_AddInstance}, {0xc660dc8a, L"insertInstance", (XFA_METHOD_CALLBACK)&CJX_Node::Script_InstanceManager_InsertInstance}, {0xddfd1ea1, L"metadata", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Desc_Metadata}, {0x44c352ad, L"formNodes", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Form_FormNodes}, {0x45efb847, L"remerge", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Form_Remerge}, {0x6716ce97, L"execInitialize", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Form_ExecInitialize}, {0xa8a35e25, L"recalculate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Form_Recalculate}, {0xc6013cd3, L"execCalculate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Form_ExecCalculate}, {0xef8ce48f, L"execValidate", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Form_ExecValidate}, {0x60490a85, L"message", (XFA_METHOD_CALLBACK)&CJX_LogPseudoModel::Message}, {0x60ecfcc9, L"traceDeactivate", (XFA_METHOD_CALLBACK)&CJX_LogPseudoModel::TraceDeactivate}, {0x86a0f4c0, L"traceActivate", (XFA_METHOD_CALLBACK)&CJX_LogPseudoModel::TraceActivate}, {0x93eac39a, L"traceEnabled", (XFA_METHOD_CALLBACK)&CJX_LogPseudoModel::TraceEnabled}, {0xd1227e6f, L"trace", (XFA_METHOD_CALLBACK)&CJX_LogPseudoModel::Trace}, {0x36c0ee14, L"getAttribute", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Packet_GetAttribute}, {0x5468e2a0, L"setAttribute", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Packet_SetAttribute}, {0xadc48de2, L"removeAttribute", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Packet_RemoveAttribute}, {0x3848b3f, L"next", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Next}, {0x14e25bc8, L"cancelBatch", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_CancelBatch}, {0x3ce05d68, L"first", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_First}, {0x649e1e65, L"updateBatch", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_UpdateBatch}, {0x6a3405dd, L"previous", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Previous}, {0x74818fb3, L"isBOF", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_IsBOF}, {0x74d07a76, L"isEOF", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_IsEOF}, {0x7613cb66, L"cancel", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Cancel}, {0x7baca2e3, L"update", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Update}, {0x8b90e1f2, L"open", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Open}, {0x9c6471b3, L"delete", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Delete}, {0xa7315093, L"addNew", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_AddNew}, {0xa7ce5f8d, L"requery", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Requery}, {0xc7368674, L"resync", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Resync}, {0xd9f47f36, L"close", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Close}, {0xf54481d4, L"last", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_Last}, {0xf7965460, L"hasDataChanged", (XFA_METHOD_CALLBACK)&CJX_Node::Script_Source_HasDataChanged}, {0x6275f6af, L"item", (XFA_METHOD_CALLBACK)&CJX_NodeList::Script_ListClass_Item}, 
{0x7033bfd5, L"insert", (XFA_METHOD_CALLBACK)&CJX_NodeList::Script_ListClass_Insert}, {0x9cab7cae, L"remove", (XFA_METHOD_CALLBACK)&CJX_NodeList::Script_ListClass_Remove}, {0xda12e518, L"append", (XFA_METHOD_CALLBACK)&CJX_NodeList::Script_ListClass_Append}, {0xd892a054, L"namedItem", (XFA_METHOD_CALLBACK)&CJX_NodeList::Script_TreelistClass_NamedItem}, {0xba2dd386, L"resolveNode", (XFA_METHOD_CALLBACK)&CJX_Node::Script_TreeClass_ResolveNode}, {0xe509e2b9, L"resolveNodes", (XFA_METHOD_CALLBACK)&CJX_Node::Script_TreeClass_ResolveNodes}, {0x1bca1ebd, L"applyXSL", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_ApplyXSL}, {0x36c0ee14, L"getAttribute", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_GetAttribute}, {0x5468e2a0, L"setAttribute", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_SetAttribute}, {0x5ee00996, L"setElement", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_SetElement}, {0x92dada4f, L"saveFilteredXML", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_SaveFilteredXML}, {0x9c456500, L"saveXML", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_SaveXML}, {0xabd3200a, L"getElement", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_GetElement}, {0xb269c60d, L"isPropertySpecified", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_IsPropertySpecified}, {0xb528be91, L"loadXML", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_LoadXML}, {0xd9f46591, L"clone", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_Clone}, {0xe006a76b, L"assignNode", (XFA_METHOD_CALLBACK)&CJX_Node::Script_NodeClass_AssignNode}, {0x7303fcea, L"getDelta", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ContainerClass_GetDelta}, {0xe7742c9d, L"getDeltas", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ContainerClass_GetDeltas}, {0x30ff6aad, L"clearErrorList", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ModelClass_ClearErrorList}, {0x712c6afa, L"createNode", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ModelClass_CreateNode}, {0x83a6411d, L"isCompatibleNS", (XFA_METHOD_CALLBACK)&CJX_Node::Script_ModelClass_IsCompatibleNS}, }; const int32_t g_iSomMethodCount = sizeof(g_SomMethodData) / sizeof(XFA_METHODINFO); const XFA_SCRIPTATTRIBUTEINFO g_SomAttributeData[] = { {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xb3543a6, L"max", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Max, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x45a6daf8, L"eofAction", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::EofAction, XFA_ScriptType::Basic}, {0x5ec958c0, L"cursorType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CursorType, XFA_ScriptType::Basic}, {0x79975f2b, L"lockType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LockType, 
XFA_ScriptType::Basic}, {0xa5340ff5, L"bofAction", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::BofAction, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc5762157, L"cursorLocation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CursorLocation, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1ee2d24d, L"instanceIndex", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_InstanceIndex, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x8c99377e, L"relation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relation, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0x3106c3a, L"beforeTarget", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::BeforeTarget, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x13a08bdb, L"overflowTarget", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::OverflowTarget, XFA_ScriptType::Basic}, {0x169134a1, L"overflowLeader", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::OverflowLeader, XFA_ScriptType::Basic}, {0x20914367, L"overflowTrailer", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::OverflowTrailer, XFA_ScriptType::Basic}, {0x453eaf38, L"startNew", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::StartNew, XFA_ScriptType::Basic}, {0x64110ab5, L"bookendTrailer", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::BookendTrailer, XFA_ScriptType::Basic}, {0xb6b44172, L"after", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::After, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc3c1442f, L"bookendLeader", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::BookendLeader, XFA_ScriptType::Basic}, {0xcb150479, L"afterTarget", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AfterTarget, XFA_ScriptType::Basic}, {0xf4ffce73, L"before", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Before, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue_Read, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x47cfa43a, L"allowNeutral", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AllowNeutral, XFA_ScriptType::Basic}, {0x7c2fd80b, L"mark", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Mark, XFA_ScriptType::Basic}, {0x8ed182d1, L"shape", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Shape, XFA_ScriptType::Basic}, {0xa686975b, L"size", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Size, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x5c054755, L"startAngle", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::StartAngle, XFA_ScriptType::Basic}, {0x74788f8b, L"sweepAngle", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::SweepAngle, XFA_ScriptType::Basic}, {0x9d833d75, L"circular", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Circular, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd996fa9b, L"hand", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Hand, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, 
XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xb0e5485d, L"bind", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Bind, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xcd7f7b54, L"from", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::From, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x8e29d794, L"signatureType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::SignatureType, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe11a2cbc, L"permissions", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Permissions, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x453eaf38, L"startNew", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::StartNew, XFA_ScriptType::Basic}, {0x9dcc3ab3, L"trailer", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Trailer, XFA_ScriptType::Basic}, {0xa6118c89, L"targetType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TargetType, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Target, XFA_ScriptType::Basic}, {0xcbcaf66d, L"leader", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Leader, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, 
L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x2d574d58, L"this", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Xfa_This, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x4fdc3454, L"timeStamp", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TimeStamp, XFA_ScriptType::Basic}, {0xb598a1f7, L"uuid", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Uuid, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xcfea02e, L"leftInset", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LeftInset, XFA_ScriptType::Basic}, {0x1356caf8, L"bottomInset", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::BottomInset, 
XFA_ScriptType::Basic}, {0x25764436, L"topInset", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TopInset, XFA_ScriptType::Basic}, {0x8a692521, L"rightInset", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::RightInset, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1e459b8f, L"nonRepudiation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::NonRepudiation, XFA_ScriptType::Basic}, {0x2bb3f470, L"encipherOnly", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::EncipherOnly, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0x5f760b50, L"digitalSignature", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DigitalSignature, XFA_ScriptType::Basic}, {0x69aa2292, L"crlSign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CrlSign, XFA_ScriptType::Basic}, {0x98fd4d81, L"keyAgreement", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::KeyAgreement, XFA_ScriptType::Basic}, {0xa66404cb, L"keyEncipherment", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::KeyEncipherment, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xca5dc27c, L"dataEncipherment", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataEncipherment, XFA_ScriptType::Basic}, {0xe8f118a8, L"keyCertSign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::KeyCertSign, XFA_ScriptType::Basic}, {0xfea53ec6, L"decipherOnly", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DecipherOnly, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x8b90e1f2, L"open", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Open, XFA_ScriptType::Basic}, {0x957fa006, L"commitOn", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CommitOn, XFA_ScriptType::Basic}, {0xb12128b7, L"textEntry", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TextEntry, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0x2282c73, L"hAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HAlign, XFA_ScriptType::Basic}, {0x8d4f1c7, L"textIndent", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TextIndent, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2a82d99c, L"marginRight", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MarginRight, XFA_ScriptType::Basic}, {0x534729c9, L"marginLeft", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MarginLeft, XFA_ScriptType::Basic}, {0x5739d1ff, L"radixOffset", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::RadixOffset, XFA_ScriptType::Basic}, {0x577682ac, L"preserve", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Preserve, XFA_ScriptType::Basic}, {0x731e0665, L"spaceBelow", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::SpaceBelow, XFA_ScriptType::Basic}, {0x7a7cc341, L"vAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::VAlign, XFA_ScriptType::Basic}, {0x836d4d7c, L"tabDefault", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TabDefault, XFA_ScriptType::Basic}, {0x8fa01790, L"tabStops", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TabStops, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd4b01921, L"lineHeight", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LineHeight, XFA_ScriptType::Basic}, {0xe18b5659, L"spaceAbove", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::SpaceAbove, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd861f8af, L"addRevocationInfo", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AddRevocationInfo, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, 
{0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbb8df5d, L"ref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Ref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6128d8d, L"activity", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Activity, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x43e349b, L"dataRowCount", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataRowCount, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x28e17e91, L"dataPrep", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataPrep, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0x3650557e, L"textLocation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TextLocation, XFA_ScriptType::Basic}, {0x3b582286, L"moduleWidth", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ModuleWidth, XFA_ScriptType::Basic}, {0x52666f1c, L"printCheckDigit", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::PrintCheckDigit, XFA_ScriptType::Basic}, {0x5404d6df, L"moduleHeight", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ModuleHeight, XFA_ScriptType::Basic}, {0x5ab23b6c, L"startChar", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::StartChar, XFA_ScriptType::Basic}, {0x7c732a66, L"truncate", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Truncate, XFA_ScriptType::Basic}, {0x8d181d61, L"wideNarrowRatio", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::WideNarrowRatio, XFA_ScriptType::Basic}, {0x99800d7a, L"errorCorrectionLevel", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ErrorCorrectionLevel, XFA_ScriptType::Basic}, {0x9a63da3d, L"upsMode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::UpsMode, XFA_ScriptType::Basic}, {0xaf754613, L"checksum", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Checksum, XFA_ScriptType::Basic}, {0xb045fbc5, L"charEncoding", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CharEncoding, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc035c6b1, L"dataColumnCount", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataColumnCount, XFA_ScriptType::Basic}, {0xd3c84d25, L"rowColumnRatio", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::RowColumnRatio, XFA_ScriptType::Basic}, {0xd57c513c, L"dataLength", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataLength, XFA_ScriptType::Basic}, {0xf575ca75, L"endChar", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::EndChar, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0x28dee6e9, L"format", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Format, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x60d4c8b1, L"output", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Output, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6a39990, L"input", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Input, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x2b5df51e, L"dataDescription", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataDescription, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x6c0d9600, L"currentValue", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Delta_CurrentValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x942643f0, L"savedValue", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Delta_SavedValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Delta_Target, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd4cc53f8, L"highlight", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Highlight, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x5518c25, L"break", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Break, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd996fa9b, L"hand", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Hand, XFA_ScriptType::Basic}, {0x78, L"x", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::X, XFA_ScriptType::Basic}, {0x79, L"y", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Y, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1059ec18, L"level", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_Integer, XFA_Attribute::Level, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xac06e2b0, L"colSpan", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ColSpan, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x21aed, L"id", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Id, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, 
{0x2f105f72, L"wordCharacterCount", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::WordCharacterCount, XFA_ScriptType::Basic}, {0x3d123c26, L"hyphenate", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Hyphenate, XFA_ScriptType::Basic}, {0x66539c48, L"excludeInitialCap", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ExcludeInitialCap, XFA_ScriptType::Basic}, {0x6a95c976, L"pushCharacterCount", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::PushCharacterCount, XFA_ScriptType::Basic}, {0x982bd892, L"remainCharacterCount", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::RemainCharacterCount, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe5c96d6a, L"excludeAllCaps", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ExcludeAllCaps, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x8af2e657, L"maxChars", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxChars, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc080cd3, L"url", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Url, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa6710262, L"credentialServerPolicy", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CredentialServerPolicy, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc2ba0923, L"urlPolicy", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::UrlPolicy, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x47d03490, L"connection", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Connection, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Target, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x78bff531, L"numberOfCells", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_Integer, XFA_Attribute::NumberOfCells, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x68, L"h", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::H, XFA_ScriptType::Basic}, {0x77, L"w", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::W, 
XFA_ScriptType::Basic}, {0x78, L"x", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::X, XFA_ScriptType::Basic}, {0x79, L"y", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Y, XFA_ScriptType::Basic}, {0x2282c73, L"hAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HAlign, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1abbd7e0, L"dataNode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DataNode, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x25839852, L"access", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Access, XFA_ScriptType::Basic}, {0x2ee7678f, L"rotate", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Rotate, XFA_ScriptType::Basic}, {0x3b1ddd06, L"fillColor", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_FillColor, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x54c399e3, L"formattedValue", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Field_FormattedValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0x5a3b375d, L"borderColor", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_BorderColor, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x5e936ed6, L"fontColor", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_FontColor, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x6826c408, L"parentSubform", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Field_ParentSubform, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x79b67434, L"mandatoryMessage", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_MandatoryMessage, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x7a7cc341, L"vAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::VAlign, XFA_ScriptType::Basic}, {0x7c2ff6ae, L"maxH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxH, XFA_ScriptType::Basic}, {0x7c2ff6bd, L"maxW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxW, XFA_ScriptType::Basic}, {0x7d02356c, L"minH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinH, XFA_ScriptType::Basic}, {0x7d02357b, L"minW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinW, XFA_ScriptType::Basic}, {0x85fd6faf, L"mandatory", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_Mandatory, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0x964fb42e, L"formatMessage", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Field_FormatMessage, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa03cf627, L"rawValue", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa60dd202, L"length", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Field_Length, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xac06e2b0, L"colSpan", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ColSpan, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, 
XFA_ScriptType::Basic}, {0xbc8fa350, L"locale", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Locale, XFA_ScriptType::Basic}, {0xc2bd40fd, L"anchorType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AnchorType, XFA_ScriptType::Basic}, {0xc4fed09b, L"accessKey", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AccessKey, XFA_ScriptType::Basic}, {0xcabfa3d0, L"validationMessage", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_ValidationMessage, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xdcecd663, L"editValue", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Field_EditValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xe07e5061, L"selectedIndex", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Field_SelectedIndex, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xf65e34be, L"borderWidth", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_BorderWidth, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x68, L"h", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::H, XFA_ScriptType::Basic}, {0x77, L"w", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::W, XFA_ScriptType::Basic}, {0x78, L"x", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::X, XFA_ScriptType::Basic}, {0x79, L"y", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Y, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd843798, L"fullText", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::FullText, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x1b6d1cf5, L"reenter", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::Reenter, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x1e6ffa9a, L"prevContentType", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::PrevContentType, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x25a3c206, L"soapFaultString", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::SoapFaultString, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x2e00c007, 
L"newContentType", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::NewContentType, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x4570500f, L"modifier", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::Modifier, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x50e2e33b, L"selEnd", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::SelEnd, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x57de87c2, L"prevText", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::PrevText, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x6ea04e0a, L"soapFaultCode", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::SoapFaultCode, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x6f6556cf, L"newText", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::NewText, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x891f4606, L"change", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::Change, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x8fa3c19e, L"shift", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::Shift, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa9d9b2e1, L"keyDown", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::KeyDown, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbfc89db2, L"selStart", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::SelStart, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc32a5812, L"commitKey", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::CommitKey, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", (XFA_ATTRIBUTE_CALLBACK)&CJX_EventPseudoModel::Target, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xa2e3514, L"cap", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Cap, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x5392ea58, L"stroke", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Stroke, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0x94446dcc, L"thickness", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Thickness, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1ec8ab2c, L"rate", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Rate, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x7b29630a, L"sourceBelow", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::SourceBelow, XFA_ScriptType::Basic}, {0x8fc36c0a, L"outputBelow", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::OutputBelow, XFA_ScriptType::Basic}, {0xe996b2fe, L"sourceAbove", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::SourceAbove, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x68, L"h", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::H, XFA_ScriptType::Basic}, {0x77, L"w", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::W, XFA_ScriptType::Basic}, {0x78, L"x", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::X, XFA_ScriptType::Basic}, {0x79, L"y", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Y, XFA_ScriptType::Basic}, {0x2282c73, L"hAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HAlign, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xf23332f, L"errorText", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_ExclGroup_ErrorText, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x1abbd7e0, L"dataNode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DataNode, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x25839852, L"access", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Access, XFA_ScriptType::Basic}, {0x3b1ddd06, L"fillColor", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_FillColor, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0x5a3b375d, L"borderColor", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_BorderColor, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x79b67434, L"mandatoryMessage", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_MandatoryMessage, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x7a7cc341, L"vAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::VAlign, XFA_ScriptType::Basic}, {0x7c2ff6ae, L"maxH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxH, XFA_ScriptType::Basic}, {0x7c2ff6bd, L"maxW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxW, XFA_ScriptType::Basic}, {0x7d02356c, L"minH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinH, XFA_ScriptType::Basic}, {0x7d02357b, L"minW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinW, XFA_ScriptType::Basic}, {0x7e7e845e, L"layout", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Layout, XFA_ScriptType::Basic}, {0x846599f8, L"transient", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_ExclGroup_Transient, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x85fd6faf, L"mandatory", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_Mandatory, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xa03cf627, L"rawValue", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_ExclGroup_DefaultAndRawValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_ExclGroup_DefaultAndRawValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xac06e2b0, L"colSpan", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ColSpan, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc2bd40fd, L"anchorType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AnchorType, XFA_ScriptType::Basic}, {0xc4fed09b, L"accessKey", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AccessKey, XFA_ScriptType::Basic}, {0xcabfa3d0, L"validationMessage", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_ValidationMessage, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xf65e34be, L"borderWidth", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_BorderWidth, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xeda9017a, L"scope", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Scope, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x47d03490, L"connection", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Connection, XFA_ScriptType::Basic}, {0x6cfa828a, L"runAt", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::RunAt, XFA_ScriptType::Basic}, {0xa1b0d2f5, L"executeType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ExecuteType, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe6f99487, L"hScrollPolicy", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HScrollPolicy, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, 
XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x42fed1fd, L"contentType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ContentType, XFA_ScriptType::Basic}, {0x54fa722c, L"transferEncoding", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TransferEncoding, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue_Read, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd171b240, L"aspect", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Aspect, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue_Read, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xdb55fec5, L"href", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Href, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Value, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0x7f6fd3d7, L"server", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Server, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x4b8bc840, L"fracDigits", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::FracDigits, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, 
{0xde7f92ba, L"leadDigits", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LeadDigits, XFA_ScriptType::Basic}, {0x68, L"h", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::H, XFA_ScriptType::Basic}, {0x77, L"w", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::W, XFA_ScriptType::Basic}, {0x78, L"x", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::X, XFA_ScriptType::Basic}, {0x79, L"y", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Y, XFA_ScriptType::Basic}, {0x2282c73, L"hAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HAlign, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1414d431, L"allowMacro", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AllowMacro, XFA_ScriptType::Basic}, {0x1517dfa1, L"columnWidths", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ColumnWidths, XFA_ScriptType::Basic}, {0x1abbd7e0, L"dataNode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DataNode, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x1ee2d24d, L"instanceIndex", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_InstanceIndex, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x25839852, L"access", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Access, XFA_ScriptType::Basic}, {0x3b1ddd06, L"fillColor", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_FillColor, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0x5a3b375d, L"borderColor", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_BorderColor, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x7a7cc341, L"vAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::VAlign, XFA_ScriptType::Basic}, {0x7c2ff6ae, L"maxH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxH, XFA_ScriptType::Basic}, {0x7c2ff6bd, L"maxW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxW, XFA_ScriptType::Basic}, {0x7d02356c, L"minH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinH, XFA_ScriptType::Basic}, {0x7d02357b, L"minW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinW, XFA_ScriptType::Basic}, {0x7e7e845e, L"layout", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Layout, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0x9cc17d75, L"mergeMode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MergeMode, XFA_ScriptType::Basic}, {0x9f3e9510, L"instanceManager", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Subform_InstanceManager, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0xac06e2b0, L"colSpan", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ColSpan, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbc8fa350, L"locale", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Subform_Locale, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc2bd40fd, L"anchorType", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AnchorType, XFA_ScriptType::Basic}, {0xcabfa3d0, L"validationMessage", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_ValidationMessage, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xe4c3a5e5, L"restoreState", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::RestoreState, XFA_ScriptType::Basic}, {0xeda9017a, L"scope", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Scope, XFA_ScriptType::Basic}, {0xf65e34be, L"borderWidth", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_BorderWidth, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0x5a50e9e6, L"version", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Handler_Version, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::Name, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x66c1ae9, L"validationsEnabled", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::ValidationsEnabled, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x14d04502, L"title", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::Title, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x392ae445, L"platform", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::Platform, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x5a50e9e6, L"version", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::Version, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x66cb1eed, L"variation", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::Variation, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x7717cbc4, L"language", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::Language, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x86698963, L"appType", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::AppType, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x94ff9e8d, L"calculationsEnabled", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::CalculationsEnabled, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbcd44940, L"currentPage", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::CurrentPage, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xd592b920, L"numPages", (XFA_ATTRIBUTE_CALLBACK)&CJX_HostPseudoModel::NumPages, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x24d85167, L"timeout", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Timeout, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x7d9fd7c5, L"mode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Mode, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x7d9fd7c5, L"mode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_SubmitFormat_Mode, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, 
{0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2038c9b2, L"role", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Role, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xfcef86b5, L"ready", (XFA_ATTRIBUTE_CALLBACK)&CJX_LayoutPseudoModel::Ready, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x5392ea58, L"stroke", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Stroke, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0x7b95e661, L"inverted", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Inverted, XFA_ScriptType::Basic}, {0x94446dcc, L"thickness", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Thickness, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe8dddf50, L"join", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, 
XFA_Attribute::Join, XFA_ScriptType::Basic}, {0xe948b9a8, L"radius", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Radius, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xabfa6c4f, L"cSpace", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CSpace, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Value, XFA_ScriptType::Basic}, {0x3848b3f, L"next", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Next, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x6a3405dd, L"previous", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Previous, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xf6b59543, L"intact", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Intact, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x268b7ec1, L"commandType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CommandType, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbde9abda, L"data", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Data, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x5b707a35, L"scriptTest", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ScriptTest, XFA_ScriptType::Basic}, {0x6b6ddcfb, L"nullTest", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::NullTest, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xe64b1129, L"formatTest", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::FormatTest, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, 
{0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x8c99377e, L"relation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relation, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x25363, L"to", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::To, XFA_ScriptType::Basic}, {0x66642f8f, L"force", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Force, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xcd7f7b54, L"from", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::From, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, 
XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Value, XFA_ScriptType::Basic}, {0x2b5df51e, L"dataDescription", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataDescription, XFA_ScriptType::Basic}, {0xbb8df5d, L"ref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Ref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x226ca8f1, L"operation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Operation, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xfb67185, L"recordsBefore", (XFA_ATTRIBUTE_CALLBACK)&CJX_DataWindow::RecordsBefore, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x21d5dfcb, L"currentRecordNumber", (XFA_ATTRIBUTE_CALLBACK)&CJX_DataWindow::CurrentRecordNumber, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x312af044, L"recordsAfter", (XFA_ATTRIBUTE_CALLBACK)&CJX_DataWindow::RecordsAfter, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x6aab37cb, L"isDefined", (XFA_ATTRIBUTE_CALLBACK)&CJX_DataWindow::IsDefined, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x42fed1fd, 
L"contentType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ContentType, XFA_ScriptType::Basic}, {0x6cfa828a, L"runAt", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::RunAt, XFA_ScriptType::Basic}, {0xa021b738, L"stateless", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Script_Stateless, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xadc4c77b, L"binding", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Binding, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x7a0cc471, L"passwordChar", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::PasswordChar, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe6f99487, L"hScrollPolicy", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HScrollPolicy, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe6f99487, L"hScrollPolicy", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HScrollPolicy, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x14a32d52, L"pagePosition", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::PagePosition, XFA_ScriptType::Basic}, {0x8340ea66, L"oddOrEven", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::OddOrEven, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xa85e74f3, L"initialNumber", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::InitialNumber, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe9ba472, L"numbered", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Numbered, XFA_ScriptType::Basic}, {0xd70798c2, L"blankOrNotBlank", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::BlankOrNotBlank, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x34ae103c, L"reserve", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Reserve, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xf2009339, L"placement", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Placement, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x42fed1fd, L"contentType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ContentType, XFA_ScriptType::Basic}, {0x54fa722c, L"transferEncoding", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TransferEncoding, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc4547a08, L"maxLength", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxLength, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xdb55fec5, L"href", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Href, XFA_ScriptType::Basic}, {0x29418bb7, L"abbr", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Abbr, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf531b059, L"writingScript", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::WritingScript, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1b8dce3e, L"action", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Action, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x9dcc3ab3, L"trailer", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Trailer, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Target, XFA_ScriptType::Basic}, {0xcbcaf66d, L"leader", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Leader, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x31b19c1, L"name", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xb3543a6, L"max", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_InstanceManager_Max, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xb356ca4, L"min", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_InstanceManager_Min, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x6f544d49, L"count", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_InstanceManager_Count, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x25363, L"to", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::To, XFA_ScriptType::Basic}, {0xa0933954, L"unicodeRange", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::UnicodeRange, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xcd7f7b54, L"from", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::From, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x4ef3d02c, L"orientation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Orientation, XFA_ScriptType::Basic}, {0x65e30c67, L"imagingBBox", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ImagingBBox, XFA_ScriptType::Basic}, {0x9041d4b0, L"short", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Short, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe349d044, L"stock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Stock, XFA_ScriptType::Basic}, {0xf6b4afb0, L"long", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Long, XFA_ScriptType::Basic}, {0x5ce6195, L"vScrollPolicy", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::VScrollPolicy, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x1ef3a64a, L"allowRichText", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AllowRichText, XFA_ScriptType::Basic}, {0x5a32e493, L"multiLine", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MultiLine, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe6f99487, L"hScrollPolicy", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HScrollPolicy, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xd52482e0, 
L"maxEntries", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxEntries, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x42fed1fd, L"contentType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ContentType, XFA_ScriptType::Basic}, {0x8855805f, L"contains", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Contains, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xe372ae97, L"isNull", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::IsNull, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x2b5df51e, L"dataDescription", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DataDescription, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbb8df5d, L"ref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Ref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x226ca8f1, L"operation", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Operation, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Target, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbb8df5d, L"ref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Ref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x42fed1fd, L"contentType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ContentType, XFA_ScriptType::Basic}, {0x54fa722c, L"transferEncoding", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TransferEncoding, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xf197844d, L"match", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Match, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd996fa9b, L"hand", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Hand, XFA_ScriptType::Basic}, {0x21aed, L"id", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Id, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, 
XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x28dee6e9, L"format", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Encrypt_Format, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x68, L"h", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::H, XFA_ScriptType::Basic}, {0x77, L"w", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::W, XFA_ScriptType::Basic}, {0x78, L"x", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::X, XFA_ScriptType::Basic}, {0x79, L"y", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Y, XFA_ScriptType::Basic}, {0x2282c73, L"hAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::HAlign, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2ee7678f, L"rotate", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Rotate, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0x7a7cc341, L"vAlign", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::VAlign, XFA_ScriptType::Basic}, {0x7c2ff6ae, L"maxH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxH, XFA_ScriptType::Basic}, {0x7c2ff6bd, L"maxW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MaxW, XFA_ScriptType::Basic}, {0x7d02356c, L"minH", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinH, XFA_ScriptType::Basic}, {0x7d02357b, L"minW", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::MinW, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xa03cf627, L"rawValue", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, 
XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xac06e2b0, L"colSpan", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ColSpan, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbc8fa350, L"locale", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Locale, XFA_ScriptType::Basic}, {0xc2bd40fd, L"anchorType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::AnchorType, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x39cdb0a2, L"priority", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Priority, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xeb511b54, L"disable", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Disable, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Value, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x21aed, L"id", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Id, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xcb0ac9, L"lineThrough", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LineThrough, XFA_ScriptType::Basic}, {0x2c1c7f1, L"typeface", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Typeface, XFA_ScriptType::Basic}, {0x8c74ae9, L"fontHorizontalScale", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::FontHorizontalScale, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2cd79033, L"kerningMode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::KerningMode, XFA_ScriptType::Basic}, {0x3a0273a6, L"underline", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Underline, XFA_ScriptType::Basic}, {0x4873c601, L"baselineShift", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::BaselineShift, XFA_ScriptType::Basic}, {0x4b319767, L"overlinePeriod", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::OverlinePeriod, XFA_ScriptType::Basic}, {0x79543055, L"letterSpacing", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LetterSpacing, XFA_ScriptType::Basic}, {0x8ec6204c, L"lineThroughPeriod", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LineThroughPeriod, XFA_ScriptType::Basic}, {0x907c7719, L"fontVerticalScale", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::FontVerticalScale, XFA_ScriptType::Basic}, {0xa686975b, L"size", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Size, XFA_ScriptType::Basic}, {0xb5e49bf2, L"posture", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Posture, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbd6e1d88, L"weight", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Weight, XFA_ScriptType::Basic}, {0xbd96a0e9, L"underlinePeriod", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::UnderlinePeriod, XFA_ScriptType::Basic}, {0xc0ec9fa4, L"overline", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Overline, XFA_ScriptType::Basic}, {0xaf754613, L"checksum", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Form_Checksum, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x8e1c2921, L"relevant", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Relevant, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xea7090a0, L"override", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Override, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x9dcc3ab3, L"trailer", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Trailer, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xcbcaf66d, L"leader", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Leader, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x60a61edd, L"codeType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::CodeType, XFA_ScriptType::Basic}, {0xb373a862, L"archive", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Archive, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xe1a26b56, L"codeBase", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, 
XFA_Attribute::CodeBase, XFA_ScriptType::Basic}, {0xeb091003, L"classId", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ClassId, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x47d03490, L"connection", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Connection, XFA_ScriptType::Basic}, {0xc39a88bd, L"labelRef", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::LabelRef, XFA_ScriptType::Basic}, {0xd50f903a, L"valueRef", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::ValueRef, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xea7090a0, L"override", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Override, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Extras_Type, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbb8df5d, L"ref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Ref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x24d85167, L"timeout", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Timeout, XFA_ScriptType::Basic}, {0x47d03490, L"connection", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Connection, XFA_ScriptType::Basic}, {0x552d9ad5, L"usage", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usage, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc860f30a, L"delayedOpen", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::DelayedOpen, XFA_ScriptType::Basic}, 
{0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x28dee6e9, L"format", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Format, XFA_ScriptType::Basic}, {0x824f21b7, L"embedPDF", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::EmbedPDF, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Target, XFA_ScriptType::Basic}, {0xdc75676c, L"textEncoding", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TextEncoding, XFA_ScriptType::Basic}, {0xf889e747, L"xdpContent", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::XdpContent, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x97be91b, L"content", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Packet_Content, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, 
XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x4156ee3f, L"delimiter", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Delimiter, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0x21aed, L"id", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Id, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbb8df5d, L"ref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Ref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x21aed, L"id", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Id, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x2f16a382, L"type", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Type, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, 
XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x453eaf38, L"startNew", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::StartNew, XFA_ScriptType::Basic}, {0x9dcc3ab3, L"trailer", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Trailer, XFA_ScriptType::Basic}, {0xa6118c89, L"targetType", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::TargetType, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xc8da4da7, L"target", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Target, XFA_ScriptType::Basic}, {0xcbcaf66d, L"leader", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Leader, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xabef37e3, L"slope", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Slope, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xd996fa9b, L"hand", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Hand, XFA_ScriptType::Basic}, {0xa60dd202, L"length", (XFA_ATTRIBUTE_CALLBACK)&CJX_NodeList::Script_ListClass_Length, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x20146, L"db", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Source_Db, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xb3543a6, L"max", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Occur_Max, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xb356ca4, L"min", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Occur_Min, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x7d0b5fca, L"initial", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Initial, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x29418bb7, L"abbr", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Abbr, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0xbe52dfbf, L"desc", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Desc, XFA_ScriptType::Basic}, {0xf6b47749, L"lock", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_BOOL, XFA_Attribute::Lock, XFA_ScriptType::Basic}, {0xbb8df5d, L"ref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Ref, XFA_ScriptType::Basic}, {0xc0811ed, L"use", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Use, XFA_ScriptType::Basic}, {0x570ce835, L"presence", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Presence, XFA_ScriptType::Basic}, {0xa5b410cf, L"save", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Save, XFA_ScriptType::Basic}, {0xbc254332, L"usehref", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Usehref, XFA_ScriptType::Basic}, {0xb2c80857, L"className", (XFA_ATTRIBUTE_CALLBACK)&CJX_Object::Script_ObjectClass_ClassName, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xa60dd202, L"length", (XFA_ATTRIBUTE_CALLBACK)&CJX_NodeList::Script_ListClass_Length, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x31b19c1, L"name", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Name, XFA_ScriptType::Basic}, {0x9f9d0f9, L"all", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_TreeClass_All, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x4df15659, L"nodes", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_TreeClass_Nodes, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x78a8d6cf, L"classAll", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_TreeClass_ClassAll, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0xcad6d8ca, L"parent", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_TreeClass_Parent, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0xd5679c78, L"index", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_TreeClass_Index, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xdb5b4bce, L"classIndex", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_TreeClass_ClassIndex, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xe4989adf, L"somExpression", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_TreeClass_SomExpression, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x21aed, L"id", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Attribute_String, XFA_Attribute::Id, XFA_ScriptType::Basic}, {0x234a1, L"ns", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_NodeClass_Ns, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0x50d1a9d1, L"model", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_NodeClass_Model, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0xacb4823f, L"isContainer", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_NodeClass_IsContainer, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xe372ae97, L"isNull", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_NodeClass_IsNull, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xfe612a5b, L"oneOfChild", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_NodeClass_OneOfChild, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x97c1c65, L"context", 
(XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_ModelClass_Context, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0x58be2870, L"aliasNode", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_ModelClass_AliasNode, XFA_Attribute::Unknown, XFA_ScriptType::Object}, {0xa52682bd, L"{default}", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, {0xd6e27f1d, L"value", (XFA_ATTRIBUTE_CALLBACK)&CJX_Node::Script_Som_DefaultValue, XFA_Attribute::Unknown, XFA_ScriptType::Basic}, }; const int32_t g_iSomAttributeCount = sizeof(g_SomAttributeData) / sizeof(XFA_ATTRIBUTEINFO);
// client.go
package gomailer

import (
	"context"
	"errors"
)

type Implementation int

const (
	Gomail = Implementation(iota)
)

type Client interface {
	// Send will send email
	Send(msg *Message) error
	// SendContext provide context to send function
	SendContext(ctx context.Context, msg *Message) error
	// Close permanently close client connection
	Close() error
}

type Message struct {
	Attachments []*Attachment
	SendTo      []string
	CC          []string
	Title       string
	Body        string
}

type Attachment struct {
	Filename string
	Byte     []byte
}

type Config struct {
	Host     string
	Port     int
	Email    string
	Password string
}

var ErrClosed = errors.New("connection has been closed")

// NewClient returns an email handler for the requested implementation.
func NewClient(impl Implementation, emailConfig *Config) (Client, error) {
	if Gomail == impl {
		return newGomail(emailConfig), nil
	}
	return nil, errors.New("no email implementations found")
}
package de.isuret.polos.AetherOnePi.domain;

import lombok.AllArgsConstructor;
import lombok.Data;

@Data
@AllArgsConstructor
public class VitalityObject {

    private Integer number;
    private Integer value;
}
module Test.Day17 (tests) where

import Common
import Day17 (part1, part2)

input = "target area: x=20..30, y=-10..-5"

expected1 = "45"
expected2 = "112"

tests =
  createTests
    [ TestResult "Day 17 - Part 1" expected1 (part1 input),
      TestResult "Day 17 - Part 2" expected2 (part2 input)
    ]
from ipaddress import IPv4Network, AddressValueError

from CiscoIPScanner.general import mt
from CiscoIPScanner.exceptions import (
    InvalidVRF, NoVRFSpecifiedWithIntInVRF, InvalidIntfIPAddress, InvalidDeviceType,
    NoIntfIPSpecified, NoNXOSIntfIPSpecified, SubnetTooLarge, InvalidVlanID,
    InvalidNetworkID, InvalidInterfaceIP, InterfaceIPAddressNotInNetwork,
    VlanNotInVlanDB, TemplatesNotFoundWithinPackage)
from CiscoIPScanner.address_validator import ipv4
from progressbar import progressbar
# import time

# TODO: Add router support
# TODO: Add check to make sure SVI


def hosts_lists_parse(prefix, all_hosts):
    """Splits host list into separate lists for concurrent SSH or TELNET sessions for faster IP scan\n if prefix length is between 29 and 23"""
    if prefix == 29:
        return [
            all_hosts[0:2],
            all_hosts[2:4],
            all_hosts[4:6]
        ]
    elif prefix == 28:
        return [
            all_hosts[0:2],
            all_hosts[2:4],
            all_hosts[4:6],
            all_hosts[6:8],
            all_hosts[8:10],
            all_hosts[10:12],
            all_hosts[12:14]
        ]
    elif prefix == 27:
        return [
            all_hosts[0:5],
            all_hosts[5:10],
            all_hosts[10:15],
            all_hosts[15:20],
            all_hosts[20:25],
            all_hosts[25:30]
        ]
    elif prefix == 26:
        return [
            all_hosts[0:15],
            all_hosts[15:30],
            all_hosts[30:45],
            all_hosts[45:60],
            all_hosts[60:62]
        ]
    elif prefix == 25:
        return [
            all_hosts[0:18],
            all_hosts[18:36],
            all_hosts[36:54],
            all_hosts[54:72],
            all_hosts[72:90],
            all_hosts[89:108],
            all_hosts[108:126]
        ]
    elif prefix == 24:
        return [
            all_hosts[0:25],
            all_hosts[25:50],
            all_hosts[50:75],
            all_hosts[75:100],
            all_hosts[100:125],
            all_hosts[125:150],
            all_hosts[150:175],
            all_hosts[175:200],
            all_hosts[200:225],
            all_hosts[225:250],
            all_hosts[250:254]
        ]
    elif prefix == 23:
        return [
            all_hosts[0:51],
            all_hosts[51:102],
            all_hosts[102:153],
            all_hosts[153:204],
            all_hosts[204:255],
            all_hosts[255:306],
            all_hosts[306:357],
            all_hosts[357:408],
            all_hosts[408:459],
            all_hosts[459:510]
        ]
    else:
        raise SubnetTooLarge


class ProgressBar:
    def __init__(self, iterable, prefix):
        self.bar = progressbar(iterable, prefix=prefix)


class Scan:
    """Initiates IP scan of IP subnet from Cisco IOS or NX-OS devices\n Subnet can be between a /29 and /23"""
    def __init__(self, network, devicetype, source_vlan, connection, create_intf=False, intf_ip=None,
                 vrf=None, count=2, timeout=1, progress_bar=False):
        # Intf IP required if creating interface
        if create_intf and intf_ip is None:
            raise NoIntfIPSpecified

        # Intf IP required if NX-OS
        if devicetype == 'cisco_nxos' and intf_ip is None:
            raise NoNXOSIntfIPSpecified

        # Checks VLAN to make sure valid VLAN ID within extended VLAN range and not within reserved VLAN ID range
        if int(source_vlan) not in range(1, 4095) or int(source_vlan) in range(1002, 1006):
            raise InvalidVlanID

        # Checks to make sure network is valid
        try:
            network = IPv4Network(network)
        except AddressValueError:
            raise InvalidNetworkID

        # Checks to make sure interface IP address is a valid IP address and is within the specified network
        if intf_ip is not None:
            if not ipv4(intf_ip):
                raise InvalidInterfaceIP
            if not any(intf_ip == str(h1) for h1 in network.hosts()):
                raise InterfaceIPAddressNotInNetwork

        # Checks for source vlan in device VLAN database
        session = connection.connection().session
        try:
            vlan_db = session.send_command('show vlan brief', use_textfsm=True)
        except ValueError:
            raise TemplatesNotFoundWithinPackage
        if not any(source_vlan == v1['vlan_id'] for v1 in vlan_db):
            raise VlanNotInVlanDB

        # Creates Interface
        if create_intf and intf_ip is not None:
            if vrf is None:
                session.send_config_set([
                    f'interface vlan {source_vlan}',
                    f'ip address {intf_ip} {network.netmask}',
                    'no shut'
                ])
            else:
                if devicetype == 'cisco_nxos':
                    vrf_cmd = f'vrf member {vrf}'
                else:
                    vrf_cmd = f'vrf forwarding {vrf}'
                session.send_config_set([
                    f'interface vlan {source_vlan}',
                    vrf_cmd,
                    f'ip address {intf_ip} {network.netmask}',
                    'no shut'
                ])

        # Creating list of dictionaries for hosts for later sorting of unordered output data from scan
        self.all_hosts = []
        """All hosts in specified subnet with dictionaries including reachability info, ip address, and mac address"""
        for h in network.hosts():
            self.all_hosts.append(
                {
                    'address': str(h)
                }
            )

        reachable_devices = []

        def scan(host, conn):
            """Main IP subnet Scan function"""
            ip_address = host['address']
            # Formats input command for proper formatting based on if IOS(-XE) or NX-OS operating system
            # Also checks to make sure device type is IOS(-XE) or NX-OS
            if devicetype == 'cisco_ios':
                if vrf is None:
                    cmd = f'ping {ip_address} repeat {count} timeout {timeout} source vlan {source_vlan}'
                else:
                    cmd = f'ping vrf {vrf} {ip_address} repeat {count} timeout {timeout} source vlan {source_vlan}'
            elif devicetype == 'cisco_nxos':
                if vrf is None:
                    cmd = f'ping {ip_address} count {count} timeout {timeout} source {intf_ip}'
                else:
                    cmd = f'ping {ip_address} vrf {vrf} count {count} timeout {timeout} source {intf_ip}'
            else:
                raise InvalidDeviceType
            cmd_output = conn.send_command(cmd)

            # Checks for cmd input errors
            if cmd_output.__contains__('Invalid'):
                if devicetype == 'cisco_ios':
                    if cmd_output.__contains__(
                            'Invalid source interface - Interface vrf does not match the vrf used for ping'):
                        raise NoVRFSpecifiedWithIntInVRF
                    if cmd_output.__contains__('does not exist'):
                        raise InvalidVRF
                    if cmd_output.__contains__('input detected'):
                        raise InvalidIntfIPAddress
                else:
                    if cmd_output.__contains__('bind to address'):
                        raise NoVRFSpecifiedWithIntInVRF
                    if cmd_output.__contains__('does not exist'):
                        raise InvalidVRF
                    if cmd_output.__contains__('Invalid host/interface'):
                        raise InvalidIntfIPAddress
            else:
                # Checks if device received ping echo then appending IP address to list if non-0 value
                if devicetype == 'cisco_ios':
                    try:
                        if str(cmd_output.split('\n')[4].split(' ')[3]) != '0':
                            reachable_devices.append(ip_address)
                    except IndexError:
                        if str(cmd_output.split('\n')[5].split(' ')[3]) != '0':
                            reachable_devices.append(ip_address)
                else:
                    if str(cmd_output.split('\n')[5].split(' ')[3]) != '0':
                        reachable_devices.append(ip_address)

        # Splits hosts list into multiple smaller lists for multiple asynchronous SSH/TELNET sessions
        hosts_lists = hosts_lists_parse(int(network.prefixlen), self.all_hosts)

        if progress_bar:
            self.phase_num = 1
            """Internal use only for progress bar numbers"""

        def host_split(host_list):
            session1 = connection.connection().session
            if progress_bar:
                bar = ProgressBar(host_list, f'Phase {str(self.phase_num)}: ').bar
                self.phase_num += 1
                for h1 in bar:
                    scan(h1, session1)
            else:
                for h1 in host_list:
                    scan(h1, session1)
            session1.disconnect()

        mt(host_split, hosts_lists, threads=len(hosts_lists))

        arps = session.send_command(f'show ip arp vlan {source_vlan}', use_textfsm=True)

        # Removes Interface
        if create_intf:
            session.send_config_set([f'no interface vlan {source_vlan}'])

        session.disconnect()

        def sort(host):
            for arp in arps:
                if arp['address'] == host['address'] and arp['mac'].count('.') == 2:
                    host['mac'] = arp['mac']
            if any(host['address'] == reachable_device for reachable_device in reachable_devices):
                host['status'] = 'Reachable'
            else:
                host['status'] = 'Unreachable'

        mt(sort, self.all_hosts)
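A rough usage sketch of the Scan class above, under stated assumptions: the import path and the device_connection object are hypothetical and not taken from the original project; device_connection is only assumed to expose .connection().session returning a Netmiko-style session, which is all the class requires.

    from CiscoIPScanner.scanner import Scan  # hypothetical module path

    # device_connection is a hypothetical connection factory for the switch being queried.
    result = Scan(
        network="10.0.10.0/28",
        devicetype="cisco_ios",
        source_vlan="10",
        connection=device_connection,
        count=2,
        timeout=1,
        progress_bar=True,
    )

    # After the scan, all_hosts holds one dict per host with address, status and (when ARP saw it) mac.
    for host in result.all_hosts:
        print(host["address"], host["status"], host.get("mac", "unknown"))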
Good news, Sensies! Netflix has renewed Sense8 for a second season, and the show’s cast helped share the news.

The streaming service made the announcement Saturday through a special video message posted to Twitter and Facebook. “Birthdays are just the beginning,” read the caption, as actors Aml Ameen, Jamie Clayton, Doona Bae, and more sang renditions of “Happy Birthday.” “We are coming back,” Bae said. “Yay! Season 2! See you on Netflix.”

The Sense8 season 2 announcement comes a month after the series hosted a panel at this year’s Television Critics Association. At that time, executive producer J. Michael Straczynski said he was “still awaiting word” on a renewal, adding, “We’re cautiously optimistic, but, ultimately, it’s Netflix’s call.”

Developed by Andy and Lana Wachowski (The Matrix, Jupiter Ascending), Sense8 follows eight individuals from all around the world who discover they are mentally linked to each other. They must find a way to survive those who believe them to be a global threat.

Netflix continues to demonstrate dedication to developing and furthering its original content, including Daredevil, House of Cards, and Orange Is the New Black. Though more Sense8 wasn’t guaranteed, it seemed unlikely they’d give up this property so easily. Now fans can rejoice with this official confirmation.

Watch the Sense8 season 2 video announcement below.
// typingtanuki/redmine-api
package com.github.typingtanuki.redmine.api.auth;

import org.jboss.resteasy.client.jaxrs.BasicAuthentication;

/**
 * Use this authentication method if you want to log in with an API key.
 * <p>
 * Your API key can be found in your redmine profile after logging in.
 *
 * @author <NAME>
 */
public class ApiKey implements RedmineAuth {
    private final String apiKey;

    public ApiKey(String apiKey) {
        super();
        this.apiKey = apiKey;
    }

    @Override
    public BasicAuthentication token() {
        // Redmine API allows to log in with the api key as the name and anything as the password
        return new BasicAuthentication(apiKey, "--");
    }
}
# Reads a sum of the digits 1-3 (e.g. "3+1+2") and reprints its summands
# in non-decreasing order (e.g. "1+2+3").
n = list(input())
if len(n) == 1:
    print(''.join(n))
else:
    ans = []
    for i in n:
        if i in '123':   # keep only the digits, dropping the '+' separators
            ans.append(i)
    ans.sort()
    xxx = ans[-1]        # largest digit goes last, without a trailing '+'
    ans = ans[:-1]
    for i in ans:
        print(i + '+', end='')
    print(xxx)
def _get_triggers(settings):
    try:
        triggers_list = settings["triggers"]
    except KeyError as e:
        raise SettingNotFound("%s setting not found" % e)

    triggers = list()
    for trigger_el in triggers_list:
        if isinstance(trigger_el, dict):
            for trigger_name in trigger_el:
                name = trigger_name
                parameters = trigger_el[name]
                new_trigger = Trigger(name=name, parameters=parameters)
                triggers.append(new_trigger)
        else:
            new_trigger = Trigger(name=trigger_el)
            triggers.append(new_trigger)
    return triggers
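For illustration only, this is the kind of settings mapping the function above accepts; the trigger names and parameter keys below are made up, and Trigger / SettingNotFound are assumed to be defined elsewhere in the same module.

    settings = {
        "triggers": [
            "snowboy",                                    # bare name, no parameters
            {"porcupine": {"keyword_file": "hey.ppn"}},   # name mapped to its parameters
        ]
    }

    triggers = _get_triggers(settings)
    # -> [Trigger(name="snowboy"),
    #     Trigger(name="porcupine", parameters={"keyword_file": "hey.ppn"})]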
// DatabaseInit creates a connection to the database
func DatabaseInit(dbName, connectionstring string) {
	databaseName = dbName
	connectionString = connectionstring + dbName
	session, err = mgo.Dial(connectionString)
	if err != nil {
		panic(err)
	}
}
Multiple reports have confirmed that YG Entertainment is planning to hold BIGBANG’s concert in April, but clarified news on WINNER and iKON’s guest appearance. According to numerous reports from an anonymous source on March 19th, BIGBANG has scheduled their comeback concert from April 24th to 26th at the Olympic Gymnasium in Seoul, South Korea. The concert is described to be a part of their promotional activities for their latest album release. However, YG Entertainment clarified these reports and stated that, “We scheduled BIGBANG’s concerts from April 24th to 26th in advance. Since making their debut, they have been holding concerts with their album release so we reserve the venues in advance.” It was further explained that the album release has been delayed as the members, who prioritises quality over release dates, are still working hard on their latest material. Furthermore, as the album release date is yet to be finalised, the decision to continue with the original concert dates is still being discussed. The agency also clarified that WINNER and iKON will not be performing as opening acts, denying previous reports that they will be performing on BIGBANG’s tentative concert dates. The final decision for their concert dates are set to be confirmed next week. Previously, YG Entertainment’s CEO Yang Hyun Suk has mentioned, “Prior to the end of this year, following BIG BANG as the lead, WINNER and iKON both will release new albums as well. Three teams are busily preparing for their albums.” Source: TV Report, Star News and OSEN
Outcomes of a pharmacist-led medication review programme for hospitalised elderly patients. INTRODUCTION Elderly patients are at risk of drug-related problems. This study aimed to determine whether a pharmacist-led medication review programme could reduce inappropriate medications and hospital readmissions among geriatric in-patients in Hong Kong. METHODS This prospective controlled study was conducted in a geriatric unit of a regional hospital in Hong Kong. The study period was from December 2013 to September 2014. Two hundred and twelve patients were allocated to receive either routine care (104) or pharmacist intervention (108) that included medication reconciliation, medication review, and medication counselling. Medication appropriateness was assessed by a pharmacist using the Medication Appropriateness Index. Recommendations made by the pharmacist were communicated to physicians. RESULTS At hospital admission, 51.9% of intervention and 58.7% of control patients had at least one inappropriate medication (P=0.319). Unintended discrepancy applied in 19.4% of intervention patients of which 90.7% were due to omissions. Following pharmacist recommendations, 60 of 93 medication reviews and 32 of 41 medication reconciliations (68.7%) were accepted by physicians and implemented. After the program and at discharge, the proportion of subjects with inappropriate medications in the intervention group was significantly lower than that in the control group (28.0% vs 56.4%; P<0.001). The unplanned hospital readmission rate 1 month after discharge was significantly lower in the intervention group than that in the control group (13.2% vs 29.1%; P=0.005). Overall, 98.0% of intervention subjects were satisfied with the programme. There were no differences in the length of hospital stay, number of emergency department visits, or mortality rate between the intervention and control groups. CONCLUSIONS A pharmacist-led medication review programme that was supported by geriatricians significantly reduced the number of inappropriate medications and unplanned hospital readmissions among geriatric in-patients.
Revolutionary Constitutional Lawmaking in Germany—Rediscovering the German 1989 Revolution Abstract Today, the 1989 Revolution in East Germany is recognized and celebrated as the event that abolished the German Democratic Republic (GDR) and brought about German unification. What is mostly overlooked, however, is that these are not the Revolution's only and, from the perspective of constitutional law, not even its most important achievements. More important with respect to understanding constitutional lawmaking in Germany is that the 1989 Revolution did not lead to an unconditional adoption of West German constitutional law in the new East German states. Instead, the Revolution had its own constitutional agenda, which went beyond the West German Basic Law and was transferred to unified Germany where it then needed to be integrated into the existing West German constitutional order. The Article reinterprets the 1989 Revolution and shows how a revolutionary popular movement in the GDR developed its own constitutional agenda, which first found legal manifestation in GDR legislation, and then was transferred to unified Germany through the Unification Treaty and the new state constitutions.
import pickle


class ReplaySampler:
    """
    A sampler that records the generated values and then can replay the same outputs in the same order.

    One of the most interesting use cases for `ReplaySampler` is in conjunction with context free
    or graph grammars, for generating complex objects.
    You can pass a sampler wrapped in a `ReplaySampler` during generation, and then reuse it later
    for generating the same object or graph.

    ##### Examples

    First, instantiate a `ReplaySampler` with an internal `Sampler` instance and use it normally.

    ```python
    >>> sampler = ReplaySampler(Sampler(random_state=0))
    >>> [sampler.discrete(0,10) for _ in range(10)]
    [6, 6, 0, 4, 8, 7, 6, 4, 7, 5]
    ```

    Then call the `replay` method and reuse the same values.
    `replay()` returns the same instance, to enable chaining method calls.

    ```python
    >>> sampler.replay()
    <autogoal.sampling.ReplaySampler object at ...>
    >>> [sampler.discrete(0,10) for _ in range(5)]
    [6, 6, 0, 4, 8]
    >>> [sampler.discrete(0,10) for _ in range(5)]
    [7, 6, 4, 7, 5]
    ```

    If you try to use it in a different way as originally, it will complain.

    ```python
    >>> sampler.replay().discrete(0,5)
    Traceback (most recent call last):
    ...
    TypeError: Invalid invocation of `discrete` with `args=(0, 5)`, replay history says args='(0, 10)'.

    >>> sampler.replay().boolean()
    Traceback (most recent call last):
    ...
    TypeError: Invalid invocation of `boolean`, replay history says discrete comes next.
    ```
    """

    RECORD = "record"
    REPLAY = "replay"

    def __init__(self, sampler):
        self.sampler = sampler
        self._mode = ReplaySampler.RECORD
        self._history = []
        self._current_history = []

    def _run(self, method, *args, **kwargs):
        if self._mode == ReplaySampler.RECORD:
            result = getattr(self.sampler, method)(*args, **kwargs)
            self._history.append(
                dict(method=method, args=repr(args), kwargs=repr(kwargs), result=result)
            )
            return result
        elif self._mode == ReplaySampler.REPLAY:
            if not self._current_history:
                raise TypeError(
                    f"Invalid invocation of `{method}`, replay history is empty. Maybe you forgot to call `replay`?"
                )

            top = self._current_history[0]

            if top["method"] != method:
                raise TypeError(
                    f"Invalid invocation of `{method}`, "
                    f"replay history says {top['method']} comes next."
                )

            if top["args"] != repr(args):
                raise TypeError(
                    f"Invalid invocation of `{method}` with `args={repr(args)}`, "
                    f"replay history says args={repr(top['args'])}."
                )

            if top["kwargs"] != repr(kwargs):
                raise TypeError(
                    f"Invalid invocation of `{method}` with `kwargs={repr(kwargs)}`, "
                    f"replay history says kwargs={repr(top['kwargs'])}."
                )

            self._current_history.pop(0)
            return top["result"]

    def replay(self) -> "ReplaySampler":
        self._mode = ReplaySampler.REPLAY
        self._current_history = list(self._history)
        return self

    def save(self, fp):
        """
        Saves the state of a `ReplaySampler` to a stream. It must be in replay mode.

        You are responsible for opening and closing the stream yourself.

        ##### Examples

        In this example we create a sampler, and save its state into a `StringIO`
        stream to be able to see what's being saved.

        ```python
        >>> sampler = ReplaySampler(Sampler(random_state=0))
        >>> [sampler.discrete(0, 10) for _ in range(3)]
        [6, 6, 0]
        >>> import io
        >>> fp = io.BytesIO()
        >>> sampler.replay().save(fp)
        >>> len(fp.getvalue())
        183
        ```
        """
        if self._mode != ReplaySampler.REPLAY:
            raise TypeError(
                "A sampler must be in replay mode, i.e., call the `replay()` method."
            )

        pickle.Pickler(fp).dump(self._history)

    @staticmethod
    def load(fp) -> "ReplaySampler":
        """
        Creates a `ReplaySampler` from a stream and returns it already in replay mode.

        You are responsible for opening and closing the stream yourself.

        ##### Examples

        ```python
        >>> sampler = ReplaySampler(Sampler(random_state=1))
        >>> [sampler.discrete(0, 10) for _ in range(10)]
        [2, 9, 1, 4, 1, 7, 7, 7, 10, 6]
        >>> import io
        >>> fp = io.BytesIO()
        >>> sampler.replay().save(fp)
        >>> fp.seek(0)
        0
        >>> other_sampler = ReplaySampler.load(fp)
        >>> [other_sampler.discrete(0, 10) for _ in range(5)]
        [2, 9, 1, 4, 1]
        >>> [other_sampler.discrete(0, 10) for _ in range(5)]
        [7, 7, 7, 10, 6]
        ```
        """
        history = pickle.Unpickler(fp).load()
        sampler = ReplaySampler(None)
        sampler._history = history
        return sampler.replay()

    def choice(self, *args, **kwargs):
        return self._run("choice", *args, **kwargs)

    def distribution(self, *args, **kwargs):
        return self._run("distribution", *args, **kwargs)

    def discrete(self, *args, **kwargs):
        return self._run("discrete", *args, **kwargs)

    def continuous(self, *args, **kwargs):
        return self._run("continuous", *args, **kwargs)

    def boolean(self, *args, **kwargs):
        return self._run("boolean", *args, **kwargs)

    def categorical(self, *args, **kwargs):
        return self._run("categorical", *args, **kwargs)

    def __getattr__(self, attr):
        if attr == "sampler":
            return self.__dict__.get("sampler")

        return getattr(self.sampler, attr)
// Prints the smallest number not less than the input that consists only of the
// digits 4 and 7, with an equal count of each ("super lucky" number).
#include <iostream>
#include <string>
#include <algorithm>
using namespace std;

string a;
string b;
int t;

int main() {
    cin >> a;
    t = a.size();
    int x = t / 2;
    while (x--) b += '7';
    x = t / 2;
    while (x--) b += '4';
    if (t % 2) {
        reverse(b.begin(), b.end());
        cout << "4" << b << "7";
        return 0;
    }
    int update = 0;
    for (int i = 0; i < t; i++) {
        if (a[i] > b[i]) { update = 1; break; }
        else if (a[i] < b[i]) break;
    }
    if (update) {
        reverse(b.begin(), b.end());
        cout << "4" << b << "7";
        return 0;
    }
    int xx = t / 2;
    int yy = t / 2;
    update = -1;
    for (int i = 0; i < t; i++) {
        if (a[i] == '4') {
            xx--;
            if (xx < 0) { update = i; break; }
        } else if (a[i] == '7') {
            yy--;
            if (yy < 0) { update = i; break; }
        } else {
            if (a[i] > '7') { update = i; break; }
            else if (a[i] > '4') {
                if (yy) {
                    for (int y = 0; y < i; y++) cout << a[y];
                    yy--;
                    cout << "7";
                    while (xx--) cout << "4";
                    while (yy--) cout << "7";
                    return 0;
                } else { update = i; break; }
            } else {
                if (xx) {
                    for (int y = 0; y < i; y++) cout << a[y];
                    xx--;
                    cout << "4";
                    while (xx--) cout << "4";
                    while (yy--) cout << "7";
                    return 0;
                }
            }
        }
    }
    if (update == -1) { cout << a; return 0; }
    int ch = 0;
    int oo = 0;
    for (int i = 0; i < update; i++) {
        if (a[i] == '7') oo++;
        if (a[i] == '4' && oo != t / 2) ch = i;
    }
    xx = t / 2;
    yy = t / 2;
    for (int i = 0; i < ch; i++) {
        if (a[i] == '4') xx--;
        else yy--;
        cout << a[i];
    }
    yy--;
    cout << "7";
    while (xx--) cout << "4";
    while (yy--) cout << "7";
}
"""
File: chapter07/optocoupler_test.py

Control an Optocoupler from a GPIO Pin.

Dependencies:
  pip3 install pigpio

Built and tested with Python 3.7 on Raspberry Pi 4 Model B
"""
import pigpio
from time import sleep

GPIO_PIN = 21

pi = pigpio.pi()

try:
    # Note: Circuit is wired as ACTIVE LOW.
    pi.write(GPIO_PIN, pigpio.LOW)    # On.   # (1)
    print("On")
    sleep(2)

    pi.write(GPIO_PIN, pigpio.HIGH)   # Off.  # (2)
    print("Off")
    sleep(2)

except KeyboardInterrupt:
    print("Bye")

finally:
    pi.write(GPIO_PIN, pigpio.HIGH)   # Off.
    pi.stop()                         # PiGPIO cleanup.
// ZYMoridae/plato
package com.jz.nebula.dao;

import com.jz.nebula.entity.Vendor;
import org.springframework.data.jpa.repository.JpaRepository;

public interface VendorRepository extends JpaRepository<Vendor, Long> {
}
def measured_amps(self):
    return self.measured_current / 1e6
def _create_bounds_colliders(self, map_data: dict, game: Game) -> None:
    game.entity_manager.add_entity(Collider(-constants.TILE_SIZE, 0, constants.TILE_SIZE, map_data["pixel_height"]))
    game.entity_manager.add_entity(Collider(0, -constants.TILE_SIZE, map_data["pixel_width"], constants.TILE_SIZE))
    game.entity_manager.add_entity(Collider(map_data["pixel_width"], 0, constants.TILE_SIZE, map_data["pixel_height"]))
    game.entity_manager.add_entity(Collider(0, map_data["pixel_height"], map_data["pixel_width"], constants.TILE_SIZE))
/**
 * Update pid & wait status of @ptracer's wait(2) for the given
 * @ptracee.  This function returns -errno if an error occurred, 0 if
 * the wait syscall will be restarted (ie. the event is discarded),
 * otherwise @ptracee's pid.
 */
static int update_wait_status(Tracee *ptracer, Tracee *ptracee)
{
	word_t address;
	int result;

	if (PTRACEE.ptracer == ptracee->parent
	    && (WIFEXITED(PTRACEE.event4.ptracer.value)
		|| WIFSIGNALED(PTRACEE.event4.ptracer.value))) {
		restart_original_syscall(ptracer);

		detach_from_ptracer(ptracee);
		if (PTRACEE.is_zombie)
			TALLOC_FREE(ptracee);

		return 0;
	}

	address = peek_reg(ptracer, ORIGINAL, SYSARG_2);
	if (address != 0) {
		poke_int32(ptracer, address, PTRACEE.event4.ptracer.value);
		if (errno != 0)
			return -errno;
	}

	PTRACEE.event4.ptracer.pending = false;

	result = ptracee->pid;

	if (PTRACEE.is_zombie) {
		detach_from_ptracer(ptracee);
		TALLOC_FREE(ptracee);
	}

	return result;
}
/**
 * HPI function saHpiResourcePowerStateGet()
 *
 * See also the description of the function inside the specification or header file.
 * Get the power state of a resource.
 *
 * @todo it fits better to implement it in class NewSimulatorResource
 *
 * @param res pointer on NewSimulatorResource to be used
 * @param state address of power state to be filled
 *
 * @return HPI error code
 **/
SaErrorT NewSimulator::IfGetPowerState( NewSimulatorResource *res,
                                        SaHpiPowerStateT &state ) {
   state = res->PowerState();

   return SA_OK;
}
Hyundai A-League's 10 greatest games of 2017: No.1 – Nine-goal thriller in the west Here it is! We’ve reached number 1 in the greatest games of 2017. And this was an absolute beauty between Perth Glory and Melbourne City in the final regular season game of the 2016/17 Season. There was extra incentive for Glory ahead of the match at nib Stadium, needing a four-goal win to leapfrog City and book a home Elimination Final the following week. And in front of over 12,000 exhilarated fans, they almost pulled it off in one of the best games in Hyundai A-League history. Glory began like a steam train, hitting the net in the first five minutes before Tim Cahill restored parity. Andy Keogh and Chris Harold extended the lead to 3-1 early in the second half as Glory fans urged on their team to score two more. They got one after Nebo Marinkovic’s first touch was to slam home a trademark set piece to make it 4-1. But Cahill - with his second soon after - effectively ended Glory’s brave effort and Nico Colazo iced the cake. But Glory weren’t finished hitting back with another goal before a brain fade from keeper Liam Reddy let City back into it at 5-4. What a crazy Sunday afternoon in the west and a deserved winner of our best game on 2017! Re-live all the action in the player above.
// src/test/ui/issues/issue-18464.rs
// run-pass
#![deny(dead_code)]

const LOW_RANGE: char = '0';
const HIGH_RANGE: char = '9';

fn main() {
    match '5' {
        LOW_RANGE..=HIGH_RANGE => (),
        _ => ()
    };
}
/**
 * Helper class for JAXB binding.
 * Responsible for marshalling and unmarshalling using given schema and context.
 *
 * @author loomchild
 */
public class Bind {

    private Marshaller marshaller;

    private Unmarshaller unmarshaller;

    /**
     * Creates Bind.
     * @param context JAXB context
     * @param schema XML schema
     */
    public Bind(JAXBContext context, Schema schema) {
        try {
            unmarshaller = context.createUnmarshaller();
            unmarshaller.setEventHandler(new LoggingValidationEventHandler());
            unmarshaller.setSchema(schema);
            marshaller = context.createMarshaller();
            marshaller.setSchema(schema);
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
        } catch (JAXBException e) {
            throw new XmlException("JAXB error", e);
        }
    }

    /**
     * Writes given object to given writer validating it.
     * @param writer
     * @param object
     */
    public void marshal(Writer writer, Object object) {
        try {
            marshaller.marshal(object, writer);
        } catch (JAXBException e) {
            throw new XmlException("JAXB marshalling error", e);
        }
    }

    /**
     * Writes given object to a file with given name validating it.
     * @param fileName
     * @param object
     */
    public void marshal(String fileName, Object object) {
        try {
            Writer writer = getWriter(getFileOutputStream(fileName));
            marshal(writer, object);
            writer.close();
        } catch (IOException e) {
            throw new IORuntimeException(e);
        }
    }

    /**
     * Retrieves object from given reader validating the input.
     * @param reader
     * @return object
     */
    public Object unmarshal(Reader reader) {
        try {
            Source source = new SAXSource(getXmlReader(), new InputSource(reader));
            return unmarshaller.unmarshal(source);
        } catch (JAXBException e) {
            throw new XmlException("JAXB unmarshalling error", e);
        }
    }

    /**
     * Retrieves object from a file with given name validating the input.
     * @param fileName
     * @return object
     */
    public Object unmarshal(String fileName) {
        try {
            Reader reader = getReader(getFileInputStream(fileName));
            Object object = unmarshal(reader);
            reader.close();
            return object;
        } catch (IOException e) {
            throw new IORuntimeException(e);
        }
    }
}
package manifest

var wrapperTemplate = `#!/bin/sh

if [ -f ~/.gpg-agent-info ] && [ -n "$(pgrep gpg-agent)" ]; then
	source ~/.gpg-agent-info
	export GPG_AGENT_INFO
else
	eval $(gpg-agent --daemon)
fi

export PATH="$PATH:/usr/local/bin" # required on MacOS/brew
export GPG_TTY="$(tty)"

%s jsonapi listen

exit $?`

// DefaultBrowser to select when no browser is specified
var DefaultBrowser = "chrome"

// DefaultWrapperPath where the gopass wrapper shell script is installed to
var DefaultWrapperPath = "/usr/local/bin"

// ValidBrowsers are all browsers for which the manifest can be currently installed
var ValidBrowsers = []string{"chrome", "chromium", "firefox"}

var name = "com.justwatch.gopass"
var wrapperName = "gopass_wrapper.sh"
var description = "Gopass wrapper to search and return passwords"
var connectionType = "stdio"

var chromeOrigins = []string{
	"chrome-extension://kkhfnlkhiapbiehimabddjbimfaijdhk/", // gopassbridge
}

var firefoxOrigins = []string{
	"{eec37db0-22ad-4bf1-9068-5ae08df8c7e9}", // gopassbridge
}

type manifestBase struct {
	Name        string `json:"name"`
	Description string `json:"description"`
	Path        string `json:"path"`
	Type        string `json:"type"`
}

type chromeManifest struct {
	manifestBase
	AllowedOrigins []string `json:"allowed_origins"`
}

type firefoxManifest struct {
	manifestBase
	AllowedExtensions []string `json:"allowed_extensions"`
}