Code
stringlengths 103
85.9k
| Summary
sequencelengths 0
94
|
---|---|
def getCollectionClass(cls, name):
    """Return the class object of a collection given its 'name'.

    Raises KeyError (listing the currently supported names) if 'name'
    is not a registered collection class.
    """
    try:
        return cls.collectionClasses[name]
    except KeyError:
        # BUG FIX: the original error path called getCollectionClasses(), a
        # name not defined in this scope; the registry is the class attribute
        # 'collectionClasses' that the lookup above already uses.
        raise KeyError(
            "There is no Collection Class of type: '%s'; currently supported values: [%s]"
            % (name, ', '.join(cls.collectionClasses.keys()))
        )
def isDocumentCollection(cls, name):
    """Return True/False whether 'name' is the name of a document collection."""
    try:
        candidate = cls.getCollectionClass(name)
    except KeyError:
        # unknown name: not a registered collection at all
        return False
    return issubclass(candidate, Collection)
def isEdgeCollection(cls, name):
    """Return True/False whether 'name' is the name of an edge collection."""
    try:
        candidate = cls.getCollectionClass(name)
    except KeyError:
        # unknown name: not a registered collection at all
        return False
    return issubclass(candidate, Edges)
def getIndexes(self):
    """Fill self.indexes with every index defined on the collection and return it."""
    url = "%s/index" % self.database.URL
    response = self.connection.session.get(url, params={"collection": self.name})
    for infos in response.json()["indexes"]:
        # indexes are keyed first by type ('hash', 'geo', ...) then by server id
        self.indexes[infos["type"]][infos["id"]] = Index(collection=self, infos=infos)
    return self.indexes
def delete(self):
    """Delete the collection from the database; raise DeletionError on failure."""
    response = self.connection.session.delete(self.URL)
    data = response.json()
    if response.status_code != 200 or data["error"]:
        raise DeletionError(data["errorMessage"], data)
def createDocument(self, initDict=None):
    """Create and return a document populated with the defaults or with the values in initDict."""
    if initDict is not None:
        return self.createDocument_(initDict)
    if self._validation["on_load"]:
        # Temporarily disable on-load validation while instantiating the
        # default document. BUG FIX: the original re-enabled the flag on a
        # statement placed after 'return', so it was unreachable and the flag
        # stayed False forever; try/finally guarantees restoration.
        self._validation["on_load"] = False
        try:
            return self.createDocument_(self.defaultDocument)
        finally:
            self._validation["on_load"] = True
    return self.createDocument_(self.defaultDocument)
def createDocument_(self, initDict=None):
    """Create and return a document: completely empty, or populated from initDict."""
    values = {} if initDict is None else initDict
    return self.documentClass(self, values)
def ensureHashIndex(self, fields, unique=False, sparse=True, deduplicate=False):
    """Create a hash index on 'fields' if it does not already exist, and return it."""
    creation = {
        "type": "hash",
        "fields": fields,
        "unique": unique,
        "sparse": sparse,
        "deduplicate": deduplicate,
    }
    index = Index(self, creationData=creation)
    self.indexes["hash"][index.infos["id"]] = index
    return index
def ensureGeoIndex(self, fields):
    """Create a geo index on 'fields' if it does not already exist, and return it."""
    index = Index(self, creationData={"type": "geo", "fields": fields})
    self.indexes["geo"][index.infos["id"]] = index
    return index
def ensureFulltextIndex(self, fields, minLength=None):
    """Create a fulltext index on 'fields' if it does not already exist, and return it.

    minLength, when given, is forwarded to the server as the minimum word length.
    """
    creation = {"type": "fulltext", "fields": fields}
    if minLength is not None:
        creation["minLength"] = minLength
    index = Index(self, creationData=creation)
    self.indexes["fulltext"][index.infos["id"]] = index
    return index
def validatePrivate(self, field, value):
    """Validate the value of a private (arango-reserved) field.

    Raises ValueError if 'field' is not one of the collection's private fields.
    """
    if field not in self.arangoPrivates:
        raise ValueError("%s is not a private field of collection %s" % (field, self))
    # only validate when a validator is declared for this private field
    if field in self._fields:
        self._fields[field].validate(value)
    return True
def hasField(cls, fieldName):
    """Return True/False whether the collection's schema defines 'fieldName'.

    Dot notation addresses nested fields, e.g. 'address.street'.
    """
    node = cls._fields
    for part in fieldName.split("."):
        try:
            node = node[part]
        except KeyError:
            return False
    return True
def fetchDocument(self, key, rawResults=False, rev=None):
    """Fetch a document from the collection given its '_key'.

    Always goes straight to the db and bypasses the cache; use the
    collection[key] interface to benefit from the cache. Raises
    DocumentNotFoundError when the document cannot be retrieved.
    """
    url = "%s/%s/%s" % (self.documentsURL, self.name, key)
    if rev is None:
        response = self.connection.session.get(url)
    else:
        response = self.connection.session.get(url, params={'rev': rev})
    if response.status_code < 400:
        payload = response.json()
        return payload if rawResults else self.documentClass(self, payload)
    if response.status_code == 404:
        raise DocumentNotFoundError("Unable to find document with _key: %s" % key, response.json())
    raise DocumentNotFoundError("Unable to find document with _key: %s, response: %s" % (key, response.json()), response.json())
def fetchByExample(self, exampleDict, batchSize, rawResults=False, **queryArgs):
    """Run a 'by-example' simple query; exampleDict should be something like {'age': 28}."""
    return self.simpleQuery(
        'by-example', rawResults,
        example=exampleDict, batchSize=batchSize, **queryArgs
    )
def fetchFirstExample(self, exampleDict, rawResults=False):
    """Return the first document matching exampleDict (e.g. {'age': 28}).

    The result is a single element but still wrapped in a SimpleQuery object.
    """
    return self.simpleQuery('first-example', rawResults=rawResults, example=exampleDict)
def fetchAll(self, rawResults=False, **queryArgs):
    """Return all the documents in the collection.

    Optional queryArgs such as 'skip' and 'limit' are forwarded, e.g.
    fetchAll(limit=3, skip=10).
    """
    return self.simpleQuery('all', rawResults=rawResults, **queryArgs)
def simpleQuery(self, queryType, rawResults=False, **queryArgs):
    """General interface for simple queries ('all', 'by-example', ...; see the Arango doc).

    With rawResults the query yields dictionaries instead of Document objects.
    """
    return SimpleQuery(self, queryType, rawResults, **queryArgs)
def action(self, method, action, **params):
    """Generic helper for any endpoint without a dedicated method.

    'method' is an HTTP verb name; 'action' is appended to the collection URL.
    """
    handler = getattr(self.connection.session, method.lower())
    response = handler(self.URL + "/" + action, params=params)
    return response.json()
def bulkSave(self, docs, onDuplicate="error", **params) :
    """Bulk-save an iterable of documents or dictionaries via Arango's import API.

    params may carry any extra parameter from ArangoDB's documentation.
    Raises UpdateError if at least one document failed.
    NOTE(review): on full success this returns True, not a count; the
    'updated + created' total is only returned when the server reports a
    non-201 / error payload with zero errors — confirm callers expect this.
    """
    payload = []
    for d in docs :
        if type(d) is dict :
            payload.append(json.dumps(d, default=str))
        else :
            try:
                payload.append(d.toJson())
            except Exception as e:
                # objects without toJson() fall back to their raw store
                payload.append(json.dumps(d.getStore(), default=str))
    # the import endpoint expects one JSON document per line
    payload = '\n'.join(payload)
    params["type"] = "documents"
    params["onDuplicate"] = onDuplicate
    params["collection"] = self.name
    URL = "%s/import" % self.database.URL
    r = self.connection.session.post(URL, params = params, data = payload)
    data = r.json()
    if (r.status_code == 201) and "error" not in data :
        return True
    else :
        if data["errors"] > 0 :
            raise UpdateError("%d documents could not be created" % data["errors"], data)
        return data["updated"] + data["created"]
def bulkImport_json(self, filename, onDuplicate="error", formatType="auto", **params):
    """Bulk import documents from a file respecting arango's key/value format.

    Raises UpdateError if the server reports a failure or any per-document errors.
    """
    url = "%s/import" % self.database.URL
    params["onDuplicate"] = onDuplicate
    params["collection"] = self.name
    params["type"] = formatType
    with open(filename) as f:
        payload = f.read()
    # BUG FIX: the original posted to an undefined name 'URL' (NameError —
    # the local variable is 'url') and never inspected the server response;
    # it also built its error message from the raw file string instead of
    # the decoded response.
    r = self.connection.session.post(url, params=params, data=payload)
    data = r.json()
    if r.status_code == 201 and not data.get("error") and data.get("errors", 0) == 0:
        return data
    try:
        errorMessage = "At least: %d errors. The first one is: '%s'\n\n more in <this_exception>.data" % (data["errors"], data["details"][0])
    except (KeyError, IndexError):
        errorMessage = data.get("errorMessage", "Bulk import failed")
    raise UpdateError(errorMessage, data)
def getType(self):
    """Return 'document' or 'edge' describing the collection type.

    The numeric code remains available in self.type; raises ValueError for
    unknown codes.
    """
    if self.type == CONST.COLLECTION_DOCUMENT_TYPE:
        return "document"
    if self.type == CONST.COLLECTION_EDGE_TYPE:
        return "edge"
    raise ValueError("The collection is of Unknown type %s" % self.type)
def getStatus(self):
    """Return a word describing the collection status.

    One of 'loading', 'loaded', 'deleted', 'unloaded', 'newborn'; the numeric
    code remains available in self.status. Raises ValueError for unknown codes.
    """
    if self.status == CONST.COLLECTION_LOADING_STATUS:
        return "loading"
    if self.status == CONST.COLLECTION_LOADED_STATUS:
        return "loaded"
    if self.status == CONST.COLLECTION_DELETED_STATUS:
        return "deleted"
    if self.status == CONST.COLLECTION_UNLOADED_STATUS:
        return "unloaded"
    if self.status == CONST.COLLECTION_NEWBORN_STATUS:
        return "newborn"
    raise ValueError("The collection has an Unknown status %s" % self.status)
def validateField(cls, fieldName, value):
    """Check that 'value' is valid for field 'fieldName'.

    On an edge collection '_from' and '_to' are always accepted; any other
    SchemaViolation (or ValidationError) propagates. Nested fields use dot
    notation, e.g. 'address.street'.
    """
    try:
        return Collection.validateField(fieldName, value)
    except SchemaViolation:
        # the edge link fields are implicit and never part of the schema
        if fieldName in ("_from", "_to"):
            return True
        raise
def getOutEdges(self, vertex, rawResults=False):
    """Alias for getEdges() restricted to outgoing edges only."""
    return self.getEdges(vertex, inEdges=False, outEdges=True, rawResults=rawResults)
def getEdges(self, vertex, inEdges=True, outEdges=True, rawResults=False):
    """Return in, out, or both edges linked to a given document.

    vertex can be a Document object or an _id string. With rawResults the
    arango payload is returned as fetched, otherwise a list of Edge objects.
    """
    if isinstance(vertex, Document):
        vertexId = vertex._id
    elif (type(vertex) is str) or (type(vertex) is bytes):
        vertexId = vertex
    else:
        raise ValueError("Vertex is neither a Document nor a String")
    params = {"vertex": vertexId}
    if inEdges and outEdges:
        pass  # no direction filter: the server returns both
    elif inEdges:
        params["direction"] = "in"
    elif outEdges:
        params["direction"] = "out"
    else:
        raise ValueError("inEdges, outEdges or both must have a boolean value")
    response = self.connection.session.get(self.edgesURL, params=params)
    data = response.json()
    if response.status_code != 200:
        raise CreationError("Unable to return edges for vertex: %s" % vertexId, data)
    if rawResults:
        return data["edges"]
    return [Edge(self, payload) for payload in data["edges"]]
def reloadCollections(self) :
    """Reload the collection list from the server.

    Each collection is wrapped in its registered class when one exists;
    otherwise a generic Edges/Collection (by server-reported type) is used.
    Raises UpdateError when the listing request fails.
    """
    r = self.connection.session.get(self.collectionsURL)
    data = r.json()
    if r.status_code == 200 :
        self.collections = {}
        for colData in data["result"] :
            colName = colData['name']
            if colData['isSystem'] :
                colObj = COL.SystemCollection(self, colData)
            else :
                try :
                    # user-registered class takes precedence
                    colClass = COL.getCollectionClass(colName)
                    colObj = colClass(self, colData)
                except KeyError :
                    # no registered class: fall back on the server-side type
                    if colData["type"] == CONST.COLLECTION_EDGE_TYPE :
                        colObj = COL.Edges(self, colData)
                    elif colData["type"] == CONST.COLLECTION_DOCUMENT_TYPE :
                        colObj = COL.Collection(self, colData)
                    else :
                        print(("Warning!! Collection of unknown type: %d, trying to load it as Collection nonetheless." % colData["type"]))
                        colObj = COL.Collection(self, colData)
            self.collections[colName] = colObj
    else :
        raise UpdateError(data["errorMessage"], data)
def reloadGraphs(self):
    """Reload the graph list, wrapping each graph in its registered class when one exists.

    Raises UpdateError when the listing request fails.
    """
    response = self.connection.session.get(self.graphsURL)
    data = response.json()
    if response.status_code != 200:
        raise UpdateError(data["errorMessage"], data)
    self.graphs = {}
    for graphData in data["graphs"]:
        key = graphData["_key"]
        try:
            # user-registered graph class takes precedence
            self.graphs[key] = GR.getGraphClass(key)(self, graphData)
        except KeyError:
            self.graphs[key] = Graph(self, graphData)
def createCollection(self, className = 'Collection', **colProperties) :
    """Create a collection and return it.

    className must name a class inheriting from Collection or Edges, or be
    the literal 'Collection'/'Edges' to create an untyped collection (in
    which case colProperties must supply 'name'). colProperties can carry
    creation options such as waitForSync=True (see ArangoDB's doc); when
    empty, the class-level '_properties' dict is used as a base instead,
    so explicit arguments override the schema defaults.
    Raises CreationError when the name already exists or the server refuses.
    """
    colClass = COL.getCollectionClass(className)
    if len(colProperties) > 0 :
        colProperties = dict(colProperties)
    else :
        try :
            colProperties = dict(colClass._properties)
        except AttributeError :
            colProperties = {}
    if className != 'Collection' and className != 'Edges' :
        # typed collections are always named after their class
        colProperties['name'] = className
    else :
        if 'name' not in colProperties :
            raise ValueError("a 'name' argument mush be supplied if you want to create a generic collection")
    if colProperties['name'] in self.collections :
        raise CreationError("Database %s already has a collection named %s" % (self.name, colProperties['name']) )
    if issubclass(colClass, COL.Edges) or colClass.__class__ is COL.Edges:
        colProperties["type"] = CONST.COLLECTION_EDGE_TYPE
    else :
        colProperties["type"] = CONST.COLLECTION_DOCUMENT_TYPE
    payload = json.dumps(colProperties, default=str)
    r = self.connection.session.post(self.collectionsURL, data = payload)
    data = r.json()
    if r.status_code == 200 and not data["error"] :
        col = colClass(self, data)
        self.collections[col.name] = col
        return self.collections[col.name]
    else :
        raise CreationError(data["errorMessage"], data)
def createGraph(self, name, createCollections = True, isSmart = False, numberOfShards = None, smartGraphAttribute = None) :
    """Create a graph and return it.

    'name' must be the name of a class inheriting from Graph. Every
    collection mentioned in the class's edge definitions and orphaned
    collections must already be defined; a ValueError is raised otherwise.
    Raises CreationError when the server refuses the graph.
    NOTE(review): the 'createCollections' parameter is accepted but never
    used in this body — confirm intended behavior.
    """
    def _checkCollectionList(lst) :
        # every referenced vertex collection must be a registered Collection
        for colName in lst :
            if not COL.isCollection(colName) :
                raise ValueError("'%s' is not a defined Collection" % colName)
    graphClass = GR.getGraphClass(name)
    ed = []
    for e in graphClass._edgeDefinitions :
        if not COL.isEdgeCollection(e.edgesCollection) :
            raise ValueError("'%s' is not a defined Edge Collection" % e.edgesCollection)
        _checkCollectionList(e.fromCollections)
        _checkCollectionList(e.toCollections)
        ed.append(e.toJson())
    _checkCollectionList(graphClass._orphanedCollections)
    options = {}
    if numberOfShards:
        options['numberOfShards'] = numberOfShards
    if smartGraphAttribute:
        options['smartGraphAttribute'] = smartGraphAttribute
    payload = {
        "name": name,
        "edgeDefinitions": ed,
        "orphanCollections": graphClass._orphanedCollections
    }
    if isSmart :
        payload['isSmart'] = isSmart
    if options:
        payload['options'] = options
    payload = json.dumps(payload)
    r = self.connection.session.post(self.graphsURL, data = payload)
    data = r.json()
    if r.status_code == 201 or r.status_code == 202 :
        self.graphs[name] = graphClass(self, data["graph"])
    else :
        raise CreationError(data["errorMessage"], data)
    return self.graphs[name]
def dropAllCollections(self):
    """Drop all public collections (graphs included) from the database."""
    for graphName in self.graphs:
        self.graphs[graphName].delete()
    for collectionName in self.collections:
        # names starting with '_' are system collections and must be preserved
        if not collectionName.startswith('_'):
            self[collectionName].delete()
    return
def AQLQuery(self, query, batchSize = 100, rawResults = False, bindVars = None, options = None, count = False, fullCount = False,
    json_encoder = None, **moreArgs) :
    """Run an AQL query and return an AQLQuery cursor object.

    Set rawResults=True to get dictionaries instead of Document objects.
    **moreArgs forwards any other api-supported argument, e.g. ttl=60.
    """
    # BUG FIX: the original used mutable default arguments ({}) for
    # bindVars/options, which are shared across calls; default to None and
    # create fresh dicts per call instead (behaviorally identical for callers).
    if bindVars is None :
        bindVars = {}
    if options is None :
        options = {}
    return AQLQuery(self, query, rawResults = rawResults, batchSize = batchSize, bindVars = bindVars, options = options, count = count, fullCount = fullCount,
        json_encoder = json_encoder, **moreArgs)
def explainAQLQuery(self, query, bindVars=None, allPlans=False):
    """Return the server's explanation of the query.

    allPlans=True makes ArangoDB return every possible plan; False returns
    only the optimal plan.
    """
    # BUG FIX: replaced the shared mutable default argument bindVars={}
    # with a per-call fresh dict.
    if bindVars is None:
        bindVars = {}
    payload = {'query': query, 'bindVars': bindVars, 'allPlans': allPlans}
    request = self.connection.session.post(self.explainURL, data=json.dumps(payload, default=str))
    return request.json()
def validateAQLQuery(self, query, bindVars=None, options=None):
    """Return the server's answer if the query is valid; raise AQLQueryError otherwise."""
    bindVars = {} if bindVars is None else bindVars
    options = {} if options is None else options
    payload = {'query': query, 'bindVars': bindVars, 'options': options}
    response = self.connection.session.post(self.cursorsURL, data=json.dumps(payload, default=str))
    data = response.json()
    if response.status_code == 201 and not data["error"]:
        return data
    raise AQLQueryError(data["errorMessage"], query, data)
def transaction(self, collections, action, waitForSync=False, lockTimeout=None, params=None):
    """Execute a server-side transaction and return the server's answer.

    Raises TransactionError when the server reports a failure.
    """
    payload = {
        "collections": collections,
        "action": action,
        "waitForSync": waitForSync,
    }
    if lockTimeout is not None:
        payload["lockTimeout"] = lockTimeout
    if params is not None:
        payload["params"] = params
    # bracket the request with the connection's reporting hooks
    self.connection.reportStart(action)
    response = self.connection.session.post(self.transactionURL, data=json.dumps(payload, default=str))
    self.connection.reportItem()
    data = response.json()
    if response.status_code in (200, 201, 202) and not data.get("error"):
        return data
    raise TransactionError(data["errorMessage"], action, data)
def getPatches(self):
    """Return the pending patches as a dictionary (recursing into sub-stores)."""
    if not self.mustValidate:
        # without validation there is no patch tracking: everything counts
        return self.getStore()
    patches = dict(self.patchStore)
    for name, sub in self.subStores.items():
        patches[name] = sub.getPatches()
    return patches
def getStore(self):
    """Return the inner store as a dictionary, sub-stores flattened recursively."""
    result = dict(self.store)
    for name, sub in self.subStores.items():
        result[name] = sub.getStore()
    return result
def validateField(self, field) :
    """Validate a single field of the store.

    Unknown fields raise SchemaViolation unless the collection allows
    foreign fields. Sub-documents are validated recursively; patched
    values are validated against the patch, otherwise against the store.
    Fields absent from the store are considered valid.
    """
    if field not in self.validators and not self.collection._validation['allow_foreign_fields'] :
        raise SchemaViolation(self.collection.__class__, field)
    if field in self.store:
        if isinstance(self.store[field], DocumentStore) :
            # sub-document: delegate to the sub-store's own validation
            return self[field].validate()
        if field in self.patchStore :
            return self.validators[field].validate(self.patchStore[field])
        else :
            try :
                return self.validators[field].validate(self.store[field])
            except ValidationError as e:
                # prefix the failing field name for a clearer error message
                raise ValidationError( "'%s' -> %s" % ( field, str(e)) )
            except AttributeError:
                # a dict validator means a sub-document was expected here
                if isinstance(self.validators[field], dict) and not isinstance(self.store[field], dict) :
                    raise ValueError("Validator expected a sub document for field '%s', got '%s' instead" % (field, self.store[field]) )
                else :
                    raise
    return True
def validate(self) :
    """Validate the whole document store.

    Collects every field error and raises a single InvalidDocument carrying
    all of them; returns True when validation is disabled or everything passes.
    """
    if not self.mustValidate :
        return True
    res = {}
    for field in self.validators.keys() :
        try :
            if isinstance(self.validators[field], dict) and field not in self.store :
                # materialize missing sub-documents so they validate too
                self.store[field] = DocumentStore(self.collection, validators = self.validators[field], initDct = {}, subStore=True, validateInit=self.validateInit)
            self.validateField(field)
        except InvalidDocument as e :
            # sub-document failures already carry a field->error mapping
            res.update(e.errors)
        except (ValidationError, SchemaViolation) as e:
            res[field] = str(e)
    if len(res) > 0 :
        raise InvalidDocument(res)
    return True
def set(self, dct) :
    """Set the store's values from a dictionary.

    Private (arango-reserved) fields are skipped. Dict values become nested
    DocumentStore sub-stores, validated against the matching sub-validators
    when the schema declares them.
    """
    for field, value in dct.items() :
        if field not in self.collection.arangoPrivates :
            if isinstance(value, dict) :
                if field in self.validators and isinstance(self.validators[field], dict):
                    vals = self.validators[field]
                else :
                    vals = {}
                self[field] = DocumentStore(self.collection, validators = vals, initDct = value, patch = self.patching, subStore=True, validateInit=self.validateInit)
                self.subStores[field] = self.store[field]
            else :
                self[field] = value
def reset(self, collection, jsonFieldInit=None):
    """Replace the document's current values by those in jsonFieldInit."""
    jsonFieldInit = jsonFieldInit or {}
    self.collection = collection
    self.connection = self.collection.connection
    self.documentsURL = self.collection.documentsURL
    # no URL until the document exists server-side (set by setPrivates/_save)
    self.URL = None
    self.setPrivates(jsonFieldInit)
    self._store = DocumentStore(self.collection, validators=self.collection._fields, initDct=jsonFieldInit)
    if self.collection._validation['on_load']:
        self.validate()
    self.modified = True
def validate(self):
    """Validate the document: its store first, then every private field."""
    self._store.validate()
    for privateField in self.collection.arangoPrivates:
        self.collection.validatePrivate(privateField, getattr(self, privateField))
def setPrivates(self, fieldDict):
    """Set self._id, self._rev and self._key from fieldDict (missing ones become None)."""
    for privateField in self.privates:
        setattr(self, privateField, fieldDict.get(privateField))
    # a known _id means the document exists server-side and has a URL
    if self._id is not None:
        self.URL = "%s/%s" % (self.documentsURL, self._id)
def save(self, waitForSync=False, **docArgs):
    """Save the document (POST for a new document, PUT for a full overwrite).

    Use .patch() to only update modified fields. docArgs may carry extra
    ArangoDB arguments. Only actually saves if the document was modified
    since the last save; use forceSave() to force it.
    """
    payload = self._store.getStore()
    # BUG FIX: the original passed waitForSync=False unconditionally,
    # silently discarding the caller's argument; forward it.
    self._save(payload, waitForSync=waitForSync, **docArgs)
def saveCopy(self):
    """Save a copy of the document, become that copy, and return (old _key, new _key)."""
    previousKey = self._key
    self.reset(self.collection)
    self.save()
    return (previousKey, self._key)
def patch(self, keepNull = True, **docArgs) :
    """Save the document by only updating the modified fields.

    keepNull defaults to the opposite of ArangoDB's behaviour: Null values
    are NOT ignored. docArgs may carry extra arguments (e.g. waitForSync=True).
    Raises ValueError if the document was never saved, UpdateError on failure.
    """
    if self.URL is None :
        raise ValueError("Cannot patch a document that was not previously saved")
    payload = self._store.getPatches()
    if self.collection._validation['on_save'] :
        self.validate()
    # nothing modified means nothing to send; the patch store still resets below
    if len(payload) > 0 :
        params = dict(docArgs)
        params.update({'collection': self.collection.name, 'keepNull' : keepNull})
        payload = json.dumps(payload, default=str)
        r = self.connection.session.patch(self.URL, params = params, data = payload)
        data = r.json()
        if (r.status_code == 201 or r.status_code == 202) and "error" not in data :
            self._rev = data['_rev']
        else :
            raise UpdateError(data['errorMessage'], data)
        self.modified = False
    self._store.resetPatch()
def delete(self):
    """Delete the document from the database and reset it to a fresh state."""
    if self.URL is None:
        raise DeletionError("Can't delete a document that was not saved")
    response = self.connection.session.delete(self.URL)
    data = response.json()
    if (response.status_code not in (200, 202)) or 'error' in data:
        raise DeletionError(data['errorMessage'], data)
    self.reset(self.collection)
    self.modified = True
def getEdges(self, edges, inEdges=True, outEdges=True, rawResults=False):
    """Return in, out, or both edges linked to self from the collection 'edges'.

    With rawResults the arango payload is returned as fetched, otherwise a
    list of Edge objects.
    """
    try:
        return edges.getEdges(self, inEdges, outEdges, rawResults)
    except AttributeError:
        # anything without a getEdges() method cannot be an Edges collection
        raise AttributeError("%s does not seem to be a valid Edges object" % edges)
def getStore(self):
    """Return the document's store, private fields included, as a dict."""
    result = self._store.getStore()
    for privateField in self.privates:
        value = getattr(self, privateField)
        # unset privates (None/empty) are omitted from the export
        if value:
            result[privateField] = value
    return result
def links(self, fromVertice, toVertice, **edgeArgs) :
    """Alias to save() that first updates the _from and _to attributes.

    fromVertice/toVertice can be strings or documents (or objects exposing a
    '.document' Document); unsaved documents are saved automatically to
    obtain an _id. When neither form matches and the attribute is not
    already set, CreationError is raised.
    """
    if isinstance(fromVertice, Document) or isinstance(getattr(fromVertice, 'document', None), Document):
        if not fromVertice._id :
            # unsaved document: save it to obtain an _id
            fromVertice.save()
        self._from = fromVertice._id
    elif (type(fromVertice) is bytes) or (type(fromVertice) is str):
        self._from = fromVertice
    elif not self._from:
        raise CreationError('fromVertice %s is invalid!' % str(fromVertice))
    if isinstance(toVertice, Document) or isinstance(getattr(toVertice, 'document', None), Document):
        if not toVertice._id:
            toVertice.save()
        self._to = toVertice._id
    elif (type(toVertice) is bytes) or (type(toVertice) is str):
        self._to = toVertice
    elif not self._to:
        raise CreationError('toVertice %s is invalid!' % str(toVertice))
    self.save(**edgeArgs)
def save(self, **edgeArgs):
    """Save the edge.

    Works like Document's save() except '_from' and '_to' must be set first;
    use links() for first saves.
    """
    if not getattr(self, "_from") or not getattr(self, "_to"):
        raise AttributeError("You must specify '_from' and '_to' attributes before saving. You can also use the function 'links()'")
    payload = self._store.getStore()
    # the link endpoints ride along with the regular document payload
    payload["_from"] = self._from
    payload["_to"] = self._to
    Document._save(self, payload, **edgeArgs)
def _set(self, jsonData) :
    """Initialize all user fields at once from the server payload.

    If no password is present it is set to an empty string; 'changePassword'
    is copied on a best-effort basis (absent on some server versions).
    """
    self["username"] = jsonData["user"]
    self["active"] = jsonData["active"]
    self["extra"] = jsonData["extra"]
    try:
        self["changePassword"] = jsonData["changePassword"]
    except Exception as e:
        # deliberately best-effort: the key may simply not be there
        pass
    try :
        self["password"] = jsonData["passwd"]
    except KeyError :
        self["password"] = ""
    self.URL = "%s/user/%s" % (self.connection.URL, self["username"])
def save(self):
    """Save a new user (POST) or update an existing one (PUT).

    Raises KeyError when creating without username/password, CreationError
    or UpdateError when the server refuses.
    """
    import json
    # BUG FIX: the missing-credential check used to run AFTER the payload
    # read payload["username"], so an incomplete new user raised a raw
    # KeyError('username') instead of the friendly message; the message also
    # wrongly referred to self['name'] while the code checks 'username'.
    if not self.URL and ("username" not in self._store or "password" not in self._store):
        raise KeyError("You must define self['username'] and self['password'] to be able to create a new user")
    payload = {}
    payload.update(self._store)
    # the REST API uses 'user'/'passwd' instead of our field names
    payload["user"] = payload.pop("username")
    payload["passwd"] = payload.pop("password")
    payload = json.dumps(payload, default=str)
    if not self.URL:
        r = self.connection.session.post(self.users.URL, data=payload)
        data = r.json()
        if r.status_code == 201:
            self._set(data)
        else:
            raise CreationError("Unable to create new user", data)
    else:
        r = self.connection.session.put(self.URL, data=payload)
        data = r.json()
        if r.status_code == 200:
            self._set(data)
        else:
            raise UpdateError("Unable to update user, status: %s" % r.status_code, data)
def setPermissions(self, dbName, access):
    """Grant (access=True) or revoke rights on a database.

    ArangoDB grants/revokes both read and write rights at the same time.
    Raises CreationError if the user is unsaved or the server refuses,
    KeyError for an unknown database.
    """
    import json
    if not self.URL:
        raise CreationError("Please save user first", None, None)
    grant = "rw" if access else ""
    if not self.connection.hasDatabase(dbName):
        raise KeyError("Unknown database: %s" % dbName)
    url = "%s/database/%s" % (self.URL, dbName)
    response = self.connection.session.put(url, data=json.dumps({"grant": grant}, default=str))
    if response.status_code < 200 or response.status_code > 202:
        raise CreationError("Unable to grant rights", response.content)
def delete(self):
    """Permanently remove the user; raises if unsaved or if the server refuses."""
    if not self.URL:
        raise CreationError("Please save user first", None, None)
    response = self.connection.session.delete(self.URL)
    if response.status_code < 200 or response.status_code > 202:
        raise DeletionError("Unable to delete user, url: %s, status: %s" % (response.url, response.status_code), response.content)
    # a deleted user no longer has a server-side URL
    self.URL = None
def fetchAllUsers(self, rawResults=False):
    """Return all available users.

    With rawResults the result is a list of python dicts instead of User objects.
    """
    response = self.connection.session.get(self.URL)
    if response.status_code != 200:
        raise ConnectionError("Unable to get user list", response.url, response.status_code)
    data = response.json()
    if rawResults:
        return data["result"]
    return [User(self, entry) for entry in data["result"]]
def fetchUser(self, username, rawResults=False):
    """Return a single user by name.

    With rawResults the result is a python dict instead of a User object.
    Raises KeyError when the user cannot be fetched.
    """
    url = "%s/%s" % (self.URL, username)
    response = self.connection.session.get(url)
    if response.status_code != 200:
        raise KeyError("Unable to get user: %s" % username)
    data = response.json()
    if rawResults:
        return data["result"]
    return User(self, data)
def resetSession(self, username=None, password=None, verify=True) :
    """Drop the current session and open a fresh AikidoSession with the given credentials.

    verify: TLS certificate verification flag, forwarded to the new session.
    """
    self.disconnectSession()
    self.session = AikidoSession(username, password, verify)
def reload(self):
    """Reload the database list as lightweight handles.

    Loading a database triggers loading of all its collections and graphs,
    so only DBHandle objects are created here; full databases load on demand.
    """
    response = self.session.get(self.databasesURL)
    data = response.json()
    if response.status_code != 200 or data["error"]:
        raise ConnectionError(data["errorMessage"], self.databasesURL, response.status_code, response.content)
    self.databases = {}
    for dbName in data["result"]:
        if dbName not in self.databases:
            self.databases[dbName] = DBHandle(self, dbName)
def createDatabase(self, name, **dbArgs):
    """Create a database named 'name' and return it.

    dbArgs may carry any other creation argument; see arangoDB's doc for the
    full list. Raises CreationError when the server refuses.
    """
    dbArgs['name'] = name
    response = self.session.post(self.URL + "/database", data=json.dumps(dbArgs, default=str))
    data = response.json()
    if response.status_code == 201 and not data["error"]:
        self.databases[name] = Database(self, name)
        return self.databases[name]
    raise CreationError(data["errorMessage"], response.content)
def explain(self, bindVars=None, allPlans=False):
    """Return an explanation of the query.

    allPlans=True makes ArangoDB return all possible plans; False returns
    only the optimal plan.
    """
    # BUG FIX: replaced the shared mutable default argument bindVars={}
    # with a per-call fresh dict.
    if bindVars is None:
        bindVars = {}
    return self.database.explainAQLQuery(self.query, bindVars, allPlans)
def output(self, args):
    '''
    Print the plugin output message.
    '''
    # BUG FIX: __make_dynamic passes args=None when an exit method is called
    # with no arguments, which made ' '.join(...) raise TypeError; treat None
    # as "no message parts".
    if args is None:
        args = []
    print("SensuPlugin: {}".format(' '.join(str(a) for a in args)))
def __make_dynamic(self, method):
    '''
    Create a wrapper for the given exit-code name (e.g. 'ok', 'warning')
    and attach it to the instance under its lowercase name. The wrapper
    records the status, prints the output and exits the process with the
    matching code.
    '''
    def dynamic(*args):
        # remember which status method produced the result
        self.plugin_info['status'] = method
        if not args:
            # normalize "no arguments" for output()
            args = None
        self.output(args)
        sys.exit(getattr(self.exit_code, method))
    method_lc = method.lower()
    dynamic.__doc__ = "%s method" % method_lc
    dynamic.__name__ = method_lc
    setattr(self, dynamic.__name__, dynamic)
Please provide a description of the function:def __exitfunction(self):
'''
Method called by exit hook, ensures that both an exit code and
output is supplied, also catches errors.
'''
if self._hook.exit_code is None and self._hook.exception is None:
print("Check did not exit! You should call an exit code method.")
sys.stdout.flush()
os._exit(1)
elif self._hook.exception:
print("Check failed to run: %s, %s" %
(sys.last_type, traceback.format_tb(sys.last_traceback)))
sys.stdout.flush()
os._exit(2) | [] |
Please provide a description of the function:def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle() | [] |
Please provide a description of the function:def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result) | [] |
Please provide a description of the function:def filter(self):
'''
Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated() | [] |
Please provide a description of the function:def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0) | [] |
Please provide a description of the function:def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings | [] |
Please provide a description of the function:def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req | [] |
Please provide a description of the function:def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200 | [] |
Please provide a description of the function:def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced') | [] |
Please provide a description of the function:def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists') | [] |
Please provide a description of the function:def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences') | [] |
Please provide a description of the function:def config_files():
'''
Get list of currently used config files.
'''
sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')
sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')
sensu_v1_config = '/etc/sensu/config.json'
sensu_v1_confd = '/etc/sensu/conf.d'
if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):
with open(sensu_loaded_tempfile, 'r') as tempfile:
contents = tempfile.read()
return contents.split(':')
elif sensu_config_files:
return sensu_config_files.split(':')
else:
files = []
filenames = []
if os.path.isfile(sensu_v1_config):
files = [sensu_v1_config]
if os.path.isdir(sensu_v1_confd):
filenames = [f for f in os.listdir(sensu_v1_confd)
if os.path.splitext(f)[1] == '.json']
for filename in filenames:
files.append('{}/{}'.format(sensu_v1_confd, filename))
return files | [] |
Please provide a description of the function:def get_settings():
'''
Get all currently loaded settings.
'''
settings = {}
for config_file in config_files():
config_contents = load_config(config_file)
if config_contents is not None:
settings = deep_merge(settings, config_contents)
return settings | [] |
Please provide a description of the function:def load_config(filename):
'''
Read contents of config file.
'''
try:
with open(filename, 'r') as config_file:
return json.loads(config_file.read())
except IOError:
pass | [] |
Please provide a description of the function:def deep_merge(dict_one, dict_two):
'''
Deep merge two dicts.
'''
merged = dict_one.copy()
for key, value in dict_two.items():
# value is equivalent to dict_two[key]
if (key in dict_one and
isinstance(dict_one[key], dict) and
isinstance(value, dict)):
merged[key] = deep_merge(dict_one[key], value)
elif (key in dict_one and
isinstance(dict_one[key], list) and
isinstance(value, list)):
merged[key] = list(set(dict_one[key] + value))
else:
merged[key] = value
return merged | [] |
Please provide a description of the function:def map_v2_event_into_v1(event):
'''
Helper method to convert Sensu 2.x event into Sensu 1.x event.
'''
# return the event if it has already been mapped
if "v2_event_mapped_into_v1" in event:
return event
# Trigger mapping code if enity exists and client does not
if not bool(event.get('client')) and "entity" in event:
event['client'] = event['entity']
# Fill in missing client attributes
if "name" not in event['client']:
event['client']['name'] = event['entity']['id']
if "subscribers" not in event['client']:
event['client']['subscribers'] = event['entity']['subscriptions']
# Fill in renamed check attributes expected in 1.4 event
if "subscribers" not in event['check']:
event['check']['subscribers'] = event['check']['subscriptions']
if "source" not in event['check']:
event['check']['source'] = event['check']['proxy_entity_id']
# Mimic 1.4 event action based on 2.0 event state
# action used in logs and fluentd plugins handlers
action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',
'failing': 'create'}
if "state" in event['check']:
state = event['check']['state']
else:
state = "unknown::2.0_event"
if "action" not in event and state.lower() in action_state_mapping:
event['action'] = action_state_mapping[state.lower()]
else:
event['action'] = state
# Mimic 1.4 event history based on 2.0 event history
if "history" in event['check']:
# save the original history
event['check']['history_v2'] = deepcopy(event['check']['history'])
legacy_history = []
for history in event['check']['history']:
if isinstance(history['status'], int):
legacy_history.append(str(history['status']))
else:
legacy_history.append("3")
event['check']['history'] = legacy_history
# Setting flag indicating this function has already been called
event['v2_event_mapped_into_v1'] = True
# return the updated event
return event | [] |
Please provide a description of the function:def check_name(self, name=None):
'''
Checks the plugin name and sets it accordingly.
Uses name if specified, class name if not set.
'''
if name:
self.plugin_info['check_name'] = name
if self.plugin_info['check_name'] is not None:
return self.plugin_info['check_name']
return self.__class__.__name__ | [] |
Please provide a description of the function:def create(cls, path_name=None, name=None, project_id=None,
log_modified_at=None, crawlable=True):
result = cls(path_name, name, project_id, log_modified_at, crawlable)
db.session.add(result)
db.session.commit()
crawl_result(result, True)
return result | [
"Initialize an instance and save it to db."
] |
Please provide a description of the function:def sampled_logs(self, logs_limit=-1):
logs_count = len(self.logs)
if logs_limit == -1 or logs_count <= logs_limit:
return self.logs
elif logs_limit == 0:
return []
elif logs_limit == 1:
return [self.logs[-1]]
else:
def get_sampled_log(idx):
# always include the first and last element of `self.logs`
return self.logs[idx * (logs_count - 1) // (logs_limit - 1)]
return [get_sampled_log(i) for i in range(logs_limit)] | [
"Return up to `logs_limit` logs.\n\n If `logs_limit` is -1, this function will return all logs that belong\n to the result.\n "
] |
Please provide a description of the function:def serialize_with_sampled_logs(self, logs_limit=-1):
return {
'id': self.id,
'pathName': self.path_name,
'name': self.name,
'isUnregistered': self.is_unregistered,
'logs': [log.serialize for log in self.sampled_logs(logs_limit)],
'args': self.args.serialize if self.args is not None else [],
'commands': [cmd.serialize for cmd in self.commands],
'snapshots': [cmd.serialize for cmd in self.snapshots],
'logModifiedAt': self.log_modified_at.isoformat()
} | [
"serialize a result with up to `logs_limit` logs.\n\n If `logs_limit` is -1, this function will return a result with all its\n logs.\n "
] |
Please provide a description of the function:def reporter(prefix=None, out=None, subdir='', timeout=5, **kwargs):
report = _Reporter(prefix, out, subdir, **kwargs)
yield report
report.save(timeout) | [
"Summary media assets to visualize.\n\n ``reporter`` function collects media assets by the ``with`` statement and\n aggregates in same row to visualize. This function returns an object which\n provides the following methods.\n\n * :meth:`~chainerui.summary._Reporter.image`: collect images. almost same \\\n as :func:`~chainerui.summary.image`\n * :meth:`~chainerui.summary._Reporter.audio`: collect audio. almost same \\\n as :func:`~chainerui.summary.audio`\n\n Example of how to set several assets::\n\n >>> from chainerui.summary import reporter\n >>> summary.set_out('/path/to/output') # same as 'log' file directory\n >>>\n >>> with reporter(epoch=1, iteration=10) as r:\n >>> r.image(image_array1)\n >>> r.image(image_array2)\n >>> r.audio(audio_array, 44100)\n >>> # image_array1 and image_array2 are visualized on a browser\n >>> # audio_array can be listened on a browser\n\n Args:\n prefix (str): prefix of column name.\n out (str): directory path of output.\n subdir (str): sub-directory path of output.\n **kwargs (dict): key-value pair to show as description. regardless of\n empty or not, timestamp is added.\n "
] |
Please provide a description of the function:def image(images, name=None, ch_axis=1, row=0, mode=None, batched=True,
out=None, subdir='', timeout=5, **kwargs):
from chainerui.report.image_report import check_available
if not check_available():
return
from chainerui.report.image_report import report as _image
out_root = _chainerui_asset_observer.get_outpath(out)
out_path = os.path.join(out_root, subdir)
if not os.path.isdir(out_path):
os.makedirs(out_path)
col_name = name
if col_name is None:
col_name = 'image'
filename, created_at = _image(
images, out_path, col_name, ch_axis, row, mode, batched)
value = kwargs
value['timestamp'] = created_at.isoformat()
value['images'] = {col_name: os.path.join(subdir, filename)}
_chainerui_asset_observer.add(value)
_chainerui_asset_observer.save(out_root, timeout) | [
"Summary images to visualize.\n\n Array of images are converted as image format (PNG format on default),\n saved to output directory, and reported to the ChainerUI server.\n The images are saved every called this function. The images will be shown\n on `assets` endpoint vertically. If need to aggregate images in a row, use\n :func:`~chainerui.summary.reporter`.\n\n Examples of how to set arguments::\n\n >>> from chainerui import summary\n >>> summary.set_out('/path/to/log') # same as 'log' file directory\n >>>\n >>> x.shape # = [Batchsize, Channel, Height, Width]\n (10, 3, 5, 5)\n >>> summary.image(x, name='test') # images are tiled as 1x10\n >>> summary.image(x, name='test', row=2) # images are tiled as 2x5\n >>>\n >>> x.shape # = [C, H, W]\n (3, 5, 5)\n >>> # need to set as a non-batched image and channel axis explicitly\n >>> summary.image(x, name='test', ch_axis=0, batched=False)\n >>>\n >>> x.shape # = [B, H, W, C]\n (10, 5, 5, 3)\n >>> # need to set channel axis explicitly\n >>> summary.image(x, name='test', ch_axis=-1, row=2)\n >>>\n >>> x.shape # = [H, W, C]\n (5, 5, 3)\n >>> # need to set as a non-batched image\n >>> summary.image(x, name='test', ch_axis=-1, batched=False)\n >>>\n >>> x.shape # = [B, H, W], grayscale images\n (10, 5, 5)\n >>> summary.image(x, name='test') # image are tiled as 1x10\n >>> summary.image(x, name='test', row=2) # image are tiled as 2x5\n >>>\n >>> x.shape # = [H, W], a grayscale image\n (5, 5)\n >>> # need to set as a non-bathed image\n >>> summary.image(x, name='test', batched=False)\n\n Add description about the image::\n\n >>> summary.image(x, name='test', epoch=1, iteration=100)\n >>> # 'epoch' and 'iteration' column will be shown.\n\n Args:\n images (:class:`numpy.ndarray` or :class:`cupy.ndarray` or \\\n :class:`chainer.Variable`): batch of images. If Number of dimension\n is 3 (or 2 when set `batched=False`), the pixels assume as\n black and white image.\n name (str): name of image. set as column name. 
when not setting,\n assigned ``'image'``.\n ch_axis (int): index number of channel dimension. set 1 by default.\n if the images don't have channel axis, this parameter is ignored.\n row (int): row size to visualize batched images. when set 0,\n show on unstuck. if images set only one image, the row size\n will be ignored.\n mode (str): if the images are not RGB or RGBA space, set their\n color space code. ChainerUI supports 'HSV'.\n batched (bool): if the image is not batched, set ``False``.\n out (str): directory path of output.\n subdir (str): sub-directory path of output.\n **kwargs (dict): key-value pair to show as description. regardless of\n empty or not, timestamp on created the image is added.\n "
] |
Please provide a description of the function:def audio(audio, sample_rate, name=None, out=None, subdir='', timeout=5,
**kwargs):
from chainerui.report.audio_report import check_available
if not check_available():
return
from chainerui.report.audio_report import report as _audio
out_root = _chainerui_asset_observer.get_outpath(out)
out_path = os.path.join(out_root, subdir)
if not os.path.isdir(out_path):
os.makedirs(out_path)
col_name = name
if col_name is None:
col_name = 'audio'
filename, created_at = _audio(audio, sample_rate, out_path, col_name)
value = kwargs
value['timestamp'] = created_at.isoformat()
value['audios'] = {col_name: os.path.join(subdir, filename)}
_chainerui_asset_observer.add(value)
_chainerui_asset_observer.save(out_root, timeout) | [
"summary audio files to listen on a browser.\n\n An sampled array is converted as WAV audio file, saved to output directory,\n and reported to the ChainerUI server. The audio file is saved every called\n this function. The audio file will be listened on `assets` endpoint\n vertically. If need to aggregate audio files in row, use\n :func:`~chainerui.summary.reporter`.\n\n Example of how to set arguments::\n\n >>> from chainerui import summary\n >>> summary.set_out('/path/to/output')\n >>> rate = 44100\n >>>\n >>> summary.audio(sampled_array, rate, name='test')\n >>> # sampled_array can be listened on a browser.\n\n Add description about the audio file::\n\n >>> summary.image(\n >>> sampled_array, rate, name='test', epoch=1, iteration=100)\n >>> # 'epoch' and 'iteration' column will be shown.\n\n Args:\n audio (:class:`numpy.ndarray` or :class:`cupy.ndarray` or \\\n :class:`chainer.Variable`): sampled wave array.\n sample_rate (int): sampling rate.\n name (str): name of image. set as column name. when not setting,\n assigned ``'audio'``.\n out (str): directory path of output.\n subdir (str): sub-directory path of output.\n **kwargs (dict): key-value pair to show as description. regardless of\n empty or not, timestamp on created the image is added.\n "
] |
Please provide a description of the function:def image(self, images, name=None, ch_axis=1, row=0, mode=None,
batched=True, subdir=''):
from chainerui.report.image_report import check_available
if not check_available():
return
from chainerui.report.image_report import report as _image
col_name = self.get_col_name(name, 'image')
out_dir, rel_out_dir = self.get_subdir(subdir)
filename, _ = _image(
images, out_dir, col_name, ch_axis, row, mode, batched)
self.images[col_name] = os.path.join(rel_out_dir, filename)
self.count += 1 | [
"Summary images to visualize.\n\n Args:\n images (:class:`numpy.ndarray` or :class:`cupy.ndarray` or \\\n :class:`chainer.Variable`): batch of images. If Number of\n dimension is 3 (or 2 when set `batched=False`), the pixels\n assume as black and white image.\n name (str): name of image. set as column name. when not setting,\n assigned ``'image'`` + sequential number.\n ch_axis (int): index number of channel dimension. set 1 by default.\n if the images don't have channel axis, this parameter is\n ignored.\n row (int): row size to visualize batched images. when set 0,\n show on unstuck. if images set only one image, the row size\n will be ignored.\n mode (str): if the images are not RGB or RGBA space, set their\n color space code. ChainerUI supports 'HSV'.\n batched (bool): if the image is not batched, set ``False``.\n subdir (str): sub-directory path of output.\n "
] |
Please provide a description of the function:def audio(self, audio, sample_rate, name=None, subdir=''):
from chainerui.report.audio_report import check_available
if not check_available():
return
from chainerui.report.audio_report import report as _audio
col_name = self.get_col_name(name, 'audio')
out_dir, rel_out_dir = self.get_subdir(subdir)
filename, _ = _audio(audio, sample_rate, out_dir, col_name)
self.audios[col_name] = os.path.join(rel_out_dir, filename)
self.count += 1 | [
"Summary audio to listen on web browser.\n\n Args:\n audio (:class:`numpy.ndarray` or :class:`cupy.ndarray` or \\\n :class:`chainer.Variable`): sampled wave array.\n sample_rate (int): sampling rate.\n name (str): name of image. set as column name. when not setting,\n assigned ``'audio'`` + sequential number.\n subdir (str): sub-directory path of output.\n "
] |
Please provide a description of the function:def create_app():
app = Flask(__name__)
app.logger.disabled = True
for h in app.logger.handlers[:]:
app.logger.removeHandler(h)
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
def dated_url_for(endpoint, **values):
if endpoint == 'static':
filename = values.get('filename', None)
if filename:
file_path = os.path.join(app.root_path, endpoint, filename)
values['_'] = int(os.stat(file_path).st_mtime)
return url_for(endpoint, **values)
@app.context_processor
def override_url_for():
return dict(url_for=dated_url_for)
@app.teardown_appcontext
def shutdown_session(exception=None):
db.session.remove()
@app.route('/')
@app.route('/projects/<int:project_id>')
@app.route('/projects/<int:project_id>/results/<int:result_id>')
@app.route('/projects/<int:project_id>/results/<int:result_id>/assets')
def index(**kwargs):
return render_template('index.html')
@app.route('/favicon.ico')
def favicon():
return send_from_directory(
os.path.join(app.root_path, 'static', 'dist'),
'favicon.ico', mimetype='image/vnd.microsoft.icon')
# error handling
@app.errorhandler(OperationalError)
def handle_invalid_usage(error):
logger.error('caught exception from db: %s' % error.args)
response = jsonify({
'error': {
'type': 'DBOperationalError',
'message': 'Failed to send request to the database.'
}
})
response.status_code = 400 # Bad Request
return response
@app.before_request
def add_timestamp():
request._comming_at = datetime.datetime.now()
@app.after_request
def output_log(response):
now = datetime.datetime.now()
log_msg = '%s - - [%s] "%s %s %s" %d' % (
request.remote_addr, now.replace(microsecond=0),
request.method, request.full_path,
request.environ.get('SERVER_PROTOCOL'), response.status_code)
if response.content_length is not None:
log_msg += ' %d' % response.content_length
if request._comming_at is not None:
delta = (now - request._comming_at).total_seconds()
log_msg += ' %.6f' % delta
logger.info(log_msg)
return response
from chainerui.views.argument import ArgumentAPI
from chainerui.views.log import LogAPI
from chainerui.views.project import ProjectAPI
from chainerui.views.result import ResultAPI
from chainerui.views.result_asset import ResultAssetAPI
from chainerui.views.result_command import ResultCommandAPI
project_resource = ProjectAPI.as_view('project_resource')
result_resource = ResultAPI.as_view('result_resource')
log_resource = LogAPI.as_view('log_resource')
arg_resource = ArgumentAPI.as_view('arg_resource')
result_command_resource = ResultCommandAPI.as_view(
'result_command_resource')
result_assets_resource = ResultAssetAPI.as_view('result_assets_resource')
# project API
app.add_url_rule(
'/api/v1/projects',
defaults={'id': None}, view_func=project_resource, methods=['GET'])
app.add_url_rule(
'/api/v1/projects', view_func=project_resource, methods=['POST'])
app.add_url_rule(
'/api/v1/projects/<int:id>',
view_func=project_resource, methods=['GET', 'PUT', 'DELETE'])
# result API
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results',
defaults={'id': None}, view_func=result_resource, methods=['GET'])
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results',
view_func=result_resource, methods=['POST'])
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results/<int:id>',
view_func=result_resource, methods=['GET', 'PUT', 'DELETE'])
# result log API
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results/<int:result_id>/logs',
view_func=log_resource, methods=['POST'])
# result argument API
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results/<int:result_id>/args',
view_func=arg_resource, methods=['POST'])
# result command API
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results/<int:result_id>/commands',
view_func=result_command_resource, methods=['POST'])
# result image API
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results/<int:result_id>/assets',
view_func=result_assets_resource, methods=['GET'])
app.add_url_rule(
'/api/v1/projects/<int:project_id>/results/<int:result_id>/assets/<int:content_id>', # NOQA
view_func=result_assets_resource, methods=['GET'])
return app | [
"create_app.",
"dated_url_for.",
"override_url_for.",
"render react app.",
"handle errors caused by db query."
] |
Please provide a description of the function:def serialize(self):
if self.request is None:
request = None
else:
request = json.loads(self.request)
if self.response is None:
response = None
else:
response = json.loads(self.response)
return {
'id': self.id,
'name': self.name,
'request': request,
'response': response
} | [
"serialize."
] |
Please provide a description of the function:def post(self, result_id, project_id):
result = db.session.query(Result).filter_by(id=result_id).first()
if result is None:
return jsonify({
'result': None,
'message': 'No interface defined for URL.'
}), 404
job_status = CommandsState.job_status(result.path_name)
if job_status != JobStatus.RUNNING:
if job_status == JobStatus.NO_EXTENSION_ERROR:
return jsonify({
'message': '\'CommandsExtension\' is not set or disabled.'
}), 400
elif job_status == JobStatus.INITIALIZED:
return jsonify({
'message': 'The target training job has not run, yet'
}), 400
elif job_status == JobStatus.STOPPED:
return jsonify({
'message': 'The target training job has already stopped'
}), 400
else:
return jsonify({
'message': 'Cannot get the target training job status'
}), 400
request_json = request.get_json()
if request_json is None:
return jsonify({
'message': 'Empty request.'
}), 400
command_name = request_json.get('name', None)
if command_name is None:
return jsonify({
'message': 'Name is required.'
}), 400
schedule = request_json.get('schedule', None)
if not CommandItem.is_valid_schedule(schedule):
return jsonify({
'message': 'Schedule is invalid.'
}), 400
command = CommandItem(
name=command_name,
)
command.set_request(
CommandItem.REQUEST_OPEN,
request_json.get('body', None),
request_json.get('schedule', None)
)
commands = CommandItem.load_commands(result.path_name)
commands.append(command)
CommandItem.dump_commands(commands, result.path_name)
new_result = crawl_result(result, force=True)
new_result_dict = new_result.serialize
return jsonify({'commands': new_result_dict['commands']}) | [
"POST /api/v1/results/<int:id>/commands."
] |
Please provide a description of the function:def create(cls, result_id=None, summary=None, file_modified_at=None):
asset = cls(result_id, summary, file_modified_at)
db.session.add(asset)
db.session.commit()
return asset | [
"Initialize an instance and save it to db."
] |
Please provide a description of the function:def create(cls, path_name=None, name=None, crawlable=True):
project = cls(path_name, name, crawlable)
db.session.add(project)
db.session.commit()
return collect_results(project, force=True) | [
"initialize an instance and save it to db."
] |
Please provide a description of the function:def serialize(self):
log_items = []
data = msgpack.unpackb(self.data, raw=False)
for item in data.items():
value_to_store = (
None
if not isinstance(item[1], numbers.Number)
or isinf(item[1])
or isnan(item[1])
else item[1]
)
log_items.append({
'logId': self.id,
'key': item[0],
'value': value_to_store
})
return {
'id': self.id,
'resultId': self.result_id,
'logItems': log_items
} | [
"serialize."
] |
Please provide a description of the function:def run_migrations_online(config):
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
alembic.context.configure(connection=connection)
with alembic.context.begin_transaction():
alembic.context.run_migrations() | [
"Run migrations in 'online' mode.\n\n In this scenario we need to create an Engine and associate a\n connection with the context.\n\n "
] |
Please provide a description of the function:def main():
config = context.config
config.set_main_option("sqlalchemy.url", config.get_main_option('url'))
run_migrations_online(config) | [
"main."
] |
Please provide a description of the function:def load_result_json(result_path, json_file_name):
json_path = os.path.join(result_path, json_file_name)
_list = []
if os.path.isfile(json_path):
with open(json_path) as json_data:
try:
_list = json.load(json_data)
except ValueError as err:
logger.error(
'Failed to load json: {}, {}'.format(json_path, err))
return _list | [
"load_result_json."
] |
Please provide a description of the function:def crawl_result_path(result_path, include_log):
result = {
'logs': [],
'args': [],
'commands': [],
'snapshots': []
}
if os.path.isdir(result_path):
if include_log:
result['logs'] = load_result_json(result_path, 'log')
result['args'] = load_result_json(result_path, 'args')
result['commands'] = load_result_json(result_path, 'commands')
snapshots = [
x for x in os.listdir(result_path) if x.count('snapshot_iter_')
]
snapshots.sort()
result['snapshots'] = snapshots
return result | [
"crawl_result_path."
] |
Please provide a description of the function:def crawl_result(result, force=False, commit=True):
if not result.crawlable:
return result
now = datetime.datetime.now()
if (not force) and (now - result.updated_at).total_seconds() < 4:
return result
# if log file is not updated, not necessary to get log contents
is_updated = _check_log_updated(result)
crawled_result = crawl_result_path(result.path_name, is_updated)
if is_updated:
current_log_idx = len(result.logs)
if len(crawled_result['logs']) < current_log_idx:
current_log_idx = 0
result.logs = []
result.args = None
for log in crawled_result['logs'][current_log_idx:]:
result.logs.append(Log(log))
if result.args is None:
result.args = Argument(json.dumps(crawled_result['args']))
if result.name is None:
_update_to_default_name(result)
# commands list includes new commands and already registered commands.
# registered commands can be get response, so need to update
current_cmd_idx = len(result.commands)
if len(crawled_result['commands']) < current_cmd_idx:
current_cmd_idx = 0
result.commands = []
result.snapshots = []
for cmd in crawled_result['commands'][current_cmd_idx:]:
result.commands.append(Command(**cmd))
for i, cmd in enumerate(crawled_result['commands'][:current_cmd_idx]):
result.commands[i].update(cmd.get('response', None))
# snapshots file list are sorted but not natural order, for example,
# 'iter_900' set latter than 'iter_1000', so need to check the file
# is registered or not.
registered_snapshot_keys = [ss.iteration for ss in result.snapshots]
for i, snapshot in enumerate(crawled_result['snapshots']):
number_str = snapshot.split('snapshot_iter_')[1]
if not is_numberable(number_str):
continue
number = int(number_str)
if number in registered_snapshot_keys:
continue
result.snapshots.append(Snapshot(snapshot, number))
result.updated_at = datetime.datetime.now()
if commit:
db.session.commit()
return result | [
"crawl_results."
] |
Please provide a description of the function:def get(self, id=None):
if id is None:
path = request.args.get('path_name', default=None)
if path is not None:
project = db.session.query(Project).filter_by(
path_name=path).first()
if project is None:
return jsonify({
'project': None,
'message': 'Project path \'%s\' is not found' % path
}), 400
return jsonify({'project': project.serialize})
projects = db.session.query(Project).all()
return jsonify({
'projects': [p.serialize for p in projects]
})
else:
project = db.session.query(Project).filter_by(id=id).first()
if project is None:
return jsonify({
'project': None,
'message': 'No interface defined for URL.'
}), 404
return jsonify({
'project': project.serialize
}) | [
"get."
] |
Please provide a description of the function:def put(self, id):
project = db.session.query(Project).filter_by(id=id).first()
if project is None:
return jsonify({
'project': None,
'message': 'No interface defined for URL.'
}), 404
request_project = request.get_json().get('project')
project_name = request_project.get('name', None)
if project_name is not None:
project.name = project_name
db.session.add(project)
db.session.commit()
return jsonify({
'project': project.serialize
}) | [
"put."
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.