Dataset columns:
index: int64, 0 to 731k
package: string, length 2 to 98
name: string, length 1 to 76
docstring: string, length 0 to 281k
code: string, length 4 to 1.07M
signature: string, length 2 to 42.8k
23,977
jupyterlab_git.git
detailed_log
Execute git log -m --cc -1 --numstat --oneline -z command (used to get insertions & deletions per file) & return the result.
for line in filter(lambda l: len(l) > 0, strip_and_split(text_output)):
(self, selected_hash, path)
23,978
jupyterlab_git.git
diff
Execute git diff command & return the result.
(self, path, previous=None, current=None)
23,979
jupyterlab_git.git
drop_stash
Execute git stash drop to delete a single stash entry. If no stash_index is provided, delete the entire stash. path: str Git repository path stash_index: number or None Index of the stash entry to remove. If None, the entire stash is removed.
(self, path, stash_index: Optional[int] = None) -> dict
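A minimal sketch of the behavior this docstring describes, built on subprocess; the function name and return shape are illustrative assumptions, not jupyterlab_git's actual implementation.

import subprocess
from typing import Optional

def drop_stash_sketch(path: str, stash_index: Optional[int] = None) -> dict:
    if stash_index is None:
        # No index given: remove the entire stash, per the docstring.
        cmd = ["git", "stash", "clear"]
    else:
        # Drop a single entry, addressed as stash@{n}.
        cmd = ["git", "stash", "drop", f"stash@{{{stash_index}}}"]
    result = subprocess.run(cmd, cwd=path, capture_output=True, text=True)
    return {"code": result.returncode, "message": result.stderr.strip()}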
23,980
jupyterlab_git.git
ensure_credential_helper
Check whether `git config --list` contains `credential.helper`. If it is not set, then it will be set to the value string for `credential.helper` defined in the server settings. path: str Git repository path env: Dict[str, str] Environment variables
(self, path: str, env: Optional[Dict[str, str]] = None) -> NoneType
23,981
jupyterlab_git.git
ensure_git_credential_cache_daemon
Spawn a Git credential cache daemon with the socket file being `socket` if it does not exist. If `debug` is `True`, the daemon will be spawned with `--debug` flag. If `socket` is empty, it is set to `~/.git-credential-cache-daemon`. If `force` is `True`, a daemon will be spawned, and if the daemon process is accessible, the existing daemon process will be terminated before spawning a new one. Otherwise, if `force` is `False`, the PID of the existing daemon process is returned. If the daemon process is not accessible, `-1` is returned. `cwd` and `env` are passed to the process that spawns the daemon.
def ensure_git_credential_cache_daemon( self, socket: Optional[pathlib.Path] = None, debug: bool = False, force: bool = False, cwd: Optional[str] = None, env: Dict[str, str] = None, ) -> None: """ Spawn a Git credential cache daemon with the socket file being `socket` if it does not exist. If `debug` is `True`, the daemon will be spawned with `--debug` flag. If `socket` is empty, it is set to `~/.git-credential-cache-daemon`. If `force` is `True`, a daemon will be spawned, and if the daemon process is accessible, the existing daemon process will be terminated before spawning a new one. Otherwise, if `force` is `False`, the PID of the existing daemon process is returned. If the daemon process is not accessible, `-1` is returned. `cwd` and `env` are passed to the process that spawns the daemon. """ if not socket: socket = pathlib.Path.home() / ".git-credential-cache" / "socket" if socket.exists(): return if self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS is None or force: if force and self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS: self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS.terminate() if not socket.parent.exists(): socket.parent.mkdir(parents=True, exist_ok=True) socket.parent.chmod(0o700) args: List[str] = ["git", "credential-cache--daemon"] if debug: args.append("--debug") args.append(socket) self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS = subprocess.Popen( args, cwd=cwd, env=env, ) get_logger().debug( "A credential cache daemon has been spawned with PID %d", self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS.pid, ) elif self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS.poll(): self.ensure_git_credential_cache_daemon(socket, debug, True, cwd, env)
(self, socket: Optional[pathlib.Path] = None, debug: bool = False, force: bool = False, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None) -> NoneType
23,982
jupyterlab_git.git
ensure_gitignore
Handle call to ensure the .gitignore file exists and that the next append will be on a new line (this means an empty file or a file ending with a newline). path: str Top Git repository path
(self, path)
23,983
jupyterlab_git.git
fetch
Execute git fetch command
def __del__(self): if self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS: self._GIT_CREDENTIAL_CACHE_DAEMON_PROCESS.terminate()
(self, path, auth=None)
23,984
jupyterlab_git.git
get_content
Get the file content of filename.
lambda n: n.lower(),
(self, contents_manager, filename, path)
23,985
jupyterlab_git.git
get_content_at_reference
Get the content of the file at the given git reference.
(self, filename, reference, path, contents_manager)
23,986
jupyterlab_git.git
get_current_branch
Use `symbolic-ref` to get the current branch name. In case of failure, assume that the HEAD is currently detached or rebasing, and fall back to the `branch` command to get the name. See https://git-blame.blogspot.com/2013/06/checking-current-branch-programatically.html
def _is_remote_branch(self, branch_reference): """Check if given branch is remote branch by comparing with 'remotes/', TODO : Consider a better way to check remote branch """ return branch_reference.startswith("refs/remotes/")
(self, path)
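A minimal sketch of the fallback algorithm this docstring describes; illustrative only, not the extension's actual code.

import subprocess

def current_branch_sketch(path: str) -> str:
    # Fast path: symbolic-ref resolves whenever HEAD points at a branch.
    result = subprocess.run(["git", "symbolic-ref", "--short", "HEAD"],
                            cwd=path, capture_output=True, text=True)
    if result.returncode == 0:
        return result.stdout.strip()
    # Detached HEAD or rebase in progress: fall back to `git branch`
    # and take the line marked with "*".
    fallback = subprocess.run(["git", "branch"], cwd=path,
                              capture_output=True, text=True)
    for line in fallback.stdout.splitlines():
        if line.startswith("*"):
            return line[1:].strip()
    return ""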
23,987
jupyterlab_git.git
get_nbdiff
Compute the diff between two notebooks. Args: prev_content: Notebook previous content curr_content: Notebook current content base_content: Notebook base content - only passed during a merge conflict Returns: if not base_content: {"base": Dict, "diff": Dict} else: {"base": Dict, "merge_decisions": Dict}
(self, prev_content: str, curr_content: str, base_content=None) -> dict
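Notebook diffs of this shape can be produced with nbdime, which jupyterlab_git builds on; a minimal sketch of the no-merge-base case (illustrative, not the extension's actual code).

import nbformat
from nbdime import diff_notebooks

def get_nbdiff_sketch(prev_content: str, curr_content: str) -> dict:
    # Parse both JSON strings into notebook nodes.
    prev_nb = nbformat.reads(prev_content, as_version=4)
    curr_nb = nbformat.reads(curr_content, as_version=4)
    # Without a base, return the previous notebook plus the cell-level
    # diff, matching the {"base": ..., "diff": ...} shape documented above.
    return {"base": prev_nb, "diff": diff_notebooks(prev_nb, curr_nb)}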
23,988
jupyterlab_git.git
get_upstream_branch
Execute 'git rev-parse --abbrev-ref branch_name@{upstream}' to get upstream branch name tracked by given local branch. Reference : https://git-scm.com/docs/git-rev-parse#git-rev-parse-emltbranchnamegtupstreamemegemmasterupstreamememuem
(self, path, branch_name)
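A minimal sketch of the rev-parse call this docstring references; the wrapper function is an illustrative assumption.

import subprocess
from typing import Optional

def upstream_branch_sketch(path: str, branch_name: str) -> Optional[str]:
    # `<branch>@{upstream}` resolves to the upstream branch tracked by the
    # given local branch; rev-parse fails if no upstream is configured.
    result = subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", f"{branch_name}@{{upstream}}"],
        cwd=path, capture_output=True, text=True,
    )
    return result.stdout.strip() if result.returncode == 0 else None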
23,989
jupyterlab_git.git
ignore
Handle call to add an entry in .gitignore. path: str Top Git repository path file_path: str The path of the file in .gitignore
(self, path, file_path)
23,990
jupyterlab_git.git
init
Execute git init command & return the result.
(self, path)
23,991
jupyterlab_git.git
log
Execute git log command & return the result.
(self, path, history_count=10, follow_path=None)
23,992
jupyterlab_git.git
merge
Execute git merge command & return the result.
(self, branch: str, path: str) -> dict
23,993
jupyterlab_git.git
pop_stash
Execute git stash pop for a given index of the stash list. If no index is provided, the most recent stash is popped. path: str Git repository path stash_index: number Index of the stash entry; it is first applied to the current branch, then removed from the stash. If the index is not provided, the most recent stash (index=0) is applied and removed.
(self, path: str, stash_index: Optional[int] = None) -> dict
23,994
jupyterlab_git.git
pull
Execute git pull --no-commit. Disables prompts for the password to avoid the terminal hanging while waiting for auth.
(self, path, auth=None, cancel_on_conflict=False)
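One common way to disable credential prompts is the GIT_TERMINAL_PROMPT environment variable; a hedged sketch of the idea (the extension's exact mechanism may differ).

import os
import subprocess

def pull_sketch(path: str) -> dict:
    env = os.environ.copy()
    # GIT_TERMINAL_PROMPT=0 makes git fail instead of prompting for
    # credentials, so a headless server never hangs waiting on stdin.
    env["GIT_TERMINAL_PROMPT"] = "0"
    result = subprocess.run(["git", "pull", "--no-commit"], cwd=path,
                            env=env, capture_output=True, text=True)
    return {"code": result.returncode, "message": result.stderr.strip()}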
23,995
jupyterlab_git.git
push
Execute `git push $UPSTREAM $BRANCH`. The choice of upstream and branch is up to the caller.
(self, remote, branch, path, auth=None, set_upstream=False, force=False, tags=True)
23,996
jupyterlab_git.git
read_file
Reads file content located at path and returns it as a string path: str The path of the file
def read_file(self, path): """ Reads file content located at path and returns it as a string path: str The path of the file """ try: file = pathlib.Path(path) content = file.read_text() return {"code": 0, "content": content} except BaseException as error: return {"code": -1, "content": ""}
(self, path)
23,997
jupyterlab_git.git
rebase
Execute git rebase command & return the result. Args: branch: Branch to rebase onto path: Git repository path
(self, branch: str, path: str) -> dict
23,998
jupyterlab_git.git
remote_add
Handle call to `git remote add` command. path: str Top Git repository path url: str Git remote url name: str Remote name; default "origin"
(self, path, url, name='origin')
23,999
jupyterlab_git.git
remote_remove
Handle call to `git remote remove <name>` command. Args: path (str): Git repository path name (str): Remote name
(self, path, name)
24,000
jupyterlab_git.git
remote_show
Handle call to `git remote show` command. Args: path (str): Git repository path verbose (bool): true if details are needed, otherwise, false Returns: if not verbose: List[str]: Known remotes if verbose: List[ { name: str, url: str } ]: Known remotes
(self, path, verbose=False)
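A minimal sketch of both documented return shapes, parsing `git remote -v` for the verbose case; the function name and parsing details are illustrative assumptions.

import subprocess

def remote_show_sketch(path: str, verbose: bool = False):
    if not verbose:
        out = subprocess.run(["git", "remote", "show"], cwd=path,
                             capture_output=True, text=True)
        return out.stdout.split()
    # `git remote -v` prints "name<TAB>url (fetch|push)"; keep one entry
    # per remote by taking only the fetch line.
    out = subprocess.run(["git", "remote", "-v"], cwd=path,
                         capture_output=True, text=True)
    remotes = []
    for line in out.stdout.splitlines():
        name, rest = line.split("\t", 1)
        if rest.endswith("(fetch)"):
            remotes.append({"name": name, "url": rest.split()[0]})
    return remotes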
24,001
jupyterlab_git.git
reset
Execute git reset <filename> command & return the result.
(self, filename, path)
24,002
jupyterlab_git.git
reset_all
Execute git reset command & return the result.
(self, path)
24,003
jupyterlab_git.git
reset_to_commit
Reset the current branch to a specific past commit.
(self, commit_id, path)
24,004
jupyterlab_git.git
resolve_rebase
Execute git rebase --<action> command & return the result. Args: path: Git repository path
(self, path: str, action: jupyterlab_git.git.RebaseAction) -> dict
24,005
jupyterlab_git.git
set_tag
Set a git tag pointing to a specific commit. path: str Git repository path tag : str Name of new tag. commitId: Identifier of the commit the tag points to.
(self, path, tag, commitId)
24,006
jupyterlab_git.git
show
Execute git show <ref:filename> or git show <ref> and return the file content.
(self, path, ref, filename=None, is_binary=False)
24,007
jupyterlab_git.git
show_prefix
Execute git --show-prefix command & return the result.
(self, path)
24,008
jupyterlab_git.git
show_top_level
Execute git --show-toplevel command & return the result.
(self, path)
24,009
jupyterlab_git.git
stash
Stash changes in a dirty working directory away. path: str Git repository path stashMsg (optional): str A message that describes the stash entry
(self, path: str, stashMsg: str = '') -> dict
24,010
jupyterlab_git.git
stash_list
Execute git stash list command
(self, path: str) -> dict
24,011
jupyterlab_git.git
stash_show
Execute git stash show command
(self, path: str, index: int) -> dict
24,012
jupyterlab_git.git
status
Execute git status command & return the result.
def remove_cell_ids(nb): for cell in nb.cells: cell.pop("id", None) return nb
(self, path: str) -> dict
24,013
jupyterlab_git.git
tag_checkout
Checkout the git repository at a given tag. path: str Git repository path tag : str Tag to checkout
(self, path, tag)
24,014
jupyterlab_git.git
tags
List all tags of the git repository, including the commit each tag points to. path: str Git repository path
(self, path)
24,015
jupyterlab_git.git
version
Return the Git command version. If an error occurs, return None.
(self)
24,016
jupyterlab_git.git
write_gitignore
Handle call to overwrite .gitignore: takes the .gitignore file, clears its previous contents, and writes the new content onto the file. path: str Top Git repository path content: str New file contents
(self, path, content)
24,017
jupyterlab_git
JupyterLabGit
Config options for jupyterlab_git Modeled after: https://github.com/jupyter/jupyter_server/blob/9dd2a9a114c045cfd8fd8748400c6a697041f7fa/jupyter_server/serverapp.py#L1040
class JupyterLabGit(Configurable): """ Config options for jupyterlab_git Modeled after: https://github.com/jupyter/jupyter_server/blob/9dd2a9a114c045cfd8fd8748400c6a697041f7fa/jupyter_server/serverapp.py#L1040 """ actions = Dict( help="Actions to be taken after a git command. Each action takes a list of commands to execute (strings). Supported actions: post_init", config=True, value_trait=List( trait=Unicode(), help='List of commands to run. E.g. ["touch baz.py"]' ) # TODO Validate ) excluded_paths = List(help="Paths to be excluded", config=True, trait=Unicode()) credential_helper = Unicode( help=""" The value of Git credential helper will be set to this value when the Git credential caching mechanism is activated by this extension. By default it is an in-memory cache of 3600 seconds (1 hour); `cache --timeout=3600`. """, config=True, ) git_command_timeout = CFloat( help="The timeout for executing git operations. By default it is set to 20 seconds.", config=True, ) @default("credential_helper") def _credential_helper_default(self): return "cache --timeout=3600" @default("git_command_timeout") def _git_command_timeout_default(self): return 20.0
(*args: 't.Any', **kwargs: 't.Any') -> 't.Any'
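A hypothetical jupyter_server_config.py based on the traits above; the first two values are the defaults defined by the class, the last two are purely illustrative.

# jupyter_server_config.py (hypothetical example)
c.JupyterLabGit.credential_helper = "cache --timeout=3600"  # default
c.JupyterLabGit.git_command_timeout = 20.0                  # seconds, default
c.JupyterLabGit.excluded_paths = ["/shared/readonly"]       # illustrative
c.JupyterLabGit.actions = {"post_init": ["touch baz.py"]}   # example from the help text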
24,100
jupyterlab_git
_jupyter_labextension_paths
null
def _jupyter_labextension_paths(): return [{"src": "labextension", "dest": "@jupyterlab/git"}]
()
24,101
jupyterlab_git
_jupyter_server_extension_points
null
def _jupyter_server_extension_points(): return [{"module": "jupyterlab_git"}]
()
24,102
jupyterlab_git
_load_jupyter_server_extension
Registers the API handler to receive HTTP requests from the frontend extension. Parameters ---------- server_app: jupyterlab.labapp.LabApp JupyterLab application instance
def _load_jupyter_server_extension(server_app): """Registers the API handler to receive HTTP requests from the frontend extension. Parameters ---------- server_app: jupyterlab.labapp.LabApp JupyterLab application instance """ config = JupyterLabGit(config=server_app.config) server_app.web_app.settings["git"] = Git(config) setup_handlers(server_app.web_app)
(server_app)
24,109
jupyterlab_git.handlers
setup_handlers
Sets up all of the git command handlers. Every handler is defined here, to be used in the git.py file.
def setup_handlers(web_app): """ Setups all of the git command handlers. Every handler is defined here, to be used in git.py file. """ handlers_with_path = [ ("/add_all_unstaged", GitAddAllUnstagedHandler), ("/add_all_untracked", GitAddAllUntrackedHandler), ("/all_history", GitAllHistoryHandler), ("/branch/delete", GitBranchDeleteHandler), ("/branch", GitBranchHandler), ("/changed_files", GitChangedFilesHandler), ("/checkout", GitCheckoutHandler), ("/clone", GitCloneHandler), ("/commit", GitCommitHandler), ("/config", GitConfigHandler), ("/content", GitContentHandler), ("/delete_commit", GitDeleteCommitHandler), ("/detailed_log", GitDetailedLogHandler), ("/diff", GitDiffHandler), ("/init", GitInitHandler), ("/log", GitLogHandler), ("/merge", GitMergeHandler), ("/pull", GitPullHandler), ("/push", GitPushHandler), ("/remote/add", GitRemoteAddHandler), ("/remote/fetch", GitFetchHandler), ("/remote/show", GitRemoteDetailsShowHandler), ("/reset", GitResetHandler), ("/reset_to_commit", GitResetToCommitHandler), ("/show_prefix", GitShowPrefixHandler), ("/show_top_level", GitShowTopLevelHandler), ("/status", GitStatusHandler), ("/upstream", GitUpstreamHandler), ("/ignore", GitIgnoreHandler), ("/tags", GitTagHandler), ("/tag_checkout", GitTagCheckoutHandler), ("/tag", GitNewTagHandler), ("/add", GitAddHandler), ("/rebase", GitRebaseHandler), ("/stash", GitStashHandler), ("/stash_pop", GitStashPopHandler), ("/stash_apply", GitStashApplyHandler), ] handlers = [ ("/diffnotebook", GitDiffNotebookHandler), ("/settings", GitSettingsHandler), ] # add the baseurl to our paths base_url = web_app.settings["base_url"] git_handlers = ( [ (url_path_join(base_url, NAMESPACE + path_regex + endpoint), handler) for endpoint, handler in handlers_with_path ] + [ (url_path_join(base_url, NAMESPACE + endpoint), handler) for endpoint, handler in handlers ] + [ ( url_path_join( base_url, NAMESPACE + path_regex + r"/remote/(?P<name>\w+)" ), GitRemoteRemoveHandler, ) ] ) web_app.add_handlers(".*", git_handlers)
(web_app)
24,110
rpycdec
CachedTranslator
Translator wrapper with cache. Uses a local disk cache to avoid translating the same text again and again.
class CachedTranslator: """ Translator wrapper with cache. Use local disk cache to avoid translate same text again and again. """ cache = {} _translate: Callable[[str], str] def __init__(self, translator: Callable[[str], str], cache_dir=".cache") -> None: self._translate = translator # make sure cache dir exists if not os.path.exists(cache_dir): os.makedirs(cache_dir) conn = sqlite3.connect(cache_dir + "/cache.sqlite") # create table if not exists conn.cursor().execute( "create table if not exists cache (key text primary key, value text)" ) self.cache = conn def get(self, key: str) -> str: result = ( self.cache.cursor() .execute("select (value) from cache where key = ?", (key,)) .fetchone() ) return result[0] if result else None def put(self, key: str, val: str): self.cache.cursor().execute( "insert into cache (key, value) values (?, ?)", (key, val) ) self.cache.commit() def translate(self, text: str) -> str: """ translate text and cache it """ start_time = time.time() logger.debug(">>> [%s]", text) cachekey = sha256(text.encode()).hexdigest() cached = self.get(cachekey) if cached: decoded = cached logger.debug("<-- [%s]", decoded) return decoded translated = self._translate(text) self.put(cachekey, translated) cost_time = time.time() - start_time logger.debug("<<< [%s] [cost %f.2s]", translated, cost_time) return translated
(translator: Callable[[str], str], cache_dir='.cache') -> None
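Illustrative usage of CachedTranslator, with a stand-in translator function in place of a real backend.

def fake_translator(text: str) -> str:
    return text.upper()  # stand-in for a real translation backend

cached = CachedTranslator(fake_translator, cache_dir=".cache")
print(cached.translate("hello"))  # first call computes; repeats hit sqlite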
24,111
rpycdec
__init__
null
def __init__(self, translator: Callable[[str], str], cache_dir=".cache") -> None: self._translate = translator # make sure cache dir exists if not os.path.exists(cache_dir): os.makedirs(cache_dir) conn = sqlite3.connect(cache_dir + "/cache.sqlite") # create table if not exists conn.cursor().execute( "create table if not exists cache (key text primary key, value text)" ) self.cache = conn
(self, translator: Callable[[str], str], cache_dir='.cache') -> NoneType
24,112
rpycdec
get
null
def get(self, key: str) -> str: result = ( self.cache.cursor() .execute("select (value) from cache where key = ?", (key,)) .fetchone() ) return result[0] if result else None
(self, key: str) -> str
24,113
rpycdec
put
null
def put(self, key: str, val: str): self.cache.cursor().execute( "insert into cache (key, value) values (?, ?)", (key, val) ) self.cache.commit()
(self, key: str, val: str)
24,114
rpycdec
translate
translate text and cache it
def translate(self, text: str) -> str: """ translate text and cache it """ start_time = time.time() logger.debug(">>> [%s]", text) cachekey = sha256(text.encode()).hexdigest() cached = self.get(cachekey) if cached: decoded = cached logger.debug("<-- [%s]", decoded) return decoded translated = self._translate(text) self.put(cachekey, translated) cost_time = time.time() - start_time logger.debug("<<< [%s] [cost %f.2s]", translated, cost_time) return translated
(self, text: str) -> str
24,115
rpycdec
CodeTranslator
Translator wrapper for renpy code. Parses text in renpy code (block, expr, text) and translates it.
class CodeTranslator: """ Translate warpped for renpy code. Parse text in renpy code(block, expr, text) and translate it. """ _translator: Callable[[str], str] def __init__(self, translator: Callable[[str], str]) -> None: """ Parameters ---------- translator : Callable[[str], str] translator function """ self.translator = translator def _call_translate(self, line) -> str: return self.translator(line) def trans_placeholder(self, line) -> str: """ 1. repalace placeholders with @ 2. translate 3. replace back @ with placeholders To avoid translate chars in placeholders eg: bad: {color=#ff0000}hello{/color} -> {颜色=#ff0000}你好{/颜色} good: {color=#ff0000}hello{/color} -> @你好@ -> {color=#ff0000}你好{/color} """ ph_ch = "@" # placeholder char phs = [] totranslate = "" # {} [] braces, squares = [], [] for i, char in enumerate(line): if i > 0 and line[i - 1] == "\\": totranslate += char continue match char: case "[": squares.append(i) case "]" if squares: end = squares.pop() if squares: continue phs.append(line[end : i + 1]) totranslate += ph_ch case "{": braces.append(i) case "}" if braces: end = braces.pop() if braces: continue phs.append(line[end : i + 1]) totranslate += ph_ch case _: if not squares and not braces: totranslate += char translated = self._call_translate(totranslate) if totranslate else line for placeholder in phs: # translate in placeholder # e.g. "{#r=hello}" matched = re.search(r"{#\w=(.+?)}", placeholder) if matched: translated = self.trans_placeholder(matched.group(1)) placeholder = ( placeholder[: matched.start(1)] + translated + placeholder[matched.end(1) :] ) translated = translated.replace(ph_ch, placeholder, 1) return translated def _on_text(self, text: str) -> str: if text.strip() == "": return text if text[0] == '"' and text[-1] == '"': return '"' + self._on_text(text[1:-1]) + '"' if "%" in text: # format string return text result = self.trans_placeholder(text) result = result.replace("%", "") return result def _on_expr(self, expr: str) -> str: prev_end, dquoters = 0, [] result = "" for i, char in enumerate(expr): if i > 0 and expr[i - 1] == "\\": continue if char == '"': if not dquoters: result += expr[prev_end:i] dquoters.append(i) else: result += self._on_text(expr[dquoters.pop() : i + 1]) prev_end = i + 1 result += expr[prev_end:] return result def _on_block(self, code: str) -> str: """ find strings in python expr and translate it """ results = [] for text in code.splitlines(): result = "" prev_end = 0 # match _("hello") 's hello for find in re.finditer(r'_\("(.+?)"\)', text): start, group, end = find.start(1), find.group(1), find.end(1) result += text[prev_end:start] + self._on_text(group) prev_end = end result += text[prev_end:] results.append(result) return "\n".join(results) def translate(self, kind, text) -> str: """ translate text by kind Parameters ---------- kind : str text, expr, block text : str text to translate """ match kind: case "text": text = self._on_text(text) case "expr": text = self._on_expr(text) case "block": text = self._on_block(text) case _: text = self._on_text(text) return text
(translator: Callable[[str], str]) -> None
24,116
rpycdec
__init__
Parameters ---------- translator : Callable[[str], str] translator function
def __init__(self, translator: Callable[[str], str]) -> None: """ Parameters ---------- translator : Callable[[str], str] translator function """ self.translator = translator
(self, translator: Callable[[str], str]) -> NoneType
24,117
rpycdec
_call_translate
null
def _call_translate(self, line) -> str: return self.translator(line)
(self, line) -> str
24,118
rpycdec
_on_block
find strings in python expr and translate it
def _on_block(self, code: str) -> str: """ find strings in python expr and translate it """ results = [] for text in code.splitlines(): result = "" prev_end = 0 # match _("hello") 's hello for find in re.finditer(r'_\("(.+?)"\)', text): start, group, end = find.start(1), find.group(1), find.end(1) result += text[prev_end:start] + self._on_text(group) prev_end = end result += text[prev_end:] results.append(result) return "\n".join(results)
(self, code: str) -> str
24,119
rpycdec
_on_expr
null
def _on_expr(self, expr: str) -> str: prev_end, dquoters = 0, [] result = "" for i, char in enumerate(expr): if i > 0 and expr[i - 1] == "\\": continue if char == '"': if not dquoters: result += expr[prev_end:i] dquoters.append(i) else: result += self._on_text(expr[dquoters.pop() : i + 1]) prev_end = i + 1 result += expr[prev_end:] return result
(self, expr: str) -> str
24,120
rpycdec
_on_text
null
def _on_text(self, text: str) -> str: if text.strip() == "": return text if text[0] == '"' and text[-1] == '"': return '"' + self._on_text(text[1:-1]) + '"' if "%" in text: # format string return text result = self.trans_placeholder(text) result = result.replace("%", "") return result
(self, text: str) -> str
24,121
rpycdec
trans_placeholder
1. replace placeholders with @ 2. translate 3. replace @ back with the placeholders This avoids translating characters inside placeholders, e.g.: bad: {color=#ff0000}hello{/color} -> {颜色=#ff0000}你好{/颜色} good: {color=#ff0000}hello{/color} -> @你好@ -> {color=#ff0000}你好{/color}
def trans_placeholder(self, line) -> str: """ 1. repalace placeholders with @ 2. translate 3. replace back @ with placeholders To avoid translate chars in placeholders eg: bad: {color=#ff0000}hello{/color} -> {颜色=#ff0000}你好{/颜色} good: {color=#ff0000}hello{/color} -> @你好@ -> {color=#ff0000}你好{/color} """ ph_ch = "@" # placeholder char phs = [] totranslate = "" # {} [] braces, squares = [], [] for i, char in enumerate(line): if i > 0 and line[i - 1] == "\\": totranslate += char continue match char: case "[": squares.append(i) case "]" if squares: end = squares.pop() if squares: continue phs.append(line[end : i + 1]) totranslate += ph_ch case "{": braces.append(i) case "}" if braces: end = braces.pop() if braces: continue phs.append(line[end : i + 1]) totranslate += ph_ch case _: if not squares and not braces: totranslate += char translated = self._call_translate(totranslate) if totranslate else line for placeholder in phs: # translate in placeholder # e.g. "{#r=hello}" matched = re.search(r"{#\w=(.+?)}", placeholder) if matched: translated = self.trans_placeholder(matched.group(1)) placeholder = ( placeholder[: matched.start(1)] + translated + placeholder[matched.end(1) :] ) translated = translated.replace(ph_ch, placeholder, 1) return translated
(self, line) -> str
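An illustrative round trip through trans_placeholder with an identity translator, showing that markup is shielded from the translation backend.

ct = CodeTranslator(lambda s: s)  # identity stand-in for a real translator
print(ct.trans_placeholder("{color=#ff0000}hello{/color}"))
# The {color=...} tags are replaced by "@" before translation and
# restored afterwards, so only "hello" would reach a real backend.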
24,122
rpycdec
translate
translate text by kind Parameters ---------- kind : str text, expr, block text : str text to translate
def translate(self, kind, text) -> str: """ translate text by kind Parameters ---------- kind : str text, expr, block text : str text to translate """ match kind: case "text": text = self._on_text(text) case "expr": text = self._on_expr(text) case "block": text = self._on_block(text) case _: text = self._on_text(text) return text
(self, kind, text) -> str
24,123
rpycdec
DummyClass
Dummy class for unpickling.
class DummyClass(object): """ Dummy class for unpickling. """ state = None def append(self, value): if self.state is None: self.state = [] self.state.append(value) def __getitem__(self, key): return self.__dict__[key] def __eq__(self, __value: object) -> bool: pass def __setitem__(self, key, value): self.__dict__[key] = value def __getstate__(self): if self.state is not None: return self.state return self.__dict__ def __setstate__(self, state): if isinstance(state, dict): self.__dict__ = state else: self.state = state
()
24,124
rpycdec
__eq__
null
def __eq__(self, __value: object) -> bool: pass
(self, _DummyClass__value: object) -> bool
24,126
rpycdec
__getstate__
null
def __getstate__(self): if self.state is not None: return self.state return self.__dict__
(self)
24,128
rpycdec
__setstate__
null
def __setstate__(self, state): if isinstance(state, dict): self.__dict__ = state else: self.state = state
(self, state)
24,129
rpycdec
append
null
def append(self, value): if self.state is None: self.state = [] self.state.append(value)
(self, value)
24,130
rpycdec
GenericUnpickler
null
class GenericUnpickler(pickle.Unpickler): def find_class(self, module, name): if module.startswith("store") or module.startswith("renpy"): return type(name, (DummyClass,), {"__module__": module}) return super().find_class(module, name)
(file, *, fix_imports=True, encoding='ASCII', errors='strict', buffers=())
24,131
rpycdec
find_class
null
def find_class(self, module, name): if module.startswith("store") or module.startswith("renpy"): return type(name, (DummyClass,), {"__module__": module}) return super().find_class(module, name)
(self, module, name)
24,132
rpycdec
GoogleTranslator
Google translate api wrapper
class GoogleTranslator: """ Google translate api wrapper """ session = requests.Session() def __init__(self, src: str = "auto", dest: str = "zh-CN") -> None: self.src_lang = src self.dest_lang = dest @sleep_and_retry # limit calls per second @limits(calls=5, period=1) # google translate api is not free, so use cache def translate(self, text: str) -> str: """ Translate text to dest language """ if text.strip() == "" or re.match(r"^[0-9\W]+$", text): return text forms = { "client": "gtx", "sl": self.src_lang, "tl": self.dest_lang, "dt": "t", "q": text, } server = "https://translate.google.com" resp = self.session.post(f"{server}/translate_a/single", data=forms) if resp.status_code != 200: raise ValueError(f"translate error: {resp.status_code}") data = resp.json() segments = "" for sec in data[0]: segments += sec[0] return segments
(src: str = 'auto', dest: str = 'zh-CN') -> None
24,133
rpycdec
__init__
null
def __init__(self, src: str = "auto", dest: str = "zh-CN") -> None: self.src_lang = src self.dest_lang = dest
(self, src: str = 'auto', dest: str = 'zh-CN') -> NoneType
24,134
rpycdec
translate
Translate text to dest language
@sleep_and_retry # limit calls per second @limits(calls=5, period=1) # google translate api is not free, so use cache def translate(self, text: str) -> str: """ Translate text to dest language """ if text.strip() == "" or re.match(r"^[0-9\W]+$", text): return text forms = { "client": "gtx", "sl": self.src_lang, "tl": self.dest_lang, "dt": "t", "q": text, } server = "https://translate.google.com" resp = self.session.post(f"{server}/translate_a/single", data=forms) if resp.status_code != 200: raise ValueError(f"translate error: {resp.status_code}") data = resp.json() segments = "" for sec in data[0]: segments += sec[0] return segments
(self, text: str) -> str
24,135
concurrent.futures.thread
ThreadPoolExecutor
null
class ThreadPoolExecutor(_base.Executor): # Used to assign unique thread names when thread_name_prefix is not supplied. _counter = itertools.count().__next__ def __init__(self, max_workers=None, thread_name_prefix='', initializer=None, initargs=()): """Initializes a new ThreadPoolExecutor instance. Args: max_workers: The maximum number of threads that can be used to execute the given calls. thread_name_prefix: An optional name prefix to give our threads. initializer: A callable used to initialize worker threads. initargs: A tuple of arguments to pass to the initializer. """ if max_workers is None: # ThreadPoolExecutor is often used to: # * CPU bound task which releases GIL # * I/O bound task (which releases GIL, of course) # # We use cpu_count + 4 for both types of tasks. # But we limit it to 32 to avoid consuming surprisingly large resource # on many core machine. max_workers = min(32, (os.cpu_count() or 1) + 4) if max_workers <= 0: raise ValueError("max_workers must be greater than 0") if initializer is not None and not callable(initializer): raise TypeError("initializer must be a callable") self._max_workers = max_workers self._work_queue = queue.SimpleQueue() self._idle_semaphore = threading.Semaphore(0) self._threads = set() self._broken = False self._shutdown = False self._shutdown_lock = threading.Lock() self._thread_name_prefix = (thread_name_prefix or ("ThreadPoolExecutor-%d" % self._counter())) self._initializer = initializer self._initargs = initargs def submit(self, fn, /, *args, **kwargs): with self._shutdown_lock, _global_shutdown_lock: if self._broken: raise BrokenThreadPool(self._broken) if self._shutdown: raise RuntimeError('cannot schedule new futures after shutdown') if _shutdown: raise RuntimeError('cannot schedule new futures after ' 'interpreter shutdown') f = _base.Future() w = _WorkItem(f, fn, args, kwargs) self._work_queue.put(w) self._adjust_thread_count() return f submit.__doc__ = _base.Executor.submit.__doc__ def _adjust_thread_count(self): # if idle threads are available, don't spin new threads if self._idle_semaphore.acquire(timeout=0): return # When the executor gets lost, the weakref callback will wake up # the worker threads. def weakref_cb(_, q=self._work_queue): q.put(None) num_threads = len(self._threads) if num_threads < self._max_workers: thread_name = '%s_%d' % (self._thread_name_prefix or self, num_threads) t = threading.Thread(name=thread_name, target=_worker, args=(weakref.ref(self, weakref_cb), self._work_queue, self._initializer, self._initargs)) t.start() self._threads.add(t) _threads_queues[t] = self._work_queue def _initializer_failed(self): with self._shutdown_lock: self._broken = ('A thread initializer failed, the thread pool ' 'is not usable anymore') # Drain work queue and mark pending futures failed while True: try: work_item = self._work_queue.get_nowait() except queue.Empty: break if work_item is not None: work_item.future.set_exception(BrokenThreadPool(self._broken)) def shutdown(self, wait=True, *, cancel_futures=False): with self._shutdown_lock: self._shutdown = True if cancel_futures: # Drain all work items from the queue, and then cancel their # associated futures. while True: try: work_item = self._work_queue.get_nowait() except queue.Empty: break if work_item is not None: work_item.future.cancel() # Send a wake-up to prevent threads calling # _work_queue.get(block=True) from permanently blocking. self._work_queue.put(None) if wait: for t in self._threads: t.join() shutdown.__doc__ = _base.Executor.shutdown.__doc__
(max_workers=None, thread_name_prefix='', initializer=None, initargs=())
24,137
concurrent.futures._base
__exit__
null
def __exit__(self, exc_type, exc_val, exc_tb): self.shutdown(wait=True) return False
(self, exc_type, exc_val, exc_tb)
24,138
concurrent.futures.thread
__init__
Initializes a new ThreadPoolExecutor instance. Args: max_workers: The maximum number of threads that can be used to execute the given calls. thread_name_prefix: An optional name prefix to give our threads. initializer: A callable used to initialize worker threads. initargs: A tuple of arguments to pass to the initializer.
def __init__(self, max_workers=None, thread_name_prefix='', initializer=None, initargs=()): """Initializes a new ThreadPoolExecutor instance. Args: max_workers: The maximum number of threads that can be used to execute the given calls. thread_name_prefix: An optional name prefix to give our threads. initializer: A callable used to initialize worker threads. initargs: A tuple of arguments to pass to the initializer. """ if max_workers is None: # ThreadPoolExecutor is often used to: # * CPU bound task which releases GIL # * I/O bound task (which releases GIL, of course) # # We use cpu_count + 4 for both types of tasks. # But we limit it to 32 to avoid consuming surprisingly large resource # on many core machine. max_workers = min(32, (os.cpu_count() or 1) + 4) if max_workers <= 0: raise ValueError("max_workers must be greater than 0") if initializer is not None and not callable(initializer): raise TypeError("initializer must be a callable") self._max_workers = max_workers self._work_queue = queue.SimpleQueue() self._idle_semaphore = threading.Semaphore(0) self._threads = set() self._broken = False self._shutdown = False self._shutdown_lock = threading.Lock() self._thread_name_prefix = (thread_name_prefix or ("ThreadPoolExecutor-%d" % self._counter())) self._initializer = initializer self._initargs = initargs
(self, max_workers=None, thread_name_prefix='', initializer=None, initargs=())
24,139
concurrent.futures.thread
_adjust_thread_count
null
def _adjust_thread_count(self): # if idle threads are available, don't spin new threads if self._idle_semaphore.acquire(timeout=0): return # When the executor gets lost, the weakref callback will wake up # the worker threads. def weakref_cb(_, q=self._work_queue): q.put(None) num_threads = len(self._threads) if num_threads < self._max_workers: thread_name = '%s_%d' % (self._thread_name_prefix or self, num_threads) t = threading.Thread(name=thread_name, target=_worker, args=(weakref.ref(self, weakref_cb), self._work_queue, self._initializer, self._initargs)) t.start() self._threads.add(t) _threads_queues[t] = self._work_queue
(self)
24,140
concurrent.futures.thread
_initializer_failed
null
def _initializer_failed(self): with self._shutdown_lock: self._broken = ('A thread initializer failed, the thread pool ' 'is not usable anymore') # Drain work queue and mark pending futures failed while True: try: work_item = self._work_queue.get_nowait() except queue.Empty: break if work_item is not None: work_item.future.set_exception(BrokenThreadPool(self._broken))
(self)
24,141
concurrent.futures._base
map
Returns an iterator equivalent to map(fn, iter). Args: fn: A callable that will take as many arguments as there are passed iterables. timeout: The maximum number of seconds to wait. If None, then there is no limit on the wait time. chunksize: The size of the chunks the iterable will be broken into before being passed to a child process. This argument is only used by ProcessPoolExecutor; it is ignored by ThreadPoolExecutor. Returns: An iterator equivalent to: map(func, *iterables) but the calls may be evaluated out-of-order. Raises: TimeoutError: If the entire result iterator could not be generated before the given timeout. Exception: If fn(*args) raises for any values.
def map(self, fn, *iterables, timeout=None, chunksize=1): """Returns an iterator equivalent to map(fn, iter). Args: fn: A callable that will take as many arguments as there are passed iterables. timeout: The maximum number of seconds to wait. If None, then there is no limit on the wait time. chunksize: The size of the chunks the iterable will be broken into before being passed to a child process. This argument is only used by ProcessPoolExecutor; it is ignored by ThreadPoolExecutor. Returns: An iterator equivalent to: map(func, *iterables) but the calls may be evaluated out-of-order. Raises: TimeoutError: If the entire result iterator could not be generated before the given timeout. Exception: If fn(*args) raises for any values. """ if timeout is not None: end_time = timeout + time.monotonic() fs = [self.submit(fn, *args) for args in zip(*iterables)] # Yield must be hidden in closure so that the futures are submitted # before the first iterator value is required. def result_iterator(): try: # reverse to keep finishing order fs.reverse() while fs: # Careful not to keep a reference to the popped future if timeout is None: yield _result_or_cancel(fs.pop()) else: yield _result_or_cancel(fs.pop(), end_time - time.monotonic()) finally: for future in fs: future.cancel() return result_iterator()
(self, fn, *iterables, timeout=None, chunksize=1)
24,142
concurrent.futures.thread
shutdown
Clean-up the resources associated with the Executor. It is safe to call this method several times. Otherwise, no other methods can be called after this one. Args: wait: If True then shutdown will not return until all running futures have finished executing and the resources used by the executor have been reclaimed. cancel_futures: If True then shutdown will cancel all pending futures. Futures that are completed or running will not be cancelled.
def shutdown(self, wait=True, *, cancel_futures=False): with self._shutdown_lock: self._shutdown = True if cancel_futures: # Drain all work items from the queue, and then cancel their # associated futures. while True: try: work_item = self._work_queue.get_nowait() except queue.Empty: break if work_item is not None: work_item.future.cancel() # Send a wake-up to prevent threads calling # _work_queue.get(block=True) from permanently blocking. self._work_queue.put(None) if wait: for t in self._threads: t.join()
(self, wait=True, *, cancel_futures=False)
24,143
concurrent.futures.thread
submit
Submits a callable to be executed with the given arguments. Schedules the callable to be executed as fn(*args, **kwargs) and returns a Future instance representing the execution of the callable. Returns: A Future representing the given call.
def submit(self, fn, /, *args, **kwargs): with self._shutdown_lock, _global_shutdown_lock: if self._broken: raise BrokenThreadPool(self._broken) if self._shutdown: raise RuntimeError('cannot schedule new futures after shutdown') if _shutdown: raise RuntimeError('cannot schedule new futures after ' 'interpreter shutdown') f = _base.Future() w = _WorkItem(f, fn, args, kwargs) self._work_queue.put(w) self._adjust_thread_count() return f
(self, fn, /, *args, **kwargs)
24,144
rpycdec
_do_collect
null
def _do_collect(meta: tuple, accept_lang: str, into: dict) -> str: (kind, label, lang, old, new) = meta key, val = label or old, new or old if accept_lang and lang and lang != accept_lang: return val if lang or (not lang and key not in into): into[key] = (kind, val) return val
(meta: tuple, accept_lang: str, into: dict) -> str
24,145
rpycdec
_do_consume
null
def _do_consume(meta: tuple, cache: dict) -> str: (_, label, _, old, new) = meta key, val = label or old, new or old return cache.get(key) or val
(meta: tuple, cache: dict) -> str
24,146
rpycdec
_walk_callback
null
def _walk_callback(stmts, callback) -> str: return renpy.util.get_code( stmts, modifier=lambda node, **kwargs: walk_node(node, callback, **kwargs), )
(stmts, callback) -> str
24,148
rpycdec
decompile
Decompile an rpyc file or directory into rpy. Parameters ---------- input_path : str Path to an rpyc file or a directory containing rpyc files output_path : str, optional Output path; by default the same path as input_path.
def decompile(input_path, output_path=None): """ decompile rpyc file or directory into rpy Parameters ---------- input_path : str path to rpyc file or directory contains rpyc files output_path : str, optional output path, by default it's same path of input_path. """ if not os.path.isdir(input_path): decompile_file(input_path, output_path) return if not output_path: output_path = input_path for filename in match_files(input_path, r".*\.rpym?c$"): decompile_file( os.path.join(input_path, filename), os.path.join(output_path, filename.removesuffix("c")), )
(input_path, output_path=None)
24,149
rpycdec
decompile_file
decompile rpyc file into rpy file and write to output.
def decompile_file(input_file, output_file=None): """ decompile rpyc file into rpy file and write to output. """ if not output_file: output_file = input_file.removesuffix("c") if not output_file.endswith(".rpy"): output_file = os.path.join( output_file, os.path.basename(input_file).removesuffix("c") ) stmts = load_file(input_file) code = renpy.util.get_code(stmts) logger.info("writing %s", output_file) write_file(output_file, code)
(input_file, output_file=None)
24,150
rpycdec
default_translator
Default translator, which uses the Google Translate API wrapped in a CachedTranslator.
def default_translator() -> Callable[[str], str]: """ default translator which use google translate api with CachedTranslator """ return CachedTranslator(GoogleTranslator().translate).translate
() -> Callable[[str], str]
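Illustrative use of the returned callable; note it performs a real network call to Google Translate on a cache miss.

translate = default_translator()
print(translate("hello world"))  # hits the API once, then the sqlite cache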
24,152
ratelimit.decorators
RateLimitDecorator
Rate limit decorator class.
class RateLimitDecorator(object): ''' Rate limit decorator class. ''' def __init__(self, calls=15, period=900, clock=now, raise_on_limit=True): ''' Instantiate a RateLimitDecorator with some sensible defaults. By default the Twitter rate limiting window is respected (15 calls every 15 minutes). :param int calls: Maximum function invocations allowed within a time period. Must be a number greater than 0. :param float period: An upper bound time period (in seconds) before the rate limit resets. Must be a number greater than 0. :param function clock: An optional function retuning the current time. This is used primarily for testing. :param bool raise_on_limit: A boolean allowing the caller to avoiding rasing an exception. ''' self.clamped_calls = max(1, min(sys.maxsize, floor(calls))) self.period = period self.clock = clock self.raise_on_limit = raise_on_limit # Initialise the decorator state. self.last_reset = clock() self.num_calls = 0 # Add thread safety. self.lock = threading.RLock() def __call__(self, func): ''' Return a wrapped function that prevents further function invocations if previously called within a specified period of time. :param function func: The function to decorate. :return: Decorated function. :rtype: function ''' @wraps(func) def wrapper(*args, **kargs): ''' Extend the behaviour of the decoated function, forwarding function invocations previously called no sooner than a specified period of time. The decorator will raise an exception if the function cannot be called so the caller may implement a retry strategy such as an exponential backoff. :param args: non-keyword variable length argument list to the decorated function. :param kargs: keyworded variable length argument list to the decorated function. :raises: RateLimitException ''' with self.lock: period_remaining = self.__period_remaining() # If the time window has elapsed then reset. if period_remaining <= 0: self.num_calls = 0 self.last_reset = self.clock() # Increase the number of attempts to call the function. self.num_calls += 1 # If the number of attempts to call the function exceeds the # maximum then raise an exception. if self.num_calls > self.clamped_calls: if self.raise_on_limit: raise RateLimitException('too many calls', period_remaining) return return func(*args, **kargs) return wrapper def __period_remaining(self): ''' Return the period remaining for the current rate limit window. :return: The remaing period. :rtype: float ''' elapsed = self.clock() - self.last_reset return self.period - elapsed
(calls=15, period=900, clock=<built-in function monotonic>, raise_on_limit=True)
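Typical usage of the decorator above; the `limits` alias is how the ratelimit package commonly exposes this class (treat the alias as an assumption if only this record is taken as ground truth):

from ratelimit import RateLimitException, limits  # `limits` is the usual alias for RateLimitDecorator

FIFTEEN_MINUTES = 900

@limits(calls=15, period=FIFTEEN_MINUTES)
def call_api():
    ...

try:
    call_api()
except RateLimitException as exc:
    # exc.period_remaining tells the caller how long until the window resets.
    print(f"rate limited, retry in {exc.period_remaining:.1f}s")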
24,153
ratelimit.decorators
__period_remaining
Return the period remaining for the current rate limit window.

:return: The remaining period.
:rtype: float
def __period_remaining(self):
    '''
    Return the period remaining for the current rate limit window.

    :return: The remaining period.
    :rtype: float
    '''
    elapsed = self.clock() - self.last_reset
    return self.period - elapsed
(self)
24,154
ratelimit.decorators
__call__
Return a wrapped function that prevents further function invocations if previously called within a specified period of time. :param function func: The function to decorate. :return: Decorated function. :rtype: function
def __call__(self, func):
    '''
    Return a wrapped function that prevents further function invocations if
    previously called within a specified period of time.

    :param function func: The function to decorate.
    :return: Decorated function.
    :rtype: function
    '''
    @wraps(func)
    def wrapper(*args, **kargs):
        '''
        Extend the behaviour of the decorated function, forwarding function
        invocations previously called no sooner than a specified period of
        time. The decorator will raise an exception if the function cannot
        be called so the caller may implement a retry strategy such as an
        exponential backoff.

        :param args: non-keyword variable length argument list to the decorated function.
        :param kargs: keyworded variable length argument list to the decorated function.
        :raises: RateLimitException
        '''
        with self.lock:
            period_remaining = self.__period_remaining()

            # If the time window has elapsed then reset.
            if period_remaining <= 0:
                self.num_calls = 0
                self.last_reset = self.clock()

            # Increase the number of attempts to call the function.
            self.num_calls += 1

            # If the number of attempts to call the function exceeds the
            # maximum then raise an exception.
            if self.num_calls > self.clamped_calls:
                if self.raise_on_limit:
                    raise RateLimitException('too many calls', period_remaining)
                return

        return func(*args, **kargs)
    return wrapper
(self, func)
24,155
ratelimit.decorators
__init__
Instantiate a RateLimitDecorator with some sensible defaults. By default the Twitter rate limiting window is respected (15 calls every 15 minutes).

:param int calls: Maximum function invocations allowed within a time period. Must be a number greater than 0.
:param float period: An upper bound time period (in seconds) before the rate limit resets. Must be a number greater than 0.
:param function clock: An optional function returning the current time. This is used primarily for testing.
:param bool raise_on_limit: A boolean allowing the caller to avoid raising an exception.
def __init__(self, calls=15, period=900, clock=now, raise_on_limit=True):
    '''
    Instantiate a RateLimitDecorator with some sensible defaults. By
    default the Twitter rate limiting window is respected (15 calls every
    15 minutes).

    :param int calls: Maximum function invocations allowed within a time period. Must be a number greater than 0.
    :param float period: An upper bound time period (in seconds) before the rate limit resets. Must be a number greater than 0.
    :param function clock: An optional function returning the current time. This is used primarily for testing.
    :param bool raise_on_limit: A boolean allowing the caller to avoid raising an exception.
    '''
    self.clamped_calls = max(1, min(sys.maxsize, floor(calls)))
    self.period = period
    self.clock = clock
    self.raise_on_limit = raise_on_limit

    # Initialise the decorator state.
    self.last_reset = clock()
    self.num_calls = 0

    # Add thread safety.
    self.lock = threading.RLock()
(self, calls=15, period=900, clock=<built-in function monotonic>, raise_on_limit=True)
24,156
rpycdec
load_file
load renpy code from rpyc file and return ast tree.
def load_file(filename, disasm: bool = False) -> renpy.ast.Node:
    """
    load renpy code from rpyc file and return ast tree.
    """
    ext = os.path.splitext(filename)[1]
    if ext in [".rpy", ".rpym"]:
        raise NotImplementedError(
            "parsing .rpy files is not supported; use renpy.parser.parse() from Ren'Py's SDK instead"
        )
    if ext in [".rpyc", ".rpymc"]:
        with open(filename, "rb") as file:
            for slot in [1, 2]:
                bindata = read_rpyc_data(file, slot)
                if bindata:
                    if disasm:
                        disasm_file = filename + ".disasm"
                        with open(disasm_file, "w", encoding="utf-8") as disasm_f:
                            pickletools.dis(bindata, out=disasm_f)
                    try:
                        _, stmts = pickle.loads(bindata)
                    except Exception as e:
                        logger.error("load %s failed: %s", filename, e)
                        raise e
                    return stmts
                file.seek(0)
    return None
(filename, disasm: bool = False) -> renpy.ast.Node
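A sketch pairing load_file with renpy.util.get_code, as decompile_file above does (the file path is illustrative):

# Sketch: load the AST from a compiled script and regenerate source text.
stmts = load_file("game/script.rpyc")
source = renpy.util.get_code(stmts)

# Optionally dump the raw pickle opcodes alongside, for debugging.
stmts = load_file("game/script.rpyc", disasm=True)  # writes game/script.rpyc.disasm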
24,158
rpycdec
main
command line tool entry.
def main():
    """
    command line tool entry.
    """
    logging.basicConfig(level=logging.INFO)
    argparser = argparse.ArgumentParser()
    argparser.add_argument(
        "--concurent", "-n", type=int, default=0, help="number of concurrent translation workers"
    )
    argparser.add_argument(
        "--include-lang",
        "-i",
        default=None,
        help="add items in tl/<lang> dir to translations",
    )
    argparser.add_argument(
        "--verbose", "-v", action="store_true", help="verbose output"
    )
    argparser.add_argument(
        "--translate", action="store_true", help="decompile and translate"
    )
    argparser.add_argument("src", nargs=1, help="rpyc file or directory")
    argparser.add_argument("dest", nargs="?", help="output file or directory")
    args = argparser.parse_args()
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.translate:
        translate(
            args.src[0],
            args.dest,
            concurent=args.concurent,
            include_tl_lang=args.include_lang,
        )
    else:
        decompile(args.src[0], args.dest)
()
24,159
rpycdec
match_files
match files in dir with regex pattern

Parameters
----------
base_dir : str
    directory to find in
pattern : str
    regex pattern

Returns
-------
list[str]
    matched filenames relative to base_dir
def match_files(base_dir: str, pattern: str) -> list[str]:
    """
    match files in dir with regex pattern

    Parameters
    ----------
    base_dir : str
        directory to find in
    pattern : str
        regex pattern

    Returns
    -------
    list[str]
        matched filenames relative to base_dir
    """
    if pattern == "":
        pattern = ".*"
    results = []
    matched = re.compile(pattern)
    for root, _, files in os.walk(base_dir):
        for filename in files:
            filename = os.path.relpath(os.path.join(root, filename), base_dir)
            if matched.match(filename):
                results.append(filename)
    return results
(base_dir: str, pattern: str) -> list[str]
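Usage sketch; the pattern below is the same one decompile uses for compiled scripts:

# Find every compiled script (.rpyc / .rpymc) under the game directory.
for name in match_files("game", r".*\.rpym?c$"):
    print(name)  # paths are relative to base_dir, e.g. "script.rpyc"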
24,160
rpycdec
noop_translator
Translator that does nothing but return the text itself.
def noop_translator(text: str) -> str:
    """
    Translator that does nothing but return the text itself.
    """
    return text
(text: str) -> str
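A sketch of where the no-op translator is useful: running the translation pipeline without contacting a translation service (paths are illustrative):

# Sketch: exercise the translate pipeline but leave every string unchanged.
translate("game/", "out/", translator=noop_translator)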
24,165
rpycdec
read_rpyc_data
Reads the binary data from `slot` in a .rpyc (v1 or v2) file. Returns the data if the slot exists, or None if the slot does not exist.
def read_rpyc_data(file: io.FileIO, slot):
    """
    Reads the binary data from `slot` in a .rpyc (v1 or v2) file. Returns
    the data if the slot exists, or None if the slot does not exist.
    """
    file.seek(0)
    header_data = file.read(1024)
    # Legacy path.
    if header_data[: len(RPYC2_HEADER)] != RPYC2_HEADER:
        if slot != 1:
            return None
        file.seek(0)
        data = file.read()
        return zlib.decompress(data)
    # RPYC2 path.
    pos = len(RPYC2_HEADER)
    while True:
        header_slot, start, length = struct.unpack("III", header_data[pos : pos + 12])
        if slot == header_slot:
            break
        if header_slot == 0:
            return None
        pos += 12
    file.seek(start)
    data = file.read(length)
    return zlib.decompress(data)
(file: _io.FileIO, slot)
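A sketch of reading a slot directly; the `(header, stmts)` unpacking mirrors what load_file does with the decompressed payload, and the file path is illustrative:

import pickle

with open("game/script.rpyc", "rb") as f:
    blob = read_rpyc_data(f, 1)  # slot 1; legacy (v1) files only have this slot
    if blob is not None:
        _, stmts = pickle.loads(blob)  # payload is a pickled (header, statements) pair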
24,168
ratelimit.decorators
sleep_and_retry
Return a wrapped function that rescues rate limit exceptions, sleeping the current thread until rate limit resets. :param function func: The function to decorate. :return: Decorated function. :rtype: function
def sleep_and_retry(func):
    '''
    Return a wrapped function that rescues rate limit exceptions, sleeping the
    current thread until rate limit resets.

    :param function func: The function to decorate.
    :return: Decorated function.
    :rtype: function
    '''
    @wraps(func)
    def wrapper(*args, **kargs):
        '''
        Call the rate limited function. If the function raises a rate limit
        exception sleep for the remaining time period and retry the function.

        :param args: non-keyword variable length argument list to the decorated function.
        :param kargs: keyworded variable length argument list to the decorated function.
        '''
        while True:
            try:
                return func(*args, **kargs)
            except RateLimitException as exception:
                time.sleep(exception.period_remaining)
    return wrapper
(func)
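The two decorators are designed to stack: sleep_and_retry catches the RateLimitException that the limiter raises and sleeps out the remaining window instead of propagating it:

from ratelimit import limits, sleep_and_retry

@sleep_and_retry
@limits(calls=15, period=900)
def call_api():
    ...  # never raises RateLimitException; blocks until the window resets, then retries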
24,172
rpycdec
translate
translate rpyc file or directory
def translate(
    input_path,
    output_path=None,
    translator: Callable[[str], str] = None,
    include_tl_lang: str = "english",
    concurent: int = 0,
):
    """
    translate rpyc file or directory
    """
    if os.path.isfile(input_path):
        if not output_path:
            output_path = input_path.removesuffix("c")
        (_, code) = translate_files(
            "",
            [input_path],
            translator=translator,
        ).popitem()
        logger.info("writing %s", output_path)
        write_file(output_path, code)
        return
    if not output_path:
        output_path = input_path
    matches = match_files(input_path, r".*\.rpym?c$")
    file_codes = translate_files(
        input_path,
        matches,
        translator=translator,
        include_tl_lang=include_tl_lang,
        concurent=concurent,
    )
    for filename, code in file_codes.items():
        output_file = os.path.join(output_path, filename.removesuffix("c"))
        logger.info("writing %s", output_file)
        write_file(output_file, code)
(input_path, output_path=None, translator: Optional[Callable[[str], str]] = None, include_tl_lang: str = 'english', concurent: int = 0)
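Usage sketch, combining the options that main() wires up from the CLI (the paths are illustrative):

# Sketch: decompile and machine-translate a whole game directory.
translate(
    "game/",
    "translated/",
    include_tl_lang="english",  # also fold in strings from the tl/english dir
    concurent=4,                # four worker threads (parameter name as in the source)
)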
24,173
rpycdec
translate_files
translate files and return a map of filename and code
def translate_files(
    base_dir: str,
    files: list[str],
    translator: Callable[[str], str],
    include_tl_lang: str = "english",
    concurent: int = 0,
) -> dict[str, str]:
    """
    translate files and return a map of filename and code
    """
    if not translator:
        logger.info("using default translator")
        translator = default_translator()
    stmts_dict = {}
    translations_dict = {}
    # load translations
    for filename in files:
        logger.info("loading %s", filename)
        stmts = load_file(os.path.join(base_dir, filename))
        stmts_dict[filename] = stmts
        _walk_callback(
            stmts,
            lambda meta: _do_collect(meta, include_tl_lang, translations_dict),
        )
    logger.info("loaded %d translations", len(translations_dict))
    # translate
    logger.info("translating")
    results_dict = {}
    code_translator = CodeTranslator(translator)
    if concurent:
        logger.info("translating with %d concurrent workers", concurent)
        with ThreadPoolExecutor(max_workers=concurent) as executor:
            results = executor.map(
                lambda item: (
                    item[0],
                    code_translator.translate(item[1][0], item[1][1]),
                ),
                translations_dict.items(),
            )
            for label, result in results:
                results_dict[label] = result
                logger.info(
                    "translated %d/%d", len(results_dict), len(translations_dict)
                )
    else:
        for label, (kind, text) in translations_dict.items():
            results_dict[label] = code_translator.translate(kind, text)
            logger.info("translated %d/%d", len(results_dict), len(translations_dict))
    # generate code
    code_files = {}
    logger.info("generating code")
    for filename, stmts in stmts_dict.items():
        logger.info("generating code for %s", filename)
        code_files[filename] = _walk_callback(
            stmts, lambda meta: _do_consume(meta, results_dict)
        )
    return code_files
(base_dir: str, files: list[str], translator: Callable[[str], str], include_tl_lang: str = 'english', concurent: int = 0) -> dict[str, str]
24,174
rpycdec
update_save
decode renpy save file and update it with update function
def update_save(filename, update: Callable[[object], object] = lambda x: x):
    """
    decode renpy save file and update it with update function
    """
    with zipfile.ZipFile(filename, "r") as file:
        logdata = file.read("log")
    data = GenericUnpickler(io.BytesIO(logdata)).load()
    data = update(data)
    pickledata = pickle.dumps(data)
    with zipfile.ZipFile(filename, "r") as original_zip:
        with zipfile.ZipFile(filename + "_patched", "w") as new_zip:
            # Copy every entry verbatim except the save log, which gets the
            # patched data. writestr is used because the payloads are in-memory
            # bytes, not files on disk.
            for item in original_zip.infolist():
                if item.filename != "log":
                    new_zip.writestr(item, original_zip.read(item.filename))
                else:
                    new_zip.writestr("log", pickledata)
(filename, update: Callable[[object], object] = <function <lambda>>)
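A sketch of an update callback; the save's log payload is an arbitrary unpickled object, so the transform below only illustrates the shape of the hook (the file path and the "money" key are hypothetical):

# Hypothetical transform: bump a counter inside the unpickled save data.
def bump_money(data):
    # `data` is whatever GenericUnpickler produced; the "money" key is made up here.
    if isinstance(data, dict) and "money" in data:
        data["money"] += 1000
    return data

update_save("saves/1-1-LT1.save", bump_money)  # writes saves/1-1-LT1.save_patched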
24,175
rpycdec
walk_node
Walk an AST node and call callback on nodes that contain text/expr/block.

callback: (kind, label, lang, old, new) -> translated
def walk_node(node, callback, **kwargs):
    """
    Walk an AST node and call callback on nodes that contain text/expr/block.

    callback: (kind, label, lang, old, new) -> translated
    """
    p_label, p_lang = kwargs.get("label"), kwargs.get("language")
    if isinstance(node, renpy.ast.Translate):
        pass
    elif isinstance(node, renpy.ast.TranslateString):
        node.new = callback(("text", p_label, node.language, node.old, node.new))
    elif isinstance(node, renpy.ast.TranslateBlock):
        pass
    elif isinstance(node, renpy.ast.Say):
        node.what = callback(("text", p_label, p_lang, node.what, None))
    elif isinstance(node, renpy.sl2.slast.SLDisplayable):
        if node.get_name() in ["text", "textbutton"]:
            for i, val in enumerate(node.positional):
                # (kind, label, lang, old, new), matching the documented order.
                node.positional[i] = callback(("expr", p_label, p_lang, val, None))
    elif isinstance(node, renpy.ast.Show):
        pass
    elif isinstance(node, renpy.ast.UserStatement):
        pass
    elif isinstance(node, renpy.ast.PyCode):
        state = list(node.state)
        state[1] = callback(("block", p_label, p_lang, state[1], None))
        node.state = tuple(state)
    elif isinstance(node, renpy.sl2.slast.SLBlock):
        pass
    elif isinstance(node, renpy.sl2.slast.SLUse):
        if node.args:
            for i, (name, val) in enumerate(node.args.arguments):
                val = callback(("block", p_label, p_lang, val, None))
                node.args.arguments[i] = (name, val)
    elif isinstance(node, renpy.ast.Menu):
        for i, item in enumerate(node.items):
            _li = list(item)
            _li[0] = callback(("text", p_label, p_lang, _li[0], None))
            node.items[i] = tuple(_li)
(node, callback, **kwargs)
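A sketch of a callback that only collects strings; the tuple order (kind, label, lang, old, new) follows the docstring, and returning the existing value keeps the tree unmodified (`some_node` is a placeholder for a loaded AST node):

collected = []

def collect(meta):
    kind, label, lang, old, new = meta
    if kind == "text" and old:
        collected.append((label, old))
    # Return the existing value so the node is left unchanged.
    return new if new is not None else old

walk_node(some_node, collect, label="start", language=None)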
24,176
rpycdec
write_file
write data to file
def write_file(filename: str, data: str):
    """
    write data to file
    """
    if not os.path.exists(os.path.dirname(filename)):
        os.makedirs(os.path.dirname(filename))
    with open(filename, "w", encoding="utf-8") as file:
        file.write(data)
(filename: str, data: str)
24,179
supabase._async.client
AsyncClient
Supabase client class.
class AsyncClient:
    """Supabase client class."""

    def __init__(
        self,
        supabase_url: str,
        supabase_key: str,
        options: Union[ClientOptions, None] = None,
    ):
        """Instantiate the client.

        Parameters
        ----------
        supabase_url: str
            The URL to the Supabase instance that should be connected to.
        supabase_key: str
            The API key to the Supabase instance that should be connected to.
        **options
            Any extra settings to be optionally specified - also see the
            `DEFAULT_OPTIONS` dict.
        """
        if not supabase_url:
            raise SupabaseException("supabase_url is required")
        if not supabase_key:
            raise SupabaseException("supabase_key is required")

        # Check if the url and key are valid
        if not re.match(r"^(https?)://.+", supabase_url):
            raise SupabaseException("Invalid URL")

        # Check if the key is a valid JWT
        if not re.match(
            r"^[A-Za-z0-9-_=]+\.[A-Za-z0-9-_=]+\.?[A-Za-z0-9-_.+/=]*$", supabase_key
        ):
            raise SupabaseException("Invalid API key")

        if options is None:
            options = ClientOptions(storage=AsyncMemoryStorage())

        self.supabase_url = supabase_url
        self.supabase_key = supabase_key
        self.options = options
        options.headers.update(self._get_auth_headers())
        self.rest_url = f"{supabase_url}/rest/v1"
        self.realtime_url = f"{supabase_url}/realtime/v1".replace("http", "ws")
        self.auth_url = f"{supabase_url}/auth/v1"
        self.storage_url = f"{supabase_url}/storage/v1"
        self.functions_url = f"{supabase_url}/functions/v1"
        self.schema = options.schema

        # Instantiate clients.
        self.auth = self._init_supabase_auth_client(
            auth_url=self.auth_url,
            client_options=options,
        )
        # TODO: Bring up to parity with JS client.
        # self.realtime: SupabaseRealtimeClient = self._init_realtime_client(
        #     realtime_url=self.realtime_url,
        #     supabase_key=self.supabase_key,
        # )
        self.realtime = None
        self._postgrest = None
        self._storage = None
        self._functions = None
        self.auth.on_auth_state_change(self._listen_to_auth_events)

    @classmethod
    async def create(
        cls,
        supabase_url: str,
        supabase_key: str,
        options: Union[ClientOptions, None] = None,
    ):
        return cls(supabase_url, supabase_key, options)

    def table(self, table_name: str) -> AsyncRequestBuilder:
        """Perform a table operation.

        Note that the supabase client uses the `from` method, but in Python,
        this is a reserved keyword, so we have elected to use the name `table`.
        Alternatively you can use the `.from_()` method.
        """
        return self.from_(table_name)

    def from_(self, table_name: str) -> AsyncRequestBuilder:
        """Perform a table operation.

        See the `table` method.
        """
        return self.postgrest.from_(table_name)

    def rpc(
        self, fn: str, params: Optional[Dict[Any, Any]] = None
    ) -> AsyncRPCFilterRequestBuilder:
        """Performs a stored procedure call.

        Parameters
        ----------
        fn : str
            The stored procedure call to be executed.
        params : dict of any
            Parameters passed into the stored procedure call.

        Returns
        -------
        AsyncRPCFilterRequestBuilder
            Returns a filter builder. This lets you apply filters on the
            response of an RPC.
        """
        if params is None:
            params = {}
        return self.postgrest.rpc(fn, params)

    @property
    def postgrest(self):
        if self._postgrest is None:
            self._postgrest = self._init_postgrest_client(
                rest_url=self.rest_url,
                headers=self.options.headers,
                schema=self.options.schema,
                timeout=self.options.postgrest_client_timeout,
            )
        return self._postgrest

    @property
    def storage(self):
        if self._storage is None:
            self._storage = self._init_storage_client(
                storage_url=self.storage_url,
                headers=self.options.headers,
                storage_client_timeout=self.options.storage_client_timeout,
            )
        return self._storage

    @property
    def functions(self):
        if self._functions is None:
            self._functions = AsyncFunctionsClient(
                self.functions_url, self.options.headers
            )
        return self._functions

    # async def remove_subscription_helper(resolve):
    #     try:
    #         await self._close_subscription(subscription)
    #         open_subscriptions = len(self.get_subscriptions())
    #         if not open_subscriptions:
    #             error = await self.realtime.disconnect()
    #             if error:
    #                 return {"error": None, "data": {open_subscriptions}}
    #     except Exception as e:
    #         raise e
    #     return remove_subscription_helper(subscription)

    # async def _close_subscription(self, subscription):
    #     """Close a given subscription
    #
    #     Parameters
    #     ----------
    #     subscription
    #         The name of the channel
    #     """
    #     if not subscription.closed:
    #         await self._closeChannel(subscription)

    # def get_subscriptions(self):
    #     """Return all channels the client is subscribed to."""
    #     return self.realtime.channels

    # @staticmethod
    # def _init_realtime_client(
    #     realtime_url: str, supabase_key: str
    # ) -> SupabaseRealtimeClient:
    #     """Private method for creating an instance of the realtime-py client."""
    #     return SupabaseRealtimeClient(
    #         realtime_url, {"params": {"apikey": supabase_key}}
    #     )

    @staticmethod
    def _init_storage_client(
        storage_url: str,
        headers: Dict[str, str],
        storage_client_timeout: int = DEFAULT_STORAGE_CLIENT_TIMEOUT,
    ) -> AsyncStorageClient:
        return AsyncStorageClient(storage_url, headers, storage_client_timeout)

    @staticmethod
    def _init_supabase_auth_client(
        auth_url: str,
        client_options: ClientOptions,
    ) -> AsyncSupabaseAuthClient:
        """Creates a wrapped instance of the GoTrue Client."""
        return AsyncSupabaseAuthClient(
            url=auth_url,
            auto_refresh_token=client_options.auto_refresh_token,
            persist_session=client_options.persist_session,
            storage=client_options.storage,
            headers=client_options.headers,
            flow_type=client_options.flow_type,
        )

    @staticmethod
    def _init_postgrest_client(
        rest_url: str,
        headers: Dict[str, str],
        schema: str,
        timeout: Union[int, float, Timeout] = DEFAULT_POSTGREST_CLIENT_TIMEOUT,
    ) -> AsyncPostgrestClient:
        """Private helper for creating an instance of the Postgrest client."""
        return AsyncPostgrestClient(
            rest_url, headers=headers, schema=schema, timeout=timeout
        )

    def _create_auth_header(self, token: str):
        return f"Bearer {token}"

    def _get_auth_headers(self) -> Dict[str, str]:
        """Helper method to get auth headers."""
        return {
            "apiKey": self.supabase_key,
            "Authorization": self.options.headers.get(
                "Authorization", self._create_auth_header(self.supabase_key)
            ),
        }

    def _listen_to_auth_events(
        self, event: AuthChangeEvent, session: Union[Session, None]
    ):
        access_token = self.supabase_key
        if event in ["SIGNED_IN", "TOKEN_REFRESHED", "SIGNED_OUT"]:
            # reset postgrest and storage instance on event change
            self._postgrest = None
            self._storage = None
            self._functions = None
            access_token = session.access_token if session else self.supabase_key

        self.options.headers["Authorization"] = self._create_auth_header(access_token)
(supabase_url: str, supabase_key: str, options: Optional[supabase.lib.client_options.ClientOptions] = None)
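A usage sketch; the import path, environment variable names, and table name are assumptions - the record only guarantees the `create` classmethod and the `table`/`rpc` accessors:

import asyncio
import os
from supabase._async.client import AsyncClient

async def main():
    # URL and key are read from the environment; the key must look like a JWT
    # to pass the client's validation regex.
    client = await AsyncClient.create(
        os.environ["SUPABASE_URL"], os.environ["SUPABASE_KEY"]
    )
    # PostgREST query via the lazily-initialised postgrest property.
    response = await client.table("countries").select("*").execute()
    print(response.data)

asyncio.run(main())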
24,180
supabase._async.client
__init__
Instantiate the client.

Parameters
----------
supabase_url: str
    The URL to the Supabase instance that should be connected to.
supabase_key: str
    The API key to the Supabase instance that should be connected to.
**options
    Any extra settings to be optionally specified - also see the `DEFAULT_OPTIONS` dict.
def __init__(
    self,
    supabase_url: str,
    supabase_key: str,
    options: Union[ClientOptions, None] = None,
):
    """Instantiate the client.

    Parameters
    ----------
    supabase_url: str
        The URL to the Supabase instance that should be connected to.
    supabase_key: str
        The API key to the Supabase instance that should be connected to.
    **options
        Any extra settings to be optionally specified - also see the
        `DEFAULT_OPTIONS` dict.
    """
    if not supabase_url:
        raise SupabaseException("supabase_url is required")
    if not supabase_key:
        raise SupabaseException("supabase_key is required")

    # Check if the url and key are valid
    if not re.match(r"^(https?)://.+", supabase_url):
        raise SupabaseException("Invalid URL")

    # Check if the key is a valid JWT
    if not re.match(
        r"^[A-Za-z0-9-_=]+\.[A-Za-z0-9-_=]+\.?[A-Za-z0-9-_.+/=]*$", supabase_key
    ):
        raise SupabaseException("Invalid API key")

    if options is None:
        options = ClientOptions(storage=AsyncMemoryStorage())

    self.supabase_url = supabase_url
    self.supabase_key = supabase_key
    self.options = options
    options.headers.update(self._get_auth_headers())
    self.rest_url = f"{supabase_url}/rest/v1"
    self.realtime_url = f"{supabase_url}/realtime/v1".replace("http", "ws")
    self.auth_url = f"{supabase_url}/auth/v1"
    self.storage_url = f"{supabase_url}/storage/v1"
    self.functions_url = f"{supabase_url}/functions/v1"
    self.schema = options.schema

    # Instantiate clients.
    self.auth = self._init_supabase_auth_client(
        auth_url=self.auth_url,
        client_options=options,
    )
    # TODO: Bring up to parity with JS client.
    # self.realtime: SupabaseRealtimeClient = self._init_realtime_client(
    #     realtime_url=self.realtime_url,
    #     supabase_key=self.supabase_key,
    # )
    self.realtime = None
    self._postgrest = None
    self._storage = None
    self._functions = None
    self.auth.on_auth_state_change(self._listen_to_auth_events)
(self, supabase_url: str, supabase_key: str, options: Optional[supabase.lib.client_options.ClientOptions] = None)
24,181
supabase._async.client
_create_auth_header
null
def _create_auth_header(self, token: str):
    return f"Bearer {token}"
(self, token: str)