diff --git a/spaces/101-5/gpt4free/g4f/.v1/gpt4free/you/README.md b/spaces/101-5/gpt4free/g4f/.v1/gpt4free/you/README.md
deleted file mode 100644
index e1917c6dc153a0aff2ab1e0ec5093cc55b5b77e1..0000000000000000000000000000000000000000
--- a/spaces/101-5/gpt4free/g4f/.v1/gpt4free/you/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-### Example: `you` (use like the openai PyPI package)
-
-```python
-
-from gpt4free import you
-
-# simple request with links and details
-response = you.Completion.create(
- prompt="hello world",
- detailed=True,
- include_links=True)
-
-print(response.dict())
-
-# {
-# "response": "...",
-# "links": [...],
-# "extra": {...},
-# "slots": {...}
-# }
-
-# chatbot
-
-chat = []
-
-while True:
- prompt = input("You: ")
- if prompt == 'q':
- break
- response = you.Completion.create(
- prompt=prompt,
- chat=chat)
-
- print("Bot:", response.text)
-
- chat.append({"question": prompt, "answer": response.text})
-```
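The README's chatbot loop exits only on a literal `q` and assumes every request succeeds. Below is a minimal hardened sketch, assuming the same `you.Completion.create` API and `response.text` field shown above; the broad `except` is a placeholder, since the README documents no exception types.

```python
from gpt4free import you

chat = []
while True:
    prompt = input("You: ")
    if prompt in ("q", "quit", "exit"):
        break
    try:
        response = you.Completion.create(prompt=prompt, chat=chat)
    except Exception as exc:  # network/auth failures would surface here
        print("Request failed:", exc)
        continue
    print("Bot:", response.text)
    chat.append({"question": prompt, "answer": response.text})
```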
diff --git a/spaces/101-5/gpt4free/g4f/.v1/unfinished/bing/__ini__.py b/spaces/101-5/gpt4free/g4f/.v1/unfinished/bing/__ini__.py
deleted file mode 100644
index 1e4fd149dd2371c54989bf3b6e034fd60e156213..0000000000000000000000000000000000000000
--- a/spaces/101-5/gpt4free/g4f/.v1/unfinished/bing/__ini__.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# Import necessary libraries
-import asyncio
-from json import dumps, loads
-from ssl import create_default_context
-
-import websockets
-from browser_cookie3 import edge
-from certifi import where
-from requests import get
-
-# Set up SSL context
-ssl_context = create_default_context()
-ssl_context.load_verify_locations(where())
-
-
-def format_message(msg: dict) -> str:
- """Format message as JSON string with delimiter."""
- return dumps(msg) + '\x1e'
-
-
-def get_token():
- """Retrieve token from browser cookies."""
- cookies = {c.name: c.value for c in edge(domain_name='bing.com')}
- return cookies['_U']
-
-
-class AsyncCompletion:
- async def create(
- prompt: str = 'hello world',
- optionSets: list = [
- 'deepleo',
- 'enable_debug_commands',
- 'disable_emoji_spoken_text',
- 'enablemm',
- 'h3relaxedimg'
- ],
- token: str = get_token()):
- """Create a connection to Bing AI and send the prompt."""
-
- # Send create request
- create = get('https://edgeservices.bing.com/edgesvc/turing/conversation/create',
- headers={
- 'host': 'edgeservices.bing.com',
- 'authority': 'edgeservices.bing.com',
- 'cookie': f'_U={token}',
- 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
- }
- )
-
- # Extract conversation data
- conversationId = create.json()['conversationId']
- clientId = create.json()['clientId']
- conversationSignature = create.json()['conversationSignature']
-
- # Connect to WebSocket
- wss = await websockets.connect('wss://sydney.bing.com/sydney/ChatHub', max_size=None, ssl=ssl_context,
- extra_headers={
- # Add necessary headers
- }
- )
-
- # Send JSON protocol version
- await wss.send(format_message({'protocol': 'json', 'version': 1}))
- await wss.recv()
-
- # Define message structure
- struct = {
- # Add necessary message structure
- }
-
- # Send message
- await wss.send(format_message(struct))
-
- # Process responses
- base_string = ''
- final = False
- while not final:
- objects = str(await wss.recv()).split('\x1e')
- for obj in objects:
- if obj is None or obj == '':
- continue
-
- response = loads(obj)
- if response.get('type') == 1 and response['arguments'][0].get('messages'):
- response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get(
- 'text')
-
- yield (response_text.replace(base_string, ''))
- base_string = response_text
-
- elif response.get('type') == 2:
- final = True
-
- await wss.close()
-
-
-async def run():
- """Run the async completion and print the result."""
- async for value in AsyncCompletion.create(
- prompt='summarize cinderella with each word beginning with a consecutive letter of the alphabet, a-z',
- optionSets=[
- "galileo",
- ]
- ):
- print(value, end='', flush=True)
-
-
-asyncio.run(run())
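The script above frames every ChatHub message as JSON terminated by the `\x1e` record separator, streams partial answers as `type == 1` records, and stops at `type == 2`. As a sketch, here is the framing and incremental-text logic condensed into two standalone helpers; this is grounded in the code above, not an official Bing client, and assumes records carry the same `arguments`/`adaptiveCards` shape.

```python
import json

RS = "\x1e"  # record separator used by the ChatHub protocol

def decode_frames(raw: str) -> list:
    """Split one websocket payload into its JSON records, skipping empties."""
    return [json.loads(part) for part in raw.split(RS) if part]

def extract_delta(record: dict, seen_text: str) -> str:
    """Return only the text appended since seen_text, as the loop above does."""
    if record.get("type") != 1 or not record["arguments"][0].get("messages"):
        return ""
    full = record["arguments"][0]["messages"][0]["adaptiveCards"][0]["body"][0].get("text", "")
    return full.replace(seen_text, "")
```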
diff --git a/spaces/1acneusushi/gradio-2dmoleculeeditor/data/Download Visual Studio 2015 for Windows and Become a Master of Multiple Platforms.md b/spaces/1acneusushi/gradio-2dmoleculeeditor/data/Download Visual Studio 2015 for Windows and Become a Master of Multiple Platforms.md
deleted file mode 100644
index 235e9f3b428ee6bfc90357c490281ed33a5903ee..0000000000000000000000000000000000000000
--- a/spaces/1acneusushi/gradio-2dmoleculeeditor/data/Download Visual Studio 2015 for Windows and Become a Master of Multiple Platforms.md
+++ /dev/null
@@ -1,32 +0,0 @@
-
-
How to Download Visual Studio 2015 for Windows and What's New in It
-
Visual Studio 2015 is a powerful and versatile integrated development environment (IDE) for building applications across platforms such as Windows, web, mobile, and cloud. It supports multiple programming languages, including C#, C++, Visual Basic, Python, and JavaScript, and it offers many features and tools to help you code faster, debug more easily, test more thoroughly, and collaborate more efficiently.
To get started, open the Visual Studio older-downloads page, scroll down to the "Visual Studio 2015" section, and click the "Download" button next to the edition you want. You can choose from the Community, Professional, or Enterprise editions. The Community edition is free for individual developers, open-source projects, academic research, education, and small teams; the Professional and Enterprise editions require a subscription or a trial license.
-
Save the setup file on your PC and run it.
-
Follow the instructions to install Visual Studio 2015 on your PC. You can customize the installation by selecting the features and components you want.
-
Launch Visual Studio 2015 and sign in with your Microsoft account if prompted.
-
-
-
What's New in Visual Studio 2015
-
Visual Studio 2015 introduces many new features and improvements that make it easier and more productive to develop applications for various platforms. Here are some of the highlights:
-
-
Cross-platform development: You can use Visual Studio 2015 to create applications for Windows, iOS, Android, Linux, and Mac OS using a single code base. You can use C#, C++, or HTML/JavaScript as your programming language and leverage the power of Xamarin, Cordova, or Unity frameworks.
-
New languages and tools: You can use Visual Studio 2015 to code in additional languages such as Python, Node.js, Go, and R. You can also pair it with Visual Studio Code, a separate lightweight, fast code editor released in the same period that supports multiple languages and platforms.
-
Improved debugging and diagnostics: You can use Visual Studio 2015 to debug your applications more easily and effectively. You can use new features such as IntelliTrace, which records the execution history of your code and lets you replay it step by step; PerfTips, which shows you the performance impact of each line of code; Diagnostic Tools window, which shows you various metrics and events related to your application's performance and health; and more.
-
Enhanced testing and quality: You can use Visual Studio 2015 to test your applications more thoroughly and efficiently. You can use new features such as Live Unit Testing, which runs your unit tests automatically as you code and shows you the results in real time; CodeLens, which shows you useful information about your code such as references, changes, authors, tests, etc.; Code Analysis, which helps you detect and fix code issues such as errors, warnings, style violations, etc.; and more.
-
Better collaboration and DevOps: You can use Visual Studio 2015 to collaborate more effectively with your team members and deliver your applications faster and more reliably. You can use new features such as Team Explorer, which integrates with various source control systems such as Git, TFS, SVN, etc.; Code Review, which lets you request and provide feedback on code changes; Pull Requests, which lets you merge code changes from different branches; Continuous Delivery Tools, which lets you automate your build-deploy-test cycle; and more.
-
-
-
Conclusion
-
Visual Studio 2015 is a great IDE that offers many features and tools to help you create amazing applications for various platforms. It supports multiple programming languages, cross-platform development, improved debugging and diagnostics, enhanced testing and quality, better collaboration and DevOps, and more.
-
-
If you want to download Visual Studio 2015 for Windows and try these features for yourself, follow the steps above and start building.
-
-
\ No newline at end of file
diff --git a/spaces/1gistliPinn/ChatGPT4/Examples/Borderlands 2 Crack Only-SKIDROW Cheat Engine LINK.md b/spaces/1gistliPinn/ChatGPT4/Examples/Borderlands 2 Crack Only-SKIDROW Cheat Engine LINK.md
deleted file mode 100644
index 5830dfb7dc2a3ebd96a9c7863723b6b9008592e7..0000000000000000000000000000000000000000
--- a/spaces/1gistliPinn/ChatGPT4/Examples/Borderlands 2 Crack Only-SKIDROW Cheat Engine LINK.md
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
diff --git a/spaces/1gistliPinn/ChatGPT4/Examples/Cricket Batting Tips In Tamil Pdf 26 __EXCLUSIVE__.md b/spaces/1gistliPinn/ChatGPT4/Examples/Cricket Batting Tips In Tamil Pdf 26 __EXCLUSIVE__.md
deleted file mode 100644
index cf99aeb1e81a4b15ad1b9f2d8d66e73e3efa9454..0000000000000000000000000000000000000000
--- a/spaces/1gistliPinn/ChatGPT4/Examples/Cricket Batting Tips In Tamil Pdf 26 __EXCLUSIVE__.md
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
diff --git a/spaces/1phancelerku/anime-remove-background/Como baixar Solar Smash APK e jogar o simulador de destruio do planeta mais divertido.md b/spaces/1phancelerku/anime-remove-background/Como baixar Solar Smash APK e jogar o simulador de destruio do planeta mais divertido.md
deleted file mode 100644
index 6c552e9c5ed6d952285623853a74ae89fd70891b..0000000000000000000000000000000000000000
--- a/spaces/1phancelerku/anime-remove-background/Como baixar Solar Smash APK e jogar o simulador de destruio do planeta mais divertido.md
+++ /dev/null
@@ -1,148 +0,0 @@
-
-
Download Solar Smash APK: A Planet Destruction Simulator
-
Have you ever imagined what it would be like to destroy an entire planet with a single tap on your phone's screen? If you are fascinated by space and the possibilities for exploration and destruction it offers, you need to know Solar Smash, a simulation game that lets you use a range of weapons and disasters to annihilate planets and solar systems. In this article, we show you what Solar Smash is, how to download the game's APK on your Android device, what the game's main features are, and tips and tricks for playing Solar Smash with more fun and efficiency. Let's get started.
-
What is Solar Smash?
-
A simulation game that lets you destroy planets with a variety of weapons
-
Solar Smash is a simulation game developed by Paradyme Games that lets you use a variety of different weapons to destroy the planet of your choice. These weapons include nuclear missiles, lasers, asteroids, alien invasions, black holes, solar flares, and much more. You can combine weapons to create spectacular reactions and watch the planet break apart, and you can customize your weapons and planets by changing their color, size, speed, gravity, and other properties.
A free and fun game for space lovers
-
Best of all, Solar Smash is completely free to play on your Android device. You don't pay anything to download the APK or to access its features, and you won't be bothered by invasive ads or in-app purchases. Solar Smash is ideal for anyone who loves space and wants to play with the simulation and destruction possibilities it offers; you can spend hours experimenting with the different weapons, planets, interactions, and outcomes.
-
A game with realistic graphics and NASA imagery
-
Another strong point of Solar Smash is its visual quality. The game has realistic, detailed graphics that render the planets and weapons faithfully. You can see the damage your weapons cause, such as craters, cracks, fire, smoke, and even the complete destruction of the core. The game also uses real NASA imagery for its planets and solar systems, which further increases realism and immersion: you can see Earth, Mars, Jupiter, Saturn, and other planets as they really are, and even hunt for the secret planets the game hides.
-
How to download the Solar Smash APK?
-
Step by step to download the game on your Android device
-
Downloading the Solar Smash APK on your Android device is quick and easy. Just follow the steps below:
-
-
Go to the official Solar Smash website or a trusted site that offers the game's APK download.
-
Click the download button and wait for the APK file to finish downloading to your device.
-
Locate the APK file in your device's downloads folder and tap it to start the installation.
-
If prompted, allow installation from unknown sources in your device settings.
-
Follow the on-screen instructions to complete the installation.
-
Done! You can now open the game and start destroying planets.
-
-
Minimum requirements and recommendations to run the game
-
To run Solar Smash on an Android device you need at least version 4.4 of the operating system and about 100 MB of free storage. The game works offline, but staying connected is recommended so you receive updates and news. It also runs best on devices with good processing power and screen resolution, which lets you enjoy the graphics and effects fully.
-
Alternatives to download the game on your PC or Mac
-
If you prefer to play Solar Smash on your PC or Mac, you can do so with an Android emulator: a program that simulates an Android device on your computer, letting you install and run Android-only apps and games. Several emulators are available online; some of the most popular are BlueStacks, NoxPlayer, and MEmu. To download Solar Smash on your PC or Mac using an emulator, follow the steps below:
-
-
Download and install an Android emulator on your PC or Mac.
-
Open the emulator and sign in with your Google account.
-
Go to the Google Play Store or a trusted site that offers the Solar Smash APK download.
-
Download and install Solar Smash inside the emulator.
-
Done! You can now play Solar Smash on your PC or Mac.
-
-
What are the main features of Solar Smash?
Game modes: Planet Smash and System Smash
-
Solar Smash has two main game modes: Planet Smash and System Smash. In Planet Smash, you pick a specific planet to destroy with whatever weapons you like; you can view it from different angles and zoom levels, watch the damage accumulate, and change its characteristics such as color, size, gravity, and atmosphere. In System Smash, you pick an entire solar system: you can watch the planets orbit the sun, interact with them in various ways, and adjust properties such as the distance between planets, orbital speed, and the sun's brightness.
-
Weapons and disasters: missiles, lasers, asteroids, and more
-
Solar Smash has a wide variety of weapons and disasters you can use to destroy planets and solar systems, including:
-
-
Nuclear missiles: explosive projectiles that leave large craters and fires on the planets.
-
Lasers: energy beams that pierce planets, causing cracks and smoke.
-
Asteroids: space rocks that collide with planets to devastating effect.
-
Alien invasions: extraterrestrial ships that attack planets with lasers and bombs.
-
Black holes: cosmic objects that suck planets and entire solar systems into them.
-
Solar flares: plasma eruptions that leave the sun and strike planets with radiation and heat.
-
And much more: earthquakes, volcanoes, tsunamis, hurricanes, tornadoes, meteors, comets, etc.
-
-
Planets and systems: Earth, Mars, Jupiter, and others
-
Solar Smash offers several planets and solar systems to destroy, all based on real NASA imagery, which makes the game more realistic and interesting. You can see the planets as they really are, with their continents, oceans, clouds, rings, and moons, and watch whole systems with planets orbiting the sun at different speeds and distances. Some of the planets and systems you can choose are:
-
-
Earth: our home planet, with its seven continents, five oceans, blue atmosphere, and moon.
-
Mars: the red planet, with its deserts, mountains, volcanoes, frozen poles, and two moons.
-
Jupiter: the largest planet in the solar system, with its colorful atmosphere, Great Red Spot, thin rings, and 79 moons.
-
Saturn: the second-largest planet, with its yellowish atmosphere, bright rings, and 82 moons.
-
Uranus: the seventh planet, with its blue-green atmosphere, tilted axis, dark rings, and 27 moons.
-
Neptune: the eighth planet, with its bluish atmosphere, fierce winds, thin rings, and 14 moons.
-
Solar System: our own planetary system, with the sun and the eight major planets, plus dwarf planets, asteroids, comets, and other celestial bodies.
-
And much more: Venus, Mercury, Pluto, Ceres, Haumea, Makemake, Eris, etc.
-
-
Secret planets: how to unlock them and what they are
-
Besides the familiar planets and systems, Solar Smash hides secret planets you can unlock and destroy. They are based on pop-culture references from movies, series, games, and books. To unlock them you follow hints the game gives you: for example, to unlock Tatooine, from the Star Wars saga, use the green laser weapon on Mars; to unlock Namek, from Dragon Ball Z, use the solar flare weapon on Jupiter. Some of the secret planets you can unlock are:
-
-
Tatooine: the desert planet with two suns, home of Luke Skywalker and Obi-Wan Kenobi.
-
Namek: the green planet with three suns, home of Piccolo and Dende.
-
Hogwarts: the school of witchcraft and wizardry attended by Harry Potter and his friends.
-
Pandora: the lush planet with a giant moon, home of Jake Sully and Neytiri.
-
Middle-earth: the fantasy world of many realms and races where The Lord of the Rings takes place.
-
And much more: Narnia, Westeros, Asgard, Cybertron, etc.
-
-
What are the tips and tricks for playing Solar Smash?
How to complete the game's challenges and achievements
-
Solar Smash has a series of challenges and achievements you can complete to earn rewards and unlock new weapons and planets. They cover different objectives, such as destroying a certain number of planets, using a particular weapon, or causing a specific type of damage. To see them, open the game menu and tap the trophy icon, where you can track your progress and rewards. Some examples:
-
-
Destroy 10 planets with nuclear missiles.
-
Destroy the core of 5 planets with lasers.
-
Cause a magnitude 10 earthquake on a planet.
-
Create a black hole with an asteroid.
-
Unlock all the secret planets.
-
And much more: destroy the moon, trigger an alien invasion, create a supernova, etc.
-
-
How to hit the right spot to destroy the planet's core
-
One of the most efficient and satisfying ways to destroy a planet in Solar Smash is to hit its core with a powerful weapon such as a laser or a missile, which sets off a huge explosion that shatters the planet into fragments. Hitting the core is not as easy as it looks, though: you need good aim and precision. One tip is to zoom in on the planet so you can see its center better; another is to use the X-ray weapon to see through the planet and locate the core, so you can aim with confidence and hit the target more easily.
-
How to combine weapons and elements to create amazing reactions
-
Another way to have fun in Solar Smash is to combine weapons and elements to create striking, unexpected reactions. For example, use the rain weapon to soak a planet and then the lightning weapon to deliver an electric shock; freeze a planet with the ice weapon and then hit it with the fire weapon for a thermal explosion; or use the gravity weapon to pull several asteroids toward a planet and then open a black hole to swallow them all. The combinations are endless, and the results can surprise you.
-
How to customize your weapons and planets
-
Solar Smash also lets you customize your weapons and planets, changing properties such as color, size, speed, and gravity. This makes the game more fun and more challenging, because you can build different scenarios and test your limits. To customize, open the game menu and tap the gear icon, where you can adjust the options for each weapon and planet: change the color of your laser, the size of your asteroid, the speed of your missile, the gravity of your planet, and so on.
-
Conclusion
-
Solar Smash is a simulation game that lets you destroy planets and solar systems with a variety of weapons and disasters. It is free, fun, realistic, and addictive. You can download the APK on your Android device, or on your PC or Mac with an emulator. You can choose among many known and secret planets and systems, customize your weapons and planets, complete challenges and achievements, hit planet cores, and combine weapons and elements into spectacular reactions. Solar Smash delivers hours of fun and destruction; download the Solar Smash APK now and start your space adventure!
-
FAQs
-
What is Solar Smash?
-
Solar Smash is a simulation game that lets you destroy planets and solar systems with a variety of weapons and disasters.
-
How do I download the Solar Smash APK?
-
You can download the Solar Smash APK on your Android device, or on your PC or Mac with an emulator. Go to a trusted site that offers the game's APK download, download the file, and install it on your device.
-
What are the main features of Solar Smash?
-
Solar Smash has two game modes, Planet Smash and System Smash. You can choose among several known or secret planets and solar systems, customize your weapons and planets, complete challenges and achievements, hit planet cores, and combine weapons and elements into amazing reactions.
-
What are the best weapons for destroying planets?
-
That depends on your taste and your goal. Some of the most powerful and entertaining weapons are lasers, nuclear missiles, asteroids, black holes, and solar flares.
-
How do I unlock the secret planets?
-
You follow hints the game gives you. For example, to unlock Tatooine, from the Star Wars saga, use the green laser weapon on Mars; to unlock Namek, from Dragon Ball Z, use the solar flare weapon on Jupiter.
-
-
\ No newline at end of file
diff --git a/spaces/801artistry/RVC801/utils/backups_test.py b/spaces/801artistry/RVC801/utils/backups_test.py
deleted file mode 100644
index f3edf15811b5035ee82f21e54e87b7e87ce413eb..0000000000000000000000000000000000000000
--- a/spaces/801artistry/RVC801/utils/backups_test.py
+++ /dev/null
@@ -1,138 +0,0 @@
-
-import os
-import shutil
-import hashlib
-import time
-
-LOGS_FOLDER = '/content/Applio-RVC-Fork/logs'
-WEIGHTS_FOLDER = '/content/Applio-RVC-Fork/weights'
-GOOGLE_DRIVE_PATH = '/content/drive/MyDrive/RVC_Backup'
-
-def import_google_drive_backup():
- print("Importing Google Drive backup...")
- GOOGLE_DRIVE_PATH = '/content/drive/MyDrive/RVC_Backup' # change this to your Google Drive path
- LOGS_FOLDER = '/content/Applio-RVC-Fork/logs'
- WEIGHTS_FOLDER = '/content/Applio-RVC-Fork/weights'
- weights_exist = False
- files_to_copy = []
- weights_to_copy = []
-
- def handle_files(root, files, is_weight_files=False):
-     nonlocal weights_exist  # without this, the assignment below only creates a local variable
-     for filename in files:
- filepath = os.path.join(root, filename)
- if filename.endswith('.pth') and is_weight_files:
- weights_exist = True
- backup_filepath = os.path.join(WEIGHTS_FOLDER, os.path.relpath(filepath, GOOGLE_DRIVE_PATH))
- else:
- backup_filepath = os.path.join(LOGS_FOLDER, os.path.relpath(filepath, GOOGLE_DRIVE_PATH))
- backup_folderpath = os.path.dirname(backup_filepath)
- if not os.path.exists(backup_folderpath):
- os.makedirs(backup_folderpath)
- print(f'Created folder: {backup_folderpath}', flush=True)
- if is_weight_files:
- weights_to_copy.append((filepath, backup_filepath))
- else:
- files_to_copy.append((filepath, backup_filepath))
-
- for root, dirs, files in os.walk(os.path.join(GOOGLE_DRIVE_PATH, 'logs')):
- handle_files(root, files)
-
- for root, dirs, files in os.walk(os.path.join(GOOGLE_DRIVE_PATH, 'weights')):
- handle_files(root, files, True)
-
- # Copy files in batches
- total_files = len(files_to_copy)
- start_time = time.time()
- for i, (source, dest) in enumerate(files_to_copy, start=1):
- with open(source, 'rb') as src, open(dest, 'wb') as dst:
- shutil.copyfileobj(src, dst, 1024*1024) # 1MB buffer size
- # Report progress every 5 seconds or after every 100 files, whichever is less frequent
- if time.time() - start_time > 5 or i % 100 == 0:
- print(f'\rCopying file {i} of {total_files} ({i * 100 / total_files:.2f}%)', end="")
- start_time = time.time()
- print(f'\nImported {len(files_to_copy)} files from Google Drive backup')
-
- # Copy weights in batches
- total_weights = len(weights_to_copy)
- start_time = time.time()
- for i, (source, dest) in enumerate(weights_to_copy, start=1):
- with open(source, 'rb') as src, open(dest, 'wb') as dst:
- shutil.copyfileobj(src, dst, 1024*1024) # 1MB buffer size
- # Report progress every 5 seconds or after every 100 files, whichever is less frequent
- if time.time() - start_time > 5 or i % 100 == 0:
- print(f'\rCopying weight file {i} of {total_weights} ({i * 100 / total_weights:.2f}%)', end="")
- start_time = time.time()
- if weights_exist:
- print(f'\nImported {len(weights_to_copy)} weight files')
- print("Copied weights from Google Drive backup to local weights folder.")
- else:
- print("\nNo weights found in Google Drive backup.")
- print("Google Drive backup import completed.")
-
-def backup_files():
- print("\n Starting backup loop...")
- last_backup_timestamps_path = os.path.join(LOGS_FOLDER, 'last_backup_timestamps.txt')
- fully_updated = False # boolean to track if all files are up to date
- try:
- with open(last_backup_timestamps_path, 'r') as f:
-         # rsplit on the last ':' keeps paths that contain ':' intact; split(':') would raise ValueError
-         last_backup_timestamps = dict(line.strip().rsplit(':', 1) for line in f if ':' in line)
- except (FileNotFoundError, ValueError):
- last_backup_timestamps = {}
-
- while True:
- updated = False
- files_to_copy = []
- files_to_delete = []
-
- for root, dirs, files in os.walk(LOGS_FOLDER):
- for filename in files:
- if filename != 'last_backup_timestamps.txt':
- filepath = os.path.join(root, filename)
- if os.path.isfile(filepath):
- backup_filepath = os.path.join(GOOGLE_DRIVE_PATH, os.path.relpath(filepath, LOGS_FOLDER))
- backup_folderpath = os.path.dirname(backup_filepath)
-
- if not os.path.exists(backup_folderpath):
- os.makedirs(backup_folderpath)
- print(f'Created backup folder: {backup_folderpath}', flush=True)
-
- # check if file has changed since last backup
- last_backup_timestamp = last_backup_timestamps.get(filepath)
- current_timestamp = os.path.getmtime(filepath)
- if last_backup_timestamp is None or float(last_backup_timestamp) < current_timestamp:
- files_to_copy.append((filepath, backup_filepath)) # add to list of files to copy
- last_backup_timestamps[filepath] = str(current_timestamp) # update last backup timestamp
- updated = True
- fully_updated = False # if a file is updated, all files are not up to date
-
- # check if any files were deleted in Colab and delete them from the backup drive
- for filepath in list(last_backup_timestamps.keys()):
- if not os.path.exists(filepath):
- backup_filepath = os.path.join(GOOGLE_DRIVE_PATH, os.path.relpath(filepath, LOGS_FOLDER))
- if os.path.exists(backup_filepath):
- files_to_delete.append(backup_filepath) # add to list of files to delete
- del last_backup_timestamps[filepath]
- updated = True
- fully_updated = False # if a file is deleted, all files are not up to date
-
- # Copy files in batches
- if files_to_copy:
- for source, dest in files_to_copy:
- shutil.copy2(source, dest)
- print(f'Copied or updated {len(files_to_copy)} files')
-
- # Delete files in batches
- if files_to_delete:
- for file in files_to_delete:
- os.remove(file)
- print(f'Deleted {len(files_to_delete)} files')
-
- if not updated and not fully_updated:
- print("Files are up to date.")
- fully_updated = True # if all files are up to date, set the boolean to True
- # copy_weights_folder_to_drive is assumed to come from the companion backups module
- copy_weights_folder_to_drive()
-
- with open(last_backup_timestamps_path, 'w') as f:
- for filepath, timestamp in last_backup_timestamps.items():
- f.write(f'{filepath}:{timestamp}\n')
- time.sleep(15) # wait for 15 seconds before checking again
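The timestamp file above uses a `filepath:timestamp` line format, which is why the loader must split on the last colon only. A sketch of the full round-trip under that assumption:

```python
def load_timestamps(path: str) -> dict:
    """Read filepath:timestamp lines; rsplit survives paths containing ':'."""
    try:
        with open(path) as f:
            return dict(line.strip().rsplit(":", 1) for line in f if ":" in line)
    except FileNotFoundError:
        return {}

def save_timestamps(path: str, stamps: dict) -> None:
    """Write the same one-entry-per-line format back out."""
    with open(path, "w") as f:
        for filepath, ts in stamps.items():
            f.write(f"{filepath}:{ts}\n")
```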
diff --git a/spaces/AFRAC/NCM_DEMO/app.py b/spaces/AFRAC/NCM_DEMO/app.py
deleted file mode 100644
index 9298bd93cdd66edf35ab0313391514b65078c0dd..0000000000000000000000000000000000000000
--- a/spaces/AFRAC/NCM_DEMO/app.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-
-import subprocess
-
-# Define the command you want to run
-command = 'pip install --no-cache-dir gradio==3.26.0'
-
-# Use subprocess to run the command
-try:
- subprocess.check_call(command, shell=True)
- print("Installation successful!")
-except subprocess.CalledProcessError as e:
- print(f"Installation failed with error: {e}")
-
-import pandas as pd
-import numpy as np
-import tensorflow as tf
-from tensorflow.keras.preprocessing.text import Tokenizer
-from sklearn.preprocessing import OneHotEncoder
-import gradio as gr
-from gradio import components
-
-print("\n\n\n****************************>>>>>>>>> GRADIO VERSION: ",gr.__version__,"\n\n\n")
-
-model = tf.keras.models.load_model("NCM_DEMO.H5", compile=False)
-
-ncm_table = pd.read_csv("https://raw.githubusercontent.com/mfilipak/AFRAC_IA/main/DATASET/TABELA_NCM.CSV", index_col="CODIGO")
-valid_ncms = sorted(ncm_table[ncm_table.index > 1000000].index)
-ncmst = np.array(valid_ncms)
-ncmst = ncmst.reshape([-1,1])
-ohe = OneHotEncoder()
-ohe.fit(ncmst)
-
-tk = Tokenizer(num_words=None, char_level=True, oov_token='UNK')
-tk.word_index = {'UNK': 1, ' ': 2, 'a': 3, 'o': 4, 'e': 5, 'r': 6, 'i': 7, 'c': 8, 'l': 9, 's': 10, 't': 11, 'n': 12, 'm': 13, '0': 14, 'p': 15, 'g': 16, 'd': 17, 'u': 18, 'b': 19, '1': 20, 'f': 21, 'h': 22, '2': 23, '5': 24, 'v': 25, '3': 26, 'k': 27, '4': 28, '.': 29, 'x': 30, '6': 31, '8': 32, '-': 33, '7': 34, '9': 35, 'j': 36, 'z': 37, '/': 38, 'y': 39, 'q': 40, 'w': 41, ',': 42, ':': 43, '(': 44, ')': 45, '_': 46, '#': 47, '+': 48, '*': 49, '%': 50, '"': 51, "'": 52, 'ç': 53, '&': 54, 'ã': 55, ';': 56, ']': 57, '[': 58, '$': 59, 'á': 60, '\\': 61, '|': 62, 'é': 63, 'º': 64, 'ó': 65, '!': 66, '=': 67, 'í': 68, 'ê': 69, '?': 70, '>': 71, '@': 72, '¿': 73, '°': 74, 'ú': 75, '\xa0': 76, 'ô': 77, 'â': 78, '`': 79, 'à': 80, 'õ': 81, 'ï': 82, 'ª': 83, '²': 84, '{': 85, '<': 86, '~': 87, 'è': 88, '§': 89, 'ø': 90, 'ñ': 91, '³': 92, 'û': 93, 'ù': 94, '\xad': 95, '}': 96, '\x81': 97, 'ä': 98, 'ü': 99, '¶': 100, '^': 101, '€': 102, '¹': 103, 'µ': 104, '®': 105, '¡': 106}
-
-def PredictNCM(txt):
- x = [txt[:120].lower() ]
-
- print(txt)
-
- X = np.array(tk.texts_to_sequences([_+(120-len(_))*" " for _ in x]))
- pred = model.predict(X, verbose=0)[0]
- aux = np.argsort(pred)[::-1][:5]
- return {f"{int(valid_ncms[i]):08}":float(pred[i]) for i in aux}, ncm_table.loc[valid_ncms[aux[0]],"DESCRICAO"]
-
-
-demo = gr.Interface(fn=PredictNCM, outputs=[components.Label(label="NCMs"), components.Textbox(label="Descrição do NCM")], title='AFRAC NOTA CERTA',
- inputs=components.Textbox(label="DESCRIÇÃO"),
- examples=["Coca-Cola PET 2l","Pepsi 500ml", "Guaraná Antarctica 2l", "Ração Bocão Premium","Mentos Kiss Morango", "Bombom Sonho de Valsa"])
-demo.launch()
-#display(demo.launch(share=True))
-#demo.close()
-
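`PredictNCM` right-pads the lowercased description to 120 characters and maps each character through the fixed `tk.word_index` vocabulary before inference. A sketch of that preprocessing step, reusing the `tk` defined above (illustrative only, not part of the app):

```python
text = "coca-cola pet 2l"
padded = text[:120].lower() + (120 - len(text[:120])) * " "   # same padding as above
ids = [tk.word_index.get(ch, tk.word_index["UNK"]) for ch in padded]
# tk.texts_to_sequences performs the same per-character lookup; the model
# then receives a single (1, 120) array of character indices.
```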
diff --git a/spaces/AIFILMS/StyleGANEX/utils/common.py b/spaces/AIFILMS/StyleGANEX/utils/common.py
deleted file mode 100644
index 4813fe311ee40720697e4862c5fbfad811d39237..0000000000000000000000000000000000000000
--- a/spaces/AIFILMS/StyleGANEX/utils/common.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import cv2
-import numpy as np
-from PIL import Image
-import matplotlib.pyplot as plt
-
-
-# Log images
-def log_input_image(x, opts):
- if opts.label_nc == 0:
- return tensor2im(x)
- elif opts.label_nc == 1:
- return tensor2sketch(x)
- else:
- return tensor2map(x)
-
-
-def tensor2im(var):
- var = var.cpu().detach().transpose(0, 2).transpose(0, 1).numpy()
- var = ((var + 1) / 2)
- var[var < 0] = 0
- var[var > 1] = 1
- var = var * 255
- return Image.fromarray(var.astype('uint8'))
-
-
-def tensor2map(var):
- mask = np.argmax(var.data.cpu().numpy(), axis=0)
- colors = get_colors()
- mask_image = np.ones(shape=(mask.shape[0], mask.shape[1], 3))
- for class_idx in np.unique(mask):
- mask_image[mask == class_idx] = colors[class_idx]
- mask_image = mask_image.astype('uint8')
- return Image.fromarray(mask_image)
-
-
-def tensor2sketch(var):
- im = var[0].cpu().detach().numpy()
- im = cv2.cvtColor(im, cv2.COLOR_GRAY2BGR)
- im = (im * 255).astype(np.uint8)
- return Image.fromarray(im)
-
-
-# Visualization utils
-def get_colors():
- # currently support up to 19 classes (for the celebs-hq-mask dataset)
- colors = [[0, 0, 0], [204, 0, 0], [76, 153, 0], [204, 204, 0], [51, 51, 255], [204, 0, 204], [0, 255, 255],
- [255, 204, 204], [102, 51, 0], [255, 0, 0], [102, 204, 0], [255, 255, 0], [0, 0, 153], [0, 0, 204],
- [255, 51, 153], [0, 204, 204], [0, 51, 0], [255, 153, 51], [0, 204, 0]]
- return colors
-
-
-def vis_faces(log_hooks):
- display_count = len(log_hooks)
- fig = plt.figure(figsize=(8, 4 * display_count))
- gs = fig.add_gridspec(display_count, 3)
- for i in range(display_count):
- hooks_dict = log_hooks[i]
- fig.add_subplot(gs[i, 0])
- if 'diff_input' in hooks_dict:
- vis_faces_with_id(hooks_dict, fig, gs, i)
- else:
- vis_faces_no_id(hooks_dict, fig, gs, i)
- plt.tight_layout()
- return fig
-
-
-def vis_faces_with_id(hooks_dict, fig, gs, i):
- plt.imshow(hooks_dict['input_face'])
- plt.title('Input\nOut Sim={:.2f}'.format(float(hooks_dict['diff_input'])))
- fig.add_subplot(gs[i, 1])
- plt.imshow(hooks_dict['target_face'])
- plt.title('Target\nIn={:.2f}, Out={:.2f}'.format(float(hooks_dict['diff_views']),
- float(hooks_dict['diff_target'])))
- fig.add_subplot(gs[i, 2])
- plt.imshow(hooks_dict['output_face'])
- plt.title('Output\n Target Sim={:.2f}'.format(float(hooks_dict['diff_target'])))
-
-
-def vis_faces_no_id(hooks_dict, fig, gs, i):
- plt.imshow(hooks_dict['input_face'], cmap="gray")
- plt.title('Input')
- fig.add_subplot(gs[i, 1])
- plt.imshow(hooks_dict['target_face'])
- plt.title('Target')
- fig.add_subplot(gs[i, 2])
- plt.imshow(hooks_dict['output_face'])
- plt.title('Output')
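`tensor2im` expects a single image tensor laid out as (3, H, W) with values in [-1, 1], as the two `transpose` calls and the `(var + 1) / 2` rescale imply. A usage sketch, assuming this module's functions are in scope and torch is installed:

```python
import torch

fake = torch.rand(3, 256, 256) * 2 - 1   # stand-in generator output in [-1, 1]
img = tensor2im(fake)                    # -> PIL.Image with uint8 values in [0, 255]
img.save("preview.png")
```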
diff --git a/spaces/AIatUIUC/CodeLATS/app.py b/spaces/AIatUIUC/CodeLATS/app.py
deleted file mode 100644
index c92b9453e7ab65bb8fdddbdb03369535efc1584c..0000000000000000000000000000000000000000
--- a/spaces/AIatUIUC/CodeLATS/app.py
+++ /dev/null
@@ -1,109 +0,0 @@
-import streamlit as st
-import openai
-import os
-import sys
-import argparse
-sys.path.append('./lats')
-from lats_main import lats_main
-
-st.set_page_config(layout="wide")
-
-# Initialize session state variables if they don't exist.
-if 'response_content' not in st.session_state:
- st.session_state.response_content = None
-
-# Creating main columns for the chat and runtime notifications
-chat_col = st.container()
-
-chat_col.title("CodeLATS")
-description = """This tech demo is an implementation of Language Agent Tree Search (LATS) (https://arxiv.org/abs/2310.04406) built specifically for generating code in the form of python functions. It achieves :green[**state-of-the-art**] results on HumanEval with a :green[**94.4% pass@1 rate**] on GPT-4.
-
-Listed below is an example programming problem (https://leetcode.com/problems/longest-valid-parentheses/description/) to get started with.
-
-```python
-Given a string containing just the characters '(' and ')', return the length of the longest valid (well-formed) parentheses substring
-```
-:red[**NOTE:**] On average a call for a HumanEval or Leetcode question will cost around 5-30 cents on GPT-4, using the default parameters. This value may change depending on problem difficulty and parameters.
-"""
-
-chat_col.markdown(description)
-sidebar = st.sidebar
-# Runtime Section
-runtime_container = st.container()
-
-# Parameters Section
-sidebar.title("**An AI@UIUC Project** (https://uiuc.ai/)")
-parameters_section = sidebar.expander("Parameters", expanded=False)
-tree_width = parameters_section.number_input("Tree Width", min_value=1, max_value=5, value=1)
-tree_depth = parameters_section.number_input("Tree Depth", min_value=1, max_value=8, value=3)
-iterations = parameters_section.number_input("Iterations", min_value=1, max_value=4, value=2)
-key = st.sidebar.text_input("Enter your OpenAI Api Key:", type="password")
-sidebar.markdown('', unsafe_allow_html=True)
-
-with sidebar:
- runtime_container = st.container()
- runtime_container.empty()
-
-runtime_messages = []
-
-def make_args(instruction, tree_depth, tree_width, iterations):
- parser = argparse.ArgumentParser()
-
- parser.add_argument("--strategy", default="mcts", help="Strategy to use")
- parser.add_argument("--language", default="py", help="Programming language")
- parser.add_argument("--model", default="gpt-4", help="Model type")
- parser.add_argument("--max_iters", default=iterations, help="Maximum iterations")
- parser.add_argument("--instruction", default=instruction, help="Instruction text")
- parser.add_argument("--verbose", action="store_true", help="Verbose output")
- parser.add_argument("--is_leetcode", action='store_true',
- help="To run the leetcode benchmark") # Temporary
- parser.add_argument("--n_samples", type=int,
- help="The number of nodes added during expansion", default=tree_width)
- parser.add_argument("--depth", type=int,
- help="Tree depth", default=tree_depth)
- args = parser.parse_args()
- return args
-
-def run_query():
- if user_input:
-
- # Create a new container for each subsequent message
- runtime_container.write("Initiating process...")
-
- # Make it so that prints go to runtime_container writes instead
- old_stdout = sys.stdout
- sys.stdout = runtime_container
-
- with chat_col:
-
- with st.spinner('Running...'):
- args = make_args(user_input, tree_depth, tree_width, iterations)
- # main call
- response = lats_main(args)
-
- sys.stdout = old_stdout
- runtime_container.write("Response fetched.")
- chat_col.markdown('', unsafe_allow_html=True)
- chat_col.write(f"```python\n{response}\n```")
-
- return response
-
-# User input section at the bottom of the page
-with chat_col:
- user_input = st.text_area("Enter your message here:", placeholder="Type your message here...", label_visibility="collapsed")
- button = st.button("Send")
-
- if button:
- fail = False
- if key == "":
- st.warning("Missing OpenAI API Key")
- fail = True
-
- if user_input == "":
- st.warning("Missing a coding problem")
- fail = True
-
- if (not fail):
- openai.api_key = key
- run_query()
-
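`run_query` streams prints into the Streamlit container by swapping `sys.stdout` manually and restoring it afterwards. A sketch of the same effect with the standard library's `contextlib.redirect_stdout`, which restores stdout even if `lats_main` raises (the container works as a stream target because Streamlit containers expose a `write` method):

```python
import contextlib

def run_query_redirected(container, args):
    # Everything lats_main prints is routed to the container; stdout is
    # restored automatically when the block exits, including on exceptions.
    with contextlib.redirect_stdout(container):
        return lats_main(args)
```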
diff --git a/spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_0_ClothesDetection/mmyolo/configs/yolov5/voc/yolov5_x-v61_fast_1xb32-50e_voc.py b/spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_0_ClothesDetection/mmyolo/configs/yolov5/voc/yolov5_x-v61_fast_1xb32-50e_voc.py
deleted file mode 100644
index 2fc4d79f86b40c45d3f7692f32adc88295bbb4a4..0000000000000000000000000000000000000000
--- a/spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_0_ClothesDetection/mmyolo/configs/yolov5/voc/yolov5_x-v61_fast_1xb32-50e_voc.py
+++ /dev/null
@@ -1,26 +0,0 @@
-_base_ = './yolov5_s-v61_fast_1xb64-50e_voc.py'
-
-deepen_factor = 1.33
-widen_factor = 1.25
-train_batch_size_per_gpu = 32
-train_num_workers = 8
-
-# TODO: need to add pretrained_model
-load_from = None
-
-model = dict(
- backbone=dict(
- deepen_factor=deepen_factor,
- widen_factor=widen_factor,
- ),
- neck=dict(
- deepen_factor=deepen_factor,
- widen_factor=widen_factor,
- ),
- bbox_head=dict(head_module=dict(widen_factor=widen_factor)))
-
-train_dataloader = dict(
- batch_size=train_batch_size_per_gpu, num_workers=train_num_workers)
-
-optim_wrapper = dict(
- optimizer=dict(batch_size_per_gpu=train_batch_size_per_gpu))
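`deepen_factor`/`widen_factor` rescale the YOLOv5-s base architecture into the x variant: stage widths are multiplied by `widen_factor` and block counts by `deepen_factor`, with results rounded to hardware-friendly multiples. A rough sketch of the width arithmetic; the exact rounding rules live in the mmyolo model code, so treat this as illustrative:

```python
import math

def make_divisible(x: float, divisor: int = 8) -> int:
    return math.ceil(x / divisor) * divisor

base_channels = [64, 128, 256, 512, 1024]   # assumed YOLOv5 base stage widths
widen_factor = 1.25
print([make_divisible(c * widen_factor) for c in base_channels])
# -> [80, 160, 320, 640, 1280], the YOLOv5-x widths
```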
diff --git a/spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_2_ProfileRecogition/mmpretrain/configs/_base_/models/resnest269.py b/spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_2_ProfileRecogition/mmpretrain/configs/_base_/models/resnest269.py
deleted file mode 100644
index c37626f5678630383693d784d2590f27caa11de2..0000000000000000000000000000000000000000
--- a/spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_2_ProfileRecogition/mmpretrain/configs/_base_/models/resnest269.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# model settings
-model = dict(
- type='ImageClassifier',
- backbone=dict(
- type='ResNeSt',
- depth=269,
- num_stages=4,
- stem_channels=128,
- out_indices=(3, ),
- style='pytorch'),
- neck=dict(type='GlobalAveragePooling'),
- head=dict(
- type='LinearClsHead',
- num_classes=1000,
- in_channels=2048,
- loss=dict(
- type='LabelSmoothLoss',
- label_smooth_val=0.1,
- num_classes=1000,
- reduction='mean',
- loss_weight=1.0),
- topk=(1, 5),
- cal_acc=False),
- train_cfg=dict(augments=dict(type='Mixup', alpha=0.2)),
-)
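`train_cfg` enables Mixup with `alpha=0.2`: each training sample is blended with another, and the loss is computed against the correspondingly blended label. A generic sketch of the standard Mixup rule, not mmpretrain's internal implementation:

```python
import numpy as np

def mixup(x1, y1, x2, y2, alpha=0.2):
    # lam ~ Beta(alpha, alpha); a small alpha keeps most mixes close to one sample
    lam = np.random.beta(alpha, alpha)
    x = lam * x1 + (1 - lam) * x2   # blended image
    y = lam * y1 + (1 - lam) * y2   # blended one-hot label
    return x, y
```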
diff --git a/spaces/AchyuthGamer/OpenGPT/g4f/Provider/needs_auth/HuggingChat.py b/spaces/AchyuthGamer/OpenGPT/g4f/Provider/needs_auth/HuggingChat.py
deleted file mode 100644
index 1d500338ac910d8b5d181eb75d00c9158f795194..0000000000000000000000000000000000000000
--- a/spaces/AchyuthGamer/OpenGPT/g4f/Provider/needs_auth/HuggingChat.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from __future__ import annotations
-
-import json, uuid
-
-from aiohttp import ClientSession
-
-from ...typing import AsyncGenerator
-from ..base_provider import AsyncGeneratorProvider, format_prompt, get_cookies
-
-
-class HuggingChat(AsyncGeneratorProvider):
- url = "https://huggingface.co/chat"
- needs_auth = True
- working = True
- model = "meta-llama/Llama-2-70b-chat-hf"
-
- @classmethod
- async def create_async_generator(
- cls,
- model: str,
- messages: list[dict[str, str]],
- stream: bool = True,
- proxy: str = None,
- cookies: dict = None,
- **kwargs
- ) -> AsyncGenerator:
- model = model if model else cls.model
- if proxy and "://" not in proxy:
- proxy = f"http://{proxy}"
- if not cookies:
- cookies = get_cookies(".huggingface.co")
-
- headers = {
- 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
- }
- async with ClientSession(
- cookies=cookies,
- headers=headers
- ) as session:
- async with session.post(f"{cls.url}/conversation", json={"model": model}, proxy=proxy) as response:
- conversation_id = (await response.json())["conversationId"]
-
- send = {
- "id": str(uuid.uuid4()),
- "inputs": format_prompt(messages),
- "is_retry": False,
- "response_id": str(uuid.uuid4()),
- "web_search": False
- }
- async with session.post(f"{cls.url}/conversation/{conversation_id}", json=send, proxy=proxy) as response:
- async for line in response.content:
- line = json.loads(line[:-1])
- if "type" not in line:
- raise RuntimeError(f"Response: {line}")
- elif line["type"] == "stream":
- yield line["token"]
- elif line["type"] == "finalAnswer":
- break
-
- async with session.delete(f"{cls.url}/conversation/{conversation_id}", proxy=proxy) as response:
- response.raise_for_status()
-
-
- @classmethod
- @property
- def params(cls):
- params = [
- ("model", "str"),
- ("messages", "list[dict[str, str]]"),
- ("stream", "bool"),
- ("proxy", "str"),
- ]
- param = ", ".join([": ".join(p) for p in params])
- return f"g4f.provider.{cls.__name__} supports: ({param})"
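The provider's flow is: create a conversation, POST the prompt, stream `type == "stream"` tokens until `finalAnswer`, then delete the conversation. A usage sketch, assuming valid huggingface.co session cookies are available to `get_cookies`:

```python
import asyncio

async def main():
    messages = [{"role": "user", "content": "Hello!"}]
    async for token in HuggingChat.create_async_generator(
        model="meta-llama/Llama-2-70b-chat-hf", messages=messages
    ):
        print(token, end="", flush=True)

asyncio.run(main())
```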
diff --git a/spaces/Adapter/CoAdapter/ldm/data/utils.py b/spaces/Adapter/CoAdapter/ldm/data/utils.py
deleted file mode 100644
index 1c5696fefff628b31e77d98ec1f05047bb5762f5..0000000000000000000000000000000000000000
--- a/spaces/Adapter/CoAdapter/ldm/data/utils.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import cv2
-import numpy as np
-from torchvision.transforms import transforms
-from torchvision.transforms.functional import to_tensor
-from transformers import CLIPProcessor
-
-from basicsr.utils import img2tensor
-
-
-class AddCannyFreezeThreshold(object):
-
- def __init__(self, low_threshold=100, high_threshold=200):
- self.low_threshold = low_threshold
- self.high_threshold = high_threshold
-
- def __call__(self, sample):
- # sample['jpg'] is PIL image
- x = sample['jpg']
- img = cv2.cvtColor(np.array(x), cv2.COLOR_RGB2BGR)
- canny = cv2.Canny(img, self.low_threshold, self.high_threshold)[..., None]
- sample['canny'] = img2tensor(canny, bgr2rgb=True, float32=True) / 255.
- sample['jpg'] = to_tensor(x)
- return sample
-
-
-class AddCannyRandomThreshold(object):
-
- def __init__(self, low_threshold=100, high_threshold=200, shift_range=50):
- self.low_threshold = low_threshold
- self.high_threshold = high_threshold
- self.threshold_prng = np.random.RandomState()
- self.shift_range = shift_range
-
- def __call__(self, sample):
- # sample['jpg'] is PIL image
- x = sample['jpg']
- img = cv2.cvtColor(np.array(x), cv2.COLOR_RGB2BGR)
- low_threshold = self.low_threshold + self.threshold_prng.randint(-self.shift_range, self.shift_range)
- high_threshold = self.high_threshold + self.threshold_prng.randint(-self.shift_range, self.shift_range)
- canny = cv2.Canny(img, low_threshold, high_threshold)[..., None]
- sample['canny'] = img2tensor(canny, bgr2rgb=True, float32=True) / 255.
- sample['jpg'] = to_tensor(x)
- return sample
-
-
-class AddStyle(object):
-
- def __init__(self, version):
- self.processor = CLIPProcessor.from_pretrained(version)
- self.pil_to_tensor = transforms.ToTensor()
-
- def __call__(self, sample):
- # sample['jpg'] is PIL image
- x = sample['jpg']
- style = self.processor(images=x, return_tensors="pt")['pixel_values'][0]
- sample['style'] = style
- sample['jpg'] = to_tensor(x)
- return sample
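Each transform takes and returns a dict whose `'jpg'` key starts as a PIL image, adds its own key (`'canny'` or `'style'`), and tensorizes `'jpg'`. A composition sketch, assuming it runs in the same module as the classes above and that `example.jpg` exists:

```python
from PIL import Image
from torchvision.transforms import transforms

pipeline = transforms.Compose([
    AddCannyRandomThreshold(low_threshold=100, high_threshold=200, shift_range=50),
])
sample = {"jpg": Image.open("example.jpg").convert("RGB")}
sample = pipeline(sample)
# sample['jpg'] is now a float tensor; sample['canny'] is a 1-channel edge map in [0, 1]
```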
diff --git a/spaces/AgentVerse/agentVerse/agentverse/environments/tasksolving_env/rules/executor/code_test.py b/spaces/AgentVerse/agentVerse/agentverse/environments/tasksolving_env/rules/executor/code_test.py
deleted file mode 100644
index 121aabc679b2bd53a92cdbfbe757d1b88075f123..0000000000000000000000000000000000000000
--- a/spaces/AgentVerse/agentVerse/agentverse/environments/tasksolving_env/rules/executor/code_test.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from __future__ import annotations
-
-import os
-import subprocess
-import multiprocessing
-from typing import TYPE_CHECKING, Any, List, Tuple
-
-from agentverse.logging import get_logger
-from agentverse.agents import ExecutorAgent
-from agentverse.message import ExecutorMessage, SolverMessage
-from agentverse.logging import logger
-
-from . import BaseExecutor, executor_registry
-
-
-def execute_command(command: str, result_list) -> str:
- # TODO: make it more secure
- result = subprocess.run(command, capture_output=True, shell=True, encoding="utf-8")
- result_list.append(f"STDOUT:\n{result.stdout}\nSTDERR:\n{result.stderr}")
- # return f"STDOUT:\n{result.stdout}\nSTDERR:\n{result.stderr}"
-
-
-@executor_registry.register("code-test")
-class CodeTestExecutor(BaseExecutor):
- has_test: dict = {}
- timeout: int = 10
-
- async def astep(
- self,
- agent: ExecutorAgent,
- task_description: str,
- solution: List[SolverMessage],
- *args,
- **kwargs,
- ) -> Any:
- solution = solution[0].content
- os.makedirs("tmp", exist_ok=True)
- self.write_to_file("tmp/main.py", solution)
- manager = multiprocessing.Manager()
- result = manager.list()
- if task_description not in self.has_test:
- response = (await agent.astep(task_description, solution)).content
- self.write_to_file(response["file_path"], response["code"])
- self.has_test[task_description] = f"python {response['file_path']}"
- p = multiprocessing.Process(
- target=execute_command, args=(f"python {response['file_path']}", result)
- )
- p.start()
- p.join(timeout=self.timeout + 1)
- if p.is_alive():
- p.kill()
- # result = execute_command(f"python {response['file_path']}")
- else:
- # result = execute_command(self.has_test[task_description])
- p = multiprocessing.Process(
- target=execute_command, args=(self.has_test[task_description], result)
- )
- p.start()
- p.join(timeout=self.timeout + 1)
- if p.is_alive():
- p.kill()
- if not result:
- result.append("Execution timed out.")
- return [ExecutorMessage(content=result[0], sender="Code Tester")]
-
- def step(
- self,
- agent: ExecutorAgent,
- task_description: str,
- solution: List[SolverMessage],
- *args,
- **kwargs,
- ) -> Any:
- solution = solution[0].content
- os.makedirs("tmp", exist_ok=True)
- self.write_to_file("tmp/main.py", solution)
- manager = multiprocessing.Manager()
- result = manager.list()
- if task_description not in self.has_test:
- response = agent.step(task_description, solution).content
- self.write_to_file(response["file_path"], response["code"])
- self.has_test[task_description] = f"python {response['file_path']}"
- p = multiprocessing.Process(
- target=execute_command, args=(f"python {response['file_path']}", result)
- )
- p.start()
- p.join(timeout=self.timeout + 1)
- if p.is_alive():
- p.kill()
- # result = execute_command(f"python {response['file_path']}")
- else:
- # result = execute_command(self.has_test[task_description])
- p = multiprocessing.Process(
- target=execute_command, args=(self.has_test[task_description], result)
- )
- p.start()
- p.join(timeout=self.timeout + 1)
- if p.is_alive():
- p.kill()
- if not result:
- result.append("Execution timed out.")
- return [ExecutorMessage(content=result[0], sender="Code Tester")]
-
- def write_to_file(self, file_name, file_content):
- # TODO: generalize this method to a common tool
- try:
- with open(file_name, "w") as f:
- f.write(file_content)
- f.flush()
- except Exception:
- logger.error(f"Failed to write to {file_name}")
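Both `step` and `astep` share the same run-then-kill pattern: execute the test command in a child process, wait `timeout + 1` seconds, and hard-kill it if it is still alive. The pattern isolated as one helper, a sketch reusing `execute_command` from this module:

```python
import multiprocessing

def run_with_timeout(command: str, timeout: int = 10) -> str:
    manager = multiprocessing.Manager()
    result = manager.list()
    p = multiprocessing.Process(target=execute_command, args=(command, result))
    p.start()
    p.join(timeout=timeout + 1)
    if p.is_alive():
        p.kill()   # runaway test -> hard kill, matching the methods above
    return result[0] if result else "Execution timed out."
```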
diff --git a/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/clickoutside/ClickOutside.js b/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/clickoutside/ClickOutside.js
deleted file mode 100644
index 5527388e1dbf20ae4b9197790763693a0b740311..0000000000000000000000000000000000000000
--- a/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/clickoutside/ClickOutside.js
+++ /dev/null
@@ -1,2 +0,0 @@
-import ClickOutside from '../../../plugins/clickoutside.js'
-export default ClickOutside;
\ No newline at end of file
diff --git a/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/gridtable/SetItems.js b/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/gridtable/SetItems.js
deleted file mode 100644
index 288da02b301d6f4bd0db7fa8b7933b18c35ea4c8..0000000000000000000000000000000000000000
--- a/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/gridtable/SetItems.js
+++ /dev/null
@@ -1,16 +0,0 @@
-var SetItems = function (items) {
- if (items === undefined) {
- this.items = [];
- } else {
- this.items = items;
- }
-
- var table = this.childrenMap.child;
- table.setCellsCount(this.items.length);
- table.updateTable(true);
-
- this.resizeController();
- return this;
-}
-
-export default SetItems;
\ No newline at end of file
diff --git a/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/modal/Modal.js b/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/modal/Modal.js
deleted file mode 100644
index a7fc717714fd098684fd8990ad4f72e16a323cdf..0000000000000000000000000000000000000000
--- a/spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/modal/Modal.js
+++ /dev/null
@@ -1,2 +0,0 @@
-import { ModalBehavoir, Modal, ModalPromise, ModalClose } from '../../../plugins/modal.js';
-export { ModalBehavoir, Modal, ModalPromise, ModalClose };
\ No newline at end of file
diff --git a/spaces/AhmedM20/Email_Marketing_Content_Generator/app.py b/spaces/AhmedM20/Email_Marketing_Content_Generator/app.py
deleted file mode 100644
index a7f145ba3d6e53cbffcaed8b940a43931e8e6077..0000000000000000000000000000000000000000
--- a/spaces/AhmedM20/Email_Marketing_Content_Generator/app.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import pip
-import os
-SECRET_TOKEN = os.getenv("SECRET_TOKEN")
-
-def install(package):
- if hasattr(pip, 'main'):
- pip.main(['install', package])
- else:
- pip._internal.main(['install', package])
-
-# Install the cohere client at startup (pip bootstrap for the hosted Space)
-if __name__ == '__main__':
-    install('cohere')
-import cohere
-import gradio as gr
-co = cohere.Client(SECRET_TOKEN) # This is your trial API key
-
-def write_email(tone="",goal="",industry="",text="",audience="",other=""):
- if goal=="Other":
- goal=other
- if audience=="" and industry=="":
- print(f'write 5 different {tone} emails to {goal} {text}')
- Message=f'write 5 different {tone} emails to {goal} {text}'
- elif audience=="":
- print(f'write 5 different {tone} emails to {goal} in the {industry} industry {text}')
- Message=f'write 5 different {tone} emails to {goal} in the {industry} industry {text}'
- elif industry=="":
- print(f'write 5 different {tone} emails for {audience} to {goal} {text}')
- Message=f'write 5 different {tone} emails for {audience} to {goal} {text}'
- else:
- print(f'write 5 different {tone} emails for {audience} to {goal} in the {industry} industry {text}')
- Message=f'write 5 different {tone} emails for {audience} to {goal} in the {industry} industry {text}'
-
- response = co.generate(
- model='command',
- prompt=Message,
- max_tokens=1208,
- temperature=1,
- k=0,
- stop_sequences=[],
- return_likelihoods='NONE')
- return(response.generations[0].text)
-
-
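`write_email` picks one of four prompt templates, depending on which of the optional audience/industry fields were filled in, and sends the result to Cohere's `generate` endpoint. A hypothetical call (it needs a valid Cohere API key in `SECRET_TOKEN`):

```python
emails = write_email(
    tone="Formal",
    goal="Recover churned customers",
    industry="SaaS",
    text="Mention the 10-dollar comeback discount.",
    audience="customers who cancelled in the last 90 days",
)
print(emails)  # five draft emails, returned as one block of generated text
```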
-
-with gr.Blocks() as demo:
- def other_field(choice):
- if choice != "Other":
- return gr.update(visible=False)
- else:
- return gr.update(visible=True)
- gr.Markdown("Create your marketing emails with AI")
- inp1 = gr.Radio(
- ["Convince to buy a product", "Recover churned customers", "Teach a new concept","Onboard users","Share product updates","Other"], value="Convince to buy a product",label = "Campagin goal"
- )
- other=gr.Textbox(visible=False,placeholder="Please enter other text",label = "Other:")
- inp1.input(other_field,inp1, other)
-    inp2 = gr.Radio(
-        ["Formal", "Semi-formal", "Informal"], value="Formal", label="Brand Tone"
-    )
-    inp3 = gr.Textbox(placeholder="Example: marketing agency", label="Industry")
-    inp4 = gr.Textbox(placeholder="Example: Females aged between 18 and 30", label="Target audience")
-    inp5 = gr.Textbox(placeholder="Example: I am offering a 10-dollar discount for customers who cancelled their subscription and want to find a way to bring them back", label="Tell us more about the email you want to send")
- btn = gr.Button("Generate 🚀")
-    out = gr.Textbox(label="Here are your 5 generated emails")
-    btn.click(fn=write_email, inputs=[inp2, inp1, inp3, inp5, inp4, other], outputs=out)
-
-demo.launch(debug = True)
\ No newline at end of file
diff --git a/spaces/AkitoP/umamusume_bert_vits2/text/japanese.py b/spaces/AkitoP/umamusume_bert_vits2/text/japanese.py
deleted file mode 100644
index 7c1817ec91b6c438c7e12c32d7facafd13f04741..0000000000000000000000000000000000000000
--- a/spaces/AkitoP/umamusume_bert_vits2/text/japanese.py
+++ /dev/null
@@ -1,704 +0,0 @@
-# Convert Japanese text to phonemes that are
-# compatible with Julius https://github.com/julius-speech/segmentation-kit
-import re
-import unicodedata
-
-from transformers import AutoTokenizer
-
-from text import punctuation, symbols
-
-try:
- import MeCab
-except ImportError as e:
- raise ImportError("Japanese requires mecab-python3 and unidic-lite.") from e
-from num2words import num2words
-
-_CONVRULES = [
- # Conversion of 2 letters
- "アァ/ a a",
- "イィ/ i i",
- "イェ/ i e",
- "イャ/ y a",
- "ウゥ/ u:",
- "エェ/ e e",
- "オォ/ o:",
- "カァ/ k a:",
- "キィ/ k i:",
- "クゥ/ k u:",
- "クャ/ ky a",
- "クュ/ ky u",
- "クョ/ ky o",
- "ケェ/ k e:",
- "コォ/ k o:",
- "ガァ/ g a:",
- "ギィ/ g i:",
- "グゥ/ g u:",
- "グャ/ gy a",
- "グュ/ gy u",
- "グョ/ gy o",
- "ゲェ/ g e:",
- "ゴォ/ g o:",
- "サァ/ s a:",
- "シィ/ sh i:",
- "スゥ/ s u:",
- "スャ/ sh a",
- "スュ/ sh u",
- "スョ/ sh o",
- "セェ/ s e:",
- "ソォ/ s o:",
- "ザァ/ z a:",
- "ジィ/ j i:",
- "ズゥ/ z u:",
- "ズャ/ zy a",
- "ズュ/ zy u",
- "ズョ/ zy o",
- "ゼェ/ z e:",
- "ゾォ/ z o:",
- "タァ/ t a:",
- "チィ/ ch i:",
- "ツァ/ ts a",
- "ツィ/ ts i",
- "ツゥ/ ts u:",
- "ツャ/ ch a",
- "ツュ/ ch u",
- "ツョ/ ch o",
- "ツェ/ ts e",
- "ツォ/ ts o",
- "テェ/ t e:",
- "トォ/ t o:",
- "ダァ/ d a:",
- "ヂィ/ j i:",
- "ヅゥ/ d u:",
- "ヅャ/ zy a",
- "ヅュ/ zy u",
- "ヅョ/ zy o",
- "デェ/ d e:",
- "ドォ/ d o:",
- "ナァ/ n a:",
- "ニィ/ n i:",
- "ヌゥ/ n u:",
- "ヌャ/ ny a",
- "ヌュ/ ny u",
- "ヌョ/ ny o",
- "ネェ/ n e:",
- "ノォ/ n o:",
- "ハァ/ h a:",
- "ヒィ/ h i:",
- "フゥ/ f u:",
- "フャ/ hy a",
- "フュ/ hy u",
- "フョ/ hy o",
- "ヘェ/ h e:",
- "ホォ/ h o:",
- "バァ/ b a:",
- "ビィ/ b i:",
- "ブゥ/ b u:",
- "フャ/ hy a",
- "ブュ/ by u",
- "フョ/ hy o",
- "ベェ/ b e:",
- "ボォ/ b o:",
- "パァ/ p a:",
- "ピィ/ p i:",
- "プゥ/ p u:",
- "プャ/ py a",
- "プュ/ py u",
- "プョ/ py o",
- "ペェ/ p e:",
- "ポォ/ p o:",
- "マァ/ m a:",
- "ミィ/ m i:",
- "ムゥ/ m u:",
- "ムャ/ my a",
- "ムュ/ my u",
- "ムョ/ my o",
- "メェ/ m e:",
- "モォ/ m o:",
- "ヤァ/ y a:",
- "ユゥ/ y u:",
- "ユャ/ y a:",
- "ユュ/ y u:",
- "ユョ/ y o:",
- "ヨォ/ y o:",
- "ラァ/ r a:",
- "リィ/ r i:",
- "ルゥ/ r u:",
- "ルャ/ ry a",
- "ルュ/ ry u",
- "ルョ/ ry o",
- "レェ/ r e:",
- "ロォ/ r o:",
- "ワァ/ w a:",
- "ヲォ/ o:",
- "ディ/ d i",
- "デェ/ d e:",
- "デャ/ dy a",
- "デュ/ dy u",
- "デョ/ dy o",
- "ティ/ t i",
- "テェ/ t e:",
- "テャ/ ty a",
- "テュ/ ty u",
- "テョ/ ty o",
- "スィ/ s i",
- "ズァ/ z u a",
- "ズィ/ z i",
- "ズゥ/ z u",
- "ズャ/ zy a",
- "ズュ/ zy u",
- "ズョ/ zy o",
- "ズェ/ z e",
- "ズォ/ z o",
- "キャ/ ky a",
- "キュ/ ky u",
- "キョ/ ky o",
- "シャ/ sh a",
- "シュ/ sh u",
- "シェ/ sh e",
- "ショ/ sh o",
- "チャ/ ch a",
- "チュ/ ch u",
- "チェ/ ch e",
- "チョ/ ch o",
- "トゥ/ t u",
- "トャ/ ty a",
- "トュ/ ty u",
- "トョ/ ty o",
- "ドァ/ d o a",
- "ドゥ/ d u",
- "ドャ/ dy a",
- "ドュ/ dy u",
- "ドョ/ dy o",
- "ドォ/ d o:",
- "ニャ/ ny a",
- "ニュ/ ny u",
- "ニョ/ ny o",
- "ヒャ/ hy a",
- "ヒュ/ hy u",
- "ヒョ/ hy o",
- "ミャ/ my a",
- "ミュ/ my u",
- "ミョ/ my o",
- "リャ/ ry a",
- "リュ/ ry u",
- "リョ/ ry o",
- "ギャ/ gy a",
- "ギュ/ gy u",
- "ギョ/ gy o",
- "ヂェ/ j e",
- "ヂャ/ j a",
- "ヂュ/ j u",
- "ヂョ/ j o",
- "ジェ/ j e",
- "ジャ/ j a",
- "ジュ/ j u",
- "ジョ/ j o",
- "ビャ/ by a",
- "ビュ/ by u",
- "ビョ/ by o",
- "ピャ/ py a",
- "ピュ/ py u",
- "ピョ/ py o",
- "ウァ/ u a",
- "ウィ/ w i",
- "ウェ/ w e",
- "ウォ/ w o",
- "ファ/ f a",
- "フィ/ f i",
- "フゥ/ f u",
- "フャ/ hy a",
- "フュ/ hy u",
- "フョ/ hy o",
- "フェ/ f e",
- "フォ/ f o",
- "ヴァ/ b a",
- "ヴィ/ b i",
- "ヴェ/ b e",
- "ヴォ/ b o",
- "ヴュ/ by u",
- "アー/ a:",
- "イー/ i:",
- "ウー/ u:",
- "エー/ e:",
- "オー/ o:",
- "カー/ k a:",
- "キー/ k i:",
- "クー/ k u:",
- "ケー/ k e:",
- "コー/ k o:",
- "サー/ s a:",
- "シー/ sh i:",
- "スー/ s u:",
- "セー/ s e:",
- "ソー/ s o:",
- "ター/ t a:",
- "チー/ ch i:",
- "ツー/ ts u:",
- "テー/ t e:",
- "トー/ t o:",
- "ナー/ n a:",
- "ニー/ n i:",
- "ヌー/ n u:",
- "ネー/ n e:",
- "ノー/ n o:",
- "ハー/ h a:",
- "ヒー/ h i:",
- "フー/ f u:",
- "ヘー/ h e:",
- "ホー/ h o:",
- "マー/ m a:",
- "ミー/ m i:",
- "ムー/ m u:",
- "メー/ m e:",
- "モー/ m o:",
- "ラー/ r a:",
- "リー/ r i:",
- "ルー/ r u:",
- "レー/ r e:",
- "ロー/ r o:",
- "ガー/ g a:",
- "ギー/ g i:",
- "グー/ g u:",
- "ゲー/ g e:",
- "ゴー/ g o:",
- "ザー/ z a:",
- "ジー/ j i:",
- "ズー/ z u:",
- "ゼー/ z e:",
- "ゾー/ z o:",
- "ダー/ d a:",
- "ヂー/ j i:",
- "ヅー/ z u:",
- "デー/ d e:",
- "ドー/ d o:",
- "バー/ b a:",
- "ビー/ b i:",
- "ブー/ b u:",
- "ベー/ b e:",
- "ボー/ b o:",
- "パー/ p a:",
- "ピー/ p i:",
- "プー/ p u:",
- "ペー/ p e:",
- "ポー/ p o:",
- "ヤー/ y a:",
- "ユー/ y u:",
- "ヨー/ y o:",
- "ワー/ w a:",
- "ヰー/ i:",
- "ヱー/ e:",
- "ヲー/ o:",
- "ヴー/ b u:",
- # Conversion of 1 letter
- "ア/ a",
- "イ/ i",
- "ウ/ u",
- "エ/ e",
- "オ/ o",
- "カ/ k a",
- "キ/ k i",
- "ク/ k u",
- "ケ/ k e",
- "コ/ k o",
- "サ/ s a",
- "シ/ sh i",
- "ス/ s u",
- "セ/ s e",
- "ソ/ s o",
- "タ/ t a",
- "チ/ ch i",
- "ツ/ ts u",
- "テ/ t e",
- "ト/ t o",
- "ナ/ n a",
- "ニ/ n i",
- "ヌ/ n u",
- "ネ/ n e",
- "ノ/ n o",
- "ハ/ h a",
- "ヒ/ h i",
- "フ/ f u",
- "ヘ/ h e",
- "ホ/ h o",
- "マ/ m a",
- "ミ/ m i",
- "ム/ m u",
- "メ/ m e",
- "モ/ m o",
- "ラ/ r a",
- "リ/ r i",
- "ル/ r u",
- "レ/ r e",
- "ロ/ r o",
- "ガ/ g a",
- "ギ/ g i",
- "グ/ g u",
- "ゲ/ g e",
- "ゴ/ g o",
- "ザ/ z a",
- "ジ/ j i",
- "ズ/ z u",
- "ゼ/ z e",
- "ゾ/ z o",
- "ダ/ d a",
- "ヂ/ j i",
- "ヅ/ z u",
- "デ/ d e",
- "ド/ d o",
- "バ/ b a",
- "ビ/ b i",
- "ブ/ b u",
- "ベ/ b e",
- "ボ/ b o",
- "パ/ p a",
- "ピ/ p i",
- "プ/ p u",
- "ペ/ p e",
- "ポ/ p o",
- "ヤ/ y a",
- "ユ/ y u",
- "ヨ/ y o",
- "ワ/ w a",
- "ヰ/ i",
- "ヱ/ e",
- "ヲ/ o",
- "ン/ N",
- "ッ/ q",
- "ヴ/ b u",
-    "ー/:",  # NOTE: this rule has no effect
- # Try converting broken text
- "ァ/ a",
- "ィ/ i",
- "ゥ/ u",
- "ェ/ e",
- "ォ/ o",
- "ヮ/ w a",
- "ォ/ o",
- # Symbols
- "、/ ,",
- "。/ .",
- "!/ !",
- "?/ ?",
- "・/ ,",
-]
-
-_COLON_RX = re.compile(":+")
-_REJECT_RX = re.compile("[^ a-zA-Z:,.?]")
-
-
-def _makerulemap():
-    rules = [tuple(x.split("/")) for x in _CONVRULES]
-    return tuple({k: v for k, v in rules if len(k) == i} for i in (1, 2))
-
-
-_RULEMAP1, _RULEMAP2 = _makerulemap()
-
-
-def kata2phoneme(text: str) -> str:
- """Convert katakana text to phonemes."""
- text = text.strip()
- res = []
- while text:
- if len(text) >= 2:
- x = _RULEMAP2.get(text[:2])
- if x is not None:
- text = text[2:]
- res += x.split(" ")[1:]
- continue
- x = _RULEMAP1.get(text[0])
- if x is not None:
- text = text[1:]
- res += x.split(" ")[1:]
- continue
- res.append(text[0])
- text = text[1:]
- # res = _COLON_RX.sub(":", res)
- return res
-
-
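`kata2phoneme` is a greedy longest-match-first scanner: it tries the two-character rules (`_RULEMAP2`) before the single-character rules (`_RULEMAP1`), and passes unknown characters through unchanged. A minimal sketch of the same loop with a toy rule table (the rules here are illustrative, not the full `_CONVRULES` list):

```python
# Toy illustration of the greedy two-then-one character lookup in kata2phoneme.
RULES2 = {"キャ": "ky a", "シャ": "sh a"}       # two-character rules, tried first
RULES1 = {"ア": "a", "キ": "k i", "ヤ": "y a"}  # single-character fallback

def kata2phoneme_sketch(text: str) -> list:
    res = []
    while text:
        if len(text) >= 2 and text[:2] in RULES2:
            res += RULES2[text[:2]].split(" ")
            text = text[2:]
        elif text[0] in RULES1:
            res += RULES1[text[0]].split(" ")
            text = text[1:]
        else:
            res.append(text[0])  # unknown characters pass through unchanged
            text = text[1:]
    return res

print(kata2phoneme_sketch("キャア"))  # ['ky', 'a', 'a']
```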
-_KATAKANA = "".join(chr(ch) for ch in range(ord("ァ"), ord("ン") + 1))
-_HIRAGANA = "".join(chr(ch) for ch in range(ord("ぁ"), ord("ん") + 1))
-_HIRA2KATATRANS = str.maketrans(_HIRAGANA, _KATAKANA)
-
-
-def hira2kata(text: str) -> str:
- text = text.translate(_HIRA2KATATRANS)
- return text.replace("う゛", "ヴ")
-
-
-_SYMBOL_TOKENS = set(list("・、。?!"))
-_NO_YOMI_TOKENS = set(list("「」『』―()[][]"))
-_TAGGER = MeCab.Tagger()
-
-
-def text2kata(text: str) -> str:
- parsed = _TAGGER.parse(text)
- res = []
- for line in parsed.split("\n"):
- if line == "EOS":
- break
- parts = line.split("\t")
-
- word, yomi = parts[0], parts[1]
- if yomi:
- res.append(yomi)
- else:
- if word in _SYMBOL_TOKENS:
- res.append(word)
- elif word in ("っ", "ッ"):
- res.append("ッ")
- elif word in _NO_YOMI_TOKENS:
- pass
- else:
- res.append(word)
- return hira2kata("".join(res))
-
-
-def text2sep_kata(text: str) -> tuple[list, list]:
- parsed = _TAGGER.parse(text)
- res = []
- sep = []
- for line in parsed.split("\n"):
- if line == "EOS":
- break
- parts = line.split("\t")
-
- word, yomi = parts[0], parts[1]
- if yomi:
- res.append(yomi)
- else:
- if word in _SYMBOL_TOKENS:
- res.append(word)
- elif word in ("っ", "ッ"):
- res.append("ッ")
- elif word in _NO_YOMI_TOKENS:
- pass
- else:
- res.append(word)
- sep.append(word)
- return sep, [hira2kata(i) for i in res]
-
-
-_ALPHASYMBOL_YOMI = {
- "#": "シャープ",
- "%": "パーセント",
- "&": "アンド",
- "+": "プラス",
- "-": "マイナス",
- ":": "コロン",
- ";": "セミコロン",
- "<": "小なり",
- "=": "イコール",
- ">": "大なり",
- "@": "アット",
- "a": "エー",
- "b": "ビー",
- "c": "シー",
- "d": "ディー",
- "e": "イー",
- "f": "エフ",
- "g": "ジー",
- "h": "エイチ",
- "i": "アイ",
- "j": "ジェー",
- "k": "ケー",
- "l": "エル",
- "m": "エム",
- "n": "エヌ",
- "o": "オー",
- "p": "ピー",
- "q": "キュー",
- "r": "アール",
- "s": "エス",
- "t": "ティー",
- "u": "ユー",
- "v": "ブイ",
- "w": "ダブリュー",
- "x": "エックス",
- "y": "ワイ",
- "z": "ゼット",
- "α": "アルファ",
- "β": "ベータ",
- "γ": "ガンマ",
- "δ": "デルタ",
- "ε": "イプシロン",
- "ζ": "ゼータ",
- "η": "イータ",
- "θ": "シータ",
- "ι": "イオタ",
- "κ": "カッパ",
- "λ": "ラムダ",
- "μ": "ミュー",
- "ν": "ニュー",
- "ξ": "クサイ",
- "ο": "オミクロン",
- "π": "パイ",
- "ρ": "ロー",
- "σ": "シグマ",
- "τ": "タウ",
- "υ": "ウプシロン",
- "φ": "ファイ",
- "χ": "カイ",
- "ψ": "プサイ",
- "ω": "オメガ",
-}
-
-
-_NUMBER_WITH_SEPARATOR_RX = re.compile("[0-9]{1,3}(,[0-9]{3})+")
-_CURRENCY_MAP = {"$": "ドル", "¥": "円", "£": "ポンド", "€": "ユーロ"}
-_CURRENCY_RX = re.compile(r"([$¥£€])([0-9.]*[0-9])")
-_NUMBER_RX = re.compile(r"[0-9]+(\.[0-9]+)?")
-
-
-def japanese_convert_numbers_to_words(text: str) -> str:
- res = _NUMBER_WITH_SEPARATOR_RX.sub(lambda m: m[0].replace(",", ""), text)
- res = _CURRENCY_RX.sub(lambda m: m[2] + _CURRENCY_MAP.get(m[1], m[1]), res)
- res = _NUMBER_RX.sub(lambda m: num2words(m[0], lang="ja"), res)
- return res
-
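A quick sanity check of the intended behaviour (assuming the `num2words` Japanese backend is installed): the three regexes first strip thousands separators, then move the currency symbol after the amount as a word, and finally spell the number out.

```python
# "$1,200です" -> "$1200です"     (_NUMBER_WITH_SEPARATOR_RX drops the comma)
#             -> "1200ドルです"   (_CURRENCY_RX moves the symbol after the amount)
#             -> "千二百ドルです"  (num2words spells the number out in Japanese)
print(japanese_convert_numbers_to_words("$1,200です"))  # expected: 千二百ドルです
```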
-
-def japanese_convert_alpha_symbols_to_words(text: str) -> str:
- return "".join([_ALPHASYMBOL_YOMI.get(ch, ch) for ch in text.lower()])
-
-
-def japanese_text_to_phonemes(text: str) -> str:
- """Convert Japanese text to phonemes."""
- res = unicodedata.normalize("NFKC", text)
- res = japanese_convert_numbers_to_words(res)
- # res = japanese_convert_alpha_symbols_to_words(res)
- res = text2kata(res)
- res = kata2phoneme(res)
- return res
-
-
-def is_japanese_character(char):
-    # Unicode ranges covering the Japanese writing system
-    japanese_ranges = [
-        (0x3040, 0x309F),  # hiragana
-        (0x30A0, 0x30FF),  # katakana
-        (0x4E00, 0x9FFF),  # kanji (CJK Unified Ideographs)
-        (0x3400, 0x4DBF),  # CJK Extension A
-        (0x20000, 0x2A6DF),  # CJK Extension B
-        # further CJK extension ranges can be added here if needed
-    ]
-
-    # Convert the character to its integer Unicode code point
-    char_code = ord(char)
-
-    # Check whether the character falls inside any Japanese range
- for start, end in japanese_ranges:
- if start <= char_code <= end:
- return True
-
- return False
-
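`is_japanese_character` is a plain code-point range check, so it accepts kana and kanji but rejects Latin letters:

```python
print(is_japanese_character("あ"))  # True  (hiragana)
print(is_japanese_character("漢"))  # True  (CJK Unified Ideograph)
print(is_japanese_character("A"))   # False (Latin letter)
```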
-
-rep_map = {
- ":": ",",
- ";": ",",
- ",": ",",
- "。": ".",
- "!": "!",
- "?": "?",
- "\n": ".",
- "·": ",",
- "、": ",",
- "…": "...",
-}
-
-
-def replace_punctuation(text):
- pattern = re.compile("|".join(re.escape(p) for p in rep_map.keys()))
-
- replaced_text = pattern.sub(lambda x: rep_map[x.group()], text)
-
- replaced_text = re.sub(
- r"[^\u3040-\u309F\u30A0-\u30FF\u4E00-\u9FFF\u3400-\u4DBF"
- + "".join(punctuation)
- + r"]+",
- "",
- replaced_text,
- )
-
- return replaced_text
-
-
-def text_normalize(text):
- res = unicodedata.normalize("NFKC", text)
- res = japanese_convert_numbers_to_words(res)
- # res = "".join([i for i in res if is_japanese_character(i)])
- res = replace_punctuation(res)
- return res
-
-
-def distribute_phone(n_phone, n_word):
- phones_per_word = [0] * n_word
- for task in range(n_phone):
- min_tasks = min(phones_per_word)
- min_index = phones_per_word.index(min_tasks)
- phones_per_word[min_index] += 1
- return phones_per_word
-
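`distribute_phone` splits `n_phone` phonemes across `n_word` tokens as evenly as possible by always incrementing the bucket that currently has the fewest phonemes:

```python
# Seven phonemes over three words: each phoneme goes to the smallest bucket,
# so earlier buckets absorb the remainder.
print(distribute_phone(7, 3))   # [3, 2, 2]
print(distribute_phone(4, 4))   # [1, 1, 1, 1]
```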
-
-tokenizer = AutoTokenizer.from_pretrained("./bert/bert-base-japanese-v3")
-
-
-def g2p(norm_text):
- sep_text, sep_kata = text2sep_kata(norm_text)
- sep_tokenized = [tokenizer.tokenize(i) for i in sep_text]
- sep_phonemes = [kata2phoneme(i) for i in sep_kata]
-    # Error handling: words MeCab does not recognize propagate all the way here and blow up
-    # on the assertion below. So far only extremely rare, obscure words seem to trigger this.
- for i in sep_phonemes:
- for j in i:
- assert j in symbols, (sep_text, sep_kata, sep_phonemes)
-
- word2ph = []
- for token, phoneme in zip(sep_tokenized, sep_phonemes):
- phone_len = len(phoneme)
- word_len = len(token)
-
- aaa = distribute_phone(phone_len, word_len)
- word2ph += aaa
- phones = ["_"] + [j for i in sep_phonemes for j in i] + ["_"]
- tones = [0 for i in phones]
- word2ph = [1] + word2ph + [1]
- return phones, tones, word2ph
-
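`g2p` returns three parallel structures: the phoneme list padded with `"_"` boundary markers, a zero tone per phoneme, and `word2ph`, which records how many phonemes each BERT token received (plus one slot per boundary marker). A hypothetical invocation (it requires MeCab, the local `./bert/bert-base-japanese-v3` tokenizer, and the project's `text` package) should satisfy these invariants:

```python
phones, tones, word2ph = g2p(text_normalize("こんにちは"))
assert phones[0] == "_" and phones[-1] == "_"  # boundary markers
assert len(tones) == len(phones)               # one (zero) tone per phoneme
assert sum(word2ph) == len(phones)             # word2ph partitions the phoneme list
```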
-if __name__ == "__main__":
- tokenizer = AutoTokenizer.from_pretrained("./bert/bert-base-japanese-v3")
- text = "だったら私、スズカさんと同じチームに入りたいです! スズカさんの走りを毎日近くで、なんなら真横から見ていたいので!"
- #print(_TAGGER.parse(text))
- # nodes = [{"surface": "こんにちは", "pos": "感動詞:*:*:*", "pron": "コンニチワ", "c_type": "*", "c_form": "*", "accent_type": 0, "accent_con_type": "-1", "chain_flag": -1}]
- nodes = [{"surface":"こんにちは","pron": "コンニチワ","pos": "感動詞:*:*:*",}]
- from text.japanese_bert import get_bert_feature
- import pyopenjtalk
- from marine.predict import Predictor
- from marine.utils.openjtalk_util import convert_njd_feature_to_marine_feature
- text = text_normalize(text)
- NJD_NODES = pyopenjtalk.run_frontend(text)
- predictor = Predictor()
- # important_info = [{"string":i["string"],"pron":i["pron"],"acc":i["acc"]}for i in pyopenjtalk.estimate_accent(NJD_NODES)]
- print(text)
-
- marine_feature = convert_njd_feature_to_marine_feature(NJD_NODES)
- results = predictor.predict([marine_feature])
- for mora,acc in zip(results["mora"][0],results["accent_status"][0]):
- print(f"{mora}:{acc}")
- # for i in pyopenjtalk.estimate_accent(NJD_NODES):
- # print(f"{i['string']}:{i['pron']}:{i['acc']}")
-# info = pyopenjtalk.extract_fullcontext(text,run_marine=True)
-# info_nomarine = pyopenjtalk.extract_fullcontext(text,run_marine=False)
-# # nodes = pyopenjtalk
-# # print(info)
-# for i,j in zip(info,info_nomarine):
-# print(i)
-# print(j)
-# print("\n")
- # predictor = Predictor()
- #print(pyopenjtalk.estimate_accent(text))
- # output = predictor.predict([nodes],accent_represent_mode="high_low")
- #print(output)
- # phones, tones, word2ph = g2p(text)
- # bert = get_bert_feature(text, word2ph)
-
- # print(phones, tones, word2ph, bert.shape)
diff --git a/spaces/Akmyradov/TurkmenTTSweSTT/uroman/README.md b/spaces/Akmyradov/TurkmenTTSweSTT/uroman/README.md
deleted file mode 100644
index 6a0a40f6d4ebda9041d23efe0345340b7da9d4b8..0000000000000000000000000000000000000000
--- a/spaces/Akmyradov/TurkmenTTSweSTT/uroman/README.md
+++ /dev/null
@@ -1,165 +0,0 @@
-# uroman
-
-*uroman* is a *universal romanizer*. It converts text in any script to the Latin alphabet.
-
-Version: 1.2.8
-Release date: April 23, 2021
-Author: Ulf Hermjakob, USC Information Sciences Institute
-
-
-### Usage
-```bash
-$ uroman.pl [-l <lang-code>] [--chart] [--no-cache] < STDIN
-    where the optional <lang-code> is a 3-letter language code, e.g. ara, bel, bul, deu, ell, eng, fas,
-       grc, heb, kaz, kir, lav, lit, mkd, mkd2, oss, pnt, pus, rus, srp, srp2, tur, uig, ukr, yid.
- --chart specifies chart output (in JSON format) to represent alternative romanizations.
- --no-cache disables caching.
-```
-### Examples
-```bash
-$ bin/uroman.pl < text/zho.txt
-$ bin/uroman.pl -l tur < text/tur.txt
-$ bin/uroman.pl -l heb --chart < text/heb.txt
-$ bin/uroman.pl < test/multi-script.txt > test/multi-script.uroman.txt
-```
-
-Identifying the input as Arabic, Belarusian, Bulgarian, English, Farsi, German,
-Ancient Greek, Modern Greek, Pontic Greek, Hebrew, Kazakh, Kyrgyz, Latvian,
-Lithuanian, North Macedonian, Russian, Serbian, Turkish, Ukrainian, Uyghur or
-Yiddish will improve romanization for those languages, as some of their letters
-have different sound values than in other languages using the same script
-(e.g. French for the Latin script, Russian for Cyrillic, Hebrew for the Hebrew script).
-The language option has no effect for other languages in this version.
-
-### Bibliography
-Ulf Hermjakob, Jonathan May, and Kevin Knight. 2018. Out-of-the-box universal romanization tool uroman. In Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics, Demo Track. ACL-2018 Best Demo Paper Award. [Paper in ACL Anthology](https://www.aclweb.org/anthology/P18-4003) | [Poster](https://www.isi.edu/~ulf/papers/poster-uroman-acl2018.pdf) | [BibTex](https://www.aclweb.org/anthology/P18-4003.bib)
-
-### Change History
-Changes in version 1.2.8
- * Updated to Unicode 13.0 (2021), which supports several new scripts (10% larger UnicodeData.txt).
- * Improved support for Georgian.
- * Preserve various symbols (as opposed to mapping to the symbols' names).
- * Various small improvements.
-
-Changes in version 1.2.7
- * Improved support for Pashto.
-
-Changes in version 1.2.6
- * Improved support for Ukrainian, Russian and Ogham (ancient Irish script).
- * Added support for English Braille.
- * Added alternative Romanization for North Macedonian and Serbian (mkd2/srp2)
- reflecting a casual style that many native speakers of those languages use
- when writing text in Latin script, e.g. non-accented single letters (e.g. "s")
- rather than phonetically motivated combinations of letters (e.g. "sh").
- * When a line starts with "::lcode xyz ", the new uroman version will switch to
- that language for that line. This is used for the new reference test file.
- * Various small improvements.
-
-Changes in version 1.2.5
- * Improved support for Armenian and eight languages using Cyrillic scripts.
- -- For Serbian and Macedonian, which are often written in both Cyrillic
- and Latin scripts, uroman will map both official versions to the same
- romanized text, e.g. both "Ниш" and "Niš" will be mapped to "Nish" (which
- properly reflects the pronunciation of the city's name).
- For both Serbian and Macedonian, casual writers often use a simplified
- Latin form without diacritics, e.g. "s" to represent not only Cyrillic "с"
- and Latin "s", but also "ш" or "š", even if this conflates "s" and "sh" and
- other such pairs. The casual romanization can be simulated by using
- alternative uroman language codes "srp2" and "mkd2", which romanize
- both "Ниш" and "Niš" to "Nis" to reflect the casual Latin spelling.
- * Various small improvements.
-
-Changes in version 1.2.4
- * Bug fix: cache mode generated two empty lines for each empty input line.
-
-Changes in version 1.2
- * Run-time improvement based on (1) token-based caching and (2) shortcut
- romanization (identity) of ASCII strings for default 1-best (non-chart)
- output. Speed-up by a factor of 10 for Bengali and Uyghur on medium and
- large size texts.
- * Incremental improvements for Farsi, Amharic, Russian, Hebrew and related
- languages.
- * Richer lattice structure (more alternatives) for "Romanization" of English
- to support better matching to romanizations of other languages.
- Changes output only when --chart option is specified. No change in output for
- default 1-best output, which for ASCII characters is always the input string.
-
-Changes in version 1.1 (major upgrade)
- * Offers chart output (in JSON format) to represent alternative romanizations.
- -- Location of first character is defined to be "line: 1, start:0, end:0".
- * Incremental improvements of Hebrew and Greek romanization; Chinese numbers.
- * Improved web-interface at http://www.isi.edu/~ulf/uroman.html
- -- Shows corresponding original and romanization text in red
- when hovering over a text segment.
- -- Shows alternative romanizations when hovering over romanized text
- marked by dotted underline.
- -- Added right-to-left script detection and improved display for right-to-left
- script text (as determined line by line).
- -- On-page support for some scripts that are often not pre-installed on users'
- computers (Burmese, Egyptian, Klingon).
-
-Changes in version 1.0 (major upgrade)
- * Upgraded principal internal data structure from string to lattice.
- * Improvements mostly in vowelization of South and Southeast Asian languages.
- * Vocalic 'r' more consistently treated as vowel (no additional vowel added).
- * Repetition signs (Japanese/Chinese/Thai/Khmer/Lao) are mapped to superscript 2.
- * Japanese Katakana middle dots now mapped to ASCII space.
- * Tibetan intersyllabic mark now mapped to middle dot (U+00B7).
- * Some corrections regarding analysis of Chinese numbers.
- * Many more foreign diacritics and punctuation marks dropped or mapped to ASCII.
- * Zero-width characters dropped, except line/sentence-initial byte order marks.
- * Spaces normalized to ASCII space.
- * Fixed bug that in some cases mapped signs (such as dagger or bullet) to their verbal descriptions.
- * Tested against previous version of uroman with a new uroman visual diff tool.
- * Almost an order of magnitude faster.
-
-Changes in version 0.7 (minor upgrade)
- * Added script uroman-quick.pl for Arabic script languages, incl. Uyghur.
- Much faster, pre-caching mapping of Arabic to Latin characters, simple greedy processing.
- Will not convert material from non-Arabic blocks such as any (somewhat unusual) Cyrillic
- or Chinese characters in Uyghur texts.
-
-Changes in version 0.6 (minor upgrade)
- * Added support for two letter characters used in Uzbek:
- (1) character "ʻ" ("modifier letter turned comma", which modifies preceding "g" and "u" letters)
- (2) character "ʼ" ("modifier letter apostrophe", which Uzbek uses to mark a glottal stop).
- Both are now mapped to "'" (plain ASCII apostrophe).
- * Added support for Uyghur vowel characters such as "ې" (Arabic e) and "ۆ" (Arabic oe)
- even when they are not preceded by "ئ" (yeh with hamza above).
- * Added support for Arabic semicolon "؛", Arabic ligature forms for phrases such as "ﷺ"
- ("sallallahou alayhe wasallam" = "prayer of God be upon him and his family and peace")
- * Added robustness for Arabic letter presentation forms (initial/medial/final/isolated).
- However, it is strongly recommended to normalize any presentation form Arabic letters
- to their non-presentation form before calling uroman.
- * Added force flush directive ($|=1;).
-
-Changes in version 0.5 (minor upgrade)
- * Improvements for Uyghur (make sure to use language option: -l uig)
-
-Changes in version 0.4 (minor upgrade)
- * Improvements for Thai (special cases for vowel/consonant reordering, e.g. for "sara o"; dropped some aspiration 'h's)
- * Minor change for Arabic (added "alef+fathatan" = "an")
-
-New features in version 0.3
- * Covers Mandarin (Chinese)
- * Improved romanization for numerous languages
- * Preserves capitalization (e.g. from Latin, Cyrillic, Greek scripts)
- * Maps from native digits to Western numbers
- * Faster for South Asian languages
-
-### Other features
- * Web interface: http://www.isi.edu/~ulf/uroman.html
- * Vowelization is provided when locally computable, e.g. for many South Asian languages and Tibetan.
-
-### Limitations
- * The current version of uroman has a few limitations, some of which we plan to address in future versions.
- For Japanese, *uroman* currently romanizes hiragana and katakana as expected, but kanji are interpreted as Chinese characters and romanized as such.
- For Egyptian hieroglyphs, only single-sound phonetic characters and numbers are currently romanized.
- For Linear B, only phonetic syllabic characters are romanized.
- For some other extinct scripts such as cuneiform, no romanization is provided.
- * A romanizer is not a full transliterator. For example, this version of
- uroman does not vowelize text that lacks explicit vowelization such as
- normal text in Arabic and Hebrew (without diacritics/points).
-
-### Acknowledgments
-This research is based upon work supported in part by the Office of the Director of National Intelligence (ODNI), Intelligence Advanced Research Projects Activity (IARPA), via contract # FA8650-17-C-9116, and by research sponsored by Air Force Research Laboratory (AFRL) under agreement number FA8750-19-1-1000. The views and conclusions contained herein are those of the authors and should not be interpreted as necessarily representing the official policies, either expressed or implied, of ODNI, IARPA, Air Force Laboratory, DARPA, or the U.S. Government. The U.S. Government is authorized to reproduce and distribute reprints for governmental purposes notwithstanding any copyright annotation therein.
diff --git a/spaces/AlanMars/QYL-AI-Space/modules/models/base_model.py b/spaces/AlanMars/QYL-AI-Space/modules/models/base_model.py
deleted file mode 100644
index 01cee5b82d61b2a0369f6c8cd77553074776103a..0000000000000000000000000000000000000000
--- a/spaces/AlanMars/QYL-AI-Space/modules/models/base_model.py
+++ /dev/null
@@ -1,592 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, List
-
-import logging
-import json
-import commentjson as cjson
-import os
-import sys
-import requests
-import urllib3
-import traceback
-import pathlib
-
-from tqdm import tqdm
-import colorama
-from duckduckgo_search import ddg
-import asyncio
-import aiohttp
-from enum import Enum
-
-from ..presets import *
-from ..llama_func import *
-from ..utils import *
-from .. import shared
-from ..config import retrieve_proxy
-
-
-class ModelType(Enum):
- Unknown = -1
- OpenAI = 0
- ChatGLM = 1
- LLaMA = 2
- XMChat = 3
- StableLM = 4
- MOSS = 5
- YuanAI = 6
-
- @classmethod
- def get_type(cls, model_name: str):
- model_type = None
- model_name_lower = model_name.lower()
- if "gpt" in model_name_lower:
- model_type = ModelType.OpenAI
- elif "chatglm" in model_name_lower:
- model_type = ModelType.ChatGLM
- elif "llama" in model_name_lower or "alpaca" in model_name_lower:
- model_type = ModelType.LLaMA
- elif "xmchat" in model_name_lower:
- model_type = ModelType.XMChat
- elif "stablelm" in model_name_lower:
- model_type = ModelType.StableLM
- elif "moss" in model_name_lower:
- model_type = ModelType.MOSS
- elif "yuanai" in model_name_lower:
- model_type = ModelType.YuanAI
- else:
- model_type = ModelType.Unknown
- return model_type
-
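`ModelType.get_type` routes by substring match on the lower-cased model name, so any name containing "gpt" is treated as OpenAI and unmatched names fall back to `Unknown`:

```python
print(ModelType.get_type("gpt-3.5-turbo"))    # ModelType.OpenAI
print(ModelType.get_type("ChatGLM-6B"))       # ModelType.ChatGLM
print(ModelType.get_type("my-custom-model"))  # ModelType.Unknown
```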
-
-class BaseLLMModel:
- def __init__(
- self,
- model_name,
- system_prompt="",
- temperature=1.0,
- top_p=1.0,
- n_choices=1,
- stop=None,
- max_generation_token=None,
- presence_penalty=0,
- frequency_penalty=0,
- logit_bias=None,
- user="",
- ) -> None:
- self.history = []
- self.all_token_counts = []
- self.model_name = model_name
- self.model_type = ModelType.get_type(model_name)
- try:
- self.token_upper_limit = MODEL_TOKEN_LIMIT[model_name]
- except KeyError:
- self.token_upper_limit = DEFAULT_TOKEN_LIMIT
- self.interrupted = False
- self.system_prompt = system_prompt
- self.api_key = None
- self.need_api_key = False
- self.single_turn = False
-
- self.temperature = temperature
- self.top_p = top_p
- self.n_choices = n_choices
- self.stop_sequence = stop
- self.max_generation_token = None
- self.presence_penalty = presence_penalty
- self.frequency_penalty = frequency_penalty
- self.logit_bias = logit_bias
- self.user_identifier = user
-
- def get_answer_stream_iter(self):
-        """Streaming prediction; must be implemented by subclasses.
-        Conversations are stored in self.history (OpenAI format), ending with the most recent question.
-        Should return a generator that yields the next word (str) of the answer each time.
- """
- logging.warning("stream predict not implemented, using at once predict instead")
- response, _ = self.get_answer_at_once()
- yield response
-
- def get_answer_at_once(self):
-        """One-shot prediction; must be implemented by subclasses.
-        Conversations are stored in self.history (OpenAI format), ending with the most recent question.
- Should return:
- the answer (str)
- total token count (int)
- """
- logging.warning("at once predict not implemented, using stream predict instead")
- response_iter = self.get_answer_stream_iter()
- count = 0
- for response in response_iter:
- count += 1
- return response, sum(self.all_token_counts) + count
-
- def billing_info(self):
-        """get billing information, implement if needed"""
- logging.warning("billing info not implemented, using default")
- return BILLING_NOT_APPLICABLE_MSG
-
- def count_token(self, user_input):
- """get token count from input, implement if needed"""
- # logging.warning("token count not implemented, using default")
- return len(user_input)
-
- def stream_next_chatbot(self, inputs, chatbot, fake_input=None, display_append=""):
- def get_return_value():
- return chatbot, status_text
-
- status_text = i18n("开始实时传输回答……")
- if fake_input:
- chatbot.append((fake_input, ""))
- else:
- chatbot.append((inputs, ""))
-
- user_token_count = self.count_token(inputs)
- self.all_token_counts.append(user_token_count)
- logging.debug(f"输入token计数: {user_token_count}")
-
- stream_iter = self.get_answer_stream_iter()
-
- for partial_text in stream_iter:
- chatbot[-1] = (chatbot[-1][0], partial_text + display_append)
- self.all_token_counts[-1] += 1
- status_text = self.token_message()
- yield get_return_value()
- if self.interrupted:
- self.recover()
- break
- self.history.append(construct_assistant(partial_text))
-
- def next_chatbot_at_once(self, inputs, chatbot, fake_input=None, display_append=""):
- if fake_input:
- chatbot.append((fake_input, ""))
- else:
- chatbot.append((inputs, ""))
- if fake_input is not None:
- user_token_count = self.count_token(fake_input)
- else:
- user_token_count = self.count_token(inputs)
- self.all_token_counts.append(user_token_count)
- ai_reply, total_token_count = self.get_answer_at_once()
- self.history.append(construct_assistant(ai_reply))
- if fake_input is not None:
- self.history[-2] = construct_user(fake_input)
- chatbot[-1] = (chatbot[-1][0], ai_reply + display_append)
- if fake_input is not None:
- self.all_token_counts[-1] += count_token(construct_assistant(ai_reply))
- else:
- self.all_token_counts[-1] = total_token_count - sum(self.all_token_counts)
- status_text = self.token_message()
- return chatbot, status_text
-
- def handle_file_upload(self, files, chatbot):
- """if the model accepts multi modal input, implement this function"""
- status = gr.Markdown.update()
- if files:
- construct_index(self.api_key, file_src=files)
- status = "索引构建完成"
- return gr.Files.update(), chatbot, status
-
- def prepare_inputs(self, real_inputs, use_websearch, files, reply_language, chatbot):
- fake_inputs = None
- display_append = []
- limited_context = False
- fake_inputs = real_inputs
- if files:
- from llama_index.indices.vector_store.base_query import GPTVectorStoreIndexQuery
- from llama_index.indices.query.schema import QueryBundle
- from langchain.embeddings.huggingface import HuggingFaceEmbeddings
- from langchain.chat_models import ChatOpenAI
- from llama_index import (
- GPTSimpleVectorIndex,
- ServiceContext,
- LangchainEmbedding,
- OpenAIEmbedding,
- )
- limited_context = True
- msg = "加载索引中……"
- logging.info(msg)
- # yield chatbot + [(inputs, "")], msg
- index = construct_index(self.api_key, file_src=files)
- assert index is not None, "获取索引失败"
- msg = "索引获取成功,生成回答中……"
- logging.info(msg)
- if local_embedding or self.model_type != ModelType.OpenAI:
- embed_model = LangchainEmbedding(HuggingFaceEmbeddings(model_name = "sentence-transformers/distiluse-base-multilingual-cased-v2"))
- else:
- embed_model = OpenAIEmbedding()
- # yield chatbot + [(inputs, "")], msg
- with retrieve_proxy():
- prompt_helper = PromptHelper(
- max_input_size=4096,
- num_output=5,
- max_chunk_overlap=20,
- chunk_size_limit=600,
- )
- from llama_index import ServiceContext
-
- service_context = ServiceContext.from_defaults(
- prompt_helper=prompt_helper, embed_model=embed_model
- )
- query_object = GPTVectorStoreIndexQuery(
- index.index_struct,
- service_context=service_context,
- similarity_top_k=5,
- vector_store=index._vector_store,
- docstore=index._docstore,
- response_synthesizer=None
- )
- query_bundle = QueryBundle(real_inputs)
- nodes = query_object.retrieve(query_bundle)
- reference_results = [n.node.text for n in nodes]
- reference_results = add_source_numbers(reference_results, use_source=False)
- display_append = add_details(reference_results)
- display_append = "\n\n" + "".join(display_append)
- real_inputs = (
- replace_today(PROMPT_TEMPLATE)
- .replace("{query_str}", real_inputs)
- .replace("{context_str}", "\n\n".join(reference_results))
- .replace("{reply_language}", reply_language)
- )
- elif use_websearch:
- limited_context = True
- search_results = ddg(real_inputs, max_results=5)
- reference_results = []
- for idx, result in enumerate(search_results):
- logging.debug(f"搜索结果{idx + 1}:{result}")
- domain_name = urllib3.util.parse_url(result["href"]).host
- reference_results.append([result["body"], result["href"]])
- display_append.append(
- # f"{idx+1}. [{domain_name}]({result['href']})\n"
-                f"<a href=\"{result['href']}\" target=\"_blank\">{idx+1}.&nbsp;{domain_name}</a>\n"
- )
- reference_results = add_source_numbers(reference_results)
-        display_append = "\n\n" + "".join(display_append)
- real_inputs = (
- replace_today(WEBSEARCH_PTOMPT_TEMPLATE)
- .replace("{query}", real_inputs)
- .replace("{web_results}", "\n\n".join(reference_results))
- .replace("{reply_language}", reply_language)
- )
- else:
- display_append = ""
- return limited_context, fake_inputs, display_append, real_inputs, chatbot
-
- def predict(
- self,
- inputs,
- chatbot,
- stream=False,
- use_websearch=False,
- files=None,
- reply_language="中文",
- should_check_token_count=True,
- ): # repetition_penalty, top_k
-
- status_text = "开始生成回答……"
- logging.info(
- "输入为:" + colorama.Fore.BLUE + f"{inputs}" + colorama.Style.RESET_ALL
- )
- if should_check_token_count:
- yield chatbot + [(inputs, "")], status_text
- if reply_language == "跟随问题语言(不稳定)":
- reply_language = "the same language as the question, such as English, 中文, 日本語, Español, Français, or Deutsch."
-
- limited_context, fake_inputs, display_append, inputs, chatbot = self.prepare_inputs(real_inputs=inputs, use_websearch=use_websearch, files=files, reply_language=reply_language, chatbot=chatbot)
- yield chatbot + [(fake_inputs, "")], status_text
-
- if (
- self.need_api_key and
- self.api_key is None
- and not shared.state.multi_api_key
- ):
- status_text = STANDARD_ERROR_MSG + NO_APIKEY_MSG
- logging.info(status_text)
- chatbot.append((inputs, ""))
- if len(self.history) == 0:
- self.history.append(construct_user(inputs))
- self.history.append("")
- self.all_token_counts.append(0)
- else:
- self.history[-2] = construct_user(inputs)
- yield chatbot + [(inputs, "")], status_text
- return
- elif len(inputs.strip()) == 0:
- status_text = STANDARD_ERROR_MSG + NO_INPUT_MSG
- logging.info(status_text)
- yield chatbot + [(inputs, "")], status_text
- return
-
- if self.single_turn:
- self.history = []
- self.all_token_counts = []
- self.history.append(construct_user(inputs))
-
- try:
- if stream:
- logging.debug("使用流式传输")
- iter = self.stream_next_chatbot(
- inputs,
- chatbot,
- fake_input=fake_inputs,
- display_append=display_append,
- )
- for chatbot, status_text in iter:
- yield chatbot, status_text
- else:
- logging.debug("不使用流式传输")
- chatbot, status_text = self.next_chatbot_at_once(
- inputs,
- chatbot,
- fake_input=fake_inputs,
- display_append=display_append,
- )
- yield chatbot, status_text
- except Exception as e:
- traceback.print_exc()
- status_text = STANDARD_ERROR_MSG + str(e)
- yield chatbot, status_text
-
- if len(self.history) > 1 and self.history[-1]["content"] != inputs:
- logging.info(
- "回答为:"
- + colorama.Fore.BLUE
- + f"{self.history[-1]['content']}"
- + colorama.Style.RESET_ALL
- )
-
- if limited_context:
- # self.history = self.history[-4:]
- # self.all_token_counts = self.all_token_counts[-2:]
- self.history = []
- self.all_token_counts = []
-
- max_token = self.token_upper_limit - TOKEN_OFFSET
-
- if sum(self.all_token_counts) > max_token and should_check_token_count:
- count = 0
- while (
- sum(self.all_token_counts)
- > self.token_upper_limit * REDUCE_TOKEN_FACTOR
- and sum(self.all_token_counts) > 0
- ):
- count += 1
- del self.all_token_counts[0]
- del self.history[:2]
- logging.info(status_text)
- status_text = f"为了防止token超限,模型忘记了早期的 {count} 轮对话"
- yield chatbot, status_text
-
- self.auto_save(chatbot)
-
- def retry(
- self,
- chatbot,
- stream=False,
- use_websearch=False,
- files=None,
- reply_language="中文",
- ):
- logging.debug("重试中……")
- if len(self.history) > 0:
- inputs = self.history[-2]["content"]
- del self.history[-2:]
- self.all_token_counts.pop()
- elif len(chatbot) > 0:
- inputs = chatbot[-1][0]
- else:
- yield chatbot, f"{STANDARD_ERROR_MSG}上下文是空的"
- return
-
- iter = self.predict(
- inputs,
- chatbot,
- stream=stream,
- use_websearch=use_websearch,
- files=files,
- reply_language=reply_language,
- )
- for x in iter:
- yield x
- logging.debug("重试完毕")
-
- # def reduce_token_size(self, chatbot):
- # logging.info("开始减少token数量……")
- # chatbot, status_text = self.next_chatbot_at_once(
- # summarize_prompt,
- # chatbot
- # )
- # max_token_count = self.token_upper_limit * REDUCE_TOKEN_FACTOR
- # num_chat = find_n(self.all_token_counts, max_token_count)
- # logging.info(f"previous_token_count: {self.all_token_counts}, keeping {num_chat} chats")
- # chatbot = chatbot[:-1]
- # self.history = self.history[-2*num_chat:] if num_chat > 0 else []
- # self.all_token_counts = self.all_token_counts[-num_chat:] if num_chat > 0 else []
- # msg = f"保留了最近{num_chat}轮对话"
- # logging.info(msg)
- # logging.info("减少token数量完毕")
- # return chatbot, msg + "," + self.token_message(self.all_token_counts if len(self.all_token_counts) > 0 else [0])
-
- def interrupt(self):
- self.interrupted = True
-
- def recover(self):
- self.interrupted = False
-
- def set_token_upper_limit(self, new_upper_limit):
- self.token_upper_limit = new_upper_limit
- print(f"token上限设置为{new_upper_limit}")
-
- def set_temperature(self, new_temperature):
- self.temperature = new_temperature
-
- def set_top_p(self, new_top_p):
- self.top_p = new_top_p
-
- def set_n_choices(self, new_n_choices):
- self.n_choices = new_n_choices
-
- def set_stop_sequence(self, new_stop_sequence: str):
- new_stop_sequence = new_stop_sequence.split(",")
- self.stop_sequence = new_stop_sequence
-
- def set_max_tokens(self, new_max_tokens):
- self.max_generation_token = new_max_tokens
-
- def set_presence_penalty(self, new_presence_penalty):
- self.presence_penalty = new_presence_penalty
-
- def set_frequency_penalty(self, new_frequency_penalty):
- self.frequency_penalty = new_frequency_penalty
-
- def set_logit_bias(self, logit_bias):
- logit_bias = logit_bias.split()
- bias_map = {}
- encoding = tiktoken.get_encoding("cl100k_base")
- for line in logit_bias:
- word, bias_amount = line.split(":")
- if word:
- for token in encoding.encode(word):
- bias_map[token] = float(bias_amount)
- self.logit_bias = bias_map
-
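`set_logit_bias` parses whitespace-separated `word:bias` pairs and expands every word into its `cl100k_base` token ids, so a single word may add several entries. A hypothetical use (assuming `model` is any `BaseLLMModel` instance and `tiktoken` is installed):

```python
model.set_logit_bias("hello:5 world:-10")
# model.logit_bias now maps every cl100k_base token id of "hello" to 5.0
# and every token id of "world" to -10.0
```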
- def set_user_identifier(self, new_user_identifier):
- self.user_identifier = new_user_identifier
-
- def set_system_prompt(self, new_system_prompt):
- self.system_prompt = new_system_prompt
-
- def set_key(self, new_access_key):
- self.api_key = new_access_key.strip()
- msg = i18n("API密钥更改为了") + hide_middle_chars(self.api_key)
- logging.info(msg)
- return self.api_key, msg
-
- def set_single_turn(self, new_single_turn):
- self.single_turn = new_single_turn
-
- def reset(self):
- self.history = []
- self.all_token_counts = []
- self.interrupted = False
- pathlib.Path(os.path.join(HISTORY_DIR, self.user_identifier, new_auto_history_filename(os.path.join(HISTORY_DIR, self.user_identifier)))).touch()
- return [], self.token_message([0])
-
- def delete_first_conversation(self):
- if self.history:
- del self.history[:2]
- del self.all_token_counts[0]
- return self.token_message()
-
- def delete_last_conversation(self, chatbot):
- if len(chatbot) > 0 and STANDARD_ERROR_MSG in chatbot[-1][1]:
- msg = "由于包含报错信息,只删除chatbot记录"
- chatbot.pop()
- return chatbot, self.history
- if len(self.history) > 0:
- self.history.pop()
- self.history.pop()
- if len(chatbot) > 0:
- msg = "删除了一组chatbot对话"
- chatbot.pop()
- if len(self.all_token_counts) > 0:
- msg = "删除了一组对话的token计数记录"
- self.all_token_counts.pop()
- msg = "删除了一组对话"
- return chatbot, msg
-
- def token_message(self, token_lst=None):
- if token_lst is None:
- token_lst = self.all_token_counts
- token_sum = 0
- for i in range(len(token_lst)):
- token_sum += sum(token_lst[: i + 1])
- return i18n("Token 计数: ") + f"{sum(token_lst)}" + i18n(",本次对话累计消耗了 ") + f"{token_sum} tokens"
-
- def save_chat_history(self, filename, chatbot, user_name):
- if filename == "":
- return
- if not filename.endswith(".json"):
- filename += ".json"
- return save_file(filename, self.system_prompt, self.history, chatbot, user_name)
-
- def auto_save(self, chatbot):
- history_file_path = get_history_filepath(self.user_identifier)
- save_file(history_file_path, self.system_prompt, self.history, chatbot, self.user_identifier)
-
- def export_markdown(self, filename, chatbot, user_name):
- if filename == "":
- return
- if not filename.endswith(".md"):
- filename += ".md"
- return save_file(filename, self.system_prompt, self.history, chatbot, user_name)
-
- def load_chat_history(self, filename, user_name):
- logging.debug(f"{user_name} 加载对话历史中……")
- logging.info(f"filename: {filename}")
-        if not isinstance(filename, str) and filename is not None:
- filename = filename.name
- try:
- if "/" not in filename:
- history_file_path = os.path.join(HISTORY_DIR, user_name, filename)
- else:
- history_file_path = filename
- with open(history_file_path, "r") as f:
- json_s = json.load(f)
- try:
-                if isinstance(json_s["history"][0], str):
- logging.info("历史记录格式为旧版,正在转换……")
- new_history = []
- for index, item in enumerate(json_s["history"]):
- if index % 2 == 0:
- new_history.append(construct_user(item))
- else:
- new_history.append(construct_assistant(item))
- json_s["history"] = new_history
- logging.info(new_history)
-            except Exception:
-                # best-effort conversion of the legacy history format
-                pass
- logging.debug(f"{user_name} 加载对话历史完毕")
- self.history = json_s["history"]
- return os.path.basename(filename), json_s["system"], json_s["chatbot"]
-        except Exception:
-            # no chat history, or the history file failed to parse
- logging.info(f"没有找到对话历史记录 {filename}")
- return gr.update(), self.system_prompt, gr.update()
-
- def auto_load(self):
- if self.user_identifier == "":
- self.reset()
- return self.system_prompt, gr.update()
- history_file_path = get_history_filepath(self.user_identifier)
- filename, system_prompt, chatbot = self.load_chat_history(history_file_path, self.user_identifier)
- return system_prompt, chatbot
-
- def like(self):
- """like the last response, implement if needed
- """
- return gr.update()
-
- def dislike(self):
- """dislike the last response, implement if needed
- """
- return gr.update()
diff --git a/spaces/AlhitawiMohammed22/CER_Hu-Evaluation-Metrics/test_eval_wer.py b/spaces/AlhitawiMohammed22/CER_Hu-Evaluation-Metrics/test_eval_wer.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/spaces/Andy1621/uniformer_image_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py b/spaces/Andy1621/uniformer_image_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py
deleted file mode 100644
index b371ed757bf7dd95ef9ecfc2e609ca5ab03795d6..0000000000000000000000000000000000000000
--- a/spaces/Andy1621/uniformer_image_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py
+++ /dev/null
@@ -1,38 +0,0 @@
-_base_ = ['./cascade_mask_rcnn_r50_fpn_1x_coco.py']
-
-model = dict(
- pretrained='open-mmlab://detectron2/resnet50_caffe',
- backbone=dict(
- norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe'))
-
-img_norm_cfg = dict(
- mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)
-train_pipeline = [
- dict(type='LoadImageFromFile'),
- dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
- dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
- dict(type='RandomFlip', flip_ratio=0.5),
- dict(type='Normalize', **img_norm_cfg),
- dict(type='Pad', size_divisor=32),
- dict(type='DefaultFormatBundle'),
- dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
-]
-test_pipeline = [
- dict(type='LoadImageFromFile'),
- dict(
- type='MultiScaleFlipAug',
- img_scale=(1333, 800),
- flip=False,
- transforms=[
- dict(type='Resize', keep_ratio=True),
- dict(type='RandomFlip'),
- dict(type='Normalize', **img_norm_cfg),
- dict(type='Pad', size_divisor=32),
- dict(type='ImageToTensor', keys=['img']),
- dict(type='Collect', keys=['img']),
- ])
-]
-data = dict(
- train=dict(pipeline=train_pipeline),
- val=dict(pipeline=test_pipeline),
- test=dict(pipeline=test_pipeline))
diff --git a/spaces/Andy1621/uniformer_image_detection/mmdet/models/backbones/resnext.py b/spaces/Andy1621/uniformer_image_detection/mmdet/models/backbones/resnext.py
deleted file mode 100644
index 6dbcbd516fd308b1d703eecb83ab275f6b159516..0000000000000000000000000000000000000000
--- a/spaces/Andy1621/uniformer_image_detection/mmdet/models/backbones/resnext.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import math
-
-from mmcv.cnn import build_conv_layer, build_norm_layer
-
-from ..builder import BACKBONES
-from ..utils import ResLayer
-from .resnet import Bottleneck as _Bottleneck
-from .resnet import ResNet
-
-
-class Bottleneck(_Bottleneck):
- expansion = 4
-
- def __init__(self,
- inplanes,
- planes,
- groups=1,
- base_width=4,
- base_channels=64,
- **kwargs):
- """Bottleneck block for ResNeXt.
-
- If style is "pytorch", the stride-two layer is the 3x3 conv layer, if
- it is "caffe", the stride-two layer is the first 1x1 conv layer.
- """
- super(Bottleneck, self).__init__(inplanes, planes, **kwargs)
-
- if groups == 1:
- width = self.planes
- else:
- width = math.floor(self.planes *
- (base_width / base_channels)) * groups
-
- self.norm1_name, norm1 = build_norm_layer(
- self.norm_cfg, width, postfix=1)
- self.norm2_name, norm2 = build_norm_layer(
- self.norm_cfg, width, postfix=2)
- self.norm3_name, norm3 = build_norm_layer(
- self.norm_cfg, self.planes * self.expansion, postfix=3)
-
- self.conv1 = build_conv_layer(
- self.conv_cfg,
- self.inplanes,
- width,
- kernel_size=1,
- stride=self.conv1_stride,
- bias=False)
- self.add_module(self.norm1_name, norm1)
- fallback_on_stride = False
- self.with_modulated_dcn = False
- if self.with_dcn:
- fallback_on_stride = self.dcn.pop('fallback_on_stride', False)
- if not self.with_dcn or fallback_on_stride:
- self.conv2 = build_conv_layer(
- self.conv_cfg,
- width,
- width,
- kernel_size=3,
- stride=self.conv2_stride,
- padding=self.dilation,
- dilation=self.dilation,
- groups=groups,
- bias=False)
- else:
- assert self.conv_cfg is None, 'conv_cfg must be None for DCN'
- self.conv2 = build_conv_layer(
- self.dcn,
- width,
- width,
- kernel_size=3,
- stride=self.conv2_stride,
- padding=self.dilation,
- dilation=self.dilation,
- groups=groups,
- bias=False)
-
- self.add_module(self.norm2_name, norm2)
- self.conv3 = build_conv_layer(
- self.conv_cfg,
- width,
- self.planes * self.expansion,
- kernel_size=1,
- bias=False)
- self.add_module(self.norm3_name, norm3)
-
- if self.with_plugins:
- self._del_block_plugins(self.after_conv1_plugin_names +
- self.after_conv2_plugin_names +
- self.after_conv3_plugin_names)
- self.after_conv1_plugin_names = self.make_block_plugins(
- width, self.after_conv1_plugins)
- self.after_conv2_plugin_names = self.make_block_plugins(
- width, self.after_conv2_plugins)
- self.after_conv3_plugin_names = self.make_block_plugins(
- self.planes * self.expansion, self.after_conv3_plugins)
-
- def _del_block_plugins(self, plugin_names):
- """delete plugins for block if exist.
-
- Args:
- plugin_names (list[str]): List of plugins name to delete.
- """
- assert isinstance(plugin_names, list)
- for plugin_name in plugin_names:
- del self._modules[plugin_name]
-
-
-@BACKBONES.register_module()
-class ResNeXt(ResNet):
- """ResNeXt backbone.
-
- Args:
- depth (int): Depth of resnet, from {18, 34, 50, 101, 152}.
- in_channels (int): Number of input image channels. Default: 3.
- num_stages (int): Resnet stages. Default: 4.
- groups (int): Group of resnext.
- base_width (int): Base width of resnext.
- strides (Sequence[int]): Strides of the first block of each stage.
- dilations (Sequence[int]): Dilation of each stage.
- out_indices (Sequence[int]): Output from which stages.
- style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two
- layer is the 3x3 conv layer, otherwise the stride-two layer is
- the first 1x1 conv layer.
- frozen_stages (int): Stages to be frozen (all param fixed). -1 means
- not freezing any parameters.
- norm_cfg (dict): dictionary to construct and config norm layer.
- norm_eval (bool): Whether to set norm layers to eval mode, namely,
- freeze running stats (mean and var). Note: Effect on Batch Norm
- and its variants only.
- with_cp (bool): Use checkpoint or not. Using checkpoint will save some
- memory while slowing down the training speed.
- zero_init_residual (bool): whether to use zero init for last norm layer
- in resblocks to let them behave as identity.
- """
-
- arch_settings = {
- 50: (Bottleneck, (3, 4, 6, 3)),
- 101: (Bottleneck, (3, 4, 23, 3)),
- 152: (Bottleneck, (3, 8, 36, 3))
- }
-
- def __init__(self, groups=1, base_width=4, **kwargs):
- self.groups = groups
- self.base_width = base_width
- super(ResNeXt, self).__init__(**kwargs)
-
- def make_res_layer(self, **kwargs):
- """Pack all blocks in a stage into a ``ResLayer``"""
- return ResLayer(
- groups=self.groups,
- base_width=self.base_width,
- base_channels=self.base_channels,
- **kwargs)
diff --git a/spaces/AndySAnker/DeepStruc/app.py b/spaces/AndySAnker/DeepStruc/app.py
deleted file mode 100644
index c01386499d591dd65858979f493b12d7e97335c1..0000000000000000000000000000000000000000
--- a/spaces/AndySAnker/DeepStruc/app.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import streamlit as st
-import io, os, argparse, torch, random
-import pytorch_lightning as pl
-import numpy as np
-from predict import main
-from tools.utils import plot_ls
-
-seed = 37
-torch.manual_seed(seed)
-pl.seed_everything(seed)
-torch.manual_seed(seed)
-np.random.seed(seed)
-random.seed(seed)
-
-st.title('DeepStruc')
-
-st.write('Welcome to DeepStruc, a deep generative model trained to solve mono-metallic structures (<200 atoms) from a pair distribution function (PDF)!')
-st.write('Upload a PDF (a .gr file) and DeepStruc will predict the structure.')
-
-
-# Define the file upload widget
-pdf_file = st.file_uploader("Upload PDF file in .gr format", type=["gr"])
-
-# Define the form to get the other parameters
-num_structures = st.number_input("Number of structures to generate", min_value=1, max_value=100, value=10)
-structure_index = st.number_input("Index of structure to visualize", min_value=0, value=3)
-sigma = st.number_input("Standard deviation for sampling", min_value=0.1, value=3.0)
-
-# Define parser
-parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-args = parser.parse_args()
-args.num_samples = num_structures
-args.index_plot = structure_index
-args.sigma = sigma
-# Fixed for DeepStruc app
-args.model = 'DeepStruc'
-args.save_path = './'
-
-# Define the predict button and its behavior
-if st.button("Generate structures"):
- if pdf_file is None:
- st.warning("Please upload a PDF file.")
- else:
- # Get the contents of the file as bytes
- file_bytes = pdf_file.read()
-
- # Save the contents of the file to disk
- with open("uploaded_file.gr", "wb") as f:
- f.write(file_bytes)
-
- df, index_highlight, these_cords = main(args)
-
- # Plot the latent space
- fig = plot_ls(df, index_highlight)
- st.pyplot(fig)
-        st.write('**The two-dimensional latent space with the location of the input.** The size of the points relates to the size of the embedded structure. Each point is coloured by its structure type: FCC (light blue), octahedral (dark grey), decahedral (orange), BCC (green), icosahedral (dark blue), HCP (pink) and SC (red). Each point in the latent space corresponds to a structure based on its simulated PDF. Test data points are plotted on top of the training and validation data, which are made semi-transparent. The latent space locations of the structures reconstructed from the input are shown with black markers, and the specific reconstructed structure shown in the next box is marked in black and white.')
-
- # Define the save directory and file name
- file_name = "DeepStruc_prediction.xyz"
-
- # Define a download button to download the file
- def download_button(file_name, button_text):
- with open(file_name, "rb") as f:
- bytes = f.read()
- st.download_button(
- label=button_text,
- data=bytes,
- file_name=file_name,
- mime="text/xyz",)
-
- # Save the coordinates to a file and display a download button
- np.savetxt(file_name, these_cords, fmt="%s")
- download_button(file_name, "Download XYZ file")
-
-
-
-st.subheader('Cite')
-
-st.write('If you use DeepStruc, our code or results, please consider citing our papers. Thanks in advance!')
-
-st.write('DeepStruc: Towards structure solution from pair distribution function data using deep generative models **2023** (https://pubs.rsc.org/en/content/articlehtml/2022/dd/d2dd00086e)')
-st.write('Characterising the atomic structure of mono-metallic nanoparticles from x-ray scattering data using conditional generative models **2020** (https://chemrxiv.org/engage/chemrxiv/article-details/60c74dd1842e6514f2db3527)')
-
-st.subheader('LICENSE')
-
-st.write('This project is licensed under the Apache License Version 2.0, January 2004 - see the LICENSE file at https://github.com/EmilSkaaning/DeepStruc/blob/main/LICENSE.md for details.')
-st.write("")
-
-st.subheader('Github')
-st.write('https://github.com/EmilSkaaning/DeepStruc')
-
-st.subheader('Questions')
-st.write('andy@chem.ku.dk or etsk@chem.ku.dk')
-
diff --git a/spaces/Aniquel/WizApp_Code_Generator/app.py b/spaces/Aniquel/WizApp_Code_Generator/app.py
deleted file mode 100644
index 38bc6f2be220cad566d7a6613326d43c43899191..0000000000000000000000000000000000000000
--- a/spaces/Aniquel/WizApp_Code_Generator/app.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import gradio as gr
-import openai
-import os
-
-openai.api_key = os.getenv("OPENAI_API_KEY")
-
-
-def generate_response(text):
- prompt = f"Code generation:\n\n```python\n{text}\n```"
- response = openai.Completion.create(
-        model="gpt-3.5-turbo",  # NOTE: chat-series models actually require openai.ChatCompletion; Completion needs e.g. "text-davinci-003"
- prompt=prompt,
- max_tokens=3000,
- n=1,
- stop=None,
- temperature=0.2,
- )
- message = response.choices[0].text.strip()
- return message
-
-iface = gr.Interface(
- fn=generate_response,
- inputs=gr.inputs.Textbox(label="Enter your code here"),
- outputs=gr.outputs.Textbox(label="Chatbot's response"),
- title="WizApp Code Generation",
- description="Use AI to generate code based on your input",
- theme="default"
-)
-
-if __name__ == "__main__":
- iface.launch()
diff --git a/spaces/Ariharasudhan/YoloV5/utils/segment/general.py b/spaces/Ariharasudhan/YoloV5/utils/segment/general.py
deleted file mode 100644
index b526333dc5a1b8625d7e6a51ee6ba41818c62adb..0000000000000000000000000000000000000000
--- a/spaces/Ariharasudhan/YoloV5/utils/segment/general.py
+++ /dev/null
@@ -1,137 +0,0 @@
-import cv2
-import numpy as np
-import torch
-import torch.nn.functional as F
-
-
-def crop_mask(masks, boxes):
- """
- "Crop" predicted masks by zeroing out everything not in the predicted bbox.
- Vectorized by Chong (thanks Chong).
-
- Args:
-        - masks should be a size [n, h, w] tensor of masks
- - boxes should be a size [n, 4] tensor of bbox coords in relative point form
- """
-
- n, h, w = masks.shape
- x1, y1, x2, y2 = torch.chunk(boxes[:, :, None], 4, 1) # x1 shape(1,1,n)
- r = torch.arange(w, device=masks.device, dtype=x1.dtype)[None, None, :] # rows shape(1,w,1)
- c = torch.arange(h, device=masks.device, dtype=x1.dtype)[None, :, None] # cols shape(h,1,1)
-
- return masks * ((r >= x1) * (r < x2) * (c >= y1) * (c < y2))
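A quick, self-contained sanity check of `crop_mask` with dummy data (illustrative only; the values are chosen so the expected pixel counts are easy to verify by hand):

```python
import torch

# Two 8x8 masks of ones, cropped to different xyxy boxes in pixel coordinates.
masks = torch.ones(2, 8, 8)
boxes = torch.tensor([[0.0, 0.0, 4.0, 4.0],
                      [2.0, 2.0, 6.0, 6.0]])
cropped = crop_mask(masks, boxes)
print(cropped[0].sum().item())  # 16.0 -- only the 4x4 region inside the box survives
print(cropped[1].sum().item())  # 16.0
```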
-
-
-def process_mask_upsample(protos, masks_in, bboxes, shape):
- """
- Crop after upsample.
- protos: [mask_dim, mask_h, mask_w]
- masks_in: [n, mask_dim], n is number of masks after nms
- bboxes: [n, 4], n is number of masks after nms
- shape: input image size, (h, w)
-
- return: binary mask tensor, [n, h, w]
- """
-
- c, mh, mw = protos.shape # CHW
- masks = (masks_in @ protos.float().view(c, -1)).sigmoid().view(-1, mh, mw)
- masks = F.interpolate(masks[None], shape, mode='bilinear', align_corners=False)[0] # CHW
- masks = crop_mask(masks, bboxes) # CHW
- return masks.gt_(0.5)
-
-
-def process_mask(protos, masks_in, bboxes, shape, upsample=False):
- """
- Crop before upsample.
- protos: [mask_dim, mask_h, mask_w]
- masks_in: [n, mask_dim], n is number of masks after nms
- bboxes: [n, 4], n is number of masks after nms
- shape: input image size, (h, w)
-
- return: binary mask tensor, [n, h, w] if upsample else [n, mask_h, mask_w]
- """
-
- c, mh, mw = protos.shape # CHW
- ih, iw = shape
- masks = (masks_in @ protos.float().view(c, -1)).sigmoid().view(-1, mh, mw) # CHW
-
- downsampled_bboxes = bboxes.clone()
- downsampled_bboxes[:, 0] *= mw / iw
- downsampled_bboxes[:, 2] *= mw / iw
- downsampled_bboxes[:, 3] *= mh / ih
- downsampled_bboxes[:, 1] *= mh / ih
-
- masks = crop_mask(masks, downsampled_bboxes) # CHW
- if upsample:
- masks = F.interpolate(masks[None], shape, mode='bilinear', align_corners=False)[0] # CHW
- return masks.gt_(0.5)
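An illustrative shape check of the prototype-to-mask pipeline with random inputs (the shapes follow the docstrings above; the values are random, so the resulting masks are meaningless, but the tensor plumbing is exercised):

```python
import torch

c, mh, mw = 32, 40, 40           # prototype channels and resolution
n = 3                            # detections surviving NMS
protos = torch.randn(c, mh, mw)
masks_in = torch.randn(n, c)     # per-detection mask coefficients
bboxes = torch.tensor([[10.0, 10.0, 100.0, 100.0]] * n)  # xyxy in input-image pixels
shape = (160, 160)               # model input size (h, w)

masks = process_mask(protos, masks_in, bboxes, shape, upsample=True)
print(masks.shape)  # torch.Size([3, 160, 160]); values are 0./1. after the 0.5 threshold
```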
-
-
-def scale_image(im1_shape, masks, im0_shape, ratio_pad=None):
- """
- Rescale masks from im1_shape (model input) to im0_shape (original image).
- im1_shape: model input shape, [h, w]
- im0_shape: original image shape, [h, w, 3]
- masks: [h, w, num]
- """
- # Undo the letterbox padding, then resize masks from im1_shape to im0_shape
- if ratio_pad is None: # calculate from im0_shape
- gain = min(im1_shape[0] / im0_shape[0], im1_shape[1] / im0_shape[1]) # gain = old / new
- pad = (im1_shape[1] - im0_shape[1] * gain) / 2, (im1_shape[0] - im0_shape[0] * gain) / 2 # wh padding
- else:
- pad = ratio_pad[1]
- top, left = int(pad[1]), int(pad[0]) # y, x
- bottom, right = int(im1_shape[0] - pad[1]), int(im1_shape[1] - pad[0])
-
- if len(masks.shape) < 2:
- raise ValueError(f'masks should have 2 or 3 dimensions, but got {len(masks.shape)}')
- masks = masks[top:bottom, left:right]
- # masks = masks.permute(2, 0, 1).contiguous()
- # masks = F.interpolate(masks[None], im0_shape[:2], mode='bilinear', align_corners=False)[0]
- # masks = masks.permute(1, 2, 0).contiguous()
- masks = cv2.resize(masks, (im0_shape[1], im0_shape[0]))
-
- if len(masks.shape) == 2:
- masks = masks[:, :, None]
- return masks
-
-
-def mask_iou(mask1, mask2, eps=1e-7):
- """
- mask1: [N, n], N is the number of predicted objects
- mask2: [M, n], M is the number of ground-truth objects
- Note: n = image_w x image_h (flattened pixels)
-
- return: masks iou, [N, M]
- """
- intersection = torch.matmul(mask1, mask2.t()).clamp(0)
- union = (mask1.sum(1)[:, None] + mask2.sum(1)[None]) - intersection # (area1 + area2) - intersection
- return intersection / (union + eps)
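A hand-checkable example of `mask_iou` on flattened binary masks:

```python
import torch

# Two 4-pixel "images": mask A covers pixels 0-1, mask B covers pixels 1-2,
# so intersection = 1, union = 3, IoU = 1/3.
a = torch.tensor([[1.0, 1.0, 0.0, 0.0]])
b = torch.tensor([[0.0, 1.0, 1.0, 0.0]])
print(mask_iou(a, b))  # tensor([[0.3333]])
```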
-
-
-def masks_iou(mask1, mask2, eps=1e-7):
- """
- Element-wise IoU between aligned mask pairs.
- mask1: [N, n], N is the number of predicted objects
- mask2: [N, n], N is the number of ground-truth objects
- Note: n = image_w x image_h (flattened pixels)
-
- return: masks iou, (N, )
- """
- intersection = (mask1 * mask2).sum(1).clamp(0) # (N, )
- union = (mask1.sum(1) + mask2.sum(1)) - intersection # (area1 + area2) - intersection; keeps shape (N, )
- return intersection / (union + eps)
-
-
-def masks2segments(masks, strategy='largest'):
- # Convert masks(n,160,160) into segments(n,xy)
- segments = []
- for x in masks.int().cpu().numpy().astype('uint8'):
- c = cv2.findContours(x, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[0]
- if c:
- if strategy == 'concat': # concatenate all segments
- c = np.concatenate([x.reshape(-1, 2) for x in c])
- elif strategy == 'largest': # select largest segment
- c = np.array(c[np.array([len(x) for x in c]).argmax()]).reshape(-1, 2)
- else:
- c = np.zeros((0, 2)) # no segments found
- segments.append(c.astype('float32'))
- return segments
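An illustrative run of `masks2segments` on a synthetic mask (a filled axis-aligned square, which OpenCV's CHAIN_APPROX_SIMPLE reduces to its four corners):

```python
import torch

m = torch.zeros(1, 160, 160)
m[0, 40:120, 40:120] = 1          # one filled square mask
segs = masks2segments(m)
print(segs[0].shape)              # (4, 2): the square's four corner points
```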
diff --git a/spaces/Artples/llama-2-7b-chat/README.md b/spaces/Artples/llama-2-7b-chat/README.md
deleted file mode 100644
index abd16640bb893287e1d68be11d87cff5b3793667..0000000000000000000000000000000000000000
--- a/spaces/Artples/llama-2-7b-chat/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
----
-title: llama-2-7b-chat
-emoji: 🚀
-colorFrom: green
-colorTo: green
-sdk: gradio
-sdk_version: 3.37.0
-app_file: app.py
-pinned: true
-duplicated_from: Artples/llama2-7b-chat
-license: apache-2.0
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
diff --git a/spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/pip/_vendor/webencodings/x_user_defined.py b/spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/pip/_vendor/webencodings/x_user_defined.py
deleted file mode 100644
index d16e326024c05a59548619e13258acad781e0a6d..0000000000000000000000000000000000000000
--- a/spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/pip/_vendor/webencodings/x_user_defined.py
+++ /dev/null
@@ -1,325 +0,0 @@
-# coding: utf-8
-"""
-
- webencodings.x_user_defined
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- An implementation of the x-user-defined encoding.
-
- :copyright: Copyright 2012 by Simon Sapin
- :license: BSD, see LICENSE for details.
-
-"""
-
-from __future__ import unicode_literals
-
-import codecs
-
-
-### Codec APIs
-
-class Codec(codecs.Codec):
-
- def encode(self, input, errors='strict'):
- return codecs.charmap_encode(input, errors, encoding_table)
-
- def decode(self, input, errors='strict'):
- return codecs.charmap_decode(input, errors, decoding_table)
-
-
-class IncrementalEncoder(codecs.IncrementalEncoder):
- def encode(self, input, final=False):
- return codecs.charmap_encode(input, self.errors, encoding_table)[0]
-
-
-class IncrementalDecoder(codecs.IncrementalDecoder):
- def decode(self, input, final=False):
- return codecs.charmap_decode(input, self.errors, decoding_table)[0]
-
-
-class StreamWriter(Codec, codecs.StreamWriter):
- pass
-
-
-class StreamReader(Codec, codecs.StreamReader):
- pass
-
-
-### encodings module API
-
-codec_info = codecs.CodecInfo(
- name='x-user-defined',
- encode=Codec().encode,
- decode=Codec().decode,
- incrementalencoder=IncrementalEncoder,
- incrementaldecoder=IncrementalDecoder,
- streamreader=StreamReader,
- streamwriter=StreamWriter,
-)
-
-
-### Decoding Table
-
-# Python 3:
-# for c in range(256): print(' %r' % chr(c if c < 128 else c + 0xF700))
-decoding_table = (
- '\x00'
- '\x01'
- '\x02'
- '\x03'
- '\x04'
- '\x05'
- '\x06'
- '\x07'
- '\x08'
- '\t'
- '\n'
- '\x0b'
- '\x0c'
- '\r'
- '\x0e'
- '\x0f'
- '\x10'
- '\x11'
- '\x12'
- '\x13'
- '\x14'
- '\x15'
- '\x16'
- '\x17'
- '\x18'
- '\x19'
- '\x1a'
- '\x1b'
- '\x1c'
- '\x1d'
- '\x1e'
- '\x1f'
- ' '
- '!'
- '"'
- '#'
- '$'
- '%'
- '&'
- "'"
- '('
- ')'
- '*'
- '+'
- ','
- '-'
- '.'
- '/'
- '0'
- '1'
- '2'
- '3'
- '4'
- '5'
- '6'
- '7'
- '8'
- '9'
- ':'
- ';'
- '<'
- '='
- '>'
- '?'
- '@'
- 'A'
- 'B'
- 'C'
- 'D'
- 'E'
- 'F'
- 'G'
- 'H'
- 'I'
- 'J'
- 'K'
- 'L'
- 'M'
- 'N'
- 'O'
- 'P'
- 'Q'
- 'R'
- 'S'
- 'T'
- 'U'
- 'V'
- 'W'
- 'X'
- 'Y'
- 'Z'
- '['
- '\\'
- ']'
- '^'
- '_'
- '`'
- 'a'
- 'b'
- 'c'
- 'd'
- 'e'
- 'f'
- 'g'
- 'h'
- 'i'
- 'j'
- 'k'
- 'l'
- 'm'
- 'n'
- 'o'
- 'p'
- 'q'
- 'r'
- 's'
- 't'
- 'u'
- 'v'
- 'w'
- 'x'
- 'y'
- 'z'
- '{'
- '|'
- '}'
- '~'
- '\x7f'
- '\uf780'
- '\uf781'
- '\uf782'
- '\uf783'
- '\uf784'
- '\uf785'
- '\uf786'
- '\uf787'
- '\uf788'
- '\uf789'
- '\uf78a'
- '\uf78b'
- '\uf78c'
- '\uf78d'
- '\uf78e'
- '\uf78f'
- '\uf790'
- '\uf791'
- '\uf792'
- '\uf793'
- '\uf794'
- '\uf795'
- '\uf796'
- '\uf797'
- '\uf798'
- '\uf799'
- '\uf79a'
- '\uf79b'
- '\uf79c'
- '\uf79d'
- '\uf79e'
- '\uf79f'
- '\uf7a0'
- '\uf7a1'
- '\uf7a2'
- '\uf7a3'
- '\uf7a4'
- '\uf7a5'
- '\uf7a6'
- '\uf7a7'
- '\uf7a8'
- '\uf7a9'
- '\uf7aa'
- '\uf7ab'
- '\uf7ac'
- '\uf7ad'
- '\uf7ae'
- '\uf7af'
- '\uf7b0'
- '\uf7b1'
- '\uf7b2'
- '\uf7b3'
- '\uf7b4'
- '\uf7b5'
- '\uf7b6'
- '\uf7b7'
- '\uf7b8'
- '\uf7b9'
- '\uf7ba'
- '\uf7bb'
- '\uf7bc'
- '\uf7bd'
- '\uf7be'
- '\uf7bf'
- '\uf7c0'
- '\uf7c1'
- '\uf7c2'
- '\uf7c3'
- '\uf7c4'
- '\uf7c5'
- '\uf7c6'
- '\uf7c7'
- '\uf7c8'
- '\uf7c9'
- '\uf7ca'
- '\uf7cb'
- '\uf7cc'
- '\uf7cd'
- '\uf7ce'
- '\uf7cf'
- '\uf7d0'
- '\uf7d1'
- '\uf7d2'
- '\uf7d3'
- '\uf7d4'
- '\uf7d5'
- '\uf7d6'
- '\uf7d7'
- '\uf7d8'
- '\uf7d9'
- '\uf7da'
- '\uf7db'
- '\uf7dc'
- '\uf7dd'
- '\uf7de'
- '\uf7df'
- '\uf7e0'
- '\uf7e1'
- '\uf7e2'
- '\uf7e3'
- '\uf7e4'
- '\uf7e5'
- '\uf7e6'
- '\uf7e7'
- '\uf7e8'
- '\uf7e9'
- '\uf7ea'
- '\uf7eb'
- '\uf7ec'
- '\uf7ed'
- '\uf7ee'
- '\uf7ef'
- '\uf7f0'
- '\uf7f1'
- '\uf7f2'
- '\uf7f3'
- '\uf7f4'
- '\uf7f5'
- '\uf7f6'
- '\uf7f7'
- '\uf7f8'
- '\uf7f9'
- '\uf7fa'
- '\uf7fb'
- '\uf7fc'
- '\uf7fd'
- '\uf7fe'
- '\uf7ff'
-)
-
-### Encoding table
-encoding_table = codecs.charmap_build(decoding_table)
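A short round-trip with the codec defined above: bytes 0x00-0x7F map to ASCII, while 0x80-0xFF map into the private-use range U+F780-U+F7FF:

```python
text, _ = codec_info.decode(b'abc\xff')
print(repr(text))   # 'abc\uf7ff'
data, _ = codec_info.encode(text)
print(data)         # b'abc\xff'
```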
diff --git a/spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
deleted file mode 100644
index ad5ee31ef53370fe7ec95799db390a33c3680b3b..0000000000000000000000000000000000000000
--- a/spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+++ /dev/null
@@ -1,1035 +0,0 @@
-# noqa
-# type: ignore
-# flake8: noqa
-# pylint: skip-file
-# mypy: ignore-errors
-# yapf: disable
-# pylama:skip=1
-
-
-# *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code ***
-
-
-VERSION = "2.15.3"
-import re
-from .fastjsonschema_exceptions import JsonSchemaValueException
-
-
-REGEX_PATTERNS = {
- '^.*$': re.compile('^.*$'),
- '.+': re.compile('.+'),
- '^.+$': re.compile('^.+$'),
- 'idn-email_re_pattern': re.compile('^[^@]+@[^@]+\\.[^@]+\\Z')
-}
-
-NoneType = type(None)
-
-def validate(data, custom_formats={}, name_prefix=None):
- validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats, (name_prefix or "data") + "")
- return data
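An illustrative call of the generated validator on a minimal `pyproject.toml`-style dict; no `format` checks fire for this input, so `custom_formats` can stay empty:

```python
minimal = {"build-system": {"requires": ["setuptools", "wheel"]}}
validate(minimal)  # returns the dict unchanged; invalid input raises JsonSchemaValueException
```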
-
-def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes 
``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': 
{'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_keys = set(data.keys())
- if "build-system" in data_keys:
- data_keys.remove("build-system")
- data__buildsystem = data["build-system"]
- if not isinstance(data__buildsystem, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must be object", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type')
- data__buildsystem_is_dict = isinstance(data__buildsystem, dict)
- if data__buildsystem_is_dict:
- data__buildsystem_len = len(data__buildsystem)
- if not all(prop in data__buildsystem for prop in ['requires']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain ['requires'] properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required')
- data__buildsystem_keys = set(data__buildsystem.keys())
- if "requires" in data__buildsystem_keys:
- data__buildsystem_keys.remove("requires")
- data__buildsystem__requires = data__buildsystem["requires"]
- if not isinstance(data__buildsystem__requires, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires must be array", value=data__buildsystem__requires, name="" + (name_prefix or "data") + ".build-system.requires", definition={'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, rule='type')
- data__buildsystem__requires_is_list = isinstance(data__buildsystem__requires, (list, tuple))
- if data__buildsystem__requires_is_list:
- data__buildsystem__requires_len = len(data__buildsystem__requires)
- for data__buildsystem__requires_x, data__buildsystem__requires_item in enumerate(data__buildsystem__requires):
- if not isinstance(data__buildsystem__requires_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + " must be string", value=data__buildsystem__requires_item, name="" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "build-backend" in data__buildsystem_keys:
- data__buildsystem_keys.remove("build-backend")
- data__buildsystem__buildbackend = data__buildsystem["build-backend"]
- if not isinstance(data__buildsystem__buildbackend, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be string", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='type')
- if isinstance(data__buildsystem__buildbackend, str):
- if not custom_formats["pep517-backend-reference"](data__buildsystem__buildbackend):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be pep517-backend-reference", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='format')
- if "backend-path" in data__buildsystem_keys:
- data__buildsystem_keys.remove("backend-path")
- data__buildsystem__backendpath = data__buildsystem["backend-path"]
- if not isinstance(data__buildsystem__backendpath, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path must be array", value=data__buildsystem__backendpath, name="" + (name_prefix or "data") + ".build-system.backend-path", definition={'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}, rule='type')
- data__buildsystem__backendpath_is_list = isinstance(data__buildsystem__backendpath, (list, tuple))
- if data__buildsystem__backendpath_is_list:
- data__buildsystem__backendpath_len = len(data__buildsystem__backendpath)
- for data__buildsystem__backendpath_x, data__buildsystem__backendpath_item in enumerate(data__buildsystem__backendpath):
- if not isinstance(data__buildsystem__backendpath_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + " must be string", value=data__buildsystem__backendpath_item, name="" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + "", definition={'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}, rule='type')
- if data__buildsystem_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must not contain "+str(data__buildsystem_keys)+" properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='additionalProperties')
- if "project" in data_keys:
- data_keys.remove("project")
- data__project = data["project"]
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data__project, custom_formats, (name_prefix or "data") + ".project")
- if "tool" in data_keys:
- data_keys.remove("tool")
- data__tool = data["tool"]
- if not isinstance(data__tool, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like 
structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
- data__tool_is_dict = isinstance(data__tool, dict)
- if data__tool_is_dict:
- data__tool_keys = set(data__tool.keys())
- if "distutils" in data__tool_keys:
- data__tool_keys.remove("distutils")
- data__tool__distutils = data__tool["distutils"]
- validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils")
- if "setuptools" in data__tool_keys:
- data__tool_keys.remove("setuptools")
- data__tool__setuptools = data__tool["setuptools"]
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
- if data_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
- return data
-
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_keys = set(data.keys())
- if "platforms" in data_keys:
- data_keys.remove("platforms")
- data__platforms = data["platforms"]
- if not isinstance(data__platforms, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms must be array", value=data__platforms, name="" + (name_prefix or "data") + ".platforms", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
- data__platforms_is_list = isinstance(data__platforms, (list, tuple))
- if data__platforms_is_list:
- data__platforms_len = len(data__platforms)
- for data__platforms_x, data__platforms_item in enumerate(data__platforms):
- if not isinstance(data__platforms_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + " must be string", value=data__platforms_item, name="" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "provides" in data_keys:
- data_keys.remove("provides")
- data__provides = data["provides"]
- if not isinstance(data__provides, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides must be array", value=data__provides, name="" + (name_prefix or "data") + ".provides", definition={'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type')
- data__provides_is_list = isinstance(data__provides, (list, tuple))
- if data__provides_is_list:
- data__provides_len = len(data__provides)
- for data__provides_x, data__provides_item in enumerate(data__provides):
- if not isinstance(data__provides_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be string", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type')
- if isinstance(data__provides_item, str):
- if not custom_formats["pep508-identifier"](data__provides_item):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be pep508-identifier", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format')
- if "obsoletes" in data_keys:
- data_keys.remove("obsoletes")
- data__obsoletes = data["obsoletes"]
- if not isinstance(data__obsoletes, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes must be array", value=data__obsoletes, name="" + (name_prefix or "data") + ".obsoletes", definition={'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type')
- data__obsoletes_is_list = isinstance(data__obsoletes, (list, tuple))
- if data__obsoletes_is_list:
- data__obsoletes_len = len(data__obsoletes)
- for data__obsoletes_x, data__obsoletes_item in enumerate(data__obsoletes):
- if not isinstance(data__obsoletes_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be string", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type')
- if isinstance(data__obsoletes_item, str):
- if not custom_formats["pep508-identifier"](data__obsoletes_item):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be pep508-identifier", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format')
- if "zip-safe" in data_keys:
- data_keys.remove("zip-safe")
- data__zipsafe = data["zip-safe"]
- if not isinstance(data__zipsafe, (bool)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type')
- if "script-files" in data_keys:
- data_keys.remove("script-files")
- data__scriptfiles = data["script-files"]
- if not isinstance(data__scriptfiles, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type')
- data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, tuple))
- if data__scriptfiles_is_list:
- data__scriptfiles_len = len(data__scriptfiles)
- for data__scriptfiles_x, data__scriptfiles_item in enumerate(data__scriptfiles):
- if not isinstance(data__scriptfiles_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + " must be string", value=data__scriptfiles_item, name="" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "eager-resources" in data_keys:
- data_keys.remove("eager-resources")
- data__eagerresources = data["eager-resources"]
- if not isinstance(data__eagerresources, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type')
- data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple))
- if data__eagerresources_is_list:
- data__eagerresources_len = len(data__eagerresources)
- for data__eagerresources_x, data__eagerresources_item in enumerate(data__eagerresources):
- if not isinstance(data__eagerresources_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + " must be string", value=data__eagerresources_item, name="" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "packages" in data_keys:
- data_keys.remove("packages")
- data__packages = data["packages"]
- data__packages_one_of_count1 = 0
- if data__packages_one_of_count1 < 2:
- try:
- if not isinstance(data__packages, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, rule='type')
- data__packages_is_list = isinstance(data__packages, (list, tuple))
- if data__packages_is_list:
- data__packages_len = len(data__packages)
- for data__packages_x, data__packages_item in enumerate(data__packages):
- if not isinstance(data__packages_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be string", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
- if isinstance(data__packages_item, str):
- if not custom_formats["python-module-name"](data__packages_item):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be python-module-name", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
- data__packages_one_of_count1 += 1
- except JsonSchemaValueException: pass
- if data__packages_one_of_count1 < 2:
- try:
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages")
- data__packages_one_of_count1 += 1
- except JsonSchemaValueException: pass
- if data__packages_one_of_count1 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
- if "package-dir" in data_keys:
- data_keys.remove("package-dir")
- data__packagedir = data["package-dir"]
- if not isinstance(data__packagedir, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type')
- data__packagedir_is_dict = isinstance(data__packagedir, dict)
- if data__packagedir_is_dict:
- data__packagedir_keys = set(data__packagedir.keys())
- for data__packagedir_key, data__packagedir_val in data__packagedir.items():
- if REGEX_PATTERNS['^.*$'].search(data__packagedir_key):
- if data__packagedir_key in data__packagedir_keys:
- data__packagedir_keys.remove(data__packagedir_key)
- if not isinstance(data__packagedir_val, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if data__packagedir_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties')
- data__packagedir_len = len(data__packagedir)
- if data__packagedir_len != 0:
- data__packagedir_property_names = True
- for data__packagedir_key in data__packagedir:
- try:
- data__packagedir_key_one_of_count2 = 0
- if data__packagedir_key_one_of_count2 < 2:
- try:
- if isinstance(data__packagedir_key, str):
- if not custom_formats["python-module-name"](data__packagedir_key):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be python-module-name", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'format': 'python-module-name'}, rule='format')
- data__packagedir_key_one_of_count2 += 1
- except JsonSchemaValueException: pass
- if data__packagedir_key_one_of_count2 < 2:
- try:
- if data__packagedir_key != "":
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const')
- data__packagedir_key_one_of_count2 += 1
- except JsonSchemaValueException: pass
- if data__packagedir_key_one_of_count2 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be valid exactly by one definition" + (" (" + str(data__packagedir_key_one_of_count2) + " matches found)"), value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, rule='oneOf')
- except JsonSchemaValueException:
- data__packagedir_property_names = False
- if not data__packagedir_property_names:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames')
- if "package-data" in data_keys:
- data_keys.remove("package-data")
- data__packagedata = data["package-data"]
- if not isinstance(data__packagedata, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
- data__packagedata_is_dict = isinstance(data__packagedata, dict)
- if data__packagedata_is_dict:
- data__packagedata_keys = set(data__packagedata.keys())
- for data__packagedata_key, data__packagedata_val in data__packagedata.items():
- if REGEX_PATTERNS['^.*$'].search(data__packagedata_key):
- if data__packagedata_key in data__packagedata_keys:
- data__packagedata_keys.remove(data__packagedata_key)
- if not isinstance(data__packagedata_val, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + " must be array", value=data__packagedata_val, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
- data__packagedata_val_is_list = isinstance(data__packagedata_val, (list, tuple))
- if data__packagedata_val_is_list:
- data__packagedata_val_len = len(data__packagedata_val)
- for data__packagedata_val_x, data__packagedata_val_item in enumerate(data__packagedata_val):
- if not isinstance(data__packagedata_val_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + " must be string", value=data__packagedata_val_item, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if data__packagedata_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
- data__packagedata_len = len(data__packagedata)
- if data__packagedata_len != 0:
- data__packagedata_property_names = True
- for data__packagedata_key in data__packagedata:
- try:
- data__packagedata_key_one_of_count3 = 0
- if data__packagedata_key_one_of_count3 < 2:
- try:
- if isinstance(data__packagedata_key, str):
- if not custom_formats["python-module-name"](data__packagedata_key):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'format': 'python-module-name'}, rule='format')
- data__packagedata_key_one_of_count3 += 1
- except JsonSchemaValueException: pass
- if data__packagedata_key_one_of_count3 < 2:
- try:
- if data__packagedata_key != "*":
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const')
- data__packagedata_key_one_of_count3 += 1
- except JsonSchemaValueException: pass
- if data__packagedata_key_one_of_count3 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be valid exactly by one definition" + (" (" + str(data__packagedata_key_one_of_count3) + " matches found)"), value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
- except JsonSchemaValueException:
- data__packagedata_property_names = False
- if not data__packagedata_property_names:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
- if "include-package-data" in data_keys:
- data_keys.remove("include-package-data")
- data__includepackagedata = data["include-package-data"]
- if not isinstance(data__includepackagedata, (bool)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-package-data must be boolean", value=data__includepackagedata, name="" + (name_prefix or "data") + ".include-package-data", definition={'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, rule='type')
- if "exclude-package-data" in data_keys:
- data_keys.remove("exclude-package-data")
- data__excludepackagedata = data["exclude-package-data"]
- if not isinstance(data__excludepackagedata, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
- data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict)
- if data__excludepackagedata_is_dict:
- data__excludepackagedata_keys = set(data__excludepackagedata.keys())
- for data__excludepackagedata_key, data__excludepackagedata_val in data__excludepackagedata.items():
- if REGEX_PATTERNS['^.*$'].search(data__excludepackagedata_key):
- if data__excludepackagedata_key in data__excludepackagedata_keys:
- data__excludepackagedata_keys.remove(data__excludepackagedata_key)
- if not isinstance(data__excludepackagedata_val, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + " must be array", value=data__excludepackagedata_val, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
- data__excludepackagedata_val_is_list = isinstance(data__excludepackagedata_val, (list, tuple))
- if data__excludepackagedata_val_is_list:
- data__excludepackagedata_val_len = len(data__excludepackagedata_val)
- for data__excludepackagedata_val_x, data__excludepackagedata_val_item in enumerate(data__excludepackagedata_val):
- if not isinstance(data__excludepackagedata_val_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + " must be string", value=data__excludepackagedata_val_item, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if data__excludepackagedata_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
- data__excludepackagedata_len = len(data__excludepackagedata)
- if data__excludepackagedata_len != 0:
- data__excludepackagedata_property_names = True
- for data__excludepackagedata_key in data__excludepackagedata:
- try:
- data__excludepackagedata_key_one_of_count4 = 0
- if data__excludepackagedata_key_one_of_count4 < 2:
- try:
- if isinstance(data__excludepackagedata_key, str):
- if not custom_formats["python-module-name"](data__excludepackagedata_key):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'format': 'python-module-name'}, rule='format')
- data__excludepackagedata_key_one_of_count4 += 1
- except JsonSchemaValueException: pass
- if data__excludepackagedata_key_one_of_count4 < 2:
- try:
- if data__excludepackagedata_key != "*":
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const')
- data__excludepackagedata_key_one_of_count4 += 1
- except JsonSchemaValueException: pass
- if data__excludepackagedata_key_one_of_count4 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be valid exactly by one definition" + (" (" + str(data__excludepackagedata_key_one_of_count4) + " matches found)"), value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
- except JsonSchemaValueException:
- data__excludepackagedata_property_names = False
- if not data__excludepackagedata_property_names:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
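- # ``namespace-packages``: array of strings, each checked against the
- # python-module-name format.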
- if "namespace-packages" in data_keys:
- data_keys.remove("namespace-packages")
- data__namespacepackages = data["namespace-packages"]
- if not isinstance(data__namespacepackages, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type')
- data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple))
- if data__namespacepackages_is_list:
- data__namespacepackages_len = len(data__namespacepackages)
- for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages):
- if not isinstance(data__namespacepackages_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
- if isinstance(data__namespacepackages_item, str):
- if not custom_formats["python-module-name"](data__namespacepackages_item):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
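- # ``py-modules``: validated exactly like ``namespace-packages`` (array of module names).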
- if "py-modules" in data_keys:
- data_keys.remove("py-modules")
- data__pymodules = data["py-modules"]
- if not isinstance(data__pymodules, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type')
- data__pymodules_is_list = isinstance(data__pymodules, (list, tuple))
- if data__pymodules_is_list:
- data__pymodules_len = len(data__pymodules)
- for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules):
- if not isinstance(data__pymodules_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
- if isinstance(data__pymodules_item, str):
- if not custom_formats["python-module-name"](data__pymodules_item):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
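- # ``data-files`` (deprecated): mapping of directories to arrays of glob strings;
- # only the values are constrained, keys may be any string.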
- if "data-files" in data_keys:
- data_keys.remove("data-files")
- data__datafiles = data["data-files"]
- if not isinstance(data__datafiles, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
- data__datafiles_is_dict = isinstance(data__datafiles, dict)
- if data__datafiles_is_dict:
- data__datafiles_keys = set(data__datafiles.keys())
- for data__datafiles_key, data__datafiles_val in data__datafiles.items():
- if REGEX_PATTERNS['^.*$'].search(data__datafiles_key):
- if data__datafiles_key in data__datafiles_keys:
- data__datafiles_keys.remove(data__datafiles_key)
- if not isinstance(data__datafiles_val, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + " must be array", value=data__datafiles_val, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
- data__datafiles_val_is_list = isinstance(data__datafiles_val, (list, tuple))
- if data__datafiles_val_is_list:
- data__datafiles_val_len = len(data__datafiles_val)
- for data__datafiles_val_x, data__datafiles_val_item in enumerate(data__datafiles_val):
- if not isinstance(data__datafiles_val_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + " must be string", value=data__datafiles_val_item, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
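- # ``cmdclass``: mapping of command names to dotted class names
- # (python-qualified-identifier format).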
- if "cmdclass" in data_keys:
- data_keys.remove("cmdclass")
- data__cmdclass = data["cmdclass"]
- if not isinstance(data__cmdclass, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass must be object", value=data__cmdclass, name="" + (name_prefix or "data") + ".cmdclass", definition={'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, rule='type')
- data__cmdclass_is_dict = isinstance(data__cmdclass, dict)
- if data__cmdclass_is_dict:
- data__cmdclass_keys = set(data__cmdclass.keys())
- for data__cmdclass_key, data__cmdclass_val in data__cmdclass.items():
- if REGEX_PATTERNS['^.*$'].search(data__cmdclass_key):
- if data__cmdclass_key in data__cmdclass_keys:
- data__cmdclass_keys.remove(data__cmdclass_key)
- if not isinstance(data__cmdclass_val, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be string", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type')
- if isinstance(data__cmdclass_val, str):
- if not custom_formats["python-qualified-identifier"](data__cmdclass_val):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be python-qualified-identifier", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format')
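- # ``license-files``: array of glob strings; when absent, a default list is injected below.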
- if "license-files" in data_keys:
- data_keys.remove("license-files")
- data__licensefiles = data["license-files"]
- if not isinstance(data__licensefiles, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type')
- data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple))
- if data__licensefiles_is_list:
- data__licensefiles_len = len(data__licensefiles)
- for data__licensefiles_x, data__licensefiles_item in enumerate(data__licensefiles):
- if not isinstance(data__licensefiles_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + " must be string", value=data__licensefiles_item, name="" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- else: data["license-files"] = ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*']
- if "dynamic" in data_keys:
- data_keys.remove("dynamic")
- data__dynamic = data["dynamic"]
- if not isinstance(data__dynamic, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='type')
- data__dynamic_is_dict = isinstance(data__dynamic, dict)
- if data__dynamic_is_dict:
- data__dynamic_keys = set(data__dynamic.keys())
- if "version" in data__dynamic_keys:
- data__dynamic_keys.remove("version")
- data__dynamic__version = data__dynamic["version"]
- data__dynamic__version_one_of_count5 = 0
- if data__dynamic__version_one_of_count5 < 2:
- try:
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
- data__dynamic__version_one_of_count5 += 1
- except JsonSchemaValueException: pass
- if data__dynamic__version_one_of_count5 < 2:
- try:
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
- data__dynamic__version_one_of_count5 += 1
- except JsonSchemaValueException: pass
- if data__dynamic__version_one_of_count5 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf')
- if "classifiers" in data__dynamic_keys:
- data__dynamic_keys.remove("classifiers")
- data__dynamic__classifiers = data__dynamic["classifiers"]
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers")
- if "description" in data__dynamic_keys:
- data__dynamic_keys.remove("description")
- data__dynamic__description = data__dynamic["description"]
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description")
- if "dependencies" in data__dynamic_keys:
- data__dynamic_keys.remove("dependencies")
- data__dynamic__dependencies = data__dynamic["dependencies"]
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__dependencies, custom_formats, (name_prefix or "data") + ".dynamic.dependencies")
- if "entry-points" in data__dynamic_keys:
- data__dynamic_keys.remove("entry-points")
- data__dynamic__entrypoints = data__dynamic["entry-points"]
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points")
- if "optional-dependencies" in data__dynamic_keys:
- data__dynamic_keys.remove("optional-dependencies")
- data__dynamic__optionaldependencies = data__dynamic["optional-dependencies"]
- if not isinstance(data__dynamic__optionaldependencies, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be object", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='type')
- data__dynamic__optionaldependencies_is_dict = isinstance(data__dynamic__optionaldependencies, dict)
- if data__dynamic__optionaldependencies_is_dict:
- data__dynamic__optionaldependencies_keys = set(data__dynamic__optionaldependencies.keys())
- for data__dynamic__optionaldependencies_key, data__dynamic__optionaldependencies_val in data__dynamic__optionaldependencies.items():
- if REGEX_PATTERNS['.+'].search(data__dynamic__optionaldependencies_key):
- if data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies_keys:
- data__dynamic__optionaldependencies_keys.remove(data__dynamic__optionaldependencies_key)
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__optionaldependencies_val, custom_formats, (name_prefix or "data") + ".dynamic.optional-dependencies.{data__dynamic__optionaldependencies_key}")
- if data__dynamic__optionaldependencies_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must not contain "+str(data__dynamic__optionaldependencies_keys)+" properties", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='additionalProperties')
- data__dynamic__optionaldependencies_len = len(data__dynamic__optionaldependencies)
- if data__dynamic__optionaldependencies_len != 0:
- data__dynamic__optionaldependencies_property_names = True
- for data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies:
- try:
- if isinstance(data__dynamic__optionaldependencies_key, str):
- if not custom_formats["python-identifier"](data__dynamic__optionaldependencies_key):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be python-identifier", value=data__dynamic__optionaldependencies_key, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'format': 'python-identifier'}, rule='format')
- except JsonSchemaValueException:
- data__dynamic__optionaldependencies_property_names = False
- if not data__dynamic__optionaldependencies_property_names:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be named by propertyName definition", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='propertyNames')
- if "readme" in data__dynamic_keys:
- data__dynamic_keys.remove("readme")
- data__dynamic__readme = data__dynamic["readme"]
- data__dynamic__readme_any_of_count6 = 0
- if not data__dynamic__readme_any_of_count6:
- try:
- validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme")
- data__dynamic__readme_any_of_count6 += 1
- except JsonSchemaValueException: pass
- if not data__dynamic__readme_any_of_count6:
- try:
- data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
- if data__dynamic__readme_is_dict:
- data__dynamic__readme_keys = set(data__dynamic__readme.keys())
- if "content-type" in data__dynamic__readme_keys:
- data__dynamic__readme_keys.remove("content-type")
- data__dynamic__readme__contenttype = data__dynamic__readme["content-type"]
- if not isinstance(data__dynamic__readme__contenttype, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="" + (name_prefix or "data") + ".dynamic.readme.content-type", definition={'type': 'string'}, rule='type')
- data__dynamic__readme_any_of_count6 += 1
- except JsonSchemaValueException: pass
- if not data__dynamic__readme_any_of_count6:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf')
- data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
- if data__dynamic__readme_is_dict:
- data__dynamic__readme_len = len(data__dynamic__readme)
- if not all(prop in data__dynamic__readme for prop in ['file']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required')
- if data__dynamic_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='additionalProperties')
- if data_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
- return data
-
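- # Validator for the 'file:' directive: an object whose required ``file`` key is
- # either a string or an array of strings (oneOf), with no extra properties.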
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_len = len(data)
- if not all(prop in data for prop in ['file']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['file'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required')
- data_keys = set(data.keys())
- if "file" in data_keys:
- data_keys.remove("file")
- data__file = data["file"]
- data__file_one_of_count7 = 0
- if data__file_one_of_count7 < 2:
- try:
- if not isinstance(data__file, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'string'}, rule='type')
- data__file_one_of_count7 += 1
- except JsonSchemaValueException: pass
- if data__file_one_of_count7 < 2:
- try:
- if not isinstance(data__file, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
- data__file_is_list = isinstance(data__file, (list, tuple))
- if data__file_is_list:
- data__file_len = len(data__file)
- for data__file_x, data__file_item in enumerate(data__file):
- if not isinstance(data__file_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- data__file_one_of_count7 += 1
- except JsonSchemaValueException: pass
- if data__file_one_of_count7 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count7) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
- if data_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties')
- return data
-
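- # Validator for the 'attr:' directive: an object with a single required ``attr`` string.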
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_len = len(data)
- if not all(prop in data for prop in ['attr']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['attr'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required')
- data_keys = set(data.keys())
- if "attr" in data_keys:
- data_keys.remove("attr")
- data__attr = data["attr"]
- if not isinstance(data__attr, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string'}, rule='type')
- if data_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties')
- return data
-
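- # Validator for the 'find:' directive: an optional ``find`` object with
- # ``where``/``exclude``/``include`` string arrays and a ``namespaces`` boolean.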
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_keys = set(data.keys())
- if "find" in data_keys:
- data_keys.remove("find")
- data__find = data["find"]
- if not isinstance(data__find, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must be object", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='type')
- data__find_is_dict = isinstance(data__find, dict)
- if data__find_is_dict:
- data__find_keys = set(data__find.keys())
- if "where" in data__find_keys:
- data__find_keys.remove("where")
- data__find__where = data__find["where"]
- if not isinstance(data__find__where, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where must be array", value=data__find__where, name="" + (name_prefix or "data") + ".find.where", definition={'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, rule='type')
- data__find__where_is_list = isinstance(data__find__where, (list, tuple))
- if data__find__where_is_list:
- data__find__where_len = len(data__find__where)
- for data__find__where_x, data__find__where_item in enumerate(data__find__where):
- if not isinstance(data__find__where_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + " must be string", value=data__find__where_item, name="" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "exclude" in data__find_keys:
- data__find_keys.remove("exclude")
- data__find__exclude = data__find["exclude"]
- if not isinstance(data__find__exclude, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude must be array", value=data__find__exclude, name="" + (name_prefix or "data") + ".find.exclude", definition={'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type')
- data__find__exclude_is_list = isinstance(data__find__exclude, (list, tuple))
- if data__find__exclude_is_list:
- data__find__exclude_len = len(data__find__exclude)
- for data__find__exclude_x, data__find__exclude_item in enumerate(data__find__exclude):
- if not isinstance(data__find__exclude_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + " must be string", value=data__find__exclude_item, name="" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "include" in data__find_keys:
- data__find_keys.remove("include")
- data__find__include = data__find["include"]
- if not isinstance(data__find__include, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include must be array", value=data__find__include, name="" + (name_prefix or "data") + ".find.include", definition={'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type')
- data__find__include_is_list = isinstance(data__find__include, (list, tuple))
- if data__find__include_is_list:
- data__find__include_len = len(data__find__include)
- for data__find__include_x, data__find__include_item in enumerate(data__find__include):
- if not isinstance(data__find__include_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + " must be string", value=data__find__include_item, name="" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "namespaces" in data__find_keys:
- data__find_keys.remove("namespaces")
- data__find__namespaces = data__find["namespaces"]
- if not isinstance(data__find__namespaces, (bool)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.namespaces must be boolean", value=data__find__namespaces, name="" + (name_prefix or "data") + ".find.namespaces", definition={'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}, rule='type')
- if data__find_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must not contain "+str(data__find_keys)+" properties", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='additionalProperties')
- if data_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties')
- return data
-
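For orientation, the validator deleted above enforces the `find:` directive used by `[tool.setuptools.packages.find]`. The sketch below re-implements its core checks in plain Python for illustration; `check_find_directive` is a hypothetical stand-in for the generated function, and `ValueError` stands in for the module's `JsonSchemaValueException`.

```python
# Hypothetical, simplified mirror of the generated "find:" directive checks.
ALLOWED_FIND_KEYS = {"where", "exclude", "include", "namespaces"}

def check_find_directive(data: dict) -> dict:
    find = data.get("find", {})
    if not isinstance(find, dict):
        raise ValueError("find must be object")
    extra = set(find) - ALLOWED_FIND_KEYS
    if extra:  # mirrors additionalProperties: False
        raise ValueError(f"find must not contain {extra} properties")
    for key in ("where", "exclude", "include"):
        if key in find:
            value = find[key]
            if not isinstance(value, (list, tuple)) or not all(
                isinstance(item, str) for item in value
            ):
                raise ValueError(f"find.{key} must be an array of strings")
    if "namespaces" in find and not isinstance(find["namespaces"], bool):
        raise ValueError("find.namespaces must be boolean")
    return data

# Mirrors [tool.setuptools.packages.find] in pyproject.toml:
check_find_directive({"find": {"where": ["src"], "exclude": ["tests*"], "namespaces": True}})
```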
-def validate_https___docs_python_org_3_install(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_keys = set(data.keys())
- if "global" in data_keys:
- data_keys.remove("global")
- data__global = data["global"]
- if not isinstance(data__global, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".global must be object", value=data__global, name="" + (name_prefix or "data") + ".global", definition={'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}, rule='type')
- for data_key, data_val in data.items():
- if REGEX_PATTERNS['.+'].search(data_key):
- if data_key in data_keys:
- data_keys.remove(data_key)
- if not isinstance(data_val, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be object", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'object'}, rule='type')
- return data
-
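A usage sketch for the `tool.distutils` validator above, assuming the generated module is importable as `fastjsonschema_validations` (the real import path depends on how the project vendors it):

```python
# Assumed import path; setuptools normally vendors this generated module.
from fastjsonschema_validations import (
    JsonSchemaValueException,
    validate_https___docs_python_org_3_install,
)

distutils_table = {
    "global": {"verbose": 1},       # matched by the explicit 'global' property
    "sdist": {"formats": "gztar"},  # matched by the '.+' patternProperties rule
}

try:
    validate_https___docs_python_org_3_install(distutils_table)
except JsonSchemaValueException as exc:
    print(exc.name, exc.rule)  # e.g. "data.sdist", "type" if a subtable is not a dict
```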
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_len = len(data)
- if not all(prop in data for prop in ['name']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
- data_keys = set(data.keys())
- if "name" in data_keys:
- data_keys.remove("name")
- data__name = data["name"]
- if not isinstance(data__name, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='type')
- if isinstance(data__name, str):
- if not custom_formats["pep508-identifier"](data__name):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be pep508-identifier", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='format')
- if "version" in data_keys:
- data_keys.remove("version")
- data__version = data["version"]
- if not isinstance(data__version, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='type')
- if isinstance(data__version, str):
- if not custom_formats["pep440"](data__version):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be pep440", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='format')
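The `format` checks above are delegated to callables supplied via `custom_formats`. A sketch of plugging in concrete implementations, assuming the third-party `packaging` library is available (the regex is the name pattern from PEP 508):

```python
import re
from packaging.version import InvalidVersion, Version  # assumed dependency

def is_pep440(value: str) -> bool:
    """True if ``value`` parses as a PEP 440 version."""
    try:
        Version(value)
        return True
    except InvalidVersion:
        return False

# Name pattern taken from PEP 508 (case-insensitive).
_PEP508_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE)

custom_formats = {
    "pep440": is_pep440,
    "pep508-identifier": lambda value: bool(_PEP508_NAME.match(value)),
}
```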
- if "description" in data_keys:
- data_keys.remove("description")
- data__description = data["description"]
- if not isinstance(data__description, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string', '$$description': ['The `summary description of the project', '`_']}, rule='type')
- if "readme" in data_keys:
- data_keys.remove("readme")
- data__readme = data["readme"]
- data__readme_one_of_count8 = 0
- if data__readme_one_of_count8 < 2:
- try:
- if not isinstance(data__readme, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
- data__readme_one_of_count8 += 1
- except JsonSchemaValueException: pass
- if data__readme_one_of_count8 < 2:
- try:
- if not isinstance(data__readme, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
- data__readme_any_of_count9 = 0
- if not data__readme_any_of_count9:
- try:
- data__readme_is_dict = isinstance(data__readme, dict)
- if data__readme_is_dict:
- data__readme_len = len(data__readme)
- if not all(prop in data__readme for prop in ['file']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['file'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required')
- data__readme_keys = set(data__readme.keys())
- if "file" in data__readme_keys:
- data__readme_keys.remove("file")
- data__readme__file = data__readme["file"]
- if not isinstance(data__readme__file, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
- data__readme_any_of_count9 += 1
- except JsonSchemaValueException: pass
- if not data__readme_any_of_count9:
- try:
- data__readme_is_dict = isinstance(data__readme, dict)
- if data__readme_is_dict:
- data__readme_len = len(data__readme)
- if not all(prop in data__readme for prop in ['text']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['text'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required')
- data__readme_keys = set(data__readme.keys())
- if "text" in data__readme_keys:
- data__readme_keys.remove("text")
- data__readme__text = data__readme["text"]
- if not isinstance(data__readme__text, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
- data__readme_any_of_count9 += 1
- except JsonSchemaValueException: pass
- if not data__readme_any_of_count9:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
- data__readme_is_dict = isinstance(data__readme, dict)
- if data__readme_is_dict:
- data__readme_len = len(data__readme)
- if not all(prop in data__readme for prop in ['content-type']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['content-type'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required')
- data__readme_keys = set(data__readme.keys())
- if "content-type" in data__readme_keys:
- data__readme_keys.remove("content-type")
- data__readme__contenttype = data__readme["content-type"]
- if not isinstance(data__readme__contenttype, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
- data__readme_one_of_count8 += 1
- except JsonSchemaValueException: pass
- if data__readme_one_of_count8 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count8) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
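The `oneOf` handling above counts how many branches accept the value and requires exactly one match. Concretely, these are the shapes of `project.readme` that pass (illustrative data only):

```python
# Exactly one oneOf branch must match each of these:
readme_as_path = "README.md"  # string branch; .md implies text/markdown
readme_as_file = {"file": "README.rst", "content-type": "text/x-rst"}
readme_as_text = {"text": "A short description.", "content-type": "text/markdown"}

# A table missing 'content-type' fails the allOf inside the object branch,
# so no branch matches and rule='oneOf' is raised:
readme_invalid = {"file": "README.rst"}
```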
- if "requires-python" in data_keys:
- data_keys.remove("requires-python")
- data__requirespython = data["requires-python"]
- if not isinstance(data__requirespython, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be string", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, rule='type')
- if isinstance(data__requirespython, str):
- if not custom_formats["pep508-versionspec"](data__requirespython):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be pep508-versionspec", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, rule='format')
- if "license" in data_keys:
- data_keys.remove("license")
- data__license = data["license"]
- data__license_one_of_count10 = 0
- if data__license_one_of_count10 < 2:
- try:
- data__license_is_dict = isinstance(data__license, dict)
- if data__license_is_dict:
- data__license_len = len(data__license)
- if not all(prop in data__license for prop in ['file']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['file'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
- data__license_keys = set(data__license.keys())
- if "file" in data__license_keys:
- data__license_keys.remove("file")
- data__license__file = data__license["file"]
- if not isinstance(data__license__file, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
- data__license_one_of_count10 += 1
- except JsonSchemaValueException: pass
- if data__license_one_of_count10 < 2:
- try:
- data__license_is_dict = isinstance(data__license, dict)
- if data__license_is_dict:
- data__license_len = len(data__license)
- if not all(prop in data__license for prop in ['text']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['text'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='required')
- data__license_keys = set(data__license.keys())
- if "text" in data__license_keys:
- data__license_keys.remove("text")
- data__license__text = data__license["text"]
- if not isinstance(data__license__text, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}, rule='type')
- data__license_one_of_count10 += 1
- except JsonSchemaValueException: pass
- if data__license_one_of_count10 != 1:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count10) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
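`project.license` uses the same exactly-one-branch counter; the subtlety is that a table carrying both keys matches both branches and is rejected. Illustrative values, assuming nothing beyond the schema above:

```python
license_by_file = {"file": "LICENSE"}          # matches only the 'file' branch
license_by_text = {"text": "MIT License ..."}  # matches only the 'text' branch

# Matches *both* branches, so the counter ends at 2 and the generated code
# raises "license must be valid exactly by one definition (2 matches found)":
license_invalid = {"file": "LICENSE", "text": "MIT License ..."}
```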
- if "authors" in data_keys:
- data_keys.remove("authors")
- data__authors = data["authors"]
- if not isinstance(data__authors, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".authors must be array", value=data__authors, name="" + (name_prefix or "data") + ".authors", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, rule='type')
- data__authors_is_list = isinstance(data__authors, (list, tuple))
- if data__authors_is_list:
- data__authors_len = len(data__authors)
- for data__authors_x, data__authors_item in enumerate(data__authors):
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]")
- if "maintainers" in data_keys:
- data_keys.remove("maintainers")
- data__maintainers = data["maintainers"]
- if not isinstance(data__maintainers, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".maintainers must be array", value=data__maintainers, name="" + (name_prefix or "data") + ".maintainers", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, rule='type')
- data__maintainers_is_list = isinstance(data__maintainers, (list, tuple))
- if data__maintainers_is_list:
- data__maintainers_len = len(data__maintainers)
- for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers):
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]")
- if "keywords" in data_keys:
- data_keys.remove("keywords")
- data__keywords = data["keywords"]
- if not isinstance(data__keywords, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords must be array", value=data__keywords, name="" + (name_prefix or "data") + ".keywords", definition={'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, rule='type')
- data__keywords_is_list = isinstance(data__keywords, (list, tuple))
- if data__keywords_is_list:
- data__keywords_len = len(data__keywords)
- for data__keywords_x, data__keywords_item in enumerate(data__keywords):
- if not isinstance(data__keywords_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + " must be string", value=data__keywords_item, name="" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
- if "classifiers" in data_keys:
- data_keys.remove("classifiers")
- data__classifiers = data["classifiers"]
- if not isinstance(data__classifiers, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers must be array", value=data__classifiers, name="" + (name_prefix or "data") + ".classifiers", definition={'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, rule='type')
- data__classifiers_is_list = isinstance(data__classifiers, (list, tuple))
- if data__classifiers_is_list:
- data__classifiers_len = len(data__classifiers)
- for data__classifiers_x, data__classifiers_item in enumerate(data__classifiers):
- if not isinstance(data__classifiers_item, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be string", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, rule='type')
- if isinstance(data__classifiers_item, str):
- if not custom_formats["trove-classifier"](data__classifiers_item):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be trove-classifier", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, rule='format')
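As with the PEP 440/508 formats, `trove-classifier` is looked up in `custom_formats`. A sketch of one way to supply it, assuming the third-party `trove-classifiers` package:

```python
import trove_classifiers  # assumed dependency; ships the canonical PyPI list

def is_trove_classifier(value: str) -> bool:
    """True if ``value`` is a known PyPI classifier string."""
    return value in trove_classifiers.classifiers

is_trove_classifier("Programming Language :: Python :: 3")  # True
```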
- if "urls" in data_keys:
- data_keys.remove("urls")
- data__urls = data["urls"]
- if not isinstance(data__urls, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must be object", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='type')
- data__urls_is_dict = isinstance(data__urls, dict)
- if data__urls_is_dict:
- data__urls_keys = set(data__urls.keys())
- for data__urls_key, data__urls_val in data__urls.items():
- if REGEX_PATTERNS['^.+$'].search(data__urls_key):
- if data__urls_key in data__urls_keys:
- data__urls_keys.remove(data__urls_key)
- if not isinstance(data__urls_val, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be string", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='type')
- if isinstance(data__urls_val, str):
- if not custom_formats["url"](data__urls_val):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be url", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='format')
- if data__urls_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must not contain "+str(data__urls_keys)+" properties", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='additionalProperties')
- if "scripts" in data_keys:
- data_keys.remove("scripts")
- data__scripts = data["scripts"]
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts")
- if "gui-scripts" in data_keys:
- data_keys.remove("gui-scripts")
- data__guiscripts = data["gui-scripts"]
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts")
- if "entry-points" in data_keys:
- data_keys.remove("entry-points")
- data__entrypoints = data["entry-points"]
- data__entrypoints_is_dict = isinstance(data__entrypoints, dict)
- if data__entrypoints_is_dict:
- data__entrypoints_keys = set(data__entrypoints.keys())
- for data__entrypoints_key, data__entrypoints_val in data__entrypoints.items():
- if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key):
- if data__entrypoints_key in data__entrypoints_keys:
- data__entrypoints_keys.remove(data__entrypoints_key)
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}")
- if data__entrypoints_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must not contain "+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='additionalProperties')
- data__entrypoints_len = len(data__entrypoints)
- if data__entrypoints_len != 0:
- data__entrypoints_property_names = True
- for data__entrypoints_key in data__entrypoints:
- try:
- if isinstance(data__entrypoints_key, str):
- if not custom_formats["python-entrypoint-group"](data__entrypoints_key):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be python-entrypoint-group", value=data__entrypoints_key, name="" + (name_prefix or "data") + ".entry-points", definition={'format': 'python-entrypoint-group'}, rule='format')
- except JsonSchemaValueException:
- data__entrypoints_property_names = False
- if not data__entrypoints_property_names:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be named by propertyName definition", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='propertyNames')
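The `entry-points` block above layers three checks: each pattern-matched value is validated as an entry-point group, `additionalProperties: False` rejects anything left over, and `propertyNames` validates every group name. The accepted shape looks like:

```python
# Illustrative project.entry-points table:
entry_points = {
    "console_scripts": {             # group name checked as python-entrypoint-group
        "mytool": "mypkg.cli:main",  # value checked as python-entrypoint-reference
    },
    "mypkg.plugins": {
        "builtin": "mypkg.plugins.builtin",
    },
}
```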
- if "dependencies" in data_keys:
- data_keys.remove("dependencies")
- data__dependencies = data["dependencies"]
- if not isinstance(data__dependencies, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependencies must be array", value=data__dependencies, name="" + (name_prefix or "data") + ".dependencies", definition={'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type')
- data__dependencies_is_list = isinstance(data__dependencies, (list, tuple))
- if data__dependencies_is_list:
- data__dependencies_len = len(data__dependencies)
- for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies):
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]")
- if "optional-dependencies" in data_keys:
- data_keys.remove("optional-dependencies")
- data__optionaldependencies = data["optional-dependencies"]
- if not isinstance(data__optionaldependencies, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be object", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
- data__optionaldependencies_is_dict = isinstance(data__optionaldependencies, dict)
- if data__optionaldependencies_is_dict:
- data__optionaldependencies_keys = set(data__optionaldependencies.keys())
- for data__optionaldependencies_key, data__optionaldependencies_val in data__optionaldependencies.items():
- if REGEX_PATTERNS['^.+$'].search(data__optionaldependencies_key):
- if data__optionaldependencies_key in data__optionaldependencies_keys:
- data__optionaldependencies_keys.remove(data__optionaldependencies_key)
- if not isinstance(data__optionaldependencies_val, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + " must be array", value=data__optionaldependencies_val, name="" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type')
- data__optionaldependencies_val_is_list = isinstance(data__optionaldependencies_val, (list, tuple))
- if data__optionaldependencies_val_is_list:
- data__optionaldependencies_val_len = len(data__optionaldependencies_val)
- for data__optionaldependencies_val_x, data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val):
- validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]")
- if data__optionaldependencies_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
- data__optionaldependencies_len = len(data__optionaldependencies)
- if data__optionaldependencies_len != 0:
- data__optionaldependencies_property_names = True
- for data__optionaldependencies_key in data__optionaldependencies:
- try:
- if isinstance(data__optionaldependencies_key, str):
- if not custom_formats["pep508-identifier"](data__optionaldependencies_key):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be pep508-identifier", value=data__optionaldependencies_key, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'format': 'pep508-identifier'}, rule='format')
- except JsonSchemaValueException:
- data__optionaldependencies_property_names = False
- if not data__optionaldependencies_property_names:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be named by propertyName definition", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='propertyNames')
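Analogously, `optional-dependencies` validates each extra's name as a PEP 508 identifier via `propertyNames` and each value as an array of PEP 508 requirement strings:

```python
# Illustrative project.optional-dependencies table:
optional_dependencies = {
    "test": ["pytest>=7", "pytest-cov"],  # extra name 'test' is a pep508-identifier
    "docs": ["sphinx"],
}
```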
- if "dynamic" in data_keys:
- data_keys.remove("dynamic")
- data__dynamic = data["dynamic"]
- if not isinstance(data__dynamic, (list, tuple)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type')
- data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
- if data__dynamic_is_list:
- data__dynamic_len = len(data__dynamic)
- for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic):
- if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum')
- if data_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties')
- try:
- try:
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_len = len(data)
- if not all(prop in data for prop in ['dynamic']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['dynamic'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required')
- data_keys = set(data.keys())
- if "dynamic" in data_keys:
- data_keys.remove("dynamic")
- data__dynamic = data["dynamic"]
- data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
- if data__dynamic_is_list:
- data__dynamic_contains = False
- for data__dynamic_key in data__dynamic:
- try:
- if data__dynamic_key != "version":
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const')
- data__dynamic_contains = True
- break
- except JsonSchemaValueException: pass
- if not data__dynamic_contains:
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}, rule='contains')
- except JsonSchemaValueException: pass
- else:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not')
- except JsonSchemaValueException:
- pass
- else:
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_len = len(data)
- if not all(prop in data for prop in ['version']):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required')
- return data
-
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type')
- if isinstance(data, str):
- if not custom_formats["pep508"](data):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep508", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format')
- return data
-
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_keys = set(data.keys())
- for data_key, data_val in data.items():
- if REGEX_PATTERNS['^.+$'].search(data_key):
- if data_key in data_keys:
- data_keys.remove(data_key)
- if not isinstance(data_val, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='type')
- if isinstance(data_val, str):
- if not custom_formats["python-entrypoint-reference"](data_val):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be python-entrypoint-reference", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='format')
- if data_keys:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='additionalProperties')
- data_len = len(data)
- if data_len != 0:
- data_property_names = True
- for data_key in data:
- try:
- if isinstance(data_key, str):
- if not custom_formats["python-entrypoint-name"](data_key):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-entrypoint-name", value=data_key, name="" + (name_prefix or "data") + "", definition={'format': 'python-entrypoint-name'}, rule='format')
- except JsonSchemaValueException:
- data_property_names = False
- if not data_property_names:
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be named by propertyName definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames')
- return data
-
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data, custom_formats={}, name_prefix=None):
- if not isinstance(data, (dict)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type')
- data_is_dict = isinstance(data, dict)
- if data_is_dict:
- data_keys = set(data.keys())
- if "name" in data_keys:
- data_keys.remove("name")
- data__name = data["name"]
- if not isinstance(data__name, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, rule='type')
- if "email" in data_keys:
- data_keys.remove("email")
- data__email = data["email"]
- if not isinstance(data__email, (str)):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be string", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='type')
- if isinstance(data__email, str):
- if not REGEX_PATTERNS["idn-email_re_pattern"].match(data__email):
- raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be idn-email", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='format')
- return data
\ No newline at end of file
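The deleted module above is generated schema-validation code in the fastjsonschema style: each function validates one schema node and raises `JsonSchemaValueException` on failure, with string formats such as `pep508` looked up in the `custom_formats` mapping the caller passes in. Below is a minimal sketch of driving the dependency validator, assuming a `pep508` callable built on the third-party `packaging` library; the validator name is abbreviated in the comments, standing in for the long generated `validate_https___packaging_python_org...___definitions_dependency` name.

```python
# Sketch only: the generated validators above expect a custom_formats
# mapping; this shows one plausible way to supply the "pep508" format.
from packaging.requirements import InvalidRequirement, Requirement

def pep508(value: str) -> bool:
    """Return True if value parses as a PEP 508 dependency specifier."""
    try:
        Requirement(value)
        return True
    except InvalidRequirement:
        return False

custom_formats = {"pep508": pep508}

# With the deleted module imported, the dependency validator would behave as:
#   validate_..._definitions_dependency("requests>=2.28", custom_formats)
#       -> returns "requests>=2.28"
#   validate_..._definitions_dependency("not a valid spec !!", custom_formats)
#       -> raises JsonSchemaValueException(..., rule='format')
```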
diff --git a/spaces/Autodog/nova/Dockerfile b/spaces/Autodog/nova/Dockerfile
deleted file mode 100644
index 193895fa20562461afd09099037992c02d62b113..0000000000000000000000000000000000000000
--- a/spaces/Autodog/nova/Dockerfile
+++ /dev/null
@@ -1,9 +0,0 @@
-FROM node:18
-RUN git clone https://github.com/supercyx3/ChatGPT-Next-Web-LangChain.git
-WORKDIR "ChatGPT-Next-Web-LangChain"
-
-RUN yarn install && yarn build
-# Set environment variables
-#ENV BASE_URL=https://api.nova-oss.com
-EXPOSE 3000
-CMD yarn start
\ No newline at end of file
diff --git a/spaces/Banbri/zcvzcv/src/lib/loadImage.ts b/spaces/Banbri/zcvzcv/src/lib/loadImage.ts
deleted file mode 100644
index d2e7dcb6a548a9ce1937315486954e66e2c54746..0000000000000000000000000000000000000000
--- a/spaces/Banbri/zcvzcv/src/lib/loadImage.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-export async function loadImage(image: string): Promise<HTMLImageElement> {
- const img = new Image();
- img.src = image;
-
- // Wrap the load/error events in a promise so callers can simply await it.
- const imgOnLoad = () => {
- return new Promise<HTMLImageElement>((resolve, reject) => {
- img.onload = () => { resolve(img) };
- img.onerror = (err) => { reject(err) };
- })
- };
-
- const loadImg = await imgOnLoad();
- return loadImg
-}
\ No newline at end of file
diff --git a/spaces/Benson/text-generation/Examples/Apklz.md b/spaces/Benson/text-generation/Examples/Apklz.md
deleted file mode 100644
index 136d0af73d7f669a239508a256ba604812883b03..0000000000000000000000000000000000000000
--- a/spaces/Benson/text-generation/Examples/Apklz.md
+++ /dev/null
@@ -1,70 +0,0 @@
-
-
-What is Apklz and how do you use it?
-
-If you are an Android user, you may have come across the term "apklz" or seen files with the .apklz extension. But what exactly is apklz, and how can you use it on your device? In this article, we explain everything you need to know about apklz files, including their features, benefits, risks, and precautions. By the end of this article, you will be able to download, install, update, uninstall, and manage apklz files like a pro.
-
-Introduction
-
-What is apklz and what does it stand for?
-
-Apklz is a file format that stands for Android Package Lempel-Ziv. It is a compressed version of the standard Android Package (APK) file format, which is used to distribute and install applications on Android devices. Apklz files are created with a lossless compression algorithm called Lempel-Ziv (LZ), which reduces file size without affecting the quality or functionality of the app.
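The lossless claim above is easy to demonstrate. The sketch below uses Python's standard-library `lzma` module, an LZ-family codec, purely as an illustration: "apklz" is not a format Python knows about, and the `app.apk` path is a placeholder.

```python
# Illustration of lossless LZ-style compression with Python's stdlib.
# "app.apk" is a hypothetical path; any binary file behaves the same way.
import lzma

with open("app.apk", "rb") as f:
    raw = f.read()

packed = lzma.compress(raw)
print(f"original: {len(raw):,} bytes, compressed: {len(packed):,} bytes")

# Lossless means decompression restores the exact original bytes.
assert lzma.decompress(packed) == raw
```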
-Why do people use apklz files, and what are the benefits?
-
-People use apklz files for several reasons, such as:
-
-
-To save storage space on their devices. Apklz files are usually smaller than APK files, which means they take up less room in the device's memory.
-
-To download apps faster. Apklz files download more quickly than APK files, especially over a slow or limited internet connection.
-
-To access apps that are not available on the official Google Play Store. Apklz files let you install apps that are restricted or removed from the Play Store for various reasons, such as regional limitations, legal issues, or policy violations.
-
-To try new or modified versions of apps. Apklz files let you test beta releases, modded builds, or custom versions of apps that offer extra features or improvements not available in the originals.
-
-
-How do you download and install apklz files?
-
-To download and install apklz files on your Android device, follow these steps:
-
-
-Find a reliable source for downloading apklz files. You can use sites such as Apk Plz, Google Play, or Scamvoid to search for and download the apklz files of your choice. Be sure to check other users' ratings, reviews, and comments before downloading any file.
-
-Enable unknown sources on your device. To do this, go to Settings > Security > Unknown sources and turn it on. This lets you install apps from sources other than the Play Store.
-
-Locate the downloaded apklz file on your device. You can use a file manager app such as ES File Explorer or File Manager to find the file in your downloads folder or wherever else you saved it.
-
-Tap the file and follow the on-screen instructions to install it. You may need to grant some permissions or accept some terms and conditions before the installation completes.
-
-
-Congratulations, you have successfully installed an apklz file on your device. You can now launch and use the app as you normally would; a scripted alternative for developers is sketched below.
-
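For developers, the manual install steps above can also be scripted from a desktop. A minimal sketch, assuming the Android platform tools (`adb`) are on PATH, USB debugging is enabled on the device, and the file is a standard `.apk` (adb does not understand a compressed `.apklz`, which would have to be unpacked first); the file path is illustrative.

```python
# Sideload an APK onto a connected Android device via adb.
import subprocess

def sideload(apk_path: str) -> None:
    # `adb install -r` installs the package, replacing any existing
    # version while keeping its data; check=True raises on failure.
    subprocess.run(["adb", "install", "-r", apk_path], check=True)

if __name__ == "__main__":
    sideload("downloads/example.apk")  # placeholder path
```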
-Features of Apklz
-
-What are some of the features that set apklz files apart from other file formats?
-
-Apklz files have some unique characteristics that distinguish them from other file formats, such as:
-
-
-They are self-contained and executable. Apklz files contain all the components and resources needed to run an app, such as code, images, sounds, and fonts. They do not require additional files or libraries to work.
-
-They are compatible and portable. Apklz files can run on any Android device that supports the minimum API level and hardware specifications required by the app. They do not depend on the device's manufacturer, model, or operating system version.
-
-
-
-How do you access and manage apklz files on your device?
-
-To access and manage apklz files on your device, you need a file manager app that supports the .apklz extension. Some popular file manager apps that can handle apklz files are:
-
-
-App name | Description | Download link
-ES File Explorer | A powerful and versatile file manager app that can access and manage all kinds of files on your device, including apklz files. It also has a built-in app manager that can install, uninstall, back up, and restore apklz files. |
- | A simple and easy-to-use file manager app that can easily access and manage apklz files on your device. It also has a dedicated apklz section where you can view their details, install them, or delete them. |
- | A dual-pane file manager app that can access and manage apklz files on your device. It also has a built-in app manager that can install, uninstall, back up, and restore apklz files. |
-
-How do you update and uninstall apklz files on your device?
-
-To update and uninstall apklz files on your device, follow these steps:
-
-
-To update an apklz file, download the latest version of the file from a reliable source and install it over the existing version, using the same steps described above for installing an apklz file. Alternatively, use a file manager app with a built-in app manager to check for updates and install them automatically.
-
-To uninstall an apklz file, go to Settings > Apps > ApkLZ (or the app's name) and tap Uninstall. You can also use a file manager app with a built-in app manager to uninstall apklz files easily.
-
-
-
-What are some of the risks and challenges of using apklz files?
-
-While apklz files have many advantages, they also come with some risks and challenges you should be aware of, such as:
-
-
-They may not be safe. Apklz files are not verified or approved by Google or any other authority. They can contain malware, viruses, spyware, or other harmful elements that can damage your device or compromise your privacy. They can also have bugs, errors, or compatibility issues that affect the performance or functionality of your device or the app.
-
-They may not be legal or ethical. Apklz files can violate the intellectual property rights or terms of service of the apps' original developers or publishers. They can also contain pirated, cracked, hacked, or modified content that is illegal or unethical to use.
-
-They may not be updated or supported. Apklz files may not receive … or Titanium Backup to back up and restore your data in case of problems with your apklz files.
-
-
-Conclusion
-
-Apklz files are a convenient and efficient way to download and install apps on your Android device. They offer many benefits, such as saving storage space, downloading faster, accessing unavailable apps, and trying new or modified versions of apps. However, they also come with risks and challenges, such as being unsafe, illegal, or unsupported. You should therefore be careful and cautious when using apklz files: download them only from reliable sources, scan them with antivirus software, read their permissions and terms of service, and back up your data regularly. By following these tips and precautions, you can enjoy using apklz files without worry.
-
-
-Frequently asked questions
-
-What is the difference between apklz and apk?
-
-Apklz and apk are both file formats used to distribute and install apps on Android devices. The main difference is that apklz files are compressed versions of apk files, which means they have smaller file sizes and faster download speeds. However, they also have some drawbacks, being less safe, less legal, and less compatible than apk files.
-
-How can I open an apklz file on my PC or Mac?
-How can I check whether an apklz file is safe or not?
-
-
\ No newline at end of file
diff --git a/spaces/Benson/text-generation/Examples/Cazador Asesino Mod Apk Ilimitado Todo.md b/spaces/Benson/text-generation/Examples/Cazador Asesino Mod Apk Ilimitado Todo.md
deleted file mode 100644
index ccf75ff3ff53a04afead3841276ce5731692f479..0000000000000000000000000000000000000000
--- a/spaces/Benson/text-generation/Examples/Cazador Asesino Mod Apk Ilimitado Todo.md
+++ /dev/null
@@ -1,81 +0,0 @@
-
-
-Hunter Assassin Mod APK: A Stealth and Strategy Game for Android Users
-
-If you are looking for a fun and challenging mobile game that tests your stealth and strategy skills, you might want to try Hunter Assassin. This popular game has more than 100 million downloads on the Google Play Store. But what if you want to enjoy the game with unlimited resources and features? That is where Hunter Assassin Mod APK comes in. In this article, we will tell you everything you need to know about Hunter Assassin and its modded version.
-
-What is Hunter Assassin?
-
-Hunter Assassin is a mobile game developed by Ruby Game Studio. It is a fast-paced game in which you control an assassin armed with a deadly knife. Your mission is to eliminate every target in each level without being detected by the guards. You have to use your speed, agility, and cunning to take your enemies by surprise and defeat them one by one.
-
-The gameplay of Hunter Assassin
-
-The gameplay of Hunter Assassin is simple but addictive. You tap the screen to move your assassin and swipe to change direction. You have to avoid the guards' flashlight beams and hide behind walls, crates, or barrels. You reach your target and tap them to take them out. Along the way you can also collect gems and keys, which can be used to unlock new assassins with different skills and abilities.
-
-The features of Hunter Assassin
-
-Hunter Assassin has many features that make it an enjoyable game for players of all ages. Some of them are:
-
-
-More than 500 levels with increasing difficulty and variety.
-
-Different types of guards with different behaviors and weapons.
-
-Different types of assassins with different stats and appearances.
-
-Simple and intuitive controls that are easy to learn.
-
-Smooth, colorful graphics that create a vivid atmosphere.
-
-
-
-What is Hunter Assassin Mod APK?
-
-Hunter Assassin Mod APK is a modified version of the original game that gives you access to unlimited resources and features. With this modded version, you can enjoy the game without limitations or restrictions. You can play with unlimited money, gems, keys, and diamonds. You can also unlock all assassins and levels without spending real money, and remove the ads that might otherwise interrupt your gaming experience.
-
-
-The benefits of Hunter Assassin Mod APK
-
-Hunter Assassin Mod APK has many benefits that make it a better choice than the original game. Some of them are:
-
-
-You can play the game without worrying about running out of resources or waiting for them to regenerate.
-
-You can unlock all assassins and levels without completing any tasks or achievements.
-
-You can customize your assassin's appearance and abilities to your preference.
-
-You can enjoy the game without ads or pop-ups that might distract you or slow down your device.
-
-You can have more fun and challenge yourself with the unlimited resources and features.
-
-
-The drawbacks of Hunter Assassin Mod APK
-
-Hunter Assassin Mod APK also has some drawbacks you should be aware of before downloading and installing it. Some of them are:
-
-
-You may run into compatibility issues or bugs on some devices or operating systems.
-
-You may lose your progress or data if you uninstall the modded version or update the original game.
-
-You may be banned or suspended from the game if you use the modded version online or in multiplayer mode.
-
-You may miss out on updates or new features that the developers add to the original game.
-
-You may lose the thrill and satisfaction of playing the game fair and square.
-
-
-
-How do you download and install Hunter Assassin Mod APK?
-
-If you want to download and install Hunter Assassin Mod APK, you have to follow some simple steps and precautions. Here is a guide on how to do it:
-
-The steps to download and install Hunter Assassin Mod APK
-
-
-Go to a reliable, trustworthy website that offers the modded version of the game. You can search for it on Google or use the link below.
-
-Download the APK file of the modded version. Make sure you have enough storage space on your device.
-
-Go to your device settings and enable the option to install apps from unknown sources. This allows you to install the modded version without any problems.
-
-Locate the downloaded APK file on your device and tap it to start the installation process.
-
-Follow the on-screen instructions and wait for the installation to finish.
-
-Launch the game and enjoy the unlimited resources and features.
-
-
-The precautions to take before downloading and installing Hunter Assassin Mod APK
-
-Before downloading and installing Hunter Assassin Mod APK, you should take some precautions to avoid problems or risks. Here are some of them:
-
-
-Make sure you have a stable internet connection and good antivirus software on your device.
-
-Make sure you download the modded version from a reliable, trustworthy website. Avoid any suspicious or malicious links that could harm your device or data.
-
-Make sure you back up your data and progress from the original game. You can use a cloud service or an external storage device for this purpose.
-
-Make sure you uninstall the original game before installing the modded version. This prevents conflicts or errors between the two versions.
-
-Make sure you use the modded version offline or in single-player mode. Do not use it online or in multiplayer mode, as this can get you banned or suspended from the game.
-
-
-Conclusion
-
-
-Hunter Assassin Mod APK is a modified version of the original game that gives you access to unlimited resources and features. You can play with unlimited money, gems, keys, and diamonds, unlock all assassins and levels without spending real money, and remove the ads that might interrupt your gaming experience.
-
-If you want to download and install Hunter Assassin Mod APK, you have to follow some simple steps and precautions: go to a reliable, trustworthy website that offers the modded version, download its APK file, enable the option to install apps from unknown sources on your device, locate the downloaded APK file, tap it to start the installation, then launch the game and enjoy the unlimited resources and features.
-
-However, you should also be aware of the drawbacks of Hunter Assassin Mod APK. You may face compatibility issues or bugs on some devices or operating systems, lose your progress or data if you uninstall the modded version or update the original game, get banned or suspended if you use it online or in multiplayer mode, miss out on updates or new features added to the original game, and lose the thrill and satisfaction of playing fair and square.
-
-You should therefore weigh the pros and cons of Hunter Assassin Mod APK before deciding to download and install it. Follow the steps and precautions carefully to avoid problems or risks, respect the rules and policies of the game, and play responsibly.
-
-Frequently asked questions
-
-Here are some frequently asked questions about Hunter Assassin and Hunter Assassin Mod APK:
-
-
-What is the latest version of Hunter Assassin Mod APK?
-
-
-Is Hunter Assassin Mod APK safe to download and install?
-
-Hunter Assassin Mod APK is safe to download and install if you get it from a reliable, trustworthy website. However, you should always scan the APK file with good antivirus software before installing it, and back up your data and progress from the original game before installing the modded version.
-
-Can I play Hunter Assassin Mod APK offline?
-
-Yes, you can play Hunter Assassin Mod APK without an internet connection. However, you may not be able to access some features or updates that require an online connection.
-
-Can I play Hunter Assassin Mod APK with my friends?
-
-No, you cannot play Hunter Assassin Mod APK with your friends, as it has no multiplayer mode; it can only be played in single-player mode. If you want to play with your friends, you should use the original game.
-
-How can I contact the developers of Hunter Assassin?
-
-You can contact the developers of Hunter Assassin by emailing them at support@rubygamestudio.com. You can also visit their website at https://www.rubygamestudio.com/ or follow them on Facebook at https://www.facebook.com/rubygamestudio.