diff --git a/spaces/101-5/gpt4free/g4f/.v1/CODE_OF_CONDUCT.md b/spaces/101-5/gpt4free/g4f/.v1/CODE_OF_CONDUCT.md
deleted file mode 100644
index c5afe0a46456e33de178e4242232b51fedaf54d9..0000000000000000000000000000000000000000
--- a/spaces/101-5/gpt4free/g4f/.v1/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,128 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-We as members, contributors, and leaders pledge to make participation in our
-community a harassment-free experience for everyone, regardless of age, body
-size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socio-economic status,
-nationality, personal appearance, race, religion, or sexual identity
-and orientation.
-
-We pledge to act and interact in ways that contribute to an open, welcoming,
-diverse, inclusive, and healthy community.
-
-## Our Standards
-
-Examples of behavior that contributes to a positive environment for our
-community include:
-
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
- and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
- overall community
-
-Examples of unacceptable behavior include:
-
-* The use of sexualized language or imagery, and sexual attention or
- advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
- address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Enforcement Responsibilities
-
-Community leaders are responsible for clarifying and enforcing our standards of
-acceptable behavior and will take appropriate and fair corrective action in
-response to any behavior that they deem inappropriate, threatening, offensive,
-or harmful.
-
-Community leaders have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions that are
-not aligned to this Code of Conduct, and will communicate reasons for moderation
-decisions when appropriate.
-
-## Scope
-
-This Code of Conduct applies within all community spaces, and also applies when
-an individual is officially representing the community in public spaces.
-Examples of representing our community include using an official e-mail address,
-posting via an official social media account, or acting as an appointed
-representative at an online or offline event.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported to the community leaders responsible for enforcement at
-https://t.me/xtekky.
-All complaints will be reviewed and investigated promptly and fairly.
-
-All community leaders are obligated to respect the privacy and security of the
-reporter of any incident.
-
-## Enforcement Guidelines
-
-Community leaders will follow these Community Impact Guidelines in determining
-the consequences for any action they deem in violation of this Code of Conduct:
-
-### 1. Correction
-
-**Community Impact**: Use of inappropriate language or other behavior deemed
-unprofessional or unwelcome in the community.
-
-**Consequence**: A private, written warning from community leaders, providing
-clarity around the nature of the violation and an explanation of why the
-behavior was inappropriate. A public apology may be requested.
-
-### 2. Warning
-
-**Community Impact**: A violation through a single incident or series
-of actions.
-
-**Consequence**: A warning with consequences for continued behavior. No
-interaction with the people involved, including unsolicited interaction with
-those enforcing the Code of Conduct, for a specified period of time. This
-includes avoiding interactions in community spaces as well as external channels
-like social media. Violating these terms may lead to a temporary or
-permanent ban.
-
-### 3. Temporary Ban
-
-**Community Impact**: A serious violation of community standards, including
-sustained inappropriate behavior.
-
-**Consequence**: A temporary ban from any sort of interaction or public
-communication with the community for a specified period of time. No public or
-private interaction with the people involved, including unsolicited interaction
-with those enforcing the Code of Conduct, is allowed during this period.
-Violating these terms may lead to a permanent ban.
-
-### 4. Permanent Ban
-
-**Community Impact**: Demonstrating a pattern of violation of community
-standards, including sustained inappropriate behavior, harassment of an
-individual, or aggression toward or disparagement of classes of individuals.
-
-**Consequence**: A permanent ban from any sort of public interaction within
-the community.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage],
-version 2.0, available at
-https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
-
-Community Impact Guidelines were inspired by [Mozilla's code of conduct
-enforcement ladder](https://github.com/mozilla/diversity).
-
-[homepage]: https://www.contributor-covenant.org
-
-For answers to common questions about this code of conduct, see the FAQ at
-https://www.contributor-covenant.org/faq. Translations are available at
-https://www.contributor-covenant.org/translations.
diff --git a/spaces/1gistliPinn/ChatGPT4/Examples/Ativador Do Windows 8.1 Utorrent.md b/spaces/1gistliPinn/ChatGPT4/Examples/Ativador Do Windows 8.1 Utorrent.md
deleted file mode 100644
index 9a81e2090986dd89b14f953e6f4bf2a6487df553..0000000000000000000000000000000000000000
--- a/spaces/1gistliPinn/ChatGPT4/Examples/Ativador Do Windows 8.1 Utorrent.md
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-January 7, 2022 - Windows 8.1 Crack is one of the operating systems widely used by millions of people besides Windows. This OS is simple and easy to use. If you are a .NET developer, you will be able to create applications that use most of the technologies available on Windows. But if you are a .NET programmer, then I strongly recommend that you download Windows 8.1. It is a Windows operating system, but with a different architecture, which is an improved version of Windows. All security updates and bug fixes that Windows 8.1 is currently receiving are also available for Windows 8.1. 8a78ff9644
-
-
-
diff --git a/spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/DRAGON BALL LEGENDS APK Actualizado 2022 Join Goku and Friends in Epic 3D Battles.md b/spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/DRAGON BALL LEGENDS APK Actualizado 2022 Join Goku and Friends in Epic 3D Battles.md
deleted file mode 100644
index ee1589e7ec313db356a74371240e8581d36716b7..0000000000000000000000000000000000000000
--- a/spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/DRAGON BALL LEGENDS APK Actualizado 2022 Join Goku and Friends in Epic 3D Battles.md
+++ /dev/null
@@ -1,167 +0,0 @@
-
-
Dragon Ball Legends APK Actualizado 2022: The Ultimate Anime Action RPG for Android
-
If you are a fan of the Dragon Ball anime series, you will love playing Dragon Ball Legends APK, the latest game from Bandai Namco Entertainment. Dragon Ball Legends APK is an action-packed anime RPG that lets you summon your favorite DB characters for battle in stunning 3D graphics. You can enjoy a new original story based on the mysterious Saiyan Shallot, or relive the classic DB sagas with Goku, Vegeta, Frieza, and more. Dragon Ball Legends APK is free to download and play on your Android device, and it is updated regularly with new features and content. In this article, we will tell you everything you need to know about Dragon Ball Legends APK, including its features, how to download and install it, and why you should play it.
Dragon Ball Legends APK is an Android game that is based on the popular Dragon Ball anime series. It is developed by Bandai Namco Entertainment, the same company that created other DB games such as Dragon Ball Z Dokkan Battle and Dragon Ball FighterZ. Dragon Ball Legends APK is an anime action RPG that combines fast-paced fighting with card-based strategy. You can control your favorite DB heroes in 3D battles, using your ability cards to unleash powerful combos and special moves. You can also enjoy a new original story that features a new character designed by Akira Toriyama, the creator of Dragon Ball. You can also join other players from around the world in live PVP matches, or test your skills in casual or ranked matches.
-
Features of Dragon Ball Legends APK
-
Dragon Ball Legends APK has many features that make it one of the best DB games for Android. Here are some of them:
-
Epic 3D visuals and animations
-
Dragon Ball Legends APK brings the anime to life on your mobile device with high-quality 3D characters and stages. You can see your favorite DB heroes and villains in action, with smooth animations and modern takes on their iconic moves. You can also witness the amazing team-based Rising Rush attack, which fills the screen with explosive effects.
-
Intuitive fighting controls and card-based strategy
-
Dragon Ball Legends APK has simple and intuitive fighting controls that let you dodge, counter, and attack your opponent in real time. You can also use your ability cards to build your own strategy, choosing when to use melee, ranged, or special attacks. You can also customize your deck of cards to suit your play style and preferences.
Dragon Ball Legends APK has an original RPG storyline that follows the adventures of Shallot, a new Saiyan character designed by Akira Toriyama. You can join Shallot and other DB characters as they try to save the world from a mysterious threat. You can also enjoy voice acting from the original anime cast, which adds more immersion and authenticity to the game.
-
Iconic DB characters and sagas
-
Dragon Ball Legends APK has more than 400 characters to collect and train, from various DB anime series such as DBZ, DBGT, and DBS. You can summon characters such as Goku, Vegeta, Trunks, Piccolo, Frieza, Broly, Majin Buu, and many more. You can also play through classic DB sagas such as the Saiyan Saga, the Frieza Saga, the Cell Saga, and the Tournament of Power Saga. You can also participate in special events and limited-time missions that feature exclusive characters and rewards.
-
How to download and install Dragon Ball Legends APK?
-
Dragon Ball Legends APK is easy to download and install on your Android device. Here are the steps you need to follow:
-
Requirements and compatibility
-
Before you download Dragon Ball Legends APK, you need to make sure that your device meets the following requirements:
-
-
Your device must have Android 6.0 or higher.
-
Your device must have at least 2 GB of RAM and 1.5 GB of free storage space.
-
Your device must have a stable internet connection.
-
Your device must support OpenGL ES 3.0 or higher.
-
-
You can check the compatibility of your device by visiting the official website of Dragon Ball Legends APK . You can also see the list of supported devices and the minimum specifications for the game.
-
Steps to download and install
-
Once you have confirmed that your device is compatible, you can follow these steps to download and install Dragon Ball Legends APK:
-
-
Go to the official website of Dragon Ball Legends APK and click on the "Download" button.
-
You will be redirected to a third-party website where you can download the APK file. Click on the "Download APK" button and wait for the file to be downloaded.
-
After the file is downloaded, locate it in your device's file manager and tap on it to install it. You may need to enable "Unknown sources" in your device's settings to allow the installation of apps from sources other than Google Play Store.
-
Follow the on-screen instructions to complete the installation process. You may need to grant some permissions to the app to access your device's features.
-
Once the installation is done, you can launch the app and enjoy playing Dragon Ball Legends APK.
-
-
Tips and tricks to play Dragon Ball Legends APK
-
If you want to improve your skills and performance in Dragon Ball Legends APK, here are some tips and tricks that you can use:
-
-
Learn the basics of combat, such as how to dodge, counter, charge, switch, and use cards. You can practice in the training mode or watch the tutorial videos in the game.
-
Upgrade your characters by leveling them up, increasing their soul boost, equipping them with items, and unlocking their limit breaks. You can also use Z power and Z medals to increase their star rating and stats.
-
Build a balanced team that has a good synergy and covers different elements, roles, and abilities. You can also use tags and categories to boost your team's power and bonuses.
-
Use your Rising Rush wisely, as it can turn the tide of battle in your favor. You can activate it by using seven different cards in a row, and then choose one card for the final attack. Try to guess your opponent's card to cancel their Rising Rush.
-
Play online with other players in PVP mode, where you can compete in casual or ranked matches, or join guilds and co-op battles. You can also earn rewards such as crystals, coins, tickets, and items by completing missions and events.
-
Why should you play Dragon Ball Legends APK?
-
Dragon Ball Legends APK is a game that will appeal to any DB fan or anime lover. It has many advantages and benefits that make it worth playing. Here are some of them:
-
Pros and cons of Dragon Ball Legends APK
-
Like any game, Dragon Ball Legends APK has its pros and cons. Here are some of them:
-
| Pros | Cons |
| --- | --- |
| Free to download and play | Requires internet connection |
| Amazing 3D graphics and animations | May drain battery and data |
| Simple and fun fighting controls and strategy | May be repetitive and grindy |
| Original RPG story and voice acting | May have some bugs and glitches |
| Iconic DB characters and sagas | May have some balance issues and power creep |
| Online PVP and co-op modes | May have some lag and disconnect issues |
| Regular updates and events | May be hard to get rare characters and items |
-
Reviews and ratings of Dragon Ball Legends APK
-
Dragon Ball Legends APK has received positive reviews and ratings from players and critics alike. It has a 4.2 out of 5 stars rating on Google Play Store, based on more than 1.5 million reviews. It also has a 4.6 out of 5 stars rating on App Store, based on more than 100 thousand reviews. Some of the comments from the users are:
-
-
"This game is awesome! The graphics are amazing, the gameplay is smooth, and the story is engaging. I love the characters and the voice acting. It feels like I'm watching the anime."
-
"This game is very fun and addictive. I like the card system and the strategy involved. The PVP mode is challenging and rewarding. The events are exciting and generous."
-
"This game is good, but it could be better. I wish there were more modes and features. I also wish there were more ways to get crystals and items. The game can be frustrating sometimes."
-
"This game is bad, don't waste your time. The game is rigged and unfair. The PVP mode is full of hackers and cheaters. The game is boring and repetitive. The game is greedy and pay-to-win."
-
-
Comparison with other DB games for Android
-
Dragon Ball Legends APK is not the only DB game for Android. There are other games that you can try, such as:
-
-
Dragon Ball Z Dokkan Battle: A puzzle RPG game that lets you collect and awaken DB characters, create your own team, and fight in various events and modes.
-
Dragon Ball Z Kakarot: An action RPG game that lets you relive the DBZ story, explore the open world, train your characters, and fight in epic battles.
-
Dragon Ball FighterZ: A fighting game that lets you choose from a roster of DB characters, customize your team, and fight in online or offline matches.
-
Dragon Ball Legends APK vs Dragon Ball Z Dokkan Battle: Dragon Ball Legends APK has better graphics, animations, voice acting, and story than Dragon Ball Z Dokkan Battle. However, Dragon Ball Z Dokkan Battle has more characters, modes, events, and customization than Dragon Ball Legends APK.
-
Dragon Ball Legends APK vs Dragon Ball Z Kakarot: Dragon Ball Legends APK has more online features, PVP modes, co-op modes, and updates than Dragon Ball Z Kakarot. However, Dragon Ball Z Kakarot has more offline features, exploration, quests, side stories, and mini-games than Dragon Ball Legends APK.
-
Dragon Ball Legends APK vs Dragon Ball FighterZ: Dragon Ball Legends APK has more RPG elements, card-based strategy, original story, and character development than Dragon Ball FighterZ. However, Dragon Ball FighterZ has more fighting elements, combo system, competitive mode, and cross-platform support than Dragon Ball Legends APK.
-
-
Conclusion
-
Dragon Ball Legends APK is a great game for anyone who loves DB or anime in general. It is a free-to-play anime action RPG that lets you summon your favorite DB characters for battle in stunning 3D graphics. You can enjoy a new original story based on the mysterious Saiyan Shallot, or relive the classic DB sagas with Goku, Vegeta, Frieza, and more. You can also join other players from around the world in live PVP matches, or test your skills in casual or ranked matches. Dragon Ball Legends APK has many features that make it one of the best DB games for Android, such as epic 3D visuals and animations, intuitive fighting controls and card-based strategy, original RPG storyline and voice acting, and iconic DB characters and sagas. Dragon Ball Legends APK is easy to download and install on your Android device, and it is updated regularly with new features and content. You can also use some tips and tricks to improve your skills and performance in the game. Dragon Ball Legends APK has its pros and cons, as well as reviews and ratings from other players and critics. You can also compare it with other DB games for Android, such as Dragon Ball Z Dokkan Battle, Dragon Ball Z Kakarot, and Dragon Ball FighterZ. We hope that this article has helped you learn more about Dragon Ball Legends APK, and that you will enjoy playing it on your Android device.
-
FAQs
-
Here are some frequently asked questions about Dragon Ball Legends APK:
-
-
Q: Is Dragon Ball Legends APK safe to download and play?
-
A: Yes, Dragon Ball Legends APK is safe to download and play, as long as you download it from the official website or a trusted source. You should also scan the APK file with an antivirus program before installing it.
-
Q: How can I get more crystals and items in Dragon Ball Legends APK?
-
A: You can get more crystals and items in Dragon Ball Legends APK by completing missions, events, challenges, achievements, login bonuses, daily rewards, and summoning banners. You can also buy them with real money through in-app purchases.
-
Q: How can I contact the support team of Dragon Ball Legends APK?
-
A: You can contact the support team of Dragon Ball Legends APK by visiting the official website of the game and clicking on the "Support" button. You can also send them an email at support@bandainamcoent.com or use the in-game feedback option.
-
Q: How can I play Dragon Ball Legends APK on PC?
-
A: You can play Dragon Ball Legends APK on PC by using an Android emulator such as BlueStacks, NoxPlayer, or LDPlayer. You can download and install the emulator on your PC, then download and install the APK file of the game on the emulator. You can then launch the game and enjoy playing it on a bigger screen.
-
Q: How can I update Dragon Ball Legends APK?
-
A: You can update Dragon Ball Legends APK by visiting the official website of the game and downloading the latest version of the APK file. You can also check for updates in the Google Play Store or App Store, if you have installed the game from there.
Special Forces Group 2 Mod APK old version: A guide for players
-
If you are a fan of first-person shooter games, you may have heard of Special Forces Group 2. This is a popular online multiplayer game that lets you experience intense battles with different modes, weapons, maps, and characters. But did you know that you can also download the old mod APK version of this game and enjoy some extra features and advantages? In this article, we will tell you everything you need to know about the Special Forces Group 2 mod APK old version, including what it is, why you should download it, how to download it, and how to use it. Let's get started!
Special Forces Group 2 is a 3D action game developed by ForgeGames. It was released in 2016 for Android and iOS devices. The game has more than 100 million downloads on the Google Play Store and a rating of 4.5 out of 5 stars. The game is inspired by the famous Counter-Strike series and offers similar gameplay and graphics.
-
The features and gameplay of Special Forces Group 2
-
The game has many features that make it fun and addictive. Some of them are:
-
-
9 game modes: Classic, Resurrection, Capture the Flag, Zombie Mode, Bomb Mode, Knives, Deathmatch, Arms Race, and Sniper.
-
30 maps: You can choose between different locations such as desert, city, snow, forest, etc.
-
25 weapons: You can equip yourself with various weapons such as pistols, rifles, shotguns, sniper rifles, grenades, etc.
-
8 characters per team: You can customize their appearance and choose between different skins and outfits.
-
Multiplayer mode: You can play online with your friends or other players from around the world.
-
Offline mode: You can also play offline against bots or practice your skills.
-
Voice chat: You can communicate with your teammates using voice chat.
-
-
-
The gameplay of Special Forces Group 2 is simple and straightforward. You have to join a team (either terrorists or counter-terrorists) and complete the objectives of each mode. For example, in Classic mode, you have to eliminate all enemies or defuse the bomb. In Zombie mode, you have to survive the zombie attacks or infect other players. In Capture the Flag mode, you have to capture the enemy flag and bring it back to your base. And so on.
-
Why download the old mod APK version of Special Forces Group 2?
-
The benefits of using the old mod APK version
-
The old mod APK version of Special Forces Group 2 is a modified version of the original game that has some extra features and advantages. Some of them are:
-
-
Unlimited money: You can buy any weapon or item without worrying about the cost.
-
Unlocked skins and outfits: You can access all skins and outfits for free.
-
No ads: You can enjoy the game without annoying ads.
-
No root required: You do not need to root your device to install the old mod APK version.
-
-
The drawbacks and risks of using the old mod APK version
-
However, using the old mod APK version also has some drawbacks and risks that you should be aware of. Some of them are:
- Compatibility issues: The old mod APK version may not work properly on some devices or with some updates.
-
- Ban risk: The old mod APK version may be detected by the game developers and lead to a ban from online mode.
-
-
- Virus risk: The old mod APK version may contain malware or spyware that can damage your device or steal your data.
-
- Ethical issues: The old mod APK version may give you an unfair advantage over other players and ruin the balance and fun of the game.
-
-
How to download and install the old mod APK version of Special Forces Group 2?
-
The steps to download and install the old mod APK version
-
If you still want to try the old mod APK version of Special Forces Group 2, here are the steps to download and install it:
-
-
Go to a trusted website that provides the old mod APK version of Special Forces Group 2. For example, you can visit [this link] to download the old mod APK version 4.21 of the game.
-
Download the mod APK file and the OBB file to your device. Make sure you have enough storage space and a stable internet connection.
-
Enable the installation of apps from unknown sources on your device. To do this, go to Settings > Security > Unknown sources and turn it on.
-
Locate the downloaded files on your device and install the mod APK file. Do not open it yet.
-
Extract the OBB file using a file manager app and copy the folder named "com.ForgeGames.SpecialForcesGroup2" to the Android/obb directory on your device.
-
Launch the game and enjoy the old mod APK version of Special Forces Group 2.
-
-
Tips and tricks to enjoy the old mod APK version
-
Here are some tips and tricks to enjoy the old mod APK version of Special Forces Group 2:
-
-
Use different weapons and modes to explore the game and have fun.
-
Play with your friends or join a clan to cooperate and compete with other players.
-
Adjust the graphics settings and controls according to your preference and your device's performance.
-
Be respectful and friendly to other players and avoid cheating or abusing the game.
-
Update the game regularly to get new features and bug fixes.
-
-
Conclusion
-
-
Frequently asked questions
-
What is Special Forces Group 2?
-
Special Forces Group 2 is a popular first-person multiplayer game developed by ForgeGames. It was released in 2016 for Android and iOS devices.
-
What is the Special Forces Group 2 mod APK old version?
-
The Special Forces Group 2 mod APK old version is a modified version of the original game that has some extra features and advantages such as unlimited money, unlocked skins, no ads, etc.
-
How do you download the Special Forces Group 2 mod APK old version?
-
You can download the Special Forces Group 2 mod APK old version from a trusted website that provides it. You also need to download the OBB file and follow a few steps to install it on your device.
-
Is the Special Forces Group 2 mod APK old version safe?
-
No, the Special Forces Group 2 mod APK old version is not safe. It may have compatibility issues, ban risk, virus risk, ethical issues, etc. You should use it at your own risk and discretion.
-
How do you enjoy the Special Forces Group 2 mod APK old version?
-
You can enjoy the Special Forces Group 2 mod APK old version by using different weapons and modes, playing with your friends or joining a clan, adjusting the graphics settings and controls, being respectful and friendly to other players, and updating the game regularly.
64aa2da5cf
-
-
\ No newline at end of file
diff --git a/spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/rich/ansi.py b/spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/rich/ansi.py
deleted file mode 100644
index 66365e6536080bd9372d2a7a58b8ffa3447fec34..0000000000000000000000000000000000000000
--- a/spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/rich/ansi.py
+++ /dev/null
@@ -1,240 +0,0 @@
-import re
-import sys
-from contextlib import suppress
-from typing import Iterable, NamedTuple, Optional
-
-from .color import Color
-from .style import Style
-from .text import Text
-
-re_ansi = re.compile(
- r"""
-(?:\x1b\](.*?)\x1b\\)|
-(?:\x1b([(@-Z\\-_]|\[[0-?]*[ -/]*[@-~]))
-""",
- re.VERBOSE,
-)
-
-
-class _AnsiToken(NamedTuple):
- """Result of ansi tokenized string."""
-
- plain: str = ""
- sgr: Optional[str] = ""
- osc: Optional[str] = ""
-
-
-def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]:
- """Tokenize a string in to plain text and ANSI codes.
-
- Args:
- ansi_text (str): A String containing ANSI codes.
-
- Yields:
- AnsiToken: A named tuple of (plain, sgr, osc)
- """
-
- position = 0
- sgr: Optional[str]
- osc: Optional[str]
- for match in re_ansi.finditer(ansi_text):
- start, end = match.span(0)
- osc, sgr = match.groups()
- if start > position:
- yield _AnsiToken(ansi_text[position:start])
- if sgr:
- if sgr == "(":
- position = end + 1
- continue
- if sgr.endswith("m"):
- yield _AnsiToken("", sgr[1:-1], osc)
- else:
- yield _AnsiToken("", sgr, osc)
- position = end
- if position < len(ansi_text):
- yield _AnsiToken(ansi_text[position:])
-
-
-SGR_STYLE_MAP = {
- 1: "bold",
- 2: "dim",
- 3: "italic",
- 4: "underline",
- 5: "blink",
- 6: "blink2",
- 7: "reverse",
- 8: "conceal",
- 9: "strike",
- 21: "underline2",
- 22: "not dim not bold",
- 23: "not italic",
- 24: "not underline",
- 25: "not blink",
- 26: "not blink2",
- 27: "not reverse",
- 28: "not conceal",
- 29: "not strike",
- 30: "color(0)",
- 31: "color(1)",
- 32: "color(2)",
- 33: "color(3)",
- 34: "color(4)",
- 35: "color(5)",
- 36: "color(6)",
- 37: "color(7)",
- 39: "default",
- 40: "on color(0)",
- 41: "on color(1)",
- 42: "on color(2)",
- 43: "on color(3)",
- 44: "on color(4)",
- 45: "on color(5)",
- 46: "on color(6)",
- 47: "on color(7)",
- 49: "on default",
- 51: "frame",
- 52: "encircle",
- 53: "overline",
- 54: "not frame not encircle",
- 55: "not overline",
- 90: "color(8)",
- 91: "color(9)",
- 92: "color(10)",
- 93: "color(11)",
- 94: "color(12)",
- 95: "color(13)",
- 96: "color(14)",
- 97: "color(15)",
- 100: "on color(8)",
- 101: "on color(9)",
- 102: "on color(10)",
- 103: "on color(11)",
- 104: "on color(12)",
- 105: "on color(13)",
- 106: "on color(14)",
- 107: "on color(15)",
-}
-
-
-class AnsiDecoder:
- """Translate ANSI code in to styled Text."""
-
- def __init__(self) -> None:
- self.style = Style.null()
-
- def decode(self, terminal_text: str) -> Iterable[Text]:
- """Decode ANSI codes in an iterable of lines.
-
- Args:
- lines (Iterable[str]): An iterable of lines of terminal output.
-
- Yields:
- Text: Marked up Text.
- """
- for line in terminal_text.splitlines():
- yield self.decode_line(line)
-
- def decode_line(self, line: str) -> Text:
- """Decode a line containing ansi codes.
-
- Args:
- line (str): A line of terminal output.
-
- Returns:
- Text: A Text instance marked up according to ansi codes.
- """
- from_ansi = Color.from_ansi
- from_rgb = Color.from_rgb
- _Style = Style
- text = Text()
- append = text.append
- line = line.rsplit("\r", 1)[-1]
- for plain_text, sgr, osc in _ansi_tokenize(line):
- if plain_text:
- append(plain_text, self.style or None)
- elif osc is not None:
- if osc.startswith("8;"):
- _params, semicolon, link = osc[2:].partition(";")
- if semicolon:
- self.style = self.style.update_link(link or None)
- elif sgr is not None:
- # Translate in to semi-colon separated codes
- # Ignore invalid codes, because we want to be lenient
- codes = [
- min(255, int(_code) if _code else 0)
- for _code in sgr.split(";")
- if _code.isdigit() or _code == ""
- ]
- iter_codes = iter(codes)
- for code in iter_codes:
- if code == 0:
- # reset
- self.style = _Style.null()
- elif code in SGR_STYLE_MAP:
- # styles
- self.style += _Style.parse(SGR_STYLE_MAP[code])
- elif code == 38:
- # Foreground
- with suppress(StopIteration):
- color_type = next(iter_codes)
- if color_type == 5:
- self.style += _Style.from_color(
- from_ansi(next(iter_codes))
- )
- elif color_type == 2:
- self.style += _Style.from_color(
- from_rgb(
- next(iter_codes),
- next(iter_codes),
- next(iter_codes),
- )
- )
- elif code == 48:
- # Background
- with suppress(StopIteration):
- color_type = next(iter_codes)
- if color_type == 5:
- self.style += _Style.from_color(
- None, from_ansi(next(iter_codes))
- )
- elif color_type == 2:
- self.style += _Style.from_color(
- None,
- from_rgb(
- next(iter_codes),
- next(iter_codes),
- next(iter_codes),
- ),
- )
-
- return text
-
-
-if sys.platform != "win32" and __name__ == "__main__": # pragma: no cover
- import io
- import os
- import pty
- import sys
-
- decoder = AnsiDecoder()
-
- stdout = io.BytesIO()
-
- def read(fd: int) -> bytes:
- data = os.read(fd, 1024)
- stdout.write(data)
- return data
-
- pty.spawn(sys.argv[1:], read)
-
- from .console import Console
-
- console = Console(record=True)
-
- stdout_result = stdout.getvalue().decode("utf-8")
- print(stdout_result)
-
- for line in decoder.decode(stdout_result):
- console.print(line)
-
- console.save_html("stdout.html")
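The deleted ansi.py module above turns raw terminal output containing ANSI escape codes into styled rich Text objects via AnsiDecoder. A minimal usage sketch, assuming the standalone rich package (which ships the same AnsiDecoder and Console API as this vendored copy) is installed:

    # Decode ANSI-styled terminal output into rich Text and re-render it.
    from rich.ansi import AnsiDecoder
    from rich.console import Console

    raw = "plain \x1b[1;31mbold red\x1b[0m and \x1b[4munderlined\x1b[0m text"

    decoder = AnsiDecoder()
    console = Console()
    for text in decoder.decode(raw):  # yields one Text object per input line
        console.print(text)           # prints with the decoded bold/red/underline styles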
diff --git a/spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py b/spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_distutils/filelist.py b/spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_distutils/filelist.py
deleted file mode 100644
index 987931a9883ff36862dbd0831bd0a16903977879..0000000000000000000000000000000000000000
--- a/spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_distutils/filelist.py
+++ /dev/null
@@ -1,371 +0,0 @@
-"""distutils.filelist
-
-Provides the FileList class, used for poking about the filesystem
-and building lists of files.
-"""
-
-import os
-import re
-import fnmatch
-import functools
-
-from distutils.util import convert_path
-from distutils.errors import DistutilsTemplateError, DistutilsInternalError
-from distutils import log
-
-
-class FileList:
-    """A list of files built by exploring the filesystem and filtered by
- applying various patterns to what we find there.
-
- Instance attributes:
- dir
- directory from which files will be taken -- only used if
- 'allfiles' not supplied to constructor
- files
- list of filenames currently being built/filtered/manipulated
- allfiles
- complete list of files under consideration (ie. without any
- filtering applied)
- """
-
- def __init__(self, warn=None, debug_print=None):
- # ignore argument to FileList, but keep them for backwards
- # compatibility
- self.allfiles = None
- self.files = []
-
- def set_allfiles(self, allfiles):
- self.allfiles = allfiles
-
- def findall(self, dir=os.curdir):
- self.allfiles = findall(dir)
-
- def debug_print(self, msg):
- """Print 'msg' to stdout if the global DEBUG (taken from the
- DISTUTILS_DEBUG environment variable) flag is true.
- """
- from distutils.debug import DEBUG
-
- if DEBUG:
- print(msg)
-
- # Collection methods
-
- def append(self, item):
- self.files.append(item)
-
- def extend(self, items):
- self.files.extend(items)
-
- def sort(self):
- # Not a strict lexical sort!
- sortable_files = sorted(map(os.path.split, self.files))
- self.files = []
- for sort_tuple in sortable_files:
- self.files.append(os.path.join(*sort_tuple))
-
- # Other miscellaneous utility methods
-
- def remove_duplicates(self):
- # Assumes list has been sorted!
- for i in range(len(self.files) - 1, 0, -1):
- if self.files[i] == self.files[i - 1]:
- del self.files[i]
-
- # "File template" methods
-
- def _parse_template_line(self, line):
- words = line.split()
- action = words[0]
-
- patterns = dir = dir_pattern = None
-
- if action in ('include', 'exclude', 'global-include', 'global-exclude'):
- if len(words) < 2:
- raise DistutilsTemplateError(
-                    "'%s' expects <pattern1> <pattern2> ..." % action
- )
- patterns = [convert_path(w) for w in words[1:]]
- elif action in ('recursive-include', 'recursive-exclude'):
- if len(words) < 3:
- raise DistutilsTemplateError(
-                    "'%s' expects <dir> <pattern1> <pattern2> ..." % action
- )
- dir = convert_path(words[1])
- patterns = [convert_path(w) for w in words[2:]]
- elif action in ('graft', 'prune'):
- if len(words) != 2:
- raise DistutilsTemplateError(
-                    "'%s' expects a single <dir_pattern>" % action
- )
- dir_pattern = convert_path(words[1])
- else:
- raise DistutilsTemplateError("unknown action '%s'" % action)
-
- return (action, patterns, dir, dir_pattern)
-
- def process_template_line(self, line): # noqa: C901
- # Parse the line: split it up, make sure the right number of words
- # is there, and return the relevant words. 'action' is always
- # defined: it's the first word of the line. Which of the other
- # three are defined depends on the action; it'll be either
- # patterns, (dir and patterns), or (dir_pattern).
- (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
-
- # OK, now we know that the action is valid and we have the
- # right number of words on the line for that action -- so we
- # can proceed with minimal error-checking.
- if action == 'include':
- self.debug_print("include " + ' '.join(patterns))
- for pattern in patterns:
- if not self.include_pattern(pattern, anchor=1):
- log.warn("warning: no files found matching '%s'", pattern)
-
- elif action == 'exclude':
- self.debug_print("exclude " + ' '.join(patterns))
- for pattern in patterns:
- if not self.exclude_pattern(pattern, anchor=1):
- log.warn(
- (
- "warning: no previously-included files "
- "found matching '%s'"
- ),
- pattern,
- )
-
- elif action == 'global-include':
- self.debug_print("global-include " + ' '.join(patterns))
- for pattern in patterns:
- if not self.include_pattern(pattern, anchor=0):
- log.warn(
- (
- "warning: no files found matching '%s' "
- "anywhere in distribution"
- ),
- pattern,
- )
-
- elif action == 'global-exclude':
- self.debug_print("global-exclude " + ' '.join(patterns))
- for pattern in patterns:
- if not self.exclude_pattern(pattern, anchor=0):
- log.warn(
- (
- "warning: no previously-included files matching "
- "'%s' found anywhere in distribution"
- ),
- pattern,
- )
-
- elif action == 'recursive-include':
- self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns)))
- for pattern in patterns:
- if not self.include_pattern(pattern, prefix=dir):
- msg = (
- "warning: no files found matching '%s' " "under directory '%s'"
- )
- log.warn(msg, pattern, dir)
-
- elif action == 'recursive-exclude':
- self.debug_print("recursive-exclude {} {}".format(dir, ' '.join(patterns)))
- for pattern in patterns:
- if not self.exclude_pattern(pattern, prefix=dir):
- log.warn(
- (
- "warning: no previously-included files matching "
- "'%s' found under directory '%s'"
- ),
- pattern,
- dir,
- )
-
- elif action == 'graft':
- self.debug_print("graft " + dir_pattern)
- if not self.include_pattern(None, prefix=dir_pattern):
- log.warn("warning: no directories found matching '%s'", dir_pattern)
-
- elif action == 'prune':
- self.debug_print("prune " + dir_pattern)
- if not self.exclude_pattern(None, prefix=dir_pattern):
- log.warn(
- ("no previously-included directories found " "matching '%s'"),
- dir_pattern,
- )
- else:
- raise DistutilsInternalError(
- "this cannot happen: invalid action '%s'" % action
- )
-
- # Filtering/selection methods
-
- def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
- """Select strings (presumably filenames) from 'self.files' that
- match 'pattern', a Unix-style wildcard (glob) pattern. Patterns
- are not quite the same as implemented by the 'fnmatch' module: '*'
- and '?' match non-special characters, where "special" is platform-
- dependent: slash on Unix; colon, slash, and backslash on
- DOS/Windows; and colon on Mac OS.
-
- If 'anchor' is true (the default), then the pattern match is more
- stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
- 'anchor' is false, both of these will match.
-
- If 'prefix' is supplied, then only filenames starting with 'prefix'
- (itself a pattern) and ending with 'pattern', with anything in between
- them, will match. 'anchor' is ignored in this case.
-
- If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
- 'pattern' is assumed to be either a string containing a regex or a
- regex object -- no translation is done, the regex is just compiled
- and used as-is.
-
- Selected strings will be added to self.files.
-
- Return True if files are found, False otherwise.
- """
- # XXX docstring lying about what the special chars are?
- files_found = False
- pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
- self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern)
-
- # delayed loading of allfiles list
- if self.allfiles is None:
- self.findall()
-
- for name in self.allfiles:
- if pattern_re.search(name):
- self.debug_print(" adding " + name)
- self.files.append(name)
- files_found = True
- return files_found
-
- def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
- """Remove strings (presumably filenames) from 'files' that match
- 'pattern'. Other parameters are the same as for
- 'include_pattern()', above.
- The list 'self.files' is modified in place.
- Return True if files are found, False otherwise.
- """
- files_found = False
- pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
- self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern)
- for i in range(len(self.files) - 1, -1, -1):
- if pattern_re.search(self.files[i]):
- self.debug_print(" removing " + self.files[i])
- del self.files[i]
- files_found = True
- return files_found
-
-
-# Utility functions
-
-
-def _find_all_simple(path):
- """
- Find all files under 'path'
- """
- all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True))
- results = (
- os.path.join(base, file) for base, dirs, files in all_unique for file in files
- )
- return filter(os.path.isfile, results)
-
-
-class _UniqueDirs(set):
- """
- Exclude previously-seen dirs from walk results,
- avoiding infinite recursion.
- Ref https://bugs.python.org/issue44497.
- """
-
- def __call__(self, walk_item):
- """
- Given an item from an os.walk result, determine
- if the item represents a unique dir for this instance
- and if not, prevent further traversal.
- """
- base, dirs, files = walk_item
- stat = os.stat(base)
- candidate = stat.st_dev, stat.st_ino
- found = candidate in self
- if found:
- del dirs[:]
- self.add(candidate)
- return not found
-
- @classmethod
- def filter(cls, items):
- return filter(cls(), items)
-
-
-def findall(dir=os.curdir):
- """
- Find all files under 'dir' and return the list of full filenames.
- Unless dir is '.', return full filenames with dir prepended.
- """
- files = _find_all_simple(dir)
- if dir == os.curdir:
- make_rel = functools.partial(os.path.relpath, start=dir)
- files = map(make_rel, files)
- return list(files)
-
-
-def glob_to_re(pattern):
- """Translate a shell-like glob pattern to a regular expression; return
- a string containing the regex. Differs from 'fnmatch.translate()' in
- that '*' does not match "special characters" (which are
- platform-specific).
- """
- pattern_re = fnmatch.translate(pattern)
-
- # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
- # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
- # and by extension they shouldn't match such "special characters" under
- # any OS. So change all non-escaped dots in the RE to match any
- # character except the special characters (currently: just os.sep).
- sep = os.sep
- if os.sep == '\\':
- # we're using a regex to manipulate a regex, so we need
- # to escape the backslash twice
- sep = r'\\\\'
- escaped = r'\1[^%s]' % sep
-    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
-    return pattern_re
-
-    >>> for item in Sectioned.read(Sectioned._sample):
- ... print(item)
- Pair(name='sec1', value='# comments ignored')
- Pair(name='sec1', value='a = 1')
- Pair(name='sec1', value='b = 2')
- Pair(name='sec2', value='a = 2')
-
- >>> res = Sectioned.section_pairs(Sectioned._sample)
- >>> item = next(res)
- >>> item.name
- 'sec1'
- >>> item.value
- Pair(name='a', value='1')
- >>> item = next(res)
- >>> item.value
- Pair(name='b', value='2')
- >>> item = next(res)
- >>> item.name
- 'sec2'
- >>> item.value
- Pair(name='a', value='2')
- >>> list(res)
- []
- """
-
- _sample = textwrap.dedent(
- """
- [sec1]
- # comments ignored
- a = 1
- b = 2
-
- [sec2]
- a = 2
- """
- ).lstrip()
-
- @classmethod
- def section_pairs(cls, text):
- return (
- section._replace(value=Pair.parse(section.value))
- for section in cls.read(text, filter_=cls.valid)
- if section.name is not None
- )
-
- @staticmethod
- def read(text, filter_=None):
- lines = filter(filter_, map(str.strip, text.splitlines()))
- name = None
- for value in lines:
- section_match = value.startswith('[') and value.endswith(']')
- if section_match:
- name = value.strip('[]')
- continue
- yield Pair(name, value)
-
- @staticmethod
- def valid(line):
- return line and not line.startswith('#')
-
-
-class DeprecatedTuple:
- """
- Provide subscript item access for backward compatibility.
-
- >>> recwarn = getfixture('recwarn')
- >>> ep = EntryPoint(name='name', value='value', group='group')
- >>> ep[:]
- ('name', 'value', 'group')
- >>> ep[0]
- 'name'
- >>> len(recwarn)
- 1
- """
-
- _warn = functools.partial(
- warnings.warn,
- "EntryPoint tuple interface is deprecated. Access members by name.",
- DeprecationWarning,
- stacklevel=pypy_partial(2),
- )
-
- def __getitem__(self, item):
- self._warn()
- return self._key()[item]
-
-
-class EntryPoint(DeprecatedTuple):
- """An entry point as defined by Python packaging conventions.
-
- See `the packaging docs on entry points
-    <https://packaging.python.org/en/latest/specifications/entry-points/>`_
- for more information.
- """
-
- pattern = re.compile(
-        r'(?P<module>[\w.]+)\s*'
-        r'(:\s*(?P<attr>[\w.]+)\s*)?'
-        r'((?P<extras>\[.*\])\s*)?$'
- )
- """
- A regular expression describing the syntax for an entry point,
- which might look like:
-
- - module
- - package.module
- - package.module:attribute
- - package.module:object.attribute
- - package.module:attr [extra1, extra2]
-
- Other combinations are possible as well.
-
- The expression is lenient about whitespace around the ':',
- following the attr, and following any extras.
- """
-
- dist: Optional['Distribution'] = None
-
- def __init__(self, name, value, group):
- vars(self).update(name=name, value=value, group=group)
-
- def load(self):
- """Load the entry point from its definition. If only a module
- is indicated by the value, return that module. Otherwise,
- return the named object.
- """
- match = self.pattern.match(self.value)
- module = import_module(match.group('module'))
- attrs = filter(None, (match.group('attr') or '').split('.'))
- return functools.reduce(getattr, attrs, module)
-
- @property
- def module(self):
- match = self.pattern.match(self.value)
- return match.group('module')
-
- @property
- def attr(self):
- match = self.pattern.match(self.value)
- return match.group('attr')
-
- @property
- def extras(self):
- match = self.pattern.match(self.value)
- return list(re.finditer(r'\w+', match.group('extras') or ''))
-
- def _for(self, dist):
- vars(self).update(dist=dist)
- return self
-
- def __iter__(self):
- """
- Supply iter so one may construct dicts of EntryPoints by name.
- """
- msg = (
- "Construction of dict of EntryPoints is deprecated in "
- "favor of EntryPoints."
- )
- warnings.warn(msg, DeprecationWarning)
- return iter((self.name, self))
-
- def matches(self, **params):
- attrs = (getattr(self, param) for param in params)
- return all(map(operator.eq, params.values(), attrs))
-
- def _key(self):
- return self.name, self.value, self.group
-
- def __lt__(self, other):
- return self._key() < other._key()
-
- def __eq__(self, other):
- return self._key() == other._key()
-
- def __setattr__(self, name, value):
- raise AttributeError("EntryPoint objects are immutable.")
-
- def __repr__(self):
- return (
- f'EntryPoint(name={self.name!r}, value={self.value!r}, '
- f'group={self.group!r})'
- )
-
- def __hash__(self):
- return hash(self._key())
-
-
-class DeprecatedList(list):
- """
- Allow an otherwise immutable object to implement mutability
- for compatibility.
-
- >>> recwarn = getfixture('recwarn')
- >>> dl = DeprecatedList(range(3))
- >>> dl[0] = 1
- >>> dl.append(3)
- >>> del dl[3]
- >>> dl.reverse()
- >>> dl.sort()
- >>> dl.extend([4])
- >>> dl.pop(-1)
- 4
- >>> dl.remove(1)
- >>> dl += [5]
- >>> dl + [6]
- [1, 2, 5, 6]
- >>> dl + (6,)
- [1, 2, 5, 6]
- >>> dl.insert(0, 0)
- >>> dl
- [0, 1, 2, 5]
- >>> dl == [0, 1, 2, 5]
- True
- >>> dl == (0, 1, 2, 5)
- True
- >>> len(recwarn)
- 1
- """
-
- __slots__ = ()
-
- _warn = functools.partial(
- warnings.warn,
- "EntryPoints list interface is deprecated. Cast to list if needed.",
- DeprecationWarning,
- stacklevel=pypy_partial(2),
- )
-
- def _wrap_deprecated_method(method_name: str): # type: ignore
- def wrapped(self, *args, **kwargs):
- self._warn()
- return getattr(super(), method_name)(*args, **kwargs)
-
- return method_name, wrapped
-
- locals().update(
- map(
- _wrap_deprecated_method,
- '__setitem__ __delitem__ append reverse extend pop remove '
- '__iadd__ insert sort'.split(),
- )
- )
-
- def __add__(self, other):
- if not isinstance(other, tuple):
- self._warn()
- other = tuple(other)
- return self.__class__(tuple(self) + other)
-
- def __eq__(self, other):
- if not isinstance(other, tuple):
- self._warn()
- other = tuple(other)
-
- return tuple(self).__eq__(other)
-
-
-class EntryPoints(DeprecatedList):
- """
- An immutable collection of selectable EntryPoint objects.
- """
-
- __slots__ = ()
-
- def __getitem__(self, name): # -> EntryPoint:
- """
- Get the EntryPoint in self matching name.
- """
- if isinstance(name, int):
- warnings.warn(
- "Accessing entry points by index is deprecated. "
- "Cast to tuple if needed.",
- DeprecationWarning,
- stacklevel=2,
- )
- return super().__getitem__(name)
- try:
- return next(iter(self.select(name=name)))
- except StopIteration:
- raise KeyError(name)
-
- def select(self, **params):
- """
- Select entry points from self that match the
- given parameters (typically group and/or name).
- """
- return EntryPoints(ep for ep in self if ep.matches(**params))
-
- @property
- def names(self):
- """
- Return the set of all names of all entry points.
- """
- return {ep.name for ep in self}
-
- @property
- def groups(self):
- """
- Return the set of all groups of all entry points.
-
- For coverage while SelectableGroups is present.
- >>> EntryPoints().groups
- set()
- """
- return {ep.group for ep in self}
-
- @classmethod
- def _from_text_for(cls, text, dist):
- return cls(ep._for(dist) for ep in cls._from_text(text))
-
- @staticmethod
- def _from_text(text):
- return (
- EntryPoint(name=item.value.name, value=item.value.value, group=item.name)
- for item in Sectioned.section_pairs(text or '')
- )
-
-
-class Deprecated:
- """
- Compatibility add-in for mapping to indicate that
- mapping behavior is deprecated.
-
- >>> recwarn = getfixture('recwarn')
- >>> class DeprecatedDict(Deprecated, dict): pass
- >>> dd = DeprecatedDict(foo='bar')
- >>> dd.get('baz', None)
- >>> dd['foo']
- 'bar'
- >>> list(dd)
- ['foo']
- >>> list(dd.keys())
- ['foo']
- >>> 'foo' in dd
- True
- >>> list(dd.values())
- ['bar']
- >>> len(recwarn)
- 1
- """
-
- _warn = functools.partial(
- warnings.warn,
- "SelectableGroups dict interface is deprecated. Use select.",
- DeprecationWarning,
- stacklevel=pypy_partial(2),
- )
-
- def __getitem__(self, name):
- self._warn()
- return super().__getitem__(name)
-
- def get(self, name, default=None):
- self._warn()
- return super().get(name, default)
-
- def __iter__(self):
- self._warn()
- return super().__iter__()
-
- def __contains__(self, *args):
- self._warn()
- return super().__contains__(*args)
-
- def keys(self):
- self._warn()
- return super().keys()
-
- def values(self):
- self._warn()
- return super().values()
-
-
-class SelectableGroups(Deprecated, dict):
- """
- A backward- and forward-compatible result from
- entry_points that fully implements the dict interface.
- """
-
- @classmethod
- def load(cls, eps):
- by_group = operator.attrgetter('group')
- ordered = sorted(eps, key=by_group)
- grouped = itertools.groupby(ordered, by_group)
- return cls((group, EntryPoints(eps)) for group, eps in grouped)
-
- @property
- def _all(self):
- """
- Reconstruct a list of all entrypoints from the groups.
- """
- groups = super(Deprecated, self).values()
- return EntryPoints(itertools.chain.from_iterable(groups))
-
- @property
- def groups(self):
- return self._all.groups
-
- @property
- def names(self):
- """
- for coverage:
- >>> SelectableGroups().names
- set()
- """
- return self._all.names
-
- def select(self, **params):
- if not params:
- return self
- return self._all.select(**params)
-
-
-class PackagePath(pathlib.PurePosixPath):
- """A reference to a path in a package"""
-
- def read_text(self, encoding='utf-8'):
- with self.locate().open(encoding=encoding) as stream:
- return stream.read()
-
- def read_binary(self):
- with self.locate().open('rb') as stream:
- return stream.read()
-
- def locate(self):
- """Return a path-like object for this path"""
- return self.dist.locate_file(self)
-
-
-class FileHash:
- def __init__(self, spec):
- self.mode, _, self.value = spec.partition('=')
-
- def __repr__(self):
-        return f'<FileHash mode: {self.mode} value: {self.value}>'
-
-
-class Distribution:
- """A Python distribution package."""
-
- @abc.abstractmethod
- def read_text(self, filename):
- """Attempt to load metadata file given by the name.
-
- :param filename: The name of the file in the distribution info.
- :return: The text if found, otherwise None.
- """
-
- @abc.abstractmethod
- def locate_file(self, path):
- """
- Given a path to a file in this distribution, return a path
- to it.
- """
-
- @classmethod
- def from_name(cls, name):
- """Return the Distribution for the given package name.
-
- :param name: The name of the distribution package to search for.
- :return: The Distribution instance (or subclass thereof) for the named
- package, if found.
- :raises PackageNotFoundError: When the named package's distribution
- metadata cannot be found.
- """
- for resolver in cls._discover_resolvers():
- dists = resolver(DistributionFinder.Context(name=name))
- dist = next(iter(dists), None)
- if dist is not None:
- return dist
- else:
- raise PackageNotFoundError(name)
-
- @classmethod
- def discover(cls, **kwargs):
- """Return an iterable of Distribution objects for all packages.
-
- Pass a ``context`` or pass keyword arguments for constructing
- a context.
-
- :context: A ``DistributionFinder.Context`` object.
- :return: Iterable of Distribution objects for all packages.
- """
- context = kwargs.pop('context', None)
- if context and kwargs:
- raise ValueError("cannot accept context and kwargs")
- context = context or DistributionFinder.Context(**kwargs)
- return itertools.chain.from_iterable(
- resolver(context) for resolver in cls._discover_resolvers()
- )
-
- @staticmethod
- def at(path):
- """Return a Distribution for the indicated metadata path
-
- :param path: a string or path-like object
- :return: a concrete Distribution instance for the path
- """
- return PathDistribution(pathlib.Path(path))
-
- @staticmethod
- def _discover_resolvers():
- """Search the meta_path for resolvers."""
- declared = (
- getattr(finder, 'find_distributions', None) for finder in sys.meta_path
- )
- return filter(None, declared)
-
- @property
- def metadata(self) -> _meta.PackageMetadata:
- """Return the parsed metadata for this Distribution.
-
- The returned object will have keys that name the various bits of
- metadata. See PEP 566 for details.
- """
- text = (
- self.read_text('METADATA')
- or self.read_text('PKG-INFO')
- # This last clause is here to support old egg-info files. Its
- # effect is to just end up using the PathDistribution's self._path
- # (which points to the egg-info file) attribute unchanged.
- or self.read_text('')
- )
- return _adapters.Message(email.message_from_string(text))
-
- @property
- def name(self):
- """Return the 'Name' metadata for the distribution package."""
- return self.metadata['Name']
-
- @property
- def _normalized_name(self):
- """Return a normalized version of the name."""
- return Prepared.normalize(self.name)
-
- @property
- def version(self):
- """Return the 'Version' metadata for the distribution package."""
- return self.metadata['Version']
-
- @property
- def entry_points(self):
- return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)
-
- @property
- def files(self):
- """Files in this distribution.
-
- :return: List of PackagePath for this distribution or None
-
- Result is `None` if the metadata file that enumerates files
- (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
- missing.
- Result may be empty if the metadata exists but is empty.
- """
-
- def make_file(name, hash=None, size_str=None):
- result = PackagePath(name)
- result.hash = FileHash(hash) if hash else None
- result.size = int(size_str) if size_str else None
- result.dist = self
- return result
-
- @pass_none
- def make_files(lines):
- return list(starmap(make_file, csv.reader(lines)))
-
- return make_files(self._read_files_distinfo() or self._read_files_egginfo())
-
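A minimal sketch of consuming the property above, assuming some installed distribution (here called "wheel" purely as an example):

```python
from importlib.metadata import distribution

dist = distribution("wheel")  # example package name

# files is None when neither RECORD nor SOURCES.txt exists, so guard the loop.
for pkg_path in dist.files or []:
    # Each PackagePath carries the hash and size parsed from RECORD (either may
    # be None) plus a back-reference to its Distribution.
    print(pkg_path, pkg_path.hash, pkg_path.size)
    print(pkg_path.locate())  # resolved through Distribution.locate_file()
```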
- def _read_files_distinfo(self):
- """
- Read the lines of RECORD
- """
- text = self.read_text('RECORD')
- return text and text.splitlines()
-
- def _read_files_egginfo(self):
- """
- SOURCES.txt might contain literal commas, so wrap each line
- in quotes.
- """
- text = self.read_text('SOURCES.txt')
- return text and map('"{}"'.format, text.splitlines())
-
- @property
- def requires(self):
- """Generated requirements specified for this Distribution"""
- reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
- return reqs and list(reqs)
-
- def _read_dist_info_reqs(self):
- return self.metadata.get_all('Requires-Dist')
-
- def _read_egg_info_reqs(self):
- source = self.read_text('requires.txt')
- return pass_none(self._deps_from_requires_text)(source)
-
- @classmethod
- def _deps_from_requires_text(cls, source):
- return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source))
-
- @staticmethod
- def _convert_egg_info_reqs_to_simple_reqs(sections):
- """
- Historically, setuptools would solicit and store 'extra'
- requirements, including those with environment markers,
- in separate sections. More modern tools expect each
- dependency to be defined separately, with any relevant
- extras and environment markers attached directly to that
- requirement. This method converts the former to the
- latter. See _test_deps_from_requires_text for an example.
- """
-
- def make_condition(name):
- return name and f'extra == "{name}"'
-
- def quoted_marker(section):
- section = section or ''
- extra, sep, markers = section.partition(':')
- if extra and markers:
- markers = f'({markers})'
- conditions = list(filter(None, [markers, make_condition(extra)]))
- return '; ' + ' and '.join(conditions) if conditions else ''
-
- def url_req_space(req):
- """
- PEP 508 requires a space between the url_spec and the quoted_marker.
- Ref python/importlib_metadata#357.
- """
- # '@' is uniquely indicative of a url_req.
- return ' ' * ('@' in req)
-
- for section in sections:
- space = url_req_space(section.value)
- yield section.value + space + quoted_marker(section.name)
-
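To make the conversion concrete, here is a small worked sketch that feeds a sectioned requires.txt through the private `_deps_from_requires_text` helper defined above (a non-public, version-dependent API); the package names are invented for illustration:

```python
from importlib.metadata import Distribution

requires_txt = """\
packaging

[docs]
sphinx

[ssl:python_version < "3.10"]
truststore
"""

for req in Distribution._deps_from_requires_text(requires_txt):
    print(req)

# Expected output, with extras and markers folded into each requirement:
#   packaging
#   sphinx; extra == "docs"
#   truststore; (python_version < "3.10") and extra == "ssl"
```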
-
-class DistributionFinder(MetaPathFinder):
- """
- A MetaPathFinder capable of discovering installed distributions.
- """
-
- class Context:
- """
- Keyword arguments presented by the caller to
- ``distributions()`` or ``Distribution.discover()``
- to narrow the scope of a search for distributions
- in all DistributionFinders.
-
- Each DistributionFinder may expect any parameters
- and should attempt to honor the canonical
- parameters defined below when appropriate.
- """
-
- name = None
- """
- Specific name for which a distribution finder should match.
- A name of ``None`` matches all distributions.
- """
-
- def __init__(self, **kwargs):
- vars(self).update(kwargs)
-
- @property
- def path(self):
- """
-        The sequence of directory paths that a distribution finder
-        should search.
-
- Typically refers to Python installed package paths such as
- "site-packages" directories and defaults to ``sys.path``.
- """
- return vars(self).get('path', sys.path)
-
- @abc.abstractmethod
- def find_distributions(self, context=Context()):
- """
- Find distributions.
-
- Return an iterable of all Distribution instances capable of
- loading the metadata for packages matching the ``context``,
- a DistributionFinder.Context instance.
- """
-
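A short sketch of how a Context narrows discovery, using the public convenience API; the search directory is an arbitrary example path:

```python
from importlib.metadata import DistributionFinder, distributions

ctx = DistributionFinder.Context(name=None, path=["/tmp/site-packages"])  # example directory
print(ctx.name, ctx.path)  # None ['/tmp/site-packages']

# Keyword arguments to distributions() are forwarded into a Context by
# Distribution.discover(), so this searches only the given directory.
for dist in distributions(path=["/tmp/site-packages"]):
    print(dist.metadata["Name"])
```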
-
-class FastPath:
- """
- Micro-optimized class for searching a path for
- children.
-
- >>> FastPath('').children()
- ['...']
- """
-
- @functools.lru_cache() # type: ignore
- def __new__(cls, root):
- return super().__new__(cls)
-
- def __init__(self, root):
- self.root = str(root)
-
- def joinpath(self, child):
- return pathlib.Path(self.root, child)
-
- def children(self):
- with suppress(Exception):
- return os.listdir(self.root or '.')
- with suppress(Exception):
- return self.zip_children()
- return []
-
- def zip_children(self):
- zip_path = zipp.Path(self.root)
- names = zip_path.root.namelist()
- self.joinpath = zip_path.joinpath
-
- return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names)
-
- def search(self, name):
- return self.lookup(self.mtime).search(name)
-
- @property
- def mtime(self):
- with suppress(OSError):
- return os.stat(self.root).st_mtime
- self.lookup.cache_clear()
-
- @method_cache
- def lookup(self, mtime):
- return Lookup(self)
-
-
-class Lookup:
- def __init__(self, path: FastPath):
- base = os.path.basename(path.root).lower()
- base_is_egg = base.endswith(".egg")
- self.infos = FreezableDefaultDict(list)
- self.eggs = FreezableDefaultDict(list)
-
- for child in path.children():
- low = child.lower()
- if low.endswith((".dist-info", ".egg-info")):
- # rpartition is faster than splitext and suitable for this purpose.
- name = low.rpartition(".")[0].partition("-")[0]
- normalized = Prepared.normalize(name)
- self.infos[normalized].append(path.joinpath(child))
- elif base_is_egg and low == "egg-info":
- name = base.rpartition(".")[0].partition("-")[0]
- legacy_normalized = Prepared.legacy_normalize(name)
- self.eggs[legacy_normalized].append(path.joinpath(child))
-
- self.infos.freeze()
- self.eggs.freeze()
-
- def search(self, prepared):
- infos = (
- self.infos[prepared.normalized]
- if prepared
- else itertools.chain.from_iterable(self.infos.values())
- )
- eggs = (
- self.eggs[prepared.legacy_normalized]
- if prepared
- else itertools.chain.from_iterable(self.eggs.values())
- )
- return itertools.chain(infos, eggs)
-
-
-class Prepared:
- """
- A prepared search for metadata on a possibly-named package.
- """
-
- normalized = None
- legacy_normalized = None
-
- def __init__(self, name):
- self.name = name
- if name is None:
- return
- self.normalized = self.normalize(name)
- self.legacy_normalized = self.legacy_normalize(name)
-
- @staticmethod
- def normalize(name):
- """
- PEP 503 normalization plus dashes as underscores.
- """
- return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')
-
- @staticmethod
- def legacy_normalize(name):
- """
- Normalize the package name as found in the convention in
- older packaging tools versions and specs.
- """
- return name.lower().replace('-', '_')
-
- def __bool__(self):
- return bool(self.name)
-
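For reference, the two normalizations defined above differ as follows on the same input (doctest-style sketch of the `Prepared` class):

```python
>>> Prepared.normalize("Foo.Bar-Baz")
'foo_bar_baz'
>>> Prepared.legacy_normalize("Foo.Bar-Baz")
'foo.bar_baz'
```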
-
-@install
-class MetadataPathFinder(NullFinder, DistributionFinder):
- """A degenerate finder for distribution packages on the file system.
-
- This finder supplies only a find_distributions() method for versions
- of Python that do not have a PathFinder find_distributions().
- """
-
- def find_distributions(self, context=DistributionFinder.Context()):
- """
- Find distributions.
-
- Return an iterable of all Distribution instances capable of
- loading the metadata for packages matching ``context.name``
- (or all names if ``None`` indicated) along the paths in the list
- of directories ``context.path``.
- """
- found = self._search_paths(context.name, context.path)
- return map(PathDistribution, found)
-
- @classmethod
- def _search_paths(cls, name, paths):
- """Find metadata directories in paths heuristically."""
- prepared = Prepared(name)
- return itertools.chain.from_iterable(
- path.search(prepared) for path in map(FastPath, paths)
- )
-
- def invalidate_caches(cls):
- FastPath.__new__.cache_clear()
-
-
-class PathDistribution(Distribution):
- def __init__(self, path: SimplePath):
- """Construct a distribution.
-
- :param path: SimplePath indicating the metadata directory.
- """
- self._path = path
-
- def read_text(self, filename):
- with suppress(
- FileNotFoundError,
- IsADirectoryError,
- KeyError,
- NotADirectoryError,
- PermissionError,
- ):
- return self._path.joinpath(filename).read_text(encoding='utf-8')
-
- read_text.__doc__ = Distribution.read_text.__doc__
-
- def locate_file(self, path):
- return self._path.parent / path
-
- @property
- def _normalized_name(self):
- """
- Performance optimization: where possible, resolve the
- normalized name from the file system path.
- """
- stem = os.path.basename(str(self._path))
- return self._name_from_stem(stem) or super()._normalized_name
-
- def _name_from_stem(self, stem):
- name, ext = os.path.splitext(stem)
- if ext not in ('.dist-info', '.egg-info'):
- return
- name, sep, rest = stem.partition('-')
- return name
-
-
-def distribution(distribution_name):
- """Get the ``Distribution`` instance for the named package.
-
- :param distribution_name: The name of the distribution package as a string.
- :return: A ``Distribution`` instance (or subclass thereof).
- """
- return Distribution.from_name(distribution_name)
-
-
-def distributions(**kwargs):
- """Get all ``Distribution`` instances in the current environment.
-
- :return: An iterable of ``Distribution`` instances.
- """
- return Distribution.discover(**kwargs)
-
-
-def metadata(distribution_name) -> _meta.PackageMetadata:
- """Get the metadata for the named package.
-
- :param distribution_name: The name of the distribution package to query.
- :return: A PackageMetadata containing the parsed metadata.
- """
- return Distribution.from_name(distribution_name).metadata
-
-
-def version(distribution_name):
- """Get the version string for the named package.
-
- :param distribution_name: The name of the distribution package to query.
- :return: The version string for the package as defined in the package's
- "Version" metadata key.
- """
- return distribution(distribution_name).version
-
-
-def entry_points(**params) -> Union[EntryPoints, SelectableGroups]:
- """Return EntryPoint objects for all installed packages.
-
- Pass selection parameters (group or name) to filter the
- result to entry points matching those properties (see
- EntryPoints.select()).
-
- For compatibility, returns ``SelectableGroups`` object unless
- selection parameters are supplied. In the future, this function
- will return ``EntryPoints`` instead of ``SelectableGroups``
- even when no selection parameters are supplied.
-
- For maximum future compatibility, pass selection parameters
- or invoke ``.select`` with parameters on the result.
-
- :return: EntryPoints or SelectableGroups for all installed packages.
- """
- norm_name = operator.attrgetter('_normalized_name')
- unique = functools.partial(unique_everseen, key=norm_name)
- eps = itertools.chain.from_iterable(
- dist.entry_points for dist in unique(distributions())
- )
- return SelectableGroups.load(eps).select(**params)
-
-
-def files(distribution_name):
- """Return a list of files for the named package.
-
- :param distribution_name: The name of the distribution package to query.
- :return: List of files composing the distribution.
- """
- return distribution(distribution_name).files
-
-
-def requires(distribution_name):
- """
- Return a list of requirements for the named package.
-
- :return: An iterator of requirements, suitable for
- packaging.requirement.Requirement.
- """
- return distribution(distribution_name).requires
-
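Taken together, the helpers above cover the common metadata queries. A minimal sketch, assuming some package (here "requests", purely as an example) is installed, and keeping in mind that `entry_points()` returns the `SelectableGroups` compatibility wrapper unless selection parameters are passed:

```python
from importlib.metadata import (
    PackageNotFoundError, entry_points, files, metadata, requires, version,
)

try:
    print(version("requests"))           # the "Version" metadata key
    md = metadata("requests")
    print(md["Name"], md["Summary"])
    print(requires("requests"))          # list of PEP 508 strings, or None
    print(len(files("requests") or []))  # None when no RECORD/SOURCES.txt
except PackageNotFoundError:
    print("requests is not installed")

# Selecting by group yields plain EntryPoint objects on old and new versions alike.
for ep in entry_points(group="console_scripts"):
    print(ep.name, "->", ep.value)
```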
-
-def packages_distributions() -> Mapping[str, List[str]]:
- """
- Return a mapping of top-level packages to their
- distributions.
-
- >>> import collections.abc
- >>> pkgs = packages_distributions()
- >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values())
- True
- """
- pkg_to_dist = collections.defaultdict(list)
- for dist in distributions():
- for pkg in _top_level_declared(dist) or _top_level_inferred(dist):
- pkg_to_dist[pkg].append(dist.metadata['Name'])
- return dict(pkg_to_dist)
-
-
-def _top_level_declared(dist):
- return (dist.read_text('top_level.txt') or '').split()
-
-
-def _top_level_inferred(dist):
- return {
- f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name
- for f in always_iterable(dist.files)
- if f.suffix == ".py"
- }
diff --git a/spaces/BilalSardar/Voice-Cloning/app.py b/spaces/BilalSardar/Voice-Cloning/app.py
deleted file mode 100644
index 778e84d07de39404d953894513627e7eb138b397..0000000000000000000000000000000000000000
--- a/spaces/BilalSardar/Voice-Cloning/app.py
+++ /dev/null
@@ -1,165 +0,0 @@
-from turtle import title
-import gradio as gr
-
-import git
-import os
-os.system('git clone https://github.com/Edresson/Coqui-TTS -b multilingual-torchaudio-SE TTS')
-os.system('pip install -q -e TTS/')
-os.system('pip install -q torchaudio==0.9.0')
-
-import sys
-TTS_PATH = "TTS/"
-
-# add libraries into environment
-sys.path.append(TTS_PATH) # set this if TTS is not installed globally
-
-import os
-import string
-import time
-import argparse
-import json
-
-import numpy as np
-import IPython
-from IPython.display import Audio
-
-
-import torch
-
-from TTS.tts.utils.synthesis import synthesis
-#from TTS.tts.utils.text.symbols import make_symbols, phonemes, symbols
-from TTS.utils.audio import AudioProcessor
-
-
-from TTS.tts.models import setup_model
-from TTS.config import load_config
-from TTS.tts.models.vits import *
-
-OUT_PATH = 'out/'
-
-# create output path
-os.makedirs(OUT_PATH, exist_ok=True)
-
-# model vars
-MODEL_PATH = '/home/user/app/best_model_latest.pth.tar'
-CONFIG_PATH = '/home/user/app/config.json'
-TTS_LANGUAGES = "/home/user/app/language_ids.json"
-TTS_SPEAKERS = "/home/user/app/speakers.json"
-USE_CUDA = torch.cuda.is_available()
-
-# load the config
-C = load_config(CONFIG_PATH)
-
-
-# load the audio processor
-ap = AudioProcessor(**C.audio)
-
-speaker_embedding = None
-
-C.model_args['d_vector_file'] = TTS_SPEAKERS
-C.model_args['use_speaker_encoder_as_loss'] = False
-
-model = setup_model(C)
-model.language_manager.set_language_ids_from_file(TTS_LANGUAGES)
-# print(model.language_manager.num_languages, model.embedded_language_dim)
-# print(model.emb_l)
-cp = torch.load(MODEL_PATH, map_location=torch.device('cpu'))
-# remove speaker encoder
-model_weights = cp['model'].copy()
-for key in list(model_weights.keys()):
- if "speaker_encoder" in key:
- del model_weights[key]
-
-model.load_state_dict(model_weights)
-
-
-model.eval()
-
-if USE_CUDA:
- model = model.cuda()
-
-# synthesize voice
-use_griffin_lim = False
-
-os.system('pip install -q pydub ffmpeg-normalize')
-
-CONFIG_SE_PATH = "config_se.json"
-CHECKPOINT_SE_PATH = "SE_checkpoint.pth.tar"
-
-from TTS.tts.utils.speakers import SpeakerManager
-from pydub import AudioSegment
-import librosa
-
-SE_speaker_manager = SpeakerManager(encoder_model_path=CHECKPOINT_SE_PATH, encoder_config_path=CONFIG_SE_PATH, use_cuda=USE_CUDA)
-
-def compute_spec(ref_file):
- y, sr = librosa.load(ref_file, sr=ap.sample_rate)
- spec = ap.spectrogram(y)
- spec = torch.FloatTensor(spec).unsqueeze(0)
- return spec
-
-
-
-def greet(Text,Voicetoclone,VoiceMicrophone):
- text= "%s" % (Text)
- if Voicetoclone is not None:
- reference_files= "%s" % (Voicetoclone)
- print("path url")
- print(Voicetoclone)
- sample= str(Voicetoclone)
- else:
- reference_files= "%s" % (VoiceMicrophone)
- print("path url")
- print(VoiceMicrophone)
- sample= str(VoiceMicrophone)
-    # The UI advertises limits of 30 MB for the reference clip and 2000 characters of text.
-    size_mb = os.path.getsize(reference_files) / 1_000_000
-    if size_mb > 30 or len(text) > 2000:
-        message = "File is greater than 30 MB or the text is longer than 2000 characters. Please re-try with smaller inputs."
-        print(message)
-        raise SystemExit(message)
-    else:
-        # Interpolate the actual path; a bare $sample would be an (unset) shell variable.
-        os.system(f'ffmpeg-normalize "{sample}" -nt rms -t=-27 -o "{sample}" -ar 16000 -f')
- reference_emb = SE_speaker_manager.compute_d_vector_from_clip(reference_files)
- model.length_scale = 1 # scaler for the duration predictor. The larger it is, the slower the speech.
- model.inference_noise_scale = 0.3 # defines the noise variance applied to the random z vector at inference.
- model.inference_noise_scale_dp = 0.3 # defines the noise variance applied to the duration predictor z vector at inference.
- text = text
- model.language_manager.language_id_mapping
- language_id = 0
-
- print(" > text: {}".format(text))
- wav, alignment, _, _ = synthesis(
- model,
- text,
- C,
- "cuda" in str(next(model.parameters()).device),
- ap,
- speaker_id=None,
- d_vector=reference_emb,
- style_wav=None,
- language_id=language_id,
- enable_eos_bos_chars=C.enable_eos_bos_chars,
- use_griffin_lim=True,
- do_trim_silence=False,
- ).values()
- print("Generated Audio")
- IPython.display.display(Audio(wav, rate=ap.sample_rate))
- #file_name = text.replace(" ", "_")
- #file_name = file_name.translate(str.maketrans('', '', string.punctuation.replace('_', ''))) + '.wav'
- file_name="Audio.wav"
- out_path = os.path.join(OUT_PATH, file_name)
- print(" > Saving output to {}".format(out_path))
- ap.save_wav(wav, out_path)
- return out_path
-
-demo = gr.Interface(
- fn=greet,
- inputs=[gr.inputs.Textbox(label='What would you like the voice to say? (max. 2000 characters per request)'),gr.Audio(type="filepath", source="upload",label='Please upload a voice to clone (max. 30mb)'),gr.Audio(source="microphone", type="filepath", streaming=True)],
- outputs="audio",
- title="Bilal's Voice Cloning Tool"
- )
-demo.launch()
\ No newline at end of file
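The normalization step in greet() builds a shell command by string interpolation; as an alternative sketch that avoids the shell entirely (same ffmpeg-normalize flags as the script, with the hypothetical helper name `normalize_reference`), the path can be passed as an explicit argument list:

```python
import subprocess

def normalize_reference(path: str) -> None:
    """Loudness-normalize the reference clip in place (flags mirror the script above)."""
    subprocess.run(
        ["ffmpeg-normalize", path,
         "-nt", "rms",      # RMS-based normalization
         "-t=-27",          # target level, passed exactly as in the original command
         "-o", path,        # overwrite the input file
         "-ar", "16000",    # resample to 16 kHz
         "-f"],             # force overwriting existing output
        check=True,
    )
```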
diff --git a/spaces/CVPR/LIVE/pybind11/include/pybind11/eigen.h b/spaces/CVPR/LIVE/pybind11/include/pybind11/eigen.h
deleted file mode 100644
index 22139def6013b47005df22be778bd6984e05ea1d..0000000000000000000000000000000000000000
--- a/spaces/CVPR/LIVE/pybind11/include/pybind11/eigen.h
+++ /dev/null
@@ -1,607 +0,0 @@
-/*
- pybind11/eigen.h: Transparent conversion for dense and sparse Eigen matrices
-
- Copyright (c) 2016 Wenzel Jakob
-
- All rights reserved. Use of this source code is governed by a
- BSD-style license that can be found in the LICENSE file.
-*/
-
-#pragma once
-
-#include "numpy.h"
-
-#if defined(__INTEL_COMPILER)
-# pragma warning(disable: 1682) // implicit conversion of a 64-bit integral type to a smaller integral type (potential portability problem)
-#elif defined(__GNUG__) || defined(__clang__)
-# pragma GCC diagnostic push
-# pragma GCC diagnostic ignored "-Wconversion"
-# pragma GCC diagnostic ignored "-Wdeprecated-declarations"
-# ifdef __clang__
-// Eigen generates a bunch of implicit-copy-constructor-is-deprecated warnings with -Wdeprecated
-// under Clang, so disable that warning here:
-# pragma GCC diagnostic ignored "-Wdeprecated"
-# endif
-# if __GNUC__ >= 7
-# pragma GCC diagnostic ignored "-Wint-in-bool-context"
-# endif
-#endif
-
-#if defined(_MSC_VER)
-# pragma warning(push)
-# pragma warning(disable: 4127) // warning C4127: Conditional expression is constant
-# pragma warning(disable: 4996) // warning C4996: std::unary_negate is deprecated in C++17
-#endif
-
-#include <Eigen/Core>
-#include <Eigen/SparseCore>
-
-// Eigen prior to 3.2.7 doesn't have proper move constructors--but worse, some classes get implicit
-// move constructors that break things. We could detect this and explicitly copy, but an extra copy
-// of matrices seems highly undesirable.
-static_assert(EIGEN_VERSION_AT_LEAST(3,2,7), "Eigen support in pybind11 requires Eigen >= 3.2.7");
-
-PYBIND11_NAMESPACE_BEGIN(PYBIND11_NAMESPACE)
-
-// Provide a convenience alias for easier pass-by-ref usage with fully dynamic strides:
-using EigenDStride = Eigen::Stride<Eigen::Dynamic, Eigen::Dynamic>;
-template <typename MatrixType> using EigenDRef = Eigen::Ref<MatrixType, 0, EigenDStride>;
-template <typename MatrixType> using EigenDMap = Eigen::Map<MatrixType, 0, EigenDStride>;
-
-PYBIND11_NAMESPACE_BEGIN(detail)
-
-#if EIGEN_VERSION_AT_LEAST(3,3,0)
-using EigenIndex = Eigen::Index;
-#else
-using EigenIndex = EIGEN_DEFAULT_DENSE_INDEX_TYPE;
-#endif
-
-// Matches Eigen::Map, Eigen::Ref, blocks, etc:
-template <typename T> using is_eigen_dense_map = all_of<is_template_base_of<Eigen::DenseBase, T>, std::is_base_of<Eigen::MapBase<T, Eigen::ReadOnlyAccessors>, T>>;
-template <typename T> using is_eigen_mutable_map = std::is_base_of<Eigen::MapBase<T, Eigen::WriteAccessors>, T>;
-template <typename T> using is_eigen_dense_plain = all_of<negation<is_eigen_dense_map<T>>, is_template_base_of<Eigen::PlainObjectBase, T>>;
-template <typename T> using is_eigen_sparse = is_template_base_of<Eigen::SparseMatrixBase, T>;
-// Test for objects inheriting from EigenBase that aren't captured by the above. This
-// basically covers anything that can be assigned to a dense matrix but that don't have a typical
-// matrix data layout that can be copied from their .data(). For example, DiagonalMatrix and
-// SelfAdjointView fall into this category.
-template <typename T> using is_eigen_other = all_of<
-    is_template_base_of<Eigen::EigenBase, T>,
-    negation<any_of<is_eigen_dense_map<T>, is_eigen_dense_plain<T>, is_eigen_sparse<T>>>
->;
-
-// Captures numpy/eigen conformability status (returned by EigenProps::conformable()):
-template <bool EigenRowMajor> struct EigenConformable {
- bool conformable = false;
- EigenIndex rows = 0, cols = 0;
- EigenDStride stride{0, 0}; // Only valid if negativestrides is false!
- bool negativestrides = false; // If true, do not use stride!
-
- EigenConformable(bool fits = false) : conformable{fits} {}
- // Matrix type:
- EigenConformable(EigenIndex r, EigenIndex c,
- EigenIndex rstride, EigenIndex cstride) :
- conformable{true}, rows{r}, cols{c} {
- // TODO: when Eigen bug #747 is fixed, remove the tests for non-negativity. http://eigen.tuxfamily.org/bz/show_bug.cgi?id=747
- if (rstride < 0 || cstride < 0) {
- negativestrides = true;
- } else {
- stride = {EigenRowMajor ? rstride : cstride /* outer stride */,
- EigenRowMajor ? cstride : rstride /* inner stride */ };
- }
- }
- // Vector type:
- EigenConformable(EigenIndex r, EigenIndex c, EigenIndex stride)
- : EigenConformable(r, c, r == 1 ? c*stride : stride, c == 1 ? r : r*stride) {}
-
-    template <typename props> bool stride_compatible() const {
- // To have compatible strides, we need (on both dimensions) one of fully dynamic strides,
- // matching strides, or a dimension size of 1 (in which case the stride value is irrelevant)
- return
- !negativestrides &&
- (props::inner_stride == Eigen::Dynamic || props::inner_stride == stride.inner() ||
- (EigenRowMajor ? cols : rows) == 1) &&
- (props::outer_stride == Eigen::Dynamic || props::outer_stride == stride.outer() ||
- (EigenRowMajor ? rows : cols) == 1);
- }
- operator bool() const { return conformable; }
-};
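Seen from the numpy side, the quantities checked here are just the array strides expressed in elements instead of bytes. A small sketch of arrays whose strides are non-trivial or negative, which is exactly what the stride checks reject or route through a copy:

```python
import numpy as np

a = np.arange(12, dtype=np.float64).reshape(3, 4)

def element_strides(arr):
    """Strides in element units, matching how conformability is computed."""
    return tuple(s // arr.itemsize for s in arr.strides)

print(element_strides(a))          # (4, 1): C-contiguous 3x4 matrix
print(element_strides(a.T))        # (1, 4): transposed (Fortran-order) view
print(element_strides(a[:, ::2]))  # (4, 2): non-unit inner stride
print(element_strides(a[::-1]))    # (-4, 1): negative stride, never mapped directly
```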
-
-template <typename Type> struct eigen_extract_stride { using type = Type; };
-template <typename PlainObjectType, int MapOptions, typename StrideType>
-struct eigen_extract_stride<Eigen::Map<PlainObjectType, MapOptions, StrideType>> { using type = StrideType; };
-template <typename PlainObjectType, int Options, typename StrideType>
-struct eigen_extract_stride<Eigen::Ref<PlainObjectType, Options, StrideType>> { using type = StrideType; };
-
-// Helper struct for extracting information from an Eigen type
-template <typename Type_> struct EigenProps {
- using Type = Type_;
- using Scalar = typename Type::Scalar;
- using StrideType = typename eigen_extract_stride::type;
- static constexpr EigenIndex
- rows = Type::RowsAtCompileTime,
- cols = Type::ColsAtCompileTime,
- size = Type::SizeAtCompileTime;
- static constexpr bool
- row_major = Type::IsRowMajor,
- vector = Type::IsVectorAtCompileTime, // At least one dimension has fixed size 1
- fixed_rows = rows != Eigen::Dynamic,
- fixed_cols = cols != Eigen::Dynamic,
- fixed = size != Eigen::Dynamic, // Fully-fixed size
- dynamic = !fixed_rows && !fixed_cols; // Fully-dynamic size
-
-    template <EigenIndex N, EigenIndex Default> using if_zero = std::integral_constant<EigenIndex, N == 0 ? Default : N>;
-    static constexpr EigenIndex inner_stride = if_zero<StrideType::InnerStrideAtCompileTime, 1>::value,
-                                outer_stride = if_zero<StrideType::OuterStrideAtCompileTime,
-                                                       vector ? size : row_major ? cols : rows>::value;
- static constexpr bool dynamic_stride = inner_stride == Eigen::Dynamic && outer_stride == Eigen::Dynamic;
- static constexpr bool requires_row_major = !dynamic_stride && !vector && (row_major ? inner_stride : outer_stride) == 1;
- static constexpr bool requires_col_major = !dynamic_stride && !vector && (row_major ? outer_stride : inner_stride) == 1;
-
- // Takes an input array and determines whether we can make it fit into the Eigen type. If
- // the array is a vector, we attempt to fit it into either an Eigen 1xN or Nx1 vector
- // (preferring the latter if it will fit in either, i.e. for a fully dynamic matrix type).
-    static EigenConformable<row_major> conformable(const array &a) {
- const auto dims = a.ndim();
- if (dims < 1 || dims > 2)
- return false;
-
- if (dims == 2) { // Matrix type: require exact match (or dynamic)
-
- EigenIndex
- np_rows = a.shape(0),
- np_cols = a.shape(1),
- np_rstride = a.strides(0) / static_cast(sizeof(Scalar)),
- np_cstride = a.strides(1) / static_cast(sizeof(Scalar));
- if ((fixed_rows && np_rows != rows) || (fixed_cols && np_cols != cols))
- return false;
-
- return {np_rows, np_cols, np_rstride, np_cstride};
- }
-
- // Otherwise we're storing an n-vector. Only one of the strides will be used, but whichever
- // is used, we want the (single) numpy stride value.
- const EigenIndex n = a.shape(0),
- stride = a.strides(0) / static_cast(sizeof(Scalar));
-
- if (vector) { // Eigen type is a compile-time vector
- if (fixed && size != n)
- return false; // Vector size mismatch
- return {rows == 1 ? 1 : n, cols == 1 ? 1 : n, stride};
- }
- else if (fixed) {
- // The type has a fixed size, but is not a vector: abort
- return false;
- }
- else if (fixed_cols) {
- // Since this isn't a vector, cols must be != 1. We allow this only if it exactly
- // equals the number of elements (rows is Dynamic, and so 1 row is allowed).
- if (cols != n) return false;
- return {1, n, stride};
- }
- else {
- // Otherwise it's either fully dynamic, or column dynamic; both become a column vector
- if (fixed_rows && rows != n) return false;
- return {n, 1, stride};
- }
- }
-
- static constexpr bool show_writeable = is_eigen_dense_map::value && is_eigen_mutable_map::value;
- static constexpr bool show_order = is_eigen_dense_map::value;
- static constexpr bool show_c_contiguous = show_order && requires_row_major;
- static constexpr bool show_f_contiguous = !show_c_contiguous && show_order && requires_col_major;
-
- static constexpr auto descriptor =
- _("numpy.ndarray[") + npy_format_descriptor::name +
- _("[") + _(_<(size_t) rows>(), _("m")) +
- _(", ") + _(_<(size_t) cols>(), _("n")) +
- _("]") +
- // For a reference type (e.g. Ref) we have other constraints that might need to be
- // satisfied: writeable=True (for a mutable reference), and, depending on the map's stride
- // options, possibly f_contiguous or c_contiguous. We include them in the descriptor output
- // to provide some hint as to why a TypeError is occurring (otherwise it can be confusing to
- // see that a function accepts a 'numpy.ndarray[float64[3,2]]' and an error message that you
- // *gave* a numpy.ndarray of the right type and dimensions.
- _(", flags.writeable", "") +
- _(", flags.c_contiguous", "") +
- _(", flags.f_contiguous", "") +
- _("]");
-};
-
-// Casts an Eigen type to numpy array. If given a base, the numpy array references the src data,
-// otherwise it'll make a copy. writeable lets you turn off the writeable flag for the array.
-template handle eigen_array_cast(typename props::Type const &src, handle base = handle(), bool writeable = true) {
- constexpr ssize_t elem_size = sizeof(typename props::Scalar);
- array a;
- if (props::vector)
- a = array({ src.size() }, { elem_size * src.innerStride() }, src.data(), base);
- else
- a = array({ src.rows(), src.cols() }, { elem_size * src.rowStride(), elem_size * src.colStride() },
- src.data(), base);
-
- if (!writeable)
- array_proxy(a.ptr())->flags &= ~detail::npy_api::NPY_ARRAY_WRITEABLE_;
-
- return a.release();
-}
-
-// Takes an lvalue ref to some Eigen type and a (python) base object, creating a numpy array that
-// reference the Eigen object's data with `base` as the python-registered base class (if omitted,
-// the base will be set to None, and lifetime management is up to the caller). The numpy array is
-// non-writeable if the given type is const.
-template
-handle eigen_ref_array(Type &src, handle parent = none()) {
- // none here is to get past array's should-we-copy detection, which currently always
- // copies when there is no base. Setting the base to None should be harmless.
- return eigen_array_cast(src, parent, !std::is_const::value);
-}
-
-// Takes a pointer to some dense, plain Eigen type, builds a capsule around it, then returns a numpy
-// array that references the encapsulated data with a python-side reference to the capsule to tie
-// its destruction to that of any dependent python objects. Const-ness is determined by whether or
-// not the Type of the pointer given is const.
-template ::value>>
-handle eigen_encapsulate(Type *src) {
- capsule base(src, [](void *o) { delete static_cast(o); });
- return eigen_ref_array(*src, base);
-}
-
-// Type caster for regular, dense matrix types (e.g. MatrixXd), but not maps/refs/etc. of dense
-// types.
-template
-struct type_caster::value>> {
- using Scalar = typename Type::Scalar;
- using props = EigenProps;
-
- bool load(handle src, bool convert) {
- // If we're in no-convert mode, only load if given an array of the correct type
- if (!convert && !isinstance>(src))
- return false;
-
- // Coerce into an array, but don't do type conversion yet; the copy below handles it.
- auto buf = array::ensure(src);
-
- if (!buf)
- return false;
-
- auto dims = buf.ndim();
- if (dims < 1 || dims > 2)
- return false;
-
- auto fits = props::conformable(buf);
- if (!fits)
- return false;
-
- // Allocate the new type, then build a numpy reference into it
- value = Type(fits.rows, fits.cols);
- auto ref = reinterpret_steal(eigen_ref_array(value));
- if (dims == 1) ref = ref.squeeze();
- else if (ref.ndim() == 1) buf = buf.squeeze();
-
- int result = detail::npy_api::get().PyArray_CopyInto_(ref.ptr(), buf.ptr());
-
- if (result < 0) { // Copy failed!
- PyErr_Clear();
- return false;
- }
-
- return true;
- }
-
-private:
-
- // Cast implementation
- template
- static handle cast_impl(CType *src, return_value_policy policy, handle parent) {
- switch (policy) {
- case return_value_policy::take_ownership:
- case return_value_policy::automatic:
- return eigen_encapsulate(src);
- case return_value_policy::move:
- return eigen_encapsulate(new CType(std::move(*src)));
- case return_value_policy::copy:
- return eigen_array_cast(*src);
- case return_value_policy::reference:
- case return_value_policy::automatic_reference:
- return eigen_ref_array(*src);
- case return_value_policy::reference_internal:
- return eigen_ref_array(*src, parent);
- default:
- throw cast_error("unhandled return_value_policy: should not happen!");
- };
- }
-
-public:
-
- // Normal returned non-reference, non-const value:
- static handle cast(Type &&src, return_value_policy /* policy */, handle parent) {
- return cast_impl(&src, return_value_policy::move, parent);
- }
- // If you return a non-reference const, we mark the numpy array readonly:
- static handle cast(const Type &&src, return_value_policy /* policy */, handle parent) {
- return cast_impl(&src, return_value_policy::move, parent);
- }
- // lvalue reference return; default (automatic) becomes copy
- static handle cast(Type &src, return_value_policy policy, handle parent) {
- if (policy == return_value_policy::automatic || policy == return_value_policy::automatic_reference)
- policy = return_value_policy::copy;
- return cast_impl(&src, policy, parent);
- }
- // const lvalue reference return; default (automatic) becomes copy
- static handle cast(const Type &src, return_value_policy policy, handle parent) {
- if (policy == return_value_policy::automatic || policy == return_value_policy::automatic_reference)
- policy = return_value_policy::copy;
- return cast(&src, policy, parent);
- }
- // non-const pointer return
- static handle cast(Type *src, return_value_policy policy, handle parent) {
- return cast_impl(src, policy, parent);
- }
- // const pointer return
- static handle cast(const Type *src, return_value_policy policy, handle parent) {
- return cast_impl(src, policy, parent);
- }
-
- static constexpr auto name = props::descriptor;
-
- operator Type*() { return &value; }
- operator Type&() { return value; }
- operator Type&&() && { return std::move(value); }
- template using cast_op_type = movable_cast_op_type;
-
-private:
- Type value;
-};
-
-// Base class for casting reference/map/block/etc. objects back to python.
-template struct eigen_map_caster {
-private:
- using props = EigenProps;
-
-public:
-
- // Directly referencing a ref/map's data is a bit dangerous (whatever the map/ref points to has
- // to stay around), but we'll allow it under the assumption that you know what you're doing (and
- // have an appropriate keep_alive in place). We return a numpy array pointing directly at the
- // ref's data (The numpy array ends up read-only if the ref was to a const matrix type.) Note
- // that this means you need to ensure you don't destroy the object in some other way (e.g. with
- // an appropriate keep_alive, or with a reference to a statically allocated matrix).
- static handle cast(const MapType &src, return_value_policy policy, handle parent) {
- switch (policy) {
- case return_value_policy::copy:
- return eigen_array_cast(src);
- case return_value_policy::reference_internal:
- return eigen_array_cast(src, parent, is_eigen_mutable_map::value);
- case return_value_policy::reference:
- case return_value_policy::automatic:
- case return_value_policy::automatic_reference:
- return eigen_array_cast(src, none(), is_eigen_mutable_map::value);
- default:
- // move, take_ownership don't make any sense for a ref/map:
- pybind11_fail("Invalid return_value_policy for Eigen Map/Ref/Block type");
- }
- }
-
- static constexpr auto name = props::descriptor;
-
- // Explicitly delete these: support python -> C++ conversion on these (i.e. these can be return
- // types but not bound arguments). We still provide them (with an explicitly delete) so that
- // you end up here if you try anyway.
- bool load(handle, bool) = delete;
- operator MapType() = delete;
- template using cast_op_type = MapType;
-};
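The read-only behaviour described above is observable purely on the numpy side: clearing an array's WRITEABLE flag (which is what happens when the referenced matrix is const) makes in-place assignment fail. A quick sketch:

```python
import numpy as np

a = np.zeros((2, 2))
a.setflags(write=False)  # simulate an array returned for a const Eigen reference

try:
    a[0, 0] = 1.0
except ValueError as exc:
    print(exc)  # assignment destination is read-only
```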
-
-// We can return any map-like object (but can only load Refs, specialized next):
-template struct type_caster::value>>
- : eigen_map_caster {};
-
-// Loader for Ref<...> arguments. See the documentation for info on how to make this work without
-// copying (it requires some extra effort in many cases).
-template
-struct type_caster<
- Eigen::Ref,
- enable_if_t>::value>
-> : public eigen_map_caster> {
-private:
- using Type = Eigen::Ref;
- using props = EigenProps;
- using Scalar = typename props::Scalar;
- using MapType = Eigen::Map;
- using Array = array_t;
- static constexpr bool need_writeable = is_eigen_mutable_map::value;
- // Delay construction (these have no default constructor)
- std::unique_ptr map;
- std::unique_ptr ref;
- // Our array. When possible, this is just a numpy array pointing to the source data, but
- // sometimes we can't avoid copying (e.g. input is not a numpy array at all, has an incompatible
- // layout, or is an array of a type that needs to be converted). Using a numpy temporary
- // (rather than an Eigen temporary) saves an extra copy when we need both type conversion and
- // storage order conversion. (Note that we refuse to use this temporary copy when loading an
- // argument for a Ref with M non-const, i.e. a read-write reference).
- Array copy_or_ref;
-public:
- bool load(handle src, bool convert) {
- // First check whether what we have is already an array of the right type. If not, we can't
- // avoid a copy (because the copy is also going to do type conversion).
- bool need_copy = !isinstance(src);
-
- EigenConformable fits;
- if (!need_copy) {
- // We don't need a converting copy, but we also need to check whether the strides are
- // compatible with the Ref's stride requirements
- Array aref = reinterpret_borrow(src);
-
- if (aref && (!need_writeable || aref.writeable())) {
- fits = props::conformable(aref);
- if (!fits) return false; // Incompatible dimensions
- if (!fits.template stride_compatible())
- need_copy = true;
- else
- copy_or_ref = std::move(aref);
- }
- else {
- need_copy = true;
- }
- }
-
- if (need_copy) {
- // We need to copy: If we need a mutable reference, or we're not supposed to convert
- // (either because we're in the no-convert overload pass, or because we're explicitly
- // instructed not to copy (via `py::arg().noconvert()`) we have to fail loading.
- if (!convert || need_writeable) return false;
-
- Array copy = Array::ensure(src);
- if (!copy) return false;
- fits = props::conformable(copy);
- if (!fits || !fits.template stride_compatible())
- return false;
- copy_or_ref = std::move(copy);
- loader_life_support::add_patient(copy_or_ref);
- }
-
- ref.reset();
- map.reset(new MapType(data(copy_or_ref), fits.rows, fits.cols, make_stride(fits.stride.outer(), fits.stride.inner())));
- ref.reset(new Type(*map));
-
- return true;
- }
-
- operator Type*() { return ref.get(); }
- operator Type&() { return *ref; }
- template using cast_op_type = pybind11::detail::cast_op_type<_T>;
-
-private:
- template ::value, int> = 0>
- Scalar *data(Array &a) { return a.mutable_data(); }
-
- template ::value, int> = 0>
- const Scalar *data(Array &a) { return a.data(); }
-
- // Attempt to figure out a constructor of `Stride` that will work.
- // If both strides are fixed, use a default constructor:
- template using stride_ctor_default = bool_constant<
- S::InnerStrideAtCompileTime != Eigen::Dynamic && S::OuterStrideAtCompileTime != Eigen::Dynamic &&
- std::is_default_constructible::value>;
- // Otherwise, if there is a two-index constructor, assume it is (outer,inner) like
- // Eigen::Stride, and use it:
- template using stride_ctor_dual = bool_constant<
- !stride_ctor_default::value && std::is_constructible::value>;
- // Otherwise, if there is a one-index constructor, and just one of the strides is dynamic, use
- // it (passing whichever stride is dynamic).
- template using stride_ctor_outer = bool_constant<
- !any_of, stride_ctor_dual>::value &&
- S::OuterStrideAtCompileTime == Eigen::Dynamic && S::InnerStrideAtCompileTime != Eigen::Dynamic &&
- std::is_constructible::value>;
- template using stride_ctor_inner = bool_constant<
- !any_of, stride_ctor_dual>::value &&
- S::InnerStrideAtCompileTime == Eigen::Dynamic && S::OuterStrideAtCompileTime != Eigen::Dynamic &&
- std::is_constructible::value>;
-
- template ::value, int> = 0>
- static S make_stride(EigenIndex, EigenIndex) { return S(); }
- template ::value, int> = 0>
- static S make_stride(EigenIndex outer, EigenIndex inner) { return S(outer, inner); }
- template ::value, int> = 0>
- static S make_stride(EigenIndex outer, EigenIndex) { return S(outer); }
- template ::value, int> = 0>
- static S make_stride(EigenIndex, EigenIndex inner) { return S(inner); }
-
-};
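Whether a Ref argument binds without a copy therefore comes down to the incoming array's dtype, memory order, and writeability. A sketch of coercing an array up front so those properties match; the bound function `module.process` is hypothetical:

```python
import numpy as np

data = np.arange(9, dtype=np.float32).reshape(3, 3)[:, :2]  # wrong dtype, odd strides

# Coerce once on the Python side: the right scalar type and a contiguous layout.
# Eigen defaults to column-major storage, so Fortran order usually suits a plain
# Ref<MatrixXd>; use np.ascontiguousarray for row-major Ref types instead.
ready = np.asfortranarray(data, dtype=np.float64)
print(ready.flags['F_CONTIGUOUS'], ready.dtype)  # True float64

# module.process(ready)  # hypothetical pybind11-bound function taking Eigen::Ref<MatrixXd>
```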
-
-// type_caster for special matrix types (e.g. DiagonalMatrix), which are EigenBase, but not
-// EigenDense (i.e. they don't have a data(), at least not with the usual matrix layout).
-// load() is not supported, but we can cast them into the python domain by first copying to a
-// regular Eigen::Matrix, then casting that.
-template
-struct type_caster::value>> {
-protected:
- using Matrix = Eigen::Matrix;
- using props = EigenProps;
-public:
- static handle cast(const Type &src, return_value_policy /* policy */, handle /* parent */) {
- handle h = eigen_encapsulate(new Matrix(src));
- return h;
- }
- static handle cast(const Type *src, return_value_policy policy, handle parent) { return cast(*src, policy, parent); }
-
- static constexpr auto name = props::descriptor;
-
- // Explicitly delete these: support python -> C++ conversion on these (i.e. these can be return
- // types but not bound arguments). We still provide them (with an explicitly delete) so that
- // you end up here if you try anyway.
- bool load(handle, bool) = delete;
- operator Type() = delete;
- template using cast_op_type = Type;
-};
-
-template
-struct type_caster::value>> {
- typedef typename Type::Scalar Scalar;
- typedef remove_reference_t().outerIndexPtr())> StorageIndex;
- typedef typename Type::Index Index;
- static constexpr bool rowMajor = Type::IsRowMajor;
-
- bool load(handle src, bool) {
- if (!src)
- return false;
-
- auto obj = reinterpret_borrow