import streamlit as st
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from wordlist_generator import generate_wordlist # A mock-up function for your project
from dotenv import load_dotenv
import os
import requests
import time
# Load environment variables from .env file
load_dotenv()
access_token = os.getenv("HUGGINGFACE_ACCESS_TOKEN")
# Page configuration
st.set_page_config(page_title="ReconNinja Wordlists", page_icon="💬", layout="wide")
# Header section
def display_header():
    st.title("💬 ReconNinja Wordlists")
    st.subheader("Tailored wordlists for efficient penetration testing")
    st.markdown("""
    This application generates customized wordlists for use in network reconnaissance and penetration testing.
    Adjust the parameters to generate wordlists suited for your specific testing scenario.
    """)
# Sidebar for user input
def get_user_inputs():
st.sidebar.header("Customize Your Wordlist")
st.sidebar.markdown("""
Adjust the following parameters to create wordlists optimized for your penetration testing tasks.
""")
wordlist_size = st.sidebar.slider("Wordlist Size", min_value=50, max_value=10000, value=1000, step=50)
min_length = st.sidebar.slider("Minimum Word Length", min_value=3, max_value=12, value=6)
max_length = st.sidebar.slider("Maximum Word Length", min_value=3, max_value=12, value=8)
include_special_chars = st.sidebar.checkbox("Include Special Characters", value=False)
include_numbers = st.sidebar.checkbox("Include Numbers", value=True)
return wordlist_size, min_length, max_length, include_special_chars, include_numbers
# Word frequency filter
def check_common_passwords(wordlist):
    # Placeholder URL for common passwords (replace with a real one or use a local file)
    try:
        response = requests.get("https://example.com/common_passwords.txt", timeout=10)
        common_passwords = set(response.text.splitlines())
    except requests.RequestException:
        st.warning("Could not fetch the common-password list; skipping the frequency filter.")
        return wordlist
    return [word for word in wordlist if word not in common_passwords]
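# The comment above also suggests using a local file instead of a remote URL.
# Below is a minimal sketch of that alternative; the file name "common_passwords.txt"
# and the helper name load_common_passwords_from_file are illustrative assumptions
# and nothing in the app calls this helper by default.
def load_common_passwords_from_file(path="common_passwords.txt"):
    # Read one password per line into a set for fast membership checks.
    try:
        with open(path, "r", encoding="utf-8") as f:
            return {line.strip() for line in f if line.strip()}
    except FileNotFoundError:
        # Fall back to an empty set so the filter becomes a no-op.
        return set()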
# Wordlist generation with progress indicator
def generate_wordlist_with_progress(size, min_length, max_length, special_chars, numbers):
    wordlist = []
    progress_bar = st.progress(0.0)  # Create the progress bar once, then update it in place
    for i in range(size):
        # Simulate wordlist generation (replace this with your actual word generation logic)
        word = f"word{i+1}"
        wordlist.append(word)
        # Update the progress bar every 100 words
        if i % 100 == 0:
            progress_bar.progress(i / size)
            time.sleep(0.01)  # Simulate delay for wordlist generation (remove in production)
    progress_bar.progress(1.0)
    return wordlist
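# The loop above only emits placeholder strings and ignores the length and
# character-set parameters. A minimal sketch of what the real generation step could
# look like, using Python's standard random and string modules; the helper name
# random_word is an illustrative assumption and is not wired into the app.
def random_word(min_length, max_length, special_chars, numbers):
    import random
    import string
    # Build the candidate character pool from the user's checkbox selections.
    pool = string.ascii_lowercase
    if numbers:
        pool += string.digits
    if special_chars:
        pool += "!@#$%^&*"
    # Pick a length within the requested bounds and sample characters from the pool.
    length = random.randint(min_length, max_length)
    return "".join(random.choice(pool) for _ in range(length))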
# Wordlist generation logic
def generate_and_display_wordlist(wordlist_size, min_length, max_length, include_special_chars, include_numbers):
    try:
        # Generate the wordlist with progress
        wordlist = generate_wordlist_with_progress(
            wordlist_size,
            min_length,
            max_length,
            include_special_chars,
            include_numbers
        )
        # Apply word frequency filter
        wordlist = check_common_passwords(wordlist)
        # Display a preview of the wordlist
        st.write(f"Preview of the first 20 of {len(wordlist)} generated words:")
        st.dataframe(pd.DataFrame(wordlist[:20], columns=["Generated Words"]))  # Display first 20 words
        # Provide a download link for the full wordlist
        st.markdown("### Download Full Wordlist")
        csv_data = pd.Series(wordlist).to_csv(index=False).encode()
        st.download_button(
            label="Download Wordlist as CSV",
            data=csv_data,
            file_name="reconninja_wordlist.csv",
            mime="text/csv"
        )
        return wordlist
    except Exception as e:
        st.error(f"Error generating wordlist: {e}")
        return None
# Visualizing the wordlist statistics
def display_wordlist_statistics(wordlist):
    if wordlist:
        st.header("Wordlist Statistics")
        # Calculate and display word length distribution
        word_lengths = [len(word) for word in wordlist]
        word_length_df = pd.DataFrame(word_lengths, columns=["Word Length"])
        fig, ax = plt.subplots(figsize=(8, 6))
        sns.histplot(word_length_df["Word Length"], kde=True, bins=20, ax=ax)
        ax.set_title("Word Length Distribution")
        ax.set_xlabel("Word Length")
        ax.set_ylabel("Frequency")
        st.pyplot(fig)
# Analyze wordlist security (entropy)
def analyze_wordlist_security(wordlist):
    if wordlist:
        st.header("Analyze Wordlist Security")
        entropy_slider = st.slider(
            "Select Entropy Multiplier",
            min_value=1.0,
            max_value=10.0,
            value=3.0,
            step=0.1
        )
        # Simulate password entropy calculation:
        # log2(N ** k) = k * log2(N), so the multiplier k scales the bits contributed by
        # a uniform pick from an N-word list (for N = 1000 and k = 3.0, about 29.9 bits).
        entropy = np.log2(len(wordlist) ** entropy_slider)
        st.write(f"Estimated Entropy: {entropy:.2f} bits")
        # Security analysis feedback
        if entropy < 50:
            st.warning("Low entropy detected! This wordlist might be vulnerable to brute-force attacks.")
        else:
            st.success("Good entropy! This wordlist is secure against most brute-force attempts.")
# Footer section
def display_footer():
    st.markdown("---")
    st.markdown(
        "Made with ❤️ by Canstralian. For more information on ReconNinja, visit our [GitHub](https://github.com/Canstralian)."
    )
# Main application function
def main():
    display_header()
    wordlist_size, min_length, max_length, include_special_chars, include_numbers = get_user_inputs()
    wordlist = generate_and_display_wordlist(
        wordlist_size, min_length, max_length, include_special_chars, include_numbers
    )
    display_wordlist_statistics(wordlist)
    analyze_wordlist_security(wordlist)
    display_footer()
if __name__ == "__main__":
    main()
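# To run the app locally (assuming this file is saved as app.py and Streamlit is installed):
#   streamlit run app.py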