import time
from collections import namedtuple
from pathlib import Path
from typing import List

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

from faker import Faker

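# Faker instance used to generate a random "name email" User-Agent per request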
fake = Faker()

MAX_RETRIES = 10
SEC_EDGAR_RATE_LIMIT_SLEEP_INTERVAL = 0.1  # seconds; also reused as the retry backoff factor
FILING_DETAILS_FILENAME_STEM = "filing-details"
SEC_EDGAR_SEARCH_API_ENDPOINT = "https://efts.sec.gov/LATEST/search-index"
SEC_EDGAR_ARCHIVES_BASE_URL = "https://www.sec.gov/Archives/edgar/data"

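# Retry transient failures (and 403s from SEC rate limiting) with exponential
# backoff: urllib3 sleeps roughly backoff_factor * 2 ** (retries - 1) between attempts.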
retries = Retry(
    total=MAX_RETRIES,
    backoff_factor=SEC_EDGAR_RATE_LIMIT_SLEEP_INTERVAL,
    status_forcelist=[403, 500, 502, 503, 504],
)

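# Lightweight record tying a filing's accession number to the URLs for its
# full submission text file and its primary (details) document.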
FilingMetadata = namedtuple(
    "FilingMetadata",
    [
        "accession_number",
        "full_submission_url",
        "filing_details_url",
        "filing_details_filename",
    ],
)


class EdgarSearchApiError(Exception):
    """Raised when the SEC EDGAR full-text search API reports an error."""


def form_request_payload(
    ticker_or_cik: str,
    filing_types: List[str],
    start_date: str,
    end_date: str,
    start_index: int,
    query: str,
) -> dict:
    """Build the JSON payload for a POST to the EDGAR full-text search API."""
    payload = {
        "dateRange": "custom",
        "startdt": start_date,
        "enddt": end_date,
        "entityName": ticker_or_cik,
        "forms": filing_types,
        "from": start_index,
        "q": query,
    }
    return payload
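
# For illustration, form_request_payload("AAPL", ["10-K"], "2019-01-01",
# "2020-12-31", 0, "") returns:
#   {"dateRange": "custom", "startdt": "2019-01-01", "enddt": "2020-12-31",
#    "entityName": "AAPL", "forms": ["10-K"], "from": 0, "q": ""}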


def build_filing_metadata_from_hit(hit: dict) -> FilingMetadata:
    """Convert a single search hit from the EDGAR search API into FilingMetadata."""
    accession_number, filing_details_filename = hit["_id"].split(":", 1)
    # The company CIK should be last in the CIK list. The list may also include
    # the CIKs of executives carrying out insider transactions, as in Form 4.
    cik = hit["_source"]["ciks"][-1]
    accession_number_no_dashes = accession_number.replace("-", "", 2)
    submission_base_url = (
        f"{SEC_EDGAR_ARCHIVES_BASE_URL}/{cik}/{accession_number_no_dashes}"
    )
    full_submission_url = f"{submission_base_url}/{accession_number}.txt"
    # An XSL path segment is required to download the human-readable, styled
    # version of XML documents such as Form 4. Compare:
    # SEC_EDGAR_ARCHIVES_BASE_URL + /320193/000032019320000066/wf-form4_159839550969947.xml
    # SEC_EDGAR_ARCHIVES_BASE_URL +
    # /320193/000032019320000066/xslF345X03/wf-form4_159839550969947.xml
    # xsl = hit["_source"]["xsl"]
    # if xsl is not None:
    #     filing_details_url = f"{submission_base_url}/{xsl}/{filing_details_filename}"
    # else:
    #     filing_details_url = f"{submission_base_url}/{filing_details_filename}"
    filing_details_url = f"{submission_base_url}/{filing_details_filename}"
    # Normalize the extension so .htm documents are saved as .html. A bare
    # str.replace("htm", "html") would corrupt names already ending in .html.
    filing_details_filename_extension = Path(filing_details_filename).suffix
    if filing_details_filename_extension == ".htm":
        filing_details_filename_extension = ".html"
    filing_details_filename = (
        f"{FILING_DETAILS_FILENAME_STEM}{filing_details_filename_extension}"
    )
    return FilingMetadata(
        accession_number=accession_number,
        full_submission_url=full_submission_url,
        filing_details_url=filing_details_url,
        filing_details_filename=filing_details_filename,
    )
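
# For illustration, a hypothetical hit such as
#   {"_id": "0000320193-20-000066:wf-form4_159839550969947.xml",
#    "_source": {"ciks": ["0000320193"]}}
# yields URLs under
#   https://www.sec.gov/Archives/edgar/data/0000320193/000032019320000066/
# and a local filename of filing-details.xml.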


def generate_random_user_agent() -> str:
    """Return a random "Name email" string for the User-Agent header.

    The SEC asks automated clients to declare a contact in roughly this form,
    e.g. "Sample Company AdminContact@example.com".
    """
    return f"{fake.first_name()} {fake.last_name()} {fake.email()}"


def get_filing_urls_to_download(
    filing_type: str,
    ticker_or_cik: str,
    num_filings_to_download: int,
    after_date: str,
    before_date: str,
    include_amends: bool,
    query: str = "",
) -> List[FilingMetadata]:
    """Query the SEC EDGAR full-text search API for filings to download.

    Pages through search results until num_filings_to_download matching
    filings have been collected or the results are exhausted.

    Returns:
        List[FilingMetadata]: Filing metadata from SEC.
    """
    filings_to_fetch: List[FilingMetadata] = []
    start_index = 0
    # Use a session with retry-aware adapters so transient errors are retried
    client = requests.Session()
    client.mount("http://", HTTPAdapter(max_retries=retries))
    client.mount("https://", HTTPAdapter(max_retries=retries))
    try:
        while len(filings_to_fetch) < num_filings_to_download:
            payload = form_request_payload(
                ticker_or_cik,
                [filing_type],
                after_date,
                before_date,
                start_index,
                query,
            )
            headers = {
                "User-Agent": generate_random_user_agent(),
                "Accept-Encoding": "gzip, deflate",
                "Host": "efts.sec.gov",
            }
            resp = client.post(
                SEC_EDGAR_SEARCH_API_ENDPOINT, json=payload, headers=headers
            )
            resp.raise_for_status()
            search_query_results = resp.json()
            if "error" in search_query_results:
                try:
                    root_cause = search_query_results["error"]["root_cause"]
                    if not root_cause:  # pragma: no cover
                        raise ValueError
                    error_reason = root_cause[0]["reason"]
                    raise EdgarSearchApiError(
                        f"Edgar Search API encountered an error: {error_reason}. "
                        f"Request payload:\n{payload}"
                    )
                except (ValueError, KeyError):  # pragma: no cover
                    raise EdgarSearchApiError(
                        "Edgar Search API encountered an unknown error. "
                        f"Request payload:\n{payload}"
                    ) from None
            query_hits = search_query_results["hits"]["hits"]
            # No more results to process
            if not query_hits:
                break
            for hit in query_hits:
                hit_filing_type = hit["_source"]["file_type"]
                is_amend = hit_filing_type.endswith("/A")
                if not include_amends and is_amend:
                    continue
                # Amendments are fetched in addition to the requested count,
                # so they should not consume one of the requested slots.
                if is_amend:
                    num_filings_to_download += 1
                # Work around a bug where incorrect filings are sometimes
                # included. For example, AAPL 8-K searches include N-Q entries.
                if not is_amend and hit_filing_type != filing_type:
                    continue
                metadata = build_filing_metadata_from_hit(hit)
                filings_to_fetch.append(metadata)
                if len(filings_to_fetch) == num_filings_to_download:
                    return filings_to_fetch
            # EDGAR returns 100 entries at a time, but it is best to read the
            # page size from the response payload in case it changes in the future
            query_size = search_query_results["query"]["size"]
            start_index += query_size
            # Sleep between requests to avoid SEC rate limiting
            time.sleep(SEC_EDGAR_RATE_LIMIT_SLEEP_INTERVAL)
    finally:
        client.close()
    return filings_to_fetch
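

if __name__ == "__main__":
    # Minimal usage sketch; the ticker, form type, date range, and count
    # below are illustrative values only.
    filings = get_filing_urls_to_download(
        filing_type="10-K",
        ticker_or_cik="AAPL",
        num_filings_to_download=3,
        after_date="2019-01-01",
        before_date="2022-12-31",
        include_amends=False,
    )
    for filing in filings:
        print(filing.accession_number, filing.filing_details_url)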