From e124457829876d2e0a4894c5f28bab2a4897529e Mon Sep 17 00:00:00 2001
From: Danglewood <85772166+deeleeramone@users.noreply.github.com>
Date: Fri, 17 May 2024 02:40:43 -0700
Subject: [BugFix] Clear Linting Items (#6423)

* fix some linting items

* pylint stuff

* black

* index_snapshots

* ruff

* forgot to add this file to commit

* black
---
 .../openbb_alpha_vantage/models/historical_eps.py      |  2 +-
 .../cboe/openbb_cboe/models/index_snapshots.py         | 18 ++++++++++--------
 .../providers/cboe/openbb_cboe/utils/helpers.py        |  1 +
 .../econdb/openbb_econdb/utils/main_indicators.py      |  3 ++-
 .../providers/fred/openbb_fred/models/search.py        |  4 ++--
 .../intrinio/openbb_intrinio/models/balance_sheet.py   | 10 +++++-----
 .../intrinio/openbb_intrinio/models/world_news.py      |  2 +-
 .../providers/sec/openbb_sec/models/company_filings.py |  4 ++--
 .../providers/sec/openbb_sec/models/etf_holdings.py    |  4 ++++
 .../providers/sec/openbb_sec/models/sic_search.py      |  3 ++-
 .../providers/sec/openbb_sec/utils/helpers.py          |  7 +++++--
 .../providers/tmx/openbb_tmx/utils/helpers.py          |  1 +
 12 files changed, 36 insertions(+), 23 deletions(-)

diff --git a/openbb_platform/providers/alpha_vantage/openbb_alpha_vantage/models/historical_eps.py b/openbb_platform/providers/alpha_vantage/openbb_alpha_vantage/models/historical_eps.py
index efc27de80e8..038b89e747e 100644
--- a/openbb_platform/providers/alpha_vantage/openbb_alpha_vantage/models/historical_eps.py
+++ b/openbb_platform/providers/alpha_vantage/openbb_alpha_vantage/models/historical_eps.py
@@ -117,7 +117,7 @@ class AVHistoricalEpsFetcher(
             target = (
                 "annualEarnings" if query.period == "annual" else "quarterlyEarnings"
             )
-            message = data.get("Information", "")
+            message = data.get("Information", "")  # type: ignore
             if message:
                 messages.append(message)
                 warn(f"Symbol Error for {symbol}: {message}")
diff --git a/openbb_platform/providers/cboe/openbb_cboe/models/index_snapshots.py b/openbb_platform/providers/cboe/openbb_cboe/models/index_snapshots.py
index 77313159449..bc0cb48f336 100644
--- a/openbb_platform/providers/cboe/openbb_cboe/models/index_snapshots.py
+++ b/openbb_platform/providers/cboe/openbb_cboe/models/index_snapshots.py
@@ -1,5 +1,7 @@
 """CBOE Index Snapshots Model."""
 
+# pylint: disable=unused-argument
+
 from datetime import datetime
 from typing import Any, Dict, List, Literal, Optional
 
@@ -27,9 +29,9 @@ class CboeIndexSnapshotsQueryParams(IndexSnapshotsQueryParams):
 
     @field_validator("region", mode="after", check_fields=False)
     @classmethod
-    def validate_region(cls, v: str):
+    def validate_region(cls, v):
         """Validate region."""
-        return "us" if v is None else v
+        return v if v else "us"
 
 
 class CboeIndexSnapshotsData(IndexSnapshotsData):
@@ -89,24 +91,24 @@ class CboeIndexSnapshotsFetcher(
     @staticmethod
     async def aextract_data(
         query: CboeIndexSnapshotsQueryParams,
-        credentials: Optional[Dict[str, str]],  # pylint: disable=unused-argument
+        credentials: Optional[Dict[str, str]],
         **kwargs: Any,
     ) -> List[Dict]:
         """Return the raw data from the Cboe endpoint"""
-
+        url: str = ""
         if query.region == "us":
             url = "https://cdn.cboe.com/api/global/delayed_quotes/quotes/all_us_indices.json"
         if query.region == "eu":
             url = "https://cdn.cboe.com/api/global/european_indices/index_quotes/all-indices.json"
         data = await amake_request(url, **kwargs)
 
-        return data.get("data")
+        return data.get("data")  # type: ignore
 
     @staticmethod
     def transform_data(
-        query: CboeIndexSnapshotsQueryParams,  # pylint: disable=unused-argument
-        data: dict,
-        **kwargs: Any,  # pylint: disable=unused-argument
+        query: CboeIndexSnapshotsQueryParams,
+        data: List[Dict],
+        **kwargs: Any,
     ) -> List[CboeIndexSnapshotsData]:
         """Transform the data to the standard format"""
         if not data:
diff --git a/openbb_platform/providers/cboe/openbb_cboe/utils/helpers.py b/openbb_platform/providers/cboe/openbb_cboe/utils/helpers.py
index e49361b1549..83464f36511 100644
--- a/openbb_platform/providers/cboe/openbb_cboe/utils/helpers.py
+++ b/openbb_platform/providers/cboe/openbb_cboe/utils/helpers.py
@@ -92,6 +92,7 @@ async def response_callback(response: ClientResponse, _: Any):
 
 async def get_cboe_data(url, use_cache: bool = True, **kwargs) -> Any:
     """Use the generic Cboe HTTP request."""
+    data: Any = None
     if use_cache is True:
         async with CachedSession(cache=backend) as cached_session:
             try:
diff --git a/openbb_platform/providers/econdb/openbb_econdb/utils/main_indicators.py b/openbb_platform/providers/econdb/openbb_econdb/utils/main_indicators.py
index 152fa876713..3845e97d235 100644
--- a/openbb_platform/providers/econdb/openbb_econdb/utils/main_indicators.py
+++ b/openbb_platform/providers/econdb/openbb_econdb/utils/main_indicators.py
@@ -1,7 +1,7 @@
 """Main Indicators"""
 
 from datetime import datetime, timedelta
-from typing import Dict, List, Literal
+from typing import Dict, List, Literal, Union
 
 from aiohttp_client_cache import SQLiteBackend
 from aiohttp_client_cache.session import CachedSession
@@ -72,6 +72,7 @@ main_indicators_order = [
 
 async def fetch_data(url, use_cache: bool = True):
     """Fetch the data with or without the cached session object."""
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/econdb_main_indicators"
         async with CachedSession(
diff --git a/openbb_platform/providers/fred/openbb_fred/models/search.py b/openbb_platform/providers/fred/openbb_fred/models/search.py
index 0681472256a..0ede3d32a48 100644
--- a/openbb_platform/providers/fred/openbb_fred/models/search.py
+++ b/openbb_platform/providers/fred/openbb_fred/models/search.py
@@ -121,10 +121,10 @@ class FredSearchFetcher(
         api_key = credentials.get("fred_api_key") if credentials else ""
 
         if query.series_id is not None:
-            results = []
+            results: List = []
 
             async def get_one(_id: str):
-                data = {}
+                data: Dict = {}
                 url = f"https://api.stlouisfed.org/geofred/series/group?series_id={_id}&api_key={api_key}&file_type=json"
                 response = await amake_request(url)
                 data = response.get("series_group")  # type: ignore
diff --git a/openbb_platform/providers/intrinio/openbb_intrinio/models/balance_sheet.py b/openbb_platform/providers/intrinio/openbb_intrinio/models/balance_sheet.py
index 4428fab9091..5b1bc8bd7fa 100644
--- a/openbb_platform/providers/intrinio/openbb_intrinio/models/balance_sheet.py
+++ b/openbb_platform/providers/intrinio/openbb_intrinio/models/balance_sheet.py
@@ -445,12 +445,12 @@ class IntrinioBalanceSheetFetcher(
 
         async def callback(response: ClientResponse, _: Any) -> Dict:
             """Return the response."""
-            statement_data = await response.json()
+            statement_data = await response.json()  # type: ignore
             return {
-                "period_ending": statement_data["fundamental"]["end_date"],
-                "fiscal_year": statement_data["fundamental"]["fiscal_year"],
-                "fiscal_period": statement_data["fundamental"]["fiscal_period"],
-                "financials": statement_data["standardized_financials"],
+                "period_ending": statement_data["fundamental"]["end_date"],  # type: ignore
+                "fiscal_year": statement_data["fundamental"]["fiscal_year"],  # type: ignore
+                "fiscal_period": statement_data["fundamental"]["fiscal_period"],  # type: ignore
+                "financials": statement_data["standardized_financials"],  # type: ignore
             }
 
         urls = [
diff --git a/openbb_platform/providers/intrinio/openbb_intrinio/models/world_news.py b/openbb_platform/providers/intrinio/openbb_intrinio/models/world_news.py
index 9be9ca3b5d5..69035f8d476 100644
--- a/openbb_platform/providers/intrinio/openbb_intrinio/models/world_news.py
+++ b/openbb_platform/providers/intrinio/openbb_intrinio/models/world_news.py
@@ -213,7 +213,7 @@ class IntrinioWorldNewsFetcher(
                 : query.limit
             ]
 
-        return await amake_request(url, response_callback=callback, **kwargs)
+        return await amake_request(url, response_callback=callback, **kwargs)  # type: ignore
 
     # pylint: disable=unused-argument
     @staticmethod
diff --git a/openbb_platform/providers/sec/openbb_sec/models/company_filings.py b/openbb_platform/providers/sec/openbb_sec/models/company_filings.py
index 90233d5f23b..e210a5a0c72 100644
--- a/openbb_platform/providers/sec/openbb_sec/models/company_filings.py
+++ b/openbb_platform/providers/sec/openbb_sec/models/company_filings.py
@@ -171,7 +171,7 @@ class SecCompanyFilingsFetcher(
             query.cik = cik_ + str(query.cik)  # type: ignore
 
         url = f"https://data.sec.gov/submissions/CIK{query.cik}.json"
-
+        data: Union[dict, List[dict]] = []
         if query.use_cache is True:
             cache_dir = f"{get_user_cache_directory()}/http/sec_company_filings"
             async with CachedSession(
@@ -206,7 +206,7 @@ class SecCompanyFilingsFetcher(
             new_data = DataFrame.from_records(result)
             results.extend(new_data.to_dict("records"))
 
-        urls = []
+        urls: List = []
         new_urls = (
             DataFrame(data["filings"].get("files"))  # type: ignore
             if "filings" in data
diff --git a/openbb_platform/providers/sec/openbb_sec/models/etf_holdings.py b/openbb_platform/providers/sec/openbb_sec/models/etf_holdings.py
index 89b65b42d64..86747169fe9 100644
--- a/openbb_platform/providers/sec/openbb_sec/models/etf_holdings.py
+++ b/openbb_platform/providers/sec/openbb_sec/models/etf_holdings.py
@@ -371,6 +371,7 @@ class SecEtfHoldingsFetcher(
             """Response callback for the request."""
             return await response.read()
 
+        response: Union[dict, List[dict]] = []
         if query.use_cache is True:
             cache_dir = f"{get_user_cache_directory()}/http/sec_etf"
             async with CachedSession(cache=SQLiteBackend(cache_dir)) as session:
@@ -747,6 +748,9 @@
             )
         # Extract additional information from the form that doesn't belong in the holdings table.
         metadata = {}
+        month_1: str = ""
+        month_2: str = ""
+        month_3: str = ""
         try:
             gen_info = response["edgarSubmission"]["formData"].get("genInfo", {})  # type: ignore
             if gen_info:
diff --git a/openbb_platform/providers/sec/openbb_sec/models/sic_search.py b/openbb_platform/providers/sec/openbb_sec/models/sic_search.py
index a605f39831e..a130bddead6 100644
--- a/openbb_platform/providers/sec/openbb_sec/models/sic_search.py
+++ b/openbb_platform/providers/sec/openbb_sec/models/sic_search.py
@@ -2,7 +2,7 @@
 
 # pylint: disable=unused-argument
 
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union
 
 import pandas as pd
 from aiohttp_client_cache import SQLiteBackend
@@ -62,6 +62,7 @@ class SecSicSearchFetcher(
             "https://www.sec.gov/corpfin/"
             "division-of-corporation-finance-standard-industrial-classification-sic-code-list"
         )
+        response: Union[dict, List[dict], str] = {}
         if query.use_cache is True:
             cache_dir = f"{get_user_cache_directory()}/http/sec_sic"
             async with CachedSession(
diff --git a/openbb_platform/providers/sec/openbb_sec/utils/helpers.py b/openbb_platform/providers/sec/openbb_sec/utils/helpers.py
index 407a8fc773b..be306c64984 100644
--- a/openbb_platform/providers/sec/openbb_sec/utils/helpers.py
+++ b/openbb_platform/providers/sec/openbb_sec/utils/helpers.py
@@ -38,7 +38,7 @@ async def get_all_companies(use_cache: bool = True) -> pd.DataFrame:
     >>> tickers = get_all_companies()
     """
     url = "https://www.sec.gov/files/company_tickers.json"
-
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_companies"
         async with CachedSession(
@@ -65,6 +65,7 @@ async def get_all_ciks(use_cache: bool = True) -> pd.DataFrame:
         """Response callback for CIK lookup data."""
         return await response.text(encoding="latin-1")
 
+    response: Union[dict, List[dict], str] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_ciks"
         async with CachedSession(
@@ -97,6 +98,7 @@ async def get_mf_and_etf_map(use_cache: bool = True) -> pd.DataFrame:
     symbols = pd.DataFrame()
 
     url = "https://www.sec.gov/files/company_tickers_mf.json"
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_mf_etf_map"
         async with CachedSession(
@@ -189,6 +191,7 @@ async def download_zip_file(
         """Response callback for ZIP file downloads."""
         return await response.read()
 
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_ftd"
         async with CachedSession(cache=SQLiteBackend(cache_dir)) as session:
@@ -315,7 +318,7 @@ async def get_nport_candidates(symbol: str, use_cache: bool = True) -> List[Dict
         raise ValueError("Fund not found for, the symbol: " + symbol)
 
     url = f"https://efts.sec.gov/LATEST/search-index?q={series_id}&dateRange=all&forms=NPORT-P"
-
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_etf"
         async with CachedSession(cache=SQLiteBackend(cache_dir)) as session:
diff --git a/openbb_platform/providers/tmx/openbb_tmx/utils/helpers.py b/openbb_platform/providers/tmx/openbb_tmx/utils/helpers.py
index 2ed84d0abb0..04e769c8b4c 100644
--- a/openbb_platform/providers/tmx/openbb_tmx/utils/helpers.py
+++ b/openbb_platform/providers/tmx/openbb_tmx/utils/helpers.py
@@ -281,6 +281,7 @@ async def get_data_from_url(
     **kwargs: Any,
 ) -> Any:
     """Make an asynchronous HTTP request to a static file."""
+    data: Any = None
     if use_cache is True:
         async with CachedSession(cache=backend) as cached_session:
             try:
-- 
cgit v1.2.3
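
The recurring fix in these hunks is to bind a typed default before any conditional branch (so pylint and mypy cannot report the variable as possibly unbound) and to append "# type: ignore" on lines where the awaited helper returns a broad union that cannot be narrowed statically. A minimal sketch of that pattern, assuming plain aiohttp; the function name, URLs, and payload shape are illustrative placeholders, not OpenBB code:

    """Illustrative sketch of the lint-friendly pattern applied in this patch (not OpenBB code)."""

    from typing import Dict, List, Union

    from aiohttp import ClientSession


    async def get_index_quotes(region: str = "us") -> List[Dict]:
        """Return raw index quotes for a region.

        Both "url" and "data" receive typed defaults before the branches, so
        the variables are always bound regardless of which branch runs.
        """
        url: str = ""
        if region == "us":
            url = "https://example.com/us_indices.json"  # placeholder endpoint
        if region == "eu":
            url = "https://example.com/eu_indices.json"  # placeholder endpoint

        data: Union[Dict, List[Dict]] = {}
        async with ClientSession() as session:
            async with session.get(url) as response:
                data = await response.json()

        # The payload shape is only known at runtime; when mypy cannot narrow
        # the union, the false positive is silenced with "# type: ignore",
        # which is the same trade-off the patch makes above.
        return data.get("data", [])  # type: ignore

Once the placeholder URLs point at real JSON endpoints, the coroutine can be driven with asyncio.run(get_index_quotes("us")).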