1
1
#!/usr/bin/env python3
2
2
3
- from Trading .stock .gurufocus .gurufocus import download_html , extract_stock_info
3
+ from Trading .stock .gurufocus .gurufocus import GurufocusAnalyzer
4
4
from Trading .utils .google_search import get_first_google_result
5
- from Trading .utils .cli import Named
5
+ from Trading .utils .cli import Named , JsonFileWriter
6
6
from Trading .utils .custom_logging import get_logger
7
+ from Trading .config .config import GURUFOCUS_DOWNLOADS_PATH
7
8
from typing import Optional , List
8
9
import os
9
10
import fire
13
14
14
15
15
16
# DEBUG=true cli.py analyze --names '["pdco", "paypal", "johnson&johnson", "mcdonalds", "pepsi", "uniper", "palantir"]'
class GuruFocusCLI(Named, JsonFileWriter):
    """Command-line interface for scraping GuruFocus stock summaries.

    Combines the ``Named`` mixin (single ``name`` / list of ``names``
    selection) with ``JsonFileWriter`` (persists scraped results to a
    JSON file).
    """

    def __init__(
        self,
        name: Optional[str] = None,
        names: Optional[List[str]] = None,
        filename: Optional[str] = None,
    ):
        # Name selection is handled entirely by the Named mixin.
        Named.__init__(self, name=name, names=names)
        # Default output location lives under the configured downloads
        # directory when the caller does not supply an explicit path.
        if filename is None:
            filename = GURUFOCUS_DOWNLOADS_PATH / "gurufocus.json"
        JsonFileWriter.__init__(self, filename=filename)

24
def analyze (self ):
20
25
if not self .name and not self .names :
21
26
LOGGER .error ("Name is required" )
@@ -27,18 +32,20 @@ def analyze(self):
27
32
urls .append (get_first_google_result ("gurufocus summary " + name ))
28
33
LOGGER .debug (f"URLs: { urls } " )
29
34
30
- for url in urls :
31
- LOGGER .info (f"Scraping { url } " )
32
- download_html (url , filename = "gurufocus_page.html" )
33
- html_file_path = "gurufocus_page.html"
34
- stock_info = extract_stock_info (html_file_path )
35
- if os .path .exists (html_file_path ):
36
- os .remove (html_file_path )
37
- if not stock_info :
38
- LOGGER .error (f"Failed to extract stock info from { url } " )
39
- continue
40
- LOGGER .info (stock_info )
41
- time .sleep (0.5 )
35
+ gf_analyzer = GurufocusAnalyzer (self .json_file_writer )
36
+ gf_analyzer .run (items = urls , data = {})
37
+ # for url in urls:
38
+ # LOGGER.info(f"Scraping {url}")
39
+ # download_html(url, filename="gurufocus_page.html")
40
+ # html_file_path = "gurufocus_page.html"
41
+ # stock_info = extract_stock_info(html_file_path)
42
+ # if os.path.exists(html_file_path):
43
+ # os.remove(html_file_path)
44
+ # if not stock_info:
45
+ # LOGGER.error(f"Failed to extract stock info from {url}")
46
+ # continue
47
+ # LOGGER.info(stock_info)
48
+ # time.sleep(0.1)
42
49
43
50
if __name__ == "__main__":
    # Expose the CLI via python-fire: public methods of GuruFocusCLI
    # (e.g. `analyze`) become subcommands.
    fire.Fire(GuruFocusCLI)
0 commit comments