@@ -255,6 +255,21 @@ def default_data_dir() -> str:
255
255
return os .path .dirname (__file__ )
256
256
257
257
258
def normpath(path: str, options: Options) -> str:
    """Normalize a path: relative in bazel mode, absolute otherwise.

    (Bazel's distributed cache doesn't like filesystem metadata to
    end up in output files.)
    """
    # TODO: Could we always use relpath?  (The worry in non-bazel
    # mode is that a moved file could change its full module name
    # while keeping the same size, mtime and hash.)
    normalize = os.path.relpath if options.bazel else os.path.abspath
    return normalize(path)
258
273
CacheMeta = NamedTuple ('CacheMeta' ,
259
274
[('id' , str ),
260
275
('path' , str ),
@@ -589,9 +604,10 @@ def __init__(self, data_dir: str,
589
604
self .fscache = fscache
590
605
self .find_module_cache = FindModuleCache (self .search_paths , self .fscache , self .options )
591
606
if options .sqlite_cache :
592
- self .metastore = SqliteMetadataStore (_cache_dir_prefix (self )) # type: MetadataStore
607
+ self .metastore = SqliteMetadataStore (
608
+ _cache_dir_prefix (self .options )) # type: MetadataStore
593
609
else :
594
- self .metastore = FilesystemMetadataStore (_cache_dir_prefix (self ))
610
+ self .metastore = FilesystemMetadataStore (_cache_dir_prefix (self . options ))
595
611
596
612
# a mapping from source files to their corresponding shadow files
597
613
# for efficient lookup
@@ -623,7 +639,7 @@ def maybe_swap_for_shadow_path(self, path: str) -> str:
623
639
if not self .shadow_map :
624
640
return path
625
641
626
- path = self . normpath (path )
642
+ path = normpath (path , self . options )
627
643
628
644
previously_checked = path in self .shadow_equivalence_map
629
645
if not previously_checked :
@@ -651,20 +667,6 @@ def getmtime(self, path: str) -> int:
651
667
else :
652
668
return int (self .metastore .getmtime (path ))
653
669
654
def normpath(self, path: str) -> str:
    """Convert path to absolute; but to relative in bazel mode.

    (Bazel's distributed cache doesn't like filesystem metadata to
    end up in output files.)
    """
    # TODO: Could we always use relpath?  (A worry in non-bazel
    # mode would be that a moved file may change its full module
    # name without changing its size, mtime or hash.)
    return os.path.relpath(path) if self.options.bazel else os.path.abspath(path)
668
670
def all_imported_modules_in_file (self ,
669
671
file : MypyFile ) -> List [Tuple [int , str , int ]]:
670
672
"""Find all reachable import statements in a file.
@@ -866,7 +868,7 @@ def write_deps_cache(rdeps: Dict[str, Dict[str, Set[str]]],
866
868
867
869
for id in rdeps :
868
870
if id != FAKE_ROOT_MODULE :
869
- _ , _ , deps_json = get_cache_names (id , graph [id ].xpath , manager )
871
+ _ , _ , deps_json = get_cache_names (id , graph [id ].xpath , manager . options )
870
872
else :
871
873
deps_json = DEPS_ROOT_FILE
872
874
assert deps_json
@@ -896,7 +898,7 @@ def write_deps_cache(rdeps: Dict[str, Dict[str, Set[str]]],
896
898
error = True
897
899
898
900
if error :
899
- manager .errors .set_file (_cache_dir_prefix (manager ), None )
901
+ manager .errors .set_file (_cache_dir_prefix (manager . options ), None )
900
902
manager .errors .report (0 , 0 , "Error writing fine-grained dependencies cache" ,
901
903
blocker = True )
902
904
@@ -963,7 +965,7 @@ def generate_deps_for_cache(manager: BuildManager,
963
965
def write_plugins_snapshot(manager: BuildManager) -> None:
    """Write snapshot of versions and hashes of currently active plugins."""
    snapshot = json.dumps(manager.plugins_snapshot)
    if manager.metastore.write(PLUGIN_SNAPSHOT_FILE, snapshot):
        return
    # The write failed; report a blocking error against the cache dir.
    manager.errors.set_file(_cache_dir_prefix(manager.options), None)
    manager.errors.report(0, 0, "Error writing plugins snapshot",
                          blocker=True)
969
971
@@ -1073,18 +1075,18 @@ def _load_json_file(file: str, manager: BuildManager,
1073
1075
return result
1074
1076
1075
1077
1076
- def _cache_dir_prefix (manager : BuildManager ) -> str :
1078
+ def _cache_dir_prefix (options : Options ) -> str :
1077
1079
"""Get current cache directory (or file if id is given)."""
1078
- if manager . options .bazel :
1080
+ if options .bazel :
1079
1081
# This is needed so the cache map works.
1080
1082
return os .curdir
1081
- cache_dir = manager . options .cache_dir
1082
- pyversion = manager . options .python_version
1083
+ cache_dir = options .cache_dir
1084
+ pyversion = options .python_version
1083
1085
base = os .path .join (cache_dir , '%d.%d' % pyversion )
1084
1086
return base
1085
1087
1086
1088
1087
def get_cache_names(id: str, path: str, options: Options) -> Tuple[str, str, Optional[str]]:
    """Return the file names for the cache files.

    Args:
      id: module ID
      path: module path
      options: the build options

    Returns:
      A tuple with the file names to be used for the meta JSON, the
      data JSON, and the fine-grained deps JSON, respectively.
    """
    pair = None
    if options.cache_map:
        pair = options.cache_map.get(normpath(path, options))
    if pair is not None:
        # The cache map paths were specified relative to the base directory,
        # but the filesystem metastore APIs operates relative to the cache
        # prefix directory.
        # Solve this by rewriting the paths as relative to the root dir.
        # This only makes sense when using the filesystem backed cache.
        root = _cache_dir_prefix(options)
        return os.path.relpath(pair[0], root), os.path.relpath(pair[1], root), None
    prefix = os.path.join(*id.split('.'))
    if os.path.basename(path).startswith('__init__.py'):
        # Packages get their cache files under <prefix>/__init__.*.
        prefix = os.path.join(prefix, '__init__')
    deps_json = prefix + '.deps.json' if options.cache_fine_grained else None
    return prefix + '.meta.json', prefix + '.data.json', deps_json
1121
1123
@@ -1133,7 +1135,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
1133
1135
valid; otherwise None.
1134
1136
"""
1135
1137
# TODO: May need to take more build options into account
1136
- meta_json , data_json , _ = get_cache_names (id , path , manager )
1138
+ meta_json , data_json , _ = get_cache_names (id , path , manager . options )
1137
1139
manager .trace ('Looking for {} at {}' .format (id , meta_json ))
1138
1140
t0 = time .time ()
1139
1141
meta = _load_json_file (meta_json , manager ,
@@ -1237,7 +1239,7 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
1237
1239
1238
1240
if bazel :
1239
1241
# Normalize path under bazel to make sure it isn't absolute
1240
- path = manager . normpath (path )
1242
+ path = normpath (path , manager . options )
1241
1243
try :
1242
1244
st = manager .get_stat (path )
1243
1245
except OSError :
@@ -1325,7 +1327,7 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
1325
1327
meta_str = json .dumps (meta_dict , indent = 2 , sort_keys = True )
1326
1328
else :
1327
1329
meta_str = json .dumps (meta_dict )
1328
- meta_json , _ , _ = get_cache_names (id , path , manager )
1330
+ meta_json , _ , _ = get_cache_names (id , path , manager . options )
1329
1331
manager .log ('Updating mtime for {}: file {}, meta {}, mtime {}'
1330
1332
.format (id , path , meta_json , meta .mtime ))
1331
1333
t1 = time .time ()
@@ -1388,7 +1390,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
1388
1390
bazel = manager .options .bazel
1389
1391
1390
1392
# Obtain file paths.
1391
- meta_json , data_json , _ = get_cache_names (id , path , manager )
1393
+ meta_json , data_json , _ = get_cache_names (id , path , manager . options )
1392
1394
manager .log ('Writing {} {} {} {}' .format (
1393
1395
id , path , meta_json , data_json ))
1394
1396
@@ -1491,7 +1493,7 @@ def delete_cache(id: str, path: str, manager: BuildManager) -> None:
1491
1493
# We don't delete .deps files on errors, since the dependencies
1492
1494
# are mostly generated from other files and the metadata is
1493
1495
# tracked separately.
1494
- meta_path , data_path , _ = get_cache_names (id , path , manager )
1496
+ meta_path , data_path , _ = get_cache_names (id , path , manager . options )
1495
1497
cache_paths = [meta_path , data_path ]
1496
1498
manager .log ('Deleting {} {} {}' .format (id , path , " " .join (x for x in cache_paths if x )))
1497
1499
0 commit comments