crytic_compile.platform.hardhat
Hardhat platform
1""" 2Hardhat platform 3""" 4import json 5import logging 6import os 7import shutil 8import subprocess 9from pathlib import Path 10from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union 11 12from crytic_compile.compilation_unit import CompilationUnit 13from crytic_compile.compiler.compiler import CompilerVersion 14from crytic_compile.platform.abstract_platform import AbstractPlatform 15from crytic_compile.platform.exceptions import InvalidCompilation 16 17# Handle cycle 18from crytic_compile.platform.solc import relative_to_short 19from crytic_compile.platform.types import Type 20from crytic_compile.utils.naming import convert_filename, extract_name 21from crytic_compile.utils.natspec import Natspec 22from crytic_compile.utils.subprocess import run 23 24if TYPE_CHECKING: 25 from crytic_compile import CryticCompile 26 27LOGGER = logging.getLogger("CryticCompile") 28 29# pylint: disable=too-many-locals 30def hardhat_like_parsing( 31 crytic_compile: "CryticCompile", target: str, build_directory: Path, working_dir: str 32) -> None: 33 """ 34 This function parse the output generated by hardhat. 35 It can be re-used by any platform that follows the same schema (ex:foudnry) 36 37 38 Args: 39 crytic_compile: CryticCompile object 40 target: target 41 build_directory: build directory 42 working_dir: working directory 43 44 Raises: 45 InvalidCompilation: If hardhat failed to run 46 47 """ 48 if not os.path.isdir(build_directory): 49 txt = ( 50 f"Compilation failed. Can you run build command?\n{build_directory} is not a directory." 51 ) 52 raise InvalidCompilation(txt) 53 54 files = sorted( 55 os.listdir(build_directory), key=lambda x: os.path.getmtime(Path(build_directory, x)) 56 ) 57 files = [str(f) for f in files if str(f).endswith(".json")] 58 if not files: 59 txt = f"Compilation failed. Can you run build command?\n{build_directory} is empty." 
60 raise InvalidCompilation(txt) 61 62 for file in files: 63 build_info = Path(build_directory, file) 64 65 # The file here should always ends .json, but just in case use ife 66 uniq_id = file if ".json" not in file else file[0:-5] 67 compilation_unit = CompilationUnit(crytic_compile, uniq_id) 68 69 with open(build_info, encoding="utf8") as file_desc: 70 loaded_json = json.load(file_desc) 71 72 targets_json = loaded_json["output"] 73 74 version_from_config = loaded_json["solcVersion"] # TODO supper vyper 75 input_json = loaded_json["input"] 76 compiler = "solc" if input_json["language"] == "Solidity" else "vyper" 77 # Foundry has the optimizer dict empty when the "optimizer" key is not set in foundry.toml 78 optimized = input_json["settings"]["optimizer"].get("enabled", False) 79 80 compilation_unit.compiler_version = CompilerVersion( 81 compiler=compiler, version=version_from_config, optimized=optimized 82 ) 83 84 skip_filename = compilation_unit.compiler_version.version in [ 85 f"0.4.{x}" for x in range(0, 10) 86 ] 87 88 if "sources" in targets_json: 89 for path, info in targets_json["sources"].items(): 90 if skip_filename: 91 path = convert_filename( 92 target, 93 relative_to_short, 94 crytic_compile, 95 working_dir=working_dir, 96 ) 97 else: 98 path = convert_filename( 99 path, 100 relative_to_short, 101 crytic_compile, 102 working_dir=working_dir, 103 ) 104 105 source_unit = compilation_unit.create_source_unit(path) 106 source_unit.ast = info.get("ast", info.get("legacyAST")) 107 if source_unit.ast is None: 108 raise InvalidCompilation( 109 f"AST not found for {path} in {build_info} directory" 110 ) 111 112 if "contracts" in targets_json: 113 for original_filename, contracts_info in targets_json["contracts"].items(): 114 115 filename = convert_filename( 116 original_filename, 117 relative_to_short, 118 crytic_compile, 119 working_dir=working_dir, 120 ) 121 122 source_unit = compilation_unit.create_source_unit(filename) 123 124 for original_contract_name, info in contracts_info.items(): 125 contract_name = extract_name(original_contract_name) 126 127 source_unit.add_contract_name(contract_name) 128 compilation_unit.filename_to_contracts[filename].add(contract_name) 129 130 source_unit.abis[contract_name] = info["abi"] 131 source_unit.bytecodes_init[contract_name] = info["evm"]["bytecode"][ 132 "object" 133 ] 134 source_unit.bytecodes_runtime[contract_name] = info["evm"][ 135 "deployedBytecode" 136 ]["object"] 137 source_unit.srcmaps_init[contract_name] = info["evm"]["bytecode"][ 138 "sourceMap" 139 ].split(";") 140 source_unit.srcmaps_runtime[contract_name] = info["evm"][ 141 "deployedBytecode" 142 ]["sourceMap"].split(";") 143 userdoc = info.get("userdoc", {}) 144 devdoc = info.get("devdoc", {}) 145 natspec = Natspec(userdoc, devdoc) 146 source_unit.natspec[contract_name] = natspec 147 148 149class Hardhat(AbstractPlatform): 150 """ 151 Hardhat platform 152 """ 153 154 NAME = "Hardhat" 155 PROJECT_URL = "https://github.com/nomiclabs/hardhat" 156 TYPE = Type.HARDHAT 157 158 def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None: 159 """Run the compilation 160 161 Args: 162 crytic_compile (CryticCompile): Associated CryticCompile object 163 **kwargs: optional arguments. 
Used: "hardhat_ignore", "hardhat_ignore_compile", "ignore_compile", 164 "hardhat_artifacts_directory","hardhat_working_dir","npx_disable" 165 166 """ 167 168 hardhat_ignore_compile, base_cmd = self._settings(kwargs) 169 170 detected_paths = self._get_hardhat_paths(base_cmd, kwargs) 171 172 build_directory = Path( 173 self._target, 174 detected_paths["artifacts"], 175 "build-info", 176 ) 177 178 hardhat_working_dir = str(Path(self._target, detected_paths["root"])) 179 180 if not hardhat_ignore_compile: 181 cmd = base_cmd + ["compile", "--force"] 182 run(cmd, cwd=self._target) 183 184 hardhat_like_parsing(crytic_compile, self._target, build_directory, hardhat_working_dir) 185 186 def clean(self, **kwargs: str) -> None: 187 """Clean compilation artifacts 188 189 Args: 190 **kwargs: optional arguments. 191 """ 192 193 hardhat_ignore_compile, base_cmd = self._settings(kwargs) 194 195 if hardhat_ignore_compile: 196 return 197 198 for clean_cmd in [["clean"], ["clean", "--global"]]: 199 run(base_cmd + clean_cmd, cwd=self._target) 200 201 @staticmethod 202 def is_supported(target: str, **kwargs: str) -> bool: 203 """Check if the target is an hardhat project 204 205 Args: 206 target (str): path to the target 207 **kwargs: optional arguments. Used: "hardhat_ignore" 208 209 Returns: 210 bool: True if the target is an hardhat project 211 """ 212 hardhat_ignore = kwargs.get("hardhat_ignore", False) 213 if hardhat_ignore: 214 return False 215 216 return ( 217 os.path.isfile(os.path.join(target, "hardhat.config.js")) 218 or os.path.isfile(os.path.join(target, "hardhat.config.ts")) 219 or os.path.isfile(os.path.join(target, "hardhat.config.cjs")) 220 ) 221 222 def is_dependency(self, path: str) -> bool: 223 """Check if the path is a dependency 224 225 Args: 226 path (str): path to the target 227 228 Returns: 229 bool: True if the target is a dependency 230 """ 231 if path in self._cached_dependencies: 232 return self._cached_dependencies[path] 233 ret = "node_modules" in Path(path).parts 234 self._cached_dependencies[path] = ret 235 return ret 236 237 def _guessed_tests(self) -> List[str]: 238 """Guess the potential unit tests commands 239 240 Returns: 241 List[str]: The guessed unit tests commands 242 """ 243 return ["hardhat test"] 244 245 @staticmethod 246 def _settings(args: Dict[str, Any]) -> Tuple[bool, List[str]]: 247 hardhat_ignore_compile = args.get("hardhat_ignore_compile", False) or args.get( 248 "ignore_compile", False 249 ) 250 251 base_cmd = ["hardhat"] 252 if not args.get("npx_disable", False): 253 base_cmd = ["npx"] + base_cmd 254 255 return hardhat_ignore_compile, base_cmd 256 257 def _get_hardhat_paths( 258 self, base_cmd: List[str], args: Dict[str, str] 259 ) -> Dict[str, Union[Path, str]]: 260 """Obtain hardhat configuration paths, defaulting to the 261 standard config if needed. 
262 263 Args: 264 base_cmd ([str]): hardhat command 265 args (Dict[str, str]): crytic-compile options that may affect paths 266 267 Returns: 268 Dict[str, str]: hardhat paths configuration 269 """ 270 target_path = Path(self._target).resolve() 271 default_paths = { 272 "root": target_path, 273 "configFile": target_path.joinpath("hardhat.config.js"), 274 "sources": target_path.joinpath("contracts"), 275 "cache": target_path.joinpath("cache"), 276 "artifacts": target_path.joinpath("artifacts"), 277 "tests": target_path.joinpath("test"), 278 } 279 override_paths = {} 280 281 if args.get("hardhat_cache_directory", None): 282 override_paths["cache"] = Path(target_path, args["hardhat_cache_directory"]) 283 284 if args.get("hardhat_artifacts_directory", None): 285 override_paths["artifacts"] = Path(target_path, args["hardhat_artifacts_directory"]) 286 287 if args.get("hardhat_working_dir", None): 288 override_paths["root"] = Path(target_path, args["hardhat_working_dir"]) 289 290 print_paths = "console.log(JSON.stringify(config.paths));process.exit()" 291 292 try: 293 config_str = self._run_hardhat_console(base_cmd, print_paths) 294 paths = json.loads(config_str or "{}") 295 return {**default_paths, **paths, **override_paths} 296 except ValueError as e: 297 LOGGER.info("Problem deserializing hardhat configuration, using defaults: %s", e) 298 except (OSError, subprocess.SubprocessError) as e: 299 LOGGER.info("Problem executing hardhat to fetch configuration, using defaults: %s", e) 300 301 return {**default_paths, **override_paths} 302 303 def _run_hardhat_console(self, base_cmd: List[str], command: str) -> Optional[str]: 304 """Run a JS command in the hardhat console 305 306 Args: 307 base_cmd ([str]): hardhat command 308 command (str): console command to run 309 310 Returns: 311 Optional[str]: command output if execution succeeds 312 """ 313 with subprocess.Popen( 314 base_cmd + ["console", "--no-compile"], 315 stdin=subprocess.PIPE, 316 stdout=subprocess.PIPE, 317 stderr=subprocess.PIPE, 318 cwd=self._target, 319 executable=shutil.which(base_cmd[0]), 320 ) as process: 321 stdout_bytes, stderr_bytes = process.communicate(command.encode("utf-8")) 322 stdout, stderr = ( 323 stdout_bytes.decode(), 324 stderr_bytes.decode(errors="backslashreplace"), 325 ) 326 327 if stderr: 328 LOGGER.info("Problem executing hardhat: %s", stderr) 329 return None 330 331 return stdout
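For reference, hardhat_like_parsing only relies on a small subset of the build-info schema. The dictionary below is a minimal sketch of that subset, reconstructed from the keys the parser above reads; the concrete values are illustrative and real Hardhat build-info files contain additional fields.

# Minimal sketch of a build-info JSON as consumed by hardhat_like_parsing().
# Values are illustrative placeholders, not real compiler output.
example_build_info = {
    "solcVersion": "0.8.19",
    "input": {
        "language": "Solidity",
        "settings": {"optimizer": {"enabled": True, "runs": 200}},
    },
    "output": {
        "sources": {
            "contracts/Token.sol": {"ast": {"nodeType": "SourceUnit", "nodes": []}},
        },
        "contracts": {
            "contracts/Token.sol": {
                "Token": {
                    "abi": [],
                    "evm": {
                        "bytecode": {"object": "6080...", "sourceMap": "25:120:0:-:0;..."},
                        "deployedBytecode": {"object": "6080...", "sourceMap": "25:120:0:-:0;..."},
                    },
                    "userdoc": {},
                    "devdoc": {},
                }
            }
        },
    },
}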
def hardhat_like_parsing(
    crytic_compile: "CryticCompile", target: str, build_directory: Path, working_dir: str
) -> None:
This function parses the output generated by hardhat. It can be re-used by any platform that follows the same schema (e.g. Foundry).

Args:
    crytic_compile: CryticCompile object
    target: target
    build_directory: build directory
    working_dir: working directory

Raises:
    InvalidCompilation: If hardhat failed to run
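As a hedged illustration of the re-use mentioned above, a helper along the following lines could ingest already-built artifacts produced by any tool that writes the same build-info schema. The helper name and artifact path are hypothetical, not part of crytic-compile.

from pathlib import Path
from typing import TYPE_CHECKING

from crytic_compile.platform.hardhat import hardhat_like_parsing

if TYPE_CHECKING:
    from crytic_compile import CryticCompile


def parse_prebuilt_artifacts(crytic_compile: "CryticCompile", target: str) -> None:
    """Hypothetical helper: ingest Hardhat-style build-info files for `target`
    without re-running the build tool."""
    # Hardhat writes one JSON file per compilation run under <artifacts>/build-info.
    build_directory = Path(target, "artifacts", "build-info")
    # Raises InvalidCompilation if the directory is missing or empty.
    hardhat_like_parsing(crytic_compile, target, build_directory, working_dir=target)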
class Hardhat(AbstractPlatform):
Hardhat platform
def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
Run the compilation
Args:
    crytic_compile (CryticCompile): Associated CryticCompile object
    **kwargs: optional arguments. Used: "hardhat_ignore", "hardhat_ignore_compile", "ignore_compile", "hardhat_artifacts_directory", "hardhat_working_dir", "npx_disable"
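A minimal usage sketch, assuming the usual entry point: these keyword arguments are normally forwarded from the CryticCompile constructor rather than by calling compile() directly. The project path is illustrative.

from crytic_compile import CryticCompile

# Compile a Hardhat project; platform-specific kwargs are forwarded to Hardhat.compile().
# hardhat_ignore_compile=True would skip `npx hardhat compile --force` and only parse
# existing artifacts; npx_disable=True would invoke `hardhat` directly instead of `npx hardhat`.
cc = CryticCompile(
    "/path/to/hardhat/project",  # illustrative path
    hardhat_ignore_compile=False,
    npx_disable=False,
)

# One compilation unit is created per build-info file found under <artifacts>/build-info.
for unit_id, unit in cc.compilation_units.items():
    print(unit_id, unit.compiler_version.version)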
def clean(self, **kwargs: str) -> None:
Clean compilation artifacts
Args:
    **kwargs: optional arguments.
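A minimal sketch of invoking clean() directly on a platform instance; the path is illustrative. With default settings this runs `npx hardhat clean` followed by `npx hardhat clean --global` in the target directory.

from crytic_compile.platform.hardhat import Hardhat

platform = Hardhat("/path/to/hardhat/project")  # illustrative path
platform.clean()

# ignore_compile (or hardhat_ignore_compile) turns clean() into a no-op.
platform.clean(ignore_compile=True)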
@staticmethod
def is_supported(target: str, **kwargs: str) -> bool:
Check if the target is a Hardhat project

Args:
    target (str): path to the target
    **kwargs: optional arguments. Used: "hardhat_ignore"

Returns:
    bool: True if the target is a Hardhat project
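For illustration, detection only looks for a hardhat.config.* file at the top level of the target directory; the path below is hypothetical.

from crytic_compile.platform.hardhat import Hardhat

project = "/path/to/project"  # illustrative path

# True if hardhat.config.js, hardhat.config.ts, or hardhat.config.cjs exists in `project`.
print(Hardhat.is_supported(project))

# The "hardhat_ignore" option forces the check to fail regardless of any config file.
print(Hardhat.is_supported(project, hardhat_ignore=True))  # always False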
def is_dependency(self, path: str) -> bool:
Check if the path is a dependency
Args:
    path (str): path to the target

Returns:
    bool: True if the target is a dependency
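A short sketch of the node_modules heuristic, using hypothetical paths; results are memoized in the platform's dependency cache.

from crytic_compile.platform.hardhat import Hardhat

platform = Hardhat("/path/to/hardhat/project")  # illustrative path

# Any path containing a node_modules component is treated as a dependency.
print(platform.is_dependency("node_modules/@openzeppelin/contracts/token/ERC20/ERC20.sol"))  # True
print(platform.is_dependency("contracts/Token.sol"))  # False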