crytic_compile.crytic_compile

CryticCompile main module. Handle the compilation.
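
A minimal usage sketch (hedged: the target path below is a placeholder, and a working solc binary is assumed to be installed on the system):

# Sketch only: "contracts/Token.sol" is a hypothetical target.
from crytic_compile.crytic_compile import CryticCompile

cc = CryticCompile("contracts/Token.sol")

# Each compilation produces one or more compilation units, each holding source units.
for unit_id, unit in cc.compilation_units.items():
    for source_unit in unit.source_units.values():
        print(unit_id, source_unit.contracts_names)

# Map a global byte offset back to (line, column) in a source file.
filename = next(iter(cc.filenames))
line, column = cc.get_line_from_offset(filename, 0)

# Export to the crytic-compile standard JSON format (the default when no
# "export_format" keyword is given); returns the list of generated files.
generated_files = cc.export()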

  1"""
  2CryticCompile main module. Handle the compilation.
  3"""
  4import base64
  5import glob
  6import inspect
  7import json
  8import logging
  9import os
 10import re
 11import subprocess
 12import tempfile
 13from collections import defaultdict
 14from pathlib import Path
 15from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Type, Union
 16
 17from solc_select.solc_select import (
 18    install_artifacts,
 19    installed_versions,
 20    artifact_path,
 21)
 22from crytic_compile.compilation_unit import CompilationUnit
 23from crytic_compile.platform import all_platforms
 24from crytic_compile.platform.solc_standard_json import SolcStandardJson
 25from crytic_compile.platform.vyper import VyperStandardJson
 26from crytic_compile.platform.abstract_platform import AbstractPlatform
 27from crytic_compile.platform.all_export import PLATFORMS_EXPORT
 28from crytic_compile.platform.solc import Solc
 29from crytic_compile.platform.standard import export_to_standard
 30from crytic_compile.utils.naming import Filename
 31from crytic_compile.utils.npm import get_package_name
 32from crytic_compile.utils.zip import load_from_zip
 33
 34# Cycle dependency
 35if TYPE_CHECKING:
 36    pass
 37
 38LOGGER = logging.getLogger("CryticCompile")
 39logging.basicConfig()
 40
 41
 42# pylint: disable=too-many-lines
 43
 44
 45def get_platforms() -> List[Type[AbstractPlatform]]:
 46    """Return the available platforms classes in order of preference
 47
 48    Returns:
 49        List[Type[AbstractPlatform]]: Available platforms
 50    """
 51    platforms = [getattr(all_platforms, name) for name in dir(all_platforms)]
 52    platforms = [d for d in platforms if inspect.isclass(d) and issubclass(d, AbstractPlatform)]
 53    return sorted(platforms, key=lambda platform: (platform.TYPE.priority(), platform.TYPE))
 54
 55
 56def is_supported(target: str) -> bool:
 57    """Check if the target is supporte. Iterate over all known platforms
 58
 59    Args:
 60        target (str): path to the target
 61
 62    Returns:
 63        bool: True if the target is supported
 64    """
 65    platforms = get_platforms()
 66    return any(platform.is_supported(target) for platform in platforms) or target.endswith(".zip")
 67
 68
 69def _extract_libraries(libraries_str: Optional[str]) -> Optional[Dict[str, int]]:
 70
 71    if not libraries_str:
 72        return None
 73    # Extract tuple like (libname1, 0x00)
 74    pattern = r"\((?P<name>\w+),\s*(?P<value1>0x[0-9a-fA-F]{2,40})\),?"
 75    matches = re.findall(pattern, libraries_str)
 76
 77    if not matches:
 78        raise ValueError(
 79            f"Invalid library linking directive\nGot:\n{libraries_str}\nExpected format:\n(libname1, 0x00),(libname2, 0x02)"
 80        )
 81
 82    ret: Dict[str, int] = {}
 83    for key, value in matches:
 84        ret[key] = int(value, 16) if value.startswith("0x") else int(value)
 85    return ret
 86
 87
 88def _configure_solc(solc_requested: str, offline: bool) -> str:
 89    """
 90    Determine which solc binary to use based on the requested version or path (e.g. '0.8.0' or '/usr/bin/solc-0.8.0').
 91
 92    Args:
 93        solc_requested (str): solc version or path
 94        offline (bool): whether to allow network requests
 95
 96    Returns:
 97        str: path to solc binary
 98    """
 99    if Path(solc_requested).exists():
100        solc_path = Path(solc_requested)
101    else:
102        solc_version = solc_requested
103        if solc_requested in installed_versions():
104            solc_path = artifact_path(solc_requested)
105        else:
106            # Respect foundry offline option and skip installation.
107            if not offline:
108                install_artifacts([solc_version])
109            solc_path = artifact_path(solc_version)
110    return solc_path.absolute().as_posix()
111
112
113# pylint: disable=too-many-instance-attributes
114class CryticCompile:
115    """
116    Main class.
117    """
118
119    # pylint: disable=too-many-branches
120    def __init__(self, target: Union[str, AbstractPlatform], **kwargs: str) -> None:
121        """See https://github.com/crytic/crytic-compile/wiki/Configuration
122        Target is usually a file or a project directory. It can be an AbstractPlatform
123        for custom setup
124
125        Args:
126            target (Union[str, AbstractPlatform]): Target
127            **kwargs: additional arguments. Used: "cwd"
128        """
129
130        # dependencies is needed for platform conversion
131        self._dependencies: Set = set()
132
133        self._src_content: Dict = {}
134
135        # Mapping each file to
136        #  offset -> line, column
137        # This is not memory optimized, but it allows an offset lookup in O(1).
138        # Because Slither frequently performs this lookup during AST parsing,
139        # we decided to favor running time over memory.
140        self._cached_offset_to_line: Dict[Filename, Dict[int, Tuple[int, int]]] = {}
141        # Lines are indexed from 1
142        self._cached_line_to_offset: Dict[Filename, Dict[int, int]] = defaultdict(dict)
143
144        # Used to return a line's source code from its line number
145        # Note: line 1 is at index 0
146        self._cached_line_to_code: Dict[Filename, List[bytes]] = {}
147
148        custom_cwd = kwargs.get("cwd")
149        if custom_cwd is not None:
150            self._working_dir = Path(custom_cwd)
151        else:
152            self._working_dir = Path.cwd()
153
154        # pylint: disable=too-many-nested-blocks
155        if isinstance(target, str):
156            platform = self._init_platform(target, **kwargs)
157            # If the platform is Solc, it means we are trying to compile a single file,
158            # so we try to see if we are in a known compilation framework to retrieve
159            # information like remappings and the solc version
160            if isinstance(platform, Solc):
161                # Try to get the platform of the current working directory
162                platform_wd = next(
163                    (
164                        p(target)
165                        for p in get_platforms()
166                        if p.is_supported(str(self._working_dir), **kwargs)
167                    ),
168                    None,
169                )
170                # If no platform has been found, or if it is the Solc platform, we cannot automatically retrieve the framework configuration.
171                if platform_wd and not isinstance(platform_wd, Solc):
172                    platform_config = platform_wd.config(str(self._working_dir))
173                    if platform_config:
174                        kwargs["solc_args"] = ""
175                        kwargs["solc_remaps"] = ""
176
177                        if platform_config.remappings:
178                            kwargs["solc_remaps"] = platform_config.remappings
179                        if platform_config.solc_version is None:
180                            message = f"Could not detect solc version from {platform_wd.NAME} config. Falling back to system version..."
181                            LOGGER.warning(message)
182                        else:
183                            kwargs["solc"] = _configure_solc(
184                                platform_config.solc_version, platform_config.offline
185                            )
186                        if platform_config.optimizer:
187                            kwargs["solc_args"] += " --optimize"
188                        if platform_config.optimizer_runs:
189                            kwargs[
190                                "solc_args"
191                            ] += f" --optimize-runs {platform_config.optimizer_runs}"
192                        if platform_config.via_ir:
193                            kwargs["solc_args"] += " --via-ir"
194                        if platform_config.allow_paths:
195                            kwargs["solc_args"] += f" --allow-paths {platform_config.allow_paths}"
196                        if platform_config.evm_version:
197                            kwargs["solc_args"] += f" --evm-version {platform_config.evm_version}"
198        else:
199            platform = target
200
201        self._package = get_package_name(platform.target)
202
203        self._platform: AbstractPlatform = platform
204
205        self._compilation_units: Dict[str, CompilationUnit] = {}
206
207        self._bytecode_only = False
208
209        self.libraries: Optional[Dict[str, int]] = _extract_libraries(kwargs.get("compile_libraries", None))  # type: ignore
210
211        self._compile(**kwargs)
212
213    @property
214    def target(self) -> str:
215        """Return the project's target
216
217        Returns:
218            str: target
219        """
220        return self._platform.target
221
222    @property
223    def compilation_units(self) -> Dict[str, CompilationUnit]:
224        """Return the compilation units
225
226        Returns:
227            Dict[str, CompilationUnit]: compilation id => CompilationUnit
228        """
229        return self._compilation_units
230
231    def is_in_multiple_compilation_unit(self, contract: str) -> bool:
232        """Check if the contract is shared by multiple compilation unit
233
234        Args:
235            contract (str): contract name
236
237        Returns:
238            bool: True if the contract is in multiple compilation units
239        """
240        count = 0
241        for compilation_unit in self._compilation_units.values():
242            for source_unit in compilation_unit.source_units.values():
243                if contract in source_unit.contracts_names:
244                    count += 1
245        return count >= 2
246
247    ###################################################################################
248    ###################################################################################
249    # region Utils
250    ###################################################################################
251    ###################################################################################
252    @property
253    def filenames(self) -> Set[Filename]:
254        """
255        Return the set of all the filenames used
256
257        Returns:
258             Set[Filename]: set of filenames
259        """
260        filenames: Set[Filename] = set()
261        for compile_unit in self._compilation_units.values():
262            filenames |= set(compile_unit.filenames)
263        return filenames
264
265    def filename_lookup(self, filename: str) -> Filename:
266        """Return a crytic_compile.naming.Filename from a any filename
267
268        Args:
269            filename (str): filename (used/absolute/relative)
270
271        Raises:
272            ValueError: If the filename is not in the project
273
274        Returns:
275            Filename: Associated Filename object
276        """
277        for compile_unit in self.compilation_units.values():
278            try:
279                return compile_unit.filename_lookup(filename)
280            except ValueError:
281                pass
282
283        raise ValueError(f"{filename} does not exist")
284
285    @property
286    def dependencies(self) -> Set[str]:
287        """Return the dependencies files
288
289        Returns:
290            Set[str]: Dependencies files
291        """
292        return self._dependencies
293
294    def is_dependency(self, filename: str) -> bool:
295        """Check if the filename is a dependency
296
297        Args:
298            filename (str): filename
299
300        Returns:
301            bool: True if the filename is a dependency
302        """
303        return filename in self._dependencies or self.platform.is_dependency(filename)
304
305    @property
306    def package(self) -> Optional[str]:
307        """Return the package name
308
309        Returns:
310            Optional[str]: package name
311        """
312        return self._package
313
314    @property
315    def working_dir(self) -> Path:
316        """Return the working directory
317
318        Returns:
319            Path: Working directory
320        """
321        return self._working_dir
322
323    @working_dir.setter
324    def working_dir(self, path: Path) -> None:
325        """Set the working directory
326
327        Args:
328            path (Path): new working directory
329        """
330        self._working_dir = path
331
332    def _get_cached_offset_to_line(self, file: Filename) -> None:
333        """Compute the cached offsets to lines
334
335        Args:
336            file (Filename): filename
337        """
338        if file not in self._cached_line_to_code:
339            self._get_cached_line_to_code(file)
340
341        source_code = self._cached_line_to_code[file]
342        acc = 0
343        lines_delimiters: Dict[int, Tuple[int, int]] = {}
344        for line_number, x in enumerate(source_code):
345            self._cached_line_to_offset[file][line_number + 1] = acc
346
347            for i in range(acc, acc + len(x)):
348                lines_delimiters[i] = (line_number + 1, i - acc + 1)
349
350            acc += len(x)
351        lines_delimiters[acc] = (len(source_code) + 1, 0)
352        self._cached_offset_to_line[file] = lines_delimiters
353
354    def get_line_from_offset(self, filename: Union[Filename, str], offset: int) -> Tuple[int, int]:
355        """Return the line from a given offset
356
357        Args:
358            filename (Union[Filename, str]): filename
359            offset (int): global offset
360
361        Returns:
362            Tuple[int, int]: (line, column)
363        """
364        if isinstance(filename, str):
365            file = self.filename_lookup(filename)
366        else:
367            file = filename
368        if file not in self._cached_offset_to_line:
369            self._get_cached_offset_to_line(file)
370
371        lines_delimiters = self._cached_offset_to_line[file]
372        return lines_delimiters[offset]
373
374    def get_global_offset_from_line(self, filename: Union[Filename, str], line: int) -> int:
375        """Return the global offset from a given line
376
377        Args:
378            filename (Union[Filename, str]): filename
379            line (int): line
380
381        Returns:
382            int: global offset
383        """
384        if isinstance(filename, str):
385            file = self.filename_lookup(filename)
386        else:
387            file = filename
388        if file not in self._cached_line_to_offset:
389            self._get_cached_offset_to_line(file)
390
391        return self._cached_line_to_offset[file][line]
392
393    def _get_cached_line_to_code(self, file: Filename) -> None:
394        """Compute the cached lines
395
396        Args:
397            file (Filename): filename
398        """
399        source_code = self.src_content[file.absolute]
400        source_code_encoded = source_code.encode("utf-8")
401        source_code_list = source_code_encoded.splitlines(True)
402        self._cached_line_to_code[file] = source_code_list
403
404    def get_code_from_line(self, filename: Union[Filename, str], line: int) -> Optional[bytes]:
405        """Return the code from the line. Start at line = 1.
406        Return None if the line is not in the file
407
408        Args:
409            filename (Union[Filename, str]): filename
410            line (int): line
411
412        Returns:
413            Optional[bytes]: line of code
414        """
415        if isinstance(filename, str):
416            file = self.filename_lookup(filename)
417        else:
418            file = filename
419        if file not in self._cached_line_to_code:
420            self._get_cached_line_to_code(file)
421
422        lines = self._cached_line_to_code[file]
423        if line - 1 < 0 or line - 1 >= len(lines):
424            return None
425        return lines[line - 1]
426
427    @property
428    def src_content(self) -> Dict[str, str]:
429        """Return the source content
430
431        Returns:
432            Dict[str, str]: filename -> source_code
433        """
434        # If we have no source code loaded yet, load it for every contract.
435        if not self._src_content:
436            for filename in self.filenames:
437                if filename.absolute not in self._src_content and os.path.isfile(filename.absolute):
438                    with open(
439                        filename.absolute, encoding="utf8", newline="", errors="replace"
440                    ) as source_file:
441                        self._src_content[filename.absolute] = source_file.read()
442        return self._src_content
443
444    @src_content.setter
445    def src_content(self, src: Dict) -> None:
446        """Set the source content
447
448        Args:
449            src (Dict): New source content
450        """
451        self._src_content = src
452
453    def src_content_for_file(self, filename_absolute: str) -> Optional[str]:
454        """Get the source code of the file
455
456        Args:
457            filename_absolute (str): absolute filename
458
459        Returns:
460            Optional[str]: source code
461        """
462        return self.src_content.get(filename_absolute, None)
463
464    # endregion
465    ###################################################################################
466    ###################################################################################
467    # region Type
468    ###################################################################################
469    ###################################################################################
470
471    @property
472    def type(self) -> int:
473        """Return the type of the platform used
474
475        Returns:
476            int: Platform type (see AbstractPlatform.TYPE)
477        """
478        # Type should have been set by now
479        assert self._platform.TYPE
480        return self._platform.TYPE
481
482    @property
483    def platform(self) -> AbstractPlatform:
484        """Return the underlying platform
485
486        Returns:
487            AbstractPlatform: Underlying platform
488        """
489        assert self._platform
490        return self._platform
491
492    # endregion
493    ###################################################################################
494    ###################################################################################
495    # region Compiler information
496    ###################################################################################
497    ###################################################################################
498
499    @property
500    def bytecode_only(self) -> bool:
501        """Return true if only the bytecode was retrieved.
502        This can only happen for the etherscan platform
503
504        Returns:
505            bool: True if the project is bytecode only
506        """
507        return self._bytecode_only
508
509    @bytecode_only.setter
510    def bytecode_only(self, bytecode: bool) -> None:
511        """Set the bytecode_only info (only for etherscan)
512
513        Args:
514            bytecode (bool): new bytecode_only status
515        """
516        self._bytecode_only = bytecode
517
518    # endregion
519    ###################################################################################
520    ###################################################################################
521    # region Import
522    ###################################################################################
523    ###################################################################################
524
525    # TODO: refactor import_archive_compilations to rely on one CryticCompile object
526    # But multiple compilation units
527    @staticmethod
528    def import_archive_compilations(compiled_archive: Union[str, Dict]) -> List["CryticCompile"]:
529        """Import from an archive. compiled_archive is either a json file or the loaded dictionary
530        The dictionary myst contain the "compilations" keyword
531
532        Args:
533            compiled_archive: Union[str, Dict]: list of archive to import
534
535        Raises:
536            ValueError: The import did not worked
537
538        Returns:
539            [CryticCompile]: List of crytic compile object
540        """
541        # If the argument is a string, it is likely a filepath, load the archive.
542        if isinstance(compiled_archive, str):
543            with open(compiled_archive, encoding="utf8") as file:
544                compiled_archive = json.load(file)
545
546        # Verify the compiled archive is of the correct form
547        if not isinstance(compiled_archive, dict) or "compilations" not in compiled_archive:
548            raise ValueError("Cannot import compiled archive, invalid format.")
549
550        return [CryticCompile(archive) for archive in compiled_archive["compilations"]]
551
552    # endregion
553
554    ###################################################################################
555    ###################################################################################
556    # region Export
557    ###################################################################################
558    ###################################################################################
559
560    def export(self, **kwargs: str) -> List[str]:
561        """Export to json.
562        The json format can be crytic-compile, solc or truffle.
563        The type must be specified in the kwargs with "export_format"
564
565        Args:
566            **kwargs: optional arguments. Used: "export_format"
567
568        Raises:
569            ValueError: Incorrect type
570
571        Returns:
572            List[str]: List of the filenames generated
573        """
574        export_format = kwargs.get("export_format", None)
575        if export_format is None:
576            return export_to_standard(self, **kwargs)
577        if export_format not in PLATFORMS_EXPORT:
578            raise ValueError("Export format unknown")
579        return PLATFORMS_EXPORT[export_format](self, **kwargs)
580
581    # endregion
582    ###################################################################################
583    ###################################################################################
584    # region Compile
585    ###################################################################################
586    ###################################################################################
587
588    # pylint: disable=no-self-use
589    def _init_platform(self, target: str, **kwargs: str) -> AbstractPlatform:
590        """Init the platform
591
592        Args:
593            target (str): path to the target
594            **kwargs: optional arguments. Used: "compile_force_framework", "compile_custom_build", "compile_remove_metadata"
595
596
597        Returns:
598            AbstractPlatform: Underlying platform
599        """
600        platforms = get_platforms()
601        platform = None
602
603        compile_force_framework: Union[str, None] = kwargs.get("compile_force_framework", None)
604        if compile_force_framework:
605            platform = next(
606                (p(target) for p in platforms if p.NAME.lower() == compile_force_framework.lower()),
607                None,
608            )
609
610        if not platform:
611            platform = next(
612                (p(target) for p in platforms if p.is_supported(target, **kwargs)), None
613            )
614
615        if not platform:
616            platform = Solc(target)
617
618        return platform
619
620    def _compile(self, **kwargs: str) -> None:
621        """Compile the project
622
623        Args:
624            **kwargs: optional arguments. Used: "compile_custom_build", "compile_remove_metadata"
625        """
626        custom_build: Union[None, str] = kwargs.get("compile_custom_build", None)
627        if custom_build:
628            self._run_custom_build(custom_build)
629
630        else:
631            if not kwargs.get("skip_clean", False) and not kwargs.get("ignore_compile", False):
632                self._platform.clean(**kwargs)
633            self._platform.compile(self, **kwargs)
634
635        remove_metadata = kwargs.get("compile_remove_metadata", False)
636        if remove_metadata:
637            for compilation_unit in self._compilation_units.values():
638                for source_unit in compilation_unit.source_units.values():
639                    source_unit.remove_metadata()
640
641    @staticmethod
642    def _run_custom_build(custom_build: str) -> None:
643        """Run a custom build
644
645        Args:
646            custom_build (str): Command to run
647        """
648        cmd = custom_build.split(" ")
649        LOGGER.info(
650            "'%s' running",
651            " ".join(cmd),
652        )
653        with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process:
654            stdout_bytes, stderr_bytes = process.communicate()
655            stdout, stderr = (
656                stdout_bytes.decode(errors="backslashreplace"),
657                stderr_bytes.decode(errors="backslashreplace"),
658            )  # convert bytestrings to unicode strings
659
660            LOGGER.info(stdout)
661            if stderr:
662                LOGGER.error("Custom build error: \n%s", stderr)
663
664    # endregion
665    ###################################################################################
666    ###################################################################################
667    # region NPM
668    ###################################################################################
669    ###################################################################################
670
671    @property
672    def package_name(self) -> Optional[str]:
673        """Return the npm package name
674
675        Returns:
676            Optional[str]: Package name
677        """
678        return self._package
679
680    @package_name.setter
681    def package_name(self, name: Optional[str]) -> None:
682        """Set the package name
683
684        Args:
685            name (Optional[str]): New package name
686        """
687        self._package = name
688
689
690# endregion
691###################################################################################
692###################################################################################
693
694# TODO: refactor me to be integrated within CryticCompile.__init__
695def compile_all(target: str, **kwargs: str) -> List[CryticCompile]:
696    """Given a direct or glob pattern target, compiles all underlying sources and returns
697    all the relevant instances of CryticCompile.
698
699    Args:
700        target (str): A string representing a file/directory path or glob pattern denoting where compilation should occur.
701        **kwargs: optional arguments. Used: "solc_standard_json"
702
703    Raises:
704        ValueError: If the target could not be compiled
705
706    Returns:
707        List[CryticCompile]: Returns a list of CryticCompile instances for all compilations which occurred.
708    """
709    use_solc_standard_json = kwargs.get("solc_standard_json", False)
710
711    # Check if the target refers to a valid target already.
712    compilations: List[CryticCompile] = []
713    if os.path.isfile(target) or is_supported(target):
714        if target.endswith(".zip"):
715            compilations = load_from_zip(target)
716        elif target.endswith(".zip.base64"):
717            with tempfile.NamedTemporaryFile() as tmp:
718                with open(target, encoding="utf8") as target_file:
719                    tmp.write(base64.b64decode(target_file.read()))
720                    compilations = load_from_zip(tmp.name)
721        else:
722            compilations.append(CryticCompile(target, **kwargs))
723    elif os.path.isdir(target):
724        solidity_filenames = glob.glob(os.path.join(target, "*.sol"))
725        vyper_filenames = glob.glob(os.path.join(target, "*.vy"))
726        # Determine if we're using --standard-solc option to
727        # aggregate many files into a single compilation.
728        if use_solc_standard_json:
729            # If we're using standard solc, then we generate our
730            # input to create a single compilation with all files
731            solc_standard_json = SolcStandardJson()
732            solc_standard_json.add_source_files(solidity_filenames)
733            compilations.append(CryticCompile(solc_standard_json, **kwargs))
734        else:
735            # We compile each file and add it to our compilations.
736            for filename in solidity_filenames:
737                compilations.append(CryticCompile(filename, **kwargs))
738
739        if vyper_filenames:
740            vyper_standard_json = VyperStandardJson()
741            vyper_standard_json.add_source_files(vyper_filenames)
742            compilations.append(CryticCompile(vyper_standard_json, **kwargs))
743    else:
744        # TODO split glob into language
745        # # Attempt to perform glob expansion of target/filename
746        # globbed_targets = glob.glob(target, recursive=True)
747        # print(globbed_targets)
748
749        raise ValueError(f"{str(target)} is not a file or directory.")
750
751    return compilations
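
For batch use, compile_all accepts a single file, a directory of Solidity/Vyper sources, a glob pattern, or a zip archive. A short sketch follows (hedged: the "contracts/" directory is hypothetical, solc is assumed to be installed, and the library-linking directive uses the format checked by _extract_libraries):

# Sketch only: compile every .sol file found in a directory, one compilation per file.
from crytic_compile.crytic_compile import compile_all

compilations = compile_all(
    "contracts/",                       # placeholder directory of .sol files
    compile_libraries="(MyLib, 0x42)",  # parsed into CryticCompile.libraries
)
for cc in compilations:
    print(cc.target, len(cc.filenames))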
LOGGER = <Logger CryticCompile (WARNING)>
def get_platforms() -> List[Type[crytic_compile.platform.abstract_platform.AbstractPlatform]]:
46def get_platforms() -> List[Type[AbstractPlatform]]:
47    """Return the available platforms classes in order of preference
48
49    Returns:
50        List[Type[AbstractPlatform]]: Available platforms
51    """
52    platforms = [getattr(all_platforms, name) for name in dir(all_platforms)]
53    platforms = [d for d in platforms if inspect.isclass(d) and issubclass(d, AbstractPlatform)]
54    return sorted(platforms, key=lambda platform: (platform.TYPE.priority(), platform.TYPE))

Return the available platforms classes in order of preference

Returns: List[Type[AbstractPlatform]]: Available platforms

def is_supported(target: str) -> bool:
57def is_supported(target: str) -> bool:
58    """Check if the target is supporte. Iterate over all known platforms
59
60    Args:
61        target (str): path to the target
62
63    Returns:
64        bool: True if the target is supported
65    """
66    platforms = get_platforms()
67    return any(platform.is_supported(target) for platform in platforms) or target.endswith(".zip")

Check if the target is supporte. Iterate over all known platforms

Args: target (str): path to the target

Returns: bool: True if the target is supported

class CryticCompile:
115class CryticCompile:
116    """
117    Main class.
118    """
119
120    # pylint: disable=too-many-branches
121    def __init__(self, target: Union[str, AbstractPlatform], **kwargs: str) -> None:
122        """See https://github.com/crytic/crytic-compile/wiki/Configuration
123        Target is usually a file or a project directory. It can be an AbstractPlatform
124        for custom setup
125
126        Args:
127            target (Union[str, AbstractPlatform]): Target
128            **kwargs: additional arguments. Used: "cwd"
129        """
130
131        # dependencies is needed for platform conversion
132        self._dependencies: Set = set()
133
134        self._src_content: Dict = {}
135
136        # Mapping each file to
137        #  offset -> line, column
138        # This is not memory optimized, but allow an offset lookup in O(1)
139        # Because we frequently do this lookup in Slither during the AST parsing
140        # We decided to favor the running time versus memory
141        self._cached_offset_to_line: Dict[Filename, Dict[int, Tuple[int, int]]] = {}
142        # Lines are indexed from 1
143        self._cached_line_to_offset: Dict[Filename, Dict[int, int]] = defaultdict(dict)
144
145        # Return the line from the line number
146        # Note: line 1 is at index 0
147        self._cached_line_to_code: Dict[Filename, List[bytes]] = {}
148
149        custom_cwd = kwargs.get("cwd")
150        if custom_cwd is not None:
151            self._working_dir = Path(custom_cwd)
152        else:
153            self._working_dir = Path.cwd()
154
155        # pylint: disable=too-many-nested-blocks
156        if isinstance(target, str):
157            platform = self._init_platform(target, **kwargs)
158            # If the platform is Solc it means we are trying to compile a single
159            # we try to see if we are in a known compilation framework to retrieve
160            # information like remappings and solc version
161            if isinstance(platform, Solc):
162                # Try to get the platform of the current working directory
163                platform_wd = next(
164                    (
165                        p(target)
166                        for p in get_platforms()
167                        if p.is_supported(str(self._working_dir), **kwargs)
168                    ),
169                    None,
170                )
171                # If no platform has been found or if it's the Solc platform, we can't automatically compile.
172                if platform_wd and not isinstance(platform_wd, Solc):
173                    platform_config = platform_wd.config(str(self._working_dir))
174                    if platform_config:
175                        kwargs["solc_args"] = ""
176                        kwargs["solc_remaps"] = ""
177
178                        if platform_config.remappings:
179                            kwargs["solc_remaps"] = platform_config.remappings
180                        if platform_config.solc_version is None:
181                            message = f"Could not detect solc version from {platform_wd.NAME} config. Falling back to system version..."
182                            LOGGER.warning(message)
183                        else:
184                            kwargs["solc"] = _configure_solc(
185                                platform_config.solc_version, platform_config.offline
186                            )
187                        if platform_config.optimizer:
188                            kwargs["solc_args"] += "--optimize"
189                        if platform_config.optimizer_runs:
190                            kwargs[
191                                "solc_args"
192                            ] += f"--optimize-runs {platform_config.optimizer_runs}"
193                        if platform_config.via_ir:
194                            kwargs["solc_args"] += "--via-ir"
195                        if platform_config.allow_paths:
196                            kwargs["solc_args"] += f"--allow-paths {platform_config.allow_paths}"
197                        if platform_config.evm_version:
198                            kwargs["solc_args"] += f"--evm-version {platform_config.evm_version}"
199        else:
200            platform = target
201
202        self._package = get_package_name(platform.target)
203
204        self._platform: AbstractPlatform = platform
205
206        self._compilation_units: Dict[str, CompilationUnit] = {}
207
208        self._bytecode_only = False
209
210        self.libraries: Optional[Dict[str, int]] = _extract_libraries(kwargs.get("compile_libraries", None))  # type: ignore
211
212        self._compile(**kwargs)
213
214    @property
215    def target(self) -> str:
216        """Return the project's target
217
218        Returns:
219            str: target
220        """
221        return self._platform.target
222
223    @property
224    def compilation_units(self) -> Dict[str, CompilationUnit]:
225        """Return the compilation units
226
227        Returns:
228            Dict[str, CompilationUnit]: compilation id => CompilationUnit
229        """
230        return self._compilation_units
231
232    def is_in_multiple_compilation_unit(self, contract: str) -> bool:
233        """Check if the contract is shared by multiple compilation unit
234
235        Args:
236            contract (str): contract name
237
238        Returns:
239            bool: True if the contract is in multiple compilation units
240        """
241        count = 0
242        for compilation_unit in self._compilation_units.values():
243            for source_unit in compilation_unit.source_units.values():
244                if contract in source_unit.contracts_names:
245                    count += 1
246        return count >= 2
247
248    ###################################################################################
249    ###################################################################################
250    # region Utils
251    ###################################################################################
252    ###################################################################################
253    @property
254    def filenames(self) -> Set[Filename]:
255        """
256        Return the set of all the filenames used
257
258        Returns:
259             Set[Filename]: set of filenames
260        """
261        filenames: Set[Filename] = set()
262        for compile_unit in self._compilation_units.values():
263            filenames |= set(compile_unit.filenames)
264        return filenames
265
266    def filename_lookup(self, filename: str) -> Filename:
267        """Return a crytic_compile.naming.Filename from a any filename
268
269        Args:
270            filename (str): filename (used/absolute/relative)
271
272        Raises:
273            ValueError: If the filename is not in the project
274
275        Returns:
276            Filename: Associated Filename object
277        """
278        for compile_unit in self.compilation_units.values():
279            try:
280                return compile_unit.filename_lookup(filename)
281            except ValueError:
282                pass
283
284        raise ValueError(f"{filename} does not exist")
285
286    @property
287    def dependencies(self) -> Set[str]:
288        """Return the dependencies files
289
290        Returns:
291            Set[str]: Dependencies files
292        """
293        return self._dependencies
294
295    def is_dependency(self, filename: str) -> bool:
296        """Check if the filename is a dependency
297
298        Args:
299            filename (str): filename
300
301        Returns:
302            bool: True if the filename is a dependency
303        """
304        return filename in self._dependencies or self.platform.is_dependency(filename)
305
306    @property
307    def package(self) -> Optional[str]:
308        """Return the package name
309
310        Returns:
311            Optional[str]: package name
312        """
313        return self._package
314
315    @property
316    def working_dir(self) -> Path:
317        """Return the working directory
318
319        Returns:
320            Path: Working directory
321        """
322        return self._working_dir
323
324    @working_dir.setter
325    def working_dir(self, path: Path) -> None:
326        """Set the working directory
327
328        Args:
329            path (Path): new working directory
330        """
331        self._working_dir = path
332
333    def _get_cached_offset_to_line(self, file: Filename) -> None:
334        """Compute the cached offsets to lines
335
336        Args:
337            file (Filename): filename
338        """
339        if file not in self._cached_line_to_code:
340            self._get_cached_line_to_code(file)
341
342        source_code = self._cached_line_to_code[file]
343        acc = 0
344        lines_delimiters: Dict[int, Tuple[int, int]] = {}
345        for line_number, x in enumerate(source_code):
346            self._cached_line_to_offset[file][line_number + 1] = acc
347
348            for i in range(acc, acc + len(x)):
349                lines_delimiters[i] = (line_number + 1, i - acc + 1)
350
351            acc += len(x)
352        lines_delimiters[acc] = (len(source_code) + 1, 0)
353        self._cached_offset_to_line[file] = lines_delimiters
354
355    def get_line_from_offset(self, filename: Union[Filename, str], offset: int) -> Tuple[int, int]:
356        """Return the line from a given offset
357
358        Args:
359            filename (Union[Filename, str]): filename
360            offset (int): global offset
361
362        Returns:
363            Tuple[int, int]: (line, line offset)
364        """
365        if isinstance(filename, str):
366            file = self.filename_lookup(filename)
367        else:
368            file = filename
369        if file not in self._cached_offset_to_line:
370            self._get_cached_offset_to_line(file)
371
372        lines_delimiters = self._cached_offset_to_line[file]
373        return lines_delimiters[offset]
374
375    def get_global_offset_from_line(self, filename: Union[Filename, str], line: int) -> int:
376        """Return the global offset from a given line
377
378        Args:
379            filename (Union[Filename, str]): filename
380            line (int): line
381
382        Returns:
383            int: global offset
384        """
385        if isinstance(filename, str):
386            file = self.filename_lookup(filename)
387        else:
388            file = filename
389        if file not in self._cached_line_to_offset:
390            self._get_cached_offset_to_line(file)
391
392        return self._cached_line_to_offset[file][line]
393
394    def _get_cached_line_to_code(self, file: Filename) -> None:
395        """Compute the cached lines
396
397        Args:
398            file (Filename): filename
399        """
400        source_code = self.src_content[file.absolute]
401        source_code_encoded = source_code.encode("utf-8")
402        source_code_list = source_code_encoded.splitlines(True)
403        self._cached_line_to_code[file] = source_code_list
404
405    def get_code_from_line(self, filename: Union[Filename, str], line: int) -> Optional[bytes]:
406        """Return the code from the line. Start at line = 1.
407        Return None if the line is not in the file
408
409        Args:
410            filename (Union[Filename, str]): filename
411            line (int): line
412
413        Returns:
414            Optional[bytes]: line of code
415        """
416        if isinstance(filename, str):
417            file = self.filename_lookup(filename)
418        else:
419            file = filename
420        if file not in self._cached_line_to_code:
421            self._get_cached_line_to_code(file)
422
423        lines = self._cached_line_to_code[file]
424        if line - 1 < 0 or line - 1 >= len(lines):
425            return None
426        return lines[line - 1]
427
428    @property
429    def src_content(self) -> Dict[str, str]:
430        """Return the source content
431
432        Returns:
433            Dict[str, str]: filename -> source_code
434        """
435        # If we have no source code loaded yet, load it for every contract.
436        if not self._src_content:
437            for filename in self.filenames:
438                if filename.absolute not in self._src_content and os.path.isfile(filename.absolute):
439                    with open(
440                        filename.absolute, encoding="utf8", newline="", errors="replace"
441                    ) as source_file:
442                        self._src_content[filename.absolute] = source_file.read()
443        return self._src_content
444
445    @src_content.setter
446    def src_content(self, src: Dict) -> None:
447        """Set the source content
448
449        Args:
450            src (Dict): New source content
451        """
452        self._src_content = src
453
454    def src_content_for_file(self, filename_absolute: str) -> Optional[str]:
455        """Get the source code of the file
456
457        Args:
458            filename_absolute (str): absolute filename
459
460        Returns:
461            Optional[str]: source code
462        """
463        return self.src_content.get(filename_absolute, None)
464
465    # endregion
466    ###################################################################################
467    ###################################################################################
468    # region Type
469    ###################################################################################
470    ###################################################################################
471
472    @property
473    def type(self) -> int:
474        """Return the type of the platform used
475
476        Returns:
477            int: Platform type (see AbstractPatform.TYPE)
478        """
479        # Type should have been set by now
480        assert self._platform.TYPE
481        return self._platform.TYPE
482
483    @property
484    def platform(self) -> AbstractPlatform:
485        """Return the underlying platform
486
487        Returns:
488            AbstractPlatform: Underlying platform
489        """
490        assert self._platform
491        return self._platform
492
493    # endregion
494    ###################################################################################
495    ###################################################################################
496    # region Compiler information
497    ###################################################################################
498    ###################################################################################
499
500    @property
501    def bytecode_only(self) -> bool:
502        """Return true if only the bytecode was retrieved.
503        This can only happen for the etherscan platform
504
505        Returns:
506            bool: True if the project is bytecode only
507        """
508        return self._bytecode_only
509
510    @bytecode_only.setter
511    def bytecode_only(self, bytecode: bool) -> None:
512        """Set the bytecode_only info (only for etherscan)
513
514        Args:
515            bytecode (bool): new bytecode_only status
516        """
517        self._bytecode_only = bytecode
518
519    # endregion
520    ###################################################################################
521    ###################################################################################
522    # region Import
523    ###################################################################################
524    ###################################################################################
525
526    # TODO: refactor import_archive_compilations to rely on one CryticCompile object
527    # But multiple compilation units
528    @staticmethod
529    def import_archive_compilations(compiled_archive: Union[str, Dict]) -> List["CryticCompile"]:
530        """Import from an archive. compiled_archive is either a json file or the loaded dictionary
531        The dictionary myst contain the "compilations" keyword
532
533        Args:
534            compiled_archive: Union[str, Dict]: list of archive to import
535
536        Raises:
537            ValueError: The import did not worked
538
539        Returns:
540            [CryticCompile]: List of crytic compile object
541        """
542        # If the argument is a string, it is likely a filepath, load the archive.
543        if isinstance(compiled_archive, str):
544            with open(compiled_archive, encoding="utf8") as file:
545                compiled_archive = json.load(file)
546
547        # Verify the compiled archive is of the correct form
548        if not isinstance(compiled_archive, dict) or "compilations" not in compiled_archive:
549            raise ValueError("Cannot import compiled archive, invalid format.")
550
551        return [CryticCompile(archive) for archive in compiled_archive["compilations"]]
552
553    # endregion
554
555    ###################################################################################
556    ###################################################################################
557    # region Export
558    ###################################################################################
559    ###################################################################################
560
561    def export(self, **kwargs: str) -> List[str]:
562        """Export to json.
563        The json format can be crytic-compile, solc or truffle.
564        The type must be specified in the kwargs with "export_format"
565
566        Args:
567            **kwargs: optional arguments. Used: "export_format"
568
569        Raises:
570            ValueError: Incorrect type
571
572        Returns:
573            List[str]: List of the filenames generated
574        """
575        export_format = kwargs.get("export_format", None)
576        if export_format is None:
577            return export_to_standard(self, **kwargs)
578        if export_format not in PLATFORMS_EXPORT:
579            raise ValueError("Export format unknown")
580        return PLATFORMS_EXPORT[export_format](self, **kwargs)
581
582    # endregion
583    ###################################################################################
584    ###################################################################################
585    # region Compile
586    ###################################################################################
587    ###################################################################################
588
589    # pylint: disable=no-self-use
590    def _init_platform(self, target: str, **kwargs: str) -> AbstractPlatform:
591        """Init the platform
592
593        Args:
594            target (str): path to the target
595            **kwargs: optional arguments. Used: "compile_force_framework", "compile_custom_build", "compile_remove_metadata"
596
597
598        Returns:
599            AbstractPlatform: Underlying platform
600        """
601        platforms = get_platforms()
602        platform = None
603
604        compile_force_framework: Union[str, None] = kwargs.get("compile_force_framework", None)
605        if compile_force_framework:
606            platform = next(
607                (p(target) for p in platforms if p.NAME.lower() == compile_force_framework.lower()),
608                None,
609            )
610
611        if not platform:
612            platform = next(
613                (p(target) for p in platforms if p.is_supported(target, **kwargs)), None
614            )
615
616        if not platform:
617            platform = Solc(target)
618
619        return platform
620
621    def _compile(self, **kwargs: str) -> None:
622        """Compile the project
623
624        Args:
625            **kwargs: optional arguments. Used: "compile_custom_build", "compile_remove_metadata"
626        """
627        custom_build: Union[None, str] = kwargs.get("compile_custom_build", None)
628        if custom_build:
629            self._run_custom_build(custom_build)
630
631        else:
632            if not kwargs.get("skip_clean", False) and not kwargs.get("ignore_compile", False):
633                self._platform.clean(**kwargs)
634            self._platform.compile(self, **kwargs)
635
636        remove_metadata = kwargs.get("compile_remove_metadata", False)
637        if remove_metadata:
638            for compilation_unit in self._compilation_units.values():
639                for source_unit in compilation_unit.source_units.values():
640                    source_unit.remove_metadata()
641
642    @staticmethod
643    def _run_custom_build(custom_build: str) -> None:
644        """Run a custom build
645
646        Args:
647            custom_build (str): Command to run
648        """
649        cmd = custom_build.split(" ")
650        LOGGER.info(
651            "'%s' running",
652            " ".join(cmd),
653        )
654        with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process:
655            stdout_bytes, stderr_bytes = process.communicate()
656            stdout, stderr = (
657                stdout_bytes.decode(errors="backslashreplace"),
658                stderr_bytes.decode(errors="backslashreplace"),
659            )  # convert bytestrings to unicode strings
660
661            LOGGER.info(stdout)
662            if stderr:
663                LOGGER.error("Custom build error: \n%s", stderr)
664
665    # endregion
666    ###################################################################################
667    ###################################################################################
668    # region NPM
669    ###################################################################################
670    ###################################################################################
671
672    @property
673    def package_name(self) -> Optional[str]:
674        """Return the npm package name
675
676        Returns:
677            Optional[str]: Package name
678        """
679        return self._package
680
681    @package_name.setter
682    def package_name(self, name: Optional[str]) -> None:
683        """Set the package name
684
685        Args:
686            name (Optional[str]): New package name
687        """
688        self._package = name

Main class.

CryticCompile( target: Union[str, crytic_compile.platform.abstract_platform.AbstractPlatform], **kwargs: str)
121    def __init__(self, target: Union[str, AbstractPlatform], **kwargs: str) -> None:
122        """See https://github.com/crytic/crytic-compile/wiki/Configuration
123        Target is usually a file or a project directory. It can be an AbstractPlatform
124        for custom setup
125
126        Args:
127            target (Union[str, AbstractPlatform]): Target
128            **kwargs: additional arguments. Used: "cwd"
129        """
130
131        # dependencies is needed for platform conversion
132        self._dependencies: Set = set()
133
134        self._src_content: Dict = {}
135
136        # Mapping each file to
137        #  offset -> line, column
138        # This is not memory optimized, but allow an offset lookup in O(1)
139        # Because we frequently do this lookup in Slither during the AST parsing
140        # We decided to favor the running time versus memory
141        self._cached_offset_to_line: Dict[Filename, Dict[int, Tuple[int, int]]] = {}
142        # Lines are indexed from 1
143        self._cached_line_to_offset: Dict[Filename, Dict[int, int]] = defaultdict(dict)
144
145        # Return the line from the line number
146        # Note: line 1 is at index 0
147        self._cached_line_to_code: Dict[Filename, List[bytes]] = {}
148
149        custom_cwd = kwargs.get("cwd")
150        if custom_cwd is not None:
151            self._working_dir = Path(custom_cwd)
152        else:
153            self._working_dir = Path.cwd()
154
155        # pylint: disable=too-many-nested-blocks
156        if isinstance(target, str):
157            platform = self._init_platform(target, **kwargs)
158            # If the platform is Solc it means we are trying to compile a single
159            # we try to see if we are in a known compilation framework to retrieve
160            # information like remappings and solc version
161            if isinstance(platform, Solc):
162                # Try to get the platform of the current working directory
163                platform_wd = next(
164                    (
165                        p(target)
166                        for p in get_platforms()
167                        if p.is_supported(str(self._working_dir), **kwargs)
168                    ),
169                    None,
170                )
171                # If no platform has been found or if it's the Solc platform, we can't automatically compile.
172                if platform_wd and not isinstance(platform_wd, Solc):
173                    platform_config = platform_wd.config(str(self._working_dir))
174                    if platform_config:
175                        kwargs["solc_args"] = ""
176                        kwargs["solc_remaps"] = ""
177
178                        if platform_config.remappings:
179                            kwargs["solc_remaps"] = platform_config.remappings
180                        if platform_config.solc_version is None:
181                            message = f"Could not detect solc version from {platform_wd.NAME} config. Falling back to system version..."
182                            LOGGER.warning(message)
183                        else:
184                            kwargs["solc"] = _configure_solc(
185                                platform_config.solc_version, platform_config.offline
186                            )
187                        if platform_config.optimizer:
188                            kwargs["solc_args"] += "--optimize"
189                        if platform_config.optimizer_runs:
190                            kwargs[
191                                "solc_args"
192                            ] += f"--optimize-runs {platform_config.optimizer_runs}"
193                        if platform_config.via_ir:
194                            kwargs["solc_args"] += "--via-ir"
195                        if platform_config.allow_paths:
196                            kwargs["solc_args"] += f"--allow-paths {platform_config.allow_paths}"
197                        if platform_config.evm_version:
198                            kwargs["solc_args"] += f"--evm-version {platform_config.evm_version}"
199        else:
200            platform = target
201
202        self._package = get_package_name(platform.target)
203
204        self._platform: AbstractPlatform = platform
205
206        self._compilation_units: Dict[str, CompilationUnit] = {}
207
208        self._bytecode_only = False
209
210        self.libraries: Optional[Dict[str, int]] = _extract_libraries(kwargs.get("compile_libraries", None))  # type: ignore
211
212        self._compile(**kwargs)

See https://github.com/crytic/crytic-compile/wiki/Configuration Target is usually a file or a project directory. It can be an AbstractPlatform for custom setup

Args: target (Union[str, AbstractPlatform]): Target **kwargs: additional arguments. Used: "cwd"
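
For illustration, a minimal construction sketch; the target path contracts/Token.sol is hypothetical and assumes a solc binary is available, but any supported target (file, project directory, or AbstractPlatform instance) is handled the same way:

    # Sketch only: the target path is a placeholder.
    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol", cwd=".")  # "cwd" is the only kwarg read directly by __init__
    print(cc.target, cc.working_dir)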

libraries: Union[Dict[str, int], NoneType]
target: str
214    @property
215    def target(self) -> str:
216        """Return the project's target
217
218        Returns:
219            str: target
220        """
221        return self._platform.target

Return the project's target

Returns: str: target

compilation_units: Dict[str, crytic_compile.compilation_unit.CompilationUnit]
223    @property
224    def compilation_units(self) -> Dict[str, CompilationUnit]:
225        """Return the compilation units
226
227        Returns:
228            Dict[str, CompilationUnit]: compilation id => CompilationUnit
229        """
230        return self._compilation_units

Return the compilation units

Returns: Dict[str, CompilationUnit]: compilation id => CompilationUnit
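
For example, a minimal sketch (hypothetical target path) that lists every contract name of every source unit, per compilation unit:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    for unit_id, unit in cc.compilation_units.items():
        for source_unit in unit.source_units.values():
            print(unit_id, source_unit.contracts_names)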

def is_in_multiple_compilation_unit(self, contract: str) -> bool:
232    def is_in_multiple_compilation_unit(self, contract: str) -> bool:
233        """Check if the contract is shared by multiple compilation units
234
235        Args:
236            contract (str): contract name
237
238        Returns:
239            bool: True if the contract is in multiple compilation units
240        """
241        count = 0
242        for compilation_unit in self._compilation_units.values():
243            for source_unit in compilation_unit.source_units.values():
244                if contract in source_unit.contracts_names:
245                    count += 1
246        return count >= 2

Check if the contract is shared by multiple compilation units

Args: contract (str): contract name

Returns: bool: True if the contract is in multiple compilation units
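
A short sketch of the check; both the target path and the contract name "Token" are placeholders:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    if cc.is_in_multiple_compilation_unit("Token"):  # "Token" is a placeholder contract name
        print("Token is compiled in more than one compilation unit")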

filenames: Set[crytic_compile.utils.naming.Filename]
253    @property
254    def filenames(self) -> Set[Filename]:
255        """
256        Return the set of all the filenames used
257
258        Returns:
259             Set[Filename]: set of filenames
260        """
261        filenames: Set[Filename] = set()
262        for compile_unit in self._compilation_units.values():
263            filenames |= set(compile_unit.filenames)
264        return filenames

Return the set of all the filenames used

Returns: Set[Filename]: set of filenames
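
A minimal sketch (hypothetical target) that prints the absolute path of every file known to the project:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    for filename in cc.filenames:
        print(filename.absolute)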

def filename_lookup(self, filename: str) -> crytic_compile.utils.naming.Filename:
266    def filename_lookup(self, filename: str) -> Filename:
267        """Return a crytic_compile.utils.naming.Filename from any filename
268
269        Args:
270            filename (str): filename (used/absolute/relative)
271
272        Raises:
273            ValueError: If the filename is not in the project
274
275        Returns:
276            Filename: Associated Filename object
277        """
278        for compile_unit in self.compilation_units.values():
279            try:
280                return compile_unit.filename_lookup(filename)
281            except ValueError:
282                pass
283
284        raise ValueError(f"{filename} does not exist")

Return a crytic_compile.utils.naming.Filename from any filename

Args: filename (str): filename (used/absolute/relative)

Raises: ValueError: If the filename is not in the project

Returns: Filename: Associated Filename object
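
A sketch of the lookup with the ValueError handled; the looked-up path is hypothetical:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    try:
        file = cc.filename_lookup("contracts/Token.sol")
        print(file.absolute)
    except ValueError:
        print("file is not part of the project")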

dependencies: Set[str]
286    @property
287    def dependencies(self) -> Set[str]:
288        """Return the dependency files
289
290        Returns:
291            Set[str]: Dependency files
292        """
293        return self._dependencies

Return the dependency files

Returns: Set[str]: Dependency files

def is_dependency(self, filename: str) -> bool:
295    def is_dependency(self, filename: str) -> bool:
296        """Check if the filename is a dependency
297
298        Args:
299            filename (str): filename
300
301        Returns:
302            bool: True if the filename is a dependency
303        """
304        return filename in self._dependencies or self.platform.is_dependency(filename)

Check if the filename is a dependency

Args: filename (str): filename

Returns: bool: True if the filename is a dependency
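
A sketch (hypothetical target) that filters out dependencies; which path form matches the dependency set can depend on the platform, and the absolute path is assumed here:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    # Assumption: the absolute path is the form recorded for dependencies.
    own_files = [f for f in cc.filenames if not cc.is_dependency(f.absolute)]
    print(own_files)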

package: Union[str, NoneType]
306    @property
307    def package(self) -> Optional[str]:
308        """Return the package name
309
310        Returns:
311            Optional[str]: package name
312        """
313        return self._package

Return the package name

Returns: Optional[str]: package name

working_dir: pathlib.Path
315    @property
316    def working_dir(self) -> Path:
317        """Return the working directory
318
319        Returns:
320            Path: Working directory
321        """
322        return self._working_dir

Return the working directory

Returns: Path: Working directory

def get_line_from_offset( self, filename: Union[crytic_compile.utils.naming.Filename, str], offset: int) -> Tuple[int, int]:
355    def get_line_from_offset(self, filename: Union[Filename, str], offset: int) -> Tuple[int, int]:
356        """Return the line from a given offset
357
358        Args:
359            filename (Union[Filename, str]): filename
360            offset (int): global offset
361
362        Returns:
363            Tuple[int, int]: (line, line offset)
364        """
365        if isinstance(filename, str):
366            file = self.filename_lookup(filename)
367        else:
368            file = filename
369        if file not in self._cached_offset_to_line:
370            self._get_cached_offset_to_line(file)
371
372        lines_delimiters = self._cached_offset_to_line[file]
373        return lines_delimiters[offset]

Return the line from a given offset

Args: filename (Union[Filename, str]): filename offset (int): global offset

Returns: Tuple[int, int]: (line, line offset)
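
A sketch mapping a byte offset (such as one taken from a solc source mapping) back to a line; the path and the offset value 42 are arbitrary placeholders:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    # 42 is an arbitrary offset and must fall inside the file.
    line, line_offset = cc.get_line_from_offset("contracts/Token.sol", 42)
    print(line, line_offset)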

def get_global_offset_from_line( self, filename: Union[crytic_compile.utils.naming.Filename, str], line: int) -> int:
375    def get_global_offset_from_line(self, filename: Union[Filename, str], line: int) -> int:
376        """Return the global offset from a given line
377
378        Args:
379            filename (Union[Filename, str]): filename
380            line (int): line
381
382        Returns:
383            int: global offset
384        """
385        if isinstance(filename, str):
386            file = self.filename_lookup(filename)
387        else:
388            file = filename
389        if file not in self._cached_line_to_offset:
390            self._get_cached_offset_to_line(file)
391
392        return self._cached_line_to_offset[file][line]

Return the global offset from a given line

Args: filename (Union[Filename, str]): filename line (int): line

Returns: int: global offset
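
The inverse direction, as a sketch with a hypothetical path (lines start at 1):

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    offset = cc.get_global_offset_from_line("contracts/Token.sol", 1)  # offset of the first line
    print(offset)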

def get_code_from_line( self, filename: Union[crytic_compile.utils.naming.Filename, str], line: int) -> Union[bytes, NoneType]:
405    def get_code_from_line(self, filename: Union[Filename, str], line: int) -> Optional[bytes]:
406        """Return the code from the line. Start at line = 1.
407        Return None if the line is not in the file
408
409        Args:
410            filename (Union[Filename, str]): filename
411            line (int): line
412
413        Returns:
414            Optional[bytes]: line of code
415        """
416        if isinstance(filename, str):
417            file = self.filename_lookup(filename)
418        else:
419            file = filename
420        if file not in self._cached_line_to_code:
421            self._get_cached_line_to_code(file)
422
423        lines = self._cached_line_to_code[file]
424        if line - 1 < 0 or line - 1 >= len(lines):
425            return None
426        return lines[line - 1]

Return the code from the line. Start at line = 1. Return None if the line is not in the file

Args: filename (Union[Filename, str]): filename line (int): line

Returns: Optional[bytes]: line of code
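
A sketch (hypothetical path) that reads one line of source and handles the out-of-range case:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    code = cc.get_code_from_line("contracts/Token.sol", 1)
    if code is not None:  # None when the line is out of range
        print(code.decode("utf8", errors="replace"))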

src_content: Dict[str, str]
428    @property
429    def src_content(self) -> Dict[str, str]:
430        """Return the source content
431
432        Returns:
433            Dict[str, str]: filename -> source_code
434        """
435        # If we have no source code loaded yet, load it for every source file.
436        if not self._src_content:
437            for filename in self.filenames:
438                if filename.absolute not in self._src_content and os.path.isfile(filename.absolute):
439                    with open(
440                        filename.absolute, encoding="utf8", newline="", errors="replace"
441                    ) as source_file:
442                        self._src_content[filename.absolute] = source_file.read()
443        return self._src_content

Return the source content

Returns: Dict[str, str]: filename -> source_code

def src_content_for_file(self, filename_absolute: str) -> Union[str, NoneType]:
454    def src_content_for_file(self, filename_absolute: str) -> Optional[str]:
455        """Get the source code of the file
456
457        Args:
458            filename_absolute (str): absolute filename
459
460        Returns:
461            Optional[str]: source code
462        """
463        return self.src_content.get(filename_absolute, None)

Get the source code of the file

Args: filename_absolute (str): absolute filename

Returns: Optional[str]: source code
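
A sketch (hypothetical target) showing the lazily populated source cache and the per-file accessor, which returns None for paths outside the project:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    for path, source in cc.src_content.items():  # loaded on first access
        print(path, len(source))
    print(cc.src_content_for_file("/nonexistent/path.sol"))  # None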

type: int
472    @property
473    def type(self) -> int:
474        """Return the type of the platform used
475
476        Returns:
477            int: Platform type (see AbstractPlatform.TYPE)
478        """
479        # Type should have been set by now
480        assert self._platform.TYPE
481        return self._platform.TYPE

Return the type of the platform used

Returns: int: Platform type (see AbstractPlatform.TYPE)

483    @property
484    def platform(self) -> AbstractPlatform:
485        """Return the underlying platform
486
487        Returns:
488            AbstractPlatform: Underlying platform
489        """
490        assert self._platform
491        return self._platform

Return the underlying platform

Returns: AbstractPlatform: Underlying platform

bytecode_only: bool
500    @property
501    def bytecode_only(self) -> bool:
502        """Return true if only the bytecode was retrieved.
503        This can only happen for the etherscan platform
504
505        Returns:
506            bool: True if the project is bytecode only
507        """
508        return self._bytecode_only

Return true if only the bytecode was retrieved. This can only happen for the etherscan platform

Returns: bool: True if the project is bytecode only
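
A sketch (hypothetical target) that reports the detected platform, its type, and whether only bytecode was retrieved:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")  # hypothetical target
    print(cc.platform.NAME, cc.type)  # underlying platform and its TYPE
    if cc.bytecode_only:
        print("only bytecode is available (unverified Etherscan target)")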

@staticmethod
def import_archive_compilations( compiled_archive: Union[str, Dict]) -> list[CryticCompile]:
528    @staticmethod
529    def import_archive_compilations(compiled_archive: Union[str, Dict]) -> List["CryticCompile"]:
530        """Import from an archive. compiled_archive is either a path to a json file or the loaded dictionary.
531        The dictionary must contain the "compilations" key
532
533        Args:
534            compiled_archive: Union[str, Dict]: the archive to import
535
536        Raises:
537            ValueError: The import did not work
538
539        Returns:
540            List[CryticCompile]: List of CryticCompile objects
541        """
542        # If the argument is a string, it is likely a filepath, load the archive.
543        if isinstance(compiled_archive, str):
544            with open(compiled_archive, encoding="utf8") as file:
545                compiled_archive = json.load(file)
546
547        # Verify the compiled archive is of the correct form
548        if not isinstance(compiled_archive, dict) or "compilations" not in compiled_archive:
549            raise ValueError("Cannot import compiled archive, invalid format.")
550
551        return [CryticCompile(archive) for archive in compiled_archive["compilations"]]

Import from an archive. compiled_archive is either a path to a json file or the loaded dictionary. The dictionary must contain the "compilations" key

Args: compiled_archive: Union[str, Dict]: the archive to import

Raises: ValueError: The import did not work

Returns: List[CryticCompile]: List of CryticCompile objects
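
A sketch reloading previously exported compilations; "archive.json" is a placeholder for an archive whose top-level dictionary contains a "compilations" list:

    from crytic_compile.crytic_compile import CryticCompile

    compilations = CryticCompile.import_archive_compilations("archive.json")  # hypothetical file
    print(len(compilations))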

def export(self, **kwargs: str) -> List[str]:
561    def export(self, **kwargs: str) -> List[str]:
562        """Export to json.
563        The json format can be crytic-compile, solc or truffle.
564        The type must be specified in the kwargs with "export_format"
565
566        Args:
567            **kwargs: optional arguments. Used: "export_format"
568
569        Raises:
570            ValueError: Incorrect type
571
572        Returns:
573            List[str]: List of the filenames generated
574        """
575        export_format = kwargs.get("export_format", None)
576        if export_format is None:
577            return export_to_standard(self, **kwargs)
578        if export_format not in PLATFORMS_EXPORT:
579            raise ValueError("Export format unknown")
580        return PLATFORMS_EXPORT[export_format](self, **kwargs)

Export to json. The json format can be crytic-compile, solc or truffle. The type must be specified in the kwargs with "export_format"

Args: **kwargs: optional arguments. Used: "export_format"

Raises: ValueError: Incorrect type

Returns: List[str]: List of the filenames generated
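
A sketch of exporting a compilation (hypothetical target); with no "export_format" the crytic-compile standard format is used, and "solc" and "truffle" are the other formats named in the docstring:

    from crytic_compile.crytic_compile import CryticCompile

    cc = CryticCompile("contracts/Token.sol")       # hypothetical target
    paths = cc.export()                             # crytic-compile standard format
    paths_solc = cc.export(export_format="solc")    # solc-style output
    print(paths, paths_solc)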

package_name: Union[str, NoneType]
672    @property
673    def package_name(self) -> Optional[str]:
674        """Return the npm package name
675
676        Returns:
677            Optional[str]: Package name
678        """
679        return self._package

Return the npm package name

Returns: Optional[str]: Package name

def compile_all( target: str, **kwargs: str) -> List[CryticCompile]:
696def compile_all(target: str, **kwargs: str) -> List[CryticCompile]:
697    """Given a direct target or a glob pattern, compiles all underlying sources and returns
698    all the relevant instances of CryticCompile.
699
700    Args:
701        target (str): A string representing a file/directory path or glob pattern denoting where compilation should occur.
702        **kwargs: optional arguments. Used: "solc_standard_json"
703
704    Raises:
705        ValueError: If the target could not be compiled
706
707    Returns:
708        List[CryticCompile]: Returns a list of CryticCompile instances for all compilations which occurred.
709    """
710    use_solc_standard_json = kwargs.get("solc_standard_json", False)
711
712    # Check if the target refers to a valid target already.
713    compilations: List[CryticCompile] = []
714    if os.path.isfile(target) or is_supported(target):
715        if target.endswith(".zip"):
716            compilations = load_from_zip(target)
717        elif target.endswith(".zip.base64"):
718            with tempfile.NamedTemporaryFile() as tmp:
719                with open(target, encoding="utf8") as target_file:
720                    tmp.write(base64.b64decode(target_file.read()))
721                    compilations = load_from_zip(tmp.name)
722        else:
723            compilations.append(CryticCompile(target, **kwargs))
724    elif os.path.isdir(target):
725        solidity_filenames = glob.glob(os.path.join(target, "*.sol"))
726        vyper_filenames = glob.glob(os.path.join(target, "*.vy"))
727        # Determine if we're using the solc_standard_json option to
728        # aggregate many files into a single compilation.
729        if use_solc_standard_json:
730            # If we're using standard solc, then we generate our
731            # input to create a single compilation with all files
732            solc_standard_json = SolcStandardJson()
733            solc_standard_json.add_source_files(solidity_filenames)
734            compilations.append(CryticCompile(solc_standard_json, **kwargs))
735        else:
736            # We compile each file and add it to our compilations.
737            for filename in solidity_filenames:
738                compilations.append(CryticCompile(filename, **kwargs))
739
740        if vyper_filenames:
741            vyper_standard_json = VyperStandardJson()
742            vyper_standard_json.add_source_files(vyper_filenames)
743            compilations.append(CryticCompile(vyper_standard_json, **kwargs))
744    else:
745        # TODO split glob into language
746        # # Attempt to perform glob expansion of target/filename
747        # globbed_targets = glob.glob(target, recursive=True)
748        # print(globbed_targets)
749
750        raise ValueError(f"{str(target)} is not a file or directory.")
751
752    return compilations

Given a direct target or a glob pattern, compiles all underlying sources and returns all the relevant instances of CryticCompile.

Args: target (str): A string representing a file/directory path or glob pattern denoting where compilation should occur. **kwargs: optional arguments. Used: "solc_standard_json"

Raises: ValueError: If the target could not be compiled

Returns: List[CryticCompile]: Returns a list of CryticCompile instances for all compilations which occurred.
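
A closing sketch; the "contracts/" directory is a placeholder, and solc_standard_json aggregates its .sol files into a single compilation instead of one per file:

    from crytic_compile.crytic_compile import compile_all

    # Hypothetical directory of .sol files; each resulting compilation is exported.
    for compilation in compile_all("contracts/", solc_standard_json=True):
        compilation.export()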