Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Update yarn lock parser #3931 #3943

Merged
merged 6 commits into from
Oct 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -886,6 +886,8 @@ Package detection:

- For Pypi packages, python_requires is treated as a package dependency.

- Update JavaScript package handlers to handle aliases in npm and yarn manifests.


License Clarity Scoring Update:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Expand Down
44 changes: 36 additions & 8 deletions src/packagedcode/npm.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,7 @@ def assemble(cls, package_data, resource, codebase, package_adder):

@classmethod
def yield_npm_dependencies_and_resources(cls, package_resource, package_data, package_uid, codebase, package_adder):

# in all cases yield possible dependencies
yield from yield_dependencies_from_package_data(package_data, package_resource.path, package_uid)

Expand Down Expand Up @@ -276,7 +276,9 @@ def create_packages_from_workspaces(
if npm_res.for_packages:
continue

npm_res.for_packages = workspace_package_uids
for package_uid in workspace_package_uids:
package_adder(package_uid, npm_res, codebase)

npm_res.save(codebase)

@classmethod
Expand Down Expand Up @@ -374,6 +376,18 @@ def update_dependencies_by_purl(
if '_' in metadata:
requirement, _extra = metadata.split('_')

if ':' in requirement and '@' in requirement:
# dependencies with requirements like this are aliases and should be reported
aliased_package, _, constraint = requirement.rpartition('@')
_, _, aliased_package_name = aliased_package.rpartition(':')
sdns, _ , sdname = aliased_package_name.rpartition('/')
dep_purl = PackageURL(
type=cls.default_package_type,
namespace=sdns,
name=sdname
).to_string()
requirement = constraint

dep_package = models.DependentPackage(
purl=dep_purl,
scope=scope,
Expand Down Expand Up @@ -424,7 +438,7 @@ def get_workspace_members(cls, workspaces, codebase, workspace_root_path):
workspace_members.append(resource)

# Case 3: This is a complex glob pattern, we are doing a full codebase walk
# and glob matching each resource
# and glob matching each resource
else:
for resource in workspace_root_path:
if NpmPackageJsonHandler.is_datafile(resource.location) and fnmatch.fnmatch(
Expand Down Expand Up @@ -469,7 +483,7 @@ def update_workspace_members(cls, workspace_members, codebase):
workspace_package_versions_by_base_purl[base_purl] = version

# Update workspace member package information from
# workspace level data
# workspace level data
for base_purl, dependency in workspace_dependencies_by_base_purl.items():
extracted_requirement = dependency.get('extracted_requirement')
if 'workspace' in extracted_requirement:
Expand Down Expand Up @@ -1011,6 +1025,13 @@ def parse(cls, location, package_only=False):
if '"' in ns_name:
ns_name = ns_name.replace('"', '')
ns, _ , name = ns_name.rpartition('/')

if ':' in constraint and '@' in constraint:
# dependencies with requirements like this are aliases and should be reported
aliased_package, _, constraint = constraint.rpartition('@')
_, _, aliased_package_name = aliased_package.rpartition(':')
ns, _ , name = aliased_package_name.rpartition('/')

sub_dependencies.append((ns, name, constraint,))

elif line.startswith(' ' * 2):
Expand Down Expand Up @@ -1112,7 +1133,7 @@ def parse(cls, location, package_only=False):
resolved_package=resolved_package_data.to_dict(),
)

if not dep_purl in dependencies_by_purl:
if not dep_purl in dependencies_by_purl:
dependencies_by_purl[dep_purl] = dep.to_dict()
else:
# FIXME: We have duplicate dependencies because of aliases
Expand Down Expand Up @@ -1176,7 +1197,7 @@ def parse(cls, location, package_only=False):
_, name_version = sections
elif len(sections) == 3:
_, namespace, name_version = sections

name, version = name_version.split("@")
elif major_v == "5" or is_shrinkwrap:
if len(sections) == 3:
Expand Down Expand Up @@ -1264,7 +1285,7 @@ def parse(cls, location, package_only=False):
for key in extra_data_fields:
value = data.get(key, None)
if value is not None:
extra_data_deps[key] = value
extra_data_deps[key] = value

dependency_data = models.DependentPackage(
purl=purl,
Expand Down Expand Up @@ -1762,14 +1783,21 @@ def deps_mapper(deps, package, field_name, is_direct=True):
deps_by_name[npm_name] = d

for fqname, requirement in deps.items():
# Handle cases in ``resolutions`` with ``**``
# Handle cases in ``resolutions`` with ``**``
# "resolutions": {
# "**/@typescript-eslint/eslint-plugin": "^4.1.1",
if fqname.startswith('**'):
fqname = fqname.replace('**', '')
ns, name = split_scoped_package_name(fqname)
if not name:
continue

if ':' in requirement and '@' in requirement:
# dependencies with requirements like this are aliases and should be reported
aliased_package, _, requirement = requirement.rpartition('@')
_, _, aliased_package_name = aliased_package.rpartition(':')
ns, _ , name = aliased_package_name.rpartition('/')

purl = PackageURL(type='npm', namespace=ns, name=name).to_string()

# optionalDependencies override the dependencies with the same name
Expand Down
57 changes: 57 additions & 0 deletions tests/packagedcode/data/npm/alias/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

149 changes: 149 additions & 0 deletions tests/packagedcode/data/npm/alias/package-lock.json.expected

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading