
Commit

Fix tests
Signed-off-by: Shivam Sandbhor <[email protected]>
sbs2001 committed Jun 8, 2021
1 parent a442302 commit 1cdb873
Showing 26 changed files with 615 additions and 273 deletions.
4 changes: 2 additions & 2 deletions vulnerabilities/importers/apache_httpd.py
@@ -106,7 +106,7 @@ def to_advisory(self, data):
fixed_packages.extend(
[
PackageURL(type="apache", name="httpd", version=version)
for version in self.version_api.get("apache/httpd")
for version in self.version_api.get("apache/httpd")["valid"]
if MavenVersion(version) in version_range
]
)
@@ -115,7 +115,7 @@ def to_advisory(self, data):
affected_packages.extend(
[
PackageURL(type="apache", name="httpd", version=version)
for version in self.version_api.get("apache/httpd")
for version in self.version_api.get("apache/httpd")["valid"]
if MavenVersion(version) in version_range
]
)
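Every importer hunk in this commit indexes the result of version_api.get(...) with ["valid"], and the ruby.py change below also reads a "new" key, which suggests the version APIs now return a mapping split by release date against an optional until cutoff rather than a bare collection of version strings. The following is only a sketch of that assumed contract, written against the Version dataclass introduced later in this diff; it is not the actual code from vulnerabilities/package_managers.py.

# Sketch of the assumed VersionAPI.get() contract -- not the code from this commit.
from dataclasses import dataclass
from datetime import datetime
from typing import Optional

@dataclass(frozen=True)
class Version:  # mirrors the dataclass added in vulnerabilities/package_managers.py below
    value: str
    release_date: Optional[datetime] = None

def get_versions(cache: dict, package_name: str, until: Optional[datetime] = None) -> dict:
    """Split cached Version objects into those released by `until` and those released after it."""
    valid, new = set(), set()
    for version in cache.get(package_name, set()):
        if until and version.release_date and version.release_date > until:
            new.add(version.value)    # released after the advisory being processed
        else:
            valid.add(version.value)  # usable for matching against affected/fixed ranges
    return {"valid": valid, "new": new}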
4 changes: 2 additions & 2 deletions vulnerabilities/importers/apache_kafka.py
@@ -72,7 +72,7 @@ def to_advisory(self, advisory_page):

fixed_packages = [
PackageURL(type="apache", name="kafka", version=version)
for version in self.version_api.get("apache/kafka")
for version in self.version_api.get("apache/kafka")["valid"]
if any(
[
MavenVersion(version) in version_range
@@ -83,7 +83,7 @@ def to_advisory(self, advisory_page):

affected_packages = [
PackageURL(type="apache", name="kafka", version=version)
for version in self.version_api.get("apache/kafka")
for version in self.version_api.get("apache/kafka")["valid"]
if any(
[
MavenVersion(version) in version_range
6 changes: 4 additions & 2 deletions vulnerabilities/importers/apache_tomcat.py
@@ -62,7 +62,7 @@ def updated_advisories(self):
return self.batch_advisories(advisories)

def fetch_pages(self):
tomcat_major_versions = {i[0] for i in self.version_api.get("org.apache.tomcat:tomcat")}
tomcat_major_versions = {
i[0] for i in self.version_api.get("org.apache.tomcat:tomcat")["valid"]
}
for version in tomcat_major_versions:
page_url = self.base_url.format(version)
if create_etag(self, page_url, "ETag"):
@@ -102,7 +104,7 @@ def to_advisories(self, apache_tomcat_advisory_html):
PackageURL(
type="maven", namespace="apache", name="tomcat", version=version
)
for version in self.version_api.get("org.apache.tomcat:tomcat")
for version in self.version_api.get("org.apache.tomcat:tomcat")["valid"]
if MavenVersion(version) in version_range
]
)
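Assuming "valid" holds plain version strings, the reworked set comprehension in fetch_pages() keeps the first character of each string as the Tomcat major version used to format the advisory page URL. A tiny illustration with made-up versions:

# Illustration of the major-version extraction in fetch_pages(); versions are made up.
valid_versions = {"7.0.99", "8.5.23", "9.0.37"}
tomcat_major_versions = {version[0] for version in valid_versions}
print(tomcat_major_versions)  # {'7', '8', '9'} -- one advisory page URL is built per entry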
4 changes: 3 additions & 1 deletion vulnerabilities/importers/nginx.py
@@ -171,7 +171,9 @@ def extract_vuln_pkgs(self, vuln_info):
)
)

valid_versions = find_valid_versions(self.version_api.get("nginx/nginx"), version_ranges)
valid_versions = find_valid_versions(
self.version_api.get("nginx/nginx")["valid"], version_ranges
)
qualifiers = {}
if windows_only:
qualifiers["os"] = "windows"
2 changes: 1 addition & 1 deletion vulnerabilities/importers/npm.py
@@ -88,7 +88,7 @@ def process_file(self, file) -> List[Advisory]:
publish_date = parse(record["updated_at"])
publish_date.replace(tzinfo=pytz.UTC)

all_versions = self.versions.get(package_name, until=publish_date)
all_versions = self.versions.get(package_name, until=publish_date)["valid"]
aff_range = record.get("vulnerable_versions")
if not aff_range:
aff_range = ""
2 changes: 0 additions & 2 deletions vulnerabilities/importers/project_kb_msr2019.py
@@ -22,12 +22,10 @@

import csv
import dataclasses
import re
import urllib.request

# Reading CSV file from a url using `requests` is bit too complicated.
# Use `urllib.request` for that purpose.
from packageurl import PackageURL


from vulnerabilities.data_source import Advisory
8 changes: 7 additions & 1 deletion vulnerabilities/importers/ruby.py
@@ -23,6 +23,8 @@
import asyncio
from typing import Set
from typing import List
from dateutil.parser import parse
from pytz import UTC

from packageurl import PackageURL
from univers.version_specifier import VersionSpecifier
@@ -90,6 +92,7 @@ def process_file(self, path) -> List[Advisory]:
else:
return

publish_time = parse(record["date"]).replace(tzinfo=UTC)
safe_version_ranges = record.get("patched_versions", [])
# this case happens when the advisory contain only 'patched_versions' field
# and it has value None(i.e it is empty :( ).
@@ -100,7 +103,10 @@ def process_file(self, path) -> List[Advisory]:

if not getattr(self, "pkg_manager_api", None):
self.pkg_manager_api = RubyVersionAPI()
all_vers = self.pkg_manager_api.get(package_name)
all_vers = self.pkg_manager_api.get(package_name, until=publish_time)["valid"]
print(
f"Ignored {len(self.pkg_manager_api.get(package_name,until=publish_time)['new'])} versions"
)
safe_versions, affected_versions = self.categorize_versions(all_vers, safe_version_ranges)

impacted_purls = [
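The ruby.py importer now passes the advisory's publish date as until, so releases that postdate the advisory end up under "new" and are only counted in the new log line, while categorize_versions() works on "valid". A short usage sketch reusing the Version and get_versions() stand-ins from earlier on this page; the gem name and dates are invented:

# Usage sketch of the until= cutoff; builds on the get_versions() sketch above.
from dateutil.parser import parse
from pytz import UTC

cache = {
    "nokogiri": {  # hypothetical gem: one release before the advisory, one after
        Version("1.10.0", parse("2019-01-08").replace(tzinfo=UTC)),
        Version("1.11.0", parse("2021-01-03").replace(tzinfo=UTC)),
    }
}
publish_time = parse("2020-06-01").replace(tzinfo=UTC)

result = get_versions(cache, "nokogiri", until=publish_time)
all_vers = result["valid"]                       # {"1.10.0"}
print(f"Ignored {len(result['new'])} versions")  # Ignored 1 versions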
2 changes: 1 addition & 1 deletion vulnerabilities/importers/safety_db.py
@@ -111,7 +111,7 @@ def updated_advisories(self) -> Set[Advisory]:
logger.error(e)
continue

all_package_versions = self.versions.get(package_name)
all_package_versions = self.versions.get(package_name)["valid"]
if not len(all_package_versions):
# PyPi does not have data about this package, we skip these
continue
162 changes: 81 additions & 81 deletions vulnerabilities/importers/suse_backports.py
@@ -1,95 +1,95 @@
# # Copyright (c) 2017 nexB Inc. and others. All rights reserved.
# # http://nexb.com and https://github.com/nexB/vulnerablecode/
# # The VulnerableCode software is licensed under the Apache License version 2.0.
# # Data generated with VulnerableCode require an acknowledgment.
# #
# # You may not use this software except in compliance with the License.
# # You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# # Unless required by applicable law or agreed to in writing, software distributed
# # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# # CONDITIONS OF ANY KIND, either express or implied. See the License for the
# # specific language governing permissions and limitations under the License.
# #
# # When you publish or redistribute any data created with VulnerableCode or any VulnerableCode
# # derivative work, you must accompany this data with the following acknowledgment:
# #
# # Generated with VulnerableCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# # OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# # VulnerableCode should be considered or used as legal advice. Consult an Attorney
# # for any legal advice.
# # VulnerableCode is a free software code scanning tool from nexB Inc. and others.
# # Visit https://github.com/nexB/vulnerablecode/ for support and download.
# import dataclasses
# Copyright (c) 2017 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/vulnerablecode/
# The VulnerableCode software is licensed under the Apache License version 2.0.
# Data generated with VulnerableCode require an acknowledgment.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with VulnerableCode or any VulnerableCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with VulnerableCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# VulnerableCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# VulnerableCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/vulnerablecode/ for support and download.
import dataclasses

# import requests
# import saneyaml
# from bs4 import BeautifulSoup
# from packageurl import PackageURL
import requests
import saneyaml
from bs4 import BeautifulSoup
from packageurl import PackageURL

# from vulnerabilities.data_source import Advisory
# from vulnerabilities.data_source import DataSource
# from vulnerabilities.data_source import DataSourceConfiguration
# from vulnerabilities.helpers import create_etag
from vulnerabilities.data_source import Advisory
from vulnerabilities.data_source import DataSource
from vulnerabilities.data_source import DataSourceConfiguration
from vulnerabilities.helpers import create_etag


# @dataclasses.dataclass
# class SUSEBackportsConfiguration(DataSourceConfiguration):
# url: str
# etags: dict
@dataclasses.dataclass
class SUSEBackportsConfiguration(DataSourceConfiguration):
url: str
etags: dict


# class SUSEBackportsDataSource(DataSource):
class SUSEBackportsDataSource(DataSource):

# CONFIG_CLASS = SUSEBackportsConfiguration
CONFIG_CLASS = SUSEBackportsConfiguration

# @staticmethod
# def get_all_urls_of_backports(url):
# r = requests.get(url)
# soup = BeautifulSoup(r.content, "lxml")
# for a_tag in soup.find_all("a", href=True):
# if a_tag["href"].endswith(".yaml") and a_tag["href"].startswith("backports"):
# yield url + a_tag["href"]
@staticmethod
def get_all_urls_of_backports(url):
r = requests.get(url)
soup = BeautifulSoup(r.content, "lxml")
for a_tag in soup.find_all("a", href=True):
if a_tag["href"].endswith(".yaml") and a_tag["href"].startswith("backports"):
yield url + a_tag["href"]

# def updated_advisories(self):
# advisories = []
# all_urls = self.get_all_urls_of_backports(self.config.url)
# for url in all_urls:
# if not create_etag(data_src=self, url=url, etag_key="ETag"):
# continue
# advisories.extend(self.process_file(self._fetch_yaml(url)))
# return self.batch_advisories(advisories)
def updated_advisories(self):
advisories = []
all_urls = self.get_all_urls_of_backports(self.config.url)
for url in all_urls:
if not create_etag(data_src=self, url=url, etag_key="ETag"):
continue
advisories.extend(self.process_file(self._fetch_yaml(url)))
return self.batch_advisories(advisories)

# def _fetch_yaml(self, url):
def _fetch_yaml(self, url):

# try:
# resp = requests.get(url)
# resp.raise_for_status()
# return saneyaml.load(resp.content)
try:
resp = requests.get(url)
resp.raise_for_status()
return saneyaml.load(resp.content)

# except requests.HTTPError:
# return {}
except requests.HTTPError:
return {}

# @staticmethod
# def process_file(yaml_file):
# advisories = []
# try:
# for pkg in yaml_file[0]["packages"]:
# for version in yaml_file[0]["packages"][pkg]["fixed"]:
# for vuln in yaml_file[0]["packages"][pkg]["fixed"][version]:
# # yaml_file specific data can be added
# purl = [
# PackageURL(name=pkg, type="rpm", version=version, namespace="opensuse")
# ]
# advisories.append(
# Advisory(
# vulnerability_id=vuln,
# resolved_package_urls=purl,
# summary="",
# impacted_package_urls=[],
# )
# )
# except TypeError:
# # could've used pass
# return advisories
@staticmethod
def process_file(yaml_file):
advisories = []
try:
for pkg in yaml_file[0]["packages"]:
for version in yaml_file[0]["packages"][pkg]["fixed"]:
for vuln in yaml_file[0]["packages"][pkg]["fixed"][version]:
# yaml_file specific data can be added
purl = [
PackageURL(name=pkg, type="rpm", version=version, namespace="opensuse")
]
advisories.append(
Advisory(
vulnerability_id=vuln,
resolved_package_urls=purl,
summary="",
impacted_package_urls=[],
)
)
except TypeError:
# could've used pass
return advisories

# return advisories
return advisories
8 changes: 0 additions & 8 deletions vulnerabilities/importers/ubuntu.py
@@ -25,19 +25,11 @@
import bz2
import dataclasses
import logging
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Set
import xml.etree.ElementTree as ET

from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientResponseError
import requests

from vulnerabilities.data_source import OvalDataSource, DataSourceConfiguration
from vulnerabilities.package_managers import LaunchpadVersionAPI
from vulnerabilities.helpers import create_etag

logger = logging.getLogger(__name__)

12 changes: 8 additions & 4 deletions vulnerabilities/package_managers.py
@@ -21,20 +21,24 @@
# Visit https://github.com/nexB/vulnerablecode/ for support and download.

import asyncio
from collections import namedtuple
import dataclasses
import pytz
from bs4 import BeautifulSoup
from dateutil import parser
from json import JSONDecodeError
from typing import Mapping
from typing import Set
from datetime import datetime

from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientResponseError
from aiohttp.client_exceptions import ServerDisconnectedError


Version = namedtuple("Version", field_names=["value", "release_date"])
@dataclasses.dataclass(frozen=True)
class Version:
value: str
release_date: datetime = None


class VersionAPI:
@@ -303,8 +307,8 @@ def extract_versions(soup: BeautifulSoup) -> Set[Version]:
pre_tag = soup.find("pre")
prev_tag = None
versions = set()
for atag in pre_tag:
if atag.name == "a" and atag["href"] != "../":
for i, atag in enumerate(pre_tag):
if atag.name == "a" and i != 0:
prev_tag = atag
elif prev_tag:
text_groups = atag.split()
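Replacing the Version namedtuple with a frozen dataclass keeps instances hashable, so they still fit in the cached sets, lets release_date default to None for sources that expose no dates, and gives value-based equality instead of positional tuple comparison. A quick illustration of that behavior, not taken from the test suite:

# Behavior of the new frozen Version dataclass (illustrative only).
from vulnerabilities.package_managers import Version  # as the updated tests below do

v1 = Version("1.3.2")          # release_date defaults to None
assert v1 == Version("1.3.2")  # frozen dataclass: value-based equality
cache = {"apache/httpd": {v1, Version("1.3.1"), Version("1.3.0")}}  # hashable, fits set caches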
3 changes: 2 additions & 1 deletion vulnerabilities/tests/test_apache_httpd.py
@@ -31,6 +31,7 @@
from vulnerabilities.data_source import Advisory
from vulnerabilities.data_source import VulnerabilitySeverity
from vulnerabilities.package_managers import GitHubTagsAPI
from vulnerabilities.package_managers import Version
from vulnerabilities.severity_systems import scoring_systems
from vulnerabilities.importers.apache_httpd import ApacheHTTPDDataSource
from vulnerabilities.helpers import AffectedPackage
@@ -44,7 +45,7 @@ class TestApacheHTTPDDataSource(TestCase):
def setUpClass(cls):
data_source_cfg = {"etags": {}}
cls.data_src = ApacheHTTPDDataSource(1, config=data_source_cfg)
known_versions = ["1.3.2", "1.3.1", "1.3.0"]
known_versions = [Version("1.3.2"), Version("1.3.1"), Version("1.3.0")]
cls.data_src.version_api = GitHubTagsAPI(cache={"apache/httpd": known_versions})
with open(TEST_DATA) as f:
cls.data = json.load(f)
5 changes: 4 additions & 1 deletion vulnerabilities/tests/test_apache_kafka.py
@@ -29,6 +29,7 @@
from vulnerabilities.data_source import Advisory
from vulnerabilities.data_source import Reference
from vulnerabilities.package_managers import GitHubTagsAPI
from vulnerabilities.package_managers import Version
from vulnerabilities.importers.apache_kafka import ApacheKafkaDataSource
from vulnerabilities.importers.apache_kafka import to_version_ranges
from vulnerabilities.helpers import AffectedPackage
@@ -63,7 +64,9 @@ def test_to_version_ranges(self):

def test_to_advisory(self):
data_source = ApacheKafkaDataSource(batch_size=1)
data_source.version_api = GitHubTagsAPI(cache={"apache/kafka": ["2.1.2", "0.10.2.2"]})
data_source.version_api = GitHubTagsAPI(
cache={"apache/kafka": [Version("2.1.2"), Version("0.10.2.2")]}
)
expected_advisories = [
Advisory(
summary="In Apache Kafka versions between 0.11.0.0 and 2.1.0, it is possible to manually\n craft a Produce request which bypasses transaction/idempotent ACL validation.\n Only authenticated clients with Write permission on the respective topics are\n able to exploit this vulnerability. Users should upgrade to 2.1.1 or later\n where this vulnerability has been fixed.",
(diff for the remaining changed files not shown)
