From 62fdcdc414df7bd9aa3fccfd9ea13312106ea19b Mon Sep 17 00:00:00 2001 From: "frederic.dymko" Date: Thu, 21 Sep 2023 11:02:48 +0200 Subject: [PATCH] Revert "Merge branch 'devel' into feat/topsis_risk_assessment" This reverts commit 98a3bcb2dd0a173574aab83777f36273c09a0148, reversing changes made to 8e26cfe88c6652565d0f15e5e8f43ca9d68a2b74. --- .env-example | 6 +- Dockerfile | 2 +- README.md | 3 +- configuration.py | 84 ++++++---------- connectors/git.py | 2 +- connectors/github.py | 46 +++------ connectors/gitlab.py | 123 ++++++++---------------- connectors/jira.py | 2 +- connectors/pylint/custom_ast_checker.py | 6 +- docs/docker.md | 8 -- main.py | 76 +-------------- requirements.txt | 2 +- 12 files changed, 98 insertions(+), 262 deletions(-) diff --git a/.env-example b/.env-example index 4b677e4..219d745 100644 --- a/.env-example +++ b/.env-example @@ -20,8 +20,6 @@ OTTM_CURRENT_BRANCH=devel OTTM_SOURCE_REPO_URL=https://github.com/dbeaver/dbeaver # Either "github" or "gitlab", other SCM are not yet supported OTTM_SOURCE_REPO_SCM=github -# Consider all tags as versions. If False, will only take releases -OTTM_SCM_USE_ALL_TAGS=False # SCM base URL - leave empty for public repo OTTM_SCM_BASE_URL= # Token to access github or gitlab @@ -68,8 +66,8 @@ OTTM_LEGACY_MINIMUM_DAYS=365 # The number of seconds to wait after a failed API call due to a limit of calls exceeded OTTM_RETRY_DELAY=3600 -# The url for Survey API -SURVEY_BACK_API_URL=http://localhost:8000/ +# The url for survey api +SURVEY_BACK_API_URL="http://localhost:8000/" # The name of the project for Survey API SURVEY_PROJECT_NAME= diff --git a/Dockerfile b/Dockerfile index 75f3f44..c7374a1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11-slim +FROM python:3.10-slim RUN useradd --create-home --shell /bin/bash optittm-user diff --git a/README.md b/README.md index 5db8fe0..8246c96 100644 --- a/README.md +++ b/README.md @@ -15,8 +15,7 @@ For each release the tool will: ### Prerequisites -- Python >= 3.11 to run BugPrediction or Docker -- A project in Java, PHP or Python +- A project in Java or PHP - Source code on GitHub or GitLab - Project releases and issues on GitHub, GitLab, Jira or other - To analyse a Java project, have Java >= 8 installed diff --git a/configuration.py b/configuration.py index 8d54bc7..4cd7851 100644 --- a/configuration.py +++ b/configuration.py @@ -8,54 +8,53 @@ AVAILABLE_SCM = ["github", "gitlab"] - class Configuration: + next_version_name = "Next Release" def __init__(self): + self.log_level = self.__get_log_level("OTTM_LOG_LEVEL") - self.code_maat_path = self.__get_external_tool("OTTM_CODE_MAAT_PATH") - self.code_ck_path = self.__get_external_tool("OTTM_CODE_CK_PATH") + self.code_maat_path = self.__get_external_tool("OTTM_CODE_MAAT_PATH") + self.code_ck_path = self.__get_external_tool("OTTM_CODE_CK_PATH") self.code_jpeek_path = self.__get_external_tool("OTTM_CODE_JPEEK_PATH") self.code_pdepend_path = self.__get_external_tool("OTTM_CODE_PDEPEND_PATH") - + self.language = os.getenv("OTTM_LANGUAGE", "") - self.scm_path = self.__get_executable("OTTM_SCM_PATH") + self.scm_path = self.__get_executable("OTTM_SCM_PATH") if self.language.lower() == "java": self.java_path = self.__get_executable("OTTM_JAVA_PATH") elif self.language.lower() == "php": self.php_path = self.__get_executable("OTTM_PHP_PATH") - + self.target_database = self.__get_required_value("OTTM_TARGET_DATABASE") self.source_repo_scm = self.__get_repo_scm("OTTM_SOURCE_REPO_SCM") - self.use_all_tags = 
self.__get_bool("OTTM_SCM_USE_ALL_TAGS", False) self.source_project = self.__get_required_value("OTTM_SOURCE_PROJECT") self.source_repo = self.__get_required_value("OTTM_SOURCE_REPO") self.current_branch = self.__get_required_value("OTTM_CURRENT_BRANCH") self.source_repo_url = self.__get_required_value("OTTM_SOURCE_REPO_URL") - self.source_bugs = self.__get_str_list("OTTM_SOURCE_BUGS") + self.source_bugs = self.__get_str_list("OTTM_SOURCE_BUGS") - self.survey_back_api_url = os.getenv("SURVEY_BACK_API_URL", "") + self.survey_back_api_url = os.getenv("SURVEY_BACK_API_URL","") self.survey_project_name = self.__get_str_list("SURVEY_PROJECT_NAME") - self.scm_base_url = os.getenv("OTTM_SCM_BASE_URL", "") - self.scm_token = os.getenv("OTTM_SCM_TOKEN", "") - self.scm_issues_labels = os.getenv("OTTM_SCM_ISSUES_LABELS", "") - - self.jira_base_url = os.getenv("OTTM_JIRA_BASE_URL", "") - self.jira_project = os.getenv("OTTM_JIRA_PROJECT", "") - self.jira_email = os.getenv("OTTM_JIRA_EMAIL", "") - self.jira_token = os.getenv("OTTM_JIRA_TOKEN", "") + self.scm_base_url = os.getenv("OTTM_SCM_BASE_URL", "") + self.scm_token = os.getenv("OTTM_SCM_TOKEN", "") + + self.jira_base_url = os.getenv("OTTM_JIRA_BASE_URL", "") + self.jira_project = os.getenv("OTTM_JIRA_PROJECT", "") + self.jira_email = os.getenv("OTTM_JIRA_EMAIL", "") + self.jira_token = os.getenv("OTTM_JIRA_TOKEN", "") self.jira_issue_type = self.__get_str_list("OTTM_JIRA_ISSUE_TYPE") self.glpi_categories = self.__get_str_list("OTTM_GLPI_CATEGORIES") - self.glpi_base_url = os.getenv("OTTM_GLPI_BASE_URL", "") - self.glpi_app_token = os.getenv("OTTM_GLPI_APP_TOKEN", "") + self.glpi_base_url = os.getenv("OTTM_GLPI_BASE_URL", "") + self.glpi_app_token = os.getenv("OTTM_GLPI_APP_TOKEN", "") self.glpi_user_token = os.getenv("OTTM_GLPI_USER_TOKEN", "") - self.glpi_username = os.getenv("OTTM_GLPI_USERNAME", "") - self.glpi_password = os.getenv("OTTM_GLPI_PASSWORD", "") + self.glpi_username = os.getenv("OTTM_GLPI_USERNAME", "") + self.glpi_password = os.getenv("OTTM_GLPI_PASSWORD", "") self.issue_tags = self.__get_str_list("OTTM_ISSUE_TAGS") self.exclude_issuers = self.__get_str_list("OTTM_EXCLUDE_ISSUERS") @@ -72,7 +71,7 @@ def __init__(self): self.insignificant_commits_message = self.__get_str_list("OTTM_COMMIT_BAD_MSG") self.retry_delay = self.__get_retry_delay("OTTM_RETRY_DELAY") - + self.legacy_percent = self.__get_float("OTTM_LEGACY_PERCENT", 20) self.legacy_minimum_days = self.__get_int("OTTM_LEGACY_MINIMUM_DAYS", 365) @@ -101,37 +100,27 @@ def __get_log_level(env_var): elif required_level == "NOTSET": log_level = logging.NOTSET else: - raise ConfigurationValidationException( - f"Invalid value for log level: {required_level}" - ) + raise ConfigurationValidationException(f"Invalid value for log level: {required_level}") return log_level @staticmethod def __get_external_tool(env_var): if env_var not in os.environ: - raise ConfigurationValidationException( - f"No external tool specified for ${env_var}" - ) + raise ConfigurationValidationException(f"No external tool specified for ${env_var}") file_path = os.environ[env_var] if not os.path.exists(file_path): - raise ConfigurationValidationException( - f"The following external tool was not found: {file_path}" - ) + raise ConfigurationValidationException(f"The following external tool was not found: {file_path}") return file_path @staticmethod def __get_executable(env_var): if env_var not in os.environ: - raise ConfigurationValidationException( - f"No executable specified for ${env_var}" - ) + raise 
ConfigurationValidationException(f"No executable specified for ${env_var}") executable = os.environ[env_var] executable_found = shutil.which(executable) if not executable_found: - raise ConfigurationValidationException( - f"The following executable was not found: {executable}" - ) + raise ConfigurationValidationException(f"The following executable was not found: {executable}") return executable @staticmethod @@ -141,8 +130,8 @@ def __get_repo_scm(env_var) -> str: raise ConfigurationValidationException(f"No source code manager specified") if repo_scm not in AVAILABLE_SCM: raise ConfigurationValidationException( - f"The following source code manager is not handled by OTTM : {repo_scm}." - + f" Availables SCM are : {AVAILABLE_SCM}" + f"The following source code manager is not handled by OTTM : {repo_scm}." +\ + f" Availables SCM are : {AVAILABLE_SCM}" ) return repo_scm @@ -175,7 +164,7 @@ def __get_float(env_var, default): ) return parsed_float - @staticmethod + @staticmethod def __get_int(env_var, default): parsed_value = os.getenv(env_var) if parsed_value is None: @@ -205,18 +194,3 @@ def __get_required_value(env_var): if not value: raise ConfigurationValidationException(f"Value for {env_var} is required") return value - - @staticmethod - def __get_bool(env_var, default): - parsed_value = os.getenv(env_var) - if parsed_value is None: - return default - if parsed_value.lower() == "true": - parsed_bool = True - elif parsed_value.lower() == "false": - parsed_bool = False - else: - raise ConfigurationValidationException( - f"Incorrect value : {parsed_value}, {env_var} should be a boolean" - ) - return parsed_bool diff --git a/connectors/git.py b/connectors/git.py index 96f3c81..4db0dbd 100644 --- a/connectors/git.py +++ b/connectors/git.py @@ -225,5 +225,5 @@ def _get_issues(self, since, labels): raise NotImplementedError @abstractmethod - def _get_git_versions(self, all, order_by, sort): + def _get_releases(self, all, order_by, sort): raise NotImplementedError diff --git a/connectors/github.py b/connectors/github.py index 7edf836..a1a5786 100644 --- a/connectors/github.py +++ b/connectors/github.py @@ -1,6 +1,5 @@ import logging from time import sleep, time -from typing import List, Union import github from sqlalchemy import desc, update @@ -9,9 +8,6 @@ from models.issue import Issue from models.version import Version from github import Github -from github.GitRelease import GitRelease -from github.Tag import Tag -from github.PaginatedList import PaginatedList import datetime from connectors.git import GitConnector from utils.timeit import timeit @@ -36,14 +32,12 @@ def _get_issues(self, since=None, labels=None): labels = github.GithubObject.NotSet try: - return self.remote.get_issues(state="all", labels=labels) + return self.remote.get_issues(state="all", since=since, labels=labels) except github.GithubException.RateLimitExceededException: sleep(self.configuration.retry_delay) self._get_issues(since, labels) - def _get_git_versions( - self, all=None, order_by=None, sort=None - ) -> List[Union[GitRelease, Tag]]: + def _get_releases(self, all=None, order_by=None, sort=None): if not all: all = None if not order_by: @@ -52,13 +46,10 @@ def _get_git_versions( sort = None try: - if self.configuration.use_all_tags: - return self.remote.get_tags() - else: - return self.remote.get_releases() + return self.remote.get_releases() except github.GithubException.RateLimitExceededException: sleep(self.configuration.retry_delay) - self._get_git_versions(all, order_by, sort) + self._get_releases(all, order_by, 
sort) @timeit def create_issues(self): @@ -77,7 +68,7 @@ def create_issues(self): ) if last_issue is not None: # Update existing database by fetching new issues - if len(self.configuration.issue_tags) == 0: + if not self.configuration.issue_tags: git_issues = self._get_issues( since=last_issue.updated_at + datetime.timedelta(seconds=1) ) @@ -88,7 +79,7 @@ def create_issues(self): ) # e.g. Filter by labels=['bug'] else: # Create a database with all issues - if len(self.configuration.issue_tags) == 0: + if not self.configuration.issue_tags: git_issues = self._get_issues() else: git_issues = self._get_issues( @@ -138,7 +129,7 @@ def create_versions(self): Create versions into the database from GitHub tags """ logging.info("GitHubConnector: create_versions") - git_versions = self._get_git_versions() + releases = self._get_releases() self._clean_project_existing_versions() versions = [] @@ -148,34 +139,25 @@ def create_versions(self): forks = list(self.remote.get_forks()) subscribers = list(self.remote.get_subscribers()) - for v in git_versions.reversed: - if type(v) is GitRelease: - v_name = v.title - v_tag = v.tag_name - v_end_date = v.published_at - elif type(v) is Tag: - v_name = v.name - v_tag = v.name - v_end_date = v.commit.commit.committer.date - + for release in releases.reversed: # Set UTC Timezone for previous release and release published_at when they are None if previous_release_published_at.tzinfo is None: previous_release_published_at = ( previous_release_published_at.astimezone(datetime.timezone.utc) ) - if v_end_date.tzinfo is None: - release_published_at_timezone = v_end_date.astimezone( + if release.published_at.tzinfo is None: + release_published_at_timezone = release.published_at.astimezone( datetime.timezone.utc ) versions.append( Version( project_id=self.project_id, - name=v_name, - tag=v_tag, + name=release.title, + tag=release.tag_name, start_date=previous_release_published_at, - end_date=v_end_date, + end_date=release.published_at, stars=len( list( filter( @@ -222,7 +204,7 @@ def create_versions(self): ), ) ) - previous_release_published_at = v_end_date + previous_release_published_at = release.published_at # Put current branch at the end of the list # Set UTC Timezone for previous release published_at when it's None diff --git a/connectors/gitlab.py b/connectors/gitlab.py index 189d05a..6249e1c 100644 --- a/connectors/gitlab.py +++ b/connectors/gitlab.py @@ -1,6 +1,5 @@ import logging from time import sleep -from typing import List, Union import gitlab from sqlalchemy import desc, update @@ -11,11 +10,8 @@ from utils.timeit import timeit from connectors.git import GitConnector from gitlab import Gitlab -from gitlab.v4.objects.tags import ProjectTag -from gitlab.v4.objects.releases import ProjectRelease from datetime import datetime, timedelta - class GitLabConnector(GitConnector): """ Connector to GitLab @@ -24,25 +20,18 @@ class GitLabConnector(GitConnector): ----------- - base_url URL to GitLab, empty if gitlab.com """ - - def __init__( - self, project_id, directory, base_url, token, repo, current, session, config - ): - GitConnector.__init__( - self, project_id, directory, token, repo, current, session, config - ) + def __init__(self, project_id, directory, base_url, token, repo, current, session, config): + GitConnector.__init__(self, project_id, directory, token, repo, current, session, config) if not base_url and not self.token: logging.info("anonymous read-only access for public resources (GitLab.com)") self.api = Gitlab() if base_url and self.token: - 
logging.info( - "private token or personal token authentication (self-hosted GitLab instance)" - ) + logging.info("private token or personal token authentication (self-hosted GitLab instance)") self.api = Gitlab(url=base_url, private_token=self.token) if not base_url and self.token: logging.info("private token or personal token authentication (GitLab.com)") self.api = Gitlab(private_token=self.token) - + # Check the authentification. Doesn't work for public read only access if base_url or self.token: self.api.auth() @@ -56,85 +45,69 @@ def _get_issues(self, since=None, labels=None): labels = None try: - return self.remote.issues.list( - state="all", since=since, with_labels_details=labels, get_all=True - ) + return self.remote.issues.list(state="all", since=since, with_labels_details=labels, get_all=True) except gitlab.GitlabJobRetryError: sleep(self.configuration.retry_delay) self._get_issues(since, labels) - def _get_git_versions(self, all=None, order_by=None, sort=None) -> List[Union[ProjectRelease, ProjectTag]]: + def _get_releases(self, all, order_by, sort): if not all: all = None if not order_by: order_by = None if not sort: sort = None - + try: - if self.configuration.use_all_tags: - return self.remote.tags.list(all=all, order_by=order_by, sort=sort) - else: - return self.remote.releases.list(all=all, order_by=order_by, sort=sort) + return self.remote.releases.list(all=all, order_by=order_by,sort=sort) except gitlab.GitlabJobRetryError: sleep(self.configuration.retry_delay) - self._get_git_versions(all, order_by, sort) + self._get_releases(all, order_by, sort) @timeit def create_issues(self): """ Create issues into the database from GitLab Issues """ - logging.info("GitLabConnector: create_issues") + logging.info('GitLabConnector: create_issues') # Check if a database already exist - last_issue = ( - self.session.query(Issue) - .filter(Issue.project_id == self.project_id) - .filter(Issue.source == "git") - .order_by(desc(Issue.updated_at)) - .first() - ) + last_issue = self.session.query(Issue) \ + .filter(Issue.project_id == self.project_id) \ + .filter(Issue.source == 'git') \ + .order_by(desc(Issue.updated_at)).first() if last_issue is not None: # Update existing database by fetching new issues - if len(self.configuration.issue_tags) == 0: - git_issues = self._get_issues( - since=last_issue.updated_at + timedelta(seconds=1), labels=None - ) + if not self.configuration.issue_tags: + git_issues = self._get_issues(since=last_issue.updated_at + timedelta(seconds=1), labels=None) else: - git_issues = self._get_issues( - since=last_issue.updated_at + timedelta(seconds=1), - labels=self.configuration.issue_tags, - ) # e.g. Filter by labels=['bug'] + git_issues = self._get_issues(since=last_issue.updated_at + timedelta(seconds=1), + labels=self.configuration.issue_tags) # e.g. Filter by labels=['bug'] else: # Create a database with all issues - if len(self.configuration.issue_tags) == 0: + if not self.configuration.issue_tags: git_issues = self._get_issues(since=None, labels=None) else: - git_issues = self._get_issues( - labels=self.configuration.issue_tags - ) # e.g. Filter by labels=['bug'] - + git_issues = self._get_issues(labels=self.configuration.issue_tags) # e.g. 
Filter by labels=['bug']
+        
         # versions = self.session.query(Version).all
         logging.info('Syncing ' + str(len(git_issues)) + ' issue(s) from GitLab')
         new_bugs = []
         # for version in versions:
         for issue in git_issues:
             # Check if the issue is linked to a selected version (included or not)
             # if version.end_date > issue.created_at > version.start_date:
-            if issue.author["username"] not in self.configuration.exclude_issuers:
+            if issue.author['username'] not in self.configuration.exclude_issuers:
+
                 updated_issue_date = date_iso_8601_to_datetime(issue.updated_at)
                 existing_issue_id = self._get_existing_issue_id(issue.iid)
                 if existing_issue_id:
-                    logging.info(
-                        "Issue %s already exists, updating it", existing_issue_id
-                    )
+                    logging.info("Issue %s already exists, updating it", existing_issue_id)
                     self.session.execute(
-                        update(Issue)
-                        .where(Issue.issue_id == existing_issue_id)
-                        .values(title=issue.title, updated_at=updated_issue_date)
+                        update(Issue).where(Issue.issue_id == existing_issue_id) \
+                        .values(title=issue.title, updated_at=updated_issue_date)
                     )
                 else:
                     new_bugs.append(
@@ -150,47 +123,31 @@ def create_issues(self):
 
         self.session.add_all(new_bugs)
         self.session.commit()
-
+        
     @timeit
     def create_versions(self):
         """
         Create versions into the database from GitLab releases
         """
         logging.info('GitLabConnector: create_versions')
-        if self.configuration.use_all_tags:
-            git_versions = self._get_git_versions(all=True, order_by="updated", sort="asc")
-        else:
-            git_versions = self._get_git_versions(all=True, order_by="released_at", sort="asc")
+        releases = self._get_releases(all=True, order_by="released_at", sort="asc")
         self._clean_project_existing_versions()
 
         versions = []
         previous_release_published_at = self._get_first_commit_date()
-        for v in git_versions:
-            if type(v) is ProjectRelease:
-                release_published_at = date_iso_8601_to_datetime(v.released_at)
-                versions.append(
-                    Version(
-                        project_id=self.project_id,
-                        name=v.name,
-                        tag=v.tag_name,
-                        start_date=previous_release_published_at,
-                        end_date=release_published_at,
-                    )
+        for release in releases:
+            release_published_at = date_iso_8601_to_datetime(release.released_at)
+            versions.append(
+                Version(
+                    project_id=self.project_id,
+                    name=release.name,
+                    tag=release.tag_name,
+                    start_date=previous_release_published_at,
+                    end_date=release_published_at,
                 )
-                previous_release_published_at = release_published_at
-            elif type(v) is ProjectTag:
-                release_published_at = date_iso_8601_to_datetime(v.commit["committed_date"])
-                versions.append(
-                    Version(
-                        project_id=self.project_id,
-                        name=v.name,
-                        tag=v.name,
-                        start_date=previous_release_published_at,
-                        end_date=release_published_at,
-                    )
-                )
-                previous_release_published_at = release_published_at
+            )
+            previous_release_published_at = release_published_at
 
         # Put current branch at the end of the list
         versions.append(
diff --git a/connectors/jira.py b/connectors/jira.py
index 514e63f..d701de4 100644
--- a/connectors/jira.py
+++ b/connectors/jira.py
@@ -68,7 +68,7 @@ def __get_builded_jql_query(self, updated_after: datetime) -> str:
 
         jql_query = f'project={self.config.jira_project}'
 
-        if len(self.config.issue_tags) > 0:
+        if self.config.issue_tags:
             labels_as_string = ",".join(self.config.issue_tags)
             jql_query += f' AND labels IN ({labels_as_string})'
 
diff --git a/connectors/pylint/custom_ast_checker.py b/connectors/pylint/custom_ast_checker.py
index 8c9a2c3..c316160 100644
--- a/connectors/pylint/custom_ast_checker.py
+++ 
b/connectors/pylint/custom_ast_checker.py @@ -1,6 +1,7 @@ import astroid from astroid.exceptions import InferenceError from pylint.checkers import BaseChecker +from pylint import interfaces from connectors.pylint.custom_linter import CustomLinter from utils.math import Math @@ -21,6 +22,9 @@ class CustomAstChecker(BaseChecker): class_method_calls (dict): A dictionary containing the call information of each method in a class. """ + # In python 3.8 implements the astroidChecker interface + __implements__ = interfaces.IAstroidChecker + # These properties have to be defined # or the linter fink is a malformed checker name = 'class-visitor' @@ -375,7 +379,7 @@ def count_docstring(self, node: astroid) -> None: Returns: None """ - if node.doc_node: + if node.doc: self.data.num_docstring += 1 # Compute the DIT for a simple class diff --git a/docs/docker.md b/docs/docker.md index feb30e7..a5c8e80 100644 --- a/docs/docker.md +++ b/docs/docker.md @@ -8,14 +8,6 @@ To build your latest docker image run the following command : docker build -t optittm/bugprediction:latest . ``` -## Run unit tests - -To run the unit tests from the image, execute the command : - -``` -docker run --rm --name bugprediction --entrypoint python optittm/bugprediction -m unittest discover tests -``` - ## Launch container with this image Execute **bugprediction** command by using docker thanks to this command on Linux: diff --git a/main.py b/main.py index 2dc2ec4..b387737 100644 --- a/main.py +++ b/main.py @@ -394,7 +394,7 @@ def populate( elif source_bugs.strip() == 'glpi': glpi: GlpiConnector = glpi_connector_provider(project.project_id) - # survey = survey_connector_provider() + survey = survey_connector_provider() # Checkout, execute the tool and inject CSV result into the database # with tempfile.TemporaryDirectory() as tmp_dir: @@ -418,8 +418,8 @@ def populate( # if we use code maat git.setup_aliases(configuration.author_alias) git.populate_db(skip_versions) - # survey.populate_comments() - + survey.populate_comments() + # List the versions and checkout each one of them versions = session.query(Version).filter(Version.project_id == project.project_id).all() restrict_folder = RestrictFolder(versions, configuration) @@ -598,41 +598,6 @@ def display_topsis_weight(): return output -@inject -def display_topsis_weight(): - """ - Perform TOPSIS analysis on a dataset. - - Args: - session (Session, optional): The database session. Defaults to the provided container session. - configuration (Configuration, optional): The configuration settings for TOPSIS analysis. Defaults to the provided container configuration. - - Returns: - dict: A dictionary containing the weights of alternatives after TOPSIS analysis. - - Note: - This command performs the TOPSIS (Technique for Order of Preference by Similarity to Ideal Solution) analysis on a given dataset to determine the weights of alternatives based on criteria and their corresponding weights provided in the configuration. - - Raises: - Various exceptions from CriterionParser and AlternativesParser classes: - - InvalidCriterionError: If an invalid criterion name is encountered. - - MissingWeightError: If some criteria are missing weights. - - NoCriteriaProvidedError: If no criteria are provided. - - InvalidAlternativeError: If an invalid alternative name is encountered. - - NoAlternativeProvidedError: If no alternatives are provided. 
- """ - output = topsis() - - # Display the weights of alternatives - print("**********************") - print("* ALTERNATIVES WEIGHTS *") - print("**********************") - for key, value in output.items(): - print("* " + key + " : ", value) - print("**********************") - - return output - @inject def topsis( session = Provide[Container.session], @@ -692,31 +657,6 @@ def topsis( alternative_names = configuration.topsis_alternatives - try: - alternatives = alternative_parser.parse_alternatives(alternative_names) - except (InvalidAlternativeError, NoAlternativeProvidedError) as e: - print(f"Error: {e}") - return - - # Prepare data for alternatives - alternative_data = {} - for alternative in alternatives: - data = alternative.get_data(df) - alternative_data[alternative.get_name()] = preprocessing.normalize(data) - criteria_parser = CriterionParser() - alternative_parser = AlternativesParser() - - criteria_names = configuration.topsis_criteria - criteria_weights = configuration.topsis_weigths - - try: - criteria = criteria_parser.parse_criteria(criteria_names, criteria_weights) - except (InvalidCriterionError, MissingWeightError, NoCriteriaProvidedError) as e: - print(f"Error: {e}") - return - - alternative_names = configuration.topsis_alternatives - try: alternatives = alternative_parser.parse_alternatives(alternative_names) except (InvalidAlternativeError, NoAlternativeProvidedError) as e: @@ -732,21 +672,14 @@ def topsis( # Create the decision matrix decision_matrix_builder = mt.Math.DecisionMatrixBuilder() - # Add criteria to the decision matrix - for criterion in criteria: - decision_matrix_builder.add_criteria(criterion.get_data(df), criterion.get_name()) # Add criteria to the decision matrix for criterion in criteria: decision_matrix_builder.add_criteria(criterion.get_data(df), criterion.get_name()) # Add alternatives to the decision matrix - for alternative in alternatives: - decision_matrix_builder.add_alternative(alternative.get_data(df), alternative.get_name()) - # Add alternatives to the decision matrix for alternative in alternatives: decision_matrix_builder.add_alternative(alternative.get_data(df), alternative.get_name()) - # Set correlation methods if provided in the configuration # Set correlation methods if provided in the configuration methods = [] for method in configuration.topsis_corr_method: @@ -754,7 +687,6 @@ def topsis( if len(methods) > 0: decision_matrix_builder.set_correlation_methods(methods) - # Build the decision matrix # Build the decision matrix decision_matrix = decision_matrix_builder.build() @@ -763,8 +695,6 @@ def topsis( decision_matrix, [criterion.get_weight() for criterion in criteria], [criterion.get_direction() for criterion in criteria] - [criterion.get_weight() for criterion in criteria], - [criterion.get_direction() for criterion in criteria] ) ts.topsis() diff --git a/requirements.txt b/requirements.txt index 6c35090..29ea718 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,5 +20,5 @@ kneed~=0.8.1 glpi-api~=0.3.5 jira~=3.4.1 radon~=5.1.0 -pylint~=2.17.5 +pylint==2.7.4 semver~=3.0.0
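
Note on the `_get_issues` hunk in connectors/github.py above: the revert restores the `since` argument to PyGithub's `Repository.get_issues`, so an incremental sync fetches only issues updated after the newest issue already stored in the database. A minimal, standalone sketch of that restored call; the token is a placeholder (OTTM reads it from `OTTM_SCM_TOKEN`) and the repository is the sample one from `.env-example`, not part of this patch:

```
from datetime import datetime, timedelta, timezone

from github import Github

gh = Github("YOUR_GITHUB_TOKEN")  # placeholder token
repo = gh.get_repo("dbeaver/dbeaver")  # sample repo from .env-example

# Only issues updated after `since` are returned, mirroring the patch's
# since=last_issue.updated_at + datetime.timedelta(seconds=1) pattern.
since = datetime.now(timezone.utc) - timedelta(days=7)
for issue in repo.get_issues(state="all", since=since):
    print(issue.number, issue.title)
```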