diff --git a/.gitlab/ci/.rules-conditions.yml b/.gitlab/ci/.rules-conditions.yml
new file mode 100644
index 0000000..c9d76bf
--- /dev/null
+++ b/.gitlab/ci/.rules-conditions.yml
@@ -0,0 +1,52 @@
+# When jobs should run
+.dev-staging-master:
+  rules:
+    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|staging|dev)$/'
+      when: on_success
+
+.on-master-staging:
+  rules:
+    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|staging)$/'
+      when: on_success
+
+.on-master:
+  rules:
+    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
+      when: on_success
+
+.on-community-tag:
+  rules:
+    - if: '$CI_COMMIT_TAG =~ /^white-v[0-9.]+$/'
+      when: on_success
+
+.pipeline-control-test:
+  rules:
+    - if: $FULL_TEST || $DAILY_TEST
+      when: on_success
+
+.be-built:
+  rules:
+    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
+      when: on_success
+
+.be-uploaded:
+  rules:
+    - if: '$BUILD_TEST || $FULL_TEST || $DAILY_TEST'
+      when: on_success
+
+
+# Ignore
+.ignore-on-tag:
+  rules:
+    - if: '$CI_COMMIT_TAG'
+      when: never
+
+.ignore-on-master:
+  rules:
+    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
+      when: never
+
+.ignore-on-build:
+  rules:
+    - if: $BUILD_TEST
+      when: never
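The hidden jobs above are consumed throughout the rest of this patch with GitLab's `!reference` tag, which splices the `rules` array of another job into place. Rule lists are evaluated top to bottom and the first match wins, so the `never` guards have to be listed before the permissive entries. A minimal sketch of the intended composition (the job name and script are illustrative, not part of this patch):

```yaml
example_build:                                # hypothetical consumer job
  stage: build
  script:
    - echo "building"
  rules:
    - !reference [.ignore-on-tag, rules]      # guard first: skip tag pipelines
    - !reference [.on-master-staging, rules]  # run on */master and */staging
    - when: never                             # fall-through: skip everything else
```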
diff --git a/.gitlab/ci/build-ci/.build-gitlab-ci.yml b/.gitlab/ci/build-ci/.build-gitlab-ci.yml
index 0747ee6..88db5e6 100644
--- a/.gitlab/ci/build-ci/.build-gitlab-ci.yml
+++ b/.gitlab/ci/build-ci/.build-gitlab-ci.yml
@@ -37,10 +37,8 @@
       - "faraday-server_amd64.deb"
     expire_in: 15 days
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [.on-master-staging, rules]
+    - !reference [.be-built, rules]
     - when: never
@@ -91,10 +89,8 @@
       - "faraday-server_amd64.rpm"
     expire_in: 15 days
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [.on-master-staging, rules]
+    - !reference [.be-built, rules]
     - when: never

 generate_docker_tar_gz:
@@ -115,7 +111,6 @@
     paths:
       - faraday-server-docker.tar.gz
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [.on-master-staging, rules]
+    - !reference [.be-built, rules]
+    - when: never
diff --git a/.gitlab/ci/build-ci/.prebuild-gitlab-ci.yml b/.gitlab/ci/build-ci/.prebuild-gitlab-ci.yml
index 9de3c82..1ea5d57 100644
--- a/.gitlab/ci/build-ci/.prebuild-gitlab-ci.yml
+++ b/.gitlab/ci/build-ci/.prebuild-gitlab-ci.yml
@@ -20,10 +20,8 @@
     - "/usr/bin/rsync -aq --exclude 'faraday_copy' --exclude '.cache' . faraday_copy"
     - "/bin/tar -zcf faraday.tar.gz faraday_copy"
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [.on-master-staging, rules]
+    - !reference [.be-built, rules]
     - when: never
   artifacts:
     name: 'faraday'
@@ -55,8 +53,6 @@
     - py3.tar
     expire_in: 15 days  # in the future we don't need to expire this.
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
-    - when: never
+    - !reference [.on-master-staging, rules]
+    - !reference [.be-built, rules]
+    - when: never
diff --git a/.gitlab/ci/build-ci/.testing-gitlab-ci.yml b/.gitlab/ci/build-ci/.testing-gitlab-ci.yml
index 86b094d..6b1bd01 100644
--- a/.gitlab/ci/build-ci/.testing-gitlab-ci.yml
+++ b/.gitlab/ci/build-ci/.testing-gitlab-ci.yml
@@ -23,8 +23,6 @@
     - kill $(cat ~faraday/.faraday/faraday-server-port-5985.pid)
     - jobs
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [.on-master-staging, rules]
+    - !reference [.be-built, rules]
     - when: never
diff --git a/.gitlab/ci/publish/.docker-publish-gitlab-ci.yml b/.gitlab/ci/publish/.docker-publish-gitlab-ci.yml
index 8e47a2d..9ef2cc4 100644
--- a/.gitlab/ci/publish/.docker-publish-gitlab-ci.yml
+++ b/.gitlab/ci/publish/.docker-publish-gitlab-ci.yml
@@ -14,8 +14,7 @@
     - docker image tag $CI_REGISTRY_IMAGE:latest $CI_REGISTRY_IMAGE:$VERSION
     - docker push "$CI_REGISTRY_IMAGE"
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
+    - !reference [ .on-master, rules ]
   needs:  # dev won't wait for any previous stage, it will deploy instantly, and
          # then run tests in docker image (To be done)
     - job: generate_docker_tar_gz
@@ -35,7 +34,6 @@
     - docker image tag $CI_REGISTRY_IMAGE:latest $CI_REGISTRY_IMAGE:$VERSION
     - docker push $CI_REGISTRY_IMAGE:$VERSION
   rules:
-    - if: '$CI_COMMIT_TAG =~ /^white-v[0-9.]+$/'
-      when: on_success
+    - !reference [ .on-community-tag, rules ]
   dependencies:  # prod will wait for any previous stage
     - generate_docker_tar_gz
diff --git a/.gitlab/ci/publish/.mirror-to-github-gitlab-ci.yml b/.gitlab/ci/publish/.mirror-to-github-gitlab-ci.yml
new file mode 100644
index 0000000..8b9be35
--- /dev/null
+++ b/.gitlab/ci/publish/.mirror-to-github-gitlab-ci.yml
@@ -0,0 +1,37 @@
+update_github:
+  image: python:3
+  stage: publish
+  script:
+    - git remote set-url github https://${GH_USER}:${GH_TOKEN}@github.com/infobyte/faraday.git
+    - git push github $CI_COMMIT_REF_NAME:$DESTINY_BRANCH
+  rules:
+    - if: '$CI_COMMIT_REF_NAME == "white/master"'
+      variables:
+        DESTINY_BRANCH: master
+      when: on_success
+    - if: '$CI_COMMIT_REF_NAME == "white/staging"'
+      variables:
+        DESTINY_BRANCH: staging
+      when: on_success
+  tags:
+    - faradaytests
+
+
+tag_on_github:
+  image: python:3
+  stage: publish
+  script:
+    - git remote set-url github https://${GH_USER}:${GH_TOKEN}@github.com/infobyte/faraday.git
+    - export FARADAY_VERSION=$(eval $IMAGE_TAG)
+    - CHANGELOG/check_pre_tag.py
+    - git push github $CI_COMMIT_TAG:master
+    - git tag v$FARADAY_VERSION -m "$(cat CHANGELOG/$FARADAY_VERSION/community.md)"
+    - git push github v$FARADAY_VERSION
+    - scripts/github_release.py --deb-file ./faraday-server_amd64.deb --rpm-file ./faraday-server_amd64.rpm
+  rules:
+    - !reference [ .on-community-tag, rules ]
+  dependencies:
+    - generate_deb
+    - generate_rpm
+  tags:
+    - faradaytests
diff --git a/.gitlab/ci/publish/.set-tag-gitlab-ci.yml b/.gitlab/ci/publish/.set-tag-gitlab-ci.yml
index 8fe7cab..7b5220e 100644
--- a/.gitlab/ci/publish/.set-tag-gitlab-ci.yml
+++ b/.gitlab/ci/publish/.set-tag-gitlab-ci.yml
@@ -1,22 +1,3 @@
-tag_on_github:
-  image: python:3
-  stage: publish
-  before_script:
-  script:
-    - git remote set-url github https://${GH_USER}:${GH_TOKEN}@github.com/infobyte/faraday.git
-    - export FARADAY_VERSION=$(eval $IMAGE_TAG)
-    - CHANGELOG/check_pre_tag.py
-    - git push github $CI_COMMIT_TAG:master
-    - git tag v$FARADAY_VERSION -m "$(cat CHANGELOG/$FARADAY_VERSION/white.md)"
-    - git push github v$FARADAY_VERSION
-    - scripts/github_release.py --deb-file ./faraday-server_amd64.deb --rpm-file ./faraday-server_amd64.rpm
-  rules:
-    - if: '$CI_COMMIT_TAG =~ /^white-v[0-9.]+$/'
-      when: on_success
-  dependencies:
-    - generate_deb
-    - generate_rpm
-
 publish_pypi:
   image: python:3
   stage: publish
@@ -27,5 +8,4 @@
     - python setup.py sdist bdist_wheel
     - twine upload -u $PYPI_USER -p $PYPI_PASS dist/* --verbose
   rules:
-    - if: '$CI_COMMIT_TAG =~ /^white-v[0-9.]+$/'
-      when: on_success
+    - !reference [ .on-community-tag, rules ]
diff --git a/.gitlab/ci/testing/.hypothesis-nix-gitlab-ci.yml b/.gitlab/ci/testing/.hypothesis-nix-gitlab-ci.yml
index d995095..6eefb24 100644
--- a/.gitlab/ci/testing/.hypothesis-nix-gitlab-ci.yml
+++ b/.gitlab/ci/testing/.hypothesis-nix-gitlab-ci.yml
@@ -20,6 +20,7 @@
     - mkdir run_from
     - nix-shell --command "pytest tests -v --cov=faraday/server/api --disable-warnings --connection-string=postgresql+psycopg2://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB -m hypothesis"
   rules:
-    - if: '$HYPO_TEST || $FULL_TEST || $DAILY_TEST'
+    - if: $HYPO_TEST
      when: on_success
+    - !reference [.pipeline-control-test, rules]
     - when: never
diff --git a/.gitlab/ci/testing/.nix-testing-gitlab-ci.yml b/.gitlab/ci/testing/.nix-testing-gitlab-ci.yml
index bb4a6c2..1dc6ef6 100644
--- a/.gitlab/ci/testing/.nix-testing-gitlab-ci.yml
+++ b/.gitlab/ci/testing/.nix-testing-gitlab-ci.yml
@@ -20,12 +20,10 @@
       - pylint.svg
       - pylint3.svg
   rules:
-    - if: $BUILD_TEST
-      when: never
-    - if: '$CI_COMMIT_TAG'
-      when: never
-    - if: '$FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [.ignore-on-build, rules]
+    - !reference [.ignore-on-tag, rules]
+    - !reference [.ignore-on-master, rules]
+    - !reference [.pipeline-control-test, rules]
     - when: on_success

 .postgresql_test_nix_base:
@@ -57,13 +55,10 @@
       artifacts: false  # Speed up tests
   rules:
-    - if: $BUILD_TEST
-      when: never
-    - if: '$FULL_TEST || $DAILY_TEST'
-      when: on_success
-    - if: '$CI_COMMIT_TAG'
-      when: never
-    - when: on_success
+    - !reference [.ignore-on-build, rules]
+    - !reference [.ignore-on-tag, rules]
+    - !reference [.pipeline-control-test, rules]
+    - when: on_success

 .sqlite_test_nix_base:
   tags:
@@ -93,12 +88,9 @@
     - job: build_and_push_to_cachix
       artifacts: false
   rules:
-    - if: $BUILD_TEST
-      when: never
-    - if: '$CI_COMMIT_TAG'
-      when: never
-    - if: '$FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [.ignore-on-build, rules]
+    - !reference [.ignore-on-tag, rules]
+    - !reference [.pipeline-control-test, rules]
     - when: on_success

 sqlite_test_nix:
diff --git a/.gitlab/ci/testing/.posttesting-gitlab-ci.yml b/.gitlab/ci/testing/.posttesting-gitlab-ci.yml
index e9d0159..dd85091 100644
--- a/.gitlab/ci/testing/.posttesting-gitlab-ci.yml
+++ b/.gitlab/ci/testing/.posttesting-gitlab-ci.yml
@@ -8,7 +8,15 @@
     strategy: depend
   rules:
     - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
+      variables:
+        DISPATCHER_REF: master
+      when: on_success
+    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(staging)$/'
+      variables:
+        DISPATCHER_REF: staging
       when: on_success
     - if: '$INTEGRATION || $FULL_TEST || $DAILY_TEST'
+      variables:
+        DISPATCHER_REF: staging
       when: on_success
     - when: never
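The hunk above also uses rule-level `variables:`, so whichever rule matches decides the branch the downstream pipeline runs on. Condensed to its essentials (the job name and project path are placeholders, not taken from this patch):

```yaml
run_integration:                       # hypothetical trigger job
  stage: post_testing
  trigger:
    project: example/qa-automation     # placeholder project path
    branch: $DISPATCHER_REF            # filled in by the matching rule
    strategy: depend
  rules:
    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/master$/'
      variables:
        DISPATCHER_REF: master
    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/staging$/'
      variables:
        DISPATCHER_REF: staging
    - when: never
```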
diff --git a/.gitlab/ci/testing/.pretesting-gitlab-ci.yml b/.gitlab/ci/testing/.pretesting-gitlab-ci.yml
index d9e53ff..9f99b1d 100644
--- a/.gitlab/ci/testing/.pretesting-gitlab-ci.yml
+++ b/.gitlab/ci/testing/.pretesting-gitlab-ci.yml
@@ -9,10 +9,8 @@
     - git config --global user.name "Mergerbot"
     - python3 scripts/merge-conflict-detector.py
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|dev)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG'
-      when: never
+    - !reference [.dev-staging-master, rules]
+    - !reference [.ignore-on-tag, rules]
     - when: never

 sanity_check:
@@ -24,10 +22,8 @@
     - bash scripts/sanity_check_commit.sh
     - scripts/sanity_check_file.py --mode=ls
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|dev)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG'
-      when: never
+    - !reference [.dev-staging-master, rules]
+    - !reference [.ignore-on-tag, rules]
     - when: never

 migration_sanity_check:
@@ -41,10 +37,8 @@
     - cd faraday
     - $(alembic branches)
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|dev)$/'
-      when: always
-    - if: '$CI_COMMIT_TAG'
-      when: never
+    - !reference [.dev-staging-master, rules]
+    - !reference [.ignore-on-tag, rules]
     - when: never

 bandit:
@@ -60,8 +54,7 @@
     - "bandit -r ${CI_PROJECT_DIR}/faraday --format custom --skip B101 --msg-template \
       \"{abspath}:{line}: {test_id}[bandit]: {severity}: {msg}\""
   rules:
-    - if: '$CI_COMMIT_TAG'
-      when: never
+    - !reference [.ignore-on-tag, rules]
     - when: on_success

 build_and_push_to_cachix:
@@ -84,9 +77,7 @@
     - nix-build | cachix push faradaysec
     - ./scripts/check-closure-size ./result
   rules:
-    - if: '$FULL_TEST || $DAILY_TEST'
-      when: on_success
-    - when: always
+    - when: on_success

 flake8:
   image: python:3
@@ -95,17 +86,16 @@
     - pip install flake8
     - flake8 .
   rules:
-    - if: '$CI_COMMIT_TAG'
-      when: never
+    - !reference [.ignore-on-tag, rules]
     - when: on_success

 no-format-str:
   image: python:3
   stage: pre_testing
   script:
-    - pip install flynt
+    - pip install flynt==0.69
     - flynt -df faraday tests
   rules:
-    - if: '$CI_COMMIT_TAG'
-      when: never
+    - !reference [.ignore-on-tag, rules]
+    - !reference [.ignore-on-master, rules]
     - when: on_success
diff --git a/.gitlab/ci/testing/.venv-testing-gitlab-ci.yml b/.gitlab/ci/testing/.venv-testing-gitlab-ci.yml
index 2c4772f..6466938 100644
--- a/.gitlab/ci/testing/.venv-testing-gitlab-ci.yml
+++ b/.gitlab/ci/testing/.venv-testing-gitlab-ci.yml
@@ -32,12 +32,10 @@
     - pip freeze
   allow_failure: true
   rules:
-    #- if: '$FULL_TEST || $DAILY_TEST || $ALPHA_TEST'
-    - if: '$ALPHA_TEST'  # FOR NOW, ASKED TO NOT CHARGE CI WORKER
+    # - !reference [ .pipeline-control-test, rules ]
+    # - !reference [ .dev-staging-master, rules ]
+    - if: '$ALPHA_TEST'
       when: on_success
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|dev)$/'
-      when: never  # FOR NOW, ASKED TO NOT CHARGE CI WORKER
-      #when: on_success
     - when: never
@@ -56,12 +54,10 @@
     - pip freeze
   allow_failure: true
   rules:
-    #- if: '$FULL_TEST || $DAILY_TEST || $ALPHA_TEST'
-    - if: '$ALPHA_TEST'  # FOR NOW, ASKED TO FIX #6474 first
+    # - !reference [ .pipeline-control-test, rules ]
+    # - !reference [ .dev-staging-master, rules ]
+    - if: '$ALPHA_TEST'
       when: on_success
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|dev)$/'
-      when: never  # FOR NOW, ASKED TO FIX #6474 first
-      #when: on_success
     - when: never

 unit_test 3.7:
@@ -74,7 +70,11 @@

 unit_test 3.9:
   extends: .latest_unit_test_base
-  image: python:3.9-rc
+  image: python:3.9
+
+unit_test 3.10:
+  extends: .latest_unit_test_base
+  image: python:3.10

 alpha_unit_test 3.7:
   extends: .alpha_unit_test_base
@@ -84,13 +84,17 @@
   extends: .alpha_unit_test_base
   image: python:3.8
   rules:
-    #- if: '$FULL_TEST || $DAILY_TEST || $ALPHA_TEST'
-    - if: '$ALPHA_TEST'
-      when: on_success
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master|dev)$/'
-      when: never  #on_success REACTIVATE WHEN HAVE TIME TO CHECK
-    - when: never
+    # - !reference [ .pipeline-control-test, rules ]
+    # - !reference [ .dev-staging-master, rules ]
+    - if: '$ALPHA_TEST'
+      when: on_success
+    - when: never

 alpha_unit_test 3.9:
   extends: .alpha_unit_test_base
-  image: python:3.9-rc
+  image: python:3.9
+
+
+alpha_unit_test 3.10:
+  extends: .alpha_unit_test_base
+  image: python:3.10
diff --git a/.gitlab/ci/upload/.storage-gitlab-ci.yml b/.gitlab/ci/upload/.storage-gitlab-ci.yml
index dbf3b0e..f15a652 100644
--- a/.gitlab/ci/upload/.storage-gitlab-ci.yml
+++ b/.gitlab/ci/upload/.storage-gitlab-ci.yml
@@ -35,10 +35,8 @@
     - *google_storage_deb_rpm_base
     - "gsutil setmeta -h x-goog-meta-branch:${CI_COMMIT_BRANCH} ${GCLOUD_FILE_PATH}*.*"
   rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
+    - !reference [ .on-master-staging, rules ]
+    - !reference [ .be-uploaded, rules ]
     - when: never
   needs:
     - job: generate_deb
diff --git a/.gitlab/ci/upload/.testing-gitlab-ci.yml b/.gitlab/ci/upload/.testing-gitlab-ci.yml
deleted file mode 100644
index a23bfb6..0000000
--- a/.gitlab/ci/upload/.testing-gitlab-ci.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-.qa_integration:
-  stage: upload_testing
-  variables:
-    REMOTE_BRANCH: $CI_COMMIT_REF_NAME
-    MAIN_COMMIT_SHA: $CI_COMMIT_SHA
-  trigger:
-    project: faradaysec/qa/automation
-    strategy: depend
-    branch: develop
-  rules:
-    - if: '$CI_COMMIT_REF_NAME =~ /^.*\/(master)$/'
-      when: on_success
-    - if: '$CI_COMMIT_TAG || $BUILD_TEST || $FULL_TEST || $DAILY_TEST'
-      when: on_success
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6099deb..f99a6c6 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -31,6 +31,9 @@
 include:
   - local: .gitlab/ci/fetch-secrets.yaml

+  - local: .gitlab/ci/.rules-conditions.yml
+
+
   - local: .gitlab/ci/testing/.pretesting-gitlab-ci.yml
   - local: .gitlab/ci/testing/.nix-testing-gitlab-ci.yml
   - local: .gitlab/ci/testing/.venv-testing-gitlab-ci.yml
@@ -42,11 +45,11 @@
   - local: .gitlab/ci/build-ci/.testing-gitlab-ci.yml

   - local: .gitlab/ci/upload/.storage-gitlab-ci.yml
-  - local: .gitlab/ci/upload/.testing-gitlab-ci.yml

   - local: .gitlab/ci/deploy/deploy-gitlab-ci.yml

   - local: .gitlab/ci/publish/.set-tag-gitlab-ci.yml
+  - local: .gitlab/ci/publish/.mirror-to-github-gitlab-ci.yml
   - local: .gitlab/ci/publish/.docker-publish-gitlab-ci.yml

   - template: Security/Secret-Detection.gitlab-ci.yml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 993639b..9acfe76 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,6 +11,7 @@
     exclude: '^faraday/server/www/'
   - id: check-yaml
     exclude: '^faraday/server/www/'
+    args: [ --unsafe ]
   - id: debug-statements
     exclude: '^faraday/server/www/'
 - repo: https://gitlab.com/pycqa/flake8
@@ -48,3 +49,8 @@
     pass_filenames: false
     args: [--mode=ls, --local]
     stages: [push]
+- repo: https://github.com/asottile/pyupgrade
+  rev: v2.29.0
+  hooks:
+    - id: pyupgrade
+      args: [ --py3-plus , --py36-plus]
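Two pre-commit details worth noting: `check-yaml` gains `--unsafe` (syntax-only parsing), presumably because the new `!reference` entries are a custom YAML tag that a safe load would reject, and the new pyupgrade hook accounts for many of the mechanical Python changes further down in this patch. A rough before/after sketch of the kind of Python-2-era code these hooks and the flynt job target (the one-liners are invented, not actual hunks):

```python
name, obj_id = 'example', 7

# Before: Python 2 leftovers, u'' prefixes and %-formatting
old_project = u'Faraday'                         # u'' is a no-op on Python 3
old_msg = 'host %s with id %s' % (name, obj_id)  # printf-style formatting

# After the cleanup: prefix dropped, modern string formatting
project = 'Faraday'
msg = f'host {name} with id {obj_id}'

assert old_project == project and old_msg == msg
```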
diff --git a/CHANGELOG/3.19.0/community.md b/CHANGELOG/3.19.0/community.md
new file mode 100644
index 0000000..ee49a67
--- /dev/null
+++ b/CHANGELOG/3.19.0/community.md
@@ -0,0 +1,11 @@
+ * ADD v3 bulk endpoints DELETE and EDIT (PATCH)
+ * Add logs of login, logout and log error to main log
+ * Fix bug in bulk update for m2m fields
+ * ADD clear settings command
+ * Add open medium, high and critical vulns histogram
+ * Fix integrity constraint error on cve update
+ * FIX static content for react
+ * Add cvss within vulnerability model
+ * Add check to see if workspace name is longer than 250 characters; in that case raise an error
+ * Change concat in urlstrings for join or urljoin
+ * Add cve to csv export
diff --git a/CHANGELOG/3.19.0/date.md b/CHANGELOG/3.19.0/date.md
new file mode 100644
index 0000000..7f30195
--- /dev/null
+++ b/CHANGELOG/3.19.0/date.md
@@ -0,0 +1 @@
+Dec 27th, 2021
diff --git a/RELEASE.md b/RELEASE.md
index c42123a..c0288a3 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -2,6 +2,20 @@
 =====================================

+3.19.0 [Dec 27th, 2021]:
+---
+ * ADD v3 bulk endpoints DELETE and EDIT (PATCH)
+ * Add logs of login, logout and log error to main log
+ * Fix bug in bulk update for m2m fields
+ * ADD clear settings command
+ * Add open medium, high and critical vulns histogram
+ * Fix integrity constraint error on cve update
+ * FIX static content for react
+ * Add cvss within vulnerability model
+ * Add check to see if workspace name is longer than 250 characters; in that case raise an error
+ * Change concat in urlstrings for join or urljoin
+ * Add cve to csv export
+
 3.18.1 [Nov 5th, 2021]:
 ---
 Fix CVE issue

@@ -10,14 +24,12 @@
 ---
  * Remove attachments in vulns filter endpoint
  * Add open and confirmed vulns in workspace stats
- * Add migration disabling several notifications.
  * Add user id to session API endpoint
  * Add cve to vulnerability model
 * Change funcs to views
  * FIX report import
  * Add `last_run_agent_date` field to workspace endpoint
  * Fix cve parsing in `vulnerability create` and `bulk create`
- * ADD check if postgres db is running during server start
  * Fix order_by in filters api
 * Fix 500 status code with invalid executor arguments
diff --git a/doc/conf.py b/doc/conf.py
index cca9ea6..f9d5270 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 #
 # Faraday documentation build configuration file, created by
 # sphinx-quickstart on Tue Oct 31 19:10:26 2017.
@@ -46,18 +45,18 @@
 master_doc = 'index'

 # General information about the project.
-project = u'Faraday'
-copyright = u'2017, Daniel Foguelman, Esteban Guillardoy, Ezequiel Tavella, Facundo de Guzmán, Federico Kirschbaum, Francisco Amato, Franco Linares, German Riera, Joaquín López Pereyra, Leonardo Lazzaro, Martín Rocha, Matias Ariel Ré Medina, Matias Lang, Micaela Ranea Sánchez, Sebastian Kulesz'
-author = u'Daniel Foguelman, Esteban Guillardoy, Ezequiel Tavella, Facundo de Guzmán, Federico Kirschbaum, Francisco Amato, Franco Linares, German Riera, Joaquín López Pereyra, Leonardo Lazzaro, Martín Rocha, Matias Ariel Ré Medina, Matias Lang, Micaela Ranea Sánchez, Sebastian Kulesz'
+project = 'Faraday'
+copyright = '2017, Daniel Foguelman, Esteban Guillardoy, Ezequiel Tavella, Facundo de Guzmán, Federico Kirschbaum, Francisco Amato, Franco Linares, German Riera, Joaquín López Pereyra, Leonardo Lazzaro, Martín Rocha, Matias Ariel Ré Medina, Matias Lang, Micaela Ranea Sánchez, Sebastian Kulesz'
+author = 'Daniel Foguelman, Esteban Guillardoy, Ezequiel Tavella, Facundo de Guzmán, Federico Kirschbaum, Francisco Amato, Franco Linares, German Riera, Joaquín López Pereyra, Leonardo Lazzaro, Martín Rocha, Matias Ariel Ré Medina, Matias Lang, Micaela Ranea Sánchez, Sebastian Kulesz'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = u'3.0.0'
+version = '3.0.0'
 # The full version, including alpha/beta/rc tags.
-release = u'3.0.0'
+release = '3.0.0'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -142,8 +141,8 @@
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'Faraday.tex', u'Faraday Documentation',
-     u'Daniel Foguelman, Esteban Guillardoy, Ezequiel Tavella, Facundo de Guzmán, Federico Kirschbaum, Francisco Amato, Franco Linares, German Riera, Joaquín López Pereyra, Leonardo Lazzaro, Martín Rocha, Matias Ariel Ré Medina, Matias Lang, Micaela Ranea Sánchez, Sebastian Kulesz', 'manual'),
+    (master_doc, 'Faraday.tex', 'Faraday Documentation',
+     'Daniel Foguelman, Esteban Guillardoy, Ezequiel Tavella, Facundo de Guzmán, Federico Kirschbaum, Francisco Amato, Franco Linares, German Riera, Joaquín López Pereyra, Leonardo Lazzaro, Martín Rocha, Matias Ariel Ré Medina, Matias Lang, Micaela Ranea Sánchez, Sebastian Kulesz', 'manual'),
 ]

@@ -152,7 +151,7 @@
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'faraday', u'Faraday Documentation',
+    (master_doc, 'faraday', 'Faraday Documentation',
      [author], 1)
 ]

@@ -163,11 +162,7 @@
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'Faraday', u'Faraday Documentation',
+    (master_doc, 'Faraday', 'Faraday Documentation',
      author, 'Faraday', 'One line description of project.',
      'Miscellaneous'),
 ]
-
-
-
-# I'm Py3
\ No newline at end of file
diff --git a/faraday/__init__.py b/faraday/__init__.py
index 8191de2..9718c4e 100644
--- a/faraday/__init__.py
+++ b/faraday/__init__.py
@@ -2,5 +2,5 @@
 # Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/)
 # See the file 'doc/LICENSE' for the license information

-__version__ = '3.18.1'
+__version__ = '3.19.0'
 __license_version__ = __version__
diff --git a/faraday/manage.py b/faraday/manage.py
index 0d51542..00d5189 100755
--- a/faraday/manage.py
+++ b/faraday/manage.py
@@ -144,8 +144,8 @@
     except OperationalError:
         logger = logging.getLogger(__name__)
         logger.error(
-            ('Could not connect to PostgreSQL. Please check: '
-             'if database is running or if the configuration settings are correct.')
+            'Could not connect to PostgreSQL. Please check: '
+            'if database is running or if the configuration settings are correct.'
         )
         sys.exit(1)
@@ -200,9 +200,9 @@
     if not conn_string:
         logger = logging.getLogger(__name__)
         logger.error(
-            ('No database configuration found. Please check: '
+            'No database configuration found. Please check: '
             'if the database is running or if the configuration settings are correct. '
-             'For first time installations execute: faraday-manage initdb')
+            'For first time installations execute: faraday-manage initdb'
         )
         sys.exit(1)
     InitDB()._create_tables(conn_string)
@@ -281,7 +281,7 @@

 @click.command(help="Manage settings")
-@click.option('-a', '--action', type=click.Choice(['show', 'update', 'list'], case_sensitive=False),
+@click.option('-a', '--action', type=click.Choice(['show', 'update', 'list', 'clear'], case_sensitive=False),
               default='list', show_default=True, help="Action")
 @click.option('--data', type=str, required=False, callback=manage_settings.settings_format_validation,
               help="Settings config in json")
@@ -309,5 +309,3 @@

 if __name__ == '__main__':
     cli()
-
-# I'm Py3
diff --git a/faraday/migrations/env.py b/faraday/migrations/env.py
index 4ba732a..1704677 100644
--- a/faraday/migrations/env.py
+++ b/faraday/migrations/env.py
@@ -1,4 +1,3 @@
-
 import logging
 import faraday.server.config
 from faraday.server.web import get_app
@@ -79,4 +78,3 @@
     run_migrations_offline()
 else:
     run_migrations_online()
-# I'm Py3
diff --git a/faraday/migrations/versions/085188e0a016_create_rules_tables.py b/faraday/migrations/versions/085188e0a016_create_rules_tables.py
index 9a0d2d2..30cb98b 100644
--- a/faraday/migrations/versions/085188e0a016_create_rules_tables.py
+++ b/faraday/migrations/versions/085188e0a016_create_rules_tables.py
@@ -184,6 +184,3 @@
     op.drop_table('rule_action')
     op.drop_table('action')
     op.drop_table('rule')
-
-
-# I'm Py3
diff --git a/faraday/migrations/versions/0d216660da28_add_notification_table.py b/faraday/migrations/versions/0d216660da28_add_notification_table.py
index 90d7b73..d304a36 100644
--- a/faraday/migrations/versions/0d216660da28_add_notification_table.py
+++ b/faraday/migrations/versions/0d216660da28_add_notification_table.py
@@ -67,4 +67,3 @@
     op.drop_table('notification')
     # op.drop_constraint(None, 'notification_user_id_fkey', type_='foreignkey')
     # op.drop_constraint(None, 'notification_workspace_id_fkey', type_='foreignkey')
-# I'm Py3
diff --git a/faraday/migrations/versions/15d70093d262_severities_histogram_model.py b/faraday/migrations/versions/15d70093d262_severities_histogram_model.py
new file mode 100644
index 0000000..a2bbf28
--- /dev/null
+++ b/faraday/migrations/versions/15d70093d262_severities_histogram_model.py
@@ -0,0 +1,75 @@
+"""Severities histogram model
+
+Revision ID: 15d70093d262
+Revises: d8f0b32a5c0e
+Create Date: 2021-11-08 13:57:28.099487+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+from sqlalchemy import func, case
+
+from faraday.server.models import VulnerabilityGeneric, SeveritiesHistogram, Workspace
+
+revision = '15d70093d262'
+down_revision = 'd8f0b32a5c0e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('severities_histogram',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('workspace_id', sa.Integer(), nullable=False),
+                    sa.Column('date', sa.Date(), nullable=False),
+                    sa.Column('medium', sa.Integer(), nullable=False),
+                    sa.Column('high', sa.Integer(), nullable=False),
+                    sa.Column('critical', sa.Integer(), nullable=False),
+                    sa.Column('confirmed', sa.Integer(), nullable=False),
+                    sa.ForeignKeyConstraint(['workspace_id'], ['workspace.id'], ),
+                    sa.PrimaryKeyConstraint('id')
+                    )
+    op.create_index(op.f('ix_severities_histogram_workspace_id'), 'severities_histogram', ['workspace_id'], unique=False)
+    # ### end Alembic commands ###
+
+    # Init histogram
+    bind = op.get_bind()
+    session = sa.orm.Session(bind=bind)
+    workspaces = session.query(Workspace).all()
+    for workspace in workspaces:
+        vulnerabilities = session.query(VulnerabilityGeneric) \
+            .with_entities(func.date_trunc('day', VulnerabilityGeneric.create_date),
+                           VulnerabilityGeneric.severity,
+                           func.count(VulnerabilityGeneric.severity),
+                           func.sum(case([(VulnerabilityGeneric.confirmed, 1)], else_=0))) \
+            .filter(VulnerabilityGeneric.workspace_id == workspace.id,
+                    VulnerabilityGeneric.status.notin_(['closed', 'risk-accepted']),
+                    VulnerabilityGeneric.severity.in_(['medium', 'high', 'critical'])) \
+            .group_by(func.date_trunc('day', VulnerabilityGeneric.create_date), VulnerabilityGeneric.severity).all()
+        for histogram_date, severity_type, severity_count, confirmed_count in vulnerabilities:
+            severity_histogram = session.query(SeveritiesHistogram) \
+                .filter(SeveritiesHistogram.date == histogram_date,
+                        SeveritiesHistogram.workspace_id == workspace.id).first()
+            if severity_histogram is None:
+                severity_histogram = SeveritiesHistogram(date=histogram_date, workspace=workspace, medium=0, high=0, critical=0, confirmed=0)
+                session.add(severity_histogram)
+                session.commit()
+            if severity_type == 'medium':
+                severity_histogram.medium = severity_count
+            if severity_type == 'high':
+                severity_histogram.high = severity_count
+            if severity_type == 'critical':
+                severity_histogram.critical = severity_count
+            severity_histogram.confirmed += confirmed_count
+        session.commit()
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(op.f('ix_severities_histogram_workspace_id'), table_name='severities_histogram')
+    op.drop_table('severities_histogram')
+    # ### end Alembic commands ###
diff --git a/faraday/migrations/versions/1b2533cc16fe_fix_custom_fields_display_name_was_used_.py b/faraday/migrations/versions/1b2533cc16fe_fix_custom_fields_display_name_was_used_.py
index 5c0b073..5f132f0 100644
--- a/faraday/migrations/versions/1b2533cc16fe_fix_custom_fields_display_name_was_used_.py
+++ b/faraday/migrations/versions/1b2533cc16fe_fix_custom_fields_display_name_was_used_.py
@@ -73,5 +73,3 @@
             'json_data': json.dumps(custom_fields),
             'vuln_id': vuln_id
         })
-
-# I'm Py3
diff --git a/faraday/migrations/versions/282ac9b6569f_add_agent_as_import_source_to_command.py b/faraday/migrations/versions/282ac9b6569f_add_agent_as_import_source_to_command.py
index 13b8564..3cab94d 100644
--- a/faraday/migrations/versions/282ac9b6569f_add_agent_as_import_source_to_command.py
+++ b/faraday/migrations/versions/282ac9b6569f_add_agent_as_import_source_to_command.py
@@ -45,7 +45,7 @@

 def downgrade():
     # Convert 'asset_owner' status into 'client'
-    op.execute(cmd.update().where(cmd.c.import_source == u'agent')
+    op.execute(cmd.update().where(cmd.c.import_source == 'agent')
                .values(import_source=None))
     # Create a temporary "_role" type, convert and drop the "new" type
     tmp_type.create(op.get_bind(), checkfirst=False)
diff --git a/faraday/migrations/versions/2ca03a8feef5_workspace_readonly.py b/faraday/migrations/versions/2ca03a8feef5_workspace_readonly.py
index 9711fe0..3c15f85 100644
--- a/faraday/migrations/versions/2ca03a8feef5_workspace_readonly.py
+++ b/faraday/migrations/versions/2ca03a8feef5_workspace_readonly.py
@@ -22,4 +22,3 @@

 def downgrade():
     op.drop_column('workspace', 'readonly')
-# I'm Py3
diff --git a/faraday/migrations/versions/2db31733fb78_unique_field_name_in_customfield.py b/faraday/migrations/versions/2db31733fb78_unique_field_name_in_customfield.py
index 5563159..9b0d91a 100644
--- a/faraday/migrations/versions/2db31733fb78_unique_field_name_in_customfield.py
+++ b/faraday/migrations/versions/2db31733fb78_unique_field_name_in_customfield.py
@@ -23,4 +23,3 @@
 def downgrade():
     conn = op.get_bind()
     conn.execute('ALTER TABLE custom_fields_schema DROP CONSTRAINT custom_fields_schema_field_name_key;')
-# I'm Py3
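The bulks migration that follows rebuilds dozens of foreign keys so that deletes and detaches propagate inside the database (`ON DELETE CASCADE` / `SET NULL`), presumably in support of the new bulk endpoints, given the file name. PostgreSQL cannot change the `ON DELETE` action of an existing constraint in place, hence the recurring drop-and-recreate pattern, sketched here with placeholder names:

```python
# Placeholder table/constraint names, not taken from the migration below.
from alembic import op


def upgrade():
    # The FK must be dropped and recreated to change its ON DELETE action
    op.drop_constraint('child_parent_id_fkey', 'child', type_='foreignkey')
    op.create_foreign_key(
        'child_parent_id_fkey', 'child',
        'parent', ['parent_id'], ['id'],
        ondelete='CASCADE'  # deleting a parent row now removes its children
    )
```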
diff --git a/faraday/migrations/versions/38bb251889e6_bulks.py b/faraday/migrations/versions/38bb251889e6_bulks.py
new file mode 100644
index 0000000..9f45638
--- /dev/null
+++ b/faraday/migrations/versions/38bb251889e6_bulks.py
@@ -0,0 +1,231 @@
+"""empty message
+
+Revision ID: 38bb251889e6
+Revises: 15d70093d262
+Create Date: 2021-07-30 02:12:00.706416+00:00
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '38bb251889e6'
+down_revision = '15d70093d262'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+
+    # Agent table
+    op.drop_constraint('executor_agent_id_fkey', 'executor')
+    op.create_foreign_key(
+        'executor_agent_id_fkey',
+        'executor',
+        'agent', ['agent_id'], ['id'],
+        ondelete='CASCADE'
+    )
+    op.drop_constraint('association_workspace_and_agents_table_agent_id_fkey',
+                       'association_workspace_and_agents_table')
+    op.create_foreign_key(
+        'association_workspace_and_agents_table_agent_id_fkey',
+        'association_workspace_and_agents_table',
+        'agent', ['agent_id'], ['id'],
+        ondelete='CASCADE'
+    )
+
+    # Vulnerability_template table
+    op.drop_constraint('knowledge_base_vulnerability_template_id_fkey',
+                       'knowledge_base')
+    op.create_foreign_key(
+        'knowledge_base_vulnerability_template_id_fkey', 'knowledge_base',
+        'vulnerability_template', ['vulnerability_template_id'], ['id'],
+        ondelete='CASCADE'
+    )
+
+    # Comment table
+    op.drop_constraint('comment_reply_to_id_fkey',
+                       'comment')
+    op.create_foreign_key(
+        'comment_reply_to_id_fkey', 'comment',
+        'comment', ['reply_to_id'], ['id'],
+        ondelete='SET NULL'
+    )
+
+    # Service table
+    op.drop_constraint('credential_service_id_fkey', 'credential')
+    op.create_foreign_key(
+        'credential_service_id_fkey', 'credential',
+        'service', ['service_id'], ['id'],
+        ondelete='CASCADE'
+    )
+
+    # Command table
+    op.drop_constraint('command_object_command_id_fkey', 'command_object')
+    op.create_foreign_key(
+        'command_object_command_id_fkey', 'command_object',
+        'command', ['command_id'], ['id'],
+        ondelete='SET NULL'
+    )
+    op.drop_constraint('agent_execution_command_id_fkey', 'agent_execution')
+    op.create_foreign_key(
+        'agent_execution_command_id_fkey', 'agent_execution',
+        'command', ['command_id'], ['id'],
+        ondelete='SET NULL'
+    )
+    op.drop_constraint('rule_execution_command_id_fkey', 'rule_execution')
+    op.create_foreign_key(
+        'rule_execution_command_id_fkey', 'rule_execution',
+        'command', ['command_id'], ['id'],
+        ondelete='CASCADE'
+    )
+
+    # Host table
+    op.drop_constraint('hostname_host_id_fkey', 'hostname')
+    op.create_foreign_key(
+        'hostname_host_id_fkey', 'hostname',
+        'host', ['host_id'], ['id'],
+        ondelete='CASCADE'
+    )
+    op.drop_constraint('service_host_id_fkey', 'service')
+    op.create_foreign_key(
+        'service_host_id_fkey', 'service',
+        'host', ['host_id'], ['id'],
+        ondelete='CASCADE'
+    )
+    op.drop_constraint('vulnerability_host_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_host_id_fkey', 'vulnerability',
+        'host', ['host_id'], ['id'],
+        ondelete='CASCADE'
+    )
+    op.drop_constraint('credential_host_id_fkey', 'credential')
+    op.create_foreign_key(
+        'credential_host_id_fkey', 'credential',
+        'host', ['host_id'], ['id'],
+        ondelete='CASCADE'
+    )
+
+    # Vulnerability Table
+    op.drop_constraint('vulnerability_vulnerability_duplicate_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_vulnerability_duplicate_id_fkey', 'vulnerability',
+        'vulnerability', ['vulnerability_duplicate_id'], ['id'],
+        ondelete='SET NULL'
+    )
+
+    # VulnerabilityTemplate Table
+    op.drop_constraint('vulnerability_vulnerability_template_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_vulnerability_template_id_fkey', 'vulnerability',
+        'vulnerability_template', ['vulnerability_template_id'], ['id'],
+        ondelete='SET NULL'
+    )
+
+    # SourceCode Table
+    op.drop_constraint('vulnerability_source_code_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_source_code_id_fkey', 'vulnerability',
+        'source_code', ['source_code_id'], ['id'],
+        ondelete='CASCADE'
+    )
+
+
+def downgrade():
+
+    # Agent table
+    op.drop_constraint('executor_agent_id_fkey',
+                       'executor')
+    op.create_foreign_key(
+        'executor_agent_id_fkey',
+        'executor',
+        'agent', ['agent_id'], ['id']
+    )
+    op.drop_constraint('association_workspace_and_agents_table_agent_id_fkey',
+                       'association_workspace_and_agents_table')
+    op.create_foreign_key(
+        'association_workspace_and_agents_table_agent_id_fkey',
+        'association_workspace_and_agents_table',
+        'agent', ['agent_id'], ['id']
+    )
+
+    # Vulnerability_template table
+    op.drop_constraint('knowledge_base_vulnerability_template_id_fkey',
+                       'knowledge_base')
+    op.create_foreign_key(
+        'knowledge_base_vulnerability_template_id_fkey', 'knowledge_base',
+        'vulnerability_template', ['vulnerability_template_id'], ['id']
+    )
+
+    # Comment table
+    op.drop_constraint('comment_reply_to_id_fkey',
+                       'comment')
+    op.create_foreign_key(
+        'comment_reply_to_id_fkey', 'comment',
+        'comment', ['reply_to_id'], ['id']
+    )
+
+    # Service table
+    op.drop_constraint('credential_service_id_fkey', 'credential')
+    op.create_foreign_key(
+        'credential_service_id_fkey', 'credential',
+        'service', ['service_id'], ['id']
+    )
+
+    # Command table
+    op.drop_constraint('command_object_command_id_fkey', 'command_object')
+    op.create_foreign_key(
+        'command_object_command_id_fkey', 'command_object',
+        'command', ['command_id'], ['id']
+    )
+    op.drop_constraint('agent_execution_command_id_fkey', 'agent_execution')
+    op.create_foreign_key(
+        'agent_execution_command_id_fkey', 'agent_execution',
+        'command', ['command_id'], ['id']
+    )
+    op.drop_constraint('rule_execution_command_id_fkey', 'rule_execution')
+    op.create_foreign_key(
+        'rule_execution_command_id_fkey', 'rule_execution',
+        'command', ['command_id'], ['id']
+    )
+
+    # Host table
+    op.drop_constraint('credential_host_id_fkey', 'credential')
+    op.create_foreign_key(
+        'credential_host_id_fkey', 'credential',
+        'host', ['host_id'], ['id']
+    )
+    op.drop_constraint('hostname_host_id_fkey', 'hostname')
+    op.create_foreign_key(
+        'hostname_host_id_fkey', 'hostname',
+        'host', ['host_id'], ['id']
+    )
+    op.drop_constraint('service_host_id_fkey', 'service')
+    op.create_foreign_key(
+        'service_host_id_fkey', 'service',
+        'host', ['host_id'], ['id']
+    )
+    op.drop_constraint('vulnerability_host_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_host_id_fkey', 'vulnerability',
+        'host', ['host_id'], ['id']
+    )
+
+    op.drop_constraint('vulnerability_vulnerability_duplicate_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_vulnerability_duplicate_id_fkey', 'vulnerability',
+        'vulnerability', ['vulnerability_duplicate_id'], ['id']
+    )
+
+    # VulnerabilityTemplate Table
+    op.drop_constraint('vulnerability_vulnerability_template_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_vulnerability_template_id_fkey', 'vulnerability',
+        'vulnerability_template', ['vulnerability_template_id'], ['id']
+    )
+
+    # SourceCode Table
+    op.drop_constraint('vulnerability_source_code_id_fkey', 'vulnerability')
+    op.create_foreign_key(
+        'vulnerability_source_code_id_fkey', 'vulnerability',
+        'source_code', ['source_code_id'], ['id']
+    )
diff --git a/faraday/migrations/versions/5272b3f5a820_add_markdown_column_to_exectuive_reports.py b/faraday/migrations/versions/5272b3f5a820_add_markdown_column_to_exectuive_reports.py
index aa398c7..249ab7f 100644
--- a/faraday/migrations/versions/5272b3f5a820_add_markdown_column_to_exectuive_reports.py
+++ b/faraday/migrations/versions/5272b3f5a820_add_markdown_column_to_exectuive_reports.py
@@ -22,4 +22,3 @@

 def downgrade():
     op.drop_column('executive_report', 'markdown')
-# I'm Py3
diff --git a/faraday/migrations/versions/59bed5515407_vuln_external_id.py b/faraday/migrations/versions/59bed5515407_vuln_external_id.py
index 92d74ba..dfc0e7b 100644
--- a/faraday/migrations/versions/59bed5515407_vuln_external_id.py
+++ b/faraday/migrations/versions/59bed5515407_vuln_external_id.py
@@ -26,6 +26,3 @@
     conn = op.get_bind()
     conn.execute('ALTER TABLE vulnerability DROP COLUMN external_id')
     conn.execute('ALTER TABLE vulnerability_template DROP COLUMN external_id')
-
-
-# I'm Py3
diff --git a/faraday/migrations/versions/5cf9660bba80_policyviolationvulnerabilityassociation_.py b/faraday/migrations/versions/5cf9660bba80_policyviolationvulnerabilityassociation_.py
new file mode 100644
index 0000000..f89e20d
--- /dev/null
+++ b/faraday/migrations/versions/5cf9660bba80_policyviolationvulnerabilityassociation_.py
@@ -0,0 +1,46 @@
+"""PolicyViolationVulnerabilityAssociation FK ondelete action
+
+Revision ID: 5cf9660bba80
+Revises: 7dea3a6caf51
+Create Date: 2021-12-01 16:09:40.318964+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '5cf9660bba80'
+down_revision = '7dea3a6caf51'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint('policy_violation_vulnerability_associatio_vulnerability_id_fkey', 'policy_violation_vulnerability_association', type_='foreignkey')
+    op.create_foreign_key(None, 'policy_violation_vulnerability_association', 'vulnerability', ['vulnerability_id'], ['id'], ondelete='CASCADE')
+    op.alter_column('vulnerability', 'risk',
+                    existing_type=sa.REAL(),
+                    type_=sa.Float(precision=3, asdecimal=1),
+                    existing_nullable=True)
+    op.alter_column('vulnerability_template', 'risk',
+                    existing_type=sa.REAL(),
+                    type_=sa.Float(precision=3, asdecimal=1),
+                    existing_nullable=True)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('vulnerability_template', 'risk',
+                    existing_type=sa.Float(precision=3, asdecimal=1),
+                    type_=sa.REAL(),
+                    existing_nullable=True)
+    op.alter_column('vulnerability', 'risk',
+                    existing_type=sa.Float(precision=3, asdecimal=1),
+                    type_=sa.REAL(),
+                    existing_nullable=True)
+    op.drop_constraint('policy_violation_vulnerability_associatio_vulnerability_id_fkey', 'policy_violation_vulnerability_association', type_='foreignkey')
+    op.create_foreign_key(None, 'policy_violation_vulnerability_association', 'vulnerability', ['vulnerability_id'], ['id'])
+    # ### end Alembic commands ###
diff --git a/faraday/migrations/versions/7dea3a6caf51_cascade_in_vuls_relation.py b/faraday/migrations/versions/7dea3a6caf51_cascade_in_vuls_relation.py
new file mode 100644
index 0000000..5f70ee9
--- /dev/null
+++ b/faraday/migrations/versions/7dea3a6caf51_cascade_in_vuls_relation.py
@@ -0,0 +1,58 @@
+"""cascade in vuls relation
+
+Revision ID: 7dea3a6caf51
+Revises: 38bb251889e6
+Create Date: 2021-11-10 21:23:24.837776+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '7dea3a6caf51'
+down_revision = '38bb251889e6'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint('command_workspace_id_fkey', 'command', type_='foreignkey')
+    op.create_foreign_key(None, 'command', 'workspace', ['workspace_id'], ['id'], ondelete='CASCADE')
+    op.drop_constraint('knowledge_base_vulnerability_template_id_fkey', 'knowledge_base', type_='foreignkey')
+    op.create_foreign_key(None, 'knowledge_base', 'vulnerability_template', ['vulnerability_template_id'], ['id'])
+    op.drop_constraint('reference_vulnerability_association_vulnerability_id_fkey', 'reference_vulnerability_association', type_='foreignkey')
+    op.create_foreign_key(None, 'reference_vulnerability_association', 'vulnerability', ['vulnerability_id'], ['id'], ondelete='CASCADE')
+    op.alter_column('vulnerability', 'risk',
+                    existing_type=sa.REAL(),
+                    type_=sa.Float(precision=3, asdecimal=1),
+                    existing_nullable=True)
+    op.drop_constraint('vulnerability_service_id_fkey', 'vulnerability', type_='foreignkey')
+    op.create_foreign_key(None, 'vulnerability', 'service', ['service_id'], ['id'], ondelete='CASCADE')
+    op.alter_column('vulnerability_template', 'risk',
+                    existing_type=sa.REAL(),
+                    type_=sa.Float(precision=3, asdecimal=1),
+                    existing_nullable=True)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('vulnerability_template', 'risk',
+                    existing_type=sa.Float(precision=3, asdecimal=1),
+                    type_=sa.REAL(),
+                    existing_nullable=True)
+    op.drop_constraint('vulnerability_service_id_fkey', 'vulnerability', type_='foreignkey')
+    op.create_foreign_key(None, 'vulnerability', 'service', ['service_id'], ['id'])
+    op.alter_column('vulnerability', 'risk',
+                    existing_type=sa.Float(precision=3, asdecimal=1),
+                    type_=sa.REAL(),
+                    existing_nullable=True)
+    op.drop_constraint('reference_vulnerability_association_vulnerability_id_fkey', 'reference_vulnerability_association', type_='foreignkey')
+    op.create_foreign_key(None, 'reference_vulnerability_association', 'vulnerability', ['vulnerability_id'], ['id'])
+    op.drop_constraint('knowledge_base_vulnerability_template_id_fkey', 'knowledge_base', type_='foreignkey')
+    op.create_foreign_key(None, 'knowledge_base', 'vulnerability_template', ['vulnerability_template_id'], ['id'], ondelete='CASCADE')
+    op.drop_constraint('command_workspace_id_fkey', 'command', type_='foreignkey')
+    op.create_foreign_key(None, 'command', 'workspace', ['workspace_id'], ['id'])
+    # ### end Alembic commands ###
diff --git a/faraday/migrations/versions/8a10ff3926a5_2fa_columns.py b/faraday/migrations/versions/8a10ff3926a5_2fa_columns.py
index 5439e1a..7e9eeec 100644
--- a/faraday/migrations/versions/8a10ff3926a5_2fa_columns.py
+++ b/faraday/migrations/versions/8a10ff3926a5_2fa_columns.py
@@ -30,4 +30,3 @@
     op.drop_column('faraday_user', 'otp_secret')
     op.drop_column('faraday_user', 'state_otp')
     op.execute('DROP TYPE user_otp_states')
-# I'm Py3
diff --git a/faraday/migrations/versions/9c4091d1a09b_create_agent_table.py b/faraday/migrations/versions/9c4091d1a09b_create_agent_table.py
index 5acbc4d..049fab2 100644
--- a/faraday/migrations/versions/9c4091d1a09b_create_agent_table.py
+++ b/faraday/migrations/versions/9c4091d1a09b_create_agent_table.py
@@ -80,6 +80,3 @@

 def downgrade():
     op.drop_table('agent_schedule')
     op.drop_table('agent')
-
-
-# I'm Py3
diff --git a/faraday/migrations/versions/be89aa03e35e_add_severities_column_to_executive_.py b/faraday/migrations/versions/be89aa03e35e_add_severities_column_to_executive_.py
index f613d42..c2ac17a 100644
--- a/faraday/migrations/versions/be89aa03e35e_add_severities_column_to_executive_.py
+++ b/faraday/migrations/versions/be89aa03e35e_add_severities_column_to_executive_.py
@@ -24,6 +24,3 @@
 def downgrade():
     conn = op.get_bind()
     conn.execute('ALTER TABLE executive_report DROP COLUMN filter')
-
-
-# I'm Py3
diff --git a/faraday/migrations/versions/d8f0b32a5c0e_cvss_model.py b/faraday/migrations/versions/d8f0b32a5c0e_cvss_model.py
new file mode 100644
index 0000000..bd60b34
--- /dev/null
+++ b/faraday/migrations/versions/d8f0b32a5c0e_cvss_model.py
@@ -0,0 +1,83 @@
+"""cvss_model
+
+Revision ID: d8f0b32a5c0e
+Revises: f28eae25416b
+Create Date: 2021-09-01 10:30:06.693843+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'd8f0b32a5c0e'
+down_revision = 'f28eae25416b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('cvss_base',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('version', sa.String(length=8), nullable=False),
+                    sa.Column('vector_string', sa.String(length=64)),
+                    sa.Column('type', sa.String(length=24), nullable=True),
+                    sa.Column('base_score', sa.Float(), default=0.0),
+                    sa.Column('fixed_base_score', sa.Float(), default=0.0),
+                    sa.PrimaryKeyConstraint('id')
+                    )
+    op.create_table('cvss_v2',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('access_vector', sa.Enum('N', 'A', 'L', name='cvss_access_vector')),
+                    sa.Column('access_complexity', sa.Enum('L', 'M', 'H', name='cvss_access_complexity')),
+                    sa.Column('authentication', sa.Enum('N', 'S', 'M', name='cvss_authentication')),
+                    sa.Column('confidentiality_impact', sa.Enum('N', 'P', 'C', name='cvss_impact_types_v2')),
+                    sa.Column('integrity_impact', sa.Enum('N', 'P', 'C', name='cvss_impact_types_v2')),
+                    sa.Column('availability_impact', sa.Enum('N', 'P', 'C', name='cvss_impact_types_v2')),
+                    sa.ForeignKeyConstraint(['id'], ['cvss_base.id'], ),
+                    sa.PrimaryKeyConstraint('id')
+                    )
+    op.create_table('cvss_v3',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('attack_vector', sa.Enum('N', 'A', 'L', 'P', name='cvss_attack_vector')),
+                    sa.Column('attack_complexity', sa.Enum('L', 'H', name='cvss_attack_complexity')),
+                    sa.Column('privileges_required', sa.Enum('N', 'L', 'H', name='cvss_privileges_required')),
+                    sa.Column('user_interaction', sa.Enum('N', 'R', name='cvss_user_interaction')),
+                    sa.Column('scope', sa.Enum('U', 'C', name='cvss_scope')),
+                    sa.Column('confidentiality_impact', sa.Enum('N', 'L', 'H', name='cvss_impact_types_v3')),
+                    sa.Column('integrity_impact', sa.Enum('N', 'L', 'H', name='cvss_impact_types_v3')),
+                    sa.Column('availability_impact', sa.Enum('N', 'L', 'H', name='cvss_impact_types_v3')),
+                    sa.ForeignKeyConstraint(['id'], ['cvss_base.id'], ),
+                    sa.PrimaryKeyConstraint('id')
+                    )
+
+    # Vuln relationship with cvss
+    op.add_column('vulnerability', sa.Column('cvssv2_id', sa.Integer(), nullable=True))
+    op.add_column('vulnerability', sa.Column('cvssv3_id', sa.Integer(), nullable=True))
+    op.create_foreign_key(None, 'vulnerability', 'cvss_v2', ['cvssv2_id'], ['id'])
+    op.create_foreign_key(None, 'vulnerability', 'cvss_v3', ['cvssv3_id'], ['id'])
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    # op.drop_constraint(None, 'vulnerability', type_='foreignkey')
+    op.drop_column('vulnerability', 'cvssv2_id')
+    op.drop_column('vulnerability', 'cvssv3_id')
+    op.drop_table('cvss_v3')
+    op.drop_table('cvss_v2')
+    op.drop_table('cvss_base')
+    op.execute('drop type cvss_attack_complexity')
+    op.execute('drop type cvss_access_vector')
+    op.execute('drop type cvss_access_complexity')
+    op.execute('drop type cvss_attack_vector')
+    op.execute('drop type cvss_authentication')
+    op.execute('drop type cvss_privileges_required')
+    op.execute('drop type cvss_scope')
+    op.execute('drop type cvss_user_interaction')
+    op.execute('drop type cvss_impact_types_v2')
+    op.execute('drop type cvss_impact_types_v3')
+    #
+    # ### end Alembic commands ###
diff --git a/faraday/migrations/versions/e61afb450465_add_custom_fields.py b/faraday/migrations/versions/e61afb450465_add_custom_fields.py
index 723a568..b311a59 100644
--- a/faraday/migrations/versions/e61afb450465_add_custom_fields.py
+++ b/faraday/migrations/versions/e61afb450465_add_custom_fields.py
@@ -34,4 +34,3 @@
     conn.execute('ALTER TABLE vulnerability DROP COLUMN custom_fields')
     conn.execute('ALTER TABLE vulnerability_template DROP COLUMN custom_fields')
     conn.execute('DROP TABLE custom_fields_schema')
-# I'm Py3
diff --git a/faraday/migrations/versions/f8a44acd0e41_add_new_user_role.py b/faraday/migrations/versions/f8a44acd0e41_add_new_user_role.py
index 8db6966..7c2e7d9 100644
--- a/faraday/migrations/versions/f8a44acd0e41_add_new_user_role.py
+++ b/faraday/migrations/versions/f8a44acd0e41_add_new_user_role.py
@@ -50,7 +50,7 @@
     old_type = sa.Enum(*ROLES, name='user_roles')

     # Convert 'asset_owner' status into 'client'
-    op.execute(tcr.update().where(tcr.c.role == u'asset_owner')
+    op.execute(tcr.update().where(tcr.c.role == 'asset_owner')
               .values(status='client'))
     # Create a temporary "_role" type, convert and drop the "new" type
     tmp_type.create(op.get_bind(), checkfirst=False)
diff --git a/faraday/searcher/api.py b/faraday/searcher/api.py
index dc263f8..674367e 100644
--- a/faraday/searcher/api.py
+++ b/faraday/searcher/api.py
@@ -1,8 +1,7 @@
 import json
 import logging
 import socket
-from urllib.parse import urlencode
-
+from urllib.parse import urlencode, urljoin, urlparse
 from requests.adapters import ConnectionError, ReadTimeout

 logger = logging.getLogger('Faraday searcher')
@@ -57,14 +56,13 @@
         raise UserWarning('Invalid username or password')

     def _url(self, path, is_get=False):
-        url = self.base + 'v3/' + path
+        url = urljoin(self.base, f'v3/{path}')
         if self.command_id and 'commands' not in url and not url.endswith('}') and not is_get:
-            if '?' in url:
-                url += f'&command_id={self.command_id}'
-            elif url.endswith('/'):
-                url = f'{url[:-1]}?command_id={self.command_id}'
+            if url.endswith('/'):
+                url = urljoin(url[:-1], f'?command_id={self.command_id}')
             else:
-                url += f'?command_id={self.command_id}'
+                q = urlparse(url).query
+                url = urljoin(url, f'?{q}&command_id={self.command_id}')
         return url

     def _get(self, url, object_name):
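The rewritten `_url()` relies on `urllib.parse` semantics that are easy to get wrong, so here is a standalone demonstration of the behavior it depends on (the host name is made up; this snippet is not part of the patch):

```python
from urllib.parse import urljoin, urlparse

# urljoin() appends a relative path only when the base ends with '/':
print(urljoin('https://faraday.example/', 'v3/hosts'))
# -> https://faraday.example/v3/hosts

# A reference that is only a query string keeps the base path (RFC 3986),
# which is what lets _url() attach command_id to an already-built URL:
print(urljoin('https://faraday.example/v3/hosts', '?command_id=42'))
# -> https://faraday.example/v3/hosts?command_id=42

# Merging with an existing query must be done by hand, as _url() does:
url = 'https://faraday.example/v3/hosts?sort=ip'
q = urlparse(url).query  # 'sort=ip'
print(urljoin(url, f'?{q}&command_id=42'))
# -> https://faraday.example/v3/hosts?sort=ip&command_id=42
```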
diff --git a/faraday/searcher/searcher.py b/faraday/searcher/searcher.py
index 4ebcef8..7eccd7e 100755
--- a/faraday/searcher/searcher.py
+++ b/faraday/searcher/searcher.py
@@ -1,12 +1,10 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 ###
 # Faraday Penetration Test IDE
 # Copyright (C) 2018 Infobyte LLC (http://www.infobytesec.com/)
 # See the file 'doc/LICENSE' for the license information
 ###

-from builtins import str

 import ast
 import json
@@ -180,7 +178,7 @@
                 set_array(field, value, add=to_add)
                 action = f'Adding {value} to {key} list in vulnerability {vuln.name} with id {vuln.id}'
                 if not to_add:
-                    action = 'Removing %s from %s list in vulnerability %s with id %s' % (
+                    action = 'Removing {} from {} list in vulnerability {} with id {}'.format(
                         value, key, vuln.name, vuln.id)
                 logger.info(action)
@@ -217,7 +215,7 @@
                 set_array(field, value, add=to_add)
                 action = f'Adding {value} to {key} list in service {service.name} with id {service.id}'
                 if not to_add:
-                    action = 'Removing %s from %s list in service %s with id %s' % (
+                    action = 'Removing {} from {} list in service {} with id {}'.format(
                         value, key, service.name, service.id)
                 logger.info(action)
@@ -248,7 +246,7 @@
                 set_array(field, value, add=to_add)
                 action = f'Adding {value} to {key} list in host {host.name} with id {host.id}'
                 if not to_add:
-                    action = 'Removing %s from %s list in host %s with id %s' % (
+                    action = 'Removing {} from {} list in host {} with id {}'.format(
                         value, key, host.name, host.id)
                 logger.info(action)
@@ -367,7 +365,7 @@
             return rule

         rule_str = json.dumps(rule)
-        r = re.findall("\{\{(.*?)\}\}", rule_str)
+        r = re.findall(r"\{\{(.*?)\}\}", rule_str)
         _vars = list(set(r))
         for var in _vars:
             value = value_item[var]
@@ -599,12 +597,12 @@
             action = action.strip('--')
             array = action.split(':')
             command = array[0]
-            expression = str(':').join(array[1:])
+            expression = ':'.join(array[1:])

             if command == 'UPDATE':
                 array_exp = expression.split('=')
                 key = array_exp[0]
-                value = str('=').join(array_exp[1:])
+                value = '='.join(array_exp[1:])
                 if object_type in ['Vulnerabilityweb', 'Vulnerability_web', 'Vulnerability']:
                     self._update_vulnerability(obj, key, value)
@@ -629,7 +627,7 @@
             else:
                 if self.mail_notification:
                     subject = 'Faraday searcher alert'
-                    body = '%s %s have been modified by rule %s at %s' % (
+                    body = '{} {} have been modified by rule {} at {}'.format(
                         object_type, obj.name, rule['id'], str(datetime.utcnow()))
                     self.mail_notification.send_mail(expression, subject, body)
                     logger.info(f"Sending mail to: '{expression}'")
@@ -688,14 +686,14 @@
                 if isinstance(field, str):
                     setattr(vuln, key, value)
                     logger.info(
-                        "Changing property %s to %s in vulnerability '%s' with id %s" % (
+                        "Changing property {} to {} in vulnerability '{}' with id {}".format(
                             key, value, vuln.name, vuln.id))
                 else:
                     self.api.set_array(field, value, add=to_add, key=key, object=vuln)
-                    action = 'Adding %s to %s list in vulnerability %s with id %s' % (
+                    action = 'Adding {} to {} list in vulnerability {} with id {}'.format(
                         value, key, vuln.name, vuln.id)
                     if not to_add:
-                        action = 'Removing %s from %s list in vulnerability %s with id %s' % (
+                        action = 'Removing {} from {} list in vulnerability {} with id {}'.format(
                             value, key, vuln.name, vuln.id)
                     logger.info(action)
@@ -703,7 +701,7 @@
             else:
                 vuln.custom_fields[key] = value
                 logger.info(
-                    "Changing custom field %s to %s in vulnerability '%s' with id %s" % (
+                    "Changing custom field {} to {} in vulnerability '{}' with id {}".format(
                         key, value, vuln.name, vuln.id))

             result = self.api.update_vulnerability(vuln)
@@ -729,13 +727,13 @@
                 if isinstance(field, str):
                     setattr(service, key, value)
                     logger.info(
-                        "Changing property %s to %s in service '%s' with id %s" % (
+                        "Changing property {} to {} in service '{}' with id {}".format(
                             key, value, service.name, service.id))
                 else:
                     self.api.set_array(field, value, add=to_add, key=key, object=service)
                     action = f'Adding {value} to {key} list in service {service.name} with id {service.id}'
                     if not to_add:
-                        action = 'Removing %s from %s list in service %s with id %s' % (
+                        action = 'Removing {} from {} list in service {} with id {}'.format(
                             value, key, service.name, service.id)
                     logger.info(action)
@@ -765,7 +763,7 @@
                     self.api.set_array(field, value, add=to_add, key=key, object=host)
                     action = f'Adding {value} to {key} list in host {host.ip} with id {host.id}'
                     if not to_add:
-                        action = 'Removing %s from %s list in host %s with id %s' % (
+                        action = 'Removing {} from {} list in host {} with id {}'.format(
                             value, key, host.ip, host.id)
                     logger.info(action)
@@ -815,7 +813,7 @@
     signal.signal(signal.SIGINT, signal_handler)

     loglevel = log
-    with open(rules, 'r') as rules_file:
+    with open(rules) as rules_file:
         try:
             rules = json.loads(rules_file.read())
         except Exception:
@@ -877,4 +875,3 @@

 if __name__ == "__main__":
     main()
-# I'm Py3
diff --git a/faraday/searcher/sqlapi.py b/faraday/searcher/sqlapi.py
index 11464cc..b9da9b8 100644
--- a/faraday/searcher/sqlapi.py
+++ b/faraday/searcher/sqlapi.py
@@ -1,4 +1,3 @@
-
 import json
 import logging
 import socket
diff --git a/faraday/searcher/validator.py b/faraday/searcher/validator.py
index aab47e8..bc95a29 100755
--- a/faraday/searcher/validator.py
+++ b/faraday/searcher/validator.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-

 ###
 # Faraday Penetration Test IDE
@@ -83,7 +82,7 @@

 def validate_values(values, rule, rule_id):
-    r = re.findall("\{\{(.*?)\}\}", json.dumps(rule))
+    r = re.findall(r"\{\{(.*?)\}\}", json.dumps(rule))
     _vars = list(set(r))
     keys = []
     for index, item in enumerate(values):
@@ -119,7 +118,7 @@

         if action.startswith('--ALERT:'):
             expression = action.strip('--ALERT:')
-            if expression == '' or re.match("^(.+\@.+\..+)$", expression) is None:
+            if expression == '' or re.match(r"^(.+\@.+\..+)$", expression) is None:
                 return False

         if action.startswith('--EXECUTE:'):
@@ -197,4 +196,3 @@
     logger.info('<-- Rules OK')

     return True
-# I'm Py3
diff --git a/faraday/server/api/base.py b/faraday/server/api/base.py
index fe6d730..e06b5ef 100644
--- a/faraday/server/api/base.py
+++ b/faraday/server/api/base.py
@@ -201,7 +201,7 @@
         """
         return getattr(self.model_class, self.lookup_field)

-    def _validate_object_id(self, object_id):
+    def _validate_object_id(self, object_id, raise_error=True):
         """
         By default, it validates the value of the lookup field set by the
         user in the URL by calling ``self.lookup_field_type(object_id)``.
@@ -211,7 +211,10 @@ try: self.lookup_field_type(object_id) except ValueError: - flask.abort(404, 'Invalid format of lookup field') + if raise_error: + flask.abort(404, 'Invalid format of lookup field') + return False + return True def _get_base_query(self): """Return the initial query all views should use @@ -289,6 +292,22 @@ obj = query.filter(self._get_lookup_field() == object_id).one() except NoResultFound: flask.abort(404, f'Object with id "{object_id}" not found') + return obj + + def _get_objects(self, object_ids, eagerload=False, **kwargs): + """ + Given a list of object_ids and extra route params, get the matching + instances of ``self.model_class`` + """ + object_ids = [object_id for object_id in object_ids if self._validate_object_id(object_id, raise_error=False)] + if eagerload: + query = self._get_eagerloaded_query(**kwargs) + else: + query = self._get_base_query(**kwargs) + try: + obj = query.filter(self._get_lookup_field().in_(object_ids)).all() + except NoResultFound: + return [] return obj def _dump(self, obj, route_kwargs, **kwargs): @@ -1223,6 +1242,89 @@ self._perform_update(object_id, obj, data, partial=True, **kwargs) return self._dump(obj, kwargs), 200 + + +class BulkUpdateMixin: + # This mixin should be merged with UpdateMixin after v2 is removed + + @route('', methods=['PATCH']) + def bulk_update(self, **kwargs): + """ + --- + tags: [{tag_name}] + summary: "Update a group of {class_model} by ids." + responses: + 204: + description: Ok + """ + # TODO BULK_UPDATE_SCHEMA + if not flask.request.json or 'ids' not in flask.request.json: + flask.abort(400) + ids = list(filter(lambda x: type(x) == self.lookup_field_type, flask.request.json['ids'])) + objects = self._get_objects(ids, **kwargs) + context = {'updating': True, 'objects': objects} + data = self._parse_data(self._get_schema_instance(kwargs, context=context, partial=True), + flask.request) + # just in case a schema allows id as writable.
+ data.pop('id', None) + data.pop('ids', None) + + return self._perform_bulk_update(ids, data, **kwargs), 200 + + def _bulk_update_query(self, ids, **kwargs): + # It is better to keep this query as is, but beware of ON CASCADE side effects + return self.model_class.query.filter(self.model_class.id.in_(ids)) + + def _pre_bulk_update(self, data, **kwargs): + return {} + + def _post_bulk_update(self, ids, extracted_data, **kwargs): + pass + + def _perform_bulk_update(self, ids, data, workspace_name=None, **kwargs): + try: + post_bulk_update_data = self._pre_bulk_update(data, **kwargs) + if (len(data) > 0 or len(post_bulk_update_data) > 0) and len(ids) > 0: + queryset = self._bulk_update_query(ids, workspace_name=workspace_name, **kwargs) + updated = queryset.update(data, synchronize_session='fetch') + self._post_bulk_update(ids, post_bulk_update_data, workspace_name=workspace_name) + else: + updated = 0 + db.session.commit() + response = {'updated': updated} + return flask.jsonify(response) + except ValueError as e: + db.session.rollback() + flask.abort(400, ValidationError( + { + 'message': str(e), + } + )) + except sqlalchemy.exc.IntegrityError as ex: + if not is_unique_constraint_violation(ex): + raise + db.session.rollback() + workspace = None + if workspace_name: + workspace = db.session.query(Workspace).filter_by(name=workspace_name).first() + conflict_obj = get_conflict_object(db.session, self.model_class(), data, workspace) + if conflict_obj is not None: + flask.abort(409, ValidationError( + { + 'message': 'Existing value', + 'object': self._get_schema_class()().dump( + conflict_obj), + } + )) + elif len(ids) >= 2: + flask.abort(409, ValidationError( + { + 'message': 'Updating more than one object with unique data', + 'data': data + } + )) + else: + raise class UpdateWorkspacedMixin(UpdateMixin, CommandMixin): @@ -1314,6 +1416,25 @@ return super().patch(object_id, workspace_name=workspace_name) +class BulkUpdateWorkspacedMixin(BulkUpdateMixin): + + @route('', methods=['PATCH']) + def bulk_update(self, workspace_name, **kwargs): + """ + --- + tags: [{tag_name}] + summary: "Update a group of {class_model} by ids." + responses: + 204: + description: Ok + """ + return super().bulk_update(workspace_name=workspace_name) + + def _bulk_update_query(self, ids, **kwargs): + workspace = self._get_workspace(kwargs["workspace_name"]) + return super()._bulk_update_query(ids).filter(self.model_class.workspace_id == workspace.id) + + class DeleteMixin: """Add DELETE // route""" @@ -1339,6 +1460,38 @@ def _perform_delete(self, obj, workspace_name=None): db.session.delete(obj) db.session.commit() + + +class BulkDeleteMixin: + # This mixin should be merged with DeleteMixin after v2 is removed + + @route('', methods=['DELETE']) + def bulk_delete(self, **kwargs): + """ + --- + tags: [{tag_name}] + summary: "Delete a group of {class_model} by ids."
+ responses: + 204: + description: Ok + """ + # TODO BULK_DELETE_SCHEMA + if not flask.request.json or 'ids' not in flask.request.json: + flask.abort(400) + # objs = self._get_objects(flask.request.json['ids'], **kwargs) + # self._perform_bulk_delete(objs, **kwargs) + ids = list(filter(lambda x: type(x) == self.lookup_field_type, flask.request.json['ids'])) + return self._perform_bulk_delete(ids, **kwargs), 200 + + def _bulk_delete_query(self, ids, **kwargs): + # It is better to keep this query as is, but beware of ON CASCADE side effects + return self.model_class.query.filter(self.model_class.id.in_(ids)) + + def _perform_bulk_delete(self, ids, **kwargs): + deleted = self._bulk_delete_query(ids, **kwargs).delete(synchronize_session='fetch') + db.session.commit() + response = {'deleted': deleted} + return flask.jsonify(response) class DeleteWorkspacedMixin(DeleteMixin): @@ -1373,6 +1526,26 @@ return super()._perform_delete(obj, workspace_name) +class BulkDeleteWorkspacedMixin(BulkDeleteMixin): + # This mixin should be merged with DeleteWorkspacedMixin after v2 is removed + + @route('', methods=['DELETE']) + def bulk_delete(self, workspace_name, **kwargs): + """ + --- + tags: [{tag_name}] + summary: "Delete a group of {class_model} by ids." + responses: + 204: + description: Ok + """ + return super().bulk_delete(workspace_name=workspace_name) + + def _bulk_delete_query(self, ids, **kwargs): + workspace = self._get_workspace(kwargs.pop("workspace_name")) + return super()._bulk_delete_query(ids).filter(self.model_class.workspace_id == workspace.id) + + class CountWorkspacedMixin: """Add GET ///count/ route diff --git a/faraday/server/api/modules/activity_feed.py b/faraday/server/api/modules/activity_feed.py index 3604b40..594b4df 100644 --- a/faraday/server/api/modules/activity_feed.py +++ b/faraday/server/api/modules/activity_feed.py @@ -7,7 +7,11 @@ from flask import Blueprint from marshmallow import fields -from faraday.server.api.base import AutoSchema, ReadWriteWorkspacedView, PaginatedMixin +from faraday.server.api.base import ( + AutoSchema, + ReadWriteWorkspacedView, + PaginatedMixin +) from faraday.server.models import Command from faraday.server.schemas import PrimaryKeyRelatedField diff --git a/faraday/server/api/modules/agent.py b/faraday/server/api/modules/agent.py index 777df16..70e26c5 100644 --- a/faraday/server/api/modules/agent.py +++ b/faraday/server/api/modules/agent.py @@ -211,19 +211,9 @@ except NoResultFound: flask.abort(404, f"No such workspace: {workspace_name}") - def _update_object(self, obj, data, **kwargs): - """Perform changes in the selected object - - It modifies the attributes of the SQLAlchemy model to match - the data passed by the Marshmallow schema. - - It is common to overwrite this method to do something strange - with some specific field. Typically the new method should call - this one to handle the update of the rest of the fields. - """ + def _get_workspaces_from_data(self, data, **kwargs): workspace_names = data.pop('workspaces', '') partial = False if 'partial' not in kwargs else kwargs['partial'] - if len(workspace_names) == 0 and not partial: abort( make_response( @@ -238,15 +228,25 @@ 400 ) ) - workspace_names = [ dict_["name"] for dict_ in workspace_names ] - - workspaces = list( + return list( self._get_workspace(workspace_name) for workspace_name in workspace_names ) + + def _update_object(self, obj, data, **kwargs): + """Perform changes in the selected object + + It modifies the attributes of the SQLAlchemy model to match + the data passed by the Marshmallow schema.
+ + It is common to overwrite this method to do something strange + with some specific field. Typically the new method should call + this one to handle the update of the rest of the fields. + """ + workspaces = self._get_workspaces_from_data(data, **kwargs) super()._update_object(obj, data) obj.workspaces = workspaces diff --git a/faraday/server/api/modules/auth.py b/faraday/server/api/modules/auth.py index db540fc..47072ad 100644 --- a/faraday/server/api/modules/auth.py +++ b/faraday/server/api/modules/auth.py @@ -3,8 +3,6 @@ Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) See the file 'doc/LICENSE' for the license information """ -from __future__ import print_function -from __future__ import absolute_import import flask diff --git a/faraday/server/api/modules/commandsrun.py b/faraday/server/api/modules/commandsrun.py index d42d29f..79ca8c8 100644 --- a/faraday/server/api/modules/commandsrun.py +++ b/faraday/server/api/modules/commandsrun.py @@ -10,7 +10,11 @@ from flask_classful import route from marshmallow import fields, post_load, ValidationError -from faraday.server.api.base import AutoSchema, ReadWriteWorkspacedView, PaginatedMixin +from faraday.server.api.base import ( + AutoSchema, + ReadWriteWorkspacedView, + PaginatedMixin +) from faraday.server.models import Command, Workspace from faraday.server.schemas import MutableField, PrimaryKeyRelatedField, SelfNestedField, MetadataSchema diff --git a/faraday/server/api/modules/comments.py b/faraday/server/api/modules/comments.py index 94d9536..cc7297f 100644 --- a/faraday/server/api/modules/comments.py +++ b/faraday/server/api/modules/comments.py @@ -12,7 +12,8 @@ ReadWriteWorkspacedView, InvalidUsage, CreateWorkspacedMixin, - GenericWorkspacedView + GenericWorkspacedView, + BulkDeleteWorkspacedMixin ) from faraday.server.models import Comment comment_api = Blueprint('comment_api', __name__) @@ -52,14 +53,15 @@ return super()._perform_create(data, workspace_name) -class CommentView(CommentCreateMixing, ReadWriteWorkspacedView): +class CommentView(CommentCreateMixing, ReadWriteWorkspacedView, BulkDeleteWorkspacedMixin): route_base = 'comment' model_class = Comment schema_class = CommentSchema order_field = 'create_date' -class UniqueCommentView(GenericWorkspacedView, CommentCreateMixing): +class UniqueCommentView(GenericWorkspacedView, + CommentCreateMixing): """ This view is used by the plugin engine to avoid duplicate comments when the same plugin and data were run multiple times.
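CommentView above is the first view wired to the new BulkDeleteWorkspacedMixin. A minimal, self-contained sketch of the two-layer query composition those mixins rely on — a toy SQLAlchemy model standing in for Faraday's, assuming SQLAlchemy 1.4+:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Comment(Base):  # toy stand-in for faraday.server.models.Comment
    __tablename__ = 'comment'
    id = Column(Integer, primary_key=True)
    workspace_id = Column(Integer, nullable=False)
    text = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Comment(id=i, workspace_id=1 + i % 2, text=f'c{i}')
                     for i in range(1, 5)])
    session.commit()

    # BulkDeleteMixin._bulk_delete_query: filter by primary key only.
    query = session.query(Comment).filter(Comment.id.in_([1, 2, 3]))
    # BulkDeleteWorkspacedMixin then narrows the same query to a single
    # workspace, mirroring the extra workspace_id filter in the diff above.
    query = query.filter(Comment.workspace_id == 1)

    deleted = query.delete(synchronize_session='fetch')
    session.commit()
    print(deleted)  # 1 -> only the matching id inside workspace 1 is removed

The workspace filter composes on top of the id filter, so a request can never delete objects belonging to another workspace even if their ids are listed.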
diff --git a/faraday/server/api/modules/credentials.py b/faraday/server/api/modules/credentials.py index d850c29..3b8f004 100644 --- a/faraday/server/api/modules/credentials.py +++ b/faraday/server/api/modules/credentials.py @@ -11,7 +11,10 @@ ReadWriteWorkspacedView, FilterSetMeta, FilterAlchemyMixin, - InvalidUsage + + InvalidUsage, + BulkDeleteWorkspacedMixin, + BulkUpdateWorkspacedMixin ) from faraday.server.models import Credential, Host, Service, Workspace, db from faraday.server.schemas import MutableField, SelfNestedField, MetadataSchema @@ -109,7 +112,10 @@ operators = (operators.Equal, ) -class CredentialView(FilterAlchemyMixin, ReadWriteWorkspacedView): +class CredentialView(FilterAlchemyMixin, + ReadWriteWorkspacedView, + BulkDeleteWorkspacedMixin, + BulkUpdateWorkspacedMixin): route_base = 'credential' model_class = Credential schema_class = CredentialSchema diff --git a/faraday/server/api/modules/custom_fields.py b/faraday/server/api/modules/custom_fields.py index 6e62235..9b8aa12 100644 --- a/faraday/server/api/modules/custom_fields.py +++ b/faraday/server/api/modules/custom_fields.py @@ -7,7 +7,8 @@ from faraday.server.models import CustomFieldsSchema from faraday.server.api.base import ( AutoSchema, - ReadWriteView + ReadWriteView, + BulkDeleteMixin ) @@ -36,18 +37,21 @@ ) -class CustomFieldsSchemaView(ReadWriteView): +class CustomFieldsSchemaView(ReadWriteView, BulkDeleteMixin): route_base = 'custom_fields_schema' model_class = CustomFieldsSchema schema_class = CustomFieldsSchemaSchema + + def _check_post_only_data(self, data): + for read_only_key in ['field_name', 'table_name', 'field_type']: + data.pop(read_only_key, None) + return data def _update_object(self, obj, data, **kwargs): """ Field name must be read only """ - for read_only_key in ['field_name', 'table_name', 'field_type']: - if read_only_key in data: - data.pop(read_only_key) + data = self._check_post_only_data(data) return super()._update_object(obj, data) diff --git a/faraday/server/api/modules/hosts.py b/faraday/server/api/modules/hosts.py index d9325b3..2c021d9 100644 --- a/faraday/server/api/modules/hosts.py +++ b/faraday/server/api/modules/hosts.py @@ -24,7 +24,10 @@ AutoSchema, FilterAlchemyMixin, FilterSetMeta, - FilterWorkspacedMixin + + FilterWorkspacedMixin, + BulkDeleteWorkspacedMixin, + BulkUpdateWorkspacedMixin ) from faraday.server.schemas import ( MetadataSchema, @@ -137,7 +140,9 @@ class HostsView(PaginatedMixin, FilterAlchemyMixin, ReadWriteWorkspacedView, - FilterWorkspacedMixin): + FilterWorkspacedMixin, + BulkDeleteWorkspacedMixin, + BulkUpdateWorkspacedMixin): route_base = 'hosts' model_class = Host order_field = desc(Host.vulnerability_critical_generic_count),\ @@ -240,7 +245,7 @@ for host_dict in hosts_reader: try: hostnames = parse_hosts(host_dict.pop('hostnames')) - other_fields = {'owned': False, 'mac': u'00:00:00:00:00:00', 'default_gateway_ip': u'None'} + other_fields = {'owned': False, 'mac': '00:00:00:00:00:00', 'default_gateway_ip': 'None'} host_dict.update(other_fields) host = super()._perform_create(host_dict, workspace_name) host.workspace = workspace @@ -399,44 +404,24 @@ or len(hosts)), } - # ### THIS WAS FROM V2 - # TODO SCHEMA - # @route('bulk_delete/', methods=['DELETE']) - # def bulk_delete(self, workspace_name): - # """ - # --- - # delete: - # tags: ["Bulk", "Host"] - # description: Delete hosts in bulk - # responses: - # 200: - # description: Ok - # 400: - # description: Bad request - # 403: - # description: Forbidden - # tags: ["Bulk", "Host"] - # responses: - # 
200: - # description: Ok - # """ - # workspace = self._get_workspace(workspace_name) - # json_request = flask.request.get_json() - # if not json_request: - # flask.abort(400, 'Invalid request. Check the request data or the content type of the request') - # hosts_ids = json_request.get('hosts_ids', []) - # hosts_ids = [host_id for host_id in hosts_ids if isinstance(host_id, int)] - # deleted_hosts = 0 - # if hosts_ids: - # deleted_hosts = Host.query.filter( - # Host.id.in_(hosts_ids), - # Host.workspace_id == workspace.id).delete(synchronize_session='fetch') - # else: - # flask.abort(400, "Invalid request") - # - # db.session.commit() - # response = {'deleted_hosts': deleted_hosts} - # return flask.jsonify(response) + @route('', methods=['DELETE']) + def bulk_delete(self, workspace_name, **kwargs): + # TODO REVISE ORIGINAL METHOD TO UPDATE NEW METHOD + return BulkDeleteWorkspacedMixin.bulk_delete(self, workspace_name, **kwargs) + + bulk_delete.__doc__ = BulkDeleteWorkspacedMixin.bulk_delete.__doc__ + + def _pre_bulk_update(self, data, **kwargs): + hostnames = data.pop('hostnames', None) + ans_data = super()._pre_bulk_update(data, **kwargs) + if hostnames is not None: + ans_data["hostnames"] = hostnames + return ans_data + + def _post_bulk_update(self, ids, extracted_data, **kwargs): + if "hostnames" in extracted_data: + for obj in self._bulk_update_query(ids, **kwargs).all(): + obj.set_hostnames(extracted_data["hostnames"]) HostsView.register(host_api) diff --git a/faraday/server/api/modules/licenses.py b/faraday/server/api/modules/licenses.py index db9f4d1..977c165 100644 --- a/faraday/server/api/modules/licenses.py +++ b/faraday/server/api/modules/licenses.py @@ -7,7 +7,7 @@ from faraday.server.models import License from faraday.server.api.base import ( ReadWriteView, - AutoSchema + AutoSchema, ) from faraday.server.schemas import ( StrictDateTimeField, diff --git a/faraday/server/api/modules/search_filter.py b/faraday/server/api/modules/search_filter.py index 6773e80..a2c2ef5 100644 --- a/faraday/server/api/modules/search_filter.py +++ b/faraday/server/api/modules/search_filter.py @@ -8,7 +8,10 @@ from faraday.server.models import SearchFilter from faraday.server.api.base import ( ReadWriteView, - AutoSchema + + AutoSchema, + BulkDeleteMixin, + BulkUpdateMixin ) searchfilter_api = Blueprint('searchfilter_api', __name__) @@ -24,7 +27,7 @@ 'json_query', 'user_query') -class SearchFilterView(ReadWriteView): +class SearchFilterView(ReadWriteView, BulkDeleteMixin, BulkUpdateMixin): route_base = 'searchfilter' model_class = SearchFilter schema_class = SearchFilterSchema diff --git a/faraday/server/api/modules/services.py b/faraday/server/api/modules/services.py index d075591..8a5f9f8 100644 --- a/faraday/server/api/modules/services.py +++ b/faraday/server/api/modules/services.py @@ -11,7 +11,9 @@ AutoSchema, ReadWriteWorkspacedView, FilterSetMeta, - FilterAlchemyMixin + FilterAlchemyMixin, + BulkDeleteWorkspacedMixin, + BulkUpdateWorkspacedMixin ) from faraday.server.models import Host, Service, Workspace from faraday.server.schemas import ( @@ -76,8 +78,12 @@ # Partial update? 
return data - if host_id != self.context['object'].parent.id: - raise ValidationError('Can\'t change service parent.') + if 'object' in self.context: + if host_id != self.context['object'].parent.id: + raise ValidationError('Can\'t change service parent.') + else: + if any([host_id != obj.parent.id for obj in self.context['objects']]): + raise ValidationError('Can\'t change service parent.') else: if not host_id: @@ -110,7 +116,7 @@ operators = (operators.Equal,) -class ServiceView(FilterAlchemyMixin, ReadWriteWorkspacedView): +class ServiceView(FilterAlchemyMixin, ReadWriteWorkspacedView, BulkDeleteWorkspacedMixin, BulkUpdateWorkspacedMixin): route_base = 'services' model_class = Service diff --git a/faraday/server/api/modules/vulnerability_template.py b/faraday/server/api/modules/vulnerability_template.py index 0dd42fd..50922ec 100644 --- a/faraday/server/api/modules/vulnerability_template.py +++ b/faraday/server/api/modules/vulnerability_template.py @@ -1,7 +1,6 @@ # Faraday Penetration Test IDE # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) # See the file 'doc/LICENSE' for the license information -from builtins import str, bytes from io import TextIOWrapper import json @@ -27,7 +26,9 @@ FilterSetMeta, PaginatedMixin, ReadWriteView, - FilterMixin + FilterMixin, + BulkDeleteMixin, + BulkUpdateMixin ) from faraday.server.schemas import ( @@ -135,7 +136,9 @@ class VulnerabilityTemplateView(PaginatedMixin, FilterAlchemyMixin, ReadWriteView, - FilterMixin): + FilterMixin, + BulkDeleteMixin, + BulkUpdateMixin): route_base = 'vulnerability_template' model_class = VulnerabilityTemplate schema_class = VulnerabilityTemplateSchema diff --git a/faraday/server/api/modules/vulns.py b/faraday/server/api/modules/vulns.py index 7ca0031..acfd47f 100644 --- a/faraday/server/api/modules/vulns.py +++ b/faraday/server/api/modules/vulns.py @@ -20,6 +20,7 @@ from sqlalchemy.orm import aliased, joinedload, selectin_polymorphic, undefer, noload from sqlalchemy.orm.exc import NoResultFound from sqlalchemy import desc, or_, func +from sqlalchemy.inspection import inspect from werkzeug.datastructures import ImmutableMultiDict from depot.manager import DepotManager @@ -34,7 +35,9 @@ PaginatedMixin, ReadWriteWorkspacedView, InvalidUsage, - CountMultiWorkspacedMixin + CountMultiWorkspacedMixin, + BulkDeleteWorkspacedMixin, + BulkUpdateWorkspacedMixin ) from faraday.server.fields import FaradayUploadedFile from faraday.server.models import ( @@ -199,7 +202,7 @@ for file_obj in obj.evidence: try: res[file_obj.filename] = EvidenceSchema().dump(file_obj) - except IOError: + except OSError: logger.warning("File not found. 
Did you move your server?") return res @@ -471,7 +474,9 @@ class VulnerabilityView(PaginatedMixin, FilterAlchemyMixin, ReadWriteWorkspacedView, - CountMultiWorkspacedMixin): + CountMultiWorkspacedMixin, + BulkDeleteWorkspacedMixin, + BulkUpdateWorkspacedMixin): route_base = 'vulns' filterset_class = VulnerabilityFilterSet sort_model_class = VulnerabilityWeb # It has all the fields @@ -1056,48 +1061,6 @@ as_attachment=True, cache_timeout=-1) - @route('bulk_delete/', methods=['DELETE']) - def bulk_delete(self, workspace_name): - """ - --- - delete: - tags: ["Bulk", "Vulnerability"] - description: Delete vulnerabilities in bulk - responses: - 200: - description: Ok - 400: - description: Bad request - 403: - description: Forbidden - tags: ["Bulk", "Vulnerability"] - responses: - 200: - description: Ok - """ - workspace = self._get_workspace(workspace_name) - json_quest = request.get_json() - vulnerability_ids = json_quest.get('vulnerability_ids', []) - vulnerability_severities = json_quest.get('severities', []) - deleted_vulns = 0 - vulns = [] - if vulnerability_ids: - logger.info("Delete Vuln IDs: %s", vulnerability_ids) - vulns = VulnerabilityGeneric.query.filter(VulnerabilityGeneric.id.in_(vulnerability_ids), - VulnerabilityGeneric.workspace_id == workspace.id) - elif vulnerability_severities: - logger.info("Delete Vuln Severities: %s", vulnerability_severities) - vulns = VulnerabilityGeneric.query.filter(VulnerabilityGeneric.severity.in_(vulnerability_severities), - VulnerabilityGeneric.workspace_id == workspace.id) - else: - flask.abort(400, "Invalid Request") - for vuln in vulns: - db.session.delete(vuln) - deleted_vulns += 1 - db.session.commit() - response = {'deleted_vulns': deleted_vulns} - return flask.jsonify(response) - @route('top_users', methods=['GET']) def top_users(self, workspace_name): """ @@ -1130,5 +1093,65 @@ response = {'users': users} return flask.jsonify(response) + @route('', methods=['DELETE']) + def bulk_delete(self, workspace_name, **kwargs): + # TODO BULK_DELETE_SCHEMA + if not flask.request.json or 'severities' not in flask.request.json: + return BulkDeleteWorkspacedMixin.bulk_delete(self, workspace_name, **kwargs) + return self._perform_bulk_delete(flask.request.json['severities'], by='severity', + workspace_name=workspace_name, **kwargs), 200 + bulk_delete.__doc__ = BulkDeleteWorkspacedMixin.bulk_delete.__doc__ + + def _bulk_update_query(self, ids, **kwargs): + # It IS better to as is but warn of ON CASCADE + query = self.model_class.query.filter(self.model_class.id.in_(ids)) + workspace = self._get_workspace(kwargs.pop("workspace_name")) + return query.filter(self.model_class.workspace_id == workspace.id) + + def _bulk_delete_query(self, ids, **kwargs): + # It IS better to as is but warn of ON CASCADE + if kwargs.get("by", "id") != "severity": + query = self.model_class.query.filter(self.model_class.id.in_(ids)) + else: + query = self.model_class.query.filter(self.model_class.severity.in_(ids)) + workspace = self._get_workspace(kwargs.pop("workspace_name")) + return query.filter(self.model_class.workspace_id == workspace.id) + + def _get_model_association_proxy_fields(self): + return [ + field.target_collection + for field in inspect(self.model_class).all_orm_descriptors + if field.extension_type.name == "ASSOCIATION_PROXY" + ] + + def _pre_bulk_update(self, data, **kwargs): + data.pop('type', '') # It's forbidden to change vuln type! 
+ data.pop('tool', '') + data.pop('service_id', '') + data.pop('host_id', '') + # TODO: For now, we don't want to accept multiple attachments; moreover, attachments have their own endpoint + data.pop('_attachments', []) + super()._pre_bulk_update(data, **kwargs) + + model_association_proxy_fields = self._get_model_association_proxy_fields() + association_proxy_fields = {} + for key in list(data): + parent = getattr(VulnerabilityWeb, key).parent + field_name = getattr(parent, "target_collection", None) + if field_name and field_name in model_association_proxy_fields: + association_proxy_fields[key] = data.pop(key) + return association_proxy_fields + + def _post_bulk_update(self, ids, extracted_data, workspace_name, **kwargs): + if extracted_data: + queryset = self._bulk_update_query( + ids, + workspace_name=workspace_name, + **kwargs) + for obj in queryset.all(): + for (key, value) in extracted_data.items(): + setattr(obj, key, value) + db.session.add(obj) + VulnerabilityView.register(vulns_api) diff --git a/faraday/server/api/modules/workspaces.py b/faraday/server/api/modules/workspaces.py index 1a5489f..55f721d 100644 --- a/faraday/server/api/modules/workspaces.py +++ b/faraday/server/api/modules/workspaces.py @@ -1,11 +1,13 @@ # Faraday Penetration Test IDE # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) # See the file 'doc/LICENSE' for the license information +from datetime import timedelta, date + import re -from builtins import str import json import logging +from itertools import groupby import flask from flask import Blueprint, abort, make_response, jsonify @@ -16,21 +18,21 @@ ) from sqlalchemy.orm.exc import NoResultFound - from faraday.server.models import (db, Workspace, _make_vuln_count_property, Vulnerability, _make_active_agents_count_property, count_vulnerability_severities, - _last_run_agent_date) + _last_run_agent_date, + SeveritiesHistogram) from faraday.server.schemas import ( JSTimestampField, MutableField, PrimaryKeyRelatedField, SelfNestedField, ) -from faraday.server.api.base import ReadWriteView, AutoSchema, FilterMixin +from faraday.server.api.base import ReadWriteView, AutoSchema, FilterMixin, BulkDeleteMixin logger = logging.getLogger(__name__) @@ -55,6 +57,14 @@ total_vulns = fields.Integer(dump_only=True, allow_none=False, attribute='vulnerability_total_count') +class HistogramSchema(Schema): + date = fields.Date(dump_only=True, attribute='date') + medium = fields.Integer(dump_only=True, attribute='medium') + high = fields.Integer(dump_only=True, attribute='high') + critical = fields.Integer(dump_only=True, attribute='critical') + confirmed = fields.Integer(dump_only=True, attribute='confirmed') + + class WorkspaceDurationSchema(Schema): start_date = JSTimestampField(attribute='start_date') end_date = JSTimestampField(attribute='end_date') @@ -64,9 +74,9 @@ blacklist = ["filter"] if name in blacklist: raise ValidationError(f"Not possible to create workspace of name: {name}") - if not re.match(r"^[a-z0-9][a-z0-9_$()+-]*$", name): + if not re.match(r"^[a-z0-9][a-z0-9_$()+-]{0,250}$", name): raise ValidationError("The workspace name must validate with the regex " - "^[a-z0-9][a-z0-9_$()+-]*$") + "^[a-z0-9][a-z0-9_$()+-]{0,250}$") class WorkspaceSchema(AutoSchema): @@ -85,13 +95,14 @@ update_date = fields.DateTime(attribute='update_date', dump_only=True) active_agents_count = fields.Integer(dump_only=True) last_run_agent_date = fields.DateTime(dump_only=True, attribute='last_run_agent_date') + histogram = fields.Nested(HistogramSchema(many=True)) class
Meta: model = Workspace fields = ('_id', 'id', 'customer', 'description', 'active', 'duration', 'name', 'public', 'scope', 'stats', 'create_date', 'update_date', 'readonly', - 'active_agents_count', 'last_run_agent_date') + 'active_agents_count', 'last_run_agent_date', 'histogram') @post_load def post_load_duration(self, data, **kwargs): @@ -105,7 +116,67 @@ return data -class WorkspaceView(ReadWriteView, FilterMixin): +def init_date_range(from_day, days): + date_list = [{'date': from_day - timedelta(days=x), + Vulnerability.SEVERITY_MEDIUM: 0, + Vulnerability.SEVERITY_HIGH: 0, + Vulnerability.SEVERITY_CRITICAL: 0, + 'confirmed': 0} for x in range(days)] + return date_list + + +def generate_histogram(from_date, days_before): + histogram_dict = dict() + + workspaces_histograms = SeveritiesHistogram.query \ + .order_by(SeveritiesHistogram.workspace_id.asc(), SeveritiesHistogram.date.asc()).all() + + # group histogram rows by workspace + grouped_histograms_by_ws = groupby(workspaces_histograms, lambda x: x.workspace.name) + + ws_histogram = {} + for ws_name, dates in grouped_histograms_by_ws: + first_date = None + ws_histogram[ws_name] = {} + # convert the rows into a per-date dict + for d in dates: + if first_date is None: + first_date = d.date + ws_histogram[ws_name][d.date] = {Vulnerability.SEVERITY_MEDIUM: d.medium, + Vulnerability.SEVERITY_HIGH: d.high, + Vulnerability.SEVERITY_CRITICAL: d.critical, + 'confirmed': d.confirmed} + + # fix histogram gaps + if (date.today() - first_date).days < days_before: + # move first_date back so the range covers the required number of days + first_date = first_date - timedelta(days=(days_before - (date.today() - first_date).days)) + histogram_dict[ws_name] = [{'date': first_date + timedelta(days=x), + Vulnerability.SEVERITY_MEDIUM: 0, + Vulnerability.SEVERITY_HIGH: 0, + Vulnerability.SEVERITY_CRITICAL: 0, + 'confirmed': 0} + for x in range((date.today() - first_date).days + 1)] + + # accumulate the counters across the required date range + confirmed = high = medium = critical = 0 + for current_workspace_histogram_counters in histogram_dict[ws_name]: + current_date = current_workspace_histogram_counters['date'] + if current_date in ws_histogram[ws_name]: + medium += ws_histogram[ws_name][current_date][Vulnerability.SEVERITY_MEDIUM] + high += ws_histogram[ws_name][current_date][Vulnerability.SEVERITY_HIGH] + critical += ws_histogram[ws_name][current_date][Vulnerability.SEVERITY_CRITICAL] + confirmed += ws_histogram[ws_name][current_date]['confirmed'] + current_workspace_histogram_counters[Vulnerability.SEVERITY_MEDIUM] = medium + current_workspace_histogram_counters[Vulnerability.SEVERITY_HIGH] = high + current_workspace_histogram_counters[Vulnerability.SEVERITY_CRITICAL] = critical + current_workspace_histogram_counters['confirmed'] = confirmed + histogram_dict[ws_name] = histogram_dict[ws_name][-days_before:] + + return histogram_dict + + +class WorkspaceView(ReadWriteView, FilterMixin, BulkDeleteMixin): route_base = 'ws' lookup_field = 'name' lookup_field_type = str @@ -130,7 +201,21 @@ 200: description: Ok """ + histogram = flask.request.args.get('histogram', type=lambda v: v.lower() == 'true') + + if histogram: + today = date.today() + + histogram_days = flask.request.args.get('histogram_days', + type=lambda x: int(x) + if x.isnumeric() and int(x) > 0 + else SeveritiesHistogram.DEFAULT_DAYS_BEFORE, + default=SeveritiesHistogram.DEFAULT_DAYS_BEFORE + ) + histogram_dict = generate_histogram(today, histogram_days) + query = self._get_base_query() + objects = [] for workspace_stat in query: workspace_stat_dict =
dict(workspace_stat) @@ -143,6 +228,13 @@ workspace_stat_dict['scope_raw'] = workspace_stat_dict['scope_raw'].split(',') for scope in workspace_stat_dict['scope_raw']: workspace_stat_dict['scope'].append({'name': scope}) + + if histogram: + if workspace_stat_dict['name'] in histogram_dict: + workspace_stat_dict['histogram'] = histogram_dict[workspace_stat_dict['name']] + else: + workspace_stat_dict['histogram'] = init_date_range(today, histogram_days) + objects.append(workspace_stat_dict) return self._envelope_list(self._dump(objects, kwargs, many=True)) @@ -362,5 +454,9 @@ db.session.commit() return self._get_object(workspace_id).readonly + def _bulk_delete_query(self, ids, **kwargs): + # It is better to keep this query as is, but beware of ON CASCADE side effects + return self.model_class.query.filter(self.model_class.name.in_(ids)) + WorkspaceView.register(workspace_api) diff --git a/faraday/server/app.py b/faraday/server/app.py index 9da6544..11c8371 100644 --- a/faraday/server/app.py +++ b/faraday/server/app.py @@ -102,7 +102,8 @@ # Custom reset password from faraday.server.api.modules.auth import auth # pylint:disable=import-outside-toplevel from faraday.server.websockets import websockets # pylint:disable=import-outside-toplevel - from faraday.server.api.modules.settings_reports import reports_settings_api # pylint:disable=import-outside-toplevel + from faraday.server.api.modules.settings_reports import \ + reports_settings_api # pylint:disable=import-outside-toplevel from faraday.server.api.modules.settings_dashboard import \ dashboard_settings_api # pylint:disable=import-outside-toplevel @@ -269,6 +270,7 @@ user_ip = request.headers.get('X-Forwarded-For', request.remote_addr) user_logout_at = datetime.datetime.utcnow() audit_logger.info(f"User [{user.username}] logged out from IP [{user_ip}] at [{user_logout_at}]") + logger.info(f"User [{user.username}] logged out from IP [{user_ip}] at [{user_logout_at}]") def user_logged_in_succesfull(app, user): @@ -289,6 +291,7 @@ user_ip = request.headers.get('X-Forwarded-For', request.remote_addr) user_login_at = datetime.datetime.utcnow() audit_logger.info(f"User [{user.username}] logged in from IP [{user_ip}] at [{user_login_at}]") + logger.info(f"User [{user.username}] logged in from IP [{user_ip}] at [{user_login_at}]") def uia_username_mapper(identity): @@ -485,6 +488,7 @@ # want to skip the LoginForm validate logic if not super(LoginForm, self).validate(): audit_logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}]") + logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}]") return False self.email.data = remove_null_caracters(self.email.data) @@ -493,6 +497,8 @@ if self.user is None: audit_logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " f"Reason: [Invalid Username]") + logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " + f"Reason: [Invalid Username]") self.email.errors.append(get_message('USER_DOES_NOT_EXIST')[0]) return False @@ -500,12 +506,16 @@ if not self.user.password: audit_logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " f"Reason: [Invalid Password]") + logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " + f"Reason: [Invalid Password]") self.email.errors.append(get_message('USER_DOES_NOT_EXIST')[0]) return False self.password.data = remove_null_caracters(self.password.data) if not
verify_and_update_password(self.password.data, self.user): audit_logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " f"Reason: [Invalid Password]") + logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " + f"Reason: [Invalid Password]") self.email.errors.append(get_message('USER_DOES_NOT_EXIST')[0]) return False # if requires_confirmation(self.user): @@ -514,6 +524,8 @@ if not self.user.is_active: audit_logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " f"Reason: [Disabled Account]") + logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " + f"Reason: [Disabled Account]") self.email.errors.append(get_message('DISABLED_ACCOUNT')[0]) return False return True diff --git a/faraday/server/commands/change_username.py b/faraday/server/commands/change_username.py index 9b39213..c8fcb40 100644 --- a/faraday/server/commands/change_username.py +++ b/faraday/server/commands/change_username.py @@ -23,6 +23,3 @@ print(f"Username {current_username} changed to {new_username}") else: print("Username not changed.") - - -# I'm Py3 diff --git a/faraday/server/commands/custom_fields.py b/faraday/server/commands/custom_fields.py index a8b8b10..4594446 100644 --- a/faraday/server/commands/custom_fields.py +++ b/faraday/server/commands/custom_fields.py @@ -90,4 +90,3 @@ custom_field_data.field_display_name = field_display_name custom_field_data.field_type = field_type db.session.commit() -# I'm Py3 diff --git a/faraday/server/commands/faraday_schema_display.py b/faraday/server/commands/faraday_schema_display.py index a23606f..b149f98 100644 --- a/faraday/server/commands/faraday_schema_display.py +++ b/faraday/server/commands/faraday_schema_display.py @@ -70,4 +70,3 @@ ) graph.write_png('uml_schema.png') # write out the file print("Graph written to file uml_schema.png") -# I'm Py3 diff --git a/faraday/server/commands/import_vulnerability_template.py b/faraday/server/commands/import_vulnerability_template.py index 62f2d98..cd5a299 100644 --- a/faraday/server/commands/import_vulnerability_template.py +++ b/faraday/server/commands/import_vulnerability_template.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import csv import tempfile diff --git a/faraday/server/commands/initdb.py b/faraday/server/commands/initdb.py index 8bb7676..606dbac 100644 --- a/faraday/server/commands/initdb.py +++ b/faraday/server/commands/initdb.py @@ -4,7 +4,6 @@ See the file 'doc/LICENSE' for the license information """ -from builtins import input import getpass import string @@ -337,7 +336,7 @@ print(f'{Fore.BLUE}MAC OS detected{Fore.WHITE}') postgres_command = ['psql', 'postgres'] password = self.generate_random_pw(25) - command = postgres_command + ['-c', 'CREATE ROLE {0} WITH LOGIN PASSWORD \'{1}\';'.format(username, password)] + command = postgres_command + ['-c', f'CREATE ROLE {username} WITH LOGIN PASSWORD \'{password}\';'] p = Popen(command, stderr=psql_log_file, stdout=psql_log_file) # nosec p.wait() psql_log_file.seek(0) diff --git a/faraday/server/commands/manage_settings.py b/faraday/server/commands/manage_settings.py index 1f2e955..41ad011 100644 --- a/faraday/server/commands/manage_settings.py +++ b/faraday/server/commands/manage_settings.py @@ -27,7 +27,7 @@ if name: name = name.lower() available_settings = get_all_settings() - if action in ('show', 'update'): + if action in ('show', 'update', 'clear'): if not name: click.secho(f"You
must indicate a settings name to {action}", fg="red") sys.exit(1) @@ -80,7 +80,9 @@ click.secho("Updated!!", fg='green') else: click.secho("No changes were made to the settings", fg="green") - + elif action == "clear": + click.secho(f"Clear settings for: {name}", fg="green") + settings.delete_configuration() else: click.secho("Available settings:", fg="green") for i in available_settings: diff --git a/faraday/server/commands/nginx_config.py b/faraday/server/commands/nginx_config.py index f6b2a9d..4e27b61 100644 --- a/faraday/server/commands/nginx_config.py +++ b/faraday/server/commands/nginx_config.py @@ -15,7 +15,7 @@ confirm = click.prompt('Confirm [Y/n]', type=bool) if confirm: version = sys.version_info - static_path = f"/opt/faraday/lib/python{version.major}.{version.minor}/site-packages/faraday/server/www/" + static_path = f"/opt/faraday/lib/python{version.major}.{version.minor}/site-packages/faraday/server/www" templates_path = Path(__file__).parent / 'templates' file_loader = FileSystemLoader(templates_path) env = Environment(loader=file_loader, autoescape=True) diff --git a/faraday/server/commands/templates/nginx_config.j2 b/faraday/server/commands/templates/nginx_config.j2 index 13c73e2..2f99636 100644 --- a/faraday/server/commands/templates/nginx_config.j2 +++ b/faraday/server/commands/templates/nginx_config.j2 @@ -9,8 +9,11 @@ ssl_certificate {{ ssl_certificate }}; ssl_certificate_key {{ ssl_key }}; + root {{ static_path }}; + index index.html index.htm; + location /{% if multitenant_url %}{{ multitenant_url }}/{% endif %} { - alias {{ static_path }}; + try_files $uri $uri/ /index.html; } location {% if multitenant_url %}/{{ multitenant_url }}{% endif %}/_api/ { diff --git a/faraday/server/events.py b/faraday/server/events.py index b7e944f..7a8c3e4 100644 --- a/faraday/server/events.py +++ b/faraday/server/events.py @@ -7,9 +7,13 @@ import sys import logging import inspect +from datetime import date from queue import Queue from sqlalchemy import event +from sqlalchemy.dialects import postgresql +from sqlalchemy.orm import Query +from sqlalchemy.orm.attributes import get_history from faraday.server.models import ( Host, @@ -17,6 +21,10 @@ TagObject, Comment, File, + SeveritiesHistogram, + Vulnerability, + VulnerabilityWeb, + VulnerabilityGeneric, ) from faraday.server.models import db @@ -98,6 +106,207 @@ "This should never happen!!!" +def _create_or_update_histogram(connection, workspace_id=None, medium=0, high=0, critical=0, confirmed=0): + if workspace_id is None: + logger.error("Workspace with None value.
Histogram could not be updated") + return + ws_id = SeveritiesHistogram.query.with_entities('id').filter( + SeveritiesHistogram.date == date.today(), + SeveritiesHistogram.workspace_id == workspace_id).first() + if ws_id is None: + connection.execute( + f"INSERT " # nosec + f"INTO severities_histogram (workspace_id, medium, high, critical, date, confirmed) " + f"VALUES ({workspace_id}, {medium}, {high}, {critical}, '{date.today()}', {confirmed})") + else: + connection.execute( + f"UPDATE severities_histogram " # nosec + f"SET medium = medium + {medium}, " + f"high = high + {high}, " + f"critical = critical + {critical}, " + f"confirmed = confirmed + {confirmed} " + f"WHERE id = {ws_id[0]}") + + +def _dicrease_severities_histogram(instance_severity, medium=0, high=0, critical=0): + medium = -1 if instance_severity == Vulnerability.SEVERITY_MEDIUM else medium + high = -1 if instance_severity == Vulnerability.SEVERITY_HIGH else high + critical = -1 if instance_severity == Vulnerability.SEVERITY_CRITICAL else critical + + return medium, high, critical + + +def _increase_severities_histogram(instance_severity, medium=0, high=0, critical=0): + medium = 1 if instance_severity == Vulnerability.SEVERITY_MEDIUM else medium + high = 1 if instance_severity == Vulnerability.SEVERITY_HIGH else high + critical = 1 if instance_severity == Vulnerability.SEVERITY_CRITICAL else critical + + return medium, high, critical + + +def alter_histogram_on_insert(mapper, connection, instance): + if instance.severity in SeveritiesHistogram.SEVERITIES_ALLOWED: + medium, high, critical = _increase_severities_histogram(instance.severity) + confirmed = 1 if instance.confirmed else 0 + + _create_or_update_histogram(connection, + instance.workspace_id, + medium=medium, + high=high, + critical=critical, + confirmed=confirmed) + + +def alter_histogram_on_update(mapper, connection, instance): + alter_histogram_on_update_general(connection, + instance.workspace_id, + status_history=get_history(instance, 'status'), + confirmed_history=get_history(instance, 'confirmed'), + severity_history=get_history(instance, 'severity')) + + +def alter_histogram_on_update_general(connection, workspace_id, status_history=None, + confirmed_history=None, severity_history=None): + + if not status_history or not confirmed_history or not severity_history: + logger.error("Not all history fields provided") + return + + if len(confirmed_history.unchanged) > 0: + confirmed_counter = 0 + confirmed_counter_on_close = -1 if confirmed_history.unchanged[0] is True else 0 + confirmed_counter_on_reopen = 1 if confirmed_history.unchanged[0] is True else 0 + else: + if not confirmed_history.deleted or not confirmed_history.added: + logger.error("Confirmed history deleted or added is None. 
Could not update confirmed value.") + return + if confirmed_history.deleted[0] is True: + confirmed_counter = -1 + confirmed_counter_on_close = confirmed_counter + confirmed_counter_on_reopen = 0 + else: + confirmed_counter = 1 + confirmed_counter_on_close = 0 + confirmed_counter_on_reopen = confirmed_counter + + if len(status_history.unchanged) > 0: + if len(severity_history.unchanged) > 0: + if confirmed_counter != 0 and status_history.unchanged[0] in [Vulnerability.STATUS_OPEN, Vulnerability.STATUS_RE_OPENED]: + _create_or_update_histogram(connection, workspace_id, confirmed=confirmed_counter) + return + medium = high = critical = 0 + if not severity_history.deleted or not severity_history.added: + if confirmed_counter != 0 and status_history.unchanged[0] in [Vulnerability.STATUS_OPEN, Vulnerability.STATUS_RE_OPENED]: + _create_or_update_histogram(connection, workspace_id, confirmed=confirmed_counter) + logger.error("Severity history deleted or added is None. Could not update severity histogram.") + return + + if severity_history.deleted[0] in SeveritiesHistogram.SEVERITIES_ALLOWED: + medium, high, critical = _dicrease_severities_histogram(severity_history.deleted[0]) + + if severity_history.added[0] in SeveritiesHistogram.SEVERITIES_ALLOWED: + medium, high, critical = _increase_severities_histogram(severity_history.added[0], + medium=medium, + high=high, + critical=critical) + _create_or_update_histogram(connection, + workspace_id, + medium=medium, + high=high, + critical=critical, + confirmed=confirmed_counter) + + elif status_history.added[0] in [Vulnerability.STATUS_CLOSED, Vulnerability.STATUS_RISK_ACCEPTED]\ + and status_history.deleted[0] in [Vulnerability.STATUS_OPEN, Vulnerability.STATUS_RE_OPENED]: + if len(severity_history.unchanged) > 0: + severity = severity_history.unchanged[0] + if len(severity_history.deleted) > 0: + severity = severity_history.deleted[0] + if severity in SeveritiesHistogram.SEVERITIES_ALLOWED: + medium, high, critical = _dicrease_severities_histogram(severity) + _create_or_update_histogram(connection, workspace_id, medium=medium, high=high, + critical=critical, confirmed=confirmed_counter_on_close) + elif status_history.added[0] in [Vulnerability.STATUS_OPEN, Vulnerability.STATUS_RE_OPENED] \ + and status_history.deleted[0] in [Vulnerability.STATUS_CLOSED, Vulnerability.STATUS_RISK_ACCEPTED]: + if len(severity_history.unchanged) > 0: + severity = severity_history.unchanged[0] + if len(severity_history.added) > 0: + severity = severity_history.added[0] + if severity in SeveritiesHistogram.SEVERITIES_ALLOWED: + medium, high, critical = _increase_severities_histogram(severity) + _create_or_update_histogram(connection, workspace_id, medium=medium, high=high, + critical=critical, confirmed=confirmed_counter_on_reopen) + elif confirmed_counter != 0: + _create_or_update_histogram(connection, workspace_id, confirmed=confirmed_counter) + + +def alter_histogram_on_delete(mapper, connection, instance): + if instance.status in [Vulnerability.STATUS_OPEN, Vulnerability.STATUS_RE_OPENED]: + confirmed = -1 if instance.confirmed is True else 0 + if instance.severity in SeveritiesHistogram.SEVERITIES_ALLOWED: + medium, high, critical = _dicrease_severities_histogram(instance.severity) + _create_or_update_histogram(connection, instance.workspace_id, + medium=medium, + high=high, + critical=critical, + confirmed=confirmed) + + +def alter_histogram_on_before_compile_delete(query, delete_context): + for desc in query.column_descriptions: + if desc['type'] is Vulnerability 
or \ + desc['type'] is VulnerabilityGeneric or\ + desc['type'] is VulnerabilityWeb: + instances = query.all() + for instance in instances: + if instance.status in [Vulnerability.STATUS_OPEN, Vulnerability.STATUS_RE_OPENED]: + if instance.severity in SeveritiesHistogram.SEVERITIES_ALLOWED: + medium, high, critical = _dicrease_severities_histogram(instance.severity) + _create_or_update_histogram(delete_context.session, + instance.workspace_id, + medium=medium, + high=high, + critical=critical, + confirmed=-1 if instance.confirmed is True else 0) + + +def get_history_from_context_values(context_values, field, old_value): + field_history = type('history_dummy_class', (object,), {'added': [], 'unchanged': [old_value], 'deleted': []})() + if field in context_values: + if context_values[field] != old_value: + field_history.deleted.append(old_value) + field_history.added.append(context_values[field]) + field_history.unchanged.pop() + return field_history + + +def alter_histogram_on_before_compile_update(query, update_context): + for desc in query.column_descriptions: + if desc['type'] is Vulnerability or \ + desc['type'] is VulnerabilityGeneric or\ + desc['type'] is VulnerabilityWeb: + ids = [x[1] for x in filter(lambda x: x[0].startswith("id_"), + query.statement.compile(dialect=postgresql.dialect()).params.items())] + if ids: + # this can raise some issues with counters when other filters were applied to the query, but... + instances = update_context.session.query(VulnerabilityGeneric).filter( + VulnerabilityGeneric.id.in_(ids)).all() + else: + instances = query.all() + + for instance in instances: + status_history = get_history_from_context_values(update_context.values, 'status', instance.status) + severity_history = get_history_from_context_values(update_context.values, 'severity', instance.severity) + confirmed_history = get_history_from_context_values(update_context.values, 'confirmed', + instance.confirmed) + + alter_histogram_on_update_general(update_context.session, + instance.workspace_id, + status_history=status_history, + confirmed_history=confirmed_history, + severity_history=severity_history) + + # register the workspace verification for all objs that have workspace_id for name, obj in inspect.getmembers(sys.modules['faraday.server.models']): if inspect.isclass(obj) and getattr(obj, 'workspace_id', None): @@ -116,3 +325,10 @@ # Update object bindings event.listen(Host, 'after_update', update_object_event) event.listen(Service, 'after_update', update_object_event) + +# Severities Histogram +event.listen(VulnerabilityGeneric, "before_insert", alter_histogram_on_insert, propagate=True) +event.listen(VulnerabilityGeneric, "before_update", alter_histogram_on_update, propagate=True) +event.listen(VulnerabilityGeneric, "after_delete", alter_histogram_on_delete, propagate=True) +event.listen(Query, "before_compile_delete", alter_histogram_on_before_compile_delete) +event.listen(Query, "before_compile_update", alter_histogram_on_before_compile_update) diff --git a/faraday/server/fields.py b/faraday/server/fields.py index 5f55b92..e3b8ae3 100644 --- a/faraday/server/fields.py +++ b/faraday/server/fields.py @@ -4,7 +4,6 @@ See the file 'doc/LICENSE' for the license information """ -from builtins import str import json import imghdr @@ -125,4 +124,3 @@ if value is not None: value = json.loads(value) return value -# I'm Py3 diff --git a/faraday/server/models.py b/faraday/server/models.py index e2d89d7..09cd5a8 100644 --- a/faraday/server/models.py +++ b/faraday/server/models.py @@ -3,11 +3,14 @@ # See
the file 'doc/LICENSE' for the license information import json import logging +import math import operator +import re import string -from datetime import datetime, timedelta +from datetime import datetime, timedelta, date from functools import partial from random import SystemRandom +from typing import Callable from sqlalchemy import ( Boolean, @@ -24,6 +27,7 @@ event, Table, literal, + Date, ) from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import relationship @@ -450,7 +454,7 @@ id = Column(Integer, primary_key=True) name = NonBlankColumn(Text) - host_id = Column(Integer, ForeignKey('host.id'), index=True, nullable=False) + host_id = Column(Integer, ForeignKey('host.id', ondelete='CASCADE'), index=True, nullable=False) host = relationship('Host', backref=backref("hostnames", cascade="all, delete-orphan")) # 1 workspace <--> N hostnames @@ -495,13 +499,21 @@ 'difficult', 'infeasible' ] + + SEVERITY_UNCLASSIFIED = 'unclassified' + SEVERITY_INFORMATIONAL = 'informational' + SEVERITY_LOW = 'low' + SEVERITY_MEDIUM = 'medium' + SEVERITY_HIGH = 'high' + SEVERITY_CRITICAL = 'critical' + SEVERITIES = [ - 'unclassified', - 'informational', - 'low', - 'medium', - 'high', - 'critical', + SEVERITY_UNCLASSIFIED, + SEVERITY_INFORMATIONAL, + SEVERITY_LOW, + SEVERITY_MEDIUM, + SEVERITY_HIGH, + SEVERITY_CRITICAL, ] __abstract__ = True @@ -532,6 +544,34 @@ @property def parent(self): raise NotImplementedError('ABC property called') + + +class SeveritiesHistogram(db.Model): + __tablename__ = "severities_histogram" + + SEVERITIES_ALLOWED = [VulnerabilityABC.SEVERITY_MEDIUM, + VulnerabilityABC.SEVERITY_HIGH, + VulnerabilityABC.SEVERITY_CRITICAL] + + DEFAULT_DAYS_BEFORE = 20 + + id = Column(Integer, primary_key=True) + workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) + workspace = relationship( + 'Workspace', + foreign_keys=[workspace_id], + backref=backref('severities_histogram', cascade="all, delete-orphan") + ) + date = Column(Date, default=date.today(), nullable=False) + medium = Column(Integer, nullable=False) + high = Column(Integer, nullable=False) + critical = Column(Integer, nullable=False) + confirmed = Column(Integer, nullable=False) + + # This method is required by event :_( + @property + def parent(self): + return class CustomAssociationSet(_AssociationSet): @@ -617,7 +657,7 @@ return creator -def _build_associationproxy_creator_non_workspaced(model_class_name): +def _build_associationproxy_creator_non_workspaced(model_class_name, preprocess_value_func: Callable = None): def creator(name, vulnerability): """Get or create a reference/policyviolation/CVE with the corresponding name. 
This is not workspace aware""" @@ -625,6 +665,10 @@ # Ugly hack to avoid the fact that Reference is defined after # Vulnerability model_class = globals()[model_class_name] + + if preprocess_value_func: + name = preprocess_value_func(name) + child = model_class.query.filter( getattr(model_class, 'name') == name, ).first() @@ -684,7 +728,7 @@ object_type = Column(Enum(*OBJECT_TYPES, name='object_types'), nullable=False) command = relationship('Command', backref='command_objects') - command_id = Column(Integer, ForeignKey('command.id'), index=True) + command_id = Column(Integer, ForeignKey('command.id', ondelete='SET NULL'), index=True) # 1 workspace <--> N command_objects # 1 to N (the FK is placed in the child) and bidirectional (backref) @@ -793,7 +837,7 @@ # 1 workspace <--> N commands # 1 to N (the FK is placed in the child) and bidirectional (backref) - workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) + workspace_id = Column(Integer, ForeignKey('workspace.id', ondelete="CASCADE"), index=True, nullable=False) workspace = relationship( 'Workspace', foreign_keys=[workspace_id], @@ -1033,6 +1077,233 @@ raise ValueError("Invalid cve format. Should be CVE-YEAR-NUMBERID.") +class CVSS2GeneralConfig: + VERSION = '2' + PATTERN = 'AV:(?P<access_vector>[LAN])' \ '/AC:(?P<access_complexity>[HML])' \ '/Au:(?P<authentication>[MSN])' \ '/C:(?P<confidentiality>[NPC])' \ '/I:(?P<integrity>[NPC])' \ '/A:(?P<availability>[NPC])' + + # CVSSV2 ENUMS + ACCESS_VECTOR_TYPES = ['L', 'N', 'A'] + ACCESS_COMPLEXITY_TYPES = ['L', 'M', 'H'] + AUTHENTICATION_TYPES = ['N', 'S', 'M'] + IMPACT_TYPES_V2 = ['N', 'P', 'C'] + + # CVSSV2 SCORE + ACCESS_VECTOR_SCORE = {'L': 0.395, 'A': 0.646, 'N': 1.0} + ACCESS_COMPLEXITY_SCORE = {'L': 0.71, 'M': 0.61, 'H': 0.35} + AUTHENTICATION_SCORE = {'N': 0.704, 'S': 0.56, 'M': 0.45} + IMPACT_SCORES_V2 = {'N': 0.0, 'P': 0.275, 'C': 0.660} + + +class CVSS3GeneralConfig: + VERSION = '3' + PATTERN = 'AV:(?P<attack_vector>[LANP])' \ '/AC:(?P<attack_complexity>[HL])' \ '/PR:(?P<privileges_required>[NLH])' \ '/UI:(?P<user_interaction>[NR])' \ '/S:(?P<scope>[UC])' \ '/C:(?P<confidentiality>[NLH])' \ '/I:(?P<integrity>[NLH])' \ '/A:(?P<availability>[NLH])' + + CHANGED = 'C' + UNCHANGED = 'U' + + # CVSSV3 ENUMS + ATTACK_VECTOR_TYPES = ['N', 'A', 'L', 'P'] + ATTACK_COMPLEXITY_TYPES = ['L', 'H'] + PRIVILEGES_REQUIRED_TYPES = ['N', 'L', 'H'] + USER_INTERACTION_TYPES = ['N', 'R'] + SCOPE_TYPES = [UNCHANGED, CHANGED] + IMPACT_TYPES_V3 = ['N', 'L', 'H'] + + # CVSSV3 SCORE + ATTACK_VECTOR_SCORES = {'N': 0.85, 'A': 0.62, 'L': 0.55, 'P': 0.2} + ATTACK_COMPLEXITY_SCORES = {'L': 0.77, 'H': 0.44} + PRIVILEGES_REQUIRED_SCORES = {'U': {'N': 0.85, 'L': 0.62, 'H': 0.27}, + 'C': {'N': 0.85, 'L': 0.68, 'H': 0.5}} + USER_INTERACTION_SCORES = {'N': 0.85, 'R': 0.62} + SCOPE_SCORES = {'U': 6.42, 'C': 7.52} + IMPACT_SCORES_V3 = {'N': 0.0, 'L': 0.22, 'H': 0.56} + + +class CVSSBase(db.Model): + __tablename__ = "cvss_base" + id = Column(Integer, primary_key=True) + version = Column(String(8), nullable=False) + _vector_string = Column('vector_string', String(64)) + _base_score = Column('base_score', Float) + _fixed_base_score = Column('fixed_base_score', Float) + + type = Column(String(24)) + + __mapper_args__ = { + 'polymorphic_on': type, + 'polymorphic_identity': 'base' + } + + @hybrid_property + def vector_string(self): + return self._vector_string + + @vector_string.setter + def vector_string(self, vector_string): + self.assign_vector_string(vector_string) + self.base_score = self.calculate_base_score() + + @hybrid_property + def base_score(self): + if self._base_score is not None: + return self._base_score + return self._fixed_base_score + + @base_score.setter + def
base_score(self, base_score): + self._base_score = base_score + + def assign_vector_string(self, vector_string, base_score): + raise NotImplementedError + + def calculate_base_score(self): + raise NotImplementedError + + def __repr__(self): + return f'{self.vector_string}' + + +class CVSSV2(CVSSBase): + __tablename__ = "cvss_v2" + id = Column(Integer, ForeignKey('cvss_base.id'), primary_key=True) + access_vector = Column(Enum(*CVSS2GeneralConfig.ACCESS_VECTOR_TYPES, name="cvss_access_vector")) + access_complexity = Column(Enum(*CVSS2GeneralConfig.ACCESS_COMPLEXITY_TYPES, name="cvss_access_complexity")) + authentication = Column(Enum(*CVSS2GeneralConfig.AUTHENTICATION_TYPES, name="cvss_authentication")) + confidentiality_impact = Column(Enum(*CVSS2GeneralConfig.IMPACT_TYPES_V2, name="cvss_impact_types_v2")) + integrity_impact = Column(Enum(*CVSS2GeneralConfig.IMPACT_TYPES_V2, name="cvss_impact_types_v2")) + availability_impact = Column(Enum(*CVSS2GeneralConfig.IMPACT_TYPES_V2, name="cvss_impact_types_v2")) + + __mapper_args__ = { + 'polymorphic_identity': "v2" + } + + def __init__(self, base_score: Float = None, vector_string=None, **kwargs): + super().__init__(version=CVSS2GeneralConfig.VERSION, vector_string=vector_string, + _fixed_base_score=base_score, **kwargs) + + def assign_vector_string(self, vector_string): + self._vector_string = vector_string + vector_string_parsed = re.match(CVSS2GeneralConfig.PATTERN, vector_string if vector_string else '') + if vector_string_parsed: + self.access_vector = vector_string_parsed['access_vector'] + self.access_complexity = vector_string_parsed['access_complexity'] + self.authentication = vector_string_parsed['authentication'] + self.confidentiality_impact = vector_string_parsed['confidentiality'] + self.integrity_impact = vector_string_parsed['integrity'] + self.availability_impact = vector_string_parsed['availability'] + else: + self.access_vector = None + self.access_complexity = None + self.authentication = None + self.confidentiality_impact = None + self.integrity_impact = None + self.availability_impact = None + + def exploitability(self): + return 20 * CVSS2GeneralConfig.ACCESS_VECTOR_SCORE[self.access_vector] * CVSS2GeneralConfig.ACCESS_COMPLEXITY_SCORE[self.access_complexity] * CVSS2GeneralConfig.AUTHENTICATION_SCORE[self.authentication] + + def impact(self): + return 10.41 * (1 - (1 - CVSS2GeneralConfig.IMPACT_SCORES_V2[self.confidentiality_impact]) * (1 - CVSS2GeneralConfig.IMPACT_SCORES_V2[self.integrity_impact]) * (1 - CVSS2GeneralConfig.IMPACT_SCORES_V2[self.availability_impact])) + + def fimpact(self): + if self.impact() == 0: + return 0 + return 1.176 + + def calculate_base_score(self): + if re.match(CVSS2GeneralConfig.PATTERN, self.vector_string if self.vector_string else ''): + score = (0.6 * self.impact() + 0.4 * self.exploitability() - 1.5) * self.fimpact() + return round(score, 1) # pylint: disable=round-builtin + return None + + +class CVSSV3(CVSSBase): + __tablename__ = "cvss_v3" + id = Column(Integer, ForeignKey('cvss_base.id'), primary_key=True) + attack_vector = Column(Enum(*CVSS3GeneralConfig.ATTACK_VECTOR_TYPES, name="cvss_attack_vector")) + attack_complexity = Column(Enum(*CVSS3GeneralConfig.ATTACK_COMPLEXITY_TYPES, name="cvss_attack_complexity")) + privileges_required = Column(Enum(*CVSS3GeneralConfig.PRIVILEGES_REQUIRED_TYPES, name="cvss_privileges_required")) + user_interaction = Column(Enum(*CVSS3GeneralConfig.USER_INTERACTION_TYPES, name="cvss_user_interaction")) + scope = 
Column(Enum(*CVSS3GeneralConfig.SCOPE_TYPES, name="cvss_scope")) + confidentiality_impact = Column(Enum(*CVSS3GeneralConfig.IMPACT_TYPES_V3, name="cvss_impact_types_v3")) + integrity_impact = Column(Enum(*CVSS3GeneralConfig.IMPACT_TYPES_V3, name="cvss_impact_types_v3")) + availability_impact = Column(Enum(*CVSS3GeneralConfig.IMPACT_TYPES_V3, name="cvss_impact_types_v3")) + + __mapper_args__ = { + 'polymorphic_identity': "v3" + } + + def __init__(self, base_score: Float = None, vector_string=None, **kwargs): + super().__init__(version=CVSS3GeneralConfig.VERSION, vector_string=vector_string, + _fixed_base_score=base_score, **kwargs) + + def assign_vector_string(self, vector_string): + self._vector_string = vector_string + vector_string_parsed = re.match(CVSS3GeneralConfig.PATTERN, vector_string if vector_string else '') + if vector_string_parsed: + self.attack_vector = vector_string_parsed['attack_vector'] + self.attack_complexity = vector_string_parsed['attack_complexity'] + self.privileges_required = vector_string_parsed['privileges_required'] + self.user_interaction = vector_string_parsed['user_interaction'] + self.scope = vector_string_parsed['scope'] + self.confidentiality_impact = vector_string_parsed['confidentiality'] + self.integrity_impact = vector_string_parsed['integrity'] + self.availability_impact = vector_string_parsed['availability'] + else: + self.attack_vector = None + self.attack_complexity = None + self.privileges_required = None + self.user_interaction = None + self.scope = None + self.confidentiality_impact = None + self.integrity_impact = None + self.availability_impact = None + + def isc_base(self): + return 1 - ((1 - CVSS3GeneralConfig.IMPACT_SCORES_V3[self.confidentiality_impact]) * (1 - CVSS3GeneralConfig.IMPACT_SCORES_V3[self.integrity_impact]) * (1 - CVSS3GeneralConfig.IMPACT_SCORES_V3[self.availability_impact])) + + def impact(self): + if self.scope == CVSS3GeneralConfig.UNCHANGED: + return 6.42 * self.isc_base() + else: + return 7.52 * (self.isc_base() - 0.029) - 3.25 * (self.isc_base() - 0.02) ** 15 + + def exploitability(self): + return 8.22 * CVSS3GeneralConfig.ATTACK_VECTOR_SCORES[self.attack_vector] * CVSS3GeneralConfig.ATTACK_COMPLEXITY_SCORES[self.attack_complexity] * CVSS3GeneralConfig.PRIVILEGES_REQUIRED_SCORES[self.scope][self.privileges_required] * CVSS3GeneralConfig.USER_INTERACTION_SCORES[self.user_interaction] + + def calculate_base_score(self): + if re.match(CVSS3GeneralConfig.PATTERN, self.vector_string if self.vector_string else ''): + score = 10 + if self.impact() <= 0: + return 0.0 + impact_plus_exploitability = self.impact() + self.exploitability() + if self.scope == CVSS3GeneralConfig.UNCHANGED: + if impact_plus_exploitability < 10: + score = impact_plus_exploitability + else: + impact_plus_exploitability = impact_plus_exploitability * 1.08 + if impact_plus_exploitability < 10: + score = impact_plus_exploitability + + # round up score + # Where “Round up” is defined as the smallest number, specified to one decimal place, + # that is equal to or higher than its input. For example, Round up (4.02) is 4.1; and Round up (4.00) is 4.0. 
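+                    # e.g. math.ceil(4.02 * 10) / 10 == 4.1, while Python's round(4.02, 1) == 4.0, which is why round() is not used for CVSSv3 scores.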
+ return math.ceil(score * 10) / 10 + return None + + class Service(Metadata): STATUSES = [ 'open', @@ -1052,7 +1323,7 @@ banner = BlankColumn(Text) - host_id = Column(Integer, ForeignKey('host.id'), index=True, nullable=False) + host_id = Column(Integer, ForeignKey('host.id', ondelete='CASCADE'), index=True, nullable=False) host = relationship( 'Host', foreign_keys=[host_id], @@ -1089,11 +1360,16 @@ class VulnerabilityGeneric(VulnerabilityABC): + STATUS_OPEN = 'open' + STATUS_RE_OPENED = 're-opened' + STATUS_CLOSED = 'closed' + STATUS_RISK_ACCEPTED = 'risk-accepted' + STATUSES = [ - 'open', - 'closed', - 're-opened', - 'risk-accepted' + STATUS_OPEN, + STATUS_CLOSED, + STATUS_RE_OPENED, + STATUS_RISK_ACCEPTED ] VULN_TYPES = [ 'vulnerability', @@ -1122,7 +1398,7 @@ vulnerability_duplicate_id = Column( Integer, - ForeignKey('vulnerability.id'), + ForeignKey('vulnerability.id', ondelete='SET NULL'), index=True, nullable=True, ) @@ -1132,7 +1408,7 @@ vulnerability_template_id = Column( Integer, - ForeignKey('vulnerability_template.id'), + ForeignKey('vulnerability_template.id', ondelete='SET NULL'), index=True, nullable=True, ) @@ -1156,7 +1432,23 @@ cve = association_proxy('cve_instances', 'name', proxy_factory=CustomAssociationSet, - creator=_build_associationproxy_creator_non_workspaced('CVE')) + creator=_build_associationproxy_creator_non_workspaced('CVE', lambda c: c.upper())) + + # TODO: check whether the name should be cvss_v2_id + cvssv2_id = Column( + Integer, + ForeignKey('cvss_v2.id'), + nullable=True + ) + cvssv2 = relationship('CVSSV2', backref=backref('vulnerability_cvssv2')) + + # TODO: check whether the name should be cvss_v3_id + cvssv3_id = Column( + Integer, + ForeignKey('cvss_v3.id'), + nullable=True + ) + cvssv3 = relationship('CVSSV3', backref=backref('vulnerability_cvssv3')) reference_instances = relationship( "Reference", @@ -1250,7 +1542,7 @@ 'host_inner.id = service.host_id')) ) - host_id = Column(Integer, ForeignKey(Host.id), index=True) + host_id = Column(Integer, ForeignKey(Host.id, ondelete='CASCADE'), index=True) host = relationship( 'Host', backref=backref("vulnerabilities", cascade="all, delete-orphan"), @@ -1311,8 +1603,10 @@ @declared_attr def service_id(cls): - return VulnerabilityGeneric.__table__.c.get('service_id', Column(Integer, db.ForeignKey('service.id'), - index=True)) + return VulnerabilityGeneric.__table__.c.get('service_id', + Column(Integer, + db.ForeignKey('service.id', ondelete='CASCADE'), + index=True)) @declared_attr def service(cls): @@ -1341,7 +1635,7 @@ @declared_attr def service_id(cls): return VulnerabilityGeneric.__table__.c.get( - 'service_id', Column(Integer, db.ForeignKey('service.id'), + 'service_id', Column(Integer, db.ForeignKey('service.id', ondelete='CASCADE'), nullable=False)) @declared_attr @@ -1367,7 +1661,7 @@ start_line = Column(Integer, nullable=True) end_line = Column(Integer, nullable=True) - source_code_id = Column(Integer, ForeignKey(SourceCode.id), index=True) + source_code_id = Column(Integer, ForeignKey(SourceCode.id, ondelete='CASCADE'), index=True) source_code = relationship( SourceCode, backref='vulnerabilities', @@ -1430,7 +1724,7 @@ class ReferenceVulnerabilityAssociation(db.Model): __tablename__ = 'reference_vulnerability_association' - vulnerability_id = Column(Integer, ForeignKey('vulnerability.id'), primary_key=True) + vulnerability_id = Column(Integer, ForeignKey('vulnerability.id', ondelete="CASCADE"), primary_key=True) reference_id = Column(Integer, ForeignKey('reference.id'), primary_key=True) reference = 
relationship("Reference", @@ -1447,7 +1741,7 @@ class PolicyViolationVulnerabilityAssociation(db.Model): __tablename__ = 'policy_violation_vulnerability_association' - vulnerability_id = Column(Integer, ForeignKey('vulnerability.id'), primary_key=True) + vulnerability_id = Column(Integer, ForeignKey('vulnerability.id', ondelete="CASCADE"), primary_key=True) policy_violation_id = Column(Integer, ForeignKey('policy_violation.id'), primary_key=True) policy_violation = relationship("PolicyViolation", backref=backref("policy_violation_associations", cascade="all, delete-orphan"), foreign_keys=[policy_violation_id]) @@ -1539,13 +1833,13 @@ description = BlankColumn(Text) name = BlankColumn(Text) - host_id = Column(Integer, ForeignKey(Host.id), index=True, nullable=True) + host_id = Column(Integer, ForeignKey(Host.id, ondelete='CASCADE'), index=True, nullable=True) host = relationship( 'Host', backref=backref("credentials", cascade="all, delete-orphan"), foreign_keys=[host_id]) - service_id = Column(Integer, ForeignKey(Service.id), index=True, nullable=True) + service_id = Column(Integer, ForeignKey(Service.id, ondelete='CASCADE'), index=True, nullable=True) service = relationship( 'Service', backref=backref('credentials', cascade="all, delete-orphan"), @@ -1605,7 +1899,7 @@ 'association_workspace_and_agents_table', db.Model.metadata, Column('workspace_id', Integer, ForeignKey('workspace.id')), - Column('agent_id', Integer, ForeignKey('agent.id')) + Column('agent_id', Integer, ForeignKey('agent.id', ondelete='CASCADE')) ) @@ -1675,7 +1969,17 @@ FROM association_workspace_and_agents_table as assoc JOIN agent ON agent.id = assoc.agent_id and assoc.workspace_id = workspace.id WHERE agent.active is TRUE - ) AS active_agents_count, + ) AS run_agent_date, + (SELECT executor.last_run + FROM executor + JOIN agent ON executor.agent_id = agent.id + JOIN association_workspace_and_agents_table ON + agent.id = association_workspace_and_agents_table.agent_id + and association_workspace_and_agents_table.workspace_id = workspace.id + WHERE executor.last_run is not null + ORDER BY executor.last_run DESC + LIMIT 1 + ) AS last_run_agent_date, p_4.count_3 as open_services, p_4.count_4 as total_service_count, p_5.count_5 as vulnerability_web_count, @@ -1688,6 +1992,8 @@ p_5.count_12 as vulnerability_low_count, p_5.count_13 as vulnerability_informational_count, p_5.count_14 as vulnerability_unclassified_count, + p_5.count_15 as vulnerability_open_count, + p_5.count_16 as vulnerability_confirmed_count, workspace.create_date AS workspace_create_date, workspace.update_date AS workspace_update_date, workspace.id AS workspace_id, @@ -1720,7 +2026,9 @@ COUNT(case when vulnerability.severity = 'medium' then 1 else null end) as count_11, COUNT(case when vulnerability.severity = 'low' then 1 else null end) as count_12, COUNT(case when vulnerability.severity = 'informational' then 1 else null end) as count_13, - COUNT(case when vulnerability.severity = 'unclassified' then 1 else null end) as count_14 + COUNT(case when vulnerability.severity = 'unclassified' then 1 else null end) as count_14, + COUNT(case when vulnerability.status = 'open' OR vulnerability.status='re-opened' then 1 else null end) as count_15, + COUNT(case when vulnerability.confirmed is True then 1 else null end) as count_16 FROM vulnerability RIGHT JOIN workspace w ON vulnerability.workspace_id = w.id WHERE 1=1 {0} @@ -2101,7 +2409,7 @@ text = BlankColumn(Text) - reply_to_id = Column(Integer, ForeignKey('comment.id')) + reply_to_id = Column(Integer, 
ForeignKey('comment.id', ondelete='SET NULL')) reply_to = relationship( 'Comment', remote_side=[id], @@ -2484,7 +2792,7 @@ __tablename__ = 'executor' id = Column(Integer, primary_key=True) name = Column(String, nullable=False) - agent_id = Column(Integer, ForeignKey('agent.id'), index=True, nullable=False) + agent_id = Column(Integer, ForeignKey('agent.id', ondelete='CASCADE'), index=True, nullable=False) agent = relationship( 'Agent', backref=backref('executors', cascade="all, delete-orphan"), @@ -2605,7 +2913,7 @@ backref=backref('agent_executions', cascade="all, delete-orphan") ) parameters_data = Column(JSONType, nullable=False) - command_id = Column(Integer, ForeignKey('command.id'), index=True) + command_id = Column(Integer, ForeignKey('command.id', ondelete='SET NULL'), index=True) command = relationship( 'Command', foreign_keys=[command_id], @@ -2648,7 +2956,7 @@ end = Column(DateTime, nullable=True) rule_id = Column(Integer, ForeignKey('rule.id'), index=True, nullable=False) rule = relationship('Rule', foreign_keys=[rule_id], backref=backref('executions', cascade="all, delete-orphan")) - command_id = Column(Integer, ForeignKey('command.id'), index=True, nullable=False) + command_id = Column(Integer, ForeignKey('command.id', ondelete='CASCADE'), index=True, nullable=False) command = relationship('Command', foreign_keys=[command_id], backref=backref('rule_executions', cascade="all, delete-orphan")) diff --git a/faraday/server/schemas.py b/faraday/server/schemas.py index 3643725..818a284 100644 --- a/faraday/server/schemas.py +++ b/faraday/server/schemas.py @@ -362,5 +362,3 @@ key: value for key, value in data.items() if value } - -# I'm Py3 diff --git a/faraday/server/utils/daemonize.py b/faraday/server/utils/daemonize.py index 52c5646..d479db1 100644 --- a/faraday/server/utils/daemonize.py +++ b/faraday/server/utils/daemonize.py @@ -195,7 +195,7 @@ if not Path(str(FARADAY_SERVER_PID_FILE).format(port)).exists(): return None - with open(str(FARADAY_SERVER_PID_FILE).format(port), 'r') as pid_file: + with open(str(FARADAY_SERVER_PID_FILE).format(port)) as pid_file: # If PID file is badly written, delete it and # assume server is not running try: diff --git a/faraday/server/utils/database.py b/faraday/server/utils/database.py index 4aa1339..b6c760e 100644 --- a/faraday/server/utils/database.py +++ b/faraday/server/utils/database.py @@ -68,9 +68,9 @@ # Add wildcards to both ends of a search term if is_direct_filter_search: - like_str = u'%' + field_filter.get(attribute) + u'%' + like_str = '%' + field_filter.get(attribute) + '%' elif is_free_text_search: - like_str = u'%' + free_text_search + u'%' + like_str = '%' + free_text_search + '%' else: continue @@ -168,7 +168,7 @@ if instance: return instance, False else: - params = dict((k, v) for k, v in kwargs.items() if not isinstance(v, ClauseElement)) + params = {k: v for k, v in kwargs.items() if not isinstance(v, ClauseElement)} params.update(defaults or {}) instance = model(**params) session.add(instance) @@ -186,7 +186,7 @@ else: separator = ',' - res = 'array_to_string(array_agg({0}), \'{1}\')'.format( + res = 'array_to_string(array_agg({}), \'{}\')'.format( compiler.process(element.clauses.clauses[0]), separator, ) diff --git a/faraday/server/utils/debug.py b/faraday/server/utils/debug.py index 163140d..3a06ee2 100644 --- a/faraday/server/utils/debug.py +++ b/faraday/server/utils/debug.py @@ -43,5 +43,3 @@ # uncomment this to see who's calling what # ps.print_callers() debug_logger.debug(s.getvalue()) - -# I'm Py3 diff --git 
a/faraday/server/utils/export.py b/faraday/server/utils/export.py index ff4393b..b840b84 100644 --- a/faraday/server/utils/export.py +++ b/faraday/server/utils/export.py @@ -1,7 +1,9 @@ +# Standard library imports import csv +import logging from io import StringIO, BytesIO -import logging - + +# Local application imports from faraday.server.models import ( db, Comment, @@ -20,7 +22,7 @@ "target", "desc", "status", "hostnames", "comments", "owner", "os", "resolution", "refs", "easeofresolution", "web_vulnerability", "data", "website", "path", "status_code", "request", "response", "method", - "params", "pname", "query", "policyviolations", "external_id", "impact_confidentiality", + "params", "pname", "query", "cve", "policyviolations", "external_id", "impact_confidentiality", "impact_integrity", "impact_availability", "impact_accountability", "update_date" ] @@ -183,6 +185,7 @@ "params": vuln.get('params', None), "pname": vuln.get('pname', None), "query": vuln.get('query', None), + "cve": vuln.get('cve', None), "policyviolations": vuln.get('policyviolations', None), "external_id": vuln.get('external_id', None), "impact_confidentiality": vuln["impact"]["confidentiality"], @@ -204,7 +207,8 @@ # Patch possible formula injection attacks def csv_escape(vuln_dict): for key, value in vuln_dict.items(): - if str(value).startswith('=') or str(value).startswith('+') or str(value).startswith('-') or str(value).startswith('@'): + if str(value).startswith('=') or str(value).startswith('+') or str(value).startswith('-') \ + or str(value).startswith('@'): # Convert value to str just in case it has another type (like a list or # dict). This would be done anyway by the csv writer. vuln_dict[key] = "'" + str(value) diff --git a/faraday/server/utils/filters.py b/faraday/server/utils/filters.py index 9fa57e4..34f8689 100644 --- a/faraday/server/utils/filters.py +++ b/faraday/server/utils/filters.py @@ -24,7 +24,7 @@ from faraday.server.fields import JSONType -VALID_OPERATORS = set(OPERATORS.keys()) - set(['desc', 'asc']) +VALID_OPERATORS = set(OPERATORS.keys()) - {'desc', 'asc'} logger = logging.getLogger(__name__) @@ -270,8 +270,8 @@ an error on PostgreSQL """ if 'group_by' in data and 'order_by' in data: - group_by_fields = set(group_field['field'] for group_field in data['group_by']) - order_by_fields = set(order_field['field'] for order_field in data['order_by']) + group_by_fields = {group_field['field'] for group_field in data['group_by']} + order_by_fields = {order_field['field'] for order_field in data['order_by']} if not order_by_fields.issubset(group_by_fields): logger.error(f'All order fields ({order_by_fields}) must be in group by {group_by_fields}.') raise ValidationError(f'All order fields ({order_by_fields}) must be in group by {group_by_fields}.') diff --git a/faraday/server/utils/logger.py b/faraday/server/utils/logger.py index c513bfb..ecc4c04 100644 --- a/faraday/server/utils/logger.py +++ b/faraday/server/utils/logger.py @@ -78,6 +78,3 @@ setup_logging() - - -# I'm Py3 diff --git a/faraday/server/utils/search.py b/faraday/server/utils/search.py index 2b65f44..69e95aa 100644 --- a/faraday/server/utils/search.py +++ b/faraday/server/utils/search.py @@ -224,7 +224,7 @@ def __repr__(self): """Returns a string representation of this object.""" - return '<Filter {0} {1} {2}>'.format(self.fieldname, self.operator, + return '<Filter {} {} {}>'.format(self.fieldname, self.operator, self.argument or self.otherfield) @staticmethod diff --git a/faraday/server/web.py b/faraday/server/web.py index ccc9c0b..7725412 100644 --- 
a/faraday/server/web.py +++ b/faraday/server/web.py @@ -10,6 +10,7 @@ from twisted.web.resource import Resource, ForbiddenResource from twisted.internet import reactor, error +from twisted.web.server import Site from twisted.web.static import File from twisted.web.util import Redirect from twisted.web.http import proxiedLogFormatter @@ -33,6 +34,14 @@ FARADAY_APP = None logger = logging.getLogger(__name__) + + +class FaradaySite(Site): + def getResourceFor(self, request): + resource = super().getResourceFor(request) + if isinstance(resource, twisted.web.resource.NoResource): + resource = self.resource.getChild("index.html", request) + return resource class CleanHttpHeadersResource(Resource): @@ -131,9 +140,7 @@ self.stop_threads() log_path = CONST_FARADAY_HOME_PATH / 'logs' / 'access-logging.log' - site = twisted.web.server.Site(self.root_resource, - logPath=log_path, - logFormatter=proxiedLogFormatter) + site = FaradaySite(self.root_resource, logPath=log_path, logFormatter=proxiedLogFormatter) site.displayTracebacks = False try: diff --git a/faraday/server/websocket_factories.py b/faraday/server/websocket_factories.py index a087418..93aaaa5 100644 --- a/faraday/server/websocket_factories.py +++ b/faraday/server/websocket_factories.py @@ -106,11 +106,11 @@ return self.factory.join_agent(self, agent) if message['action'] == 'LEAVE_AGENT': with get_app().app_context(): - (agent_id,) = [ + (agent_id,) = ( k for (k, v) in connected_agents.items() if v == self - ] + ) agent = Agent.query.get(agent_id) assert agent is not None # TODO the agent could be deleted here return self.factory.leave_agent(self, agent) @@ -120,11 +120,11 @@ logger.warning(f'Missing executor_name param in message: {message}') return True - (agent_id,) = [ + (agent_id,) = ( k for (k, v) in connected_agents.items() if v == self - ] + ) agent = Agent.query.get(agent_id) assert agent is not None # TODO the agent could be deleted here diff --git a/faraday/settings/base.py b/faraday/settings/base.py index 7256757..34897e7 100644 --- a/faraday/settings/base.py +++ b/faraday/settings/base.py @@ -44,6 +44,13 @@ settings_config.update(query.value) settings_config = self.clear_configuration(settings_config) return settings_config + + def delete_configuration(self): + from faraday.server.web import get_app # pylint: disable=import-outside-toplevel + with get_app().app_context(): + db.session.query(Configuration).filter(Configuration.key == self.settings_key).delete() + db.session.commit() + self.__class__.value.fget.cache_clear() def get_default_config(self): return {} diff --git a/faraday/settings/exceptions.py b/faraday/settings/exceptions.py index 4c2a641..81a793d 100644 --- a/faraday/settings/exceptions.py +++ b/faraday/settings/exceptions.py @@ -1,4 +1,3 @@ - class MissingConfigurationError(Exception): """Raised when setting configuration is missing""" pass diff --git a/pynixify/packages/faraday-agent-parameters-types/default.nix b/pynixify/packages/faraday-agent-parameters-types/default.nix index 26899ac..bd379dc 100644 --- a/pynixify/packages/faraday-agent-parameters-types/default.nix +++ b/pynixify/packages/faraday-agent-parameters-types/default.nix @@ -14,7 +14,7 @@ pname = "faraday-agent-parameters-types"; version = - "1.0.2"; + "1.0.3"; src = fetchPypi { @@ -23,7 +23,7 @@ pname = "faraday_agent_parameters_types"; sha256 = - "0dw2s7lyg9s1qjj6yrn5hxpasbb32qg89pcfsv4vv47yla9djzyc"; + "1rz0mrpgg529fd7ppi9fkpgvmfriwamlx0ah10637hvpnjfncmb1"; }; buildInputs = diff --git a/pynixify/packages/faraday-plugins/default.nix 
b/pynixify/packages/faraday-plugins/default.nix index acf3e2b..4ea402a 100644 --- a/pynixify/packages/faraday-plugins/default.nix +++ b/pynixify/packages/faraday-plugins/default.nix @@ -21,7 +21,7 @@ pname = "faraday-plugins"; version = - "1.5.5"; + "1.5.9"; src = fetchPypi { @@ -29,7 +29,7 @@ pname version; sha256 = - "1dw7j8zfa8j9m0qcpyzl6k0z29k3j5i90lyfg2zapwkpalkgx0d1"; + "0wkwan2vg7np0z1pwskpgwgnxxk9d46ffq1a3f5ai12ibwj9kwgh"; }; propagatedBuildInputs = diff --git a/pynixify/packages/faradaysec/default.nix b/pynixify/packages/faradaysec/default.nix index ca9dac3..2e1bdde 100644 --- a/pynixify/packages/faradaysec/default.nix +++ b/pynixify/packages/faradaysec/default.nix @@ -65,7 +65,7 @@ pname = "faradaysec"; version = - "3.18.1"; + "3.19.0"; src = lib.cleanSource diff --git a/pynixify/packages/marshmallow-sqlalchemy/default.nix b/pynixify/packages/marshmallow-sqlalchemy/default.nix index 15d10d9..7855416 100644 --- a/pynixify/packages/marshmallow-sqlalchemy/default.nix +++ b/pynixify/packages/marshmallow-sqlalchemy/default.nix @@ -13,7 +13,7 @@ pname = "marshmallow-sqlalchemy"; version = - "0.26.1"; + "0.27.0"; src = fetchPypi { @@ -21,7 +21,7 @@ pname version; sha256 = - "0wval5lqak31zwrzmgi9c919lqk0dw1zxvwihif4nmaivrs5ylnq"; + "0za0zl1vyphx2pnf2zcwbjp1lzqkdi2gcf1saa668i24aqlv288m"; }; propagatedBuildInputs = diff --git a/pynixify/packages/python-socketio/default.nix b/pynixify/packages/python-socketio/default.nix index 0bd1bd4..67e744d 100644 --- a/pynixify/packages/python-socketio/default.nix +++ b/pynixify/packages/python-socketio/default.nix @@ -13,7 +13,7 @@ pname = "python-socketio"; version = - "5.4.1"; + "5.5.0"; src = fetchPypi { @@ -21,7 +21,7 @@ pname version; sha256 = - "1c17cvm91map3rbgl5156y6zwzz2wyqvm31298a23d7bvwyjfkpg"; + "02ygri5qaw7ynqlnimn3b0arl6r5bh6wyc0dl4gq389ap2hjx5yf"; }; propagatedBuildInputs = diff --git a/requirements.txt b/requirements.txt index 93d8893..4c71014 100644 --- a/requirements.txt +++ b/requirements.txt @@ -33,12 +33,12 @@ simplekv>=0.13.0 Flask-KVSession-fork>=0.6.3 distro>=1.4.0 -faraday-plugins>=1.4.4,<2.0.0 -apispec>=4.0.0 +faraday-plugins>=1.5.9,<2.0.0 +apispec>=4.0.0,<5.0.0 apispec-webframeworks>=0.5.0 pyyaml Flask-SocketIO>=5.0.1 pyotp>=2.6.0 Flask-Limiter Flask-Mail -faraday_agent_parameters_types>=1.0.0 +faraday_agent_parameters_types>=1.0.3 diff --git a/setup.py b/setup.py index 02554ff..bde6a8e 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,6 @@ # It ensures open() defaults to text mode with universal newlines, # and accepts an argument to specify the text encoding # Python 3 only projects can skip this import -from io import open from re import search # Get the long description from the README file @@ -25,7 +24,7 @@ To read about the latest features check out the [release notes](https://github.com/infobyte/faraday/blob/master/RELEASE.md)!""" -with open('faraday/__init__.py', 'rt', encoding='utf8') as f: +with open('faraday/__init__.py', encoding='utf8') as f: version = search(r'__version__ = \'(.*?)\'', f.read()).group(1) # Taken from https://stackoverflow.com/questions/14399534/reference-requirements-txt-for-the-install-requires-kwarg-in-setuptools-setup-py/14399775#14399775 diff --git a/tests/conftest.py b/tests/conftest.py index 7160b41..be5e98c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -299,5 +299,3 @@ def csrf_token(logged_user, test_client): session_response = test_client.get('/session') return session_response.json.get('csrf_token') - -# I'm Py3 diff --git a/tests/factories.py b/tests/factories.py index 
d6f98df..ad13dc1 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -4,7 +4,6 @@ See the file 'doc/LICENSE' for the license information ''' -from builtins import chr, range import random import string @@ -54,7 +53,7 @@ Action, RuleAction, Condition, - Role + Role, RuleExecution ) @@ -133,6 +132,7 @@ class WorkspaceFactory(FaradayFactory): name = FuzzyText(chars=string.ascii_lowercase + string.digits) + description = FuzzyText() creator = factory.SubFactory(UserFactory) class Meta: @@ -339,7 +339,6 @@ host = factory.SubFactory(HostFactory, workspace=factory.SelfAttribute('..workspace')) service = factory.SubFactory(ServiceFactory, workspace=factory.SelfAttribute('..workspace')) - description = FuzzyText() type = "vulnerability" @classmethod @@ -669,4 +668,13 @@ model = RuleAction sqlalchemy_session = db.session + +class RuleExecutionFactory(FaradayFactory): + rule = factory.SubFactory(RuleFactory) + command = factory.SubFactory(CommandFactory) + + class Meta: + model = RuleExecution + sqlalchemy_session = db.session + # I'm Py3 diff --git a/tests/models/test_file.py b/tests/models/test_file.py index abf8c4f..470073d 100644 --- a/tests/models/test_file.py +++ b/tests/models/test_file.py @@ -56,4 +56,3 @@ assert len(vulnerability_web.evidence) == 1 assert vulnerability_web.evidence[0].object_type == 'vulnerability' assert vulnerability_web.evidence[0].object_id == vulnerability_web.id -# I'm Py3 diff --git a/tests/models/test_host.py b/tests/models/test_host.py index 906211b..d624285 100644 --- a/tests/models/test_host.py +++ b/tests/models/test_host.py @@ -85,8 +85,8 @@ session.commit() assert len(session.new) == len(session.deleted) == len( session.dirty) == 0 - assert set(new_value) == set(hn.name for hn in - host_with_hostnames.hostnames) + assert set(new_value) == {hn.name for hn in + host_with_hostnames.hostnames} def test_all(self, host, session): a = Hostname(workspace=host.workspace, host=host, name='a') @@ -107,7 +107,7 @@ assert c.name == 'c' session.commit() - assert set(hn.name for hn in host.hostnames) == {'b', 'c'} + assert {hn.name for hn in host.hostnames} == {'b', 'c'} def test_change_one(self, host, session): hn = Hostname(workspace=host.workspace, diff --git a/tests/models/test_vulnerability.py b/tests/models/test_vulnerability.py index e135a96..4300032 100644 --- a/tests/models/test_vulnerability.py +++ b/tests/models/test_vulnerability.py @@ -251,4 +251,3 @@ session.commit() assert vulnerability.creator_command_id == command.id assert vulnerability.creator_command_tool == command.tool -# I'm Py3 diff --git a/tests/test_api_agent.py b/tests/test_api_agent.py index daf7246..a41e5bb 100644 --- a/tests/test_api_agent.py +++ b/tests/test_api_agent.py @@ -6,7 +6,8 @@ from unittest import mock -from posixpath import join as urljoin +from posixpath import join +from urllib.parse import urljoin import pyotp import pytest @@ -241,10 +242,10 @@ assert '405' in exc_info.value.args[0] def workspaced_url(self, workspace, obj=None): - url = API_PREFIX + workspace.name + '/' + self.api_endpoint + url = urljoin(API_PREFIX, f"{workspace.name}{self.api_endpoint}") if obj is not None: id_ = str(obj.id) if isinstance(obj, self.model) else str(obj) - url += u'/' + id_ + url = urljoin(url, id_) return url def create_raw_agent(self, active=False, token="TOKEN", @@ -394,7 +395,7 @@ }, } res = test_client.post( - self.url(agent) + 'run/', + join(self.url(agent), 'run'), json=payload ) assert res.status_code == 404 @@ -452,7 +453,7 @@ 'csrf_token': csrf_token } res = test_client.post( - 
urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), json=payload ) assert res.status_code == 400 @@ -474,7 +475,7 @@ } res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), json=payload ) @@ -485,7 +486,7 @@ session.add(agent) session.commit() res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), data='[" broken]"{' ) assert res.status_code == 400 @@ -507,7 +508,7 @@ ('content-type', 'text/html'), ] res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), data=payload, headers=headers) assert res.status_code == 400 @@ -526,7 +527,7 @@ }, } res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), json=payload ) assert res.status_code == 400 @@ -553,7 +554,7 @@ }, } res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), json=payload ) assert res.status_code == 200 @@ -582,7 +583,7 @@ }, } res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), json=payload ) assert res.status_code == 400 @@ -597,7 +598,7 @@ 'executorData': '[][dassa', } res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), json=payload ) assert res.status_code == 400 @@ -611,7 +612,7 @@ 'executorData': '', } res = test_client.post( - urljoin(self.url(agent), 'run'), + join(self.url(agent), 'run'), json=payload ) assert res.status_code == 400 @@ -620,5 +621,5 @@ agent = AgentFactory.create(workspaces=[self.workspace]) session.add(agent) session.commit() - res = test_client.get(urljoin(self.url(), 'get_manifests')) + res = test_client.get(join(self.url(), 'get_manifests')) assert res.status_code == 200 diff --git a/tests/test_api_auth.py b/tests/test_api_auth.py index 23cc1b5..90cceca 100644 --- a/tests/test_api_auth.py +++ b/tests/test_api_auth.py @@ -4,7 +4,6 @@ See the file 'doc/LICENSE' for the license information ''' -from builtins import str import base64 import pytest diff --git a/tests/test_api_commands.py b/tests/test_api_commands.py index cd44a85..ad9f30f 100644 --- a/tests/test_api_commands.py +++ b/tests/test_api_commands.py @@ -1,4 +1,3 @@ -# -*- coding: utf8 -*- ''' Faraday Penetration Test IDE Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) @@ -51,21 +50,21 @@ assert res.status_code == 200 assert 'commands' in res.json for command in res.json['commands']: - assert set([u'id', u'key', u'value']) == set(command.keys()) + assert {'id', 'key', 'value'} == set(command.keys()) object_properties = [ - u'_id', - u'command', - u'duration', - u'hostname', - u'ip', - u'itime', - u'params', - u'user', - u'workspace', - u'tool', - u'import_source', - u'creator', - u'metadata' + '_id', + 'command', + 'duration', + 'hostname', + 'ip', + 'itime', + 'params', + 'user', + 'workspace', + 'tool', + 'import_source', + 'creator', + 'metadata' ] assert command['value']['workspace'] == self.workspace.name assert set(object_properties) == set(command['value'].keys()) @@ -97,32 +96,32 @@ assert res.status_code == 200 assert list(filter(lambda stats: stats['_id'] == command.id, res.json)) == [ - {u'_id': command.id, - u'command': command.command, - u'import_source': u'shell', - u'user': command.user, - u'date': time.mktime(command.start_date.timetuple()) * 1000, - u'params': command.params, - u'tool': command.tool, - u'hosts_count': 1, - u'services_count': 0, - u'vulnerabilities_count': 1, - u'criticalIssue': 0}] + {'_id': command.id, + 'command': command.command, + 
'import_source': 'shell', + 'user': command.user, + 'date': time.mktime(command.start_date.timetuple()) * 1000, + 'params': command.params, + 'tool': command.tool, + 'hosts_count': 1, + 'services_count': 0, + 'vulnerabilities_count': 1, + 'criticalIssue': 0}] assert list(filter(lambda stats: stats['_id'] == another_command.id, res.json)) == [{ - u'_id': another_command.id, - u'command': another_command.command, - u'import_source': u'shell', - u'tool': another_command.tool, - u'user': another_command.user, - u'date': time.mktime( + '_id': another_command.id, + 'command': another_command.command, + 'import_source': 'shell', + 'tool': another_command.tool, + 'user': another_command.user, + 'date': time.mktime( another_command.start_date.timetuple()) * 1000, - u'params': another_command.params, - u'hosts_count': 0, - u'services_count': 0, - u'vulnerabilities_count': 0, - u'criticalIssue': 0}] + 'params': another_command.params, + 'hosts_count': 0, + 'services_count': 0, + 'vulnerabilities_count': 0, + 'criticalIssue': 0}] def test_verify_created_critical_vulns_is_correctly_showing_sum_values(self, session, test_client): workspace = WorkspaceFactory.create() @@ -153,17 +152,17 @@ res = test_client.get(urljoin(self.url(workspace=command.workspace), 'activity_feed')) assert res.status_code == 200 assert res.json == [ - {u'_id': command.id, - u'command': command.command, - u'import_source': u'shell', - u'tool': command.tool, - u'user': command.user, - u'date': time.mktime(command.start_date.timetuple()) * 1000, - u'params': command.params, - u'hosts_count': 1, - u'services_count': 0, - u'vulnerabilities_count': 2, - u'criticalIssue': 1} + {'_id': command.id, + 'command': command.command, + 'import_source': 'shell', + 'tool': command.tool, + 'user': command.user, + 'date': time.mktime(command.start_date.timetuple()) * 1000, + 'params': command.params, + 'hosts_count': 1, + 'services_count': 0, + 'vulnerabilities_count': 2, + 'criticalIssue': 1} ] def test_verify_created_vulns_with_host_and_service_verification(self, session, test_client): @@ -202,17 +201,17 @@ res = test_client.get(urljoin(self.url(workspace=command.workspace), 'activity_feed')) assert res.status_code == 200 assert res.json == [{ - u'_id': command.id, - u'command': command.command, - u'import_source': u'shell', - u'tool': command.tool, - u'user': command.user, - u'date': time.mktime(command.start_date.timetuple()) * 1000, - u'params': command.params, - u'hosts_count': 1, - u'services_count': 1, - u'vulnerabilities_count': 2, - u'criticalIssue': 1} + '_id': command.id, + 'command': command.command, + 'import_source': 'shell', + 'tool': command.tool, + 'user': command.user, + 'date': time.mktime(command.start_date.timetuple()) * 1000, + 'params': command.params, + 'hosts_count': 1, + 'services_count': 1, + 'vulnerabilities_count': 2, + 'criticalIssue': 1} ] def test_multiple_commands_executed_with_same_objects_found(self, session, test_client): @@ -270,47 +269,47 @@ raw_first_command = list(filter(lambda comm: comm['_id'] == commands[0].id, res.json)) assert raw_first_command.pop() == { - u'_id': first_command.id, - u'command': first_command.command, - u'import_source': u'shell', - u'user': first_command.user, - u'date': time.mktime(first_command.start_date.timetuple()) * 1000, - u'params': first_command.params, - u'hosts_count': 1, - u'services_count': 0, - u'vulnerabilities_count': 1, - u'tool': first_command.tool, - u'criticalIssue': 0 + '_id': first_command.id, + 'command': first_command.command, + 'import_source': 'shell', + 
'user': first_command.user, + 'date': time.mktime(first_command.start_date.timetuple()) * 1000, + 'params': first_command.params, + 'hosts_count': 1, + 'services_count': 0, + 'vulnerabilities_count': 1, + 'tool': first_command.tool, + 'criticalIssue': 0 } for in_the_middle_command in in_the_middle_commands: raw_in_the_middle_command = list(filter(lambda comm: comm['_id'] == in_the_middle_command.id, res.json)) - assert raw_in_the_middle_command.pop() == {u'_id': in_the_middle_command.id, - u'command': in_the_middle_command.command, - u'import_source': u'shell', - u'user': in_the_middle_command.user, - u'date': time.mktime( + assert raw_in_the_middle_command.pop() == {'_id': in_the_middle_command.id, + 'command': in_the_middle_command.command, + 'import_source': 'shell', + 'user': in_the_middle_command.user, + 'date': time.mktime( in_the_middle_command.start_date.timetuple()) * 1000, - u'params': in_the_middle_command.params, - u'hosts_count': 0, - u'tool': in_the_middle_command.tool, - u'services_count': 0, - u'vulnerabilities_count': 0, - u'criticalIssue': 0} + 'params': in_the_middle_command.params, + 'hosts_count': 0, + 'tool': in_the_middle_command.tool, + 'services_count': 0, + 'vulnerabilities_count': 0, + 'criticalIssue': 0} # new command must create new service and vuln raw_last_command = list(filter(lambda comm: comm['_id'] == last_command.id, res.json)) - assert raw_last_command.pop() == {u'_id': last_command.id, - u'command': last_command.command, - u'import_source': u'shell', - u'user': last_command.user, - u'date': time.mktime(last_command.start_date.timetuple()) * 1000, - u'params': last_command.params, - u'hosts_count': 0, - u'tool': last_command.tool, - u'services_count': 1, - u'vulnerabilities_count': 1, - u'criticalIssue': 0} + assert raw_last_command.pop() == {'_id': last_command.id, + 'command': last_command.command, + 'import_source': 'shell', + 'user': last_command.user, + 'date': time.mktime(last_command.start_date.timetuple()) * 1000, + 'params': last_command.params, + 'hosts_count': 0, + 'tool': last_command.tool, + 'services_count': 1, + 'vulnerabilities_count': 1, + 'criticalIssue': 0} @pytest.mark.usefixtures('ignore_nplusone') def test_sub_second_command_returns_correct_duration_value(self, test_client): @@ -370,7 +369,7 @@ 'hostname': 'mandarina', 'ip': '192.168.20.53', 'itime': 1511387720.048548, - 'params': u'/home/lcubo/.faraday/report/airbnb/nessus_report_Remote.nessus', + 'params': '/home/lcubo/.faraday/report/airbnb/nessus_report_Remote.nessus', 'user': 'lcubo' } @@ -388,7 +387,7 @@ 'hostname': 'mandarina', 'ip': '192.168.20.53', 'itime': start_date.timestamp(), - 'params': u'/home/lcubo/.faraday/report/airbnb/nessus_report_Remote.nessus', + 'params': '/home/lcubo/.faraday/report/airbnb/nessus_report_Remote.nessus', 'user': 'lcubo' } @@ -435,7 +434,7 @@ 'hostname': 'mandarina', 'ip': '192.168.20.53', 'itime': 1511387720000.048548, - 'params': u'/home/lcubo/.faraday/report/airbnb/nessus_report_Remote.nessus', + 'params': '/home/lcubo/.faraday/report/airbnb/nessus_report_Remote.nessus', 'user': 'lcubo' } diff --git a/tests/test_api_comment.py b/tests/test_api_comment.py index bbccfa6..70f383b 100644 --- a/tests/test_api_comment.py +++ b/tests/test_api_comment.py @@ -8,11 +8,11 @@ from faraday.server.api.modules.comments import CommentView from faraday.server.models import Comment from tests.factories import ServiceFactory -from tests.test_api_workspaced_base import ReadWriteAPITests +from tests.test_api_workspaced_base import ReadWriteAPITests, 
BulkDeleteTestsMixin from tests import factories -class TestCommentAPIGeneric(ReadWriteAPITests): +class TestCommentAPIGeneric(ReadWriteAPITests, BulkDeleteTestsMixin): model = Comment factory = factories.CommentFactory view_class = CommentView @@ -65,13 +65,13 @@ raw_comment = self._create_raw_comment('service', service.id) res = test_client.post(self.url(workspace=second_workspace), data=raw_comment) assert res.status_code == 400 - assert res.json == {u'message': u"Can't comment object of another workspace"} + assert res.json == {'message': "Can't comment object of another workspace"} def test_cannot_create_comment_of_inexistent_object(self, test_client, session): raw_comment = self._create_raw_comment('service', 456464556) res = test_client.post(self.url(workspace=self.workspace), data=raw_comment) assert res.status_code == 400 - assert res.json == {u'message': u"Can't comment inexistent object"} + assert res.json == {'message': "Can't comment inexistent object"} def test_create_unique_comment_for_plugins(self, session, test_client): """ @@ -122,3 +122,18 @@ get_comments = test_client.get(self.url(workspace=workspace)) expected = ['first', 'second', 'third', 'fourth'] assert expected == [comment['text'] for comment in get_comments.json] + + def test_bulk_delete_with_references(self, session, test_client): + previous_count = session.query(Comment).count() + comment_first = factories.CommentFactory.create(workspace=self.workspace, text='first') + comment_second = factories.CommentFactory.create(workspace=self.workspace, text='second', reply_to=comment_first) + _ = factories.CommentFactory.create(workspace=self.workspace, text='third', reply_to=comment_second) + comment_fourth = factories.CommentFactory.create(workspace=self.workspace, text='fourth') + session.commit() + + data = {'ids': [comment_first.id, comment_fourth.id]} + res = test_client.delete(self.url(), data=data) + + assert res.status_code == 200 + assert res.json['deleted'] == 2 + assert previous_count + 2 == session.query(Comment).count() diff --git a/tests/test_api_credentials.py b/tests/test_api_credentials.py index e3b971f..7adaeeb 100644 --- a/tests/test_api_credentials.py +++ b/tests/test_api_credentials.py @@ -4,19 +4,22 @@ See the file 'doc/LICENSE' for the license information ''' +from urllib.parse import urljoin import pytest from tests import factories from tests.test_api_workspaced_base import ( ReadWriteAPITests, + BulkUpdateTestsMixin, + BulkDeleteTestsMixin ) from faraday.server.api.modules.credentials import CredentialView from faraday.server.models import Credential from tests.factories import HostFactory, ServiceFactory -class TestCredentialsAPIGeneric(ReadWriteAPITests): +class TestCredentialsAPIGeneric(ReadWriteAPITests, BulkUpdateTestsMixin, BulkDeleteTestsMixin): model = Credential factory = factories.CredentialFactory view_class = CredentialView @@ -32,19 +35,19 @@ assert res.status_code == 200 assert 'rows' in res.json for vuln in res.json['rows']: - assert set([u'_id', u'id', u'key', u'value']) == set(vuln.keys()) + assert {'_id', 'id', 'key', 'value'} == set(vuln.keys()) object_properties = [ - u'_id', - u'couchdbid', - u'description', - u'metadata', - u'name', - u'owner', - u'password', - u'username', - u'host_ip', - u'service_name', - u'target' + '_id', + 'couchdbid', + 'description', + 'metadata', + 'name', + 'owner', + 'password', + 'username', + 'host_ip', + 'service_name', + 'target' ] expected = set(object_properties) result = set(vuln['value'].keys()) @@ -105,19 +108,19 @@ credential = 
self.factory.create(host=host, service=None, workspace=self.workspace) session.commit() - res = test_client.get(self.url(workspace=credential.workspace) + f'?host_id={credential.host.id}') + res = test_client.get(urljoin(self.url(workspace=credential.workspace), f'?host_id={credential.host.id}')) assert res.status_code == 200 assert [cred['value']['parent'] for cred in res.json['rows']] == [credential.host.id] - assert [cred['value']['parent_type'] for cred in res.json['rows']] == [u'Host'] + assert [cred['value']['parent_type'] for cred in res.json['rows']] == ['Host'] def test_get_credentials_for_a_service_backwards_compatibility(self, session, test_client): service = ServiceFactory.create() credential = self.factory.create(service=service, host=None, workspace=service.workspace) session.commit() - res = test_client.get(self.url(workspace=credential.workspace) + f'?service={credential.service.id}') + res = test_client.get(urljoin(self.url(workspace=credential.workspace), f'?service={credential.service.id}')) assert res.status_code == 200 assert [cred['value']['parent'] for cred in res.json['rows']] == [credential.service.id] - assert [cred['value']['parent_type'] for cred in res.json['rows']] == [u'Service'] + assert [cred['value']['parent_type'] for cred in res.json['rows']] == ['Service'] def _generate_raw_update_data(self, name, username, password, parent_id): return { @@ -179,9 +182,9 @@ res = test_client.put(self.url(credential, workspace=credential.workspace), data=raw_data) assert res.status_code == 200 - assert res.json['username'] == u'Username2' - assert res.json['password'] == u'Password3' - assert res.json['name'] == u'Name1' + assert res.json['username'] == 'Username2' + assert res.json['password'] == 'Password3' + assert res.json['name'] == 'Name1' @pytest.mark.parametrize("parent_type, parent_factory", [ ("Host", HostFactory), @@ -255,11 +258,11 @@ ] # Desc order - response = test_client.get(self.url(workspace=second_workspace) + "?sort=target&sort_dir=desc") + response = test_client.get(urljoin(self.url(workspace=second_workspace), "?sort=target&sort_dir=desc")) assert response.status_code == 200 assert sorted(credentials_target, reverse=True) == [v['value']['target'] for v in response.json['rows']] # Asc order - response = test_client.get(self.url(workspace=second_workspace) + "?sort=target&sort_dir=asc") + response = test_client.get(urljoin(self.url(workspace=second_workspace), "?sort=target&sort_dir=asc")) assert response.status_code == 200 assert sorted(credentials_target) == [v['value']['target'] for v in response.json['rows']] diff --git a/tests/test_api_custom_fields.py b/tests/test_api_custom_fields.py index ae07609..42b88d2 100644 --- a/tests/test_api_custom_fields.py +++ b/tests/test_api_custom_fields.py @@ -1,7 +1,7 @@ import pytest from tests.factories import CustomFieldsSchemaFactory -from tests.test_api_non_workspaced_base import ReadWriteAPITests +from tests.test_api_non_workspaced_base import ReadWriteAPITests, BulkDeleteTestsMixin from faraday.server.api.modules.custom_fields import CustomFieldsSchemaView from faraday.server.models import ( @@ -10,14 +10,14 @@ @pytest.mark.usefixtures('logged_user') -class TestVulnerabilityCustomFields(ReadWriteAPITests): +class TestVulnerabilityCustomFields(ReadWriteAPITests, BulkDeleteTestsMixin): model = CustomFieldsSchema factory = CustomFieldsSchemaFactory api_endpoint = 'custom_fields_schema' # unique_fields = ['ip'] # update_fields = ['ip', 'description', 'os'] view_class = CustomFieldsSchemaView - patchable_fields 
= ['field_name'] + patchable_fields = ['field_display_name'] def test_custom_fields_data(self, session, test_client): add_text_field = CustomFieldsSchemaFactory.create( @@ -32,9 +32,9 @@ res = test_client.get(self.url()) assert res.status_code == 200 - assert {u'table_name': u'vulnerability', u'id': add_text_field.id, u'field_type': u'text', - u'field_name': u'cvss', u'field_display_name': u'CVSS', u'field_metadata': None, - u'field_order': 1} in res.json + assert {'table_name': 'vulnerability', 'id': add_text_field.id, 'field_type': 'text', + 'field_name': 'cvss', 'field_display_name': 'CVSS', 'field_metadata': None, + 'field_order': 1} in res.json def test_custom_fields_field_name_cant_be_changed(self, session, test_client): add_text_field = CustomFieldsSchemaFactory.create( @@ -48,11 +48,11 @@ session.commit() data = { - u'field_name': u'cvss 2', - u'field_type': 'int', - u'table_name': 'sarasa', - u'field_display_name': u'CVSS new', - u'field_order': 1 + 'field_name': 'cvss 2', + 'field_type': 'int', + 'table_name': 'sarasa', + 'field_display_name': 'CVSS new', + 'field_order': 1 } res = test_client.put(self.url(add_text_field.id), data=data) assert res.status_code == 200 @@ -78,6 +78,6 @@ res = test_client.get(self.url()) assert res.status_code == 200 - assert {u'table_name': u'vulnerability', u'id': add_choice_field.id, u'field_type': u'choice', - u'field_name': u'gender', u'field_display_name': u'Gender', u'field_metadata': "['Male', 'Female']", - u'field_order': 1} in res.json + assert {'table_name': 'vulnerability', 'id': add_choice_field.id, 'field_type': 'choice', + 'field_name': 'gender', 'field_display_name': 'Gender', 'field_metadata': "['Male', 'Female']", + 'field_order': 1} in res.json diff --git a/tests/test_api_hosts.py b/tests/test_api_hosts.py index 1fcb190..d92150f 100644 --- a/tests/test_api_hosts.py +++ b/tests/test_api_hosts.py @@ -6,11 +6,11 @@ ''' import operator from io import BytesIO -from posixpath import join as urljoin +from posixpath import join import pytz -from urllib.parse import urlencode +from urllib.parse import urlencode, urljoin from random import choice from sqlalchemy.orm.util import was_deleted from hypothesis import given, strategies as st @@ -22,10 +22,12 @@ API_PREFIX, ReadWriteAPITests, PaginationTestsMixin, + BulkUpdateTestsMixin, + BulkDeleteTestsMixin ) from faraday.server.models import db, Host, Hostname from faraday.server.api.modules.hosts import HostsView -from tests.factories import HostFactory, EmptyCommandFactory, WorkspaceFactory +from tests.factories import HostFactory, EmptyCommandFactory, WorkspaceFactory, HostnameFactory HOSTS_COUNT = 5 SERVICE_COUNT = [10, 5] # 10 services to the first host, 5 to the second @@ -61,20 +63,20 @@ def url(self, host=None, workspace=None): workspace = workspace or self.workspace - url = API_PREFIX + workspace.name + '/hosts' + url = join(API_PREFIX + workspace.name, 'hosts') if host is not None: - url += '/' + str(host.id) + url = join(url, str(host.id)) return url def services_url(self, host, workspace=None): - return self.url(host, workspace) + '/services' + return join(self.url(host, workspace), 'services') def compare_results(self, hosts, response): """ Compare is the hosts in response are the same that in hosts. 
It only compares the IDs of each one, not other fields""" - hosts_in_list = set(host.id for host in hosts) - hosts_in_response = set(host['id'] for host in response.json['rows']) + hosts_in_list = {host.id for host in hosts} + hosts_in_response = {host['id'] for host in response.json['rows']} assert hosts_in_list == hosts_in_response def test_list_retrieves_all_items_from_workspace(self, test_client, @@ -279,7 +281,7 @@ host_factory.create_batch(5, workspace=second_workspace, os='Unix') session.commit() - url = self.url() + '?os=Unix' + url = urljoin(self.url(), '?os=Unix') res = test_client.get(url) assert res.status_code == 200 self.compare_results(hosts, res) @@ -297,7 +299,7 @@ host_factory.create_batch(5, workspace=second_workspace, os='Unix') session.commit() - res = test_client.get(urljoin(self.url(), 'filter?q={"filters":[{"name": "os", "op":"eq", "val":"Unix"}]}')) + res = test_client.get(join(self.url(), 'filter?q={"filters":[{"name": "os", "op":"eq", "val":"Unix"}]}')) assert res.status_code == 200 self.compare_results(hosts, res) @@ -311,7 +313,7 @@ host_factory.create_batch(5, workspace=second_workspace, os='Unix') session.commit() - res = test_client.get(urljoin(self.url(), 'filter?q={"filters":[{"name": "os", "op":"eq", "val":"Unix"}],' + res = test_client.get(join(self.url(), 'filter?q={"filters":[{"name": "os", "op":"eq", "val":"Unix"}],' '"offset":0, "limit":20}')) assert res.status_code == 200 assert res.json['count'] == 30 @@ -321,7 +323,7 @@ host_factory.create_batch(10, workspace=workspace, os='Unix') host_factory.create_batch(1, workspace=workspace, os='unix') session.commit() - res = test_client.get(urljoin(self.url(), 'filter?q={"filters":[{"name": "os", "op": "like", "val": "%nix"}], ' + res = test_client.get(join(self.url(), 'filter?q={"filters":[{"name": "os", "op": "like", "val": "%nix"}], ' '"group_by":[{"field": "os"}], "order_by":[{"field": "os", "direction": "desc"}]}')) assert res.status_code == 200 assert len(res.json['rows']) == 2 @@ -346,11 +348,11 @@ host_factory.create_batch(5, workspace=second_workspace, os='Unix') session.commit() - res = test_client.get(self.url() + '?os__like=Unix %') + res = test_client.get(urljoin(self.url(), '?os__like=Unix %')) assert res.status_code == 200 self.compare_results(hosts, res) - res = test_client.get(self.url() + '?os__ilike=Unix %') + res = test_client.get(urljoin(self.url(), '?os__ilike=Unix %')) assert res.status_code == 200 self.compare_results(hosts + [case_insensitive_host], res) @@ -372,7 +374,7 @@ host_factory.create_batch(5, workspace=second_workspace, os='Unix') session.commit() - res = test_client.get(urljoin( + res = test_client.get(join( self.url(), 'filter?q={"filters":[{"name": "os", "op":"like", "val":"Unix %"}]}' ) @@ -380,7 +382,7 @@ assert res.status_code == 200 self.compare_results(hosts, res) - res = test_client.get(urljoin( + res = test_client.get(join( self.url(), 'filter?q={"filters":[{"name": "os", "op":"ilike", "val":"Unix %"}]}' ) @@ -398,10 +400,10 @@ host_factory.create_batch(5, workspace=workspace) session.commit() - res = test_client.get(self.url() + '?service=IRC') - assert res.status_code == 200 - shown_hosts_ids = set(obj['id'] for obj in res.json['rows']) - expected_host_ids = set(host.id for host in hosts) + res = test_client.get(urljoin(self.url(), '?service=IRC')) + assert res.status_code == 200 + shown_hosts_ids = {obj['id'] for obj in res.json['rows']} + expected_host_ids = {host.id for host in hosts} assert shown_hosts_ids == expected_host_ids 
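+        # Note: urllib.parse.urljoin merges only the query component onto the base path (with an illustrative path, urljoin('/v3/ws/foo/hosts', '?service=IRC') == '/v3/ws/foo/hosts?service=IRC'), while posixpath.join is used to append path segments such as 'filter'.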
@pytest.mark.usefixtures('ignore_nplusone') @@ -417,14 +419,14 @@ session.commit() res = test_client.get( - urljoin( + join( self.url(), 'filter?q={"filters":[{"name": "services__name", "op":"any", "val":"IRC"}]}' ) ) assert res.status_code == 200 - shown_hosts_ids = set(obj['id'] for obj in res.json['rows']) - expected_host_ids = set(host.id for host in hosts) + shown_hosts_ids = {obj['id'] for obj in res.json['rows']} + expected_host_ids = {host.id for host in hosts} assert shown_hosts_ids == expected_host_ids def test_filter_by_service_port(self, test_client, session, workspace, @@ -436,10 +438,10 @@ host_factory.create_batch(5, workspace=workspace) session.commit() - res = test_client.get(self.url() + '?port=25') - assert res.status_code == 200 - shown_hosts_ids = set(obj['id'] for obj in res.json['rows']) - expected_host_ids = set(host.id for host in hosts) + res = test_client.get(urljoin(self.url(), '?port=25')) + assert res.status_code == 200 + shown_hosts_ids = {obj['id'] for obj in res.json['rows']} + expected_host_ids = {host.id for host in hosts} assert shown_hosts_ids == expected_host_ids @pytest.mark.usefixtures('ignore_nplusone') @@ -453,14 +455,14 @@ session.commit() res = test_client.get( - urljoin( + join( self.url(), 'filter?q={"filters":[{"name": "services__port", "op":"any", "val":"25"}]}' ) ) assert res.status_code == 200 - shown_hosts_ids = set(obj['id'] for obj in res.json['rows']) - expected_host_ids = set(host.id for host in hosts) + shown_hosts_ids = {obj['id'] for obj in res.json['rows']} + expected_host_ids = {host.id for host in hosts} assert shown_hosts_ids == expected_host_ids @pytest.mark.usefixtures('ignore_nplusone') @@ -479,7 +481,7 @@ session.commit() res = test_client.get( - urljoin( + join( self.url(), f'filter?q={{"filters":[{{"name": "ip", "op":"eq", "val":"{host.ip}"}}]}}' ) @@ -505,7 +507,7 @@ host_factory.create_batch(5, workspace=workspace) session.commit() - res = test_client.get(self.url() + '?port=invalid_port') + res = test_client.get(urljoin(self.url(), '?port=invalid_port')) assert res.status_code == 200 assert res.json['count'] == 0 @@ -520,7 +522,7 @@ session.commit() res = test_client.get( - urljoin( + join( self.url(), 'filter?q={"filters":[{"name": "services__port", "op":"any", "val":"sarasa"}]}' ) @@ -529,7 +531,7 @@ def test_filter_restless_by_invalid_field(self, test_client): res = test_client.get( - urljoin( + join( self.url(), 'filter?q={"filters":[{"name": "severity", "op":"any", "val":"sarasa"}]}' ) @@ -538,20 +540,20 @@ @pytest.mark.usefixtures('ignore_nplusone') def test_filter_restless_with_no_q_param(self, test_client, session, workspace, host_factory): - res = test_client.get(urljoin(self.url(), 'filter')) + res = test_client.get(join(self.url(), 'filter')) assert res.status_code == 200 assert len(res.json['rows']) == HOSTS_COUNT @pytest.mark.usefixtures('ignore_nplusone') def test_filter_restless_with_empty_q_param(self, test_client, session, workspace, host_factory): - res = test_client.get(urljoin(self.url(), 'filter?q')) + res = test_client.get(join(self.url(), 'filter?q')) assert res.status_code == 400 def test_search_ip(self, test_client, session, workspace, host_factory): host = host_factory.create(ip="longname", workspace=workspace) session.commit() - res = test_client.get(self.url() + '?search=ONGNAM') + res = test_client.get(urljoin(self.url(), '?search=ONGNAM')) assert res.status_code == 200 assert len(res.json['rows']) == 1 assert res.json['rows'][0]['id'] == host.id @@ -564,10 +566,10 @@ 
service_factory.create(host=host, name="GOPHER 5", workspace=workspace) session.commit() - res = test_client.get(self.url() + '?search=gopher') - assert res.status_code == 200 - shown_hosts_ids = set(obj['id'] for obj in res.json['rows']) - expected_host_ids = set(host.id for host in expected_hosts) + res = test_client.get(urljoin(self.url(), '?search=gopher')) + assert res.status_code == 200 + shown_hosts_ids = {obj['id'] for obj in res.json['rows']} + expected_host_ids = {host.id for host in expected_hosts} assert shown_hosts_ids == expected_host_ids @pytest.mark.usefixtures('host_with_hostnames') @@ -576,10 +578,10 @@ for host in expected_hosts: host.set_hostnames(['staging.twitter.com']) session.commit() - res = test_client.get(self.url() + '?search=twitter') - assert res.status_code == 200 - shown_hosts_ids = set(obj['id'] for obj in res.json['rows']) - expected_host_ids = set(host.id for host in expected_hosts) + res = test_client.get(urljoin(self.url(), '?search=twitter')) + assert res.status_code == 200 + shown_hosts_ids = {obj['id'] for obj in res.json['rows']} + expected_host_ids = {host.id for host in expected_hosts} assert shown_hosts_ids == expected_host_ids def test_host_with_open_vuln_count_verification(self, test_client, session, @@ -614,7 +616,7 @@ vulnerability_factory.create(service=service, host=None, workspace=workspace) session.commit() - res = test_client.get(urljoin(self.url(host), 'services')) + res = test_client.get(join(self.url(host), 'services')) assert res.status_code == 200 assert res.json[0]['vulns'] == 1 @@ -649,9 +651,9 @@ } res = test_client.put(self.url(host_with_hostnames), data=data) assert res.status_code == 200 - expected = set(["other.com", "test.com"]) + expected = {"other.com", "test.com"} assert set(res.json['hostnames']) == expected - assert set(hn.name for hn in host_with_hostnames.hostnames) == expected + assert {hn.name for hn in host_with_hostnames.hostnames} == expected def test_create_host_with_default_gateway(self, test_client): raw_data = { @@ -701,43 +703,43 @@ assert res.status_code == 200 updated_host = Host.query.filter_by(id=host.id).first() assert res.json == { - u'_id': host.id, - u'type': u'Host', - u'_rev': u'', - u'credentials': 0, - u'default_gateway': '', - u'description': u'', - u'hostnames': [], - u'id': host.id, - u'ip': u'10.31.112.21', - u'mac': '', - u'metadata': { - u'command_id': None, - u'create_time': pytz.UTC.localize(updated_host.create_date).isoformat(), - u'creator': u'', - u'owner': host.creator.username, - u'update_action': 0, - u'update_controller_action': u'', - u'update_time': pytz.UTC.localize(updated_host.update_date).isoformat(), - u'update_user': None}, - u'name': u'10.31.112.21', - u'os': u'Microsoft Windows Server 2008 R2 Standard Service Pack 1', - u'owned': False, - u'owner': host.creator.username, - u'services': 0, - u'service_summaries': [], - u'vulns': 0, - u"versions": [], - u'important': False, - u'severity_counts': { - u'critical': None, - u'high': None, - u'host_id': host.id, - u'info': None, - u'med': None, - u'low': None, - u'total': None, - u'unclassified': None + '_id': host.id, + 'type': 'Host', + '_rev': '', + 'credentials': 0, + 'default_gateway': '', + 'description': '', + 'hostnames': [], + 'id': host.id, + 'ip': '10.31.112.21', + 'mac': '', + 'metadata': { + 'command_id': None, + 'create_time': pytz.UTC.localize(updated_host.create_date).isoformat(), + 'creator': '', + 'owner': host.creator.username, + 'update_action': 0, + 'update_controller_action': '', + 'update_time': 
pytz.UTC.localize(updated_host.update_date).isoformat(), + 'update_user': None}, + 'name': '10.31.112.21', + 'os': 'Microsoft Windows Server 2008 R2 Standard Service Pack 1', + 'owned': False, + 'owner': host.creator.username, + 'services': 0, + 'service_summaries': [], + 'vulns': 0, + "versions": [], + 'important': False, + 'severity_counts': { + 'critical': None, + 'high': None, + 'host_id': host.id, + 'info': None, + 'med': None, + 'low': None, + 'total': None, + 'unclassified': None } } @@ -762,36 +764,31 @@ assert res.json['hosts_with_errors'] == 0 assert session.query(Host).filter_by(description="test_host").count() == expected_created_hosts - @pytest.mark.skip("This was a v2 test, will be reimplemented") def test_bulk_delete_hosts(self, test_client, session): - ws = WorkspaceFactory.create(name="abc") - host_1 = HostFactory.create(workspace=ws) - host_2 = HostFactory.create(workspace=ws) + host_1 = HostFactory.create(workspace=self.workspace) + host_2 = HostFactory.create(workspace=self.workspace) session.commit() hosts_ids = [host_1.id, host_2.id] - request_data = {'hosts_ids': hosts_ids} - - delete_response = test_client.delete(f'/v3/ws/{ws.name}/hosts/bulk_delete', data=request_data) - - deleted_hosts = delete_response.json['deleted_hosts'] + request_data = {'ids': hosts_ids} + + delete_response = test_client.delete(self.url(), data=request_data) + + deleted_hosts = delete_response.json['deleted'] host_count_after_delete = db.session.query(Host).filter( Host.id.in_(hosts_ids), - Host.workspace_id == ws.id).count() + Host.workspace_id == self.workspace.id).count() assert delete_response.status_code == 200 assert deleted_hosts == len(hosts_ids) assert host_count_after_delete == 0 - @pytest.mark.skip("This was a v2 test, will be reimplemented") def test_bulk_delete_hosts_without_hosts_ids(self, test_client): - ws = WorkspaceFactory.create(name="abc") - request_data = {'hosts_ids': []} - delete_response = test_client.delete(f'/v3/ws/{ws.name}/hosts/bulk_delete', data=request_data) + request_data = {'ids': []} + delete_response = test_client.delete(self.url(), data=request_data) assert delete_response.status_code == 400 - @pytest.mark.skip("This was a v2 test, will be reimplemented") def test_bulk_delete_hosts_from_another_workspace(self, test_client, session): workspace_1 = WorkspaceFactory.create(name='workspace_1') host_of_ws_1 = HostFactory.create(workspace=workspace_1) @@ -800,21 +797,20 @@ session.commit() # Try to delete workspace_2's host from workspace_1 - request_data = {'hosts_ids': [host_of_ws_2.id]} - url = f'/v3/ws/{workspace_1.name}/hosts/bulk_delete' + request_data = {'ids': [host_of_ws_2.id]} + url = f'/v3/ws/{workspace_1.name}/hosts' delete_response = test_client.delete(url, data=request_data) - assert delete_response.json['deleted_hosts'] == 0 - - @pytest.mark.skip("This was a v2 test, will be reimplemented") + assert delete_response.status_code == 200 + assert delete_response.json['deleted'] == 0 + def test_bulk_delete_hosts_invalid_characters_in_request(self, test_client): ws = WorkspaceFactory.create(name="abc") - request_data = {'hosts_ids': [-1, 'test']} - delete_response = test_client.delete(f'/v3/ws/{ws.name}/hosts/bulk_delete', data=request_data) - - assert delete_response.json['deleted_hosts'] == 0 - - @pytest.mark.skip("This was a v2 test, will be reimplemented") + request_data = {'ids': [-1, 'test']} + delete_response = test_client.delete(f'/v3/ws/{ws.name}/hosts', data=request_data) + + assert delete_response.json['deleted'] == 0 + def test_bulk_delete_hosts_wrong_content_type(self, 
test_client, session): ws = WorkspaceFactory.create(name="abc") host_1 = HostFactory.create(workspace=ws) @@ -822,18 +818,42 @@ session.commit() hosts_ids = [host_1.id, host_2.id] - request_data = {'hosts_ids': hosts_ids} + request_data = {'ids': hosts_ids} headers = [('content-type', 'text/xml')] delete_response = test_client.delete( - f'/v3/ws/{ws.name}/hosts/bulk_delete', + f'/v3/ws/{ws.name}/hosts', data=request_data, headers=headers) assert delete_response.status_code == 400 - -class TestHostAPIGeneric(ReadWriteAPITests, PaginationTestsMixin): + def test_bulk_delete_with_references(self, test_client, session, workspace, host_factory, vulnerability_factory, + service_factory, credential_factory): + host_1 = host_factory.create(workspace=workspace) + service_factory.create(host=host_1, workspace=workspace) + vulnerability_factory.create(service=None, host=host_1, workspace=workspace) + host_1.hostnames.append(HostnameFactory.create(name='pepe1', workspace=workspace, host=host_1)) + credential_factory.create(workspace=workspace, host=host_1) + + host_2 = host_factory.create(workspace=workspace) + service_factory.create(host=host_2, workspace=workspace) + vulnerability_factory.create(service=None, host=host_2, workspace=workspace) + host_2.hostnames.append(HostnameFactory.create(name='pepe2', workspace=workspace, host=host_2)) + credential_factory.create(workspace=workspace, host=host_2) + + session.commit() + + hosts_ids = [host_1.id, host_2.id] + request_data = {'ids': hosts_ids} + url = f'/v3/ws/{workspace.name}/hosts' + delete_response = test_client.delete(url, data=request_data) + + assert delete_response.status_code == 200 + assert delete_response.json['deleted'] == 2 + + +class TestHostAPIGeneric(ReadWriteAPITests, PaginationTestsMixin, BulkUpdateTestsMixin, BulkDeleteTestsMixin): model = Host factory = factories.HostFactory api_endpoint = 'hosts' @@ -851,12 +871,12 @@ expected_ids = [host.id for host in sorted(Host.query.all(), key=operator.attrgetter('description'))] - res = test_client.get(self.url() + '?sort=description&sort_dir=asc') + res = test_client.get(urljoin(self.url(), '?sort=description&sort_dir=asc')) assert res.status_code == 200 assert [host['_id'] for host in res.json['data']] == expected_ids expected_ids.reverse() # In place list reverse - res = test_client.get(self.url() + '?sort=description&sort_dir=desc') + res = test_client.get(urljoin(self.url(), '?sort=description&sort_dir=desc')) assert res.status_code == 200 assert [host['_id'] for host in res.json['data']] == expected_ids @@ -871,8 +891,8 @@ session.flush() expected_ids.append(host.id) session.commit() - res = test_client.get(self.url(workspace=second_workspace) - + '?sort=services&sort_dir=asc') + res = test_client.get(urljoin(self.url(workspace=second_workspace), + '?sort=services&sort_dir=asc')) assert res.status_code == 200 assert [h['_id'] for h in res.json['data']] == expected_ids @@ -893,8 +913,8 @@ session.add(host) session.commit() expected.append(host) # Put it on the end - res = test_client.get(self.url(workspace=second_workspace) - + '?sort=metadata.update_time&sort_dir=asc') + res = test_client.get(urljoin(self.url(workspace=second_workspace), + '?sort=metadata.update_time&sort_dir=asc')) assert res.status_code == 200, res.data assert [h['_id'] for h in res.json['data']] == [h.id for h in expected] @@ -922,7 +942,7 @@ command = EmptyCommandFactory.create() session.commit() assert len(command.command_objects) == 0 - url = self.url(workspace=command.workspace) + '?' 
+ urlencode({'command_id': command.id}) + url = urljoin(self.url(workspace=command.workspace), f"?{urlencode({'command_id': command.id})}") res = test_client.post(url, data={ "ip": "127.0.0.1", @@ -940,7 +960,7 @@ new_workspace = WorkspaceFactory.create() session.commit() assert len(command.command_objects) == 0 - url = self.url(workspace=new_workspace) + '?' + urlencode({'command_id': command.id}) + url = urljoin(self.url(workspace=new_workspace), f"?{urlencode({'command_id': command.id})}") res = test_client.post(url, data={ "ip": "127.0.0.1", @@ -948,7 +968,7 @@ }) assert res.status_code == 400 - assert res.json == {u'message': u'Command not found.'} + assert res.json == {'message': 'Command not found.'} assert len(command.command_objects) == 0 def test_service_summaries(self, test_client, session, service_factory): @@ -1103,6 +1123,35 @@ index_in_response_hosts = response_hosts.index(host) assert index_in_hosts_ids == index_in_response_hosts + + @pytest.mark.usefixtures('ignore_nplusone') + def test_bulk_update_host_with_hostnames(self, test_client, session, + host_with_hostnames): + session.commit() + data = { + "ids": [host_with_hostnames.id, self.first_object.id], + "hostnames": ["other.com", "test.com"], + } + res = test_client.patch(self.url(), data=data) + assert res.status_code == 200 + assert res.json["updated"] == 2 + expected = {"other.com", "test.com"} + assert {hn.name for hn in host_with_hostnames.hostnames} == expected + assert {hn.name for hn in self.first_object.hostnames} == expected + + @pytest.mark.usefixtures('ignore_nplusone') + def test_bulk_update_host_without_hostnames(self, test_client, session, + host_with_hostnames): + session.commit() + expected = {hn.name for hn in host_with_hostnames.hostnames} + data = { + "ids": [host_with_hostnames.id], + "os": "NotAnOS" + } + res = test_client.patch(self.url(), data=data) + assert res.status_code == 200 + assert res.json["updated"] == 1 + assert {hn.name for hn in host_with_hostnames.hostnames} == expected def host_json(): diff --git a/tests/test_api_license.py b/tests/test_api_license.py index d6971dd..43538ef 100644 --- a/tests/test_api_license.py +++ b/tests/test_api_license.py @@ -1,4 +1,3 @@ -# -*- coding: utf8 -*- ''' Faraday Penetration Test IDE Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) @@ -13,7 +12,10 @@ from hypothesis import given, strategies as st from tests import factories -from tests.test_api_non_workspaced_base import ReadWriteAPITests, API_PREFIX +from tests.test_api_non_workspaced_base import ( + ReadWriteAPITests, + API_PREFIX +) from faraday.server.models import ( License, ) @@ -33,7 +35,8 @@ model = License factory = factories.LicenseFactory api_endpoint = 'licenses' - patchable_fields = ["products"] + view_class = LicenseView + patchable_fields = ["product"] # @pytest.mark.skip(reason="Not a license actually test") def test_envelope_list(self, test_client, app): diff --git a/tests/test_api_login.py b/tests/test_api_login.py index 13c8216..c4173da 100644 --- a/tests/test_api_login.py +++ b/tests/test_api_login.py @@ -1,4 +1,3 @@ - import pytest from flask_security.utils import hash_password from itsdangerous import TimedJSONWebSignatureSerializer diff --git a/tests/test_api_non_workspaced_base.py b/tests/test_api_non_workspaced_base.py index e73adeb..1e9ed09 100644 --- a/tests/test_api_non_workspaced_base.py +++ b/tests/test_api_non_workspaced_base.py @@ -1,12 +1,10 @@ -# -*- coding: utf8 -*- ''' Faraday Penetration Test IDE Copyright (C) 2013 Infobyte LLC 
(http://www.infobytesec.com/) See the file 'doc/LICENSE' for the license information ''' -from builtins import str - +from posixpath import join """Generic tests for APIs NOT prefixed with a workspace_name""" import pytest @@ -45,7 +43,7 @@ if obj is not None: id_ = str(getattr(obj, self.lookup_field)) if isinstance( obj, self.model) else str(obj) - url += u'/' + id_ + url = join(url, id_) return url @@ -69,7 +67,7 @@ assert res.status_code == 200 assert isinstance(res.json, dict) - @pytest.mark.parametrize('object_id', [123456789, -1, 'xxx', u'áá']) + @pytest.mark.parametrize('object_id', [123456789, -1, 'xxx', 'áá']) def test_404_when_retrieving_unexistent_object(self, test_client, object_id): url = self.url(object_id) @@ -145,6 +143,85 @@ @pytest.mark.usefixtures('logged_user') +class BulkUpdateTestsMixin: + + @staticmethod + def control_data(test_suite, data: dict) -> dict: + return UpdateTestsMixin.control_data(test_suite, data) + + def test_bulk_update_an_object(self, test_client, logged_user): + all_objs = self.model.query.all() + all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs] + all_objs, all_objs_id = all_objs[:-1], all_objs_id[:-1] + + data = self.factory.build_dict() + data = BulkUpdateTestsMixin.control_data(self, data) + + res = test_client.patch(self.url(), data={}) + assert res.status_code == 400 + data["ids"] = all_objs_id + res = test_client.patch(self.url(), data=data) + + assert res.status_code == 200, (res.status_code, res.json) + assert self.model.query.count() == OBJECT_COUNT + assert res.json['updated'] == len(all_objs) + for obj in self.model.query.all(): + if getattr(obj, self.view_class.lookup_field) not in all_objs_id: + assert any( + [ + data[updated_field] != getattr(obj, updated_field) + for updated_field in data if updated_field != 'ids' + ] + ) + else: + assert all( + [ + data[updated_field] == getattr(obj, updated_field) + for updated_field in data if updated_field != 'ids' + ] + ) + + def test_bulk_update_fails_with_existing(self, test_client, session): + for unique_field in self.unique_fields: + data = self.factory.build_dict() + data[unique_field] = getattr(self.objects[3], unique_field) + data["ids"] = [getattr(self.objects[i], self.view_class.lookup_field) for i in range(0, 2)] + res = test_client.patch(self.url(), data=data) + assert res.status_code == 400 + assert self.model.query.count() == OBJECT_COUNT + assert res.json['updated'] == 0 + + def test_patch_bulk_update_an_object_does_not_fail_with_partial_data(self, test_client, logged_user): + """To do this the user should use a PATCH request""" + all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in self.model.query.all()] + res = test_client.patch(self.url(), data={"ids": all_objs_id}) + assert res.status_code == 200, (res.status_code, res.json) + assert res.json['updated'] == 0 + + def test_bulk_update_invalid_ids(self, test_client): + data = self.factory.build_dict() + data = BulkUpdateTestsMixin.control_data(self, data) + data['ids'] = [-1, 'test'] + res = test_client.patch(self.url(), data=data) + assert res.status_code == 200 + assert res.json['updated'] == 0 + data['ids'] = [-1, 'test', self.first_object.__getattribute__(self.view_class.lookup_field)] + res = test_client.patch(self.url(), data=data) + assert res.status_code == 200 + assert res.json['updated'] == 1 + + def test_bulk_update_wrong_content_type(self, test_client): + all_objs = self.model.query.all() + all_objs_id = 
[obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs] + + request_data = {'ids': all_objs_id} + headers = [('content-type', 'text/xml')] + + res = test_client.patch(self.url(), data=request_data, headers=headers) + assert res.status_code == 400 + + +@pytest.mark.usefixtures('logged_user') class DeleteTestsMixin: def test_delete(self, test_client, logged_user): @@ -153,12 +230,55 @@ assert was_deleted(self.first_object) assert self.model.query.count() == OBJECT_COUNT - 1 - @pytest.mark.parametrize('object_id', [12300, -1, 'xxx', u'áá']) + @pytest.mark.parametrize('object_id', [12300, -1, 'xxx', 'áá']) def test_delete_non_existent_raises_404(self, test_client, object_id): res = test_client.delete(self.url(object_id)) assert res.status_code == 404 # No content assert self.model.query.count() == OBJECT_COUNT + + +@pytest.mark.usefixtures('logged_user') +class BulkDeleteTestsMixin: + + @pytest.mark.usefixtures('ignore_nplusone') + def test_bulk_delete(self, test_client): + + all_objs = self.model.query.all() + all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs] + ignored_obj = all_objs[-1] + all_objs, all_objs_id = all_objs[:-1], all_objs_id[:-1] + + res = test_client.delete(self.url(), data={}) + assert res.status_code == 400 + data = {"ids": all_objs_id} + res = test_client.delete(self.url(), data=data) + assert res.status_code == 200 + assert all([was_deleted(obj) for obj in all_objs]) + assert res.json['deleted'] == len(all_objs) + assert not was_deleted(ignored_obj) + assert self.model.query.count() == 1 + + def test_bulk_delete_invalid_ids(self, test_client): + request_data = {'ids': [-1, 'test']} + count = self.model.query.count() + res = test_client.delete(self.url(), data=request_data) + assert res.status_code == 200 + assert res.json['deleted'] == 0 + assert self.model.query.count() == count + + def test_bulk_delete_wrong_content_type(self, test_client): + all_objs = self.model.query.all() + all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs] + count = self.model.query.count() + + request_data = {'ids': all_objs_id} + headers = [('content-type', 'text/xml')] + + res = test_client.delete(self.url(), data=request_data, headers=headers) + assert res.status_code == 400 + assert self.model.query.count() == count + assert all([not was_deleted(obj) for obj in all_objs]) class ReadWriteTestsMixin(ListTestsMixin, diff --git a/tests/test_api_pagination.py b/tests/test_api_pagination.py index 61467a5..7d69d04 100644 --- a/tests/test_api_pagination.py +++ b/tests/test_api_pagination.py @@ -1,4 +1,3 @@ -# -*- coding: utf8 -*- ''' Faraday Penetration Test IDE Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) @@ -13,7 +12,7 @@ try: from urllib import urlencode except ImportError as e: - from urllib.parse import urlencode + from urllib.parse import urlencode, urljoin def with_0_and_n_objects(n=10): @@ -46,7 +45,7 @@ self.view_class.page_number_parameter_name] = page_number if per_page is not None: parameters[self.view_class.per_page_parameter_name] = per_page - return self.url() + '?' 
+ urlencode(parameters) + return urljoin(self.url(), f'?{urlencode(parameters)}') @pytest.mark.parametrize("page_number", [None, 1, 2]) @pytest.mark.usefixtures('pagination_test_logic') @@ -78,7 +77,7 @@ self.create_many_objects(session, object_count) res = test_client.get(self.page_url(-1, 10)) assert res.status_code == 200 - assert res.json == {u'data': []} + assert res.json == {'data': []} @pytest.mark.usefixtures('pagination_test_logic') @pytest.mark.pagination @@ -102,7 +101,7 @@ self.create_many_objects(session, 5) res = test_client.get(self.page_url(2, 5)) assert res.status_code == 200 - assert res.json == {u'data': []} + assert res.json == {'data': []} @pytest.mark.usefixtures('pagination_test_logic') @pytest.mark.pagination diff --git a/tests/test_api_search_filter.py b/tests/test_api_search_filter.py index 69cd613..4fd2e72 100644 --- a/tests/test_api_search_filter.py +++ b/tests/test_api_search_filter.py @@ -1,4 +1,3 @@ -# -*- coding: utf8 -*- ''' Faraday Penetration Test IDE Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) @@ -10,7 +9,11 @@ import pytest from tests.factories import SearchFilterFactory, UserFactory -from tests.test_api_non_workspaced_base import ReadWriteAPITests +from tests.test_api_non_workspaced_base import ( + ReadWriteAPITests, + BulkUpdateTestsMixin, + BulkDeleteTestsMixin +) from tests.test_api_agent import logout from tests.conftest import login_as from faraday.server.models import SearchFilter @@ -19,7 +22,7 @@ @pytest.mark.usefixtures('logged_user') -class TestSearchFilterAPI(ReadWriteAPITests): +class TestSearchFilterAPI(ReadWriteAPITests, BulkUpdateTestsMixin, BulkDeleteTestsMixin): model = SearchFilter factory = SearchFilterFactory api_endpoint = 'searchfilter' @@ -118,4 +121,10 @@ def test_patch_update_an_object_does_not_fail_with_partial_data(self, test_client, logged_user): self.first_object.creator = logged_user - super().test_update_an_object_fails_with_empty_dict(test_client, logged_user) + super().test_patch_update_an_object_does_not_fail_with_partial_data(test_client, logged_user) + + @pytest.mark.usefixtures('ignore_nplusone') + def test_bulk_delete(self, test_client, logged_user): + for obj in self.model.query.all(): + obj.creator = logged_user + super().test_bulk_delete(test_client) diff --git a/tests/test_api_services.py b/tests/test_api_services.py index 682c1a6..5537cae 100644 --- a/tests/test_api_services.py +++ b/tests/test_api_services.py @@ -1,4 +1,3 @@ -# -*- coding: utf8 -*- ''' Faraday Penetration Test IDE Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) @@ -16,15 +15,15 @@ from faraday.server.api.modules.services import ServiceView from tests import factories -from tests.test_api_workspaced_base import ReadWriteAPITests +from tests.test_api_workspaced_base import ReadWriteAPITests, BulkDeleteTestsMixin, BulkUpdateTestsMixin from faraday.server.models import ( - Service + Service, Credential, Vulnerability ) -from tests.factories import HostFactory, EmptyCommandFactory +from tests.factories import HostFactory, EmptyCommandFactory, CredentialFactory, VulnerabilityFactory @pytest.mark.usefixtures('logged_user') -class TestListServiceView(ReadWriteAPITests): +class TestListServiceView(ReadWriteAPITests, BulkUpdateTestsMixin, BulkDeleteTestsMixin): model = Service factory = factories.ServiceFactory api_endpoint = 'services' @@ -44,19 +43,19 @@ assert res.status_code == 200 assert 'services' in res.json for service in res.json['services']: - assert set([u'id', u'key', u'value']) == set(service.keys()) + 
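# comparing full key sets makes unexpected envelope fields fail here, not just missing ones +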
assert {'id', 'key', 'value'} == set(service.keys()) object_properties = [ - u'status', - u'protocol', - u'description', - u'_rev', - u'owned', - u'owner', - u'credentials', - u'name', - u'version', - u'_id', - u'metadata' + 'status', + 'protocol', + 'description', + '_rev', + 'owned', + 'owner', + 'credentials', + 'name', + 'version', + '_id', + 'metadata' ] expected = set(object_properties) result = set(service['value'].keys()) @@ -335,3 +334,29 @@ res = test_client.post(self.url(), data=data) print(res.data) assert res.status_code == 400 + + @pytest.mark.usefixtures('ignore_nplusone') + def test_bulk_update_cant_change_id(self, test_client): + super().test_bulk_update_cant_change_id(test_client) + + def test_bulk_delete_with_references(self, test_client, session): + previous_creds = Credential.query.count() + previous_vulns = Vulnerability.query.count() + previous_services = Service.query.count() + + service_1 = self.factory.create(workspace=self.workspace) + service_2 = self.factory.create(workspace=self.workspace) + service_3 = self.factory.create(workspace=self.workspace) + + for _ in range(3): + CredentialFactory.create(service=service_1, workspace=self.workspace) + VulnerabilityFactory.create(service=service_2, workspace=self.workspace) + CredentialFactory.create(service=service_3, workspace=self.workspace) + VulnerabilityFactory.create(service=service_3, workspace=self.workspace) + session.commit() + + raw_data = {'ids': [service_1.id, service_2.id, service_3.id]} + res = test_client.delete(self.url(), data=raw_data) + + assert res.status_code == 200 + assert res.json['deleted'] == 3 diff --git a/tests/test_api_vulnerability.py b/tests/test_api_vulnerability.py index 7ddc460..cc6dea0 100644 --- a/tests/test_api_vulnerability.py +++ b/tests/test_api_vulnerability.py @@ -1,25 +1,24 @@ -# -*- coding: utf8 -*- ''' Faraday Penetration Test IDE Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) See the file 'doc/LICENSE' for the license information - ''' import csv import json import urllib import datetime -from builtins import str from pathlib import Path from tempfile import NamedTemporaryFile from base64 import b64encode from io import BytesIO, StringIO -from posixpath import join as urljoin +from posixpath import join + +from sqlalchemy.orm.util import was_deleted try: from urllib import urlencode except ImportError: - from urllib.parse import urlencode + from urllib.parse import urlencode, urljoin import pytz import pytest @@ -38,7 +37,9 @@ from tests import factories from tests.conftest import TEST_DATA_PATH from tests.test_api_workspaced_base import ( - ReadWriteAPITests + ReadWriteAPITests, + BulkDeleteTestsMixin, + BulkUpdateTestsMixin ) from faraday.server.models import ( VulnerabilityGeneric, @@ -51,7 +52,11 @@ File, Host, Service, - CVE) + CVE, + CVSSV2, + CVSSV3, + SeveritiesHistogram, +) from tests.factories import ( ServiceFactory, CommandFactory, @@ -176,14 +181,14 @@ @pytest.mark.usefixtures('logged_user') -class TestListVulnerabilityView(ReadWriteAPITests): +class TestListVulnerabilityView(ReadWriteAPITests, BulkUpdateTestsMixin, BulkDeleteTestsMixin): model = Vulnerability factory = factories.VulnerabilityFactory api_endpoint = 'vulns' # unique_fields = ['ip'] # update_fields = ['ip', 'description', 'os'] view_class = VulnerabilityView - patchable_fields = ['description'] + patchable_fields = ['name'] def test_backward_json_compatibility(self, test_client, second_workspace, session): new_obj = self.factory.create(workspace=second_workspace) @@ -193,42 
+198,42 @@ assert res.status_code == 200 assert 'vulnerabilities' in res.json for vuln in res.json['vulnerabilities']: - assert set([u'id', u'key', u'value']) == set(vuln.keys()) + assert {'id', 'key', 'value'} == set(vuln.keys()) object_properties = [ - u'status', - u'issuetracker', - u'description', - u'parent', - u'tags', - u'severity', - u'_rev', - u'easeofresolution', - u'owned', - u'hostnames', - u'pname', - u'query', - u'owner', - u'path', - u'data', - u'response', - u'refs', - u'desc', - u'impact', - u'confirmed', - u'name', - u'service', - u'obj_id', - u'type', - u'policyviolations', - u'request', - u'_attachments', - u'target', - u'_id', - u'resolution', - u'method', - u'metadata', - u'website', - u'params', + 'status', + 'issuetracker', + 'description', + 'parent', + 'tags', + 'severity', + '_rev', + 'easeofresolution', + 'owned', + 'hostnames', + 'pname', + 'query', + 'owner', + 'path', + 'data', + 'response', + 'refs', + 'desc', + 'impact', + 'confirmed', + 'name', + 'service', + 'obj_id', + 'type', + 'policyviolations', + 'request', + '_attachments', + 'target', + '_id', + 'resolution', + 'method', + 'metadata', + 'website', + 'params', ] expected = set(object_properties) result = set(vuln['value'].keys()) @@ -295,8 +300,8 @@ res = test_client.get(self.url(vuln)) assert res.status_code == 200 assert isinstance(res.json['hostnames'], list) - assert set(res.json['hostnames']) == set(hostname.name for hostname in - host_with_hostnames.hostnames) + assert set(res.json['hostnames']) == {hostname.name for hostname in + host_with_hostnames.hostnames} def test_create_vuln(self, host_with_hostnames, test_client, session): """ @@ -330,6 +335,387 @@ assert res.json['description'] == 'helloworld' assert res.json['severity'] == 'low' + def test_histogram_creation(self, vulnerability_factory, second_workspace, test_client, session): + """ + Check that the severities histogram stays in sync while vulns are + created, have their status / severity / confirmed flags changed, + and are deleted one by one, in bulk and via query-level updates. + :param vulnerability_factory: + :param second_workspace: + """ + + vulns = VulnerabilityWeb.query.all() + for vuln in vulns: + session.delete(vuln) + session.commit() + + vulns = Vulnerability.query.all() + for vuln in vulns: + session.delete(vuln) + session.commit() + + session.query(SeveritiesHistogram).delete() + session.commit() + vulns_unconfirmed = vulnerability_factory.create_batch(4, confirmed=False, + workspace=self.workspace, + status='open', + severity='critical') + + vulns_confirmed = vulnerability_factory.create_batch(4, confirmed=True, + workspace=self.workspace, + status='open', + severity='critical') + + session.add_all(vulns_confirmed + vulns_unconfirmed) + session.commit() + + histogram = SeveritiesHistogram.query.all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].confirmed == 4 + assert histogram[0].date == datetime.date.today() + + vulns_high = vulnerability_factory.create_batch(4, + confirmed=True, + workspace=second_workspace, + status='open', + severity='high') + + owner = UserFactory.create() + service = ServiceFactory.create(workspace=self.workspace) + vuln_web = VulnerabilityWebFactory.create( + confirmed=True, + service=service, + creator=owner, + workspace=self.workspace, + severity='medium' + ) + + vulns_critical = vulnerability_factory.create_batch(4, + confirmed=False, + workspace=second_workspace, + status='open', + severity='critical') + + session.add_all(vulns_high + vulns_critical + [vuln_web]) + session.commit() + + vhigh_id = vulns_high[0].id + 
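# keep plain ids: some of these vulns are deleted later in the test, so the ORM instances can't be reloaded afterwards +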
vhigh2_id = vulns_high[1].id + vhigh3_id = vulns_high[2].id + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 1 + assert histogram[0].confirmed == 5 + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 4 + assert histogram[0].medium == 0 + assert histogram[0].confirmed == 4 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.confirmed = False + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 4 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.status = 'closed' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 3 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.status = 'closed' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 3 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.status = 'open' + v.confirmed = False + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 4 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.status = 're-opened' + v.confirmed = True + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 4 + assert histogram[0].confirmed == 4 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.status = 'risk-accepted' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 3 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = VulnerabilityWeb.query.get(vuln_web.id) + v.status = 'closed' + session.commit() + + histogram = 
SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 0 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 4 + + v = VulnerabilityWeb.query.get(vuln_web.id) + v.status = 'closed' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 0 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 4 + + v = VulnerabilityWeb.query.get(vuln_web.id) + v.status = 'open' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 1 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 5 + + v = VulnerabilityWeb.query.get(vuln_web.id) + v.status = 're-opened' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 1 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 5 + + v = VulnerabilityWeb.query.get(vuln_web.id) + v.status = 'risk-accepted' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 0 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 4 + + v = VulnerabilityWeb.query.get(vuln_web.id) + v.status = 'closed' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 0 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 4 + + v = Vulnerability.query.get(vhigh_id) + v.confirmed = False + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 3 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.confirmed = True + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 3 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.status = "re-opened" + v.confirmed = True + session.commit() + + histogram = 
SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 4 + assert histogram[0].confirmed == 4 + assert histogram[0].date == datetime.date.today() + + v = Vulnerability.query.get(vhigh_id) + v.confirmed = False + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 4 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + v = session.query(Vulnerability).filter(Vulnerability.id == vhigh_id).first() + session.delete(v) + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 3 + assert histogram[0].confirmed == 3 + assert histogram[0].date == datetime.date.today() + + Vulnerability.query.filter(Vulnerability.id.in_([vhigh2_id, vhigh3_id])).delete(synchronize_session=False) + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == second_workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == second_workspace.id + assert histogram[0].critical == 4 + assert histogram[0].high == 1 + assert histogram[0].confirmed == 1 + assert histogram[0].date == datetime.date.today() + + v = VulnerabilityWeb.query.get(vuln_web.id) + v.status = 'open' + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 1 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 5 + + VulnerabilityWeb.query.filter(VulnerabilityWeb.id == vuln_web.id).delete() + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 8 + assert histogram[0].medium == 0 + assert histogram[0].confirmed == 4 + assert histogram[0].date == datetime.date.today() + + Vulnerability.query.filter(Vulnerability.workspace == self.workspace, + Vulnerability.status == 'open', + Vulnerability.severity == 'critical', + Vulnerability.confirmed == False).update({'status': 'closed'}) # noqa: E712 + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 4 + assert histogram[0].medium == 0 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 4 + + Vulnerability.query.filter(Vulnerability.workspace == self.workspace, + Vulnerability.status == 'open', + Vulnerability.severity == 'critical').update({'severity': 'medium'}) + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert 
histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 0 + assert histogram[0].medium == 4 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 4 + + Vulnerability.query.filter(Vulnerability.workspace == self.workspace, + Vulnerability.status == 'open', + Vulnerability.severity == 'medium').update({'severity': 'critical', 'status': 'closed'}) + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 0 + assert histogram[0].medium == 0 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 0 + + Vulnerability.query.filter(Vulnerability.workspace == self.workspace, + Vulnerability.status == 'closed', + Vulnerability.severity == 'critical', + Vulnerability.confirmed == True).update({'severity': 'medium', 'status': 're-opened'}) # noqa: E712 + session.commit() + + histogram = SeveritiesHistogram.query.filter(SeveritiesHistogram.workspace == self.workspace).all() + assert len(histogram) == 1 + assert histogram[0].workspace_id == self.workspace.id + assert histogram[0].critical == 0 + assert histogram[0].medium == 4 + assert histogram[0].date == datetime.date.today() + assert histogram[0].confirmed == 4 + def test_create_cannot_create_vuln_with_empty_name_fails( self, host, session, test_client): # I'm using this to test the NonBlankColumn which works for @@ -410,11 +796,11 @@ assert filename in res.json['_attachments'] attachment.close() # check the attachment can be downloaded - res = test_client.get(urljoin(self.url(), f'{vuln_id}/attachment/{filename}')) + res = test_client.get(join(self.url(), f'{vuln_id}/attachment/{filename}')) assert res.status_code == 200 assert res.data == file_content - res = test_client.get(urljoin( + res = test_client.get(join( self.url(), f'{vuln_id}/attachment/notexistingattachment.png' )) @@ -441,7 +827,7 @@ res = test_client.put(self.url(obj=vuln, workspace=self.workspace), data=raw_data) assert res.status_code == 200 filename = attachment.name.split('/')[-1] - res = test_client.get(urljoin( + res = test_client.get(join( self.url(), f'{vuln.id}/attachment/{filename}' )) assert res.status_code == 200 @@ -465,11 +851,11 @@ assert res.status_code == 200 # verify that the old file was deleted and the new one exists - res = test_client.get(urljoin( + res = test_client.get(join( self.url(), f'{vuln.id}/attachment/{filename}' )) assert res.status_code == 404 - res = test_client.get(urljoin( + res = test_client.get(join( self.url(), f'{vuln.id}/attachment/{new_filename}' )) assert res.status_code == 200 @@ -489,7 +875,7 @@ session.add(new_attach) session.commit() - res = test_client.get(urljoin(self.url(workspace=workspace), f'{vuln.id}/attachment')) + res = test_client.get(join(self.url(workspace=workspace), f'{vuln.id}/attachment')) assert res.status_code == 200 assert new_attach.filename in res.json assert 'image/png' in res.json[new_attach.filename]['content_type'] @@ -583,12 +969,8 @@ def _create_put_data(self, name, desc, status, parent, parent_type, - attachments=None, impact=None, refs=None, - policy_violations=None): - if not refs: - refs = [] - if not policy_violations: - policy_violations = [] + attachments=None, impact=None, refs=[], + policy_violations=[], cve=[]): if not impact: impact = {"accountability": False, "availability": False, "confidentiality": False, "integrity": False} @@ 
-625,7 +1007,9 @@ "description": "", "parent_type": parent_type, "protocol": "", - "version": ""} + "version": "", + "cve": cve + } if attachments: raw_data['_attachments'] = {} @@ -676,6 +1060,32 @@ res = test_client.put(self.url(vuln), data=raw_data) assert res.status_code in [400, 409] assert vuln_count_previous == session.query(Vulnerability).count() + + def test_update_vuln_cve(self, test_client, session, host_with_hostnames): + vuln = self.factory.create(status='open', cve=['CVE-2021-1234'], host=host_with_hostnames, service=None, + workspace=host_with_hostnames.workspace) + session.add(vuln) + session.commit() + + vuln = self.factory.create(status='open', cve=['CVE-2021-1234'], host=host_with_hostnames, service=None, + workspace=host_with_hostnames.workspace) + session.add(vuln) + session.commit() + + raw_data = self._create_put_data( + name='New name', + desc='New desc', + status='open', + parent=vuln.host.id, + parent_type='Host', + policy_violations=['pv0'], + cve=['cve-2021-1234'] + ) + vuln_count_previous = session.query(CVE).count() + assert vuln_count_previous == 1 + res = test_client.put(self.url(vuln), data=raw_data) + assert res.status_code == 200 + assert vuln_count_previous == session.query(CVE).count() def test_create_vuln_web(self, host_with_hostnames, test_client, session): service = ServiceFactory.create(host=host_with_hostnames, workspace=host_with_hostnames.workspace) @@ -725,13 +1135,13 @@ session.commit() expected_ids = {vuln.id for vuln in expected_vulns} - res = test_client.get( - self.url(workspace=second_workspace) + f'?{param_name}=bbb') + res = test_client.get(urljoin( + self.url(workspace=second_workspace), f'?{param_name}=bbb')) assert res.status_code == 200 for vuln in res.json['data']: assert vuln['query'] == 'bbb' - assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids + assert {vuln['_id'] for vuln in res.json['data']} == expected_ids @pytest.mark.usefixtures('mock_envelope_list') @pytest.mark.parametrize('medium_name', ['medium', 'med']) @@ -758,21 +1168,21 @@ expected_ids.update(vuln.id for vuln in medium_vulns) expected_ids.update(vuln.id for vuln in medium_vulns_web) - res = test_client.get(self.url( - workspace=second_workspace) + f'?severity={medium_name}') + res = test_client.get(urljoin(self.url( + workspace=second_workspace), f'?severity={medium_name}')) assert res.status_code == 200 for vuln in res.json['data']: assert vuln['severity'] == 'med' - assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids + assert {vuln['_id'] for vuln in res.json['data']} == expected_ids def test_filter_by_invalid_severity_fails(self, test_client): - res = test_client.get(self.url() + '?severity=131231') + res = test_client.get(urljoin(self.url(), '?severity=131231')) assert res.status_code == 400 assert b'Invalid severity type' in res.data @pytest.mark.usefixtures('mock_envelope_list') def test_filter_by_invalid_severity(self, test_client): - res = test_client.get(self.url() + '?severity=invalid') + res = test_client.get(urljoin(self.url(), '?severity=invalid')) assert res.status_code == 400 @pytest.mark.usefixtures('mock_envelope_list') @@ -796,16 +1206,16 @@ expected_ids = {vuln.id for vuln in expected_vulns} # This shouldn't show any vulns with POSTT method - res = test_client.get(self.url( - workspace=second_workspace) + '?method=POST') - assert res.status_code == 200 - assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids, "This may fail because no presence of " \ + res = test_client.get(urljoin(self.url( + 
workspace=second_workspace), '?method=POST')) + assert res.status_code == 200 + assert {vuln['_id'] for vuln in res.json['data']} == expected_ids, "This may fail because no presence of " \ "filter_alchemy branch" # This shouldn't show any vulns since by default method filter is # an exact match, not a like statement - res = test_client.get(self.url( - workspace=second_workspace) + '?method=%25POST%25') + res = test_client.get(urljoin(self.url( + workspace=second_workspace), '?method=%25POST%25')) assert res.status_code == 200 assert len(res.json['data']) == 0 @@ -830,13 +1240,13 @@ session.commit() expected_ids = {vuln.id for vuln in expected_vulns} - res = test_client.get(self.url( - workspace=second_workspace) + '?website=faradaysec.com') + res = test_client.get(urljoin(self.url( + workspace=second_workspace), '?website=faradaysec.com')) assert res.status_code == 200 for vuln in res.json['data']: assert vuln['website'] == 'faradaysec.com' - assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids + assert {vuln['_id'] for vuln in res.json['data']} == expected_ids @pytest.mark.usefixtures('mock_envelope_list') def test_filter_by_target(self, test_client, session, host_factory, @@ -862,11 +1272,11 @@ expected_ids.add(service_vuln.id) expected_ids.add(web_vuln.id) - res = test_client.get(self.url() + '?target=9.9.9.9') + res = test_client.get(urljoin(self.url(), '?target=9.9.9.9')) assert res.status_code == 200 for vuln in res.json['data']: assert vuln['target'] == '9.9.9.9' - assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids + assert {vuln['_id'] for vuln in res.json['data']} == expected_ids @pytest.mark.usefixtures('ignore_nplusone') @pytest.mark.parametrize('filter_params', [ @@ -1041,7 +1451,7 @@ f'"op":"{operation["filter_operation"]}",' \ f'"val": {operation["filter_value"]} }}]}}' - res = test_client.get(urljoin(self.url(), qparams)) + res = test_client.get(join(self.url(), qparams)) assert res.status_code == operation['res_status_code'] assert len(res.json['vulnerabilities']) == operation['count'] @@ -1058,7 +1468,7 @@ f'{{"name": "{filter_params["filter_field_name"]}", '\ f'"op":"{operation["filter_operation"]}",'\ f'"val": {operation["filter_value"]} }}], {orderparams} }}' - res = test_client.get(urljoin(self.url(), qparams)) + res = test_client.get(join(self.url(), qparams)) assert res.status_code == operation['res_status_code'] assert len(res.json['vulnerabilities']) == operation['count'] @@ -1075,7 +1485,7 @@ f'{{"name": "{filter_params["filter_field_name"]}", '\ f'"op":"{operation["filter_operation"]}",'\ f'"val": {operation["filter_value"]} }}], {groupparams} }}' - res = test_client.get(urljoin(self.url(), qparams)) + res = test_client.get(join(self.url(), qparams)) assert res.status_code == 200 @@ -1228,7 +1638,162 @@ res = test_client.post(self.url(workspace=ws), data=raw_data) assert res.status_code == 201 assert len(res.json['cve']) == 3 - assert set(res.json['cve']) == set(['CVE-2018-1234', 'CVE-2017-0002', 'CVE-2017-0012']) + assert set(res.json['cve']) == {'CVE-2018-1234', 'CVE-2017-0002', 'CVE-2017-0012'} + + def test_create_vuln_with_cvssv2(self, host_with_hostnames, test_client, session): + cvssv2_obj = CVSSV2(vector_string='AV:L/AC:L/Au:M/C:C/I:P/A:N') + session.add(cvssv2_obj) + session.commit() + + assert session.query(CVSSV2).count() == 1 + assert cvssv2_obj.base_score == 5.0 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:L/Au:M/C:C/I:P/A:N') + + session.add(cvssv2_obj) + session.commit() + + assert session.query(CVSSV2).count() == 
2 + assert cvssv2_obj.base_score == 6.8 + + cvssv2_obj = CVSSV2(base_score=4.5) + session.add(cvssv2_obj) + session.commit() + + assert session.query(CVSSV2).count() == 3 + assert cvssv2_obj.base_score == 4.5 + + cvssv2_obj = CVSSV2() + session.add(cvssv2_obj) + session.commit() + + assert session.query(CVSSV2).count() == 4 + assert cvssv2_obj.base_score is None + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:H/Au:N/C:P/I:N/A:N') + assert cvssv2_obj.base_score == 2.6 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:L/Au:M/C:N/I:P/A:P') + assert cvssv2_obj.base_score == 4.7 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:H/Au:M/C:N/I:P/A:P') + assert cvssv2_obj.base_score == 3.2 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:L/Au:M/C:N/I:P/A:P') + assert cvssv2_obj.base_score == 4.7 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:L/Au:N/C:N/I:P/A:P') + assert cvssv2_obj.base_score == 6.4 + + cvssv2_obj = CVSSV2(vector_string='AV:A/AC:L/Au:N/C:N/I:P/A:P') + assert cvssv2_obj.base_score == 4.8 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:L/Au:N/C:N/I:P/A:P') + assert cvssv2_obj.base_score == 6.4 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:H/Au:N/C:P/I:P/A:P') + assert cvssv2_obj.base_score == 5.1 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:H/Au:N/C:P/I:N/A:P') + assert cvssv2_obj.base_score == 4.0 + + cvssv2_obj = CVSSV2(vector_string='AV:N/AC:H/Au:N/C:C/I:C/A:C') + assert cvssv2_obj.base_score == 7.6 + + cvssv2_obj = CVSSV2(vector_string='AV:L/AC:L/Au:N/C:C/I:C/A:N') + assert cvssv2_obj.base_score == 6.6 + + cvssv2_obj = CVSSV2(vector_string='AV:L/AC:L/Au:N/C:C/I:C/A:P') + assert cvssv2_obj.base_score == 6.8 + + cvssv2_obj = CVSSV2(vector_string='AV:L/AC:L/Au:N/C:C/I:C/A:C') + assert cvssv2_obj.base_score == 7.2 + + cvssv2_obj = CVSSV2(vector_string='AV:L/AC:L/Au:N/C:C/I:N/A:C') + assert cvssv2_obj.base_score == 6.6 + + cvssv2_obj = CVSSV2(vector_string='AV:L/AC:L/Au:N/C:N/I:P/A:P') + assert cvssv2_obj.base_score == 3.6 + + def test_create_vuln_with_cvssv3(self, host_with_hostnames, test_client, session): + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:L') + session.add(cvssv3_obj) + session.commit() + + assert session.query(CVSSV3).count() == 1 + assert cvssv3_obj.base_score == 9.9 + + cvssv3_obj = CVSSV3(vector_string='AV:L/AC:L/PR:L/UI:N/S:C/C:N/I:H/A:L') + session.add(cvssv3_obj) + session.commit() + + assert session.query(CVSSV3).count() == 2 + assert cvssv3_obj.base_score == 7.3 + + cvssv3_obj = CVSSV3(base_score=4.5) + session.add(cvssv3_obj) + session.commit() + + assert session.query(CVSSV3).count() == 3 + assert cvssv3_obj.base_score == 4.5 + + cvssv3_obj = CVSSV3() + session.add(cvssv3_obj) + session.commit() + + assert session.query(CVSSV3).count() == 4 + assert cvssv3_obj.base_score is None + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:U/C:N/I:N/A:N') + assert cvssv3_obj.base_score == 0.0 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:U/C:L/I:N/A:N') + assert cvssv3_obj.base_score == 4.3 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:L/A:N') + assert cvssv3_obj.base_score == 7.1 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:N') + assert cvssv3_obj.base_score == 8.1 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:L') + assert cvssv3_obj.base_score == 8.3 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H') + assert cvssv3_obj.base_score == 8.8 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:N/I:N/A:N') + assert 
cvssv3_obj.base_score == 0.0 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:L/I:N/A:N') + assert cvssv3_obj.base_score == 5.0 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:N/A:N') + assert cvssv3_obj.base_score == 7.7 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:L/A:N') + assert cvssv3_obj.base_score == 8.5 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:N') + assert cvssv3_obj.base_score == 9.6 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:L') + assert cvssv3_obj.base_score == 9.9 + + cvssv3_obj = CVSSV3(vector_string='AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H') + assert cvssv3_obj.base_score == 9.9 + + cvssv3_obj = CVSSV3(vector_string='AV:P/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H') + assert cvssv3_obj.base_score == 7.4 + + cvssv3_obj = CVSSV3(vector_string='AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H') + assert cvssv3_obj.base_score == 8.8 + + cvssv3_obj = CVSSV3(vector_string='AV:A/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H') + assert cvssv3_obj.base_score == 9.0 + + cvssv3_obj = CVSSV3(vector_string='AV:L/AC:L/PR:H/UI:N/S:C/C:H/I:H/A:H') + assert cvssv3_obj.base_score == 8.2 def test_create_vuln_with_policyviolations(self, host_with_hostnames, test_client, session): session.commit() # flush host_with_hostnames @@ -1270,10 +1835,10 @@ assert res.status_code == 201 assert vuln_count_previous + 1 == session.query(Vulnerability).count() assert res.json['name'] == 'New vulns' - assert res.json['impact'] == {u'accountability': True, - u'availability': True, - u'confidentiality': True, - u'integrity': True} + assert res.json['impact'] == {'accountability': True, + 'availability': True, + 'confidentiality': True, + 'integrity': True} def test_handles_invalid_impact(self, host_with_hostnames, test_client, session): @@ -1423,12 +1988,12 @@ # Desc res = test_client.get( - urljoin(self.url(), "count?confirmed=1&group_by=severity&order=sc" + join(self.url(), "count?confirmed=1&group_by=severity&order=sc" )) assert res.status_code == 400 # Asc - res = test_client.get(urljoin(self.url(), "count?confirmed=1&group_by=severity&order=name,asc")) + res = test_client.get(join(self.url(), "count?confirmed=1&group_by=severity&order=name,asc")) assert res.status_code == 400 def test_count_order_by(self, test_client, session): @@ -1445,7 +2010,7 @@ # Desc res = test_client.get( - urljoin(self.url(), "count?confirmed=1&group_by=severity&order=desc" + join(self.url(), "count?confirmed=1&group_by=severity&order=desc" )) assert res.status_code == 200 assert res.json['total_count'] == 3 @@ -1456,7 +2021,7 @@ # Asc res = test_client.get( - urljoin(self.url(), "count?confirmed=1&group_by=severity&order=asc")) + join(self.url(), "count?confirmed=1&group_by=severity&order=asc")) assert res.status_code == 200 assert res.json['total_count'] == 3 assert sorted(res.json['groups'], key=lambda i: (i['name'], i['count'], i['severity']), reverse=True) == sorted( @@ -1477,10 +2042,10 @@ session.add(vuln) session.commit() - res = test_client.get(urljoin(self.url(), "count?confirmed=1&group_by=username")) + res = test_client.get(join(self.url(), "count?confirmed=1&group_by=username")) assert res.status_code == 400 - res = test_client.get(urljoin(self.url(), "count?confirmed=1&group_by=")) + res = test_client.get(join(self.url(), "count?confirmed=1&group_by=")) assert res.status_code == 400 def test_count_confirmed(self, test_client, session): @@ -1496,7 +2061,7 @@ session.add(vuln) session.commit() - res = test_client.get(urljoin(self.url(), 
'count?confirmed=1&group_by=severity')) + res = test_client.get(join(self.url(), 'count?confirmed=1&group_by=severity')) assert res.status_code == 200 assert res.json['total_count'] == 3 assert sorted(res.json['groups'], key=lambda i: (i['count'], i['name'], i['severity'])) == sorted([ @@ -1515,7 +2080,7 @@ session.commit() res = test_client.get( - urljoin(self.url(workspace=second_workspace), 'count?group_by=severity' + join(self.url(workspace=second_workspace), 'count?group_by=severity' )) assert res.status_code == 200 assert res.json['total_count'] == 9 @@ -1538,7 +2103,7 @@ session.commit() res = test_client.get( - urljoin( + join( self.url(), f'count_multi_workspace?workspaces={self.workspace.name}&confirmed=1&group_by=severity&order=desc' ) @@ -1570,7 +2135,7 @@ session.commit() res = test_client.get( - urljoin( + join( self.url(), f'count_multi_workspace?workspaces={self.workspace.name},' f'{second_workspace.name}&confirmed=1&group_by=severity&order=desc' @@ -1583,19 +2148,19 @@ def test_count_multiworkspace_no_workspace_param(self, test_client): res = test_client.get( - urljoin(self.url(), 'count_multi_workspace?confirmed=1&group_by=severity&order=desc' + join(self.url(), 'count_multi_workspace?confirmed=1&group_by=severity&order=desc' )) assert res.status_code == 400 def test_count_multiworkspace_no_groupby_param(self, test_client): res = test_client.get( - urljoin(self.url(), f'count_multi_workspace?workspaces={self.workspace.name}&confirmed=1&order=desc' + join(self.url(), f'count_multi_workspace?workspaces={self.workspace.name}&confirmed=1&order=desc' )) assert res.status_code == 400 def test_count_multiworkspace_nonexistent_ws(self, test_client): res = test_client.get( - urljoin( + join( self.url(), f'count_multi_workspace?workspaces=asdf,{self.workspace.name}&confirmed=1&group_by=severity&order=desc' ) @@ -1701,8 +2266,8 @@ expected_ids.update(vuln.id for vuln in high_vulns) web_expected_ids.update(vuln.id for vuln in high_vulns_web) - res = test_client.get(self.url( - workspace=second_workspace) + f'?command_id={command.id}') + res = test_client.get(urljoin(self.url( + workspace=second_workspace), f'?command_id={command.id}')) assert res.status_code == 200 for vuln in res.json['data']: command_object = CommandObject.query.filter_by( @@ -1711,11 +2276,11 @@ workspace=second_workspace, ).first() vuln['metadata']['command_id'] == command_object.command.id - assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids + assert {vuln['_id'] for vuln in res.json['data']} == expected_ids # Check for web vulns - res = test_client.get(self.url( - workspace=second_workspace) + f'?command_id={web_command.id}') + res = test_client.get(urljoin(self.url( + workspace=second_workspace), f'?command_id={web_command.id}')) assert res.status_code == 200 for vuln in res.json['data']: command_object = CommandObject.query.filter_by( @@ -1724,11 +2289,11 @@ workspace=second_workspace, ).first() vuln['metadata']['command_id'] == command_object.command.id - assert set(vuln['_id'] for vuln in res.json['data']) == web_expected_ids + assert {vuln['_id'] for vuln in res.json['data']} == web_expected_ids # Check for cross-workspace bugs - res = test_client.get(self.url( - workspace=workspace) + f'?command_id={web_command.id}') + res = test_client.get(urljoin(self.url( + workspace=workspace), f'?command_id={web_command.id}')) assert res.status_code == 200 assert len(res.json['data']) == 0 @@ -1765,14 +2330,14 @@ res.json['vulnerabilities'])) assert 'metadata' in from_json_vuln[0]['value'] 
@@ -1538,7 +2103,7 @@
         session.commit()

         res = test_client.get(
-            urljoin(
+            join(
                 self.url(),
                 f'count_multi_workspace?workspaces={self.workspace.name}&confirmed=1&group_by=severity&order=desc'
             )
@@ -1570,7 +2135,7 @@
         session.commit()

         res = test_client.get(
-            urljoin(
+            join(
                 self.url(),
                 f'count_multi_workspace?workspaces={self.workspace.name},'
                 f'{second_workspace.name}&confirmed=1&group_by=severity&order=desc'
             )
@@ -1583,19 +2148,19 @@

     def test_count_multiworkspace_no_workspace_param(self, test_client):
         res = test_client.get(
-            urljoin(self.url(), 'count_multi_workspace?confirmed=1&group_by=severity&order=desc'
+            join(self.url(), 'count_multi_workspace?confirmed=1&group_by=severity&order=desc'
                     ))
         assert res.status_code == 400

     def test_count_multiworkspace_no_groupby_param(self, test_client):
         res = test_client.get(
-            urljoin(self.url(), f'count_multi_workspace?workspaces={self.workspace.name}&confirmed=1&order=desc'
+            join(self.url(), f'count_multi_workspace?workspaces={self.workspace.name}&confirmed=1&order=desc'
                     ))
         assert res.status_code == 400

     def test_count_multiworkspace_nonexistent_ws(self, test_client):
         res = test_client.get(
-            urljoin(
+            join(
                 self.url(),
                 f'count_multi_workspace?workspaces=asdf,{self.workspace.name}&confirmed=1&group_by=severity&order=desc'
             )
@@ -1701,8 +2266,8 @@
         expected_ids.update(vuln.id for vuln in high_vulns)
         web_expected_ids.update(vuln.id for vuln in high_vulns_web)

-        res = test_client.get(self.url(
-            workspace=second_workspace) + f'?command_id={command.id}')
+        res = test_client.get(urljoin(self.url(
+            workspace=second_workspace), f'?command_id={command.id}'))
         assert res.status_code == 200
         for vuln in res.json['data']:
             command_object = CommandObject.query.filter_by(
@@ -1711,11 +2276,11 @@
                 workspace=second_workspace,
             ).first()
             vuln['metadata']['command_id'] == command_object.command.id
-        assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids
+        assert {vuln['_id'] for vuln in res.json['data']} == expected_ids

         # Check for web vulns
-        res = test_client.get(self.url(
-            workspace=second_workspace) + f'?command_id={web_command.id}')
+        res = test_client.get(urljoin(self.url(
+            workspace=second_workspace), f'?command_id={web_command.id}'))
         assert res.status_code == 200
         for vuln in res.json['data']:
             command_object = CommandObject.query.filter_by(
@@ -1724,11 +2289,11 @@
                 workspace=second_workspace,
             ).first()
             vuln['metadata']['command_id'] == command_object.command.id
-        assert set(vuln['_id'] for vuln in res.json['data']) == web_expected_ids
+        assert {vuln['_id'] for vuln in res.json['data']} == web_expected_ids

         # Check for cross-workspace bugs
-        res = test_client.get(self.url(
-            workspace=workspace) + f'?command_id={web_command.id}')
+        res = test_client.get(urljoin(self.url(
+            workspace=workspace), f'?command_id={web_command.id}'))
         assert res.status_code == 200
         assert len(res.json['data']) == 0
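The hunks above keep urljoin for query-only suffixes, where it appends safely; when a value needs escaping, the urlencode pattern adopted by a later hunk in this file does the same job. A sketch (ids illustrative):

from urllib.parse import urlencode, urljoin

base = '/v3/ws/demo/vulns'
url = urljoin(base, f"?{urlencode({'command_id': 42})}")
# -> '/v3/ws/demo/vulns?command_id=42'; a command from another workspace
#    yields an empty 'data' list, as the cross-workspace check asserts.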
"host_creator_id", "host_date", "host_update_date", @@ -2664,7 +3229,7 @@ session.add(confirmed_vulns) session.commit() res = test_client.get( - urljoin( + join( self.url(workspace=workspace), 'export_csv?q={"filters":[{"name":"confirmed","op":"==","val":"true"}]}' ) @@ -2675,14 +3240,14 @@ @pytest.mark.usefixtures('ignore_nplusone') def test_export_vuln_csv_unicode_bug(self, test_client, session): workspace = WorkspaceFactory.create() - desc = u'Latin-1 Supplement \xa1 \xa2 \xa3 \xa4 \xa5 \xa6 \xa7 \xa8' + desc = 'Latin-1 Supplement \xa1 \xa2 \xa3 \xa4 \xa5 \xa6 \xa7 \xa8' confirmed_vulns = VulnerabilityFactory.create( confirmed=True, description=desc, workspace=workspace) session.add(confirmed_vulns) session.commit() - res = test_client.get(urljoin(self.url(workspace=workspace), 'export_csv')) + res = test_client.get(join(self.url(workspace=workspace), 'export_csv')) assert res.status_code == 200 assert self._verify_csv(res.data, confirmed=True) @@ -2693,7 +3258,7 @@ session.add(confirmed_vulns) session.commit() res = test_client.get( - urljoin( + join( self.url(workspace=workspace), 'export_csv?q={"filters":[{"name":"severity","op":"==","val":"critical"}]}' ) @@ -2707,7 +3272,7 @@ session.add(self.first_object) session.commit() res = test_client.get( - urljoin(self.url(), 'export_csv?confirmed=true') + join(self.url(), 'export_csv?confirmed=true') ) assert res.status_code == 200 self._verify_csv(res.data, confirmed=True) @@ -2772,7 +3337,7 @@ session.add(vuln) session.commit() - res = test_client.get(urljoin(self.url(), 'export_csv')) + res = test_client.get(join(self.url(), 'export_csv')) assert self._verify_csv(res.data) def _verify_csv(self, raw_csv_data, confirmed=False, severity=None): @@ -2865,6 +3430,33 @@ assert new_attach.filename in res.json assert 'image/png' in res.json[new_attach.filename]['content_type'] + @pytest.mark.usefixtures('ignore_nplusone') + def test_bulk_update_vuln_cant_change_tool_type_or_attachments(self, test_client, session): + host = HostFactory.create(workspace=self.workspace) + tool = "tool_name" + updated_tool = "new_tool" + vuln = VulnerabilityFactory.create(workspace=self.workspace, host_id=host.id, tool=tool) + session.add(vuln) + session.commit() # flush host_with_hostnames + type = "Vulnerability" if "web" in vuln.type.lower() else "Vulnerability" + # flush host_with_hostnames + attachment = NamedTemporaryFile() + file_content = b'test file' + attachment.write(file_content) + attachment.seek(0) + attachment_data = self._create_put_data( + 'Updated with attachment', + 'Updated vuln', + 'open', + host.id, + 'Host', + attachments=[attachment] + )["_attachments"] + raw_data = {'ids': [vuln.id], 'tool': updated_tool, "type": type, "_attachments": attachment_data} + res = test_client.patch(self.url(), data=raw_data) + assert res.status_code == 200 + assert res.json['updated'] == 0 + @pytest.mark.usefixtures('logged_user') class TestCustomFieldVulnerability(ReadWriteAPITests): @@ -2872,7 +3464,7 @@ factory = factories.VulnerabilityFactory api_endpoint = 'vulns' view_class = VulnerabilityView - patchable_fields = ['description'] + patchable_fields = ['name'] def test_create_vuln_with_custom_fields_shown(self, test_client, second_workspace, session): host = HostFactory.create(workspace=self.workspace) @@ -3039,7 +3631,6 @@ assert res.status_code == 400 @pytest.mark.usefixtures('ignore_nplusone') - @pytest.mark.skip(reason="To be reimplemented") def test_bulk_delete_vuln_id(self, host_with_hostnames, test_client, session): """ This one should only check 
@@ -3039,7 +3631,6 @@
         assert res.status_code == 400

     @pytest.mark.usefixtures('ignore_nplusone')
-    @pytest.mark.skip(reason="To be reimplemented")
     def test_bulk_delete_vuln_id(self, host_with_hostnames, test_client, session):
         """
         This one should only check basic vuln properties
@@ -3073,13 +3664,13 @@
         vuln_count_previous = session.query(Vulnerability).count()
         res_1 = test_client.post(f'/v3/ws/{ws_name}/vulns', data=raw_data_vuln_1)
         res_2 = test_client.post(f'/v3/ws/{ws_name}/vulns', data=raw_data_vuln_2)
-        vuln_1_id = res_1.json['obj_id']
-        vuln_2_id = res_2.json['obj_id']
+        vuln_1_id = int(res_1.json['obj_id'])
+        vuln_2_id = int(res_2.json['obj_id'])
         vulns_to_delete = [vuln_1_id, vuln_2_id]
-        request_data = {'vulnerability_ids': vulns_to_delete}
-        delete_response = test_client.delete(f'/v3/ws/{ws_name}/vulns/bulk_delete', data=request_data)
+        request_data = {'ids': vulns_to_delete}
+        delete_response = test_client.delete(f'/v3/ws/{ws_name}/vulns', data=request_data)
         vuln_count_after = session.query(Vulnerability).count()
-        deleted_vulns = delete_response.json['deleted_vulns']
+        deleted_vulns = delete_response.json['deleted']
         assert delete_response.status_code == 200
         assert vuln_count_previous == vuln_count_after
         assert deleted_vulns == len(vulns_to_delete)
@@ -3232,6 +3823,30 @@
         assert cmd_obj.object_id == res.json['_id']
         assert res.json['tool'] == command.tool

+    def test_custom_field_cvss(self, session, test_client):
+        add_text_field = CustomFieldsSchemaFactory.create(
+            table_name='vulnerability',
+            field_name='cvss',
+            field_type='text',
+            field_display_name='CVSS',
+        )
+        session.add(add_text_field)
+        session.commit()
+
+    # @pytest.mark.usefixtures('ignore_nplusone')
+    def test_bulk_delete_by_severity(self, test_client):
+        all_objs = self.model.query.all()
+        for obj in all_objs[0:2]:
+            obj.severity = 'low'  # Factory just uses "critical" or "high"
+
+        data = {"severities": ["low"]}
+        res = test_client.delete(self.url(), data=data)
+        assert res.status_code == 200
+        assert all([was_deleted(obj) for obj in all_objs[0:2]])
+        assert res.json['deleted'] == 2
+        assert all([not was_deleted(obj) for obj in all_objs[2:]])
+        assert self.model.query.count() == 3
+
     @pytest.mark.parametrize('refs', [
         ('owasp', 'https://www.owasp.org/index.php/XSS_%28Cross_Site_Scripting%29_Prevention_Cheat_Sheet'),
         ('cwe', 'CWE-135'),
@@ -3258,25 +3873,6 @@
         assert ref_name in get_response.json
         assert 1 == len(get_response.json[ref_name])
         assert ref_example == get_response.json[ref_name][0]
-
-
-@pytest.mark.usefixtures('logged_user')
-class TestVulnerabilityCustomFields(ReadWriteAPITests):
-    model = Vulnerability
-    factory = factories.VulnerabilityFactory
-    api_endpoint = 'vulns'
-    view_class = VulnerabilityView
-    patchable_fields = ['description']
-
-    def test_custom_field_cvss(self, session, test_client):
-        add_text_field = CustomFieldsSchemaFactory.create(
-            table_name='vulnerability',
-            field_name='cvss',
-            field_type='text',
-            field_display_name='CVSS',
-        )
-        session.add(add_text_field)
-        session.commit()


 @pytest.mark.usefixtures('logged_user')
@@ -3433,7 +4029,7 @@
         session.add(host)
         session.commit()
         paginated_vulns = set()
-        expected_vulns = set([vuln.id for vuln in vulns])
+        expected_vulns = {vuln.id for vuln in vulns}
         for offset in range(0, 10):
             query_filter = {
                 "filters": [{"name": "severity", "op": "eq", "val": "high"}],
@@ -3472,7 +4068,7 @@
         session.add(host)
         session.commit()
         paginated_vulns = set()
-        expected_vulns = set([vuln.id for vuln in med_vulns])
+        expected_vulns = {vuln.id for vuln in med_vulns}
         for offset in range(0, 10):
             query_filter = {
                 "filters": [{"name": "severity", "op": "eq", "val": "medium"}],
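For contrast with the hunks above, the reworked delete flow: the dedicated /bulk_delete route and its 'vulnerability_ids'/'deleted_vulns' keys are gone in favor of DELETE on the collection itself. A sketch (workspace and ids illustrative):

# old: DELETE /v3/ws/demo/vulns/bulk_delete  {'vulnerability_ids': [...]}
# new: DELETE /v3/ws/demo/vulns              {'ids': [101, 102]}
#      -> 200 with {'deleted': 2}
# new: DELETE /v3/ws/demo/vulns              {'severities': ['low']}
#      -> 200 with {'deleted': <count of rows matching that severity>}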
diff --git a/tests/test_api_vulnerability_template.py b/tests/test_api_vulnerability_template.py
index b2c1687..6b35ce2 100644
--- a/tests/test_api_vulnerability_template.py
+++ b/tests/test_api_vulnerability_template.py
@@ -1,4 +1,3 @@
-# -*- coding: utf8 -*-
 '''
 Faraday Penetration Test IDE
 Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/)
@@ -12,7 +11,7 @@
 from faraday.server.api.modules.vulnerability_template import VulnerabilityTemplateView
 from tests import factories
 from tests.test_api_non_workspaced_base import (
-    ReadWriteAPITests
+    ReadWriteAPITests, BulkUpdateTestsMixin, BulkDeleteTestsMixin
 )
 from faraday.server.models import (
     VulnerabilityTemplate,
@@ -44,7 +43,7 @@


 @pytest.mark.usefixtures('logged_user')
-class TestListVulnerabilityTemplateView(ReadWriteAPITests):
+class TestListVulnerabilityTemplateView(ReadWriteAPITests, BulkUpdateTestsMixin, BulkDeleteTestsMixin):
     model = VulnerabilityTemplate
     factory = factories.VulnerabilityTemplateFactory
     api_endpoint = 'vulnerability_template'
@@ -58,18 +57,18 @@
         assert res.status_code == 200
         assert 'rows' in res.json
         for vuln in res.json['rows']:
-            assert set([u'id', u'key', u'value', u'doc']) == set(vuln.keys())
+            assert {'id', 'key', 'value', 'doc'} == set(vuln.keys())

         object_properties = [
-            u'exploitation',
-            u'references',
-            u'refs',
-            u'name',
-            u'cwe',
-            u'_rev',
-            u'_id',
-            u'resolution',
-            u'description',
-            u'desc'
+            'exploitation',
+            'references',
+            'refs',
+            'name',
+            'cwe',
+            '_rev',
+            '_id',
+            'resolution',
+            'description',
+            'desc'
         ]

         expected = set(object_properties)
@@ -254,7 +253,7 @@
         assert updated_template.severity == raw_data['exploitation']
         assert updated_template.resolution == raw_data['resolution']
         assert updated_template.description == raw_data['description']
-        assert updated_template.references == set([])
+        assert updated_template.references == set()

     @pytest.mark.parametrize('references', [
         ',',
@@ -274,7 +273,7 @@

     def test_update_vulnerabiliy_template_change_refs(self, session, test_client):
         template = self.factory.create()
-        for ref_name in set(['old1', 'old2']):
+        for ref_name in {'old1', 'old2'}:
             ref = ReferenceTemplateFactory.create(name=ref_name)
             self.first_object.reference_template_instances.add(ref)
         session.commit()
@@ -286,7 +285,7 @@
         assert updated_template.severity == raw_data['exploitation']
         assert updated_template.resolution == raw_data['resolution']
         assert updated_template.description == raw_data['description']
-        assert updated_template.references == set([u'another_ref', u'new_ref'])
+        assert updated_template.references == {'another_ref', 'new_ref'}

     def test_create_new_vulnerability_template_with_references(self, session, test_client):
         vuln_count_previous = session.query(VulnerabilityTemplate).count()
@@ -294,10 +293,10 @@
         res = test_client.post('/v3/vulnerability_template', data=raw_data)
         assert res.status_code == 201
         assert isinstance(res.json['_id'], int)
-        assert set(res.json['refs']) == set(['ref1', 'ref2'])
+        assert set(res.json['refs']) == {'ref1', 'ref2'}
         assert vuln_count_previous + 1 == session.query(VulnerabilityTemplate).count()
         new_template = session.query(VulnerabilityTemplate).filter_by(id=res.json['_id']).first()
-        assert new_template.references == set([u'ref1', u'ref2'])
+        assert new_template.references == {'ref1', 'ref2'}

     def test_delete_vuln_template(self, session, test_client):
         template = self.factory.create()
@@ -372,7 +371,7 @@
         res = test_client.post(self.url(), data=raw_data)

         assert res.status_code == 201
-        assert res.json['customfields'] == {u'cvss': u'value'}
+        assert res.json['customfields'] == {'cvss': 'value'}

     def test_update_vuln_template_with_custom_fields(self, session, test_client):
@@ -404,10 +403,10 @@

         res = test_client.put(self.url(template.id), data=raw_data)
         assert res.status_code == 200
-        assert res.json['customfields'] == {u'cvss': u'updated value'}
+        assert res.json['customfields'] == {'cvss': 'updated value'}

         vuln_template = session.query(VulnerabilityTemplate).filter_by(id=template.id).first()
-        assert vuln_template.custom_fields == {u'cvss': u'updated value'}
+        assert vuln_template.custom_fields == {'cvss': 'updated value'}

     def test_add_vuln_template_from_csv(self, session, test_client, csrf_token):
         expected_created_vuln_template = 1
@@ -610,3 +609,19 @@

         assert res.json['vulns_with_conflict'][1][1] == vuln_2['name']
         assert len(res.json['vulns_created']) == 0
+
+    def test_bulk_delete_vulnerabilities_template(self, test_client, session):
+        previous_count = session.query(VulnerabilityTemplate).count()
+        vuln_template_1 = VulnerabilityTemplate(name='vuln_1', severity='high')
+        session.add(vuln_template_1)
+        vuln_template_2 = VulnerabilityTemplate(name='vuln_2', severity='high')
+        session.add(vuln_template_2)
+        vuln_template_3 = VulnerabilityTemplate(name='vuln_3', severity='high')
+        session.add(vuln_template_3)
+        session.commit()
+
+        data = {'ids': [vuln_template_1.id, vuln_template_2.id, vuln_template_3.id]}
+        res = test_client.delete(self.url(), data=data)
+        assert res.status_code == 200
+        assert res.json['deleted'] == 3
+        assert previous_count == session.query(VulnerabilityTemplate).count()
diff --git a/tests/test_api_workspace.py b/tests/test_api_workspace.py
index 4f14cd8..1fdd0a1 100644
--- a/tests/test_api_workspace.py
+++ b/tests/test_api_workspace.py
@@ -4,29 +4,31 @@
 See the file 'doc/LICENSE' for the license information
 '''
-
+from datetime import date
 import time
+from urllib.parse import urljoin
+
 import pytest
-from posixpath import join as urljoin
-
-from faraday.server.models import Workspace, Scope
+from posixpath import join
+
+from faraday.server.models import Workspace, Scope, SeveritiesHistogram
 from faraday.server.api.modules.workspaces import WorkspaceView
-from tests.test_api_non_workspaced_base import ReadWriteAPITests
+from tests.test_api_non_workspaced_base import ReadWriteAPITests, BulkDeleteTestsMixin
 from tests import factories


-class TestWorkspaceAPI(ReadWriteAPITests):
+class TestWorkspaceAPI(ReadWriteAPITests, BulkDeleteTestsMixin):

     model = Workspace
     factory = factories.WorkspaceFactory
     api_endpoint = 'ws'
     lookup_field = 'name'
     view_class = WorkspaceView
-    patchable_fields = ['name']
+    patchable_fields = ['description']

     @pytest.mark.usefixtures('ignore_nplusone')
     def test_filter_restless_by_name(self, test_client):
         res = test_client.get(
-            urljoin(
+            join(
                 self.url(),
                 f'filter?q={{"filters":[{{"name": "name", "op":"eq", "val": "{self.first_object.name}"}}]}}'
             )
@@ -38,7 +40,7 @@
     @pytest.mark.usefixtures('ignore_nplusone')
     def test_filter_restless_by_name_zero_results_found(self, test_client):
         res = test_client.get(
-            urljoin(
+            join(
                 self.url(),
                 'filter?q={"filters":[{"name": "name", "op":"eq", "val": "thiswsdoesnotexist"}]}'
             )
@@ -49,7 +51,7 @@
     def test_filter_restless_by_description(self, test_client):
         self.first_object.description = "this is a new description"
         res = test_client.get(
-            urljoin(
+            join(
                 self.url(),
                 f'filter?q={{"filters":[{{"name": "description", "op":"eq", "val": "{self.first_object.description}"}}'
                 ']}'
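These filter endpoints embed a JSON document in the q query parameter; hand-writing it inside f-strings gets brittle once values need escaping. A sketch of building it programmatically (a possible helper pattern, not part of the diff):

import json
from urllib.parse import urlencode

filters = {"filters": [{"name": "description", "op": "eq", "val": "this is a new description"}]}
query = urlencode({"q": json.dumps(filters)})  # percent-escapes braces and spaces
# test_client.get(join(self.url(), f'filter?{query}'))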
"val": "{self.first_object.description}"}}' ']}' @@ -136,7 +138,7 @@ session.add_all(vulns) session.commit() - res = test_client.get(self.url(self.first_object) + querystring) + res = test_client.get(urljoin(self.url(self.first_object), querystring)) assert res.status_code == 200 assert res.json['stats']['code_vulns'] == 0 assert res.json['stats']['web_vulns'] == 2 @@ -148,6 +150,94 @@ assert res.json['stats']['opened_vulns'] == 10 assert res.json['stats']['confirmed_vulns'] == 2 + @pytest.mark.skip_sql_dialect('sqlite') + @pytest.mark.usefixtures('ignore_nplusone') + def test_histogram(self, + vulnerability_factory, + vulnerability_web_factory, + second_workspace, + test_client, + session): + + session.query(SeveritiesHistogram).delete() + session.commit() + + vulns = vulnerability_factory.create_batch(8, workspace=self.first_object, + confirmed=False, status='open', severity='critical') + + vulns += vulnerability_factory.create_batch(3, workspace=self.first_object, + confirmed=True, status='open', severity='high') + + vulns += vulnerability_web_factory.create_batch(2, workspace=second_workspace, + confirmed=True, status='open', severity='medium') + + vulns += vulnerability_web_factory.create_batch(2, workspace=second_workspace, + confirmed=True, status='open', severity='low') + + session.add_all(vulns) + session.commit() + res = test_client.get('/v3/ws?histogram=true') + assert res.status_code == 200 + firs_ws = [ws['histogram'] for ws in res.json if ws['name'] == self.first_object.name] + assert len(firs_ws[0]) == 20 + ws_histogram = firs_ws[0] + for ws_date in ws_histogram: + if ws_date['date'] == date.today().strftime("%Y-%m-%d"): + assert ws_date['medium'] == 0 + assert ws_date['high'] == 3 + assert ws_date['critical'] == 8 + assert ws_date['confirmed'] == 3 + else: + assert ws_date['medium'] == 0 + assert ws_date['high'] == 0 + assert ws_date['critical'] == 0 + assert ws_date['confirmed'] == 0 + + second_ws = [ws['histogram'] for ws in res.json if ws['name'] == second_workspace.name] + assert len(second_ws[0]) == 20 + ws_histogram = second_ws[0] + for ws_date in ws_histogram: + if ws_date['date'] == date.today().strftime("%Y-%m-%d"): + assert ws_date['medium'] == 2 + assert ws_date['high'] == 0 + assert ws_date['critical'] == 0 + assert ws_date['confirmed'] == 2 + else: + assert ws_date['medium'] == 0 + assert ws_date['high'] == 0 + assert ws_date['critical'] == 0 + assert ws_date['confirmed'] == 0 + + res = test_client.get('/v3/ws?histogram=True&histogram_days=a') + assert res.status_code == 200 + firs_ws = [ws['histogram'] for ws in res.json if ws['name'] == self.first_object.name] + assert len(firs_ws[0]) == 20 + + res = test_client.get('/v3/ws?histogram=true&histogram_days=[asdf, "adsf"]') + assert res.status_code == 200 + firs_ws = [ws['histogram'] for ws in res.json if ws['name'] == self.first_object.name] + assert len(firs_ws[0]) == 20 + + res = test_client.get('/v3/ws?histogram=true&histogram_days=[asdf, "adsf"]') + assert res.status_code == 200 + firs_ws = [ws['histogram'] for ws in res.json if ws['name'] == self.first_object.name] + assert len(firs_ws[0]) == 20 + + res = test_client.get('/v3/ws?histogram=true&histogram_days=5') + assert res.status_code == 200 + firs_ws = [ws['histogram'] for ws in res.json if ws['name'] == self.first_object.name] + assert len(firs_ws[0]) == 5 + + res = test_client.get('/v3/ws?histogram=true&histogram_days=365') + assert res.status_code == 200 + firs_ws = [ws['histogram'] for ws in res.json if ws['name'] == self.first_object.name] + assert 
+        res = test_client.get('/v3/ws?histogram=true&histogram_days=365')
+        assert res.status_code == 200
+        first_ws = [ws['histogram'] for ws in res.json if ws['name'] == self.first_object.name]
+        assert len(first_ws[0]) == 365
+
+        res = test_client.get('/v3/ws?histogram=asdf&histogram_days=365')
+        assert res.status_code == 200
+        for ws in res.json:
+            assert 'histogram' not in ws
+
     @pytest.mark.parametrize('querystring', [
         '?status=closed'
     ])
@@ -168,7 +258,7 @@
         session.add_all(vulns)
         session.commit()

-        res = test_client.get(self.url(self.first_object) + querystring)
+        res = test_client.get(urljoin(self.url(self.first_object), querystring))
         assert res.status_code == 200
         assert res.json['stats']['code_vulns'] == 0
         assert res.json['stats']['web_vulns'] == 0
@@ -202,7 +292,7 @@
         session.add_all(vulns)
         session.commit()

-        res = test_client.get(self.url(self.first_object) + querystring)
+        res = test_client.get(urljoin(self.url(self.first_object), querystring))
         assert res.status_code == 200
         assert res.json['stats']['code_vulns'] == 0
         assert res.json['stats']['web_vulns'] == 2
@@ -224,7 +314,7 @@
                                               confirmed=True)
         session.add_all(vulns)
         session.commit()
-        res = test_client.get(self.url(self.first_object) + querystring)
+        res = test_client.get(urljoin(self.url(self.first_object), querystring))
         assert res.status_code == 200
         assert res.json['stats']['total_vulns'] == 5
@@ -355,7 +445,7 @@
         assert res.status_code == 201
         assert set(res.json['scope']) == set(desired_scope)
         workspace = Workspace.query.get(res.json['id'])
-        assert set(s.name for s in workspace.scope) == set(desired_scope)
+        assert {s.name for s in workspace.scope} == set(desired_scope)

     def test_update_with_scope(self, session, test_client, workspace):
         session.add(Scope(name='test.com', workspace=workspace))
@@ -369,11 +459,11 @@
         res = test_client.put(self.url(obj=workspace), data=raw_data)
         assert res.status_code == 200
         assert set(res.json['scope']) == set(desired_scope)
-        assert set(s.name for s in workspace.scope) == set(desired_scope)
-
-    @pytest.mark.skip  # TODO fix fox sqlite
-    def test_list_retrieves_all_items_from(self, test_client):
-        super().test_list_retrieves_all_items_from(test_client)
+        assert {s.name for s in workspace.scope} == set(desired_scope)
+
+    @pytest.mark.skip_sql_dialect('sqlite')
+    def test_list_retrieves_all_items_from(self, test_client, logged_user):
+        super().test_list_retrieves_all_items_from(test_client, logged_user)

     def test_workspace_activation(self, test_client, workspace, session):
         workspace.active = False
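Read together, the histogram assertions above pin down the API contract; a condensed sketch (workspace name and dates illustrative):

# GET /v3/ws?histogram=true                    -> each workspace gains a 'histogram'
#     key with one entry per day, 20 days by default
# GET /v3/ws?histogram=true&histogram_days=365 -> 365 entries
# GET /v3/ws?histogram=true&histogram_days=a   -> invalid value, falls back to 20
# GET /v3/ws?histogram=asdf                    -> anything but 'true': no 'histogram' key
# Entry shape, per the assertions:
#   {'date': '2021-11-01', 'critical': 8, 'high': 3, 'medium': 0, 'confirmed': 3}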
diff --git a/tests/test_api_workspaced_base.py b/tests/test_api_workspaced_base.py
index b0644c3..6af9bae 100644
--- a/tests/test_api_workspaced_base.py
+++ b/tests/test_api_workspaced_base.py
@@ -1,12 +1,10 @@
-# -*- coding: utf8 -*-
 '''
 Faraday Penetration Test IDE
 Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/)
 See the file 'doc/LICENSE' for the license information
 '''
-from builtins import str
-from posixpath import join as urljoin
+from posixpath import join

 """Generic tests for APIs prefixed with a workspace_name"""

@@ -50,11 +48,11 @@

     def url(self, obj=None, workspace=None):
         workspace = workspace or self.workspace
-        url = API_PREFIX + workspace.name + '/' + self.api_endpoint
+        url = join(API_PREFIX + workspace.name, self.api_endpoint)
         if obj is not None:
             id_ = str(obj.id) if isinstance(
                 obj, self.model) else str(obj)
-            url += '/' + id_
+            url = join(url, id_)
         return url

@@ -103,7 +101,7 @@
         res = test_client.get(self.url(self.first_object, second_workspace))
         assert res.status_code == 404

-    @pytest.mark.parametrize('object_id', [123456789, -1, 'xxx', u'áá'])
+    @pytest.mark.parametrize('object_id', [123456789, -1, 'xxx', 'áá'])
     def test_404_when_retrieving_unexistent_object(self, test_client, object_id):
         url = self.url(object_id)
@@ -204,18 +202,17 @@
     def test_update_an_object_readonly_fails(self, test_client, method):
         self.workspace.readonly = True
         db.session.commit()
-        for unique_field in self.unique_fields:
-            data = self.factory.build_dict()
-            old_field = getattr(self.objects[0], unique_field)
-            old_id = getattr(self.objects[0], 'id')
-            if method == "PUT":
-                res = test_client.put(self.url(self.first_object), data=data)
-            elif method == "PATCH":
-                res = test_client.patch(self.url(self.first_object), data=data)
-            db.session.commit()
-            assert res.status_code == 403
-            assert self.model.query.count() == OBJECT_COUNT
-            assert old_field == getattr(self.model.query.filter(self.model.id == old_id).one(), unique_field)
+
+        data = self.factory.build_dict(workspace=self.workspace)
+        count = self.model.query.count()
+        if method == "PUT":
+            res = test_client.put(self.url(self.first_object),
+                                  data=data)
+        elif method == "PATCH":
+            res = test_client.patch(self.url(self.first_object),
+                                    data=data)
+        assert res.status_code == 403
+        assert self.model.query.count() == count

     @pytest.mark.parametrize("method", ["PUT", "PATCH"])
     def test_update_inactive_fails(self, test_client, method):
@@ -272,6 +269,134 @@
         assert object_id == expected_id


+@pytest.mark.usefixtures('logged_user')
+class BulkUpdateTestsMixin:
+
+    @staticmethod
+    def control_data(test_suite, data: dict) -> dict:
+        return {
+            key: value for (key, value) in data.items()
+            if key in UpdateTestsMixin.control_patcheable_data(test_suite, data)
+            and key not in test_suite.unique_fields
+        }
+
+    def get_all_objs_and_ids(self):
+        all_objs = self.model.query.all()
+        all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs]
+        return all_objs, all_objs_id
+
+    def test_bulk_update_an_object(self, test_client, logged_user):
+        all_objs = self.model.query.all()
+        all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in self.model.query.all()]
+        all_objs, all_objs_id = all_objs[:-1], all_objs_id[:-1]
+
+        data = self.factory.build_dict(workspace=self.workspace)
+        data = self.control_cant_change_data(data)
+        count = self.model.query.count()
+        data = BulkUpdateTestsMixin.control_data(self, data)
+
+        res = test_client.patch(self.url(), data={})
+        assert res.status_code == 400
+        data["ids"] = all_objs_id
+        res = test_client.patch(self.url(), data=data)
+
+        assert res.status_code == 200, (res.status_code, res.json)
+        assert self.model.query.count() == count
+        assert res.json['updated'] == len(all_objs)
+        for obj in self.model.query.all():
+            if getattr(obj, self.view_class.lookup_field) not in all_objs_id:
+                assert any(
+                    [
+                        data[updated_field] != getattr(obj, updated_field)
+                        for updated_field in data if updated_field != 'ids'
+                    ]
+                )
+            else:
+                assert all(
+                    [
+                        data[updated_field] == getattr(obj, updated_field)
+                        for updated_field in data if updated_field != 'ids'
+                    ]
+                )
+
+    def test_bulk_update_an_object_readonly_fails(self, test_client):
+        self.workspace.readonly = True
+        db.session.commit()
+        all_objs, all_objs_id = self.get_all_objs_and_ids()
+        data = self.factory.build_dict(workspace=self.workspace)
+        data = self.control_cant_change_data(data)
+        data = BulkUpdateTestsMixin.control_data(self, data)
+        count = self.model.query.count()
+        data["ids"] = all_objs_id
+        res = test_client.patch(self.url(), data=data)
+        assert res.status_code == 403
+        assert self.model.query.count() == count
+
+    def test_bulk_update_inactive_fails(self, test_client):
+        self.workspace.deactivate()
+        db.session.commit()
+        all_objs, all_objs_id = self.get_all_objs_and_ids()
+        data = self.factory.build_dict(workspace=self.workspace)
+        data = self.control_cant_change_data(data)
+        data = BulkUpdateTestsMixin.control_data(self, data)
+        count = self.model.query.count()
+        data["ids"] = all_objs_id
+        res = test_client.patch(self.url(), data=data)
+        assert res.status_code == 403
+        assert self.model.query.count() == count
+
+    @pytest.mark.parametrize('existing', (True, False))
+    def test_bulk_update_fails_with_repeated_unique(self, test_client, session, existing):
+        for unique_field in self.unique_fields:
+            data = self.factory.build_dict()
+            if existing:
+                data[unique_field] = getattr(self.objects[3], unique_field)
+                data["ids"] = [getattr(self.objects[0], self.view_class.lookup_field)]
+            else:
+                data["ids"] = [getattr(self.objects[i], self.view_class.lookup_field) for i in range(0, 2)]
+            res = test_client.patch(self.url(), data=data)
+            assert res.status_code == 409
+            assert self.model.query.count() == OBJECT_COUNT
+
+    def test_bulk_update_cant_change_id(self, test_client):
+        raw_json = self.factory.build_dict(workspace=self.workspace)
+        raw_json = self.control_cant_change_data(raw_json)
+        raw_json['id'] = 100000
+        expected_id = self.first_object.id
+        raw_json["ids"] = [expected_id]
+        res = test_client.patch(self.url(), data=raw_json)
+        assert res.status_code == 200, (res.status_code, res.data)
+        assert self.model.query.filter(self.model.id == 100000).first() is None
+
+    def test_patch_bulk_update_an_object_does_not_fail_with_partial_data(self, test_client, logged_user):
+        """To do this the user should use a PATCH request"""
+        all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in self.model.query.all()]
+        res = test_client.patch(self.url(), data={"ids": all_objs_id})
+        assert res.status_code == 200, (res.status_code, res.json)
+
+    def test_bulk_update_invalid_ids(self, test_client):
+        data = self.factory.build_dict(workspace=self.workspace)
+        data = BulkUpdateTestsMixin.control_data(self, data)
+        data['ids'] = [-1, 'test']
+        res = test_client.patch(self.url(), data=data)
+        assert res.status_code == 200
+        assert res.json['updated'] == 0
+        data['ids'] = [-1, 'test', self.first_object.__getattribute__(self.view_class.lookup_field)]
+        res = test_client.patch(self.url(), data=data)
+        assert res.status_code == 200
+        assert res.json['updated'] == 1
+
+    def test_bulk_update_wrong_content_type(self, test_client):
+        all_objs = self.model.query.all()
+        all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs]
+
+        request_data = {'ids': all_objs_id}
+        headers = [('content-type', 'text/xml')]
+
+        res = test_client.patch(self.url(), data=request_data, headers=headers)
+        assert res.status_code == 400
+
+
 class CountTestsMixin:
     def test_count(self, test_client, session, user_factory):

@@ -288,7 +413,7 @@

         session.commit()

-        res = test_client.get(urljoin(self.url(), "count?group_by=creator_id"))
+        res = test_client.get(join(self.url(), "count?group_by=creator_id"))

         assert res.status_code == 200, res.json

         res = res.get_json()
@@ -318,7 +443,7 @@

         session.commit()

-        res = test_client.get(urljoin(self.url(), "count?group_by=creator_id&order=desc"))
+        res = test_client.get(join(self.url(), "count?group_by=creator_id&order=desc"))

         assert res.status_code == 200, res.json

         res = res.get_json()
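For reference, this is how a concrete suite is expected to opt in to the generic bulk tests (Foo names are placeholders, and BulkDeleteTestsMixin is introduced in the next hunk; assumes this module's imports):

@pytest.mark.usefixtures('logged_user')
class TestFooAPI(ReadWriteAPITests, BulkUpdateTestsMixin, BulkDeleteTestsMixin):
    model = Foo                  # hypothetical SQLAlchemy model
    factory = FooFactory         # hypothetical factory_boy factory
    api_endpoint = 'foo'
    view_class = FooView         # hypothetical Flask view
    patchable_fields = ['name']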
@@ -367,6 +492,82 @@
         assert self.model.query.count() == OBJECT_COUNT


+@pytest.mark.usefixtures('logged_user')
+class BulkDeleteTestsMixin:
+
+    def get_all_objs_and_ids(self):
+        all_objs = self.model.query.all()
+        all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs]
+        return all_objs, all_objs_id
+
+    @pytest.mark.usefixtures('ignore_nplusone')
+    def test_bulk_delete(self, test_client):
+        all_objs, all_objs_id = self.get_all_objs_and_ids()
+        ignored_obj = all_objs[-1]
+        all_objs, all_objs_id = all_objs[:-1], all_objs_id[:-1]
+
+        res = test_client.delete(self.url(), data={})
+        assert res.status_code == 400
+        data = {"ids": all_objs_id}
+        res = test_client.delete(self.url(), data=data)
+        assert res.status_code == 200
+        assert all([was_deleted(obj) for obj in all_objs])
+        assert res.json['deleted'] == len(all_objs)
+        assert not was_deleted(ignored_obj)
+        assert self.model.query.count() == 1
+
+    def test_bulk_delete_invalid_ids(self, test_client):
+        request_data = {'ids': [-1, 'test']}
+        count = self.model.query.count()
+        res = test_client.delete(self.url(), data=request_data)
+        assert res.status_code == 200
+        assert res.json['deleted'] == 0
+        assert self.model.query.count() == count
+
+    def test_bulk_delete_wrong_content_type(self, test_client):
+        all_objs = self.model.query.all()
+        all_objs_id = [obj.__getattribute__(self.view_class.lookup_field) for obj in all_objs]
+        count = self.model.query.count()
+
+        request_data = {'ids': all_objs_id}
+        headers = [('content-type', 'text/xml')]
+
+        res = test_client.delete(self.url(), data=request_data, headers=headers)
+        assert res.status_code == 400
+        assert self.model.query.count() == count
+        assert all([not was_deleted(obj) for obj in all_objs])
+
+    def test_bulk_delete_readonly_fails(self, test_client, session):
+        self.workspace.readonly = True
+        session.commit()
+        all_objs, all_objs_id = self.get_all_objs_and_ids()
+        data = {"ids": all_objs_id}
+        res = test_client.delete(self.url(), data=data)
+        assert res.status_code == 403  # Forbidden
+        assert not any([was_deleted(obj) for obj in all_objs])
+        assert self.model.query.count() == OBJECT_COUNT
+
+    def test_delete_inactive_fails(self, test_client):
+        self.workspace.deactivate()
+        db.session.commit()
+        all_objs, all_objs_id = self.get_all_objs_and_ids()
+        data = {"ids": all_objs_id}
+        res = test_client.delete(self.url(), data=data)
+        assert res.status_code == 403  # Forbidden
+        assert not any([was_deleted(obj) for obj in all_objs])
+        assert self.model.query.count() == OBJECT_COUNT
+
+    def test_delete_from_other_workspace_fails(self, test_client):
+        all_objs, all_objs_id = self.get_all_objs_and_ids()
+
+        data = {"ids": all_objs_id + [10000000]}
+        res = test_client.delete(self.url(), data=data)
+        assert res.status_code == 200
+        assert all([was_deleted(obj) for obj in all_objs])
+        assert res.json['deleted'] == len(all_objs)
+        assert self.model.query.count() == 0
+
+
 class PaginationTestsMixin(OriginalPaginationTestsMixin):
     def create_many_objects(self, session, n):
         objects = self.factory.create_batch(n, workspace=self.workspace)
diff --git a/tests/test_faraday_manage.py b/tests/test_faraday_manage.py
index 3a5c34a..435a356 100644
--- a/tests/test_faraday_manage.py
+++ b/tests/test_faraday_manage.py
@@ -1,4 +1,3 @@
-
 import os
 import subprocess

@@ -43,6 +42,3 @@
         print(std)
         print(err)
         assert subproc.returncode == 0, ('manage migrate failed!', std, err)
-
-
-# I'm Py3
diff --git a/tests/test_model_events.py b/tests/test_model_events.py
index c752646..9839aea 100644
--- a/tests/test_model_events.py
+++ b/tests/test_model_events.py
@@ -68,6 +68,3 @@

         with pytest.raises(AssertionError):
             session.commit()
-
-
-# I'm Py3
diff --git a/tests/test_python_config_imports.py b/tests/test_python_config_imports.py
index a814618..21f2e33 100644
--- a/tests/test_python_config_imports.py
+++ b/tests/test_python_config_imports.py
@@ -1,4 +1,3 @@
-
 import unittest

diff --git a/tests/test_searcher.py b/tests/test_searcher.py
index 69acee5..fc13e69 100644
--- a/tests/test_searcher.py
+++ b/tests/test_searcher.py
@@ -1,4 +1,3 @@
-
 import json

 import pytest
diff --git a/tests/test_server.py b/tests/test_server.py
index 0cf070a..902ce7d 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -91,5 +91,3 @@

 if __name__ == '__main__':
     unittest.main()
-
-# I'm Py3
diff --git a/tests/test_utils_database.py b/tests/test_utils_database.py
index 61dda92..7340150 100644
--- a/tests/test_utils_database.py
+++ b/tests/test_utils_database.py
@@ -18,9 +18,9 @@
 )

 UNIQUE_FIELDS = {
-    License: [u'product', u'start_date', u'end_date'],
-    Service: [u'port', u'protocol', u'host_id', u'workspace_id'],
-    Host: [u'ip', u'workspace_id'],
+    License: ['product', 'start_date', 'end_date'],
+    Service: ['port', 'protocol', 'host_id', 'workspace_id'],
+    Host: ['ip', 'workspace_id'],
     Vulnerability: [
         'name',
         'description',
@@ -69,5 +69,3 @@
     unique_constraints = get_unique_fields(session, object_)
     for unique_constraint in unique_constraints:
         assert unique_constraint == expected_unique_fields
-
-# I'm Py3
diff --git a/tests/test_websocket_BroadcastServerProtocol.py b/tests/test_websocket_BroadcastServerProtocol.py
index 2a911c9..727ae2a 100644
--- a/tests/test_websocket_BroadcastServerProtocol.py
+++ b/tests/test_websocket_BroadcastServerProtocol.py
@@ -1,4 +1,3 @@
-
 import pytest
 from faraday.server.models import Agent, Executor
 from faraday.server.websocket_factories import WorkspaceServerFactory, \